From 07c2ef54276470bf6be087eb4e0ad2f1c76e4f1b Mon Sep 17 00:00:00 2001 From: Mark Bolwell Date: Thu, 5 Sep 2024 08:36:16 +0100 Subject: [PATCH] Initial Signed-off-by: Mark Bolwell --- .ansible-lint | 23 + .config/.secrets.baseline | 119 ++ .gitattributes | 20 + .gitignore | 45 + .pre-commit-config.yaml | 64 + .yamllint | 32 + CONTRIBUTING.rst | 69 + Changelog.md | 5 + LICENSE | 21 + README.md | 121 ++ collections/requirements.yml | 14 + defaults/main.yml | 1284 +++++++++++++++++ handlers/main.yml | 193 +++ meta/main.yml | 28 + site.yml | 8 + tasks/LE_audit_setup.yml | 32 + tasks/audit_only.yml | 30 + tasks/auditd.yml | 29 + tasks/main.yml | 234 +++ tasks/parse_etc_password.yml | 33 + tasks/post_remediation_audit.yml | 44 + tasks/pre_remediation_audit.yml | 120 ++ tasks/prelim.yml | 319 ++++ tasks/section_1/cis_1.1.1.x.yml | 356 +++++ tasks/section_1/cis_1.1.2.1.x.yml | 84 ++ tasks/section_1/cis_1.1.2.2.x.yml | 58 + tasks/section_1/cis_1.1.2.3.x.yml | 55 + tasks/section_1/cis_1.1.2.4.x.yml | 55 + tasks/section_1/cis_1.1.2.5.x.yml | 59 + tasks/section_1/cis_1.1.2.6.x.yml | 58 + tasks/section_1/cis_1.1.2.7.x.yml | 58 + tasks/section_1/cis_1.2.1.x.yml | 65 + tasks/section_1/cis_1.2.2.x.yml | 15 + tasks/section_1/cis_1.3.1.x.yml | 173 +++ tasks/section_1/cis_1.4.x.yml | 57 + tasks/section_1/cis_1.5.x.yml | 147 ++ tasks/section_1/cis_1.6.x.yml | 129 ++ tasks/section_1/cis_1.7.x.yml | 337 +++++ tasks/section_1/main.yml | 75 + tasks/section_2/cis_2.1.x.yml | 768 ++++++++++ tasks/section_2/cis_2.2.x.yml | 99 ++ tasks/section_2/cis_2.3.1.x.yml | 40 + tasks/section_2/cis_2.3.2.x.yml | 65 + tasks/section_2/cis_2.3.3.x.yml | 79 + tasks/section_2/cis_2.4.1.x.yml | 166 +++ tasks/section_2/cis_2.4.2.x.yml | 40 + tasks/section_2/main.yml | 33 + tasks/section_3/cis_3.1.x.yml | 112 ++ tasks/section_3/cis_3.2.x.yml | 121 ++ tasks/section_3/cis_3.3.x.yml | 352 +++++ tasks/section_3/main.yml | 13 + tasks/section_4/cis_4.1.1.yml | 43 + tasks/section_4/cis_4.2.x.yml | 186 +++ 
tasks/section_4/cis_4.3.x.yml | 247 ++++ tasks/section_4/cis_4.4.1.x.yml | 412 ++++++ tasks/section_4/cis_4.4.2.x.yml | 188 +++ tasks/section_4/cis_4.4.3.x.yml | 185 +++ tasks/section_4/main.yml | 32 + tasks/section_5/cis_5.1.x.yml | 508 +++++++ tasks/section_5/cis_5.2.x.yml | 148 ++ tasks/section_5/cis_5.3.1.x.yml | 48 + tasks/section_5/cis_5.3.2.x.yml | 92 ++ tasks/section_5/cis_5.3.3.1.x.yml | 106 ++ tasks/section_5/cis_5.3.3.2.x.yml | 235 +++ tasks/section_5/cis_5.3.3.3.x.yml | 82 ++ tasks/section_5/cis_5.3.3.4.x.yml | 105 ++ tasks/section_5/cis_5.4.1.x.yml | 208 +++ tasks/section_5/cis_5.4.2.x.yml | 244 ++++ tasks/section_5/cis_5.4.3.x.yml | 63 + tasks/section_5/main.yml | 55 + tasks/section_6/cis_6.1.1.x.yml | 101 ++ tasks/section_6/cis_6.1.2.x.yml | 155 ++ tasks/section_6/cis_6.1.3.8.yml | 31 + tasks/section_6/cis_6.1.3.x.yml | 201 +++ tasks/section_6/cis_6.1.4.1.yml | 40 + tasks/section_6/cis_6.2.1.x.yml | 113 ++ tasks/section_6/cis_6.2.2.x.yml | 75 + tasks/section_6/cis_6.2.3.x.yml | 301 ++++ tasks/section_6/cis_6.2.4.x.yml | 172 +++ tasks/section_6/cis_6.3.x.yml | 110 ++ tasks/section_6/main.yml | 44 + tasks/section_7/cis_7.1.x.yml | 326 +++++ tasks/section_7/cis_7.2.x.yml | 348 +++++ tasks/section_7/main.yml | 9 + tasks/warning_facts.yml | 20 + templates/ansible_vars_goss.yml.j2 | 743 ++++++++++ templates/audit/98_auditd_exception.rules.j2 | 8 + templates/audit/99_auditd.rules.j2 | 106 ++ templates/chrony.conf.j2 | 93 ++ .../etc/chrony/sources.d/pool.sources.j2 | 7 + .../etc/chrony/sources.d/server.sources.j2 | 7 + templates/etc/dconf/db/00-automount_lock.j2 | 9 + templates/etc/dconf/db/00-autorun_lock.j2 | 6 + templates/etc/dconf/db/00-media-automount.j2 | 7 + templates/etc/dconf/db/00-media-autorun.j2 | 6 + templates/etc/dconf/db/00-screensaver.j2 | 17 + templates/etc/dconf/db/00-screensaver_lock.j2 | 9 + templates/etc/grub.d/00_user.j2 | 8 + templates/etc/issue.j2 | 1 + templates/etc/issue.net.j2 | 1 + templates/etc/motd.j2 | 1 + 
.../pwquality.conf.d/50-pwcomplexity.conf.j2 | 7 + .../pwquality.conf.d/50-pwdictcheck.conf.j2 | 3 + .../pwquality.conf.d/50-pwdifok.conf.j2 | 3 + .../pwquality.conf.d/50-pwlength.conf.j2 | 3 + .../pwquality.conf.d/50-pwmaxsequence.conf.j2 | 3 + .../50-pwquality_enforce.conf.j2 | 3 + .../pwquality.conf.d/50-pwrepeat.conf.j2 | 3 + .../pwquality.conf.d/50-pwroot.conf.j2 | 3 + .../etc/sysctl.d/60-disable_ipv6.conf.j2 | 7 + .../journald.conf.d/forwardtosyslog.conf.j2 | 4 + .../systemd/journald.conf.d/rotation.conf.j2 | 8 + .../systemd/journald.conf.d/storage.conf.j2 | 13 + .../etc/systemd/system/aidecheck.service.j2 | 9 + .../etc/systemd/system/aidecheck.timer.j2 | 9 + templates/etc/systemd/system/tmp.mount.j2 | 17 + .../timesyncd.conf.d/50-timesyncd.conf.j2 | 9 + templates/fs_with_cves.sh.j2 | 58 + templates/usr/share/pam-configs/faillock.j2 | 6 + .../usr/share/pam-configs/faillock_notify.j2 | 9 + templates/usr/share/pam-configs/pam_unix.j2 | 23 + templates/usr/share/pam-configs/pwhistory.j2 | 6 + templates/usr/share/pam-configs/pwquality.j2 | 8 + vars/audit.yml | 41 + vars/is_container.yml | 104 ++ vars/main.yml | 14 + 126 files changed, 13191 insertions(+) create mode 100755 .ansible-lint create mode 100644 .config/.secrets.baseline create mode 100644 .gitattributes create mode 100644 .gitignore create mode 100644 .pre-commit-config.yaml create mode 100755 .yamllint create mode 100644 CONTRIBUTING.rst create mode 100644 Changelog.md create mode 100644 LICENSE create mode 100644 README.md create mode 100644 collections/requirements.yml create mode 100644 defaults/main.yml create mode 100644 handlers/main.yml create mode 100644 meta/main.yml create mode 100644 site.yml create mode 100644 tasks/LE_audit_setup.yml create mode 100644 tasks/audit_only.yml create mode 100644 tasks/auditd.yml create mode 100644 tasks/main.yml create mode 100644 tasks/parse_etc_password.yml create mode 100644 tasks/post_remediation_audit.yml create mode 100644 tasks/pre_remediation_audit.yml 
create mode 100644 tasks/prelim.yml create mode 100644 tasks/section_1/cis_1.1.1.x.yml create mode 100644 tasks/section_1/cis_1.1.2.1.x.yml create mode 100644 tasks/section_1/cis_1.1.2.2.x.yml create mode 100644 tasks/section_1/cis_1.1.2.3.x.yml create mode 100644 tasks/section_1/cis_1.1.2.4.x.yml create mode 100644 tasks/section_1/cis_1.1.2.5.x.yml create mode 100644 tasks/section_1/cis_1.1.2.6.x.yml create mode 100644 tasks/section_1/cis_1.1.2.7.x.yml create mode 100644 tasks/section_1/cis_1.2.1.x.yml create mode 100644 tasks/section_1/cis_1.2.2.x.yml create mode 100644 tasks/section_1/cis_1.3.1.x.yml create mode 100644 tasks/section_1/cis_1.4.x.yml create mode 100644 tasks/section_1/cis_1.5.x.yml create mode 100644 tasks/section_1/cis_1.6.x.yml create mode 100644 tasks/section_1/cis_1.7.x.yml create mode 100644 tasks/section_1/main.yml create mode 100644 tasks/section_2/cis_2.1.x.yml create mode 100644 tasks/section_2/cis_2.2.x.yml create mode 100644 tasks/section_2/cis_2.3.1.x.yml create mode 100644 tasks/section_2/cis_2.3.2.x.yml create mode 100644 tasks/section_2/cis_2.3.3.x.yml create mode 100644 tasks/section_2/cis_2.4.1.x.yml create mode 100644 tasks/section_2/cis_2.4.2.x.yml create mode 100644 tasks/section_2/main.yml create mode 100644 tasks/section_3/cis_3.1.x.yml create mode 100644 tasks/section_3/cis_3.2.x.yml create mode 100644 tasks/section_3/cis_3.3.x.yml create mode 100644 tasks/section_3/main.yml create mode 100644 tasks/section_4/cis_4.1.1.yml create mode 100644 tasks/section_4/cis_4.2.x.yml create mode 100644 tasks/section_4/cis_4.3.x.yml create mode 100644 tasks/section_4/cis_4.4.1.x.yml create mode 100644 tasks/section_4/cis_4.4.2.x.yml create mode 100644 tasks/section_4/cis_4.4.3.x.yml create mode 100644 tasks/section_4/main.yml create mode 100644 tasks/section_5/cis_5.1.x.yml create mode 100644 tasks/section_5/cis_5.2.x.yml create mode 100644 tasks/section_5/cis_5.3.1.x.yml create mode 100644 tasks/section_5/cis_5.3.2.x.yml create mode 
100644 tasks/section_5/cis_5.3.3.1.x.yml create mode 100644 tasks/section_5/cis_5.3.3.2.x.yml create mode 100644 tasks/section_5/cis_5.3.3.3.x.yml create mode 100644 tasks/section_5/cis_5.3.3.4.x.yml create mode 100644 tasks/section_5/cis_5.4.1.x.yml create mode 100644 tasks/section_5/cis_5.4.2.x.yml create mode 100644 tasks/section_5/cis_5.4.3.x.yml create mode 100644 tasks/section_5/main.yml create mode 100644 tasks/section_6/cis_6.1.1.x.yml create mode 100644 tasks/section_6/cis_6.1.2.x.yml create mode 100644 tasks/section_6/cis_6.1.3.8.yml create mode 100644 tasks/section_6/cis_6.1.3.x.yml create mode 100644 tasks/section_6/cis_6.1.4.1.yml create mode 100644 tasks/section_6/cis_6.2.1.x.yml create mode 100644 tasks/section_6/cis_6.2.2.x.yml create mode 100644 tasks/section_6/cis_6.2.3.x.yml create mode 100644 tasks/section_6/cis_6.2.4.x.yml create mode 100644 tasks/section_6/cis_6.3.x.yml create mode 100644 tasks/section_6/main.yml create mode 100644 tasks/section_7/cis_7.1.x.yml create mode 100644 tasks/section_7/cis_7.2.x.yml create mode 100644 tasks/section_7/main.yml create mode 100644 tasks/warning_facts.yml create mode 100644 templates/ansible_vars_goss.yml.j2 create mode 100644 templates/audit/98_auditd_exception.rules.j2 create mode 100644 templates/audit/99_auditd.rules.j2 create mode 100644 templates/chrony.conf.j2 create mode 100644 templates/etc/chrony/sources.d/pool.sources.j2 create mode 100644 templates/etc/chrony/sources.d/server.sources.j2 create mode 100644 templates/etc/dconf/db/00-automount_lock.j2 create mode 100644 templates/etc/dconf/db/00-autorun_lock.j2 create mode 100644 templates/etc/dconf/db/00-media-automount.j2 create mode 100644 templates/etc/dconf/db/00-media-autorun.j2 create mode 100644 templates/etc/dconf/db/00-screensaver.j2 create mode 100644 templates/etc/dconf/db/00-screensaver_lock.j2 create mode 100644 templates/etc/grub.d/00_user.j2 create mode 100644 templates/etc/issue.j2 create mode 100644 templates/etc/issue.net.j2 
create mode 100644 templates/etc/motd.j2 create mode 100644 templates/etc/security/pwquality.conf.d/50-pwcomplexity.conf.j2 create mode 100644 templates/etc/security/pwquality.conf.d/50-pwdictcheck.conf.j2 create mode 100644 templates/etc/security/pwquality.conf.d/50-pwdifok.conf.j2 create mode 100644 templates/etc/security/pwquality.conf.d/50-pwlength.conf.j2 create mode 100644 templates/etc/security/pwquality.conf.d/50-pwmaxsequence.conf.j2 create mode 100644 templates/etc/security/pwquality.conf.d/50-pwquality_enforce.conf.j2 create mode 100644 templates/etc/security/pwquality.conf.d/50-pwrepeat.conf.j2 create mode 100644 templates/etc/security/pwquality.conf.d/50-pwroot.conf.j2 create mode 100644 templates/etc/sysctl.d/60-disable_ipv6.conf.j2 create mode 100644 templates/etc/systemd/journald.conf.d/forwardtosyslog.conf.j2 create mode 100644 templates/etc/systemd/journald.conf.d/rotation.conf.j2 create mode 100644 templates/etc/systemd/journald.conf.d/storage.conf.j2 create mode 100644 templates/etc/systemd/system/aidecheck.service.j2 create mode 100644 templates/etc/systemd/system/aidecheck.timer.j2 create mode 100644 templates/etc/systemd/system/tmp.mount.j2 create mode 100644 templates/etc/systemd/timesyncd.conf.d/50-timesyncd.conf.j2 create mode 100644 templates/fs_with_cves.sh.j2 create mode 100644 templates/usr/share/pam-configs/faillock.j2 create mode 100644 templates/usr/share/pam-configs/faillock_notify.j2 create mode 100644 templates/usr/share/pam-configs/pam_unix.j2 create mode 100644 templates/usr/share/pam-configs/pwhistory.j2 create mode 100644 templates/usr/share/pam-configs/pwquality.j2 create mode 100644 vars/audit.yml create mode 100644 vars/is_container.yml create mode 100644 vars/main.yml diff --git a/.ansible-lint b/.ansible-lint new file mode 100755 index 0000000..c7095e2 --- /dev/null +++ b/.ansible-lint @@ -0,0 +1,23 @@ +--- + +parseable: true +quiet: true +skip_list: + - 'schema' + - 'no-changed-when' + - 'var-spacing' + - 'experimental' 
+ - 'name[play]' + - 'name[casing]' + - 'name[template]' + - 'key-order[task]' + - 'yaml[line-length]' + - '204' + - '305' + - '303' + - '403' + - '306' + - '602' + - '208' +use_default_rules: true +verbosity: 0 diff --git a/.config/.secrets.baseline b/.config/.secrets.baseline new file mode 100644 index 0000000..45fd996 --- /dev/null +++ b/.config/.secrets.baseline @@ -0,0 +1,119 @@ +{ + "version": "1.4.0", + "plugins_used": [ + { + "name": "ArtifactoryDetector" + }, + { + "name": "AWSKeyDetector" + }, + { + "name": "AzureStorageKeyDetector" + }, + { + "name": "Base64HighEntropyString", + "limit": 4.5 + }, + { + "name": "BasicAuthDetector" + }, + { + "name": "CloudantDetector" + }, + { + "name": "DiscordBotTokenDetector" + }, + { + "name": "GitHubTokenDetector" + }, + { + "name": "HexHighEntropyString", + "limit": 3.0 + }, + { + "name": "IbmCloudIamDetector" + }, + { + "name": "IbmCosHmacDetector" + }, + { + "name": "JwtTokenDetector" + }, + { + "name": "KeywordDetector", + "keyword_exclude": "" + }, + { + "name": "MailchimpDetector" + }, + { + "name": "NpmDetector" + }, + { + "name": "PrivateKeyDetector" + }, + { + "name": "SendGridDetector" + }, + { + "name": "SlackDetector" + }, + { + "name": "SoftlayerDetector" + }, + { + "name": "SquareOAuthDetector" + }, + { + "name": "StripeDetector" + }, + { + "name": "TwilioKeyDetector" + } + ], + "filters_used": [ + { + "path": "detect_secrets.filters.allowlist.is_line_allowlisted" + }, + { + "path": "detect_secrets.filters.common.is_ignored_due_to_verification_policies", + "min_level": 2 + }, + { + "path": "detect_secrets.filters.heuristic.is_indirect_reference" + }, + { + "path": "detect_secrets.filters.heuristic.is_likely_id_string" + }, + { + "path": "detect_secrets.filters.heuristic.is_lock_file" + }, + { + "path": "detect_secrets.filters.heuristic.is_not_alphanumeric_string" + }, + { + "path": "detect_secrets.filters.heuristic.is_potential_uuid" + }, + { + "path": 
"detect_secrets.filters.heuristic.is_prefixed_with_dollar_sign" + }, + { + "path": "detect_secrets.filters.heuristic.is_sequential_string" + }, + { + "path": "detect_secrets.filters.heuristic.is_swagger_file" + }, + { + "path": "detect_secrets.filters.heuristic.is_templated_secret" + }, + { + "path": "detect_secrets.filters.regex.should_exclude_file", + "pattern": [ + ".config/.gitleaks-report.json", + "tasks/parse_etc_password.yml" + ] + } + ], + "results": {}, + "generated_at": "2023-09-20T07:45:19Z" +} diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..2cc61e7 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,20 @@ +# https://docs.github.com/en/repositories/working-with-files/managing-files/customizing-how-changed-files-appear-on-github +# Default behaviour +* text=auto + +# https://docs.github.com/en/get-started/getting-started-with-git/configuring-git-to-handle-line-endings +# Ensure to read artcile prior to adding +# Scripts should have Unix endings +*.py text eol=lf +*.sh text eol=lf + +# Windows Batch or PowerShell scripts should have CRLF endings +*.bat text eol=crlf +*.ps1 text eol=crlf + +# adding github settings to show correct language +*.sh linguist-detectable=true +*.yml linguist-detectable=true +*.ps1 linguist-detectable=true +*.j2 linguist-detectable=true +*.md linguist-documentation diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..4e3ce79 --- /dev/null +++ b/.gitignore @@ -0,0 +1,45 @@ +.env +*.log +*.retry +.vagrant +tests/*redhat-subscription +tests/Dockerfile +*.iso +*.box +packer_cache +delete* +ignore* +# VSCode +.vscode +vagrant + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# DS_Store +.DS_Store +._* + +# Linux Editors +*~ +\#*\# +/.emacs.desktop +/.emacs.desktop.lock +.elc +auto-save-list +tramp +.\#* +*.swp +*.swo +rh-creds.env +travis.env + +# Lockdown-specific +benchparse/ +*xccdf.xml +*.retry + +# GitHub Action/Workflow files +.github/ diff --git 
a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..9e462a3 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,64 @@ +--- +##### CI for use by github no need for action to be added +##### Inherited +ci: + autofix_prs: false + skip: [detect-aws-credentials, ansible-lint ] + +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.6.0 + hooks: + # Safety + - id: detect-aws-credentials + - id: detect-private-key + + # git checks + - id: check-merge-conflict + - id: check-added-large-files + - id: check-case-conflict + + # General checks + - id: trailing-whitespace + name: Trim Trailing Whitespace + description: This hook trims trailing whitespace. + entry: trailing-whitespace-fixer + language: python + types: [text] + args: [--markdown-linebreak-ext=md] + - id: end-of-file-fixer + +# Scan for passwords +- repo: https://github.com/Yelp/detect-secrets + rev: v1.5.0 + hooks: + - id: detect-secrets + +- repo: https://github.com/gitleaks/gitleaks + rev: v8.18.4 + hooks: + - id: gitleaks + +- repo: https://github.com/ansible-community/ansible-lint + rev: v24.6.1 + hooks: + - id: ansible-lint + name: Ansible-lint + description: This hook runs ansible-lint. + entry: python3 -m ansiblelint --force-color site.yml -c .ansible-lint + language: python + # do not pass files to ansible-lint, see: + # https://github.com/ansible/ansible-lint/issues/611 + pass_filenames: false + always_run: true + additional_dependencies: + # https://github.com/pre-commit/pre-commit/issues/1526 + # If you want to use specific version of ansible-core or ansible, feel + # free to override `additional_dependencies` in your own hook config + # file. 
+ - ansible-core>=2.10.1 + +- repo: https://github.com/adrienverge/yamllint.git + rev: v1.35.1 # or higher tag + hooks: + - id: yamllint diff --git a/.yamllint b/.yamllint new file mode 100755 index 0000000..dff2457 --- /dev/null +++ b/.yamllint @@ -0,0 +1,32 @@ +--- + +extends: default + +ignore: | + tests/ + molecule/ + .github/ + .gitlab-ci.yml + *molecule.yml + +rules: + indentation: + # Requiring consistent indentation within a file, either indented or not + indent-sequences: consistent + braces: + max-spaces-inside: 1 + level: error + brackets: + max-spaces-inside: 1 + level: error + empty-lines: + max: 1 + line-length: disable + key-duplicates: enable + new-line-at-end-of-file: enable + new-lines: + type: unix + trailing-spaces: enable + truthy: + allowed-values: ['true', 'false'] + check-keys: true diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst new file mode 100644 index 0000000..2a7fd38 --- /dev/null +++ b/CONTRIBUTING.rst @@ -0,0 +1,69 @@ +Contributing to MindPoint Group Projects +======================================== + +Rules +----- +1) All commits must be GPG signed (details in Signing section) +2) All commits must have Signed-off-by (Signed-off-by: Joan Doe ) in the commit message (details in Signing section) +3) All work is done in your own branch or fork +4) Pull requests + a) From within the repo: All pull requests go into the devel branch. There are automated checks for signed commits, signoff in commit messages, and functional testing + b) From a forked repo: All pull requests will go into a staging branch within the repo. There are automated checks for signed commits, signoff in commit messages, and functional testing when going from staging to devel +4) All pull requests go into the devel branch. There are automated checks for signed commits, signoff in commit messages, and functional testing) +5) Be open and nice to each other + +Workflow +-------- +- Your work is done in your own individual branch. 
Make sure to to Signed-off and GPG sign all commits you intend to merge +- All community Pull Requests are into the devel branch. There are automated checks for GPG signed, Signed-off in commits, and functional tests before being approved. If your pull request comes in from outside of our repo, the pull request will go into a staging branch. There is info needed from our repo for our CI/CD testing. +- Once your changes are merged and a more detailed review is complete, an authorized member will merge your changes into the main branch for a new release +Signing your contribution +------------------------- + +We've chosen to use the Developer's Certificate of Origin (DCO) method +that is employed by the Linux Kernel Project, which provides a simple +way to contribute to MindPoint Group projects. + +The process is to certify the below DCO 1.1 text +:: + + Developer's Certificate of Origin 1.1 + + By making a contribution to this project, I certify that: + + (a) The contribution was created in whole or in part by me and I + have the right to submit it under the open-source license + indicated in the file; or + + (b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open-source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open-source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + + (c) The contribution was provided directly to me by some other + person who certified (a), (b), or (c) and I have not modified + it. + + (d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. 
+:: + +Then, when it comes time to submit a contribution, include the +following text in your contribution commit message: + +:: + + Signed-off-by: Joan Doe + +:: + + +This message can be entered manually, or if you have configured git +with the correct `user.name` and `user.email`, you can use the `-s` +option to `git commit` to automatically include the signoff message. diff --git a/Changelog.md b/Changelog.md new file mode 100644 index 0000000..be6de28 --- /dev/null +++ b/Changelog.md @@ -0,0 +1,5 @@ +# Ubuntu24CIS + +## Based on CIS v1.0.0 + +### Initial diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..e9cb70f --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2023 MindPoint Group / Lockdown Enterprise / Lockdown Enterprise Releases + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/README.md b/README.md new file mode 100644 index 0000000..d78b0c5 --- /dev/null +++ b/README.md @@ -0,0 +1,121 @@ +# Ubuntu 24 CIS + +## Configure a Ubuntu 24 machine to be [CIS](https://www.cisecurity.org/cis-benchmarks/) compliant + +### Based on CIS Ubuntu Linux 24.04 LTS Benchmark v1.0.0 [Release](https://downloads.cisecurity.org/#/) + +![Org Stars](https://img.shields.io/github/stars/ansible-lockdown?label=Org%20Stars&style=social) +![Stars](https://img.shields.io/github/stars/ansible-lockdown/ubuntu24-cis?label=Repo%20Stars&style=social) +![Forks](https://img.shields.io/github/forks/ansible-lockdown/ubuntu24-cis?style=social) +![followers](https://img.shields.io/github/followers/ansible-lockdown?style=social) +[![Twitter URL](https://img.shields.io/twitter/url/https/twitter.com/AnsibleLockdown.svg?style=social&label=Follow%20%40AnsibleLockdown)](https://twitter.com/AnsibleLockdown) + +![Discord Badge](https://img.shields.io/discord/925818806838919229?logo=discord) + +![Release Branch](https://img.shields.io/badge/Release%20Branch-Main-brightgreen) +![Release Tag](https://img.shields.io/github/v/release/ansible-lockdown/UBUNTU24-CIS) +![Release Date](https://img.shields.io/github/release-date/ansible-lockdown/UBUNTU24-CIS) + +[![Main Pipeline Status](https://github.com/ansible-lockdown/UBUNTU24-CIS/actions/workflows/main_pipeline_validation.yml/badge.svg?)](https://github.com/ansible-lockdown/UBUNTU24-CIS/actions/workflows/main_pipeline_validation.yml) + +[![Devel Pipeline Status](https://github.com/ansible-lockdown/UBUNTU24-CIS/actions/workflows/devel_pipeline_validation.yml/badge.svg?)](https://github.com/ansible-lockdown/UBUNTU24-CIS/actions/workflows/devel_pipeline_validation.yml) +![Devel Commits](https://img.shields.io/github/commit-activity/m/ansible-lockdown/UBUNTU24-CIS/devel?color=dark%20green&label=Devel%20Branch%20Commits) + +![Issues Open](https://img.shields.io/github/issues-raw/ansible-lockdown/UBUNTU24-CIS?label=Open%20Issues) 
+![Issues Closed](https://img.shields.io/github/issues-closed-raw/ansible-lockdown/UBUNTU24-CIS?label=Closed%20Issues&&color=success) +![Pull Requests](https://img.shields.io/github/issues-pr/ansible-lockdown/UBUNTU24-CIS?label=Pull%20Requests) + +![License](https://img.shields.io/github/license/ansible-lockdown/UBUNTU24-CIS?label=License) + +--- + +## Looking for support? + +[Lockdown Enterprise](https://www.lockdownenterprise.com#GH_AL_UB24_cis) + +[Ansible support](https://www.mindpointgroup.com/cybersecurity-products/ansible-counselor#GH_AL_UB24_cis) + +### Community + +Join us on our [Discord Server](https://www.lockdownenterprise.com/discord) to ask questions, discuss features, or just chat with other Ansible-Lockdown users. + +## Caution(s) + +This role **will make changes to the system** that could break things. This is not an auditing tool but rather a remediation tool to be used after an audit has been conducted. + +This role was developed against a clean install of the Operating System. If you are implementing on an existing system, please review this role for any site-specific changes that are needed. 
+ +## Documentation + +- [Read The Docs](https://ansible-lockdown.readthedocs.io/en/latest/) +- [Getting Started](https://www.lockdownenterprise.com/docs/getting-started-with-lockdown#GH_AL_UB24_cis) +- [Customizing Roles](https://www.lockdownenterprise.com/docs/customizing-lockdown-enterprise#GH_AL_UB24_cis) +- [Per-Host Configuration](https://www.lockdownenterprise.com/docs/per-host-lockdown-enterprise-configuration#GH_AL_UB24_cis) +- [Getting the Most Out of the Role](https://www.lockdownenterprise.com/docs/get-the-most-out-of-lockdown-enterprise#GH_AL_UB24_cis) + +## Requirements + +**General:** + +- Basic knowledge of Ansible, below are some links to the Ansible documentation to help get started if you are unfamiliar with Ansible + - [Main Ansible documentation page](https://docs.ansible.com) + - [Ansible Getting Started](https://docs.ansible.com/ansible/latest/user_guide/intro_getting_started.html) + - [Tower User Guide](https://docs.ansible.com/ansible-tower/latest/html/userguide/index.html) + - [Ansible Community Info](https://docs.ansible.com/ansible/latest/community/index.html) +- Functioning Ansible and/or Tower Installed, configured, and running. This includes all of the base Ansible/Tower configurations, needed packages installed, and infrastructure setup. +- Please read through the tasks in this role to gain an understanding of what each control is doing. Some of the tasks are disruptive and can have unintended consequences in a live production system. Also, familiarize yourself with the variables in the defaults/main.yml file or the [Main Variables Wiki Page](https://github.com/ansible-lockdown/UBUNTU24-CIS/wiki/Main-Variables). + +**Technical Dependencies:** + +- Running Ansible/Tower setup (this role is tested against Ansible version 2.12.1 and newer) +- Python3 Ansible run environment +- goss >= 0.4.4 (If using for audit) + +## Auditing (new) + +This can be turned on or off within the defaults/main.yml file with the variable run_audit. 
The value is false by default, please refer to the wiki for more details. + +This is a much quicker, very lightweight, checking (where possible) config compliance and live/running settings. + +A new form of auditing has been developed, by using a small (12MB) go binary called [goss](https://github.com/goss-org/goss) along with the relevant configurations to check. Without the need for infrastructure or other tooling. +This audit will not only check the config has the correct setting but aims to capture if it is running with that configuration also trying to remove [false positives](https://www.mindpointgroup.com/blog/is-compliance-scanning-still-relevant/) in the process. + +Refer to [UBUNTU24-CIS-Audit](https://github.com/ansible-lockdown/UBUNTU24-CIS-Audit). + +Further audit documentation can be found at [Read The Docs](https://ansible-lockdown.readthedocs.io/en/latest/) + +## Role Variables + +This role is designed so the end user should not have to edit the tasks themselves. All customizing should be done via the defaults/main.yml file or with extra vars within the project, job, workflow, etc. + +## Branches + +- **devel** - This is the default branch and the working development branch. Community pull requests will be pulled into this branch +- **main** - This is the release branch +- **all other branches** - Individual community member branches + +## Community Contribution + +We encourage you (the community) to contribute to this role. Please read the rules below. + +- Your work is done in your own individual branch. Make sure to Signed-off and GPG sign all commits you intend to merge. 
+- All community Pull Requests are pulled into the devel branch +- Pull Requests into devel will confirm your commits have a GPG signature, Signed-off, and a functional test before being approved +- Once your changes are merged and a more detailed review is complete, an authorized member will merge your changes into the main branch for a new release + +## Pipeline Testing + +uses: + +- ansible-core 2.16.6 +- ansible collections - pulls in the latest version based on the requirements file +- runs the audit using the devel branch +- This is an automated test that occurs on pull requests into devel + +## Added Extras + +- [pre-commit](https://pre-commit.com) can be tested and can be run from within the directory + +```sh +pre-commit run +``` diff --git a/collections/requirements.yml b/collections/requirements.yml new file mode 100644 index 0000000..8ebc618 --- /dev/null +++ b/collections/requirements.yml @@ -0,0 +1,14 @@ +--- + +collections: + - name: community.general + source: https://github.com/ansible-collections/community.general + type: git + + - name: community.crypto + source: https://github.com/ansible-collections/community.crypto + type: git + + - name: ansible.posix + source: https://github.com/ansible-collections/ansible.posix + type: git diff --git a/defaults/main.yml b/defaults/main.yml new file mode 100644 index 0000000..100826c --- /dev/null +++ b/defaults/main.yml @@ -0,0 +1,1284 @@ +--- + +## Switching on/off specific baseline sections +# These variables govern whether the tasks of a particular section are to be executed when running the role. +# E.g: If you want to execute the tasks of Section 1 you should set the "_section1" variable to true. +# to "true". If you do not want the tasks from that section to get executed you simply set the variable to "false". 
+ubtu24cis_section1: true +ubtu24cis_section2: true +ubtu24cis_section3: true +ubtu24cis_section4: true +ubtu24cis_section5: true +ubtu24cis_section6: true +ubtu24cis_section7: true + +## Reboot system before audit +# System will reboot if false, can give better audit results +skip_reboot: true + +## Benchmark name and profiles used by auditing control role +# The audit variable found at the base +benchmark: UBUNTU24-CIS +benchmark_version: v1.0.0 + +# Used for audit +ubtu24cis_level_1: true +ubtu24cis_level_2: true + +## Possibly disruptive tasks +# This variable governs whether tasks of a potentially disruptive nature on your system +# (e.g., removing .forward and .netrc files for interactive users) are carried out. +# If you set this variable to "true", the role will carry such tasks; if you set it to +# "false", these tasks will be skipped. +ubtu24cis_disruption_high: true + +## Unrestricted boot +# Setting this variable to false enables the system to +# boot *without* querying for the bootloader password. +ubtu24cis_ask_passwd_to_boot: false + +## Usage on containerized images +# The role discovers dynamically (in tasks/main.yml) whether it +# is executed on a container image and sets the variable +# system_is_container the true. Otherwise, the default value +# 'false' is left unchanged. 
+container_vars_file: is_container.yml + +## Root user used +# Root by default is not used unless setup by user +# The role will only run certain commands if set to true +# This allows the ability to skip tasks that may cause an issue +ubtu24cis_uses_root: false + +### +### Settings for associated Audit role using Goss +### + +########################################### +### Goss is required on the remote host ### +### vars/auditd.yml for other settings ### + +# Allow audit to setup the requirements including installing git (if option chosen and downloading and adding goss binary to system) +setup_audit: false + +# enable audits to run - this runs the audit and get the latest content +run_audit: false +# Run heavy tests - some tests can have more impact on a system enabling these can have greater impact on a system +audit_run_heavy_tests: true + +## Only run Audit do not remediate +audit_only: false +### As part of audit_only ### +# This will enable files to be copied back to control node in audit_only mode +fetch_audit_files: false +# Path to copy the files to will create dir structure in audit_only mode +audit_capture_files_dir: /some/location to copy to on control node +############################# + +# How to retrieve audit binary +# Options are copy or download - detailed settings at the bottom of this file +# you will need to access to either github or the file already dowmloaded +get_audit_binary_method: download + +## if get_audit_binary_method - copy the following needs to be updated for your environment +## it is expected that it will be copied from somewhere accessible to the control node +## e.g copy from ansible control node to remote host +audit_bin_copy_location: /some/accessible/path + +# how to get audit files onto host options +# options are git/copy/archive/get_url other e.g. 
if you wish to run from already downloaded conf +audit_content: git + +# If using either archive, copy, get_url: +## Note will work with .tar files - zip will require extra configuration +### If using get_url this is expecting github url in tar.gz format e.g. +### https://github.com/ansible-lockdown/UBUNTU24-CIS-Audit/archive/refs/heads/benchmark-v1.0.0.tar.gz +audit_conf_source: "some path or url to copy from" + +# Destination for the audit content to be placed on managed node +# note may not need full path e.g. /opt with the directory being the {{ benchmark }}-Audit directory +audit_conf_dest: "/opt" + +# Where the audit logs are stored +audit_log_dir: '/opt' + +### Goss Settings ## +####### END ######## + +## Running in EC2 pipeline +# Skip events for EC2 instance testing pipeline. +# This variable is set to true for automated tests +# carried out for this role by the role maintainers. +system_is_ec2: false + +## +## Rule-specific switches +## +## Use the switches below to disable specific rules independently of the chosen profile +## + +## Section 1 Fixes +# Section 1 is Initial setup (FileSystem Configuration, Configure Software Updates, Filesystem Integrity Checking, Secure Boot Settings, +# Additional Process Hardening, Mandatory Access Control, Command Line Warning Banners, and GNOME Display Manager) + +# 1.1 Filesystems +# 1.1.1 Configure Filesystem Kernel Modules +ubtu24cis_rule_1_1_1_1: true +ubtu24cis_rule_1_1_1_2: true +ubtu24cis_rule_1_1_1_3: true +ubtu24cis_rule_1_1_1_4: true +ubtu24cis_rule_1_1_1_5: true +ubtu24cis_rule_1_1_1_6: true +ubtu24cis_rule_1_1_1_7: true +ubtu24cis_rule_1_1_1_8: true +ubtu24cis_rule_1_1_1_9: true +ubtu24cis_rule_1_1_1_10: true + +# 1.1.2 Configure Filesystem Partitions +# /tmp +ubtu24cis_rule_1_1_2_1_1: true +ubtu24cis_rule_1_1_2_1_2: true +ubtu24cis_rule_1_1_2_1_3: true +ubtu24cis_rule_1_1_2_1_4: true + +# /dev/shm +ubtu24cis_rule_1_1_2_2_1: true +ubtu24cis_rule_1_1_2_2_2: true +ubtu24cis_rule_1_1_2_2_3: true 
+ubtu24cis_rule_1_1_2_2_4: true + +# /home +ubtu24cis_rule_1_1_2_3_1: true +ubtu24cis_rule_1_1_2_3_2: true +ubtu24cis_rule_1_1_2_3_3: true + +# /var +ubtu24cis_rule_1_1_2_4_1: true +ubtu24cis_rule_1_1_2_4_2: true +ubtu24cis_rule_1_1_2_4_3: true + +# /var/tmp +ubtu24cis_rule_1_1_2_5_1: true +ubtu24cis_rule_1_1_2_5_2: true +ubtu24cis_rule_1_1_2_5_3: true +ubtu24cis_rule_1_1_2_5_4: true + +# /var/log +ubtu24cis_rule_1_1_2_6_1: true +ubtu24cis_rule_1_1_2_6_2: true +ubtu24cis_rule_1_1_2_6_3: true +ubtu24cis_rule_1_1_2_6_4: true + +# /var/log/audit +ubtu24cis_rule_1_1_2_7_1: true +ubtu24cis_rule_1_1_2_7_2: true +ubtu24cis_rule_1_1_2_7_3: true +ubtu24cis_rule_1_1_2_7_4: true + +# 1.2 Package mgmt +# 1.2.1 Configure Package repositories +ubtu24cis_rule_1_2_1_1: true +ubtu24cis_rule_1_2_1_2: true +# 1.2.2 Configure Package updates +ubtu24cis_rule_1_2_2_1: true + +# 1.3 Mandatory Access Control +## 1.3.1 Configure AppArmor +ubtu24cis_rule_1_3_1_1: true +ubtu24cis_rule_1_3_1_2: true +ubtu24cis_rule_1_3_1_3: true +ubtu24cis_rule_1_3_1_4: true + +# 1.4 Configure Bootloader +ubtu24cis_rule_1_4_1: true +ubtu24cis_rule_1_4_2: true + +# 1.5 Configure additional Process Hardening +ubtu24cis_rule_1_5_1: true +ubtu24cis_rule_1_5_2: true +ubtu24cis_rule_1_5_3: true +ubtu24cis_rule_1_5_4: true +ubtu24cis_rule_1_5_5: true + +# 1.6 Configure Command Line Warning Banners +ubtu24cis_rule_1_6_1: true +ubtu24cis_rule_1_6_2: true +ubtu24cis_rule_1_6_3: true +ubtu24cis_rule_1_6_4: true +ubtu24cis_rule_1_6_5: true +ubtu24cis_rule_1_6_6: true + +# 1.7 Configure GNOME Display Manager +ubtu24cis_rule_1_7_1: true +ubtu24cis_rule_1_7_2: true +ubtu24cis_rule_1_7_3: true +ubtu24cis_rule_1_7_4: true +ubtu24cis_rule_1_7_5: true +ubtu24cis_rule_1_7_6: true +ubtu24cis_rule_1_7_7: true +ubtu24cis_rule_1_7_8: true +ubtu24cis_rule_1_7_9: true +ubtu24cis_rule_1_7_10: true + +## Section 2 Fixes +# Section 2 is Services (Special Purpose Services, and service clients) + +# 2.1 Configure Server Services 
+ubtu24cis_rule_2_1_1: true +ubtu24cis_rule_2_1_2: true +ubtu24cis_rule_2_1_3: true +ubtu24cis_rule_2_1_4: true +ubtu24cis_rule_2_1_5: true +ubtu24cis_rule_2_1_6: true +ubtu24cis_rule_2_1_7: true +ubtu24cis_rule_2_1_8: true +ubtu24cis_rule_2_1_9: true +ubtu24cis_rule_2_1_10: true +ubtu24cis_rule_2_1_11: true +ubtu24cis_rule_2_1_12: true +ubtu24cis_rule_2_1_13: true +ubtu24cis_rule_2_1_14: true +ubtu24cis_rule_2_1_15: true +ubtu24cis_rule_2_1_16: true +ubtu24cis_rule_2_1_17: true +ubtu24cis_rule_2_1_18: true +ubtu24cis_rule_2_1_19: true +ubtu24cis_rule_2_1_20: true +ubtu24cis_rule_2_1_21: true +ubtu24cis_rule_2_1_22: true + +# 2.2 Configure client services +ubtu24cis_rule_2_2_1: true +ubtu24cis_rule_2_2_2: true +ubtu24cis_rule_2_2_3: true +ubtu24cis_rule_2_2_4: true +ubtu24cis_rule_2_2_5: true +ubtu24cis_rule_2_2_6: true + +# Ensure time synchronization is in use +ubtu24cis_rule_2_3_1_1: true +# Configure systemd-timesyncd +ubtu24cis_rule_2_3_2_1: true +ubtu24cis_rule_2_3_2_2: true +# Configure Chrony +ubtu24cis_rule_2_3_3_1: true +ubtu24cis_rule_2_3_3_2: true +ubtu24cis_rule_2_3_3_3: true + +# 2.4 Job Schedulers +# 2.4.1 Configure Cron +ubtu24cis_rule_2_4_1_1: true +ubtu24cis_rule_2_4_1_2: true +ubtu24cis_rule_2_4_1_3: true +ubtu24cis_rule_2_4_1_4: true +ubtu24cis_rule_2_4_1_5: true +ubtu24cis_rule_2_4_1_6: true +ubtu24cis_rule_2_4_1_7: true +ubtu24cis_rule_2_4_1_8: true +# Configure At +ubtu24cis_rule_2_4_2_1: true + +## Section 3 Network Configuration +# 3.1 Configure Network Devices +ubtu24cis_rule_3_1_1: true +ubtu24cis_rule_3_1_2: true +ubtu24cis_rule_3_1_3: true +# 3.2 Configure Network Kernel Modules (Host Only) +ubtu24cis_rule_3_2_1: true +ubtu24cis_rule_3_2_2: true +ubtu24cis_rule_3_2_3: true +ubtu24cis_rule_3_2_4: true +# 3.3 Configure Network Kernel Parameters (Host and Router) +ubtu24cis_rule_3_3_1: true +ubtu24cis_rule_3_3_2: true +ubtu24cis_rule_3_3_3: true +ubtu24cis_rule_3_3_4: true +ubtu24cis_rule_3_3_5: true +ubtu24cis_rule_3_3_6: true 
+ubtu24cis_rule_3_3_7: true +ubtu24cis_rule_3_3_8: true +ubtu24cis_rule_3_3_9: true +ubtu24cis_rule_3_3_10: true +ubtu24cis_rule_3_3_11: true + +## Section 4 Host Based Firewall + +# 4.1 single firewall +ubtu24cis_rule_4_1_1: true + +# 4.2 Configure UncomplicatedFirewall +ubtu24cis_rule_4_2_1: true +ubtu24cis_rule_4_2_2: true +ubtu24cis_rule_4_2_3: true +ubtu24cis_rule_4_2_4: true +ubtu24cis_rule_4_2_5: true +ubtu24cis_rule_4_2_6: true +ubtu24cis_rule_4_2_7: true +# 4.3 Configure nftables +ubtu24cis_rule_4_3_1: true +ubtu24cis_rule_4_3_2: true +ubtu24cis_rule_4_3_3: true +ubtu24cis_rule_4_3_4: true +ubtu24cis_rule_4_3_5: true +ubtu24cis_rule_4_3_6: true +ubtu24cis_rule_4_3_7: true +ubtu24cis_rule_4_3_8: true +ubtu24cis_rule_4_3_9: true +ubtu24cis_rule_4_3_10: true + +# 4.4.1 Configure iptables software +ubtu24cis_rule_4_4_1_1: true +ubtu24cis_rule_4_4_1_2: true +ubtu24cis_rule_4_4_1_3: true + +# 4.4.2 Configure IPv4 iptables +ubtu24cis_rule_4_4_2_1: true +ubtu24cis_rule_4_4_2_2: true +ubtu24cis_rule_4_4_2_3: true +ubtu24cis_rule_4_4_2_4: true +# 4.4.3 Configure IPv6 iptables +ubtu24cis_rule_4_4_3_1: true +ubtu24cis_rule_4_4_3_2: true +ubtu24cis_rule_4_4_3_3: true +ubtu24cis_rule_4_4_3_4: true + +## Section 5 Access Control +# 5.1 Configure SSH Server +ubtu24cis_rule_5_1_1: true +ubtu24cis_rule_5_1_2: true +ubtu24cis_rule_5_1_3: true +ubtu24cis_rule_5_1_4: true +ubtu24cis_rule_5_1_5: true +ubtu24cis_rule_5_1_6: true +ubtu24cis_rule_5_1_7: true +ubtu24cis_rule_5_1_8: true +ubtu24cis_rule_5_1_9: true +ubtu24cis_rule_5_1_10: true +ubtu24cis_rule_5_1_11: true +ubtu24cis_rule_5_1_12: true +ubtu24cis_rule_5_1_13: true +ubtu24cis_rule_5_1_14: true +ubtu24cis_rule_5_1_15: true +ubtu24cis_rule_5_1_16: true +ubtu24cis_rule_5_1_17: true +ubtu24cis_rule_5_1_18: true +ubtu24cis_rule_5_1_19: true +ubtu24cis_rule_5_1_20: true +ubtu24cis_rule_5_1_21: true +ubtu24cis_rule_5_1_22: true +# 5.2 Configure privilege escalation +ubtu24cis_rule_5_2_1: true +ubtu24cis_rule_5_2_2: true 
+ubtu24cis_rule_5_2_3: true +ubtu24cis_rule_5_2_4: true +ubtu24cis_rule_5_2_5: true +ubtu24cis_rule_5_2_6: true +ubtu24cis_rule_5_2_7: true +# 5.3.1 Configure PAM software packages +ubtu24cis_rule_5_3_1_1: true +ubtu24cis_rule_5_3_1_2: true +ubtu24cis_rule_5_3_1_3: true +# 5.3.2 Configure pam-auth-update profiles +ubtu24cis_rule_5_3_2_1: true +ubtu24cis_rule_5_3_2_2: true +ubtu24cis_rule_5_3_2_3: true +ubtu24cis_rule_5_3_2_4: true +# 5.3.3.1 Configure pam_faillock module +ubtu24cis_rule_5_3_3_1_1: true +ubtu24cis_rule_5_3_3_1_2: true +ubtu24cis_rule_5_3_3_1_3: true +# 5.3.3.2 Configure pam_quality module +ubtu24cis_rule_5_3_3_2_1: true +ubtu24cis_rule_5_3_3_2_2: true +ubtu24cis_rule_5_3_3_2_3: true +ubtu24cis_rule_5_3_3_2_4: true +ubtu24cis_rule_5_3_3_2_5: true +ubtu24cis_rule_5_3_3_2_6: true +ubtu24cis_rule_5_3_3_2_7: true +ubtu24cis_rule_5_3_3_2_8: true +# 5.3.3.3 Configure pam_history module +# This are added as part of 5.3.2.4 using jinja2 template +ubtu24cis_rule_5_3_3_3_1: true +ubtu24cis_rule_5_3_3_3_2: true +ubtu24cis_rule_5_3_3_3_3: true +# 5.3.3.4 Configure pam_unix module +ubtu24cis_rule_5_3_3_4_1: true +ubtu24cis_rule_5_3_3_4_2: true +ubtu24cis_rule_5_3_3_4_3: true +ubtu24cis_rule_5_3_3_4_4: true +# 5.4 User Accounts and Environment +# 5.4.1 Configure shadow password suite parameters +ubtu24cis_rule_5_4_1_1: true +ubtu24cis_rule_5_4_1_2: true +ubtu24cis_rule_5_4_1_3: true +ubtu24cis_rule_5_4_1_4: true +ubtu24cis_rule_5_4_1_5: true +ubtu24cis_rule_5_4_1_6: true +# 5.4.2 Configure root and system accounts and environment +ubtu24cis_rule_5_4_2_1: true +ubtu24cis_rule_5_4_2_2: true +ubtu24cis_rule_5_4_2_3: true +ubtu24cis_rule_5_4_2_4: true +ubtu24cis_rule_5_4_2_5: true +ubtu24cis_rule_5_4_2_6: true +ubtu24cis_rule_5_4_2_7: true +ubtu24cis_rule_5_4_2_8: true +# 5.4.2 Configure user default environment +ubtu24cis_rule_5_4_3_1: true +ubtu24cis_rule_5_4_3_2: true +ubtu24cis_rule_5_4_3_3: true + +## Section 6 + +# 6.2.1.x Configure systemd-journald service 
+ubtu24cis_rule_6_1_1_1: true +ubtu24cis_rule_6_1_1_2: true +ubtu24cis_rule_6_1_1_3: true +ubtu24cis_rule_6_1_1_4: true +# 6.1.2.1 Configure journald +ubtu24cis_rule_6_1_2_1_1: true +ubtu24cis_rule_6_1_2_1_2: true +ubtu24cis_rule_6_1_2_1_3: true +ubtu24cis_rule_6_1_2_1_4: true +ubtu24cis_rule_6_1_2_2: true +ubtu24cis_rule_6_1_2_3: true +ubtu24cis_rule_6_1_2_4: true +# 6.1.3 Configure rsyslog +ubtu24cis_rule_6_1_3_1: true +ubtu24cis_rule_6_1_3_2: true +ubtu24cis_rule_6_1_3_3: true +ubtu24cis_rule_6_1_3_4: true +ubtu24cis_rule_6_1_3_5: true +ubtu24cis_rule_6_1_3_6: true +ubtu24cis_rule_6_1_3_7: true +# 6.1.3.8 logrotate +ubtu24cis_rule_6_1_3_8: true +# 6.1.4.1 configure logfiles +ubtu24cis_rule_6_1_4_1: true +# 6.2.1 Configure auditd services +ubtu24cis_rule_6_2_1_1: true +ubtu24cis_rule_6_2_1_2: true +ubtu24cis_rule_6_2_1_3: true +ubtu24cis_rule_6_2_1_4: true +# 6.2.2 Configure auditd data retention +ubtu24cis_rule_6_2_2_1: true +ubtu24cis_rule_6_2_2_2: true +ubtu24cis_rule_6_2_2_3: true +ubtu24cis_rule_6_2_2_4: true +# 6.2.3 Configure auditd rules +ubtu24cis_rule_6_2_3_1: true +ubtu24cis_rule_6_2_3_2: true +ubtu24cis_rule_6_2_3_3: true +ubtu24cis_rule_6_2_3_4: true +ubtu24cis_rule_6_2_3_5: true +ubtu24cis_rule_6_2_3_6: true +ubtu24cis_rule_6_2_3_7: true +ubtu24cis_rule_6_2_3_8: true +ubtu24cis_rule_6_2_3_9: true +ubtu24cis_rule_6_2_3_10: true +ubtu24cis_rule_6_2_3_11: true +ubtu24cis_rule_6_2_3_12: true +ubtu24cis_rule_6_2_3_13: true +ubtu24cis_rule_6_2_3_14: true +ubtu24cis_rule_6_2_3_15: true +ubtu24cis_rule_6_2_3_16: true +ubtu24cis_rule_6_2_3_17: true +ubtu24cis_rule_6_2_3_18: true +ubtu24cis_rule_6_2_3_19: true +ubtu24cis_rule_6_2_3_20: true +ubtu24cis_rule_6_2_3_21: true +# 6.2.4 Configure audit file access +ubtu24cis_rule_6_2_4_1: true +ubtu24cis_rule_6_2_4_2: true +ubtu24cis_rule_6_2_4_3: true +ubtu24cis_rule_6_2_4_4: true +ubtu24cis_rule_6_2_4_5: true +ubtu24cis_rule_6_2_4_6: true +ubtu24cis_rule_6_2_4_7: true +ubtu24cis_rule_6_2_4_8: true 
+ubtu24cis_rule_6_2_4_9: true +ubtu24cis_rule_6_2_4_10: true +# 6.3 Configure Filesystem Integrity Checking +ubtu24cis_rule_6_3_1: true +ubtu24cis_rule_6_3_2: true +ubtu24cis_rule_6_3_3: true + +## Section 7 +# 7.1 System File Permissions +ubtu24cis_rule_7_1_1: true +ubtu24cis_rule_7_1_2: true +ubtu24cis_rule_7_1_3: true +ubtu24cis_rule_7_1_4: true +ubtu24cis_rule_7_1_5: true +ubtu24cis_rule_7_1_6: true +ubtu24cis_rule_7_1_7: true +ubtu24cis_rule_7_1_8: true +ubtu24cis_rule_7_1_9: true +ubtu24cis_rule_7_1_10: true +ubtu24cis_rule_7_1_11: true +ubtu24cis_rule_7_1_12: true +ubtu24cis_rule_7_1_13: true +# 7.2 Local User and Group Settings +ubtu24cis_rule_7_2_1: true +ubtu24cis_rule_7_2_2: true +ubtu24cis_rule_7_2_3: true +ubtu24cis_rule_7_2_4: true +ubtu24cis_rule_7_2_5: true +ubtu24cis_rule_7_2_6: true +ubtu24cis_rule_7_2_7: true +ubtu24cis_rule_7_2_8: true +ubtu24cis_rule_7_2_9: true +ubtu24cis_rule_7_2_10: true + +## +## Service configuration variables. +## +## Set the respective variable to true to keep the service. 
+## otherwise the service is stopped and disabled +## +# Service configuration +# Options are +# true to leave installed if exists not changes take place +# false - this removes the package +# mask - if a dependancy for product so cannot be removed +# Server Services +ubtu24cis_autofs_services: false +ubtu24cis_autofs_mask: false +ubtu24cis_avahi_server: false +ubtu24cis_avahi_mask: false +ubtu24cis_dhcp_server: false +ubtu24cis_dhcp_mask: false +ubtu24cis_dns_server: false +ubtu24cis_dns_mask: false +ubtu24cis_dnsmasq_server: false +ubtu24cis_dnsmasq_mask: false +ubtu24cis_ftp_server: false +ubtu24cis_ftp_mask: false +ubtu24cis_ldap_server: false +ubtu24cis_ldap_mask: false +ubtu24cis_message_server: false # This is for messaging dovecot and cyrus-imap +ubtu24cis_message_mask: false +ubtu24cis_nfs_server: true +ubtu24cis_nfs_mask: true +ubtu24cis_nis_server: true # set to mask if nis client required +ubtu24cis_nis_mask: false +ubtu24cis_print_server: false # replaces cups +ubtu24cis_print_mask: false +ubtu24cis_rpc_server: true +ubtu24cis_rpc_mask: true +ubtu24cis_rsync_server: false +ubtu24cis_rsync_mask: false +ubtu24cis_samba_server: false +ubtu24cis_samba_mask: false +ubtu24cis_snmp_server: false +ubtu24cis_snmp_mask: false +ubtu24cis_telnet_server: false +ubtu24cis_telnet_mask: false +ubtu24cis_tftp_server: false +ubtu24cis_tftp_mask: false +ubtu24cis_squid_server: false +ubtu24cis_squid_mask: false +ubtu24cis_apache2_server: false +ubtu24cis_apache2_mask: false +ubtu24cis_nginx_server: false +ubtu24cis_nginx_mask: false +ubtu24cis_xinetd_server: false +ubtu24cis_xinetd_mask: false +ubtu24cis_xwindow_server: false # will remove mask not an option +ubtu24cis_is_mail_server: false + +# Client Services +ubtu24cis_nis_client_required: false # Same package as NIS server +ubtu24cis_rsh_client: false +ubtu24cis_talk_client: false +ubtu24cis_telnet_required: false +ubtu24cis_ldap_clients_required: false +ubtu24cis_ftp_client: false + +## System functionality 
configuration variables +## +## There are certain functionalities of a system +## that may require either to skip certain CIS rules +## or install certain packages. +## Set the respective variable to `true` in order to +## enable a certain functionality on the system + +# This variable governs whether specific CIS rules +# concerned with acceptance and routing of packages +# are skipped. +ubtu24cis_is_router: false + +## IPv4 requirement toggle +# This variable governs whether ipv4 is enabled or disabled. +ubtu24cis_ipv4_required: true + +## IPv6 requirement toggle +# This variable governs whether ipv6 is enabled or disabled. +ubtu24cis_ipv6_required: false + +## Desktop requirement toggle +# This variable governs, whether CIS rules regarding GDM +# and X-Windows are carried out. +ubtu24cis_desktop_required: false + +## Purge apt packages +# This will allow the purging of any packages that are marked to be removed +# This will also purge any packages not removed via this playbook +ubtu24cis_purge_apt: false + +## +## Section 1 Control Variables +## + +## tmp mount type +# This variable determines, to which mount type +# the tmp mount type will be set, if it cannot be +# correctly discovered. will force the tmp_mnt type +# if not correctly discovered. +# Possible values are `tmp_systemd` or `fstab`- +expected_tmp_mnt: fstab + +## Controls 1.3.1.x - apparmor +# AppArmor security policies define what system resources applications can access and their privileges. +# This automatically limits the damage that the software can do to files accessible by the calling user. 
+# The following variables are related to the set of rules from section 1.6.1.x + +## Controls 1.3.1.3 and 1.3.1.4 Ensure all AppArmor Profiles are in enforce (1.3.1.3/4) or complain (1.3.1.3) mode + +# This variable disables the implementation of rules 1.3.1.3 and 1.3.1.4 +# regarding enforcing profiles or putting them in complain mode +ubtu24cis_apparmor_disable: false + +# This variable specifies whether enforce mode or complain mode is set in Control 1.3.1.3. +# Possible values are `enforce` and `complain`. +# ATTENTION: if Control 1.3.1.4 is run (e.g., when running level 2 rules), it OVERRIDES control 1.3.1.3 +# and sets `enforce` mode, no matter what this variable's value is. +ubtu24cis_apparmor_mode: complain + +## Controls 1.4.x - Boot password +# +# THIS VARIABLE SHOULD BE CHANGED AND INCORPORATED INTO VAULT +# THIS VALUE IS WHAT THE ROOT PW WILL BECOME!!!!!!!! +# HAVING THAT PW EXPOSED IN RAW TEXT IS NOT SECURE!!!! +ubtu24cis_grub_user: root +ubtu24cis_set_grub_user_pass: false +ubtu24cis_grub_user_passwd: '$y$j9T$MBA5l/tQyWifM869nQjsi.$cTy0ConcNjIYOn6Cppo5NAky20osrkRxz4fEWA8xac6' # Set to changeme +ubtu24cis_grub_user_file: /etc/grub.d/00_user +ubtu24cis_bootloader_password_hash: "grub.pbkdf2.sha512.changethispassword" # pragma: allowlist secret +ubtu24cis_set_boot_pass: false + +ubtu24cis_grub_file: /boot/grub/grub.cfg + +## Controls 1.5.x +# Ability to set file in which the kernel systcl changes are placed +ubtu24cis_sysctl_kernel_conf: /etc/sysctl.d/98_cis_kernel.conf + +# 1.5.2 Ability to choose ptrace value +# options = 1, 2 or 3 +ubtu24_ptrace_value: 1 + + +## Controls 1.6.x - Warning banners +# The controls 1.6.x set various warning banners and protect the respective files +# by tightening the access rights. 
+
+# This variable specifies the warning banner displayed to the user
+# after local login, remote login, and as motd (message of the day)
+# Note that the banner text must not contain the below items in order to be
+# compliant with CIS: \m, \r, \s, \v or references to the OS platform
+ubtu24cis_warning_banner: |
+  Authorized uses only. All activity may be monitored and reported.
+
+# This variable governs whether dynamic motd is disabled (as required by control 1.7.1)
+ubtu24cis_disable_dynamic_motd: true
+
+## Controls 1.7.x - Settings for GDM
+# This variable specifies the GNOME configuration database file to which configurations are written.
+# (See https://help.gnome.org/admin/system-admin-guide/stable/dconf-keyfiles.html.en)
+# The default database is `local`.
+ubtu24cis_dconf_db_name: local
+# This variable governs the number of seconds of inactivity before the screen goes blank.
+ubtu24cis_screensaver_idle_delay: 900
+# This variable governs the number of seconds the screen remains blank before it is locked.
+ubtu24cis_screensaver_lock_delay: 5
+
+##
+## Section 2 Control Variables
+##
+
+## Control 2.3.1.1
+# This variable chooses the tool used for time synchronization
+# The two options are `chrony` and `systemd-timesyncd`.
+ubtu24cis_time_sync_tool: "systemd-timesyncd"
+
+## Controls 2.3.x - Configure time pools & servers for chrony and timesyncd
+# The following variable represents a list of time server pools used
+# for configuring chrony and timesyncd.
+# Each list item contains two settings, `name` (the domain name of the pool) and synchronization `options`.
+# The default setting for the `options` is `iburst maxsources 4` -- please refer to the documentation
+# of the time synchronization mechanism you are using.
+ubtu24cis_time_pool:
+  - name: time.nist.gov
+    options: iburst maxsources 4
+# The following variable represents a list of time servers used
+# for configuring chrony and timesyncd
+# Each list item contains two settings, `name` (the domain name of the server) and synchronization `options`.
+# The default setting for the `options` is `iburst` -- please refer to the documentation
+# of the time synchronization mechanism you are using.
+ubtu24cis_time_servers:
+  - name: time-a-g.nist.gov
+    options: iburst
+  - name: time-b-g.nist.gov
+    options: iburst
+  - name: time-c-g.nist.gov
+    options: iburst
+
+##
+## Section 3 Control Variables
+##
+
+## Control 3.1.1 - Ensure system is checked to determine if IPv6 is enabled
+# This variable governs the mechanism of how the disabling of IPV6 is carried out.
+# Its possible values are `grub` and `sysctl`.
+ubtu24cis_ipv6_disable: grub
+
+## Control 3.1.2 - Ensure wireless interfaces are disabled
+# This variable is used for a preliminary task regarding the installation of Network Manager
+# If it is set to `true`, Network Manager is installed during the "prelim" section of
+# the role. If set to `false`, Network Manager is not installed. Bear in mind that without
+# it, ensuring that wireless interfaces are disabled will not be possible!
+ubtu24cis_install_network_manager: true
+
+## Control 3.1.3 - Ensure bluetooth Services are not in use
+# This control manages how the bluetooth service is managed
+# Options are
+# true - leave installed if it exists; no changes take place
+# false - this removes the package
+# mask - if a dependency for a product so it cannot be removed
+ubtu24cis_bluetooth_service: false
+ubtu24cis_bluetooth_mask: false
+
+## Control 3.3.x - Networking configuration
+# This variable contains the path to the file in which sysctl saves its configurations.
+# Its default value is `/etc/sysctl.conf`.
+ubtu24cis_sysctl_network_conf: /etc/sysctl.conf
+
+##
+## Section 4 Control Variables
+##
+
+## Controls 4.1.x, 4.2.x, and 4.3.x - Firewall configuration
+# This variable represents the toggle for which firewall package is used.
+# The options that have an effect on the system are `ufw` and `iptables`.
+# The option `nftables` is also possible, but will only result in a message
+# that `nftables` has been chosen; all settings have to be carried out manually.
+# Any other value, e.g. `none`, will skip all firewall-related controls.
+ubtu24cis_firewall_package: "ufw"
+
+# This variable is used in a preliminary task that forces the UFW firewall to use /etc/sysctl.conf
+# settings. If set to "true" the task will get done in the prelim section of the role.
+ubtu24cis_ufw_use_sysctl: true
+
+## Control 4.1.5 - Ensure ufw outbound connections are configured
+# The value of this variable represents the ports for the firewall to allow outbound traffic from.
+# If you want to allow outbound traffic on all ports, set the variable to `all`, e.g.,
+# `ubtu24cis_ufw_allow_out_ports: "all"`.
+ubtu24cis_ufw_allow_out_ports:
+  - 53
+  - 80
+  - 443
+
+## Controls 4.2.x - nftables
+# Nftables is not supported in this role. Some tasks have parts of them commented out, this is one example
+# of such a task.
+# "ubtu24cis_nftables_table_name" is the name of the table in nftables you want to create.
+# The default nftables table name is "inet filter". This variable name will be the one all
+# nftables configs are applied to.
+# ubtu24cis_nftables_table_name: "inet filter"
+
+##
+## Section 5 Control Variables
+##
+## Controls 5.1.x -- various sshd settings
+ubtu24cis_sshd_default_log_level: "INFO"
+ubtu24cis_sshd_default_max_auth_tries: 4
+ubtu24cis_sshd_default_max_sessions: 8
+ubtu24cis_sshd_default_login_grace_time: 60
+ubtu24cis_sshd_default_client_alive_interval: 300
+ubtu24cis_sshd_default_client_alive_count_max: 3
+# Removed chacha20-poly1305 due to CVE-2023-48795
+# all Ciphers, KEX and Macs set to FIPS 140
+# This will need to be adjusted according to your site requirements
+ubtu24cis_sshd_default_ciphers:
+  - aes256-gcm@openssh.com
+  - aes128-gcm@openssh.com
+  - aes256-ctr
+  - aes192-ctr
+  - aes128-ctr
+ubtu24cis_sshd_default_macs:
+  - hmac-sha1
+  - hmac-sha2-256
+  # - hmac-sha2-384 # commented out - seen as a weak ssh2 MAC
+  - hmac-sha2-512
+ubtu24cis_sshd_default_kex_algorithms:
+  - ecdh-sha2-nistp256
+  - ecdh-sha2-nistp384
+  - ecdh-sha2-nistp521
+  - diffie-hellman-group-exchange-sha256
+  - diffie-hellman-group16-sha512
+  - diffie-hellman-group18-sha512
+  - diffie-hellman-group14-sha256
+
+ubtu24cis_sshd:
+  # This variable is used to control the verbosity of the logging produced by the SSH server.
+  # The options for setting it are as follows:
+  # - `QUIET`: Minimal logging;
+  # - `FATAL`: logs only fatal errors;
+  # - `ERROR`: logs error messages;
+  # - `INFO`: logs informational messages in addition to errors;
+  # - `VERBOSE`: logs a higher level of detail, including login attempts and key exchanges;
+  # - `DEBUG`: generates very detailed debugging information including sensitive information.
+  log_level: "{{ ubtu24cis_sshd_default_log_level }}"
+  # This variable specifies the maximum number of authentication attempts that are
+  # allowed for a single SSH session.
+  max_auth_tries: "{{ubtu24cis_sshd_default_max_auth_tries}}"
+  # This variable specifies the encryption algorithms that can be used for securing
+  # data transmission.
+  ciphers: "{{ ubtu24cis_sshd_default_ciphers }}"
+  # This variable specifies a list of message authentication code algorithms (MACs) that are allowed for verifying
+  # the integrity of data exchanged.
+  macs: "{{ ubtu24cis_sshd_default_macs }}"
+  # This variable is used to state the key exchange algorithms used to establish secure encryption
+  # keys during the initial connection setup.
+  kex_algorithms: "{{ ubtu24cis_sshd_default_kex_algorithms }}"
+  # This variable sets the time interval in seconds between sending "keep-alive"
+  # messages from the server to the client. These types of messages are intended to
+  # keep the connection alive and prevent it being terminated due to inactivity.
+  client_alive_interval: "{{ ubtu24cis_sshd_default_client_alive_interval }}"
+  # This variable sets the maximum number of unresponsive "keep-alive" messages
+  # that can be sent from the server to the client before the connection is considered
+  # inactive and thus closed.
+  client_alive_count_max: "{{ ubtu24cis_sshd_default_client_alive_count_max }}"
+  # This variable specifies the number of seconds allowed for successful authentication to
+  # the SSH server.
+  login_grace_time: "{{ ubtu24cis_sshd_default_login_grace_time }}"
+  # This variable is used to set the maximum number of open sessions per connection.
+  max_sessions: "{{ ubtu24cis_sshd_default_max_sessions }}"
+  # This variable, if specified, configures a list of USER name patterns, separated by spaces, to allow SSH
+  # access for users whose user name matches one of the patterns. This is done
+  # by setting the value of `AllowUsers` option in `/etc/ssh/sshd_config` file.
+  # If a USER@HOST format is used, the specified user will be allowed only on that particular host.
+  # The allow/deny directives process order: DenyUsers, AllowUsers, DenyGroups, AllowGroups.
+ # For more info, see https://linux.die.net/man/5/sshd_config + allow_users: "" + # (String) This variable, if specified, configures a list of GROUP name patterns, separated by spaces, to allow SSH access + # for users whose primary group or supplementary group list matches one of the patterns. This is done + # by setting the value of `AllowGroups` option in `/etc/ssh/sshd_config` file. + # The allow/deny directives process order: DenyUsers, AllowUsers, DenyGroups, AllowGroups. + # For more info, https://linux.die.net/man/5/sshd_config + allow_groups: "" + # This variable, if specified, configures a list of USER name patterns, separated by spaces, to prevent SSH access + # for users whose user name matches one of the patterns. This is done + # by setting the value of `DenyUsers` option in `/etc/ssh/sshd_config` file. + # If an USER@HOST format will be used, the specified user will be restricted only on that particular host. + # The allow/deny directives process order: DenyUsers, AllowUsers, DenyGroups, AllowGroups. + # For more info, see https://linux.die.net/man/5/sshd_config + deny_users: "" + # This variable, if specified, configures a list of GROUP name patterns, separated by spaces, to prevent SSH access + # for users whose primary group or supplementary group list matches one of the patterns. This is done + # by setting the value of `DenyGroups` option in `/etc/ssh/sshd_config` file. + # The allow/deny directives process order: DenyUsers, AllowUsers, DenyGroups, AllowGroups. + # For more info, see https://linux.die.net/man/5/sshd_config + deny_groups: "" + +## Control 5.2.1 +# This variable represents the name of the sudo package to install +# CIS recommends `sudo` or, if LDAP functionality is required, `sudo-ldap`. +ubtu24cis_sudo_package: "sudo" + +## Control 5.2.3 +# This variable defines the path and file name of the sudo log file. 
+ubtu24cis_sudo_logfile: "/var/log/sudo.log" +## Control 5.2.6 +# This variable sets the duration (in minutes) during which a user's authentication credentials +# are cached after successfully authenticating using "sudo". This allows the user to execute +# multiple commands with elevated privileges without needing to re-enter their password for each +# command within the specified time period. CIS requires a value of at most 15 minutes. +ubtu24cis_sudo_timestamp_timeout: 15 +## Control 5.2.7 +# This variable determines the group of users that are allowed to use the su command. +# one to specify a user group that is allowed to use the "su" command. +# CIS requires that such a group be created (named according to site policy) and be kept empty. +ubtu24cis_sugroup: nosugroup + +## 5.3.2.x +# Path to find templates and where to put file for pam-auth +ubtu24cis_pam_confd_dir: 'usr/share/pam-configs/' +### Note: controls also managed with disruption high due to the nature of pam changes +# Create file will create/replace with the name + +# Controls 5.3.2.1 - pam_unix +# Name of file +ubtu24cis_pam_pwunix_file: 'pam_unix' +# Should NOT be enabled if allowing custom config that enabled pam_faillock +ubtu24cis_pam_create_pamunix_file: false +# Allow pam-auth-update --enable ubtu24cis_pam_pwunix_file to run +ubtu24cis_pam_auth_unix: true + +# 5.3.2.2 - pam_faillock +# Name of files +ubtu24cis_pam_faillock_file: 'faillock' +ubtu24cis_pam_faillock_notify_file: 'faillock_notify' +# Allow pam-auth-update --enable ubtu24cis_pam_faillock_file +# and +# Allow pam-auth-update --enable ubtu24cis_pam_faillock_notify_file +ubtu24cis_pam_auth_faillock: true +# Allow new file to be created or overwrite existing with same name +ubtu24cis_pam_create_faillock_files: true + +# 5.3.2.3 - pam_pwquality +# Name of files +ubtu24cis_pam_pwquality_file: 'pwquality' +# Allow new file to be created or overwrite existing with same name +ubtu24cis_pam_create_pwquality_files: true +# Allow 
pam-auth-update --enable ubtu24cis_pam_pwquality_file +ubtu24cis_pam_auth_pwquality: true + +# 5.3.2.4 - pam_pwhistory +# Name of file +ubtu24cis_pam_pwhistory_file: 'pwhistory' +# Allow new file to be created or overwrite existing with same name +# filepath also affects controls 5.3.3.3.1, 5.3.3.3.2, 5.3.3.3.3 +ubtu24cis_pam_create_pwhistory_files: true +# Allow pam-auth-update --enable ubtu24cis_pam_pwhistory_file +ubtu24cis_pam_auth_pwhistory: true + +# 5.3.3.1.1 - faillock_deny +ubtu24cis_faillock_deny: 3 + +# 5.3.3.1.2 - faillock unlock time +ubtu24cis_faillock_unlock_time: 900 + +# 5.3.3.1.3 - lock root +# This allow optional - even_deny_root or root_unlock_time +ubtu24cis_pamroot_lock_option: even_deny_root +ubtu24cis_pamroot_lock_string: even_deny_root + +# 5.3.3.2.1 - password difok +ubtu24cis_passwd_difok_file: etc/security/pwquality.conf.d/50-pwdifok.conf # pragma: allowlist secret +ubtu24cis_passwd_difok_value: 2 + +# 5.3.3.2.2 - password minlength +ubtu24cis_passwd_minlen_file: etc/security/pwquality.conf.d/50-pwlength.conf # pragma: allowlist secret +ubtu24cis_passwd_minlen_value: 14 + +# 5.3.3.2.3 - password complex +ubtu24cis_passwd_complex_file: etc/security/pwquality.conf.d/50-pwcomplexity.conf # pragma: allowlist secret +ubtu24cis_passwd_minclass: 3 +ubtu24cis_passwd_dcredit: -1 +ubtu24cis_passwd_ucredit: -2 +ubtu24cis_passwd_ocredit: 0 +ubtu24cis_passwd_lcredit: -2 + +# 5.3.3.2.4 - password maxrepeat +ubtu24cis_passwd_maxrepeat_file: etc/security/pwquality.conf.d/50-pwrepeat.conf # pragma: allowlist secret +ubtu24cis_passwd_maxrepeat_value: 3 + +# 5.3.3.2.5 - password maxsequence +ubtu24cis_passwd_maxsequence_file: etc/security/pwquality.conf.d/50-pwmaxsequence.conf # pragma: allowlist secret +ubtu24cis_passwd_maxsequence_value: 3 + +# 5.3.3.2.6 - password dictcheck +ubtu24cis_passwd_dictcheck_file: etc/security/pwquality.conf.d/50-pwdictcheck.conf # pragma: allowlist secret +ubtu24cis_passwd_dictcheck_value: 1 + +# 5.3.3.2.7 - password quality 
enforce +ubtu24cis_passwd_quality_enforce_file: etc/security/pwquality.conf.d/50-pwquality_enforce.conf # pragma: allowlist secret +ubtu24cis_passwd_quality_enforce_value: 1 + +# 5.3.3.2.8 - password quality enforce for root included with 5.3.3.2.7 +ubtu24cis_passwd_quality_enforce_root_file: etc/security/pwquality.conf.d/50-pwroot.conf # pragma: allowlist secret +ubtu24cis_passwd_quality_enforce_root_value: enforce_for_root # pragma: allowlist secret + +## 5.3.3.3 Configure pam_pwhistory module +# Uses value for ubtu24cis_pam_pwhistory_file in 5.3.2.4 +# Control 5.3.3.3.1 +# This variable represents the number of password change cycles, after which +# a user can re-use a password. # CIS requires a value of 24 or more. +ubtu24cis_pamd_pwhistory_remember: 24 + +## Controls 5.4.1.x - Password settings +ubtu24cis_pass: + ## Control 5.4.1.1 + # This variable governs after how many days a password expires. + # CIS requires a value of 365 or less. + max_days: 365 + ## Control 5.4.1.2 + # This variable specifies the minimum number of days allowed between changing passwords. + # CIS requires a value of at least 1. + min_days: 1 + ## Control 5.4.1.3 + # This variable governs, how many days before a password expires, the user will be warned. + # CIS requires a value of at least 7. + warn_age: 7 + ## Control 5.4.1.5 + # This variable specifies the number of days of inactivity before an account will be locked. + # CIS requires a value of 45 days or less. + inactive: 45 + +# 5.4.2.6 root umask +ubtu24cis_root_umask: '0027' # 0027 or more restrictive +## Control 5.4.4 +# ubtu24cis_passwd_hash_algo is the hashing algorithm used +ubtu24cis_passwd_hash_algo: yescrypt # pragma: allowlist secret +# Set pam as well as login defs if PAM is required +ubtu24cis_passwd_setpam_hash_algo: false + +## Control 5.4.3 - Default user mask +# The following variable specifies the "umask" to set in the `/etc/bash.bashrc` and `/etc/profile`. 
+# The value needs to be `027` or more restrictive to comply with CIS standards +ubtu24cis_bash_umask: '027' + +## Control 5.4.3.2 - Configuring user shell timeout +# This dictionary is related to ensuring the rule about user shell timeout +# This variable represents the amount of seconds a command or process is allowed to +# run before being forcefully terminated. +# CIS requires a value of at most 900 seconds. +ubtu24cis_shell_session_timeout: 900 +# This variable specifies the path of the timeout setting file. +# (TMOUT setting can be set in multiple files, but only one is required for the +# rule to pass. Options are: +# - a file in `/etc/profile.d/` ending in `.s`, +# - `/etc/profile`, or +# - `/etc/bash.bashrc`. +ubtu24cis_shell_session_file: /etc/profile.d/tmout.sh + +## +## Section 6 Control Variables +## + +# Set which syslog service +# journald or rsyslog +ubtu24cis_syslog_service: 'journald' + +## Controls 6.1.1.x journald + +# This variable specifies the address of the remote log host where logs are being sent. +ubtu24cis_remote_log_server: 192.168.2.100 +# This variable expresses whether the system is used as a log server or not. +# If set to `true`, controls that interfere with log server functionality or +# require that own system logs be sent to some other log server are skipped. +ubtu24cis_system_is_log_server: false + +## Controls 6.1.1.2.x & 6.1.1.x journald +# This variable specifies the path to the private key file used by the remote journal +# server to authenticate itself to the client. This key is used alongside the server's +# public certificate to establish secure communication. +ubtu24cis_journal_upload_serverkeyfile: +# This variable specifies the path to the public certificate file of the remote journal +# server. This certificate is used to verify the authenticity of the remote server. 
+ubtu24cis_journal_servercertificatefile: +# This variable specifies the path to a file containing one or more public certificates +# of certificate authorities (CAs) that the client trusts. These trusted certificates are used +# to validate the authenticity of the remote server's certificate. +ubtu24cis_journal_trustedcertificatefile: + +# 6.1.1.1.3 +# These variable specifies how much disk space the journal may use up at most +# Specify values in bytes or use K, M, G, T, P, E as units for the specified sizes. +# See https://www.freedesktop.org/software/systemd/man/journald.conf.html for more information. +# ATTENTION: Uncomment the keyword below when values are set! +ubtu24cis_journald_systemmaxuse: "#SystemMaxUse=" +ubtu24cis_journald_systemkeepfree: "#SystemKeepFree=" +ubtu24cis_journald_runtimemaxuse: "#RuntimeMaxUse=" +ubtu24cis_journald_runtimekeepfree: "#RuntimeKeepFree=" +# This variable specifies, the maximum time to store entries in a single journal +# file before rotating to the next one. Set to 0 to turn off this feature. +# The given values is interpreted as seconds, unless suffixed with the units +# `year`, `month`, `week`, `day`, `h` or `m` to override the default time unit of seconds. +# ATTENTION: Uncomment the keyword below when values are set! +ubtu24cis_journald_maxfilesec: "#MaxFileSec=" + +## Controls 6.2.3.x - Audit template +# This variable is set to true by tasks 6.2.3.1 to 6.2.3.20. As a result, the +# audit settings are overwritten with the role's template. In order to exclude +# specific rules, you must set the variable of form `ubtu24cis_rule_4_1_3_x` above +# to `false`. 
+update_audit_template: false + +## Advanced option found in auditd post +## users whose actions are not logged by auditd +ubtu24cis_allow_auditd_uid_user_exclusions: false + +# add a list of uids +ubtu24cis_auditd_uid_exclude: + - 1999 + +# 6.1.3.8 +# ubtu24cis_logrotate sets the daily, weekly, monthly, yearly value for the log rotation +# To conform to CIS standards this just needs to comply with your site policy +ubtu24cis_logrotate: "daily" + + +## Control 6.2.1.4 - Ensure audit_backlog_limit is sufficient +# This variable represents the audit backlog limit, i.e., the maximum number of audit records that the +# system can buffer in memory, if the audit subsystem is unable to process them in real-time. +# Buffering in memory is useful in situations, where the audit system is overwhelmed +# with incoming audit events, and needs to temporarily store them until they can be processed. +# This variable should be set to a sufficient value. The CIS baseline recommends at least `8192` as value. +ubtu24cis_audit_back_log_limit: 8192 + +## Controls 6.2.2.x - What to do when log files fill up +## Control 6.2.2.1 - Ensure audit log storage size is configured +# This variable specifies the maximum size in MB that an audit log file can reach +# before it is archived or deleted to make space for the new audit data. +# This should be set based on your sites policy. CIS does not provide a specific value. +ubtu24cis_max_log_file_size: 10 + +## Control 6.2.3.2 +# This variable determines what action the audit system should take when the maximum +# size of a log file is reached. 
+# The options for setting this variable are as follows: +# - `ignore`: the system does nothing when the size of a log file is full; +# - `syslog`: a message is sent to the system log indicating the problem; +# - `suspend`: the system suspends recording audit events until the log file is cleared or rotated; +# - `rotate`: the log file is rotated (archived) and a new empty log file is created; +# - `keep_logs`: the system attempts to keep as many logs as possible without violating disk space constraints. +# CIS prescribes the value `keep_logs`. +ubtu24cis_auditd_max_log_file_action: keep_logs + +## Control 6.2.3.3 +# This variable determines how the system should act in case of issues with disk +# The disk_full_action parameter tells the system what action to take when no free space is available on the partition that holds the audit log files. +# Valid values are ignore, syslog, rotate, exec, suspend, single, and halt. +# +# The disk_error_action parameter tells the system what action to take when an error is detected on the partition that holds the audit log files. +# Valid values are ignore, syslog, exec, suspend, single, and halt. +# +# CIS prescribes +# disk_full_action parameter: +# Set to halt - the auditd daemon will shutdown the system when the disk partition containing the audit logs becomes full. +# Set to single - the auditd daemon will put the computer system in single user mode when the disk partition containing the audit logs becomes full. +# +# disk_error_action parameter: +# Set to halt - the auditd daemon will shutdown the system when an error is detected on the partition that holds the audit log files. +# Set to single - the auditd daemon will put the computer system in single user mode when an error is detected on the partition that holds the audit log files. +# Set to syslog - the auditd daemon will issue no more than 5 consecutive warnings to syslog when an error is detected on the partition that holds the audit log files. 
+ubtu24cis_auditd_disk_full_action: halt +ubtu24cis_auditd_disk_error_action: syslog + +# Control 6.2.2.4 +# What to do when space left is low. +ubtu24cis_auditd_space_left_action: email +ubtu24cis_auditd_admin_space_left_action: halt + +## Control 6.2.x - allow aide to be configured +# AIDE is a file integrity checking tool, similar in nature to Tripwire. +# While it cannot prevent intrusions, it can detect unauthorized changes +# to configuration files by alerting when the files are changed. Review +# the AIDE quick start guide and AIDE documentation before proceeding. +# By setting this variable to `true`, all of the settings related to AIDE will be applied! +ubtu24cis_config_aide: true + +## When Initializing aide this can take longer on some systems +# changing the values enables user to change to their own requirements +ubtu24cis_aide_init: + # Maximum Time in seconds + async: 45 + # Polling Interval in seconds + poll: 0 + +## Control 6.3 +# Set how aide is scanned either cron or timer +ubtu24cis_aide_scan: cron +# These are the crontab settings for periodical checking of the filesystem's integrity using AIDE. +# The sub-settings of this variable provide the parameters required to configure +# the cron job on the target system. +# Cron is a time-based job scheduling program in Unix OS, which allows tasks to be scheduled +# and executed automatically at a certain point in time. +ubtu24cis_aide_cron: + # This variable represents the user account under which the cron job for AIDE will run. + cron_user: root + # This variable represents the path to the AIDE crontab file. + cron_file: /etc/cron.d/aide_cron + # This variable represents the actual command or script that the cron job + # will execute for running AIDE. + aide_job: '/usr/bin/aide --config /etc/aide/aide.conf --check' + # These variables define the schedule for the cron job + # This variable governs the minute of the time of day when the AIDE cronjob is run. + # It must be in the range `0-59`. 
+ aide_minute: 0 + # This variable governs the hour of the time of day when the AIDE cronjob is run. + # It must be in the range `0-23`. + aide_hour: 5 + # This variable governs the day of the month when the AIDE cronjob is run. + # `*` signifies that the job is run on all days; furthermore, specific days + # can be given in the range `1-31`; several days can be concatenated with a comma. + # The specified day(s) must be in the range `1-31`. + aide_day: '*' + # This variable governs months when the AIDE cronjob is run. + # `*` signifies that the job is run in every month; furthermore, specific months + # can be given in the range `1-12`; several months can be concatenated with commas. + # The specified month(s) must be in the range `1-12`. + aide_month: '*' + # This variable governs the weekdays, when the AIDE cronjob is run. + # `*` signifies that the job is run on all weekdays; furthermore, specific weekdays + # can be given in the range `0-7` (both `0` and `7` represent Sunday); several weekdays + # can be concatenated with commas. + aide_weekday: '*' + + +## +## Section 7 Control Variables +## + +## Controls 6.2.11 & 6.2.12 +# The minimum and maximum UIDs to be used when enforcing +# and checking controls 6.2.11 and 6.2.12 can either be +# discovered automatically via login.defs or set manually +# in this file +# If min/max UIDs are to be discovered automatically, +# set this variable to `true`, otherwise to `false`. +discover_int_uid: false +# This variable sets the minimum number from which to search for UID +# Note that the value will be dynamically overwritten if variable `discover_int_uid` has +# been set to `true`. +min_int_uid: 1000 + +# This variable sets the maximum number at which the search stops for UID +# Note that the value will be dynamically overwritten if variable `discover_int_uid` has +# been set to `true`. 
+max_int_uid: 65533 + +## Control 6.1.9 +# The following variable is a toggle for enabling/disabling the automated +# removal of world-writable permissions from all files. +# Possible values are `true` and `false`. +ubtu24cis_no_world_write_adjust: true + +## Control 6.2.7 +# This variable is a toggle for enabling/disabling the automated modification of +# permissions on dot files. +# Possible values are `true` and `false`. +ubtu24cis_dotperm_ansiblemanaged: true + +## Section 7 + +# 7.1.12 Ensure no files or directories without an owner and a group exist +ubtu24cis_exclude_unowned_search_path: (! -path "/run/user/*" -a ! -path "/proc/*" -a ! -path "*/containerd/*" -a ! -path "*/kubelet/pods/*" -a ! -path "*/kubelet/plugins/*" -a ! -path "/sys/fs/cgroup/memory/*" -a ! -path "/var/*/private/*") + +# Control 7.1.12 +# The value of this variable specifies the owner that will be set for unowned files and directories. +ubtu24cis_unowned_owner: root +ubtu24cis_ungrouped_group: root +# This variable is a toggle for enabling/disabling the automated +# setting of an owner (specified in variable `ubtu24cis_unowned_owner`) +# for all unowned files and directories. +# Possible values are `true` and `false`. +ubtu24cis_ownership_adjust: true + +## Control 7.1.13 +# This variable is a toggle for enabling/disabling the automated removal +# of the SUID bit from all files on all mounts. +# Possible values are `true` and `false`. 
+ubtu24cis_suid_sgid_adjust: false diff --git a/handlers/main.yml b/handlers/main.yml new file mode 100644 index 0000000..c557a4e --- /dev/null +++ b/handlers/main.yml @@ -0,0 +1,193 @@ +--- + +- name: Writing the tmp file | tmp_systemd + when: + - "'/tmp' in mount_names" + - item.mount == "/tmp" + - tmp_mnt_type == 'tmp_systemd' + ansible.builtin.template: + src: etc/systemd/system/tmp.mount.j2 + dest: /etc/systemd/system/tmp.mount + owner: root + group: root + mode: '0644' + with_items: + - "{{ ansible_facts.mounts }}" + loop_control: + label: "{{ item.device }}" + listen: Writing and remounting tmp + +- name: Writing the tmp file | fstab + when: + - "'/tmp' in mount_names" + - tmp_mnt_type == 'fstab' + - item.mount == "/tmp" + ansible.posix.mount: + path: /tmp + src: "{{ item.device }}" + state: present + fstype: "{{ item.fstype }}" + opts: defaults,{{ tmp_partition_mount_options | unique | join(',') }} + with_items: + - "{{ ansible_facts.mounts }}" + loop_control: + label: "{{ item.device }}" + listen: Writing and remounting tmp + +- name: Update_Initramfs + ansible.builtin.shell: update-initramfs -u + notify: Set_reboot_required + +- name: Remount tmp + ansible.posix.mount: + path: /tmp + state: remounted + when: + - "'/tmp' in mount_names" + listen: Writing and remounting tmp + +- name: Remount var + ansible.posix.mount: + path: /var + state: remounted + +- name: Remount var_tmp + ansible.posix.mount: + path: /var/tmp + state: remounted + +- name: Remount var_log + ansible.posix.mount: + path: /var/log + state: remounted + +- name: Remount var_log_audit + ansible.posix.mount: + path: /var/log/audit + state: remounted + +- name: Remount home + ansible.posix.mount: + path: /home + state: remounted + +- name: Remount dev_shm + ansible.posix.mount: + path: /dev/shm + src: /dev/shm + state: remounted + +- name: Grub update + ansible.builtin.shell: update-grub + failed_when: false + notify: Set_reboot_required + +- name: Restart timeservice + 
ansible.builtin.systemd: + name: "{{ ubtu24cis_time_sync_tool }}" + state: restarted + +- name: Reload systemctl + ansible.builtin.systemd: + daemon_reload: true + +- name: Update dconf + ansible.builtin.shell: dconf update + failed_when: false + +- name: Restart postfix + ansible.builtin.service: + name: postfix + state: restarted + +- name: Restart syslog service + ansible.builtin.systemd: + name: "{{ ubtu24cis_syslog_service }}" + state: restarted + +- name: Restart journald + ansible.builtin.systemd: + name: systemd-journald + state: restarted + +- name: Restart exim4 + ansible.builtin.systemd: + name: exim4 + state: restarted + +- name: Flush ipv4 route table + when: ansible_facts.virtualization_type != "docker" + ansible.posix.sysctl: + name: net.ipv4.route.flush + value: '1' + sysctl_set: true + +- name: Flush ipv6 route table + when: + - ansible_facts.virtualization_type != "docker" + - ubtu24cis_ipv6_required + ansible.posix.sysctl: + name: net.ipv6.route.flush + value: '1' + sysctl_set: true + +- name: Reload ufw + community.general.ufw: + state: reloaded + +- name: Iptables persistent + ansible.builtin.shell: bash -c "iptables-save > /etc/iptables/rules.v4" + changed_when: ubtu24cis_iptables_save.rc == 0 + failed_when: ubtu24cis_iptables_save.rc > 0 + register: ubtu24cis_iptables_save + +- name: Ip6tables persistent + ansible.builtin.shell: bash -c "ip6tables-save > /etc/iptables/rules.v6" + changed_when: ubtu24cis_ip6tables_save.rc == 0 + failed_when: ubtu24cis_ip6tables_save.rc > 0 + register: ubtu24cis_ip6tables_save + +- name: Pam_auth_update_pwunix + ansible.builtin.shell: pam-auth-update --enable {{ ubtu24cis_pam_pwunix_file }} + +- name: Pam_auth_update_pwfaillock + ansible.builtin.shell: pam-auth-update --enable {{ ubtu24cis_pam_faillock_file }} + +- name: Pam_auth_update_pwfaillock_notify + ansible.builtin.shell: pam-auth-update --enable {{ ubtu24cis_pam_faillock_notify_file }} + +- name: Pam_auth_update_pwquality + ansible.builtin.shell: 
pam-auth-update --enable {{ ubtu24cis_pam_pwquality_file }} + +- name: Pam_auth_update_pwhistory + ansible.builtin.shell: pam-auth-update --enable {{ ubtu24cis_pam_pwhistory_file }} + +- name: Auditd rules reload + when: + - not prelim_auditd_immutable_check or + '"No change" not in ubtu24cis_rule_4_1_3_21_augen_check.stdout' + ansible.builtin.shell: augenrules --load + +- name: Audit_immutable_fact + when: + - audit_rules_updated.changed + - auditd_immutable_check is defined + ansible.builtin.debug: + msg: "Reboot required for auditd to apply new rules as immutable set" + notify: Set_reboot_required + +- name: Restart auditd + when: + - audit_rules_updated is defined + tags: + - skip_ansible_lint + ansible.builtin.shell: service auditd restart + +- name: Restart sshd + ansible.builtin.systemd: + name: ssh + state: restarted + +- name: Set_reboot_required + ansible.builtin.set_fact: + change_requires_reboot: true diff --git a/meta/main.yml b/meta/main.yml new file mode 100644 index 0000000..c993c99 --- /dev/null +++ b/meta/main.yml @@ -0,0 +1,28 @@ +--- + +galaxy_info: + author: "Mark Bolwell" + description: "Apply the Ubuntu 24 CIS benchmarks" + company: "MindPoint Group" + license: MIT + namespace: mindpointgroup + role_name: ubuntu24_cis + min_ansible_version: 2.12.1 + platforms: + - name: Ubuntu + versions: + - jammy + galaxy_tags: + - system + - security + - cis + - hardening + - benchmark + - compliance + - complianceascode + - ubuntu24 +collections: + - community.general + - community.crypto + - ansible.posix +dependencies: [] diff --git a/site.yml b/site.yml new file mode 100644 index 0000000..0358dc3 --- /dev/null +++ b/site.yml @@ -0,0 +1,8 @@ +--- + +- hosts: all + become: true + + roles: + + - role: "{{ playbook_dir }}" diff --git a/tasks/LE_audit_setup.yml b/tasks/LE_audit_setup.yml new file mode 100644 index 0000000..ffbb324 --- /dev/null +++ b/tasks/LE_audit_setup.yml @@ -0,0 +1,32 @@ +--- + +- name: Pre Audit Setup | Set audit package name + block: 
+ - name: Pre Audit Setup | Set audit package name | 64bit + when: ansible_facts.machine == "x86_64" + ansible.builtin.set_fact: + audit_pkg_arch_name: AMD64 + + - name: Pre Audit Setup | Set audit package name | ARM64 + when: ansible_facts.machine == "arm64" + ansible.builtin.set_fact: + audit_pkg_arch_name: ARM64 + +- name: Pre Audit Setup | Download audit binary + when: get_audit_binary_method == 'download' + ansible.builtin.get_url: + url: "{{ audit_bin_url }}{{ audit_pkg_arch_name }}" + dest: "{{ audit_bin }}" + owner: root + group: root + checksum: "{{ audit_bin_version[audit_pkg_arch_name + '_checksum'] }}" + mode: '0555' + +- name: Pre Audit Setup | Copy audit binary + when: get_audit_binary_method == 'copy' + ansible.builtin.copy: + src: "{{ audit_bin_copy_location }}" + dest: "{{ audit_bin }}" + mode: '0555' + owner: root + group: root diff --git a/tasks/audit_only.yml b/tasks/audit_only.yml new file mode 100644 index 0000000..f162339 --- /dev/null +++ b/tasks/audit_only.yml @@ -0,0 +1,30 @@ +--- + +- name: Audit_Only | Create local Directories for hosts + when: fetch_audit_files + delegate_to: localhost + become: false + ansible.builtin.file: + mode: '0755' + path: "{{ audit_capture_files_dir }}/{{ inventory_hostname }}" + recurse: true + state: directory + +- name: Audit_only | Get audits from systems and put in group dir + when: fetch_audit_files + ansible.builtin.fetch: + dest: "{{ audit_capture_files_dir }}/{{ inventory_hostname }}/" + flat: true + mode: '0644' + src: "{{ pre_audit_outfile }}" + +- name: Audit_only | Show Audit Summary + when: + - audit_only + ansible.builtin.debug: + msg: "{{ audit_results.split('\n') }}" + +- name: Audit_only | Stop Playbook Audit Only selected + when: + - audit_only + ansible.builtin.meta: end_play diff --git a/tasks/auditd.yml b/tasks/auditd.yml new file mode 100644 index 0000000..39f6450 --- /dev/null +++ b/tasks/auditd.yml @@ -0,0 +1,29 @@ +--- + +- name: "POST | AUDITD | Apply auditd template for section 
4.1.3.x" + when: update_audit_template + ansible.builtin.template: + src: audit/99_auditd.rules.j2 + dest: /etc/audit/rules.d/99_auditd.rules + owner: root + group: root + mode: '0640' + register: audit_rules_updated + notify: + - Auditd rules reload + - Audit_immutable_fact + - Restart auditd + - Set_reboot_required + +- name: POST | AUDITD | Set up auditd user logging exceptions + when: ubtu24cis_allow_auditd_uid_user_exclusions + ansible.builtin.template: + src: audit/98_auditd_exception.rules.j2 + dest: /etc/audit/rules.d/98_auditd_exceptions.rules + owner: root + group: root + mode: '0600' + notify: Restart auditd + +- name: POST | AUDITD | Flush handlers + ansible.builtin.meta: flush_handlers diff --git a/tasks/main.yml b/tasks/main.yml new file mode 100644 index 0000000..92a8b04 --- /dev/null +++ b/tasks/main.yml @@ -0,0 +1,234 @@ +--- + +- name: Check OS version and family + when: + - ansible_facts.distribution == 'Ubuntu' + - ansible_facts.distribution_major_version is version_compare('24', '!=') + tags: + - always + ansible.builtin.fail: + msg: "This role can only be run against Ubuntu 24. {{ ansible_facts.distribution }} {{ ansible_facts.distribution_major_version }} is not supported." + +- name: Check ansible version + tags: + - always + ansible.builtin.assert: + that: ansible_version.full is version_compare(min_ansible_version, '>=') + fail_msg: "You must use Ansible {{ min_ansible_version }} or greater" + success_msg: "This role is running a supported version of ansible {{ ansible_version.full }} >= {{ min_ansible_version }}" + +# This control should always run as this can pass on unintended issues. 
+- name: "Check password set for connecting user" + when: + - ubtu24cis_rule_5_2_4 + - ansible_env.SUDO_USER is defined + tags: + - always + block: + - name: Capture current password state of connecting user" + ansible.builtin.shell: "grep -w {{ ansible_env.SUDO_USER }} /etc/shadow | awk -F: '{print $2}'" + changed_when: false + failed_when: false + check_mode: false + register: ansible_user_password_set + + - name: "Assert that password set for {{ ansible_env.SUDO_USER }} and account not locked" + ansible.builtin.assert: + that: ansible_user_password_set.stdout != "!!" and ansible_user_password_set.stdout | length > 10 + fail_msg: "You have {{ sudo_password_rule }} enabled but the user = {{ ansible_env.SUDO_USER }} has no password set - It can break access" + success_msg: "You have a password set for sudo user {{ ansible_env.SUDO_USER }}" + vars: + sudo_password_rule: ubtu24cis_rule_5_2_4 # pragma: allowlist secret + +- name: Ensure root password is set + when: + - ubtu24cis_rule_5_4_2_4 + tags: + - always + block: + - name: Ensure root password is set + ansible.builtin.shell: passwd -S root | egrep -e "(Password set, SHA512 crypt|root P |Password locked)" + changed_when: false + failed_when: false + register: root_passwd_set + + - name: Ensure root password is set + ansible.builtin.assert: + that: root_passwd_set.rc == 0 + fail_msg: "You have rule 5.4.2.4 enabled this requires that you have a root password set - Please manually set a root password" + success_msg: "You have a root password set" + +- name: Check ubtu24cis_bootloader_password_hash variable has been changed + when: + - ubtu24cis_set_boot_pass + - ubtu24cis_rule_1_4_1 + tags: + - always + ansible.builtin.assert: + that: ubtu24cis_bootloader_password_hash.find('grub.pbkdf2.sha512') != -1 and ubtu24cis_bootloader_password_hash != 'grub.pbkdf2.sha512.changethispassword' # pragma: allowlist secret + msg: "This role will not be able to run single user password commands as ubtu24cis_bootloader_password_hash 
variable has not been set correctly" + +- name: Check ubtu24cis_grub_user password variable has been changed + when: ubtu24cis_rule_1_4_1 + tags: + - always + block: + - name: Check ubtu24cis_grub_user password variable has been changed | check password is set + ansible.builtin.shell: "grep ^{{ ubtu24cis_grub_user }} /etc/shadow | awk -F : '{print $2}'" + changed_when: false + register: ubtu24cis_password_set_grub_user + + - name: Check ubtu24cis_grub_user password variable has been changed | check password is set + when: + - "'$y$' in ubtu24cis_password_set_grub_user.stdout" + - ubtu24cis_set_grub_user_pass + - ubtu24cis_rule_1_4_1 + ansible.builtin.assert: + that: ubtu24cis_password_set_grub_user.stdout.find('$y$') != -1 or ubtu24cis_grub_user_passwd.find('$y$') != -1 and ubtu24cis_grub_user_passwd != '$y$j9T$MBA5l/tQyWifM869nQjsi.$cTy0ConcNjIYOn6Cppo5NAky20osrkRxz4fEWA8xac6' + msg: "This role will not set the {{ ubtu24cis_grub_user }} user password is not set or ubtu24cis_grub_user_passwd variable has not been set correctly" + + - name: Check ubtu24cis_grub_user password variable has been changed | if password blank or incorrect type and not being set + when: + - not ubtu24cis_set_grub_user_pass + ansible.builtin.assert: + that: ( ubtu24cis_password_set_grub_user.stdout | length > 10 ) and '$y$' in ubtu24cis_password_set_grub_user.stdout + fail_msg: "Grub User {{ ubtu24cis_grub_user }} has no password set or incorrect encryption" + success_msg: "Grub User {{ ubtu24cis_grub_user }} has a valid password set to be used in single user mode" + +- name: Setup rules if container + when: + - ansible_connection == 'docker' or + ansible_facts.virtualization_type in ["docker", "lxc", "openvz", "podman", "container"] + tags: + - container_discovery + - always + block: + - name: Discover and set container variable if required + ansible.builtin.set_fact: + system_is_container: true + + - name: Load variable for container + ansible.builtin.include_vars: + file: "{{ 
container_vars_file }}" + + - name: Output if discovered is a container + when: + - system_is_container + ansible.builtin.debug: + msg: system has been discovered as a container + +- name: Gather the package facts before prelim + tags: + - always + ansible.builtin.package_facts: + manager: auto + +- name: Run prelim tasks + tags: + - prelim_tasks + - run_audit + - always + ansible.builtin.import_tasks: + file: prelim.yml + +- name: Gather the package facts after prelim + tags: + - always + ansible.builtin.package_facts: + manager: auto + +- name: Run parse /etc/passwd + when: + - ubtu24cis_section5 or + ubtu24cis_section6 or + ubtu24cis_section7 + tags: + - always + ansible.builtin.import_tasks: + file: parse_etc_password.yml + +- name: Include section 1 patches + when: ubtu24cis_section1 + ansible.builtin.import_tasks: + file: section_1/main.yml + +- name: Include section 2 patches + when: ubtu24cis_section2 + ansible.builtin.import_tasks: + file: section_2/main.yml + +- name: Include section 3 patches + when: ubtu24cis_section3 + ansible.builtin.import_tasks: + file: section_3/main.yml + +- name: Include section 4 patches + when: ubtu24cis_section4 + ansible.builtin.import_tasks: + file: section_4/main.yml + +- name: Include section 5 patches + when: ubtu24cis_section5 + ansible.builtin.import_tasks: + file: section_5/main.yml + +- name: Include section 6 patches + when: ubtu24cis_section6 + ansible.builtin.import_tasks: + file: section_6/main.yml + +- name: Include section 7 patches + when: ubtu24cis_section7 + ansible.builtin.import_tasks: + file: section_7/main.yml + +- name: Run auditd logic + when: update_audit_template + tags: + - always + ansible.builtin.import_tasks: + file: auditd.yml + +- name: Flush handlers + ansible.builtin.meta: flush_handlers + +- name: Reboot system + tags: + - always + block: + - name: Reboot system if not skipped + when: + - not skip_reboot + - change_requires_reboot + ansible.builtin.reboot: + + - name: Warning a reboot 
required but skip option set + when: + - skip_reboot + - change_requires_reboot + ansible.builtin.debug: + msg: "Warning!! changes have been made that require a reboot to be implemented but skip reboot was set - Can affect compliance check results" + changed_when: true + +- name: Run post remediation audit + when: + - run_audit + tags: + - run_audit + ansible.builtin.import_tasks: + file: post_remediation_audit.yml + +- name: Show Audit Summary + when: + - run_audit + tags: + - run_audit + ansible.builtin.debug: + msg: "{{ audit_results.split('\n') }}" + +- name: If Warnings found Output count and control IDs affected + when: warn_count != 0 + tags: + - always + ansible.builtin.debug: + msg: "You have {{ warn_count }} Warning(s) that require investigating that are related to the following benchmark ID(s) {{ warn_control_list }}" diff --git a/tasks/parse_etc_password.yml b/tasks/parse_etc_password.yml new file mode 100644 index 0000000..53ea924 --- /dev/null +++ b/tasks/parse_etc_password.yml @@ -0,0 +1,33 @@ +--- + +- name: "PRELIM | Parse /etc/passwd" + tags: + - always + block: + - name: "PRELIM | Parse /etc/passwd | Get /etc/password contents" + ansible.builtin.shell: cat /etc/passwd + changed_when: false + check_mode: false + register: ubtu24cis_passwd_file_audit + + - name: "PRELIM | Parse /etc/passwd | Split passwd entries" + ansible.builtin.set_fact: + ubtu24cis_passwd: "{{ ubtu24cis_passwd_file_audit.stdout_lines | map('regex_replace', ld_passwd_regex, ld_passwd_yaml) | map('from_yaml') | list }}" + + with_items: "{{ ubtu24cis_passwd_file_audit.stdout_lines }}" + vars: + ld_passwd_regex: >- + ^(?P[^:]*):(?P[^:]*):(?P[^:]*):(?P[^:]*):(?P[^:]*):(?P[^:]*):(?P[^:]*) + ld_passwd_yaml: | # pragma: allowlist secret + id: >-4 + \g + password: >-4 + \g + uid: \g + gid: \g + gecos: >-4 + \g + dir: >-4 + \g + shell: >-4 + \g diff --git a/tasks/post_remediation_audit.yml b/tasks/post_remediation_audit.yml new file mode 100644 index 0000000..9b06b24 --- /dev/null +++ 
b/tasks/post_remediation_audit.yml @@ -0,0 +1,44 @@ +--- + +- name: Post Audit | Run post_remediation {{ benchmark }} audit + ansible.builtin.shell: "{{ audit_conf_dir }}/run_audit.sh -v {{ audit_vars_path }} -f {{ audit_format }} -o {{ post_audit_outfile }} -g \"{{ group_names }}\"" + changed_when: true + environment: + AUDIT_BIN: "{{ audit_bin }}" + AUDIT_CONTENT_LOCATION: "{{ audit_conf_dest | default('/opt') }}" + AUDIT_FILE: goss.yml + +- name: Post Audit | ensure audit files readable by users + ansible.builtin.file: + path: "{{ item }}" + mode: '0644' + state: file + loop: + - "{{ post_audit_outfile }}" + - "{{ pre_audit_outfile }}" + +- name: Post Audit | Capture audit data if json format + when: + - audit_format == "json" + block: + - name: Post Audit | Capture audit data if json format + ansible.builtin.shell: grep -E '"summary-line.*Count:.*Failed' "{{ post_audit_outfile }}" | cut -d'"' -f4 + register: post_audit_summary + changed_when: false + + - name: Post Audit | Set Fact for audit summary + ansible.builtin.set_fact: + post_audit_results: "{{ post_audit_summary.stdout }}" + +- name: Post Audit | Capture audit data if documentation format + when: + - audit_format == "documentation" + block: + - name: Post Audit | Capture audit data if documentation format + ansible.builtin.shell: "tail -2 {{ post_audit_outfile }}" + register: post_audit_summary + changed_when: false + + - name: Post Audit | Set Fact for audit summary + ansible.builtin.set_fact: + post_audit_results: "{{ post_audit_summary.stdout }}" diff --git a/tasks/pre_remediation_audit.yml b/tasks/pre_remediation_audit.yml new file mode 100644 index 0000000..6f215c3 --- /dev/null +++ b/tasks/pre_remediation_audit.yml @@ -0,0 +1,120 @@ +--- + +- name: Pre Audit Setup | Setup the LE audit + when: + - setup_audit + tags: + - setup_audit + ansible.builtin.include_tasks: + file: LE_audit_setup.yml + +- name: Pre Audit Setup | Ensure {{ audit_conf_dir }} exists + ansible.builtin.file: + path: "{{ 
audit_conf_dir }}" + state: directory + mode: '0755' + +- name: Pre Audit Setup | If using git for content set up + when: + - audit_content == 'git' + block: + - name: Pre Audit Setup | Install git + ansible.builtin.package: + name: git + state: present + + - name: Pre Audit Setup | Retrieve audit content files from git + ansible.builtin.git: + repo: "{{ audit_file_git }}" + dest: "{{ audit_conf_dir }}" + version: "{{ audit_git_version }}" + +- name: Pre Audit Setup | Copy audit content files to server + when: + - audit_content == 'copy' + ansible.builtin.copy: + src: "{{ audit_conf_source }}" + dest: "{{ audit_conf_dest }}" + mode: preserve + +- name: Pre Audit Setup | Unarchive audit content files on server + when: + - audit_content == 'archive' + ansible.builtin.unarchive: + src: "{{ audit_conf_source }}" + dest: "{{ audit_conf_dest }}" + +- name: Pre Audit Setup | Get audit content from url + when: + - audit_content == 'get_url' + ansible.builtin.unarchive: + src: "{{ audit_conf_source }}" + dest: "{{ audit_conf_dest }}/{{ benchmark }}-Audit" + remote_src: "{{ ( audit_conf_source is contains ('http'))| ternary(true, false ) }}" + extra_opts: "{{ (audit_conf_source is contains ('github')) | ternary('--strip-components=1', [] ) }}" + +- name: Pre Audit Setup | Check Goss is available + when: + - run_audit + block: + - name: Pre Audit Setup | Check for goss file + ansible.builtin.stat: + path: "{{ audit_bin }}" + register: goss_available + + - name: Pre Audit Setup | If audit ensure goss is available + when: + - not goss_available.stat.exists + ansible.builtin.fail: + msg: "Audit has been selected: unable to find goss binary at {{ audit_bin }}" + +- name: Pre Audit Setup | Copy ansible default vars values to test audit + tags: + - goss_template + - run_audit + when: + - run_audit + ansible.builtin.template: + src: ansible_vars_goss.yml.j2 + dest: "{{ audit_vars_path }}" + mode: '0600' + +- name: Pre Audit | Run pre_remediation {{ benchmark }} audit + 
ansible.builtin.shell: "{{ audit_conf_dir }}/run_audit.sh -v {{ audit_vars_path }} -f {{ audit_format }} -o {{ pre_audit_outfile }} -g \"{{ group_names }}\"" + changed_when: true + environment: + AUDIT_BIN: "{{ audit_bin }}" + AUDIT_CONTENT_LOCATION: "{{ audit_conf_dest | default('/opt') }}" + AUDIT_FILE: goss.yml + +- name: Pre Audit | Capture audit data if json format + when: + - audit_format == "json" + block: + - name: Pre Audit | Capture audit data if json format + ansible.builtin.shell: grep -E '\"summary-line.*Count:.*Failed' "{{ pre_audit_outfile }}" | cut -d'"' -f4 + register: pre_audit_summary + changed_when: false + + - name: Pre Audit | Set Fact for audit summary + ansible.builtin.set_fact: + pre_audit_results: "{{ pre_audit_summary.stdout }}" + +- name: Pre Audit | Capture audit data if documentation format + when: + - audit_format == "documentation" + block: + - name: Pre Audit | Capture audit data if documentation format + ansible.builtin.shell: tail -2 "{{ pre_audit_outfile }}" | tac | tr '\n' ' ' + register: pre_audit_summary + changed_when: false + + - name: Pre Audit | Set Fact for audit summary + ansible.builtin.set_fact: + pre_audit_results: "{{ pre_audit_summary.stdout }}" + +- name: Audit_Only | Run Audit Only + when: + - audit_only + ansible.builtin.import_tasks: + file: audit_only.yml diff --git a/tasks/prelim.yml b/tasks/prelim.yml new file mode 100644 index 0000000..e4b123c --- /dev/null +++ b/tasks/prelim.yml @@ -0,0 +1,319 @@ +--- + +- name: "PRELIM | AUDIT | Set default values for facts" + ansible.builtin.set_fact: + control_1_6_1_4_was_run: false + ubtu24cis_apparmor_enforce_only: false + changed_when: false + +- name: "PRELIM | AUDIT | Register if snap being used" + ansible.builtin.shell: df -h | grep -wc "/snap" + changed_when: false + failed_when: prelim_snap_pkg_mgr.rc not in [ 0, 1 ] + register: prelim_snap_pkg_mgr + tags: + - rule_1.1.1.2 + - always + when: + - ubtu24cis_rule_1_1_1_6 + +- name: "PRELIM | AUDIT | Register if 
squashfs is built into the kernel" + ansible.builtin.shell: cat /lib/modules/$(uname -r)/modules.builtin | grep -c "squashfs" + changed_when: false + failed_when: prelim_squashfs_builtin.rc not in [ 0, 1 ] + register: prelim_squashfs_builtin + tags: + - always + when: + - ubtu24cis_rule_1_1_1_6 + +- name: "PRELIM | AUDIT | Section 1.1 | Create list of mount points" + ansible.builtin.set_fact: + mount_names: "{{ ansible_facts.mounts | map(attribute='mount') | list }}" + tags: + - always + +- name: PRELIM | AUDIT | Capture tmp mount type | discover mount tmp type + when: + - "'/tmp' in mount_names" + - ubtu24cis_rule_1_1_2_1_1 or + ubtu24cis_rule_1_1_2_1_2 or + ubtu24cis_rule_1_1_2_1_3 or + ubtu24cis_rule_1_1_2_1_4 + tags: + - always + block: + - name: PRELIM | AUDIT | Capture tmp mount type | discover mount tmp type + ansible.builtin.shell: systemctl is-enabled tmp.mount + register: prelim_tmp_mnt_type + changed_when: false + failed_when: prelim_tmp_mnt_type.rc not in [ 0, 1 ] + + - name: PRELIM | AUDIT | Capture tmp mount type | Set to expected_tmp_mnt variable + when: "'generated' in prelim_tmp_mnt_type.stdout" + ansible.builtin.set_fact: + tmp_mnt_type: "{{ expected_tmp_mnt }}" + + - name: PRELIM | AUDIT | Capture tmp mount type | Set systemd service + when: "'generated' not in prelim_tmp_mnt_type.stdout" + ansible.builtin.set_fact: + tmp_mnt_type: tmp_systemd + +- name: PRELIM | Initialize the mount options variable + tags: + - always + block: + - name: PRELIM | Initializing the var if there is no /tmp mount | set_fact + when: "'/tmp' not in mount_names" + ansible.builtin.set_fact: + tmp_partition_mount_options: [] + + - name: PRELIM | Initializing the var if there is a /tmp mount | set_fact + when: + - item.mount == "/tmp" + - "'/tmp' in mount_names" + ansible.builtin.set_fact: + tmp_partition_mount_options: "{{ item.options.split(',') }}" + loop: "{{ ansible_facts.mounts }}" + +- name: Include audit specific variables + when: + - run_audit or audit_only + - 
setup_audit + tags: + - setup_audit + - run_audit + ansible.builtin.include_vars: + file: audit.yml + +- name: Include pre-remediation audit tasks + when: + - run_audit or audit_only + - setup_audit + tags: + - run_audit + ansible.builtin.import_tasks: + file: pre_remediation_audit.yml + +- name: "PRELIM | PATCH | Run apt update" + when: + - ubtu24cis_rule_1_2_1_1 or + ubtu24cis_rule_1_2_2_1 + tags: + - always + ansible.builtin.package: + update_cache: true + +- name: "PRELIM | AUDIT | Wireless adapter pre-requisites" + when: + - ubtu24cis_rule_3_1_2 + - not system_is_container + tags: + - always + block: + - name: "PRELIM | AUDIT | Discover is wireless adapter on system" + ansible.builtin.shell: find /sys/class/net/*/ -type d -name wireless + register: prelim_wireless_adapters + changed_when: false + failed_when: prelim_wireless_adapters.rc not in [ 0, 1 ] + + - name: "PRELIM | AUDIT | If wireless adapter present capture module" + when: + - prelim_wireless_adapters.rc == 0 + ansible.builtin.shell: | + for driverdir in $(find /sys/class/net/*/ -type d -name wireless | xargs -0 dirname); + do basename "$(readlink -f "$driverdir"/device/driver/module)"; + done | sort -u + changed_when: false + failed_when: prelim_wireless_modules.rc not in [ 0, 1 ] + register: prelim_wireless_modules + +- name: "PRELIM | PATCH | 5.3.4.5 | Find all sudoers files." + when: + - ubtu24cis_rule_5_2_4 or + ubtu24cis_rule_5_2_5 + tags: + - always + ansible.builtin.shell: "find /etc/sudoers /etc/sudoers.d/ -type f ! -name '*~' ! 
-name '*.*'" + changed_when: false + failed_when: false + check_mode: false + register: prelim_sudoers_files + +- name: "PRELIM | PATCH | Ensure conf.d directory exists required for 5.3.3.2.x" + when: + - ubtu24cis_rule_5_3_3_2_1 or + ubtu24cis_rule_5_3_3_2_2 or + ubtu24cis_rule_5_3_3_2_3 or + ubtu24cis_rule_5_3_3_2_4 or + ubtu24cis_rule_5_3_3_2_5 or + ubtu24cis_rule_5_3_3_2_6 + tags: + - always + ansible.builtin.file: + path: '/etc/security/pwquality.conf.d' + state: directory + owner: root + group: root + mode: '0750' + +- name: "PRELIM | AUDIT | Discover Interactive UID MIN and MIN from logins.def" + when: + - not discover_int_uid + tags: + - always + block: + - name: "PRELIM | AUDIT | Capture UID_MIN information from logins.def" + ansible.builtin.shell: grep -w "^UID_MIN" /etc/login.defs | awk '{print $NF}' + changed_when: false + register: prelim_uid_min_id + + - name: "PRELIM | AUDIT | Capture UID_MAX information from logins.def" + ansible.builtin.shell: grep -w "^UID_MAX" /etc/login.defs | awk '{print $NF}' + changed_when: false + register: prelim_uid_max_id + + - name: "PRELIM | AUDIT | Capture GID_MIN information from logins.def" + ansible.builtin.shell: grep -w "^GID_MIN" /etc/login.defs | awk '{print $NF}' + changed_when: false + register: prelim_gid_min_id + + - name: "PRELIM | AUDIT | Set_facts for interactive uid/gid" + ansible.builtin.set_fact: + min_int_uid: "{{ prelim_uid_min_id.stdout }}" + max_int_uid: "{{ prelim_uid_max_id.stdout }}" + min_int_gid: "{{ prelim_gid_min_id.stdout }}" + +- name: "PRELIM | AUDIT | Interactive Users" + tags: + - always + ansible.builtin.shell: > + grep -E -v '^(root|halt|sync|shutdown)' /etc/passwd | awk -F: '(!index($7, "sbin/nologin") && $7 != "/bin/nologin" && $7 != "/bin/false" && $7 != "/dev/null") { print $1 }' + changed_when: false + register: prelim_interactive_usernames + +- name: "PRELIM | AUDIT | Interactive User accounts home directories" + tags: + - always + ansible.builtin.shell: > + grep -E -v 
'^(root|halt|sync|shutdown)' /etc/passwd | awk -F: '(!index($7, "sbin/nologin") && $7 != "/bin/nologin" && $7 != "/bin/false" && $7 != "/dev/null") { print $6 }' + changed_when: false + register: prelim_interactive_users_home + +- name: "PRELIM | AUDIT | Interactive UIDs" + tags: + - always + ansible.builtin.shell: > + grep -E -v '^(root|halt|sync|shutdown)' /etc/passwd | awk -F: '(!index($7, "sbin/nologin") && $7 != "/bin/nologin" && $7 != "/bin/false") { print $3 }' + changed_when: false + register: prelim_interactive_uids + +- name: "PRELIM | AUDIT | Gather UID 0 accounts other than root" + when: + - ubtu24cis_rule_5_4_2_1 + tags: + - rule_5.4.2.1 + - level1-server + - level1-workstation + - users + - always + ansible.builtin.shell: "cat /etc/passwd | awk -F: '($3 == 0 && $1 != \"root\") {i++;print $1 } END {exit i}'" + changed_when: false + check_mode: false + register: prelim_uid_zero_accounts_except_root + +- name: "PRELIM | PATCH | Create journald conf.d directory" + when: + - ubtu24cis_rule_6_1_2_2 or + ubtu24cis_rule_6_1_2_3 or + ubtu24cis_rule_6_1_2_4 + tags: + - always + ansible.builtin.file: + path: /etc/systemd/journald.conf.d + state: directory + owner: root + group: root + mode: '0755' + +- name: "PRELIM | PATCH | Ensure auditd is installed" + when: + - ubtu24cis_rule_6_2_1_1 or + ubtu24cis_rule_6_2_4_1 or + ubtu24cis_rule_6_2_4_6 or + ubtu24cis_rule_6_2_4_8 + tags: + - level2-server + - level2-workstation + - patch + - auditd + - always + block: + - name: "PRELIM | PATCH | Ensure auditd is installed" + when: + - "'auditd' not in ansible_facts.packages or + 'auditd-plugins' not in ansible_facts.packages" + ansible.builtin.package: + name: ['auditd', 'audispd-plugins'] + state: present + + - name: "PRELIM | AUDIT | Audit conf and rules files | list files" + ansible.builtin.find: + path: /etc/audit/ + file_type: file + recurse: true + patterns: '*.conf,*.rules' + register: prelim_auditd_conf_files + +- name: "PRELIM | AUDIT | Check if auditd is 
immutable before changes" + when: "'auditd' in ansible_facts.packages" + tags: + - always + ansible.builtin.shell: auditctl -l | grep -c '-e 2' + changed_when: false + failed_when: prelim_auditd_immutable_check.rc not in [ 0, 1 ] + register: prelim_auditd_immutable_check + +- name: "PRELIM | AUDIT | 6.2.4.x | Capture information about auditd logfile path | discover file" + when: + - ubtu24cis_rule_6_2_4_1 or + ubtu24cis_rule_6_2_4_2 or + ubtu24cis_rule_6_2_4_3 or + ubtu24cis_rule_6_2_4_4 + tags: + - level2-server + - level2-workstation + - patch + - auditd + - rule_6.3.4.1 + - rule_6.3.4.2 + - rule_6.3.4.3 + - rule_6.3.4.4 + ansible.builtin.shell: "grep ^log_file /etc/audit/auditd.conf | awk '{ print $NF }'" + changed_when: false + failed_when: prelim_auditd_logfile.rc not in [ 0, 1 ] + register: prelim_auditd_logfile + +- name: "PRELIM | PATCH | Install ACL" + when: + - ubtu24cis_rule_7_2_9 + - "'acl' not in ansible_facts.packages" + tags: + - always + ansible.builtin.package: + name: acl + state: present + +## Optional + +- name: "Optional | PATCH | UFW firewall force to use /etc/sysctl.conf settings" + when: + - ubtu24cis_firewall_package == "ufw" + - ubtu24cis_ufw_use_sysctl + tags: + - always + ansible.builtin.lineinfile: + path: /etc/default/ufw + regexp: ^IPT_SYSCTL=.* + line: IPT_SYSCTL=/etc/sysctl.conf + mode: '0644' diff --git a/tasks/section_1/cis_1.1.1.x.yml b/tasks/section_1/cis_1.1.1.x.yml new file mode 100644 index 0000000..448e1e9 --- /dev/null +++ b/tasks/section_1/cis_1.1.1.x.yml @@ -0,0 +1,356 @@ +--- + +- name: "1.1.1.1 | PATCH | Ensure cramfs kernel module is not available" + when: + - ubtu24cis_rule_1_1_1_1 + tags: + - level1-server + - level1-workstation + - patch + - rule_1.1.1.1 + - filesystems + - cramfs + - NIST800-53R5_CM-7 + block: + - name: "1.1.1.1 | PATCH | Ensure cramfs kernel module is not available | Edit modprobe config" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/CIS.conf + regexp: "^(#)?install cramfs(\\s|$)" + line: 
"install cramfs /bin/true" + create: true + mode: '0600' + + - name: "1.1.1.1 | PATCH | Ensure cramfs kernel module is not available | blacklist" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/blacklist.conf + regexp: "^(#)?blacklist cramfs(\\s|$)" + line: "blacklist cramfs" + create: true + mode: '0600' + + - name: "1.1.1.1 | PATCH | Ensure cramfs kernel module is not available | Disable cramfs" + when: + - not system_is_container + community.general.modprobe: + name: cramfs + state: absent + +- name: "1.1.1.2 | PATCH | Ensure freevxfs kernel module is not available" + when: + - ubtu24cis_rule_1_1_1_2 + tags: + - level1-server + - level1-workstation + - patch + - rule_1.1.1.2 + - filesystems + - freevxfs + - NIST800-53R5_CM-7 + block: + - name: "1.1.1.2 | PATCH | Ensure freevxfs kernel module is not available | Edit modprobe config" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/CIS.conf + regexp: "^(#)?install freevxfs(\\s|$)" + line: "install freevxfs /bin/true" + create: true + mode: '0600' + + - name: "1.1.1.2 | PATCH | Ensure freevxfs kernel module is not available | blacklist" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/blacklist.conf + regexp: "^(#)?blacklist freevxfs(\\s|$)" + line: "blacklist freevxfs" + create: true + mode: '0600' + + - name: "1.1.1.2 | PATCH | Ensure freevxfs kernel module is not available | Disable freevxfs" + when: + - not system_is_container + community.general.modprobe: + name: freevxfs + state: absent + +- name: "1.1.1.3 | PATCH | Ensure hfs kernel module is not available" + when: + - ubtu24cis_rule_1_1_1_3 + tags: + - level1-server + - level1-workstation + - patch + - rule_1.1.1.3 + - filesystems + - hfs + - NIST800-53R5_CM-7 + block: + - name: "1.1.1.3 | PATCH | Ensure hfs kernel module is not available | Edit modprobe config" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/CIS.conf + regexp: "^(#)?install hfs(\\s|$)" + line: "install hfs /bin/true" + create: true + mode: '0600' + + - name: "1.1.1.3 
| PATCH | Ensure hfs kernel module is not available | blacklist" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/blacklist.conf + regexp: "^(#)?blacklist hfs(\\s|$)" + line: "blacklist hfs" + create: true + mode: '0600' + + - name: "1.1.1.3 | PATCH | Ensure hfs kernel module is not available | Disable hfs" + when: + - not system_is_container + community.general.modprobe: + name: hfs + state: absent + +- name: "1.1.1.4 | PATCH | Ensure hfsplus kernel module is not available" + when: + - ubtu24cis_rule_1_1_1_4 + tags: + - level1-server + - level1-workstation + - patch + - rule_1.1.1.4 + - filesystems + - hfsplus + - NIST800-53R5_CM-7 + block: + - name: "1.1.1.4 | PATCH | Ensure hfsplus kernel module is not available | Edit modprobe config" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/CIS.conf + regexp: "^(#)?install hfsplus(\\s|$)" + line: "install hfsplus /bin/true" + create: true + mode: '0600' + + - name: "1.1.1.4 | PATCH | Ensure hfsplus kernel module is not available | blacklist" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/blacklist.conf + regexp: "^(#)?blacklist hfsplus(\\s|$)" + line: "blacklist hfsplus" + create: true + mode: '0600' + + - name: "1.1.1.4 | PATCH | Ensure hfsplus kernel module is not available | Disable hfsplus" + when: + - not system_is_container + community.general.modprobe: + name: hfsplus + state: absent + +- name: "1.1.1.5 | PATCH | Ensure jffs2 kernel module is not available" + when: + - ubtu24cis_rule_1_1_1_5 + tags: + - level1-server + - level1-workstation + - patch + - rule_1.1.1.5 + - filesystems + - jffs2 + - NIST800-53R5_CM-7 + block: + - name: "1.1.1.5 | PATCH | Ensure jffs2 kernel module is not available | Edit modprobe config" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/CIS.conf + regexp: "^(#)?install jffs2(\\s|$)" + line: "install jffs2 /bin/true" + create: true + mode: '0600' + + - name: "1.1.1.5 | PATCH | Ensure jffs2 kernel module is not available | blacklist" + ansible.builtin.lineinfile: 
+ path: /etc/modprobe.d/blacklist.conf + regexp: "^(#)?blacklist jffs2(\\s|$)" + line: "blacklist jffs2" + create: true + mode: '0600' + + - name: "1.1.1.5 | PATCH | Ensure jffs2 kernel module is not available | Disable jffs2" + when: + - not system_is_container + community.general.modprobe: + name: jffs2 + state: absent + +- name: "1.1.1.6 | PATCH | Ensure overlayfs kernel module is not available" + when: + - ubtu24cis_rule_1_1_1_6 + tags: + - level2-server + - level2-workstation + - patch + - rule_1.1.1.6 + - filesystems + - overlayfs + - NIST800-53R5_CM-7 + block: + - name: "1.1.1.6 | PATCH | Ensure overlayfs kernel module is not available | Edit modprobe config" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/CIS.conf + regexp: "^(#)?install overlayfs(\\s|$)" + line: "install overlayfs /bin/true" + create: true + mode: '0600' + + - name: "1.1.1.6 | PATCH | Ensure overlayfs kernel module is not available | blacklist" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/blacklist.conf + regexp: "^(#)?blacklist overlayfs(\\s|$)" + line: "blacklist overlayfs" + create: true + mode: '0600' + + - name: "1.1.1.6 | PATCH | Ensure overlayfs kernel module is not available | Disable overlayfs" + when: + - not system_is_container + community.general.modprobe: + name: overlayfs + state: absent + +- name: "1.1.1.7 | PATCH | Ensure squashfs kernel module is not available" + when: + - ubtu24cis_rule_1_1_1_7 + - not prelim_squashfs_builtin + - prelim_snap_pkg_mgr.rc != 0 + tags: + - level2-server + - level2-workstation + - patch + - rule_1.1.1.7 + - filesystems + - squashfs + - NIST800-53R5_CM-7 + block: + - name: "1.1.1.7 | PATCH | Ensure squashfs kernel module is not available | Edit modprobe config" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/CIS.conf + regexp: "^(#)?install squashfs(\\s|$)" + line: "install squashfs /bin/true" + create: true + mode: '0600' + + - name: "1.1.1.7 | PATCH | Ensure squashfs kernel module is not available | blacklist" + 
ansible.builtin.lineinfile: + path: /etc/modprobe.d/blacklist.conf + regexp: "^(#)?blacklist squashfs(\\s|$)" + line: "blacklist squashfs" + create: true + mode: '0600' + + - name: "1.1.1.7 | PATCH | Ensure squashfs kernel module is not available | Disable squashfs" + when: + - not system_is_container + community.general.modprobe: + name: squashfs + state: absent + +- name: "1.1.1.8 | PATCH | Ensure udf kernel module is not available" + when: + - ubtu24cis_rule_1_1_1_8 + tags: + - level2-server + - level2-workstation + - patch + - rule_1.1.1.8 + - filesystems + - udf + - NIST800-53R5_CM-7 + block: + - name: "1.1.1.8 | PATCH | Ensure udf kernel module is not available | Edit modprobe config" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/CIS.conf + regexp: "^(#)?install udf(\\s|$)" + line: "install udf /bin/true" + create: true + mode: '0600' + + - name: "1.1.1.8 | PATCH | Ensure udf kernel module is not available | blacklist" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/blacklist.conf + regexp: "^(#)?blacklist udf(\\s|$)" + line: "blacklist udf" + create: true + mode: '0600' + + - name: "1.1.1.8 | PATCH | Ensure udf kernel module is not available | Disable udf" + when: + - not system_is_container + community.general.modprobe: + name: udf + state: absent + +- name: "1.1.1.9 | PATCH | Ensure usb-storage kernel module is not available" + when: + - ubtu24cis_rule_1_1_1_9 + tags: + - level1-server + - level2-workstation + - patch + - rule_1.1.1.9 + - filesystems + - usb + - NIST800-53R5_SI-3 + block: + - name: "1.1.1.9 | PATCH | Ensure usb-storage kernel module is not available | Edit modprobe config" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/CIS.conf + regexp: "^(#)?install usb-storage(\\s|$)" + line: "install usb-storage /bin/true" + create: true + mode: '0600' + + - name: "1.1.1.9 | PATCH | Ensure usb-storage kernel module is not available | blacklist" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/blacklist.conf + regexp: 
"^(#)?blacklist usb-storage(\\s|$)" + line: "blacklist usb-storage" + create: true + mode: '0600' + + - name: "1.1.1.9 | PATCH | Ensure usb-storage kernel module is not available | Disable usb" + when: + - not system_is_container + community.general.modprobe: + name: usb-storage + state: absent + +- name: "1.1.1.10 | PATCH | Ensure unused filesystems kernel modules are not available" + when: + - ubtu24cis_rule_1_1_1_10 + tags: + - level1-server + - level1-workstation + - patch + - rule_1.1.1.10 + - filesystems + vars: + warn_control_id: '1.1.1.10' + block: + - name: "1.1.1.10 | PATCH | Ensure unused filesystems kernel modules are not available | Add discovery script" + ansible.builtin.template: + src: fs_with_cves.sh.j2 + dest: /var/fs_with_cves.sh + owner: root + group: root + mode: '0744' + + - name: "1.1.1.10 | AUDIT | Ensure unused filesystems kernel modules are not available | Run discovery script" + ansible.builtin.shell: /var/fs_with_cves.sh + changed_when: false + failed_when: discovered_fs_modules_loaded.rc not in [ 0, 99 ] + register: discovered_fs_modules_loaded + + - name: "1.1.1.10 | AUDIT | Ensure unused filesystems kernel modules are not available | Output Warning" + when: discovered_fs_modules_loaded.stdout | length > 0 + ansible.builtin.debug: + msg: | + "Warning!! Discovered loaded Filesystem modules that need attention. 
This is a manual task + {{ discovered_fs_modules_loaded.stdout_lines }}" + + - name: "1.1.1.10 | AUDIT | Ensure unused filesystems kernel modules are not available | Capture Warning" + when: discovered_fs_modules_loaded.stdout | length > 0 + ansible.builtin.import_tasks: + file: warning_facts.yml diff --git a/tasks/section_1/cis_1.1.2.1.x.yml b/tasks/section_1/cis_1.1.2.1.x.yml new file mode 100644 index 0000000..f43690d --- /dev/null +++ b/tasks/section_1/cis_1.1.2.1.x.yml @@ -0,0 +1,84 @@ +--- + +- name: "1.1.2.1.1 | AUDIT | Ensure /tmp is a separate partition" + when: + - required_mount not in mount_names + - ubtu24cis_rule_1_1_2_1_1 + tags: + - level1-server + - level1-workstation + - audit + - mounts + - rule_1.1.2.1.1 + - NIST800-53R5_CM-7 + - tmp + vars: + warn_control_id: '1.1.2.1.1' + required_mount: '/tmp' + block: + - name: "1.1.2.1.1 | AUDIT | Ensure /tmp is a separate partition | Absent" + ansible.builtin.debug: + msg: "Warning!! {{ required_mount }} doesn't exist. This is a manual task" + + - name: "1.1.2.1.1 | WARN | Ensure /tmp is a separate partition | warn_count" + ansible.builtin.import_tasks: + file: warning_facts.yml + +- name: "1.1.2.1.2 | PATCH | Ensure nodev option set on /tmp partition" + when: + - required_mount in mount_names + - ubtu24cis_rule_1_1_2_1_2 + tags: + - level1-server + - level1-workstation + - patch + - mounts + - rule_1.1.2.1.2 + - NIST800-53R5_CM-7 + - tmp + vars: + required_mount: '/tmp' + ansible.builtin.set_fact: + tmp_partition_mount_options: "{{ tmp_partition_mount_options + [ 'nodev' ] }}" + changed_when: true + notify: Writing and remounting tmp + +- name: "1.1.2.1.3 | PATCH | Ensure nosuid option set on /tmp partition" + when: + - required_mount in mount_names + - ubtu24cis_rule_1_1_2_1_3 + tags: + - level1-server + - level1-workstation + - patch + - mounts + - rule_1.1.2.1.3 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + - tmp + vars: + required_mount: '/tmp' + ansible.builtin.set_fact: + tmp_partition_mount_options: 
"{{ tmp_partition_mount_options + [ 'nosuid' ] }}" + changed_when: true + notify: Writing and remounting tmp + +- name: "1.1.2.1.4 | PATCH | Ensure noexec option set on /tmp partition" + when: + - required_mount in mount_names + - ubtu24cis_rule_1_1_2_1_4 + tags: + - level1-server + - level1-workstation + - patch + - mounts + - rule_1.1.2.1.4 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + - tmp + vars: + required_mount: '/tmp' + ansible.builtin.set_fact: + tmp_partition_mount_options: "{{ tmp_partition_mount_options + [ 'noexec' ] }}" + changed_when: true + notify: Writing and remounting tmp diff --git a/tasks/section_1/cis_1.1.2.2.x.yml b/tasks/section_1/cis_1.1.2.2.x.yml new file mode 100644 index 0000000..0a66566 --- /dev/null +++ b/tasks/section_1/cis_1.1.2.2.x.yml @@ -0,0 +1,58 @@ +--- + +- name: "1.1.2.2.1 | PATCH | Ensure /dev/shm is a separate partition" + when: + - ubtu24cis_rule_1_1_2_2_1 + tags: + - level1-server + - level1-workstation + - audit + - mounts + - rule_1.1.2.2.1 + - NIST800-53R5_CM-7 + vars: + warn_control_id: '1.1.2.2.1' + required_mount: '/dev/shm' + block: + - name: "1.1.2.2.1 | AUDIT | Ensure /dev/shm is a separate partition | check for mount" + ansible.builtin.shell: findmnt -kn "{{ required_mount }}" + changed_when: false + failed_when: discovered_shm_mount.rc not in [ 0, 1 ] + register: discovered_shm_mount + + - name: "1.1.2.2.1 | AUDIT | Ensure /dev/shm is a separate partition | Absent" + when: discovered_shm_mount is undefined + ansible.builtin.debug: + msg: "Warning!! 
{{ required_mount }} is not mounted on a separate partition" + + - name: "1.1.2.2.1 | AUDIT | Ensure /dev/shm is a separate partition | Present" + when: discovered_shm_mount is undefined + ansible.builtin.import_tasks: + file: warning_facts.yml + +- name: | + "1.1.2.2.2 | PATCH | Ensure nodev option set on /dev/shm partition + 1.1.2.2.3 | PATCH | Ensure nosuid option set on /dev/shm partition + 1.1.2.2.4 | PATCH | Ensure noexec option set on /dev/shm partition" + when: + - discovered_shm_mount is defined + - ubtu24cis_rule_1_1_2_2_2 or + ubtu24cis_rule_1_1_2_2_3 or + ubtu24cis_rule_1_1_2_2_4 + tags: + - level1-server + - level1-workstation + - patch + - mounts + - rule_1.1.2.2.1 + - rule_1.1.2.2.2 + - rule_1.1.2.2.3 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + notify: Set_reboot_required + ansible.posix.mount: + name: /dev/shm + src: tmpfs + fstype: tmpfs + state: mounted + opts: defaults,{% if ubtu24cis_rule_1_1_2_2_2 %}nodev,{% endif %}{% if ubtu24cis_rule_1_1_2_2_3 %}nosuid,{% endif %}{% if ubtu24cis_rule_1_1_2_2_4 %}noexec{% endif %} diff --git a/tasks/section_1/cis_1.1.2.3.x.yml b/tasks/section_1/cis_1.1.2.3.x.yml new file mode 100644 index 0000000..36c1ecc --- /dev/null +++ b/tasks/section_1/cis_1.1.2.3.x.yml @@ -0,0 +1,55 @@ +--- + +- name: "1.1.2.3.1 | AUDIT | Ensure separate partition exists for /home" + when: + - ubtu24cis_rule_1_1_2_3_1 + - "'/home' not in mount_names" + tags: + - level2-server + - level2-workstation + - audit + - mounts + - rule_1.1.2.3.1 + - NIST800-53R5_CM-7 + vars: + warn_control_id: '1.1.2.3.1' + required_mount: '/home' + block: + - name: "1.1.2.3.1 | AUDIT | Ensure separate partition exists for /home | Warn if partition is absent" + ansible.builtin.debug: + msg: "Warning!! {{ required_mount }} doesn't exist. 
This is a manual task" + register: home_mount_absent + changed_when: home_mount_absent.skipped is undefined + + - name: "1.1.2.3.1 | AUDIT | Ensure separate partition exists for /home | Present" + ansible.builtin.import_tasks: + file: warning_facts.yml + +# skips if mount is absent +- name: | + "1.1.2.3.2 | PATCH | Ensure nodev option set on /home partition" + "1.1.2.3.3 | PATCH | Ensure nosuid option set on /home partition" + when: + - "'/home' in mount_names" + - item.mount == "/home" + - ubtu24cis_rule_1_1_2_3_2 or + ubtu24cis_rule_1_1_2_3_3 + tags: + - level1-server + - level1-workstation + - patch + - mounts + - rule_1.1.2.3.2 + - rule_1.1.2.3.3 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + notify: Set_reboot_required + ansible.posix.mount: + name: /home + src: "{{ item.device }}" + fstype: "{{ item.fstype }}" + state: present + opts: defaults,{% if ubtu24cis_rule_1_1_2_3_2 %}nodev,{% endif %}{% if ubtu24cis_rule_1_1_2_3_3 %}nosuid{% endif %} + loop: "{{ ansible_facts.mounts }}" + loop_control: + label: "{{ item.device }}" diff --git a/tasks/section_1/cis_1.1.2.4.x.yml b/tasks/section_1/cis_1.1.2.4.x.yml new file mode 100644 index 0000000..6805c9e --- /dev/null +++ b/tasks/section_1/cis_1.1.2.4.x.yml @@ -0,0 +1,55 @@ +--- + +- name: "1.1.2.4.1 | AUDIT | Ensure separate partition exists for /var" + when: + - "'/var' not in mount_names" + - ubtu24cis_rule_1_1_2_4_1 + tags: + - level2-server + - level2-workstation + - patch + - mounts + - rule_1.1.2.4.1 + - NIST800-53R5_CM-7 + vars: + warn_control_id: '1.1.2.4.1' + required_mount: '/var' + block: + - name: "1.1.2.4.1 | AUDIT | Ensure separate partition exists for /var | Warn if partition is absent" + ansible.builtin.debug: + msg: "Warning!! {{ required_mount }} doesn't exist. 
This is a manual task" + register: var_mount_absent + changed_when: var_mount_absent.skipped is undefined + + - name: "1.1.2.4.1 | AUDIT | Ensure separate partition exists for /var | Present" + ansible.builtin.import_tasks: + file: warning_facts.yml + +# skips if mount is absent +- name: | + "1.1.2.4.2 | PATCH | Ensure nodev option set on /var partition" + "1.1.2.4.3 | PATCH | Ensure nosuid option set on /var partition" + when: + - "'/var' in mount_names" + - item.mount == "/var" + - ubtu24cis_rule_1_1_2_4_2 or + ubtu24cis_rule_1_1_2_4_3 + tags: + - level1-server + - level1-workstation + - patch + - mounts + - rule_1.1.2.4.2 + - rule_1.1.2.4.3 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + notify: Set_reboot_required + ansible.posix.mount: + name: /var + src: "{{ item.device }}" + fstype: "{{ item.fstype }}" + state: present + opts: defaults,{% if ubtu24cis_rule_1_1_2_4_2 %}nodev,{% endif %}{% if ubtu24cis_rule_1_1_2_4_3 %}nosuid{% endif %} + loop: "{{ ansible_facts.mounts }}" + loop_control: + label: "{{ item.device }}" diff --git a/tasks/section_1/cis_1.1.2.5.x.yml b/tasks/section_1/cis_1.1.2.5.x.yml new file mode 100644 index 0000000..803fa18 --- /dev/null +++ b/tasks/section_1/cis_1.1.2.5.x.yml @@ -0,0 +1,59 @@ +--- + +# Skips if mount is absent +- name: "1.1.2.5.1 | AUDIT | Ensure separate partition exists for /var/tmp" + when: + - ubtu24cis_rule_1_1_2_5_1 + - "'/var/tmp' not in mount_names" + tags: + - level2-server + - level2-workstation + - audit + - mounts + - rule_1.1.2.5.1 + - NIST800-53R5_CM-7 + vars: + warn_control_id: '1.1.2.5.1' + required_mount: '/var/tmp' + block: + - name: "1.1.2.5.1 | AUDIT | Ensure separate partition exists for /var/tmp | Warn if partition is absent" + ansible.builtin.debug: + msg: "Warning!! {{ required_mount }} doesn't exist. 
This is a manual task" + register: var_tmp_mount_absent + changed_when: var_tmp_mount_absent.skipped is undefined + + - name: "1.1.2.5.1 | AUDIT | Ensure separate partition exists for /var/tmp | Present" + ansible.builtin.import_tasks: + file: warning_facts.yml + +# skips if mount is absent +- name: | + "1.1.2.5.2 | PATCH | Ensure nodev option set on /var/tmp partition" + "1.1.2.5.3 | PATCH | Ensure nosuid option set on /var/tmp partition" + "1.1.2.5.4 | PATCH | Ensure noexec option set on /var/tmp partition" + when: + - "'/var/tmp' in mount_names" + - item.mount == "/var/tmp" + - ubtu24cis_rule_1_1_2_5_2 or + ubtu24cis_rule_1_1_2_5_3 or + ubtu24cis_rule_1_1_2_5_4 + tags: + - level1-server + - level1-workstation + - patch + - mounts + - rule_1.1.2.5.2 + - rule_1.1.2.5.3 + - rule_1.1.2.5.4 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + notify: Set_reboot_required + ansible.posix.mount: + name: /var/tmp + src: "{{ item.device }}" + fstype: "{{ item.fstype }}" + state: present + opts: defaults,{% if ubtu24cis_rule_1_1_2_5_2 %}nodev,{% endif %}{% if ubtu24cis_rule_1_1_2_5_3 %}nosuid,{% endif %}{% if ubtu24cis_rule_1_1_2_5_4 %}noexec{% endif %} + loop: "{{ ansible_facts.mounts }}" + loop_control: + label: "{{ item.device }}" diff --git a/tasks/section_1/cis_1.1.2.6.x.yml b/tasks/section_1/cis_1.1.2.6.x.yml new file mode 100644 index 0000000..06c658e --- /dev/null +++ b/tasks/section_1/cis_1.1.2.6.x.yml @@ -0,0 +1,58 @@ +--- + +- name: "1.1.2.6.1 | AUDIT | Ensure separate partition exists for /var/log" + when: + - ubtu24cis_rule_1_1_2_6_1 + - "'/var/log' not in mount_names" + tags: + - level2-server + - level2-workstation + - audit + - mounts + - rule_1.1.2.6.1 + - NIST800-53R5_CM-7 + vars: + warn_control_id: '1.1.2.6.1' + required_mount: '/var/log' + block: + - name: "1.1.2.6.1 | AUDIT | Ensure separate partition exists for /var/log | Warn if partition is absent" + ansible.builtin.debug: + msg: "Warning!! {{ required_mount }} doesn't exist. 
This is a manual task" + register: var_log_mount_absent + changed_when: var_log_mount_absent.skipped is undefined + + - name: "1.1.2.6.1 | AUDIT | Ensure separate partition exists for /var/log | Present" + ansible.builtin.import_tasks: + file: warning_facts.yml + +# skips if mount is absent +- name: | + "1.1.2.6.2 | PATCH | Ensure nodev option set on /var/log partition" + "1.1.2.6.3 | PATCH | Ensure nosuid option set on /var/log partition" + "1.1.2.6.4 | PATCH | Ensure noexec option set on /var/log partition" + when: + - "'/var/log' in mount_names" + - item.mount == "/var/log" + - ubtu24cis_rule_1_1_2_6_2 or + ubtu24cis_rule_1_1_2_6_3 or + ubtu24cis_rule_1_1_2_6_4 + tags: + - level1-server + - level1-workstation + - patch + - mounts + - rule_1.1.2.6.2 + - rule_1.1.2.6.3 + - rule_1.1.2.6.4 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + notify: Set_reboot_required + ansible.posix.mount: + name: /var/log + src: "{{ item.device }}" + fstype: "{{ item.fstype }}" + state: present + opts: defaults,{% if ubtu24cis_rule_1_1_2_6_2 %}nodev,{% endif %}{% if ubtu24cis_rule_1_1_2_6_3 %}nosuid,{% endif %}{% if ubtu24cis_rule_1_1_2_6_4 %}noexec{% endif %} + loop: "{{ ansible_facts.mounts }}" + loop_control: + label: "{{ item.device }}" diff --git a/tasks/section_1/cis_1.1.2.7.x.yml b/tasks/section_1/cis_1.1.2.7.x.yml new file mode 100644 index 0000000..c598eac --- /dev/null +++ b/tasks/section_1/cis_1.1.2.7.x.yml @@ -0,0 +1,58 @@ +--- + +- name: "1.1.2.7.1 | AUDIT | Ensure separate partition exists for /var/log/audit" + when: + - ubtu24cis_rule_1_1_2_7_1 + - "'/var/log/audit' not in mount_names" + tags: + - level2-server + - level2-workstation + - audit + - mounts + - rule_1.1.2.7.1 + - NIST800-53R5_CM-7 + vars: + warn_control_id: '1.1.2.7.1' + required_mount: '/var/log/audit' + block: + - name: "1.1.2.7.1 | AUDIT | Ensure separate partition exists for /var/log/audit | Warn if partition is absent" + ansible.builtin.debug: + msg: "Warning!! {{ required_mount }} doesn't exist. 
This is a manual task" + register: var_log_audit_mount_absent + changed_when: var_log_audit_mount_absent.skipped is undefined + + - name: "1.1.2.7.1 | AUDIT | Ensure separate partition exists for /var/log/audit | Present" + ansible.builtin.import_tasks: + file: warning_facts.yml + +# skips if mount is absent +- name: | + "1.1.2.7.2 | PATCH | Ensure nodev option set on /var/log/audit partition" + "1.1.2.7.3 | PATCH | Ensure nosuid option set on /var/log/audit partition" + "1.1.2.7.4 | PATCH | Ensure noexec option set on /var/log/audit partition" + when: + - "'/var/log/audit' in mount_names" + - item.mount == "/var/log/audit" + - ubtu24cis_rule_1_1_2_7_2 or + ubtu24cis_rule_1_1_2_7_3 or + ubtu24cis_rule_1_1_2_7_4 + tags: + - level1-server + - level1-workstation + - patch + - mounts + - rule_1.1.2.7.2 + - rule_1.1.2.7.3 + - rule_1.1.2.7.4 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + notify: Set_reboot_required + ansible.posix.mount: + name: /var/log/audit + src: "{{ item.device }}" + fstype: "{{ item.fstype }}" + state: present + opts: defaults,{% if ubtu24cis_rule_1_1_2_7_2 %}nodev,{% endif %}{% if ubtu24cis_rule_1_1_2_7_3 %}nosuid,{% endif %}{% if ubtu24cis_rule_1_1_2_7_4 %}noexec{% endif %} + loop: "{{ ansible_facts.mounts }}" + loop_control: + label: "{{ item.device }}" diff --git a/tasks/section_1/cis_1.2.1.x.yml b/tasks/section_1/cis_1.2.1.x.yml new file mode 100644 index 0000000..6805f18 --- /dev/null +++ b/tasks/section_1/cis_1.2.1.x.yml @@ -0,0 +1,65 @@ +--- + +- name: "1.2.1.1 | AUDIT | Ensure GPG keys are configured" + when: + - ubtu24cis_rule_1_2_1_1 + tags: + - level1-server + - level1-workstation + - audit + - rule_1.2.1.1 + - NIST800-53R5_SI-2 + - gpg + - keys + vars: + warn_control_id: '1.2.1.1' + block: + - name: "1.2.1.1 | AUDIT | Ensure GPG keys are configured | Get apt gpg keys" + ansible.builtin.shell: apt-key list + changed_when: false + failed_when: false + check_mode: false + register: ubtu24cis_1_2_1_1_apt_gpgkeys + + - name: "1.2.1.1 | AUDIT 
| Ensure GPG keys are configured | Message out apt gpg keys" + ansible.builtin.debug: + msg: + - "Warning!! Below are the apt gpg keys configured" + - "Please review to make sure they are configured" + - "in accordance with site policy" + - "{{ ubtu24cis_1_2_1_1_apt_gpgkeys.stdout_lines }}" + + - name: "1.2.1.1 | WARN | Ensure GPG keys are configured | warn_count" + ansible.builtin.import_tasks: + file: warning_facts.yml + +- name: "1.2.1.2 | AUDIT | Ensure package manager repositories are configured" + when: + - ubtu24cis_rule_1_2_1_2 + tags: + - level1-server + - level1-workstation + - audit + - rule_1.2.1.2 + - NIST800-53R5_SI-2 + - apt + vars: + warn_control_id: '1.2.1.2' + block: + - name: "1.2.1.2 | AUDIT | Ensure package manager repositories are configured | Get repositories" + ansible.builtin.shell: apt-cache policy + changed_when: false + failed_when: false + check_mode: false + register: ubtu24cis_1_2_1_2_apt_policy + + - name: "1.2.1.2 | AUDIT | Ensure package manager repositories are configured | Message out repository configs" + ansible.builtin.debug: + msg: + - "Warning!! 
Below are the apt package repositories" + - "Please review to make sure they conform to your sites policies" + - "{{ ubtu24cis_1_2_1_2_apt_policy.stdout_lines }}" + + - name: "1.2.1.2 | WARN | Ensure package manager repositories are configured | warn_count" + ansible.builtin.import_tasks: + file: warning_facts.yml diff --git a/tasks/section_1/cis_1.2.2.x.yml b/tasks/section_1/cis_1.2.2.x.yml new file mode 100644 index 0000000..984534c --- /dev/null +++ b/tasks/section_1/cis_1.2.2.x.yml @@ -0,0 +1,15 @@ +--- + +- name: "1.2.2.1 | PATCH | Ensure updates, patches, and additional security software are installed" + when: + - ubtu24cis_rule_1_2_2_1 + tags: + - level1-server + - level1-workstation + - patch + - rule_1.2.2.1 + - NIST800-53R5_SI-2 + - patch + ansible.builtin.package: + name: "*" + state: latest diff --git a/tasks/section_1/cis_1.3.1.x.yml b/tasks/section_1/cis_1.3.1.x.yml new file mode 100644 index 0000000..578304d --- /dev/null +++ b/tasks/section_1/cis_1.3.1.x.yml @@ -0,0 +1,173 @@ +--- + +- name: "1.3.1.1 | PATCH | Ensure AppArmor is installed" + when: + - ubtu24cis_rule_1_3_1_1 + - "'apparmor' not in ansible_facts.packages or + 'apparmor-utils' not in ansible_facts.packages" + tags: + - level1-server + - level1-workstation + - patch + - rule_1.3.1.1 + - NIST800-53R5_AC-3 + - apparmor + ansible.builtin.package: + name: ['apparmor', 'apparmor-utils'] + state: present + +- name: "1.3.1.2 | PATCH | Ensure AppArmor is enabled in the bootloader configuration" + when: + - ubtu24cis_rule_1_3_1_2 + tags: + - level1-server + - level1-workstation + - patch + - rule_1.3.1.2 + - NIST800-53R5_AC-3 + - apparmor + block: + - name: "1.3.1.2 | AUDIT | Ensure AppArmor is enabled in the bootloader configuration | Get current settings" + ansible.builtin.shell: grep "GRUB_CMDLINE_LINUX=" /etc/default/grub | cut -f2 -d'"' + changed_when: false + failed_when: false + check_mode: false + register: ubtu24cis_1_3_1_2_cmdline_settings + + - name: "1.3.1.2 | PATCH | Ensure AppArmor 
is enabled in the bootloader configuration | Set apparmor settings if none exist" + when: ubtu24cis_1_3_1_2_cmdline_settings.stdout is not search('apparmor=') + ansible.builtin.lineinfile: + path: /etc/default/grub + regexp: ^(GRUB_CMDLINE_LINUX=")(|apparmor=\d\s)(.*\w+") + line: \1apparmor=1 \3 + backrefs: true + notify: Grub update + + - name: "1.3.1.2 | PATCH | Ensure AppArmor is enabled in the bootloader configuration | Set security settings if none exist" + when: ubtu24cis_1_3_1_2_cmdline_settings.stdout is not search('security=') + ansible.builtin.lineinfile: + path: /etc/default/grub + regexp: ^(GRUB_CMDLINE_LINUX=")(|security=\w+\s)(.*\w+") + line: \1security=apparmor \3 + backrefs: true + notify: Grub update + + - name: "1.3.1.2 | PATCH | Ensure AppArmor is enabled in the bootloader configuration | Set apparmor settings if none exist" + when: + - "'apparmor' not in ubtu24cis_1_3_1_2_cmdline_settings.stdout" + - "'security' not in ubtu24cis_1_3_1_2_cmdline_settings.stdout" + ansible.builtin.lineinfile: + path: /etc/default/grub + regexp: '^GRUB_CMDLINE_LINUX=' + line: 'GRUB_CMDLINE_LINUX="apparmor=1 security=apparmor {{ ubtu24cis_1_3_1_2_cmdline_settings.stdout }}"' + insertafter: '^GRUB_' + notify: Grub update + + - name: "1.3.1.2 | PATCH | Ensure AppArmor is enabled in the bootloader configuration | Replace apparmor settings when exists" + when: + - "'apparmor' in ubtu24cis_1_3_1_2_cmdline_settings.stdout or + 'security' in ubtu24cis_1_3_1_2_cmdline_settings.stdout" + ansible.builtin.replace: + path: /etc/default/grub + regexp: "{{ item.regexp }}" + replace: "{{ item.replace }}" + with_items: + - { regexp: 'apparmor=\w+', replace: 'apparmor=1' } + - { regexp: 'security=\w+', replace: 'security=apparmor' } + notify: Grub update + +# Controls 1.3.1.4 and 1.3.1.3 target the same setting and thus should not be run together. 
+# Because control 1.3.1.4 is stricter than 1.3.1.3, we need to change the order -- +# control 1.3.1.4 then registers the fact that it has run and thus keeps 1.3.1.3 from running. + +- name: "1.3.1.4 | PATCH | Ensure all AppArmor Profiles are enforcing" + when: + - ubtu24cis_rule_1_3_1_4 + - not ubtu24cis_apparmor_disable + tags: + - level2-server + - level2-workstation + - scored + - patch + - rule_1.3.1.4 + - NIST800-53R5_AC-3 + - apparmor + block: + - name: "1.3.1.4 | PATCH | Ensure all AppArmor Profiles are enforcing | Make sure that 1.3.1.3 is not run" + ansible.builtin.set_fact: + control_1_3_1_4_was_run: true + ubtu24cis_apparmor_enforce_only: true + changed_when: false + + - name: "1.3.1.4 | PATCH | Ensure all AppArmor Profiles are enforcing | Get pre apply enforce count" + ansible.builtin.shell: apparmor_status | grep "profiles are in enforce mode" | tr -d -c 0-9 + changed_when: false + failed_when: false + register: ubtu24cis_1_3_1_4_pre_count + + - name: "1.3.1.4 | PATCH | Ensure all AppArmor Profiles are enforcing | Apply enforcing to /etc/apparmor.d profiles" + ansible.builtin.shell: aa-enforce /etc/apparmor.d/* + changed_when: false + failed_when: false + + - name: "1.3.1.4 | PATCH | Ensure all AppArmor Profiles are enforcing | Get post apply enforce count" + ansible.builtin.shell: apparmor_status | grep "profiles are in enforce mode" | tr -d -c 0-9 + changed_when: false + failed_when: false + register: ubtu24cis_1_3_1_4_post_count + + - name: "1.3.1.4 | PATCH | Ensure all AppArmor Profiles are enforcing | This flags for idempotency" + when: ubtu24cis_1_3_1_4_pre_count.stdout != ubtu24cis_1_3_1_4_post_count.stdout + ansible.builtin.debug: + msg: Changed! 
The profiles in /etc/apparmor.d were set to enforcing + changed_when: true + +- name: "1.3.1.3 | PATCH | Ensure all AppArmor Profiles are in enforce or complain mode" + when: + - ubtu24cis_rule_1_3_1_3 + - not ubtu24cis_apparmor_disable + - not control_1_3_1_4_was_run + tags: + - level1-server + - level1-workstation + - patch + - rule_1.3.1.3 + - NIST800-53R5_AC-3 + - apparmor + block: + - name: "1.3.1.3 | AUDIT | Ensure all AppArmor Profiles are in enforce or complain | Set ubtu24cis_apparmor_enforce_only true for GOSS" + when: + - ubtu24cis_apparmor_mode == "enforce" + ansible.builtin.set_fact: + ubtu24cis_apparmor_enforce_only: true + changed_when: false + + - name: "1.3.1.3 | AUDIT | Ensure all AppArmor Profiles are in enforce or complain | Set ubtu24cis_apparmor_enforce_only false for GOSS" + when: + - ubtu24cis_apparmor_mode == "complain" + ansible.builtin.set_fact: + ubtu24cis_apparmor_enforce_only: false + changed_when: false + + - name: "1.3.1.3 | PATCH | Ensure all AppArmor Profiles are in enforce or complain mode | Get pre apply enforce count" + ansible.builtin.shell: apparmor_status | grep "profiles are in {{ubtu24cis_apparmor_mode}} mode" | tr -d -c 0-9 + changed_when: false + failed_when: false + register: ubtu24cis_1_3_1_3_pre_count + + - name: "1.3.1.3 | PATCH | Ensure all AppArmor Profiles are in enforce or complain mode | Apply complaining/enforcing to /etc/apparmor.d profiles" + ansible.builtin.shell: aa-{{ubtu24cis_apparmor_mode}} /etc/apparmor.d/* + changed_when: false + failed_when: false + + - name: "1.3.1.3 | PATCH | Ensure all AppArmor Profiles are in enforce or complain mode | Get post apply enforce count" + ansible.builtin.shell: apparmor_status | grep "profiles are in {{ubtu24cis_apparmor_mode}} mode" | tr -d -c 0-9 + changed_when: false + failed_when: false + register: ubtu24cis_1_3_1_3_post_count + + - name: "1.3.1.3 | PATCH | Ensure all AppArmor Profiles are in enforce or complain mode | This flags for idempotency" + when: 
ubtu24cis_1_3_1_3_pre_count.stdout != ubtu24cis_1_3_1_3_post_count.stdout + ansible.builtin.debug: + msg: Changed! The profiles in /etc/apparmor.d were set to {{ubtu24cis_apparmor_mode}} mode + changed_when: true diff --git a/tasks/section_1/cis_1.4.x.yml b/tasks/section_1/cis_1.4.x.yml new file mode 100644 index 0000000..92311a2 --- /dev/null +++ b/tasks/section_1/cis_1.4.x.yml @@ -0,0 +1,57 @@ +--- + +- name: "1.4.1 | PATCH | Ensure bootloader password is set" + when: + - ubtu24cis_set_boot_pass + - ubtu24cis_rule_1_4_1 + tags: + - level1-server + - level1-workstation + - patch + - rule_1.4.1 + - NIST800-53R5_AC-3 + - grub + block: + - name: "1.4.1 | PATCH | Ensure bootloader password is set" + ansible.builtin.template: + src: etc/grub.d/00_user.j2 + dest: "{{ ubtu24cis_grub_user_file }}" + owner: root + group: root + mode: '0755' + notify: Grub update + + - name: "1.4.1 | PATCH | Ensure bootloader password is set | allow unrestricted boot" + when: not ubtu24cis_ask_passwd_to_boot + ansible.builtin.lineinfile: + path: "/etc/grub.d/10_linux" + regexp: '(^CLASS="--class gnu-linux --class gnu --class os).*"$' + line: '\g<1> --unrestricted"' + backrefs: true + notify: Grub update + +- name: "1.4.2 | PATCH | Ensure access to bootloader config is configured" + when: + - ubtu24cis_rule_1_4_2 + tags: + - level1-server + - level1-workstation + - patch + - rule_1.4.2 + - NIST800-53R5_AC-3 + - grub + block: + - name: "1.4.2 | AUDIT | Ensure access to bootloader config is configured | Check for Grub file" + ansible.builtin.stat: + path: "{{ ubtu24cis_grub_file }}" + check_mode: false + register: ubtu24cis_1_4_2_grub_cfg_status + + - name: "1.4.2 | PATCH | Ensure access to bootloader config is configured | Set permissions" + when: + - ubtu24cis_1_4_2_grub_cfg_status.stat.exists + ansible.builtin.file: + path: "{{ ubtu24cis_grub_file }}" + owner: root + group: root + mode: 'u-x,go-rwx' diff --git a/tasks/section_1/cis_1.5.x.yml b/tasks/section_1/cis_1.5.x.yml new file mode 
100644 index 0000000..9d1a965 --- /dev/null +++ b/tasks/section_1/cis_1.5.x.yml @@ -0,0 +1,147 @@ +--- + +- name: "1.5.1 | PATCH | Ensure address space layout randomization (ASLR) is enabled | Set active kernel parameter" + when: + - ubtu24cis_rule_1_5_1 + tags: + - level1-server + - level1-workstation + - patch + - rule_1.5.1 + - NIST800-53R5_CM-6 + - aslr + ansible.posix.sysctl: + name: kernel.randomize_va_space + value: '2' + state: present + sysctl_file: "{{ ubtu24cis_sysctl_kernel_conf }}" + reload: true + sysctl_set: true + ignoreerrors: true + +- name: "1.5.2 | PATCH | Ensure ptrace_scope is restricted" + when: + - ubtu24cis_rule_1_5_2 + tags: + - level1-server + - level1-workstation + - patch + - rule_1.5.2 + - NIST800-53R5_CM-6 + - ptrace + ansible.posix.sysctl: + name: kernel.yama.ptrace_scope + value: "{{ ubtu24_ptrace_value }}" + state: present + sysctl_file: "{{ ubtu24cis_sysctl_kernel_conf }}" + reload: true + sysctl_set: true + ignoreerrors: true + +- name: "1.5.3 | PATCH | Ensure core dumps are restricted" + when: + - ubtu24cis_rule_1_5_3 + tags: + - level1-server + - level1-workstation + - patch + - rule_1.5.3 + - NIST800-53R5_CM-6 + - coredump + block: + - name: "1.5.3 | PATCH | Ensure core dumps are restricted | kernel sysctl" + ansible.posix.sysctl: + name: fs.suid_dumpable + value: '0' + state: present + sysctl_file: "{{ ubtu24cis_sysctl_kernel_conf }}" + reload: true + sysctl_set: true + ignoreerrors: true + + - name: "1.5.3 | PATCH | Ensure core dumps are restricted | security limits" + ansible.builtin.lineinfile: + path: /etc/security/limits.d/99_zero_core.conf + regexp: '^\* hard core' + line: '* hard core 0' + create: true + owner: root + group: root + mode: '0644' + + - name: "1.5.3 | PATCH | Ensure core dumps are restricted | sysctl.conf" + ansible.builtin.lineinfile: + path: /etc/sysctl.conf + regexp: '^fs.suid_dumpable' + line: fs.suid_dumpable=0 + owner: root + group: root + mode: '0644' + notify: Reload systemctl + + - name: "1.5.3 | 
PATCH | Ensure core dumps are restricted | coredump.conf" + ansible.builtin.lineinfile: + path: /etc/systemd/coredump.conf + regexp: "{{ item.regexp }}" + line: "{{ item.line }}" + create: true + owner: root + group: root + mode: '0644' + loop: + - { regexp: '^Storage', line: 'Storage=none' } + - { regexp: '^ProcessSizeMax', line: 'ProcessSizeMax=0' } + +- name: "1.5.4 | PATCH | Ensure prelink is not installed" + when: + - ubtu24cis_rule_1_5_4 + - "'prelink' in ansible_facts.packages" + tags: + - level1-server + - level1-workstation + - patch + - rule_1.5.4 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-3 + - NIST800-53R5_CM-6 + - prelink + block: + - name: "1.5.4 | PATCH | Ensure prelink is not installed | Restore binaries to normal" + ansible.builtin.shell: prelink -ua + changed_when: false + failed_when: false + + - name: "1.5.4 | PATCH | Ensure prelink is not installed| Remove prelink package" + ansible.builtin.package: + name: prelink + state: absent + purge: "{{ ubtu24cis_purge_apt }}" + +- name: "1.5.5 | PATCH | Ensure Automatic Error Reporting is not enabled" + when: + - ubtu24cis_rule_1_5_5 + tags: + - level1-server + - level1-workstation + - patch + - rule_1.5.5 + - NIST800-53R5_NA + - apport + block: + - name: "1.5.5 | PATCH | Ensure Automatic Error Reporting is not enabled | disable" + ansible.builtin.lineinfile: + path: /etc/default/apport + regexp: ^enabled + line: enabled=0 + create: true + owner: root + group: root + mode: '0644' + + - name: "1.5.5 | PATCH | Ensure Automatic Error Reporting is not enabled | remove package" + when: + - "'apport' in ansible_facts.packages" + ansible.builtin.package: + name: apport + state: absent + purge: "{{ ubtu24cis_purge_apt }}" diff --git a/tasks/section_1/cis_1.6.x.yml b/tasks/section_1/cis_1.6.x.yml new file mode 100644 index 0000000..e0be92e --- /dev/null +++ b/tasks/section_1/cis_1.6.x.yml @@ -0,0 +1,129 @@ +--- + +- name: "1.6.1 | PATCH | Ensure message of the day is configured properly" + when: + - 
ubtu24cis_rule_1_6_1 + tags: + - level1-server + - level1-workstation + - patch + - rule_1.6.1 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-3 + - NIST800-53R5_CM-6 + - motd + block: + - name: "1.6.1 | PATCH | Ensure message of the day is configured properly | motd" + ansible.builtin.template: + src: etc/motd.j2 + dest: /etc/motd + + - name: "1.6.1 | PATCH | Ensure message of the day is configured properly | disable dynamic_motd" + when: ubtu24cis_disable_dynamic_motd + ansible.builtin.lineinfile: + path: /etc/pam.d/sshd + regexp: "{{ item.regexp }}" + line: "{{ item.line }}" + backrefs: true + loop: + - { regexp: '(session\s+optional\s+pam_motd.so\s+motd=/run/motd.dynamic)', line: '# \1' } + - { regexp: '(session\s+optional\s+pam_motd.so noupdate)', line: '# \1' } + - { regexp: '# Pam_motd.so disabled for CIS benchmark', line: '# Pam_motd.so disabled for CIS benchmark' } + +- name: "1.6.2 | PATCH | Ensure local login warning banner is configured properly" + when: + - ubtu24cis_rule_1_6_2 + tags: + - level1-server + - level1-workstation + - patch + - rule_1.6.2 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-3 + - NIST800-53R5_CM-6 + - banner + block: + - name: "1.6.2 | PATCH | Ensure local login warning banner is configured properly | issue" + ansible.builtin.template: + src: etc/issue.j2 + dest: /etc/issue + + - name: "1.6.2 | PATCH | Ensure local login warning banner is kept on package upgrade | issue" + community.general.dpkg_divert: + path: /etc/issue + +- name: "1.6.3 | PATCH | Ensure remote login warning banner is configured properly" + when: + - ubtu24cis_rule_1_6_3 + tags: + - level1-server + - level1-workstation + - patch + - rule_1.6.3 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-3 + - NIST800-53R5_CM-6 + - banner + block: + - name: "1.6.3 | PATCH | Ensure remote login warning banner is configured properly | issue.net" + ansible.builtin.template: + src: etc/issue.net.j2 + dest: /etc/issue.net + + - name: "1.6.3 | PATCH | Ensure remote login warning banner is kept on 
package upgrade | issue.net" + community.general.dpkg_divert: + path: /etc/issue.net + +- name: "1.6.4 | PATCH | Ensure permissions on /etc/motd are configured" + when: + - ubtu24cis_rule_1_6_4 + tags: + - level1-server + - level1-workstation + - patch + - rule_1.6.4 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + - permissions + - motd + ansible.builtin.file: + path: /etc/motd + owner: root + group: root + mode: 'u-x,go-wx' + +- name: "1.6.5 | PATCH | Ensure permissions on /etc/issue are configured" + when: + - ubtu24cis_rule_1_6_5 + tags: + - level1-server + - level1-workstation + - patch + - rule_1.6.5 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + - permissions + - banner + ansible.builtin.file: + path: /etc/issue + owner: root + group: root + mode: 'u-x,go-wx' + +- name: "1.6.6 | PATCH | Ensure permissions on /etc/issue.net are configured" + when: + - ubtu24cis_rule_1_6_6 + tags: + - level1-server + - level1-workstation + - patch + - rule_1.6.6 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + - permissions + - banner + ansible.builtin.file: + path: /etc/issue.net + owner: root + group: root + mode: 'u-x,go-wx' diff --git a/tasks/section_1/cis_1.7.x.yml b/tasks/section_1/cis_1.7.x.yml new file mode 100644 index 0000000..f876ab7 --- /dev/null +++ b/tasks/section_1/cis_1.7.x.yml @@ -0,0 +1,337 @@ +--- + +- name: "1.7.1 | PATCH | Ensure GNOME Display Manager is removed" + ansible.builtin.package: + name: gdm3 + state: absent + when: + - ubtu24cis_rule_1_7_1 + - not ubtu24cis_desktop_required + - ubtu24cis_disruption_high + - "'gdm3' in ansible_facts.packages" + tags: + - level2-server + - patch + - rule_1.7.1 + - NIST800-53R5_CM-11 + - gnome + +- name: "1.7.2 | PATCH | Ensure GDM login banner is configured" + when: + - ubtu24cis_rule_1_7_2 + - ubtu24cis_desktop_required + tags: + - level1-server + - level1-workstation + - patch + - rule_1.7.2 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - gnome + block: 
+ - name: "1.7.2 | PATCH | Ensure GDM login banner is configured | make directory" + ansible.builtin.file: + path: "/etc/dconf/db/{{ ubtu24cis_dconf_db_name }}.d" + owner: root + group: root + mode: '0755' + state: directory + + - name: "1.7.2 | PATCH | Ensure GDM login banner is configured | banner settings" + ansible.builtin.lineinfile: + path: "/etc/dconf/db/{{ ubtu24cis_dconf_db_name }}.d/00-login-screen" + regexp: "{{ item.regexp }}" + line: "{{ item.line }}" + insertafter: "{{ item.insertafter }}" + create: true + owner: root + group: root + mode: '0644' + loop: + - { regexp: '\[org\/gnome\/login-screen\]', line: '[org/gnome/login-screen]', insertafter: EOF } + - { regexp: 'banner-message-enable', line: 'banner-message-enable=true', insertafter: '\[org\/gnome\/login-screen\]'} + - { regexp: 'banner-message-text', line: "banner-message-text='{{ ubtu24cis_warning_banner | regex_replace('\n', ' ') | trim }}'", insertafter: 'banner-message-enable' } + notify: Update dconf + +- name: "1.7.3 | PATCH | Ensure disable-user-list option is enabled" + when: + - ubtu24cis_rule_1_7_3 + - ubtu24cis_desktop_required + tags: + - level1-server + - level1-workstation + - patch + - rule_1.7.3 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - gnome + block: + - name: "1.7.3 | PATCH | Ensure disable-user-list option is enabled | make directories" + ansible.builtin.file: + path: "{{ item }}" + owner: root + group: root + mode: '0755' + state: directory + loop: + - /etc/dconf/db/{{ ubtu24cis_dconf_db_name }}.d + - /etc/dconf/profile + + - name: "1.7.3 | PATCH | Ensure disable-user-list option is enabled | disable-user-list setting login-screen" + ansible.builtin.lineinfile: + path: "/etc/dconf/db/{{ ubtu24cis_dconf_db_name }}.d/00-login-screen" + regexp: "{{ item.regexp }}" + line: "{{ item.line }}" + insertafter: "{{ item.insertafter }}" + create: true + owner: root + group: root + mode: '0644' + loop: + - { regexp: 
'\[org\/gnome\/login-screen\]', line: '[org/gnome/login-screen]', insertafter: EOF } + - { regexp: 'disable-user-list', line: 'disable-user-list=true', insertafter: '\[org\/gnome\/login-screen\]'} + + - name: "1.7.3 | PATCH | Ensure disable-user-list option is enabled | disable-user-list setting profile" + ansible.builtin.lineinfile: + path: "/etc/dconf/profile/{{ ubtu24cis_dconf_db_name }}" + regexp: "{{ item.regexp }}" + line: "{{ item.line }}" + insertafter: "{{ item.insertafter }}" + create: true + owner: root + group: root + mode: '0644' + loop: + - { regexp: '^user-db:user', line: 'user-db:user', insertafter: EOF } + - { regexp: '^system-db:{{ ubtu24cis_dconf_db_name }}', line: 'system-db:{{ ubtu24cis_dconf_db_name }}', insertafter: 'user-db:user'} + - { regexp: '^file-db:/usr/share/gdm/greeter-dconf-defaults', line: 'file-db:/usr/share/gdm/greeter-dconf-defaults', insertafter: 'system-db:{{ ubtu24cis_dconf_db_name }}'} + notify: Update dconf + +- name: "1.7.4 | PATCH | Ensure GDM screen locks when the user is idle" + when: + - ubtu24cis_rule_1_7_4 + - ubtu24cis_desktop_required + tags: + - level1-server + - level1-workstation + - patch + - rule_1.7.4 + - NIST800-53R5_NA + - gnome + block: + - name: "1.7.4 | PATCH | Ensure GDM screen locks when the user is idle | session profile" + ansible.builtin.lineinfile: + path: "/etc/dconf/profile/{{ ubtu24cis_dconf_db_name }}" + regexp: "{{ item.regexp }}" + line: "{{ item.line }}" + insertafter: "{{ item.after | default(omit) }}" + create: true + loop: + - { regexp: 'user-db:user', line: 'user-db:user' } + - { regexp: 'system-db:{{ ubtu24cis_dconf_db_name }}', line: 'system-db:{{ ubtu24cis_dconf_db_name }}', after: '^user-db.*' } + + - name: "1.7.4 | PATCH | Ensure GDM screen locks when the user is idle | make directory" + ansible.builtin.file: + path: "/etc/dconf/db/{{ ubtu24cis_dconf_db_name }}.d" + owner: root + group: root + mode: '0755' + state: directory + notify: Update dconf + + - name: "1.7.4 | PATCH | Ensure 
GDM screen locks when the user is idle | session script" + ansible.builtin.template: + src: etc/dconf/db/00-screensaver.j2 + dest: "/etc/dconf/db/{{ ubtu24cis_dconf_db_name }}.d/00-screensaver" + owner: root + group: root + mode: '0644' + notify: Update dconf + +- name: "1.7.5 | PATCH | Ensure GDM screen locks cannot be overridden" + when: + - ubtu24cis_rule_1_7_5 + - ubtu24cis_desktop_required + tags: + - level1-server + - level1-workstation + - patch + - rule_1.7.5 + - NIST800-53R5_CM-11 + - gnome + block: + - name: "1.7.5 | PATCH | Ensure GDM screen locks cannot be overridden | make lock directory" + ansible.builtin.file: + path: "/etc/dconf/db/{{ ubtu24cis_dconf_db_name }}.d/locks" + owner: root + group: root + mode: '0755' + state: directory + notify: Update dconf + + - name: "1.7.5 | PATCH | Ensure GDM screen locks cannot be overridden | make lockfile" + ansible.builtin.template: + src: etc/dconf/db/00-screensaver_lock.j2 + dest: "/etc/dconf/db/{{ ubtu24cis_dconf_db_name }}.d/locks/00-screensaver" + owner: root + group: root + mode: '0644' + notify: Update dconf + +- name: "1.7.6 | PATCH | Ensure GDM automatic mounting of removable media is disabled" + when: + - ubtu24cis_rule_1_7_6 + - ubtu24cis_desktop_required + tags: + - level1-server + - level2-workstation + - patch + - rule_1.7.6 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - gnome + block: + - name: "1.7.6 | PATCH | Ensure GDM automatic mounting of removable media is disabled | make directory" + ansible.builtin.file: + path: "/etc/dconf/db/{{ ubtu24cis_dconf_db_name }}.d" + owner: root + group: root + mode: '0755' + state: directory + notify: Update dconf + + - name: "1.7.6 | PATCH | Ensure GDM automatic mounting of removable media is disabled | session script" + ansible.builtin.template: + src: etc/dconf/db/00-media-automount.j2 + dest: "/etc/dconf/db/{{ ubtu24cis_dconf_db_name }}.d/00-media-automount" + owner: root + group: root + 
mode: '0644' + notify: Update dconf + +- name: "1.7.7 | PATCH | Ensure GDM disabling automatic mounting of removable media is not overridden" + when: + - ubtu24cis_rule_1_7_7 + - ubtu24cis_desktop_required + tags: + - level1-server + - level2-workstation + - patch + - rule_1.7.7 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - gnome + block: + - name: "1.7.7 | PATCH | Ensure GDM disabling automatic mounting of removable media is not overridden | make lock directory" + ansible.builtin.file: + path: "/etc/dconf/db/{{ ubtu24cis_dconf_db_name }}.d/locks" + owner: root + group: root + mode: '0755' + state: directory + notify: Update dconf + + - name: "1.7.7 | PATCH | Ensure GDM disabling automatic mounting of removable media is not overridden | make lockfile" + ansible.builtin.template: + src: etc/dconf/db/00-automount_lock.j2 + dest: "/etc/dconf/db/{{ ubtu24cis_dconf_db_name }}.d/locks/00-automount_lock" + owner: root + group: root + mode: '0644' + notify: Update dconf + +- name: "1.7.8 | PATCH | Ensure GDM autorun-never is enabled" + when: + - ubtu24cis_rule_1_7_8 + - ubtu24cis_desktop_required + tags: + - level1-server + - level2-workstation + - patch + - rule_1.7.8 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - gnome + block: + - name: "1.7.8 | PATCH | Ensure GDM autorun-never is enabled | make directory" + ansible.builtin.file: + path: "/etc/dconf/db/{{ ubtu24cis_dconf_db_name }}.d" + owner: root + group: root + mode: '0755' + state: directory + notify: Update dconf + + - name: "1.7.8 | PATCH | Ensure GDM autorun-never is enabled | session script" + ansible.builtin.template: + src: etc/dconf/db/00-media-autorun.j2 + dest: "/etc/dconf/db/{{ ubtu24cis_dconf_db_name }}.d/00-media-autorun" + owner: root + group: root + mode: '0644' + notify: Update dconf + +- name: "1.7.9 | PATCH | Ensure GDM autorun-never is not overridden" + when: + - 
ubtu24cis_rule_1_7_9 + - ubtu24cis_desktop_required + tags: + - level1-server + - level2-workstation + - patch + - rule_1.7.9 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - gnome + block: + - name: "1.7.9 | PATCH | Ensure GDM autorun-never is not overridden | make lock directory" + ansible.builtin.file: + path: "/etc/dconf/db/{{ ubtu24cis_dconf_db_name }}.d/locks" + owner: root + group: root + mode: '0755' + state: directory + notify: Update dconf + + - name: "1.7.9 | PATCH | Ensure GDM autorun-never is not overridden | make lockfile" + ansible.builtin.template: + src: etc/dconf/db/00-autorun_lock.j2 + dest: "/etc/dconf/db/{{ ubtu24cis_dconf_db_name }}.d/locks/00-autorun_lock" + owner: root + group: root + mode: '0644' + notify: Update dconf + +- name: "1.7.10 | PATCH | Ensure XDCMP is not enabled" + when: + - ubtu24cis_rule_1_7_10 + tags: + - level1-server + - level1-workstation + - patch + - rule_1.7.10 + - NIST800-53R5_SI-4 + - gnome + - xdcmp + ansible.builtin.lineinfile: + path: /etc/gdm3/custom.conf + regexp: '^Enable.*=.*true' + state: absent diff --git a/tasks/section_1/main.yml b/tasks/section_1/main.yml new file mode 100644 index 0000000..137b959 --- /dev/null +++ b/tasks/section_1/main.yml @@ -0,0 +1,75 @@ +--- + +- name: "SECTION | 1.1.1 | Configure Filesystem Kernel Modules" + ansible.builtin.import_tasks: + file: cis_1.1.1.x.yml + when: not system_is_container + +- name: "SECTION | 1.1.2.1 | Configure /tmp" + ansible.builtin.import_tasks: + file: cis_1.1.2.1.x.yml + when: not system_is_container + +- name: "SECTION | 1.1.2.2 | Configure /dev/shm" + ansible.builtin.import_tasks: + file: cis_1.1.2.2.x.yml + when: not system_is_container + +- name: "SECTION | 1.1.2.3 | Configure /home" + ansible.builtin.import_tasks: + file: cis_1.1.2.3.x.yml + +- name: "SECTION | 1.1.2.4 | Configure /var" + ansible.builtin.import_tasks: + file: cis_1.1.2.4.x.yml + when: not system_is_container + +- name: 
"SECTION | 1.1.2.5 | Configure /var/tmp" + ansible.builtin.import_tasks: + file: cis_1.1.2.5.x.yml + when: not system_is_container + +- name: "SECTION | 1.1.2.6 | Configure /var/log" + ansible.builtin.import_tasks: + file: cis_1.1.2.6.x.yml + when: not system_is_container + +- name: "SECTION | 1.1.2.7 | Configure /var/log/audit" + ansible.builtin.import_tasks: + file: cis_1.1.2.7.x.yml + when: not system_is_container + +- name: "SECTION | 1.2.1 | Configure Package Repositories" + ansible.builtin.import_tasks: + file: cis_1.2.1.x.yml + when: not system_is_container + +- name: "SECTION | 1.2.2 | Configure Package Updates" + ansible.builtin.import_tasks: + file: cis_1.2.2.x.yml + when: not system_is_container + +- name: "SECTION | 1.3 | Configure AppArmor" + ansible.builtin.import_tasks: + file: cis_1.3.1.x.yml + when: not system_is_container + +- name: "SECTION | 1.4 | Configure Bootloader" + ansible.builtin.import_tasks: + file: cis_1.4.x.yml + +- name: "SECTION | 1.5 | Configure Additional Process Hardening" + ansible.builtin.import_tasks: + file: cis_1.5.x.yml + when: not system_is_container + +- name: "SECTION | 1.6 | Command Line Warning Banners" + ansible.builtin.import_tasks: + file: cis_1.6.x.yml + +- name: "SECTION | 1.7 | Configure DNOME Display Manager" + when: + - "'gdm3' in ansible_facts.packages" + - not system_is_container + ansible.builtin.import_tasks: + file: cis_1.7.x.yml diff --git a/tasks/section_2/cis_2.1.x.yml b/tasks/section_2/cis_2.1.x.yml new file mode 100644 index 0000000..b6a710f --- /dev/null +++ b/tasks/section_2/cis_2.1.x.yml @@ -0,0 +1,768 @@ +--- + +- name: "2.1.1 | PATCH | Ensure autofs services are not in use" + when: + - ubtu24cis_rule_2_1_1 + - "'autofs' in ansible_facts.packages" + tags: + - level1-server + - level2-workstation + - patch + - rule_2.1.1 + - NIST800-53R5_SI-3 + - NIST800-53R5_MP-7 + block: + - name: "2.1.1 | PATCH | Ensure autofs services are not in use | Remove Package" + when: + - not ubtu24cis_autofs_services + 
- not ubtu24cis_autofs_mask + ansible.builtin.package: + name: autofs + state: absent + purge: "{{ ubtu24cis_purge_apt }}" + + - name: "2.1.1 | PATCH | Ensure autofs services are not in use | Mask service" + when: + - not ubtu24cis_autofs_services + - ubtu24cis_autofs_mask + notify: Systemd_daemon_reload + ansible.builtin.systemd: + name: autofs + enabled: false + state: stopped + masked: true + +- name: "2.1.2 | PATCH | Ensure avahi daemon services are not in use" + when: + - ubtu24cis_rule_2_1_2 + tags: + - level1-server + - level2-workstation + - patch + - avahi + - rule_2.1.2 + - NIST800-53R5_SI-4 + block: + - name: "2.1.2 | PATCH | Ensure avahi daemon services are not in use | Remove package" + when: + - not ubtu24cis_avahi_server + - not ubtu24cis_avahi_mask + - "'avahi' in ansible_facts.packages or 'avahi-autoipd' in ansible_facts.packages" + ansible.builtin.package: + name: + - avahi-autoipd + - avahi + state: absent + purge: "{{ ubtu24cis_purge_apt }}" + + - name: "2.1.2 | PATCH | Ensure avahi daemon services are not in use | Mask service" + when: + - not ubtu24cis_avahi_server + - ubtu24cis_avahi_mask + notify: Systemd_daemon_reload + ansible.builtin.systemd: + name: "{{ item }}" + enabled: false + state: stopped + masked: true + loop: + - avahi-daemon.socket + - avahi-daemon.service + +- name: "2.1.3 | PATCH | Ensure dhcp server services are not in use" + when: + - ubtu24cis_rule_2_1_3 + tags: + - level1-server + - level1-workstation + - patch + - dhcp + - rule_2.1.3 + - NIST800-53R5_CM-7 + block: + - name: "2.1.3 | PATCH | Ensure dhcp server services are not in use | Remove package" + when: + - not ubtu24cis_dhcp_server + - not ubtu24cis_dhcp_mask + - "'isc-dhcp-server' in ansible_facts.packages" + ansible.builtin.package: + name: isc-dhcp-server + state: absent + purge: "{{ ubtu24cis_purge_apt }}" + + - name: "2.1.3 | PATCH | Ensure dhcp server services are not in use | Mask service" + when: + - not ubtu24cis_dhcp_server + - ubtu24cis_dhcp_mask + 
notify: Systemd_daemon_reload + ansible.builtin.systemd: + name: "{{ item }}" + enabled: false + state: stopped + masked: true + loop: + - isc-dhcp-server.service + - isc-dhcp-server6.service + +- name: "2.1.4 | PATCH | Ensure dns server services are not in use" + when: + - ubtu24cis_rule_2_1_4 + tags: + - level1-server + - level1-workstation + - patch + - dns + - rule_2.1.4 + - NIST800-53R5_CM-7 + block: + - name: "2.1.4 | PATCH | Ensure dns server services are not in use | Remove package" + when: + - "'bind9' in ansible_facts.packages" + - not ubtu24cis_dns_server + - not ubtu24cis_dns_mask + ansible.builtin.package: + name: bind9 + state: absent + purge: "{{ ubtu24cis_purge_apt }}" + + - name: "2.1.4 | PATCH | Ensure dns server services are not in use | Mask service" + when: + - not ubtu24cis_dns_server + - ubtu24cis_dns_mask + notify: Systemd_daemon_reload + ansible.builtin.systemd: + name: named.service + enabled: false + state: stopped + masked: true + +- name: "2.1.5 | PATCH | Ensure dnsmasq server services are not in use" + when: + - ubtu24cis_rule_2_1_5 + tags: + - level1-server + - level1-workstation + - patch + - dns + - rule_2.1.5 + - NIST800-53R5_CM-7 + block: + - name: "2.1.5 | PATCH | Ensure dnsmasq server services are not in use | Remove package" + when: + - "'dnsmasq' in ansible_facts.packages" + - not ubtu24cis_dnsmasq_server + - not ubtu24cis_dnsmasq_mask + ansible.builtin.package: + name: dnsmasq + state: absent + purge: "{{ ubtu24cis_purge_apt }}" + + - name: "2.1.5 | PATCH | Ensure dnsmasq server services are not in use | Mask service" + when: + - not ubtu24cis_dnsmasq_server + - ubtu24cis_dnsmasq_mask + notify: Systemd_daemon_reload + ansible.builtin.systemd: + name: dnsmasq.service + enabled: false + state: stopped + masked: true + +- name: "2.1.6 | PATCH | Ensure ftp server services are not in use" + when: + - ubtu24cis_rule_2_1_6 + tags: + - level1-server + - level1-workstation + - automation + - patch + - ftp + - rule_2.1.6 + - 
NIST800-53R5_CM-7 + block: + - name: "2.1.6 | PATCH | Ensure ftp server services are not in use | Remove package" + when: + - "'vsftpd' in ansible_facts.packages" + - not ubtu24cis_ftp_server + - not ubtu24cis_ftp_mask + ansible.builtin.package: + name: vsftpd + state: absent + purge: "{{ ubtu24cis_purge_apt }}" + + - name: "2.1.6 | PATCH | Ensure ftp server services are not in use | Mask service" + when: + - not ubtu24cis_ftp_server + - ubtu24cis_ftp_mask + notify: Systemd_daemon_reload + ansible.builtin.systemd: + name: vsftpd.service + enabled: false + state: stopped + masked: true + +- name: "2.1.7 | PATCH | Ensure ldap server services are not in use" + when: + - ubtu24cis_rule_2_1_7 + tags: + - level1-server + - level1-workstation + - patch + - ldap + - rule_2.1.7 + - NIST800-53R5_CM-7 + block: + - name: "2.1.7 | PATCH | Ensure ldap server services are not in use | Remove package" + when: + - "'slapd' in ansible_facts.packages" + - not ubtu24cis_ldap_server + - not ubtu24cis_ldap_mask + ansible.builtin.package: + name: slapd + state: absent + purge: "{{ ubtu24cis_purge_apt }}" + + - name: "2.1.7 | PATCH | Ensure ldap server services are not in use | Mask service" + when: + - not ubtu24cis_ldap_server + - ubtu24cis_ldap_mask + notify: Systemd_daemon_reload + ansible.builtin.systemd: + name: slapd.service + enabled: false + state: stopped + masked: true + +- name: "2.1.8 | PATCH | Ensure message access server services are not in use" + when: + - ubtu24cis_rule_2_1_8 + tags: + - level1-server + - level1-workstation + - patch + - dovecot + - imap + - pop3 + - rule_2.1.8 + - NIST800-53R5_CM-7 + block: + - name: "2.1.8 | PATCH | Ensure message access server services are not in use | Remove package" + when: + - "'dovecot-pop3d' in ansible_facts.packages or 'dovecot-imapd' in ansible_facts.packages" + - not ubtu24cis_message_server + - not ubtu24cis_message_mask + ansible.builtin.package: + name: + - dovecot-pop3d + - dovecot-imapd + state: absent + purge: "{{ 
ubtu24cis_purge_apt }}" + + - name: "2.1.8 | PATCH | Ensure message access server services are not in use | Mask service" + when: + - not ubtu24cis_message_server + - ubtu24cis_message_mask + notify: Systemd_daemon_reload + ansible.builtin.systemd: + name: "{{ item }}" + enabled: false + state: stopped + masked: true + loop: + - "dovecot.socket" + - "dovecot.service" + +- name: "2.1.9 | PATCH | Ensure network file system services are not in use" + when: + - ubtu24cis_rule_2_1_9 + tags: + - level1-server + - level1-workstation + - patch + - nfs + - services + - rule_2.1.9 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + block: + - name: "2.1.9 | PATCH | Ensure network file system services are not in use | Remove package" + when: + - "'nfs-kernel-server' in ansible_facts.packages" + - not ubtu24cis_nfs_server + - not ubtu24cis_nfs_mask + ansible.builtin.package: + name: nfs-kernel-server + state: absent + purge: "{{ ubtu24cis_purge_apt }}" + + - name: "2.1.9 | PATCH | Ensure network file system services are not in use | Mask service" + when: + - not ubtu24cis_nfs_server + - ubtu24cis_nfs_mask + notify: Systemd_daemon_reload + ansible.builtin.systemd: + name: nfs-server.service + enabled: false + state: stopped + masked: true + +- name: "2.1.10 | PATCH | Ensure nis server services are not in use" + when: + - ubtu24cis_rule_2_1_10 + tags: + - level1-server + - level1-workstation + - patch + - nis + - rule_2.1.10 + - NIST800-53R5_CM-7 + notify: Systemd_daemon_reload + block: + - name: "2.1.10 | PATCH | Ensure nis server services are not in use | Remove package" + when: + - "'ypserv' in ansible_facts.packages" + - not ubtu24cis_nis_server + - not ubtu24cis_nis_mask + ansible.builtin.package: + name: ypserv + state: absent + purge: "{{ ubtu24cis_purge_apt }}" + + - name: "2.1.10 | PATCH | Ensure nis server services are not in use | Mask service" + when: + - not ubtu24cis_nis_server + - ubtu24cis_nis_mask + ansible.builtin.systemd: + name: ypserv.service + enabled: false + 
state: stopped + masked: true + +- name: "2.1.11 | PATCH | Ensure print server services are not in use" + when: + - ubtu24cis_rule_2_1_11 + tags: + - level1-server + - patch + - cups + - rule_2.1.11 + - NIST800-53R5_CM-7 + block: + - name: "2.1.11 | PATCH | Ensure print server services are not in use | Remove package" + when: + - "'cups' in ansible_facts.packages" + - not ubtu24cis_print_server + - not ubtu24cis_print_mask + ansible.builtin.package: + name: cups + state: absent + purge: "{{ ubtu24cis_purge_apt }}" + + - name: "2.1.11 | PATCH | Ensure print server services are not in use | Mask service" + when: + - not ubtu24cis_print_server + - ubtu24cis_print_mask + notify: Systemd_daemon_reload + ansible.builtin.systemd: + name: "{{ item }}" + enabled: false + state: stopped + masked: true + loop: + - "cups.socket" + - "cups.service" + +- name: "2.1.12 | PATCH | Ensure rpcbind services are not in use" + when: + - ubtu24cis_rule_2_1_12 + tags: + - level1-server + - level1-workstation + - patch + - rpc + - rule_2.1.12 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + block: + - name: "2.1.12 | PATCH | Ensure rpcbind services are not in use | Remove package" + when: + - "'rpcbind' in ansible_facts.packages" + - not ubtu24cis_rpc_server + - not ubtu24cis_rpc_mask + ansible.builtin.package: + name: rpcbind + state: absent + purge: "{{ ubtu24cis_purge_apt }}" + + - name: "2.1.12 | PATCH | Ensure rpcbind services are not in use | Mask service" + when: + - not ubtu24cis_rpc_server + - ubtu24cis_rpc_mask + notify: Systemd_daemon_reload + ansible.builtin.systemd: + name: "{{ item }}" + enabled: false + state: stopped + masked: true + loop: + - rpcbind.service + - rpcbind.socket + +- name: "2.1.13 | PATCH | Ensure rsync services are not in use" + when: + - ubtu24cis_rule_2_1_13 + tags: + - level1-server + - level1-workstation + - patch + - rsync + - rule_2.1.13 + - NIST800-53R5_CM-7 + block: + - name: "2.1.13 | PATCH | Ensure rsync services are not in use | Remove package" + 
when: + - "'rsync' in ansible_facts.packages" + - not ubtu24cis_rsync_server + - not ubtu24cis_rsync_mask + ansible.builtin.package: + name: rsync + state: absent + purge: "{{ ubtu24cis_purge_apt }}" + + - name: "2.1.13 | PATCH | Ensure rsync services are not in use | Mask service" + when: + - not ubtu24cis_rsync_server + - ubtu24cis_rsync_mask + notify: Systemd_daemon_reload + ansible.builtin.systemd: + name: rsyncd.service + enabled: false + state: stopped + masked: true + +- name: "2.1.14 | PATCH | Ensure samba file server services are not in use" + when: + - ubtu24cis_rule_2_1_14 + tags: + - level1-server + - level1-workstation + - patch + - samba + - rule_2.1.14 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + block: + - name: "2.1.14 | PATCH | Ensure samba file server services are not in use | Remove package" + when: + - "'samba' in ansible_facts.packages" + - not ubtu24cis_samba_server + - not ubtu24cis_samba_mask + ansible.builtin.package: + name: samba + state: absent + purge: "{{ ubtu24cis_purge_apt }}" + + - name: "2.1.14 | PATCH | Ensure samba file server services are not in use | Mask service" + when: + - not ubtu24cis_samba_server + - ubtu24cis_samba_mask + notify: Systemd_daemon_reload + ansible.builtin.systemd: + name: smbd.service + enabled: false + state: stopped + masked: true + +- name: "2.1.15 | PATCH | Ensure snmp services are not in use" + when: + - ubtu24cis_rule_2_1_15 + tags: + - level1-server + - level1-workstation + - automation + - patch + - samba + - rule_2.1.15 + - NIST800-53R5_CM-7 + block: + - name: "2.1.15 | PATCH | Ensure snmp services are not in use | Remove package" + when: + - "'snmpd' in ansible_facts.packages" + - not ubtu24cis_snmp_server + - not ubtu24cis_snmp_mask + ansible.builtin.package: + name: snmpd + state: absent + purge: "{{ ubtu24cis_purge_apt }}" + + - name: "2.1.15 | PATCH | Ensure snmp services are not in use | Mask service" + when: + - not ubtu24cis_snmp_server + - ubtu24cis_snmp_mask + notify: 
Systemd_daemon_reload + ansible.builtin.systemd: + name: snmpd.service + enabled: false + state: stopped + masked: true + +- name: "2.1.16 | PATCH | Ensure tftp server services are not in use" + when: + - ubtu24cis_rule_2_1_16 + tags: + - level1-server + - level1-workstation + - patch + - tftp + - rule_2.1.16 + - NIST800-53R5_CM-7 + block: + - name: "2.1.16 | PATCH | Ensure tftp server services are not in use | Remove package" + when: + - "'tftpd-hpa' in ansible_facts.packages" + - not ubtu24cis_tftp_server + - not ubtu24cis_tftp_mask + ansible.builtin.package: + name: tftpd-hpa + state: absent + purge: "{{ ubtu24cis_purge_apt }}" + + - name: "2.1.16 | PATCH | Ensure tftp server services are not in use | Mask service" + when: + - not ubtu24cis_tftp_server + - ubtu24cis_tftp_mask + notify: Systemd_daemon_reload + ansible.builtin.systemd: + name: tftpd-hpa.service + enabled: false + state: stopped + masked: true + +- name: "2.1.17 | PATCH | Ensure web proxy server services are not in use" + when: + - ubtu24cis_rule_2_1_17 + tags: + - level1-server + - level1-workstation + - patch + - squid + - rule_2.1.17 + - NIST800-53R5_CM-7 + block: + - name: "2.1.17 | PATCH | Ensure web proxy server services are not in use | Remove package" + when: + - "'squid' in ansible_facts.packages" + - not ubtu24cis_squid_server + - not ubtu24cis_squid_mask + ansible.builtin.package: + name: squid + state: absent + purge: "{{ ubtu24cis_purge_apt }}" + + - name: "2.1.17 | PATCH | Ensure web proxy server services are not in use | Mask service" + when: + - not ubtu24cis_squid_server + - ubtu24cis_squid_mask + notify: Systemd_daemon_reload + ansible.builtin.systemd: + name: squid.service + enabled: false + state: stopped + masked: true + +- name: "2.1.18 | PATCH | Ensure web server services are not in use" + when: + - ubtu24cis_rule_2_1_18 + tags: + - level1-server + - level1-workstation + - patch + - httpd + - nginx + - webserver + - rule_2.1.18 + - NIST800-53R5_CM-7 + block: + - name: "2.1.18 
| PATCH | Ensure web server services are not in use | Remove httpd server" + when: + - not ubtu24cis_apache2_server + - not ubtu24cis_apache2_mask + - "'apache2' in ansible_facts.packages" + ansible.builtin.package: + name: apache2 + state: absent + purge: "{{ ubtu24cis_purge_apt }}" + + - name: "2.1.18 | PATCH | Ensure web server services are not in use | Remove nginx server" + when: + - not ubtu24cis_nginx_server + - not ubtu24cis_nginx_mask + - "'nginx' in ansible_facts.packages" + ansible.builtin.package: + name: nginx + state: absent + purge: "{{ ubtu24cis_purge_apt }}" + + - name: "2.1.18 | PATCH | Ensure web server services are not in use | Mask httpd service" + when: + - not ubtu24cis_apache2_server + - ubtu24cis_apache2_mask + - "'apache2' in ansible_facts.packages" + notify: Systemd_daemon_reload + ansible.builtin.systemd: + name: "{{ item }}" + enabled: false + state: stopped + masked: true + loop: + - apache2.service + - apache2.socket + + - name: "2.1.18 | PATCH | Ensure web server services are not in use | Mask nginx service" + when: + - not ubtu24cis_nginx_server + - ubtu24cis_nginx_mask + - "'nginx' in ansible_facts.packages" + notify: Systemd_daemon_reload + ansible.builtin.systemd: + name: nginx.service + enabled: false + state: stopped + masked: true + +- name: "2.1.19 | PATCH | Ensure xinetd services are not in use" + when: + - ubtu24cis_rule_2_1_19 + tags: + - level1-server + - level1-workstation + - patch + - xinetd + - rule_2.1.19 + - NIST800-53R5_CM-7 + block: + - name: "2.1.19 | PATCH | Ensure xinetd services are not in use | Remove package" + when: + - "'xinetd' in ansible_facts.packages" + - not ubtu24cis_xinetd_server + - not ubtu24cis_xinetd_mask + ansible.builtin.package: + name: xinetd + state: absent + purge: "{{ ubtu24cis_purge_apt }}" + + - name: "2.1.19 | PATCH | Ensure xinetd services are not in use | Mask service" + when: + - not ubtu24cis_xinetd_server + - ubtu24cis_xinetd_mask + notify: Systemd_daemon_reload + ansible.builtin.systemd: + 
name: xinetd.service + enabled: false + state: stopped + masked: true + +- name: "2.1.20 | PATCH | Ensure X window server services are not in use" + when: + - not ubtu24cis_xwindow_server + - "'xorg-x11-server-common' in ansible_facts.packages" + - ubtu24cis_rule_2_1_20 + tags: + - level2-server + - patch + - xwindow + - rule_2.1.20 + - NIST800-53R5_CM-11 + ansible.builtin.package: + name: xorg-x11-server-common + state: absent + purge: "{{ ubtu24cis_purge_apt }}" + +- name: "2.1.21 | PATCH | Ensure mail transfer agents are configured for local-only mode" + when: + - not ubtu24cis_is_mail_server + - ubtu24cis_rule_2_1_21 + tags: + - level1-server + - level1-workstation + - patch + - postfix + - rule_2.1.21 + - NIST800-53R5_CM-7 + vars: + warn_control_id: '2.1.21' + block: + - name: "2.1.21 | PATCH | Ensure mail transfer agents are configured for local-only mode | Make changes if exim4 installed" + when: "'exim4' in ansible_facts.packages" + notify: Restart exim4 + ansible.builtin.lineinfile: + path: /etc/exim4/update-exim4.conf.conf + regexp: "{{ item.regexp }}" + line: "{{ item.line }}" + with_items: + - { regexp: '^dc_eximconfig_configtype', line: "dc_eximconfig_configtype='local'" } + - { regexp: '^dc_local_interfaces', line: "dc_local_interfaces='127.0.0.1 ; ::1'" } + - { regexp: '^dc_readhost', line: "dc_readhost=''" } + - { regexp: '^dc_relay_domains', line: "dc_relay_domains=''" } + - { regexp: '^dc_minimaldns', line: "dc_minimaldns='false'" } + - { regexp: '^dc_relay_nets', line: "dc_relay_nets=''" } + - { regexp: '^dc_smarthost', line: "dc_smarthost=''" } + - { regexp: '^dc_use_split_config', line: "dc_use_split_config='false'" } + - { regexp: '^dc_hide_mailname', line: "dc_hide_mailname=''" } + - { regexp: '^dc_mailname_in_oh', line: "dc_mailname_in_oh='true'" } + - { regexp: '^dc_localdelivery', line: "dc_localdelivery='mail_spool'" } + + - name: "2.1.21 | PATCH | Ensure mail transfer agents are configured for local-only mode | Make changes if postfix is 
installed" + when: "'postfix' in ansible_facts.packages" + notify: Restart postfix + ansible.builtin.lineinfile: + path: /etc/postfix/main.cf + regexp: '^(#)?inet_interfaces' + line: 'inet_interfaces = loopback-only' + + - name: "2.1.21 | WARN | Ensure mail transfer agents are configured for local-only mode | Message out other main agents" + when: + - "'exim4' not in ansible_facts.packages" + - "'postfix' not in ansible_facts.packages" + ansible.builtin.debug: + msg: + - "Warning!! You are not using either exim4 or postfix, please ensure mail services set for local only mode" + - "Please review your vendors documentation to configure local-only mode" + + - name: "2.1.21 | WARN | Ensure mail transfer agents are configured for local-only mode | warn_count" + when: + - "'exim4' not in ansible_facts.packages" + - "'postfix' not in ansible_facts.packages" + ansible.builtin.import_tasks: + file: warning_facts.yml + +- name: "2.1.22 | AUDIT | Ensure only approved services are listening on a network interface" + when: + - ubtu24cis_rule_2_1_22 + tags: + - level1-server + - level1-workstation + - audit + - services + - rule_2.1.22 + - NIST800-53R5_CM-7 + vars: + warn_control_id: '2.1.22' + block: + - name: "2.1.22 | AUDIT | Ensure only approved services are listening on a network interface | Get list of services" + ansible.builtin.shell: systemctl list-units --type=service + changed_when: false + failed_when: ubtu24cis_2_1_22_services.rc not in [ 0, 1 ] + check_mode: false + register: ubtu24cis_2_1_22_services + + - name: "2.1.22 | AUDIT | Ensure only approved services are listening on a network interface | Display list of services" + ansible.builtin.debug: + msg: + - "Warning!! 
Below are the list of services, both active and inactive" + - "Please review to make sure all are essential" + - "{{ ubtu24cis_2_1_22_services.stdout_lines }}" + + - name: "2.1.22 | AUDIT | Ensure only approved services are listening on a network interface | Warn Count" + ansible.builtin.import_tasks: + file: warning_facts.yml diff --git a/tasks/section_2/cis_2.2.x.yml b/tasks/section_2/cis_2.2.x.yml new file mode 100644 index 0000000..cc049c2 --- /dev/null +++ b/tasks/section_2/cis_2.2.x.yml @@ -0,0 +1,99 @@ +--- + +- name: "2.2.1 | PATCH | Ensure NIS Client is not installed" + when: + - ubtu24cis_rule_2_2_1 + - not ubtu24cis_nis_server + tags: + - level1-server + - level1-workstation + - rule_2.2.1 + - NIST800-53R5_CM-7 + - NIST800-53R5_CM-11 + - nis + ansible.builtin.package: + name: nis + state: absent + purge: "{{ ubtu24cis_purge_apt }}" + +- name: "2.2.2 | PATCH | Ensure rsh client is not installed" + when: + - ubtu24cis_rule_2_2_2 + - not ubtu24cis_rsh_client + tags: + - level1-server + - level1-workstation + - patch + - rule_2.2.2 + - NIST800-53R5_CM-7 + - rsh + ansible.builtin.package: + name: rsh-client + state: absent + purge: "{{ ubtu24cis_purge_apt }}" + +- name: "2.2.3 | PATCH | Ensure talk client is not installed" + when: + - ubtu24cis_rule_2_2_3 + - not ubtu24cis_talk_client + tags: + - level1-server + - level1-workstation + - patch + - rule_2.2.3 + - NIST800-53R5_CM-7 + - talk + ansible.builtin.package: + name: talk + state: absent + purge: "{{ ubtu24cis_purge_apt }}" + +- name: "2.2.4 | PATCH | Ensure telnet client is not installed" + when: + - ubtu24cis_rule_2_2_4 + - not ubtu24cis_telnet_required + tags: + - level1-server + - level1-workstation + - patch + - rule_2.2.4 + - NIST800-53R5_CM-7 + - NIST800-53R5_CM-11 + - telnet + ansible.builtin.package: + name: telnet + state: absent + purge: "{{ ubtu24cis_purge_apt }}" + +- name: "2.2.5 | PATCH | Ensure ldap client is not installed" + when: + - ubtu24cis_rule_2_2_5 + - not 
ubtu24cis_ldap_clients_required + tags: + - level1-server + - level1-workstation + - patch + - rule_2.2.5 + - NIST800-53R5_CM-7 + - ldap + ansible.builtin.package: + name: ldap-utils + state: absent + purge: "{{ ubtu24cis_purge_apt }}" + +- name: "2.2.6 | PATCH | Ensure ftp is not installed" + when: + - ubtu24cis_rule_2_2_6 + - not ubtu24cis_ftp_client + tags: + - level1-server + - level1-workstation + - patch + - rule_2.2.6 + - NIST800-53R5_CM-7 + - NIST800-53R5_CM-11 + - ftp + ansible.builtin.package: + name: ftp + state: absent + purge: "{{ ubtu24cis_purge_apt }}" diff --git a/tasks/section_2/cis_2.3.1.x.yml b/tasks/section_2/cis_2.3.1.x.yml new file mode 100644 index 0000000..f1f3874 --- /dev/null +++ b/tasks/section_2/cis_2.3.1.x.yml @@ -0,0 +1,40 @@ +--- + +- name: "2.3.1.1 | PATCH | Ensure a single time synchronization daemon is in use" + when: + - ubtu24cis_rule_2_3_1_1 + tags: + - level1-server + - level1-workstation + - patch + - rule_2.3.1.1 + - NIST800-53R5_AU-3 + - NIST800-53R5_AU-12 + - chrony + - ntp + - systemd-timesyncd + block: + - name: "2.3.1.1 | PATCH | Ensure a single time synchronization daemon is in use | Pkg installed" + ansible.builtin.package: + name: "{{ ubtu24cis_time_sync_tool }}" + state: present + + - name: "2.3.1.1 | PATCH | Ensure a single time synchronization daemon is in use | other pkgs removed" + when: item != ubtu24cis_time_sync_tool + ansible.builtin.package: + name: "{{ item }}" + state: absent + loop: + - chrony + - ntp + + - name: "2.3.1.1 | PATCH | Ensure a single time synchronization daemon is in use | mask service" + when: + - ubtu24cis_time_sync_tool != "systemd-timesyncd" + - "'systemd-timesyncd' in ansible_facts.packages" + ansible.builtin.service: + name: systemd-timesyncd + state: stopped + enabled: false + masked: true + daemon_reload: true diff --git a/tasks/section_2/cis_2.3.2.x.yml b/tasks/section_2/cis_2.3.2.x.yml new file mode 100644 index 0000000..98a339b --- /dev/null +++ b/tasks/section_2/cis_2.3.2.x.yml 
@@ -0,0 +1,65 @@ +--- + +- name: "2.3.2.1 | PATCH | Ensure systemd-timesyncd configured with authorized timeserver" + when: + - ubtu24cis_rule_2_3_2_1 + tags: + - level1-server + - level1-workstation + - patch + - rule_2.3.2.1 + - NIST800-53R5_AU-7 + - NIST800-53R5_AU-8 + - timesyncd + block: + - name: "2.3.2.1 | PATCH | Ensure systemd-timesyncd configured with authorized timeserver | create conf.d dir" + ansible.builtin.file: + path: /etc/systemd/timesyncd.conf.d + owner: root + group: root + mode: '0755' + state: directory + + - name: "2.3.2.1 | PATCH | Ensure systemd-timesyncd configured with authorized timeserver | sources" + ansible.builtin.template: + src: "{{ item }}.j2" + dest: "/{{ item }}" + mode: '0644' + owner: root + group: root + loop: + - "etc/systemd/timesyncd.conf.d/50-timesyncd.conf" + notify: Restart timeservice + +- name: "2.3.2.2 | PATCH | Ensure systemd-timesyncd is enabled and running" + when: + - ubtu24cis_rule_2_3_2_2 + tags: + - level1-server + - level1-workstation + - rule_2.3.2.2 + - NIST800-53R5_AU-7 + - NIST800-53R5_AU-8 + - timesyncd + block: + - name: "2.3.2.2 | PATCH | Ensure systemd-timesyncd is enabled and running | enable if timesyncd" + ansible.builtin.systemd: + name: systemd-timesyncd + state: started + enabled: true + + - name: "2.3.2.2 | PATCH | Ensure systemd-timesyncd is enabled and running | disable other time sources | chrony" + when: "'chrony' in ansible_facts.packages" + ansible.builtin.systemd: + name: chrony + state: stopped + enabled: false + masked: true + + - name: "2.3.2.2 | PATCH | Ensure systemd-timesyncd is enabled and running | disable other time sources | ntp" + when: "'ntp' in ansible_facts.packages" + ansible.builtin.systemd: + name: ntp + state: stopped + enabled: false + masked: true diff --git a/tasks/section_2/cis_2.3.3.x.yml b/tasks/section_2/cis_2.3.3.x.yml new file mode 100644 index 0000000..5e9e9f0 --- /dev/null +++ b/tasks/section_2/cis_2.3.3.x.yml @@ -0,0 +1,79 @@ +--- + +- name: "2.3.3.1 | PATCH 
| Ensure chrony is configured with authorized timeserver" + when: + - ubtu24cis_rule_2_3_3_1 + tags: + - level1-server + - level1-workstation + - patch + - rule_2.3.3.1 + - NIST800-53R5_AU-3 + - NIST800-53R5_AU-12 + - chrony + block: + - name: "2.3.3.1 | PATCH | Ensure chrony is configured with authorized timeserver | sources" + ansible.builtin.template: + src: "{{ item }}.j2" + dest: "/{{ item }}" + mode: '0644' + owner: root + group: root + loop: + - etc/chrony/sources.d/pool.sources + - etc/chrony/sources.d/server.sources + notify: Restart timeservice + + - name: "2.3.3.1 | PATCH | Ensure chrony is configured with authorized timeserver | load sources" + ansible.builtin.lineinfile: + path: /etc/chrony/chrony.conf + regexp: '^sourcedir /etc/chrony/sources.d' + line: sourcedir /etc/chrony/sources.d + notify: Restart timeservice + +- name: "2.3.3.2 | PATCH | Ensure chrony is running as user _chrony" + when: + - ubtu24cis_rule_2_3_3_2 + tags: + - level1-server + - level1-workstation + - patch + - rule_2.3.3.2 + - NIST800-53R5_AU-8 + - chrony + ansible.builtin.lineinfile: + path: /etc/chrony/chrony.conf + regexp: '^user _chrony' + line: 'user _chrony' + +- name: "2.3.3.3 | PATCH | Ensure chrony is enabled and running" + when: + - ubtu24cis_rule_2_3_3_3 + tags: + - level1-server + - level1-workstation + - rule_2.3.3.3 + - NIST800-53R5_AU-8 + - chrony + block: + - name: "2.3.3.3 | PATCH | Ensure chrony is enabled and running" + ansible.builtin.systemd: + name: chrony + state: started + enabled: true + + - name: "2.3.3.3 | PATCH | Ensure chrony is enabled and running | disable other time sources | timesyncd" + when: "'systemd-timesyncd' in ansible_facts.packages" + ansible.builtin.systemd: + name: systemd-timesyncd + state: stopped + enabled: false + masked: true + + - name: "2.3.3.3 | PATCH | Ensure chrony is enabled and running | disable other time sources | ntpd" + when: "'ntpd' in ansible_facts.packages" + ansible.builtin.systemd: + name: ntpd + state: stopped + 
enabled: false + masked: true diff --git a/tasks/section_2/cis_2.4.1.x.yml b/tasks/section_2/cis_2.4.1.x.yml new file mode 100644 index 0000000..19342cb --- /dev/null +++ b/tasks/section_2/cis_2.4.1.x.yml @@ -0,0 +1,166 @@ +--- + +- name: "2.4.1.1 | PATCH | Ensure cron daemon is enabled and running" + when: + - ubtu24cis_rule_2_4_1_1 + tags: + - level1-server + - level1-workstation + - patch + - rule_2.4.1.1 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - cron + ansible.builtin.systemd: + name: cron + state: started + enabled: true + +- name: "2.4.1.2 | PATCH | Ensure permissions on /etc/crontab are configured" + when: + - ubtu24cis_rule_2_4_1_2 + tags: + - level1-server + - level1-workstation + - patch + - rule_2.4.1.2 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + - cron + ansible.builtin.file: + path: /etc/crontab + owner: root + group: root + mode: '0600' + +- name: "2.4.1.3 | PATCH | Ensure permissions on /etc/cron.hourly are configured" + when: + - ubtu24cis_rule_2_4_1_3 + tags: + - level1-server + - level1-workstation + - patch + - rule_2.4.1.3 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + - cron + ansible.builtin.file: + path: /etc/cron.hourly + owner: root + group: root + mode: '0700' + state: directory + +- name: "2.4.1.4 | PATCH | Ensure permissions on /etc/cron.daily are configured" + when: + - ubtu24cis_rule_2_4_1_4 + tags: + - level1-server + - level1-workstation + - patch + - rule_2.4.1.4 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + - cron + ansible.builtin.file: + path: /etc/cron.daily + owner: root + group: root + mode: '0700' + state: directory + +- name: "2.4.1.5 | PATCH | Ensure permissions on /etc/cron.weekly are configured" + when: + - ubtu24cis_rule_2_4_1_5 + tags: + - level1-server + - level1-workstation + - patch + - rule_2.4.1.5 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + - cron + ansible.builtin.file: + path: /etc/cron.weekly + owner: root + group: root + mode: '0700' + 
state: directory + +- name: "2.4.1.6 | PATCH | Ensure permissions on /etc/cron.monthly are configured" + when: + - ubtu24cis_rule_2_4_1_6 + tags: + - level1-server + - level1-workstation + - patch + - rule_2.4.1.6 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + - cron + ansible.builtin.file: + path: /etc/cron.monthly + owner: root + group: root + mode: '0700' + state: directory + +- name: "2.4.1.7 | PATCH | Ensure permissions on /etc/cron.d are configured" + when: + - ubtu24cis_rule_2_4_1_7 + tags: + - level1-server + - level1-workstation + - patch + - rule_2.4.1.7 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + - cron + ansible.builtin.file: + path: /etc/cron.d + owner: root + group: root + mode: '0700' + state: directory + +- name: "2.4.1.8 | PATCH | Ensure cron is restricted to authorized users" + when: + - ubtu24cis_rule_2_4_1_8 + tags: + - level1-server + - level1-workstation + - patch + - rule_2.4.1.8 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + - cron + block: + - name: "2.4.1.8 | PATCH | Ensure cron is restricted to authorized users | Remove cron.deny" + ansible.builtin.file: + path: /etc/cron.deny + state: absent + + - name: "2.4.1.8 | PATCH | Ensure cron is restricted to authorized users | Check for cron.allow" + ansible.builtin.stat: + path: /etc/cron.allow + register: ubtu24cis_2_4_1_8_status + + - name: "2.4.1.8 | PATCH | Ensure cron is restricted to authorized users | Create cron.allow if doesn't exist" + when: not ubtu24cis_2_4_1_8_status.stat.exists + ansible.builtin.file: + path: /etc/cron.allow + owner: root + group: root + mode: 'u-x,g-wx,o-rwx' + state: touch + + - name: "2.4.1.8 | PATCH | Ensure cron is restricted to authorized users | Update cron.allow if exists" + when: ubtu24cis_2_4_1_8_status.stat.exists + ansible.builtin.file: + path: /etc/cron.allow + owner: root + group: root + mode: 'u-x,g-wx,o-rwx' diff --git a/tasks/section_2/cis_2.4.2.x.yml b/tasks/section_2/cis_2.4.2.x.yml new file mode 100644 index 0000000..e49b137 --- /dev/null +++ 
b/tasks/section_2/cis_2.4.2.x.yml @@ -0,0 +1,40 @@ +--- + +- name: "2.4.2.1 | PATCH | Ensure at is restricted to authorized users" + when: + - ubtu24cis_rule_2_4_2_1 + tags: + - level1-server + - level1-workstation + - patch + - rule_2.4.2.1 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + - cron + block: + - name: "2.4.2.1 | PATCH | Ensure at is restricted to authorized users | Remove at.deny" + ansible.builtin.file: + path: /etc/at.deny + state: absent + + - name: "2.4.2.1 | PATCH | Ensure at is restricted to authorized users | Check for at.allow" + ansible.builtin.stat: + path: /etc/at.allow + register: ubtu24cis_2_4_2_1_status + + - name: "2.4.2.1 | PATCH | Ensure at is restricted to authorized users | Create at.allow if doesn't exist" + when: not ubtu24cis_2_4_2_1_status.stat.exists + ansible.builtin.file: + path: /etc/at.allow + owner: root + group: root + mode: 'u-x,g-wx,o-rwx' + state: touch + + - name: "2.4.2.1 | PATCH | Ensure at is restricted to authorized users | update at.allow if exists" + when: ubtu24cis_2_4_2_1_status.stat.exists + ansible.builtin.file: + path: /etc/at.allow + owner: root + group: root + mode: 'u-x,g-wx,o-rwx' diff --git a/tasks/section_2/main.yml b/tasks/section_2/main.yml new file mode 100644 index 0000000..1512409 --- /dev/null +++ b/tasks/section_2/main.yml @@ -0,0 +1,33 @@ +--- + +- name: "SECTION | 2.1.x | Configure Server Services" + ansible.builtin.import_tasks: + file: cis_2.1.x.yml + +- name: "SECTION | 2.2.x | Configure Clients Services" + ansible.builtin.import_tasks: + file: cis_2.2.x.yml + +- name: "SECTION | 2.3.1.x | Time service " + ansible.builtin.import_tasks: + file: cis_2.3.1.x.yml + +- name: "SECTION | 2.3.2.x | Configure systemd-timesyncd" + when: + - ubtu24cis_time_sync_tool == "systemd-timesyncd" + ansible.builtin.import_tasks: + file: cis_2.3.2.x.yml + +- name: "SECTION | 2.3.3.x | Configure Chrony" + when: + - ubtu24cis_time_sync_tool == "chrony" + ansible.builtin.import_tasks: + file: cis_2.3.3.x.yml + +- 
name: "SECTION | 2.4.1.x | Configure Cron" + ansible.builtin.import_tasks: + file: cis_2.4.1.x.yml + +- name: "SECTION | 2.4.2.x | Configure At" + ansible.builtin.import_tasks: + file: cis_2.4.2.x.yml diff --git a/tasks/section_3/cis_3.1.x.yml b/tasks/section_3/cis_3.1.x.yml new file mode 100644 index 0000000..e30bb08 --- /dev/null +++ b/tasks/section_3/cis_3.1.x.yml @@ -0,0 +1,112 @@ +--- + +- name: "3.1.1 | PATCH | Ensure IPv6 status is identified" + when: + - ubtu24cis_rule_3_1_1 + - not ubtu24cis_ipv6_required + tags: + - level1-server + - level1-workstation + - patch + - rule_3.1.1 + - NIST800-53R5_CM-7 + - ipv6 + block: + - name: "3.1.1 | PATCH | Ensure IPv6 status is identified | Replace ipv6.disable if it exists" + when: ubtu24cis_ipv6_disable == 'grub' + ansible.builtin.replace: + path: /etc/default/grub + regexp: '^(GRUB_CMDLINE_LINUX=.*)\bipv6\.disable=\d\b(.*$)' + replace: '\1ipv6.disable=1\2' + register: ipv6disable_replaced + notify: Grub update + + - name: "3.1.1 | PATCH | Ensure IPv6 status is identified | Check grub cmdline linux" + ansible.builtin.shell: grep "GRUB_CMDLINE_LINUX=" /etc/default/grub | cut -f2 -d'"' + changed_when: false + failed_when: false + check_mode: false + register: ubtu24cis_3_1_1_cmdline_settings + + - name: "3.1.1 | PATCH | Ensure IPv6 status is identified | Insert ipv6.disable if it doesn't exist" + when: + - ubtu24cis_ipv6_disable == 'grub' + - ipv6disable_replaced is not changed + - "'ipv6.disable' not in ubtu24cis_3_1_1_cmdline_settings.stdout" + ansible.builtin.lineinfile: + path: /etc/default/grub + regexp: '^(GRUB_CMDLINE_LINUX=".*)"$' + line: '\1 ipv6.disable=1"' + backrefs: true + notify: Grub update + + - name: "3.1.1 | PATCH | Ensure IPv6 status is identified | Remove net.ipv6.conf.all.disable_ipv6" + when: ubtu24cis_ipv6_disable == 'sysctl' + ansible.builtin.template: + src: "{{ item }}.j2" + dest: "/{{ item }}" + owner: root + group: root + mode: '0640' + notify: Flush ipv6 route table + loop: + - 
etc/sysctl.d/60-disable_ipv6.conf + +- name: "3.1.2 | PATCH | Ensure wireless interfaces are disabled" + when: + - ubtu24cis_rule_3_1_2 + tags: + - level1-server + - patch + - rule_3.1.2 + - NIST800-53R5_CM-7 + - wireless + vars: + warn_control_id: '3.1.2' + block: + - name: "3.1.2 | PATCH | Ensure wireless interfaces are disabled | Create modprobe.d file" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/{{ item }}.conf + regexp: '^(#)?install {{ item }}(\\s|$)' + line: install {{ item }} true + create: true + loop: "{{ prelim_wireless_modules.stdout_lines }}" + + - name: "3.1.2 | PATCH | Ensure wireless interfaces are disabled | blacklist" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/blacklist.conf + regexp: "^(#)?blacklist {{ item }}(\\s|$)" + line: "blacklist {{ item }}" + create: true + mode: '0600' + loop: "{{ prelim_wireless_modules.stdout_lines }}" + +- name: "3.1.3 | PATCH | Ensure bluetooth services are not in use" + when: + - ubtu24cis_rule_3_1_3 + tags: + - level1-server + - level2-workstation + - patch + - bluetooth + - rule_3.1.3 + - NIST800-53R5_CM-7 + block: + - name: "3.1.3 | PATCH | Ensure bluetooth services are not in use | pkg" + when: + - not ubtu24cis_bluetooth_service + - not ubtu24cis_bluetooth_mask + ansible.builtin.package: + name: bluez + state: absent + + - name: "3.1.3 | PATCH | Ensure bluetooth services are not in use | mask" + when: + - not ubtu24cis_bluetooth_service + - ubtu24cis_bluetooth_mask + notify: Systemd_daemon_reload + ansible.builtin.systemd: + name: bluetooth.service + enabled: false + state: stopped + masked: true diff --git a/tasks/section_3/cis_3.2.x.yml b/tasks/section_3/cis_3.2.x.yml new file mode 100644 index 0000000..9c7a860 --- /dev/null +++ b/tasks/section_3/cis_3.2.x.yml @@ -0,0 +1,121 @@ +--- + +- name: "3.2.1 | PATCH | Ensure dccp kernel module is not available" + when: + - ubtu24cis_rule_3_2_1 + tags: + - level2-server + - level2-workstation + - patch + - rule_3.2.1 + - NIST800-53R5_CM-7 + - 
NIST800-53R5_SI-4 + - dccp + block: + - name: "3.2.1 | PATCH | Ensure dccp kernel module is not available | modprobe" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/dccp.conf + regexp: '^(#)?install dccp(\\s|$)' + line: "{{ item }}" + create: true + loop: + - install dccp /bin/true + - blacklist dccp + + - name: "3.2.1 | PATCH | Ensure dccp kernel module is not available | blacklist" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/blacklist.conf + regexp: "^(#)?blacklist dccp(\\s|$)" + line: "blacklist dccp" + create: true + mode: '0600' + +- name: "3.2.2 | PATCH | Ensure tipc kernel module is not available" + when: + - ubtu24cis_rule_3_2_2 + tags: + - level2-server + - level2-workstation + - patch + - rule_3.2.2 + - NIST800-53R5_CM-7 + - NIST800-53R5_SI-4 + - tipc + block: + - name: "3.2.2 | PATCH | Ensure tipc kernel module is not available | modprobe" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/tipc.conf + regexp: '^(#)?install tipc(\\s|$)' + line: "{{ item }}" + create: true + loop: + - install tipc /bin/true + - blacklist tipc + + - name: "3.2.2 | PATCH | Ensure tipc kernel module is not available | blacklist" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/blacklist.conf + regexp: "^(#)?blacklist tipc(\\s|$)" + line: "blacklist tipc" + create: true + mode: '0600' + +- name: "3.2.3 | PATCH | Ensure rds kernel module is not available" + when: + - ubtu24cis_rule_3_2_3 + tags: + - level2-server + - level2-workstation + - patch + - rule_3.2.3 + - NIST800-53R5_CM-7 + - NIST800-53R5_SI-4 + - rds + block: + - name: "3.2.3 | PATCH | Ensure rds kernel module is not available | modprobe" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/rds.conf + regexp: '^(#)?install rds(\\s|$)' + line: "{{ item }}" + create: true + loop: + - install rds /bin/true + - blacklist rds + + - name: "3.2.3 | PATCH | Ensure rds kernel module is not available | blacklist" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/blacklist.conf + regexp: 
"^(#)?blacklist rds(\\s|$)" + line: "blacklist rds" + create: true + mode: '0600' + +- name: "3.2.4 | PATCH | Ensure sctp kernel module is not available" + when: + - ubtu24cis_rule_3_2_4 + tags: + - level2-server + - level2-workstation + - patch + - rule_3.2.4 + - NIST800-53R5_CM-7 + - NIST800-53R5_SI-4 + - sctp + block: + - name: "3.2.4 | PATCH | Ensure sctp kernel module is not available | modprobe" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/sctp.conf + regexp: '^(#)?install sctp(\\s|$)' + line: "{{ item }}" + create: true + loop: + - install sctp /bin/true + - blacklist sctp + + - name: "3.2.4 | PATCH | Ensure sctp kernel module is not available | blacklist" + ansible.builtin.lineinfile: + path: /etc/modprobe.d/blacklist.conf + regexp: "^(#)?blacklist sctp(\\s|$)" + line: "blacklist sctp" + create: true + mode: '0600' diff --git a/tasks/section_3/cis_3.3.x.yml b/tasks/section_3/cis_3.3.x.yml new file mode 100644 index 0000000..8aa0094 --- /dev/null +++ b/tasks/section_3/cis_3.3.x.yml @@ -0,0 +1,352 @@ +--- + +- name: "3.3.1 | PATCH | Ensure IP forwarding is disabled" + when: + - ubtu24cis_rule_3_3_1 + - not ubtu24cis_is_router + tags: + - level1-server + - level1-workstation + - patch + - rule_3.3.1 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - ip_forwarding + - sysctl + block: + - name: "3.3.1 | PATCH | Ensure IP forwarding is disabled | IPv4 settings" + ansible.posix.sysctl: + name: net.ipv4.ip_forward + value: '0' + sysctl_set: true + sysctl_file: "{{ ubtu24cis_sysctl_network_conf }}" + state: present + reload: true + ignoreerrors: true + notify: + - Flush ipv4 route table + + - name: "3.3.1 | PATCH | Ensure IP forwarding is disabled | IPv6 settings" + when: ubtu24cis_ipv6_disable == 'sysctl' + ansible.posix.sysctl: + name: net.ipv6.conf.all.forwarding + value: '0' + sysctl_set: true + sysctl_file: "{{ ubtu24cis_sysctl_network_conf }}" + state: present + reload: true + 
ignoreerrors: true + notify: + - Flush ipv6 route table + +- name: "3.3.2 | PATCH | Ensure packet redirect sending is disabled" + when: + - ubtu24cis_rule_3_3_2 + - not ubtu24cis_is_router + tags: + - level1-server + - level1-workstation + - patch + - rule_3.3.2 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - packet_redirect + - sysctl + ansible.posix.sysctl: + name: "{{ item }}" + value: '0' + sysctl_set: true + sysctl_file: "{{ ubtu24cis_sysctl_network_conf }}" + state: present + reload: true + ignoreerrors: true + loop: + - net.ipv4.conf.all.send_redirects + - net.ipv4.conf.default.send_redirects + notify: Flush ipv4 route table + +- name: "3.3.3 | PATCH | Ensure bogus ICMP responses are ignored" + when: + - ubtu24cis_rule_3_3_3 + tags: + - level1-server + - level1-workstation + - patch + - rule_3.3.3 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - icmp + - sysctl + ansible.posix.sysctl: + name: net.ipv4.icmp_ignore_bogus_error_responses + value: '1' + sysctl_set: true + sysctl_file: "{{ ubtu24cis_sysctl_network_conf }}" + state: present + reload: true + ignoreerrors: true + notify: Flush ipv4 route table + +- name: "3.3.4 | PATCH | Ensure broadcast ICMP requests are ignored" + when: + - ubtu24cis_rule_3_3_4 + tags: + - level1-server + - level1-workstation + - patch + - rule_3.3.4 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - icmp + - sysctl + ansible.posix.sysctl: + name: net.ipv4.icmp_echo_ignore_broadcasts + value: '1' + sysctl_set: true + sysctl_file: "{{ ubtu24cis_sysctl_network_conf }}" + state: present + reload: true + ignoreerrors: true + notify: Flush ipv4 route table + +- name: "3.3.5 | PATCH | Ensure ICMP redirects are not accepted" + when: + - ubtu24cis_rule_3_3_5 + tags: + - level1-server + - level1-workstation + - patch + - rule_3.3.5 + - 
NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - icmp + - sysctl + block: + - name: "3.3.5 | PATCH | Ensure ICMP redirects are not accepted | IPv4 settings" + ansible.posix.sysctl: + name: "{{ item }}" + value: '0' + sysctl_set: true + sysctl_file: "{{ ubtu24cis_sysctl_network_conf }}" + state: present + reload: true + ignoreerrors: true + loop: + - net.ipv4.conf.all.accept_redirects + - net.ipv4.conf.default.accept_redirects + notify: Flush ipv4 route table + + - name: "3.3.5 | PATCH | Ensure ICMP redirects are not accepted | IPv6 settings" + ansible.posix.sysctl: + name: "{{ item }}" + value: '0' + sysctl_set: true + sysctl_file: "{{ ubtu24cis_sysctl_network_conf }}" + state: present + reload: true + ignoreerrors: true + when: ubtu24cis_ipv6_disable == 'sysctl' + loop: + - net.ipv6.conf.all.accept_redirects + - net.ipv6.conf.default.accept_redirects + notify: Flush ipv6 route table + +- name: "3.3.6 | PATCH | Ensure secure ICMP redirects are not accepted" + when: + - ubtu24cis_rule_3_3_6 + tags: + - level1-server + - level1-workstation + - patch + - rule_3.3.6 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - icmp + - sysctl + ansible.posix.sysctl: + name: "{{ item }}" + value: '0' + sysctl_set: true + sysctl_file: "{{ ubtu24cis_sysctl_network_conf }}" + state: present + reload: true + ignoreerrors: true + loop: + - net.ipv4.conf.all.secure_redirects + - net.ipv4.conf.default.secure_redirects + notify: Flush ipv4 route table + +- name: "3.3.7 | PATCH | Ensure Reverse Path Filtering is enabled" + when: + - ubtu24cis_rule_3_3_7 + tags: + - level1-server + - level1-workstation + - patch + - rule_3.3.7 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - reverse_path_filtering + - sysctl + ansible.posix.sysctl: + name: "{{ item }}" + value: '1' + sysctl_set: true + sysctl_file: "{{ 
ubtu24cis_sysctl_network_conf }}" + state: present + reload: true + ignoreerrors: true + loop: + - net.ipv4.conf.all.rp_filter + - net.ipv4.conf.default.rp_filter + notify: Flush ipv4 route table + +- name: "3.3.8 | PATCH | Ensure source routed packets are not accepted" + when: + - ubtu24cis_rule_3_3_8 + - not ubtu24cis_is_router + tags: + - level1-server + - level1-workstation + - patch + - rule_3.3.8 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - routed_packets + - sysctl + block: + - name: "3.3.8 | PATCH | Ensure source routed packets are not accepted | IPv4 settings" + ansible.posix.sysctl: + name: "{{ item }}" + value: '0' + sysctl_set: true + sysctl_file: "{{ ubtu24cis_sysctl_network_conf }}" + state: present + reload: true + ignoreerrors: true + loop: + - net.ipv4.conf.all.accept_source_route + - net.ipv4.conf.default.accept_source_route + notify: Flush ipv4 route table + + - name: "3.3.8 | PATCH | Ensure source routed packets are not accepted | IPv6 settings" + ansible.posix.sysctl: + name: "{{ item }}" + value: '0' + sysctl_set: true + sysctl_file: "{{ ubtu24cis_sysctl_network_conf }}" + state: present + reload: true + ignoreerrors: true + when: ubtu24cis_ipv6_disable == 'sysctl' + loop: + - net.ipv6.conf.all.accept_source_route + - net.ipv6.conf.default.accept_source_route + notify: Flush ipv6 route table + +- name: "3.3.9 | PATCH | Ensure suspicious packets are logged" + when: + - ubtu24cis_rule_3_3_9 + tags: + - level1-server + - level1-workstation + - patch + - rule_3.3.9 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - suspicious_packets + - sysctl + ansible.posix.sysctl: + name: "{{ item }}" + value: '1' + sysctl_set: true + sysctl_file: "{{ ubtu24cis_sysctl_network_conf }}" + state: present + reload: true + ignoreerrors: true + loop: + - net.ipv4.conf.all.log_martians + - net.ipv4.conf.default.log_martians + notify: 
Flush ipv4 route table + +- name: "3.3.10 | PATCH | Ensure tcp syn cookies is enabled" + when: + - ubtu24cis_rule_3_3_10 + tags: + - level1-server + - level1-workstation + - patch + - rule_3.3.10 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - tcp_syn_cookies + - sysctl + ansible.posix.sysctl: + name: net.ipv4.tcp_syncookies + value: '1' + sysctl_set: true + sysctl_file: "{{ ubtu24cis_sysctl_network_conf }}" + state: present + reload: true + ignoreerrors: true + notify: Flush ipv4 route table + +- name: "3.3.11 | PATCH | Ensure IPv6 router advertisements are not accepted" + when: + - ubtu24cis_rule_3_3_11 + - ubtu24cis_ipv6_required + tags: + - level1-server + - level1-workstation + - patch + - rule_3.3.11 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - ipv6 + - router_advertisements + - sysctl + ansible.posix.sysctl: + name: "{{ item }}" + value: '0' + sysctl_set: true + sysctl_file: "{{ ubtu24cis_sysctl_network_conf }}" + state: present + reload: true + ignoreerrors: true + loop: + - net.ipv6.conf.all.accept_ra + - net.ipv6.conf.default.accept_ra + notify: Flush ipv6 route table diff --git a/tasks/section_3/main.yml b/tasks/section_3/main.yml new file mode 100644 index 0000000..5bc0bec --- /dev/null +++ b/tasks/section_3/main.yml @@ -0,0 +1,13 @@ +--- + +- name: "SECTION | 3.1 | Configure Network Devices" + ansible.builtin.import_tasks: + file: cis_3.1.x.yml + +- name: "SECTION | 3.2 | Configure Network Kernel Modules" + ansible.builtin.import_tasks: + file: cis_3.2.x.yml + +- name: "SECTION | 3.3 | Configure Network Kernel Parameters" + ansible.builtin.import_tasks: + file: cis_3.3.x.yml diff --git a/tasks/section_4/cis_4.1.1.yml b/tasks/section_4/cis_4.1.1.yml new file mode 100644 index 0000000..e602535 --- /dev/null +++ b/tasks/section_4/cis_4.1.1.yml @@ -0,0 +1,43 @@ +--- + +- name: "4.1.1 | PATCH | Ensure a single firewall 
configuration utility is in use" + when: + - ubtu24cis_rule_4_1_1 + tags: + - level1-server + - level1-workstation + - patch + - rule_4.1.1 + - NIST800-53R5_NA + vars: + warn_control_id: '4.1.1' + block: + + - name: "4.1.1 | PATCH | Ensure a single firewall configuration utility is in use | Check packages" + ansible.builtin.shell: dpkg-query -l | grep -Ec "^ii\s*ufw|^ii\s*iptables|^ii\s*nftables" + changed_when: false + failed_when: discovered_firewall_pkgs.rc not in [ 0, 1 ] + register: discovered_firewall_pkgs + + - name: "4.1.1 | PATCH | Ensure a single firewall configuration utility is in use | Check enabled" + when: discovered_firewall_pkgs.stdout | int not in [ 0, 1 ] + ansible.builtin.shell: for svc in ufw nftables iptables; do if systemctl is-enabled $svc 2>/dev/null | grep -q enabled; then fw_enabled=$(( fw_enabled + 1 )); fi; done; echo $fw_enabled + changed_when: false + register: discovered_enabled_firewalls + environment: + fw_enabled: "0" + + - name: "4.1.1 | AUDIT | Ensure a single firewall configuration utility is in use | Message out warning" + when: + - discovered_enabled_firewalls.stdout is defined + - discovered_enabled_firewalls.stdout|int >= 2 + ansible.builtin.debug: + msg: "Warning!! You have more than one firewalls installed and running or enabled. 
Please use UFW, iptables, or manually manage nftables" + + - name: "4.1.1 | AUDIT | Ensure a single firewall configuration utility is in use | Set warning count" + when: + - discovered_enabled_firewalls.stdout is defined + - discovered_enabled_firewalls.stdout|int >= 2 + ansible.builtin.import_tasks: + file: warning_facts.yml + diff --git a/tasks/section_4/cis_4.2.x.yml b/tasks/section_4/cis_4.2.x.yml new file mode 100644 index 0000000..04565c0 --- /dev/null +++ b/tasks/section_4/cis_4.2.x.yml @@ -0,0 +1,186 @@ +--- + +- name: "4.2.1 | PATCH | Ensure ufw is installed" + when: + - ubtu24cis_rule_4_2_1 + - "'ufw' not in ansible_facts.packages" + tags: + - level1-server + - level1-workstation + - patch + - rule_4.2.1 + - NIST800-53R5_SC-7 + - apt + - ufw + ansible.builtin.package: + name: ufw + state: present + +- name: "4.2.2 | PATCH | Ensure iptables-persistent is not installed with ufw" + when: + - ubtu24cis_rule_4_2_2 + - "'iptables-persistent' in ansible_facts.packages" + tags: + - level1-server + - level1-workstation + - patch + - rule_4.2.2 + - NIST800-53R5_SC-7 + - ufw + ansible.builtin.package: + name: iptables-persistent + state: absent + +# Adding the allow OpenSSH rule while enabling ufw to allow ansible to run after enabling +- name: "4.2.3 | PATCH | Ensure ufw service is enabled" + when: + - ubtu24cis_rule_4_2_3 + tags: + - level1-server + - level1-workstation + - patch + - rule_4.2.3 + - NIST800-53R5_SC-7 + - ufw + block: + - name: "4.2.3 | PATCH | Ensure ufw service is enabled | ssh port enabled" + community.general.ufw: + rule: allow + name: OpenSSH + state: enabled + notify: Reload ufw + + - name: "4.2.3 | PATCH | Ensure ufw service is enabled | service" + ansible.builtin.systemd: + name: ufw + enabled: true + force: true + state: started + +- name: "4.2.4 | PATCH | Ensure ufw loopback traffic is configured" + when: + - ubtu24cis_rule_4_2_4 + tags: + - level1-server + - level1-workstation + - patch + - rule_4.2.4 + - NIST800-53R5_SC-7 + - ufw + 
block: + - name: "4.2.4 | PATCH | Ensure ufw loopback traffic is configured | Set allow in ufw rules" + community.general.ufw: + rule: allow + direction: in + interface: lo + notify: Reload ufw + + - name: "4.2.4 | PATCH | Ensure loopback traffic is configured | Set allow out ufw rules" + community.general.ufw: + rule: allow + direction: out + interface: lo + notify: Reload ufw + + - name: "4.2.4 | PATCH | Ensure ufw loopback traffic is configured | Set deny ufw rules IPv4" + community.general.ufw: + rule: deny + direction: in + from_ip: 127.0.0.0/8 + notify: Reload ufw + + - name: "4.2.4 | PATCH | Ensure ufw loopback traffic is configured | Set deny ufw rules IPv6" + when: ubtu24cis_ipv6_required + community.general.ufw: + rule: deny + direction: in + from_ip: '::1' + notify: Reload ufw + +- name: "4.2.5 | PATCH | Ensure ufw outbound connections are configured" + when: + - ubtu24cis_rule_4_2_5 + tags: + - level1-server + - level1-workstation + - patch + - rule_4.2.5 + - NIST800-53R5_SC-7 + - ufw + block: + - name: "4.2.5 | PATCH | Ensure ufw outbound connections are configured | Custom ports" + when: ubtu24cis_ufw_allow_out_ports != "all" + community.general.ufw: + rule: allow + direction: out + to_port: '{{ item }}' + with_items: + - "{{ ubtu24cis_ufw_allow_out_ports }}" + notify: Reload ufw + + - name: "4.2.5 | PATCH | Ensure ufw outbound connections are configured | Allow all" + when: "'all' in ubtu24cis_ufw_allow_out_ports" + community.general.ufw: + rule: allow + direction: out + notify: Reload ufw + +- name: "4.2.6 | AUDIT | Ensure ufw firewall rules exist for all open ports" + when: + - ubtu24cis_rule_4_2_6 + tags: + - level1-server + - level1-workstation + - audit + - rule_4.2.6 + - NIST800-53R5_SC-7 + - ufw + vars: + warn_control_id: '4.2.6' + block: + - name: "4.2.6 | AUDIT | Ensure ufw firewall rules exist for all open ports | Get list of open ports" + ansible.builtin.shell: ss -4tuln + changed_when: false + failed_when: false + check_mode: false + 
register: ubtu24cis_4_2_6_open_listen_ports + + - name: "4.2.6 | AUDIT | Ensure ufw firewall rules exist for all open ports | Get list of firewall rules" + ansible.builtin.shell: ufw status + changed_when: false + failed_when: false + check_mode: false + register: ubtu24cis_4_2_6_firewall_rules + + - name: "4.2.6 | AUDIT | Ensure ufw firewall rules exist for all open ports | Message out settings" + ansible.builtin.debug: + msg: + - "Warning!! Below are the listening ports and firewall rules" + - "Please create firewall rule for any open ports if not already done" + - "*****---Open Listen Ports---*****" + - "{{ ubtu24cis_4_2_6_open_listen_ports.stdout_lines }}" + - "*****---Firewall Rules---*****" + - "{{ ubtu24cis_4_2_6_firewall_rules.stdout_lines }}" + + - name: "4.2.6 | AUDIT | Ensure ufw firewall rules exist for all open ports | Set warning count" + ansible.builtin.import_tasks: + file: warning_facts.yml + +- name: "4.2.7 | PATCH | Ensure ufw default deny firewall policy" + when: + - ubtu24cis_rule_4_2_7 + tags: + - level1-server + - level1-workstation + - patch + - rule_4.2.7 + - NIST800-53R5_SC-7 + - ufw + community.general.ufw: + default: deny + direction: "{{ item }}" + loop: + - incoming + - outgoing + - routed + notify: Reload ufw diff --git a/tasks/section_4/cis_4.3.x.yml b/tasks/section_4/cis_4.3.x.yml new file mode 100644 index 0000000..be19726 --- /dev/null +++ b/tasks/section_4/cis_4.3.x.yml @@ -0,0 +1,247 @@ +--- + +# --------------- +# --------------- +# NFTables is unsupported with this role. 
However I have the actions commented out as a guide +# --------------- +# --------------- +- name: "4.3.1 | AUDIT | Ensure nftables is installed" + when: + - ubtu24cis_rule_4_3_1 + - ubtu24cis_firewall_package == "nftables" + tags: + - level1-server + - level1-workstation + - audit + - rule_4.3.1 + - NIST800-53R5_CA-9 + - nftables + vars: + warn_control_id: '4.3.1' + block: + - name: "4.3.1 | AUDIT | Ensure nftables is installed | Message out warning" + ansible.builtin.debug: + msg: "Warning!! NFTables is not supported in this role. Please use UFW, iptables, or manually manage nftables" + + - name: "4.3.1 | AUDIT | Ensure nftables is installed | Set warning count" + ansible.builtin.import_tasks: + file: warning_facts.yml + +- name: "4.3.2 | AUDIT | Ensure ufw is not in use with nftables" + when: + - ubtu24cis_rule_4_3_2 + - ubtu24cis_firewall_package == "nftables" + tags: + - level1-server + - level1-workstation + - audit + - rule_4.3.2 + - NIST800-53R5_SC-7 + - nftables + vars: + warn_control_id: '4.3.2' + block: + - name: "4.3.2 | AUDIT | Ensure ufw is not in use with nftables | Message out warning" + ansible.builtin.debug: + msg: "Warning!! NFTables is not supported in this role. Please use UFW, iptables, or manually manage nftables" + # ansible.builtin.package: + # name: ufw + # state: absent + + - name: "4.3.2 | AUDIT | Ensure ufw is not in use with nftables | Set warning count" + ansible.builtin.import_tasks: + file: warning_facts.yml + +- name: "4.3.3 | AUDIT | Ensure iptables are flushed with nftables" + when: + - ubtu24cis_rule_4_3_3 + - ubtu24cis_firewall_package == "nftables" + tags: + - level1-server + - level1-workstation + - audit + - rule_4.3.3 + - NIST800-53R5_CA-9 + - NIST800-53R5_SC-7 + - nftables + vars: + warn_control_id: '4.3.3' + block: + - name: "4.3.3 | AUDIT | Ensure iptables are flushed with nftables | Message out warning" + ansible.builtin.debug: + msg: "Warning!! NFTables is not supported in this role. 
Please use UFW, iptables, or manually manage nftables" + # ansible.builtin.iptables: + # flush: yes + + - name: "4.3.3 | AUDIT | Ensure iptables are flushed with nftables | Set warning count" + ansible.builtin.import_tasks: + file: warning_facts.yml + +- name: "4.3.4 | AUDIT | Ensure a nftables table exists" + when: + - ubtu24cis_rule_4_3_4 + - ubtu24cis_firewall_package == "nftables" + tags: + - level1-server + - level1-workstation + - patch + - rule_4.3.4 + - NIST800-53R5_CA-9 + - NIST800-53R5_SC-7 + - nftables + vars: + warn_control_id: '4.3.4' + block: + - name: "4.3.4 | AUDIT | Ensure a nftables table exists" + ansible.builtin.debug: + msg: "Warning!! NFTables is not supported in this role. Please use UFW, iptables, or manually manage nftables | Message out warning" + # ansible.builtin.shell: "nft create table {{ ubtu24cis_nftables_table_name }}" + # changed_when: ubtu24cis_4_3_4_new_table.rc == 0 + # failed_when: false + # check_mode: false + # register: ubtu24cis_4_3_4_new_table + + - name: "4.3.4 | AUDIT | Ensure a nftables table exists | Set warning count" + ansible.builtin.import_tasks: + file: warning_facts.yml + +- name: "4.3.5 | AUDIT | Ensure nftables base chains exist" + when: + - ubtu24cis_rule_4_3_5 + - ubtu24cis_firewall_package == "nftables" + tags: + - level1-server + - level1-workstation + - audit + - rule_4.3.5 + - NIST800-53R5_NA + - nftables + vars: + warn_control_id: '4.3.5' + block: + - name: "4.3.5 | AUDIT | Ensure nftables base chains exist" + ansible.builtin.debug: + msg: "Warning!! NFTables is not supported in this role. 
Please use UFW, iptables, or manually manage nftables | Message out warning" + + - name: "4.3.5 | AUDIT | Ensure nftables base chains exist | Set warning count" + ansible.builtin.import_tasks: + file: warning_facts.yml + +- name: "4.3.6 | AUDIT | Ensure nftables loopback traffic is configured" + when: + - ubtu24cis_rule_4_3_6 + - ubtu24cis_firewall_package == "nftables" + tags: + - level1-server + - level1-workstation + - audit + - rule_4.3.6 + - NIST800-53R5_CA-9 + - NIST800-53R5_SC-7 + - nftables + vars: + warn_control_id: '4.3.6' + block: + - name: "4.3.6 | AUDIT | Ensure nftables loopback traffic is configured | Message out warning" + ansible.builtin.debug: + msg: "Warning!! NFTables is not supported in this role. Please use UFW, iptables, or manually manage nftables" + + - name: "4.3.6 | AUDIT | Ensure nftables loopback traffic is configured | Set warning count" + ansible.builtin.import_tasks: + file: warning_facts.yml + +- name: "4.3.7 | AUDIT | Ensure nftables outbound and established connections are configured" + when: + - ubtu24cis_rule_4_3_7 + - ubtu24cis_firewall_package == "nftables" + tags: + - level1-server + - level1-workstation + - audit + - rule_4.3.7 + - NIST800-53R5_CA-9 + - NIST800-53R5_SC-7 + - nftables + vars: + warn_control_id: '4.3.7' + block: + - name: "4.3.7 | AUDIT | Ensure nftables outbound and established connections are configured | Message out warning" + ansible.builtin.debug: + msg: "Warning!! NFTables is not supported in this role. 
Please use UFW, iptables, or manually manage nftables" + + - name: "4.3.7 | AUDIT | Ensure nftables outbound and established connections are configured | Set warning count" + ansible.builtin.import_tasks: + file: warning_facts.yml + +- name: "4.3.8 | AUDIT | Ensure nftables default deny firewall policy" + when: + - ubtu24cis_rule_4_3_8 + - ubtu24cis_firewall_package == "nftables" + tags: + - level1-server + - level1-workstation + - audit + - rule_4.3.8 + - NIST800-53R5_CA-9 + - NIST800-53R5_SC-7 + - nftables + vars: + warn_control_id: '4.3.8' + block: + - name: "4.3.8 | AUDIT | Ensure nftables default deny firewall policy | Message out warning" + ansible.builtin.debug: + msg: "Warning!! NFTables is not supported in this role. Please use UFW, iptables, or manually manage nftables" + + - name: "4.3.8 | AUDIT | Ensure nftables default deny firewall policy | Set warning count" + ansible.builtin.import_tasks: + file: warning_facts.yml + +- name: "4.3.9 | AUDIT | Ensure nftables service is enabled" + when: + - ubtu24cis_rule_4_3_9 + - ubtu24cis_firewall_package == "nftables" + tags: + - level1-server + - level1-workstation + - audit + - rule_4.3.9 + - NIST800-53R5_CA-9 + - NIST800-53R5_SC-7 + - nftables + vars: + warn_control_id: '4.3.9' + block: + - name: "4.3.9 | AUDIT | Ensure nftables service is enabled | Message out warning" + ansible.builtin.debug: + msg: "Warning!! NFTables is not supported in this role. 
Please use UFW, iptables, or manually manage nftables" + # ansible.builtin.service: + # name: nftables + # state: started + # enabled: yes + + - name: "4.3.9 | AUDIT | Ensure nftables service is enabled | Set warning count" + ansible.builtin.import_tasks: + file: warning_facts.yml + +- name: "4.3.10 | AUDIT | Ensure nftables rules are permanent" + when: + - ubtu24cis_rule_4_3_10 + - ubtu24cis_firewall_package == "nftables" + tags: + - level1-server + - level1-workstation + - audit + - rule_4.3.10 + - NIST800-53R5_CA-9 + - NIST800-53R5_SC-7 + - nftables + vars: + warn_control_id: '4.3.10' + block: + - name: "4.3.10 | AUDIT | Ensure nftables rules are permanent | Message out warning" + ansible.builtin.debug: + msg: "Warning!! NFTables is not supported in this role. Please use UFW, iptables, or manually manage nftables" + + - name: "4.3.10 | AUDIT | Ensure nftables rules are permanent | Set warning count" + ansible.builtin.import_tasks: + file: warning_facts.yml diff --git a/tasks/section_4/cis_4.4.1.x.yml b/tasks/section_4/cis_4.4.1.x.yml new file mode 100644 index 0000000..6eabb20 --- /dev/null +++ b/tasks/section_4/cis_4.4.1.x.yml @@ -0,0 +1,412 @@ +--- + +- name: "4.4.1.1 | PATCH | Ensure iptables packages are installed" + when: + - ubtu24cis_rule_4_4_1_1 + - ubtu24cis_firewall_package == "iptables" + tags: + - level1-server + - level1-workstation + - patch + - rule_4.4.1.1 + - NIST800-53R5_CA-9 + - NIST800-53R5_SC-7 + - iptables + ansible.builtin.package: + name: ['iptables', 'iptables-persistent'] + state: present + +- name: "4.4.1.2 | PATCH | Ensure nftables is not installed with iptables" + when: + - ubtu24cis_rule_4_4_1_2 + - ubtu24cis_firewall_package == "iptables" + tags: + - level1-server + - level1-workstation + - patch + - rule_4.4.1.2 + - NIST800-53R5_CA-9 + - NIST800-53R5_SC-7 + - iptables + ansible.builtin.package: + name: nftables + state: absent + purge: "{{ ubtu24cis_purge_apt }}" + +- name: "4.4.1.3 | PATCH | Ensure ufw is uninstalled or disabled 
with iptables" + when: + - ubtu24cis_rule_4_4_1_3 + - ubtu24cis_firewall_package == "iptables" + tags: + - level1-server + - level1-workstation + - patch + - rule_4.4.1.3 + - NIST800-53R5_CA-9 + - NIST800-53R5_SC-7 + - iptables + ansible.builtin.package: + name: ufw + state: absent + +- name: "4.4.1.1 | PATCH | Ensure iptables default deny firewall policy" + when: + - ubtu24cis_rule_4_4_1_1 + - ubtu24cis_ipv4_required + - not system_is_ec2 + tags: + - level1-server + - level1-workstation + - patch + - rule_4.4.1.1 + - iptables + block: + - name: "4.4.1.1 | PATCH | Ensure iptables default deny firewall policy | Configure SSH to be allowed in" + ansible.builtin.iptables: + chain: INPUT + protocol: tcp + destination_port: 22 + jump: ACCEPT + ctstate: 'NEW,ESTABLISHED' + notify: Iptables persistent + + - name: "4.4.1.1 | PATCH | Ensure iptables default deny firewall policy | Configure SSH to be allowed out" + ansible.builtin.iptables: + chain: OUTPUT + protocol: tcp + source_port: 22 + jump: ACCEPT + ctstate: 'NEW,ESTABLISHED' + notify: Iptables persistent + + - name: "4.4.1.1 | PATCH | Ensure iptables default deny firewall policy | Enable apt traffic" + ansible.builtin.iptables: + chain: INPUT + ctstate: 'ESTABLISHED' + jump: ACCEPT + notify: Iptables persistent + + - name: "4.4.1.1 | PATCH | Ensure iptables default deny firewall policy | Set drop items" + ansible.builtin.iptables: + policy: DROP + chain: "{{ item }}" + notify: Iptables persistent + with_items: + - INPUT + - FORWARD + - OUTPUT + +- name: "4.4.1.2 | PATCH | Ensure iptables loopback traffic is configured" + when: + - ubtu24cis_rule_4_4_1_2 + - ubtu24cis_firewall_package == "iptables" + - ubtu24cis_ipv4_required + tags: + - level1-server + - level1-workstation + - patch + - rule_4.4.1.2 + - iptables + block: + - name: "4.4.1.2 | PATCH | Ensure iptables loopback traffic is configured | INPUT loopback ACCEPT" + ansible.builtin.iptables: + action: append + chain: INPUT + in_interface: lo + jump: ACCEPT + 
notify: Iptables persistent + + - name: "4.4.1.2 | PATCH | Ensure iptables loopback traffic is configured | OUTPUT loopback ACCEPT" + ansible.builtin.iptables: + action: append + chain: OUTPUT + out_interface: lo + jump: ACCEPT + notify: Iptables persistent + + - name: "4.4.1.2 | PATCH | Ensure iptables loopback traffic is configured | OUTPUT loopback ACCEPT" + ansible.builtin.iptables: + action: append + chain: INPUT + source: 127.0.0.0/8 + jump: DROP + notify: Iptables persistent + +- name: "4.4.1.3 | PATCH | Ensure iptables outbound and established connections are configured" + when: + - ubtu24cis_rule_4_4_1_3 + - ubtu24cis_firewall_package == "iptables" + - ubtu24cis_ipv4_required + tags: + - level1-server + - level1-workstation + - patch + - rule_4.4.1.3 + - iptables + ansible.builtin.iptables: + action: append + chain: '{{ item.chain }}' + protocol: '{{ item.protocol }}' + match: state + ctstate: '{{ item.ctstate }}' + jump: ACCEPT + notify: Iptables persistent + with_items: + - { chain: OUTPUT, protocol: tcp, ctstate: 'NEW,ESTABLISHED' } + - { chain: OUTPUT, protocol: udp, ctstate: 'NEW,ESTABLISHED' } + - { chain: OUTPUT, protocol: icmp, ctstate: 'NEW,ESTABLISHED' } + - { chain: INPUT, protocol: tcp, ctstate: 'ESTABLISHED' } + - { chain: INPUT, protocol: udp, ctstate: 'ESTABLISHED' } + - { chain: INPUT, protocol: icmp, ctstate: 'ESTABLISHED' } + +- name: "4.4.1.4 | AUDIT | Ensure iptables firewall rules exist for all open ports" + when: + - ubtu24cis_rule_4_4_1_4 + - ubtu24cis_firewall_package == "iptables" + - ubtu24cis_ipv4_required + tags: + - level1-server + - level1-workstation + - audit + - rule_4.4.1.4 + - iptables + block: + - name: "4.4.1.4 | AUDIT | Ensure iptables firewall rules exist for all open ports | Get list of open ports" + ansible.builtin.shell: ss -4tuln + changed_when: false + failed_when: false + check_mode: false + register: ubtu24cis_4_4_1_4_open_ports + + - name: "4.4.1.4 | AUDIT | Ensure iptables firewall rules exist for all open 
ports | Get list of rules" + ansible.builtin.shell: iptables -L INPUT -v -n + changed_when: false + failed_when: false + check_mode: false + register: ubtu24cis_4_4_1_4_current_rules + + - name: "4.4.1.4 | AUDIT | Ensure iptables firewall rules exist for all open ports | Warn about settings" + ansible.builtin.debug: + msg: + - "Warning!! Below is the list the open ports and current rules" + - "Please create a rule for any open port that does not have a current rule" + - "Open Ports:" + - "{{ ubtu24cis_4_4_1_4_open_ports.stdout_lines }}" + - "Current Rules:" + - "{{ ubtu24cis_4_4_1_4_current_rules.stdout_lines }}" + + - name: "4.4.1.4 | AUDIT | Ensure iptables firewall rules exist for all open ports | Set warning count" + ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '4.4.1.4' + +# --------------- +# --------------- +# This is not a control however using the iptables module only writes to memery +# if a reboot occurs that means changes can revert. This task will make the +# above iptables settings permanent +# --------------- +# --------------- +# - name: "Make IPTables persistent | Not a control" +# block: +# - name: "Make IPTables persistent | Install iptables-persistent" +# ansible.builtin.package: +# name: iptables-persistent +# state: present + +# - name: "Make IPTables persistent | Save to persistent files" +# ansible.builtin.shell: bash -c "iptables-save > /etc/iptables/rules.v4" +# changed_when: ubtu24cis_iptables_save.rc == 0 +# failed_when: ubtu24cis_iptables_save.rc > 0 +# register: ubtu24cis_iptables_save +# when: +# - ubtu24cis_firewall_package == "iptables" +# - ubtu24cis_save_iptables_cis_rules +# - ubtu24cis_rule_4_4_1_1 or +# ubtu24cis_rule_4_4_1_2 or +# ubtu24cis_rule_4_4_1_3 or +# ubtu24cis_rule_4_4_1_4 + +- name: "4.4.1.1 | PATCH | Ensure ip6tables default deny firewall policy" + when: + - ubtu24cis_rule_4_4_1_1 + - ubtu24cis_ipv6_required + tags: + - level1-server + - level1-workstation + - patch + - 
rule_4.4.1.1 + - ip6tables + block: + - name: "4.4.1.1 | PATCH | Ensure ip6tables default deny firewall policy | Configure SSH to be allowed out" + ansible.builtin.iptables: + chain: OUTPUT + protocol: tcp + source_port: 22 + jump: ACCEPT + ctstate: 'NEW,ESTABLISHED' + ip_version: ipv6 + notify: Ip6tables persistent + + - name: "4.4.1.1 | PATCH | Ensure ip6tables default deny firewall policy | Enable apt traffic" + ansible.builtin.iptables: + chain: INPUT + ctstate: 'ESTABLISHED' + jump: ACCEPT + ip_version: ipv6 + notify: Ip6tables persistent + + - name: "4.4.1.1 | PATCH | Ensure ip6tables default deny firewall policy | Set drop items" + ansible.builtin.iptables: + policy: DROP + chain: "{{ item }}" + ip_version: ipv6 + notify: Ip6tables persistent + loop: + - INPUT + - FORWARD + - OUTPUT + +- name: "4.4.1.2 | PATCH | Ensure ip6tables loopback traffic is configured" + when: + - ubtu24cis_rule_4_4_1_2 + - ubtu24cis_firewall_package == "iptables" + - ubtu24cis_ipv6_required + - not ubtu24cis_ipv4_required + tags: + - level1-server + - level1-workstation + - patch + - rule_4.4.1.2 + - ip6tables + block: + - name: "4.4.1.2 | PATCH | Ensure ip6tables loopback traffic is configured | INPUT loopback ACCEPT" + ansible.builtin.iptables: + action: append + chain: INPUT + in_interface: lo + jump: ACCEPT + ip_version: ipv6 + notify: Ip6tables persistent + + - name: "4.4.1.2 | PATCH | Ensure ip6tables loopback traffic is configured | OUTPUT loopback ACCEPT" + ansible.builtin.iptables: + action: append + chain: OUTPUT + out_interface: lo + jump: ACCEPT + ip_version: ipv6 + notify: Ip6tables persistent + + - name: "4.4.1.2 | PATCH | Ensure ip6tables loopback traffic is configured | INPUT loopback drop" + ansible.builtin.iptables: + action: append + chain: INPUT + source: ::1 + jump: DROP + ip_version: ipv6 + notify: Ip6tables persistent + +- name: "4.4.1.3 | PATCH | Ensure ip6tables outbound and established connections are configured" + when: + - ubtu24cis_rule_4_4_1_3 + - 
ubtu24cis_firewall_package == "iptables" + - ubtu24cis_ipv6_required + - not ubtu24cis_ipv4_required + tags: + - level1-server + - level1-workstation + - patch + - rule_4.4.1.3 + - ip6tables + ansible.builtin.iptables: + action: append + chain: '{{ item.chain }}' + protocol: '{{ item.protocol }}' + match: state + ctstate: '{{ item.ctstate }}' + jump: ACCEPT + ip_version: ipv6 + notify: Ip6tables persistent + loop: + - { chain: OUTPUT, protocol: tcp, ctstate: 'NEW,ESTABLISHED' } + - { chain: OUTPUT, protocol: udp, ctstate: 'NEW,ESTABLISHED' } + - { chain: OUTPUT, protocol: icmp, ctstate: 'NEW,ESTABLISHED' } + - { chain: INPUT, protocol: tcp, ctstate: 'ESTABLISHED' } + - { chain: INPUT, protocol: udp, ctstate: 'ESTABLISHED' } + - { chain: INPUT, protocol: icmp, ctstate: 'ESTABLISHED' } + +- name: "4.4.1.4 | AUDIT | Ensure ip6tables firewall rules exist for all open ports" + when: + - ubtu24cis_rule_4_4_1_4 + - ubtu24cis_firewall_package == "iptables" + - ubtu24cis_ipv6_required + - not ubtu24cis_ipv4_required + tags: + - level1-server + - level1-workstation + - audit + - rule_4.4.1.4 + - ip6tables + vars: + warn_control_id: '4.4.1.4' + block: + - name: "4.4.1.4 | AUDIT | Ensure ip6tables firewall rules exist for all open ports | Get list of open ports" + ansible.builtin.shell: ss -6tuln + changed_when: false + failed_when: false + check_mode: false + register: ubtu24cis_4_4_1_4_open_ports + + - name: "4.4.1.4 | AUDIT | Ensure ip6tables firewall rules exist for all open ports | Get list of rules" + ansible.builtin.shell: ip6tables -L INPUT -v -n + changed_when: false + failed_when: false + check_mode: false + register: ubtu24cis_4_4_1_4_current_rules + + - name: "4.4.1.4 | AUDIT | Ensure ip6tables firewall rules exist for all open ports | Warn about settings" + ansible.builtin.debug: + msg: + - "Warning!! 
Below is the list the open ports and current rules" + - "Please create a rule for any open port that does not have a current rule" + - "Open Ports:" + - "{{ ubtu24cis_4_4_1_4_open_ports.stdout_lines }}" + - "Current Rules:" + - "{{ ubtu24cis_4_4_1_4_current_rules.stdout_lines }}" + + - name: "4.4.1.4 | AUDIT | Ensure ip6tables firewall rules exist for all open ports | Set warning count" + ansible.builtin.import_tasks: + file: warning_facts.yml + +# --------------- +# --------------- +# This is not a control however using the ip6tables module only writes to memery +# if a reboot occurs that means changes can revert. This task will make the +# above ip6tables settings permanent +# --------------- +# --------------- +# via handler +# - name: "Make IP6Tables persistent | Not a control" +# block: +# - name: "Make IP6Tables persistent | Install iptables-persistent" +# ansible.builtin.package: +# name: iptables-persistent +# state: present +# when: "'iptables-persistent' not in ansible_facts.packages" + +# - name: "Make IP6Tables persistent | Save to persistent files" +# ansible.builtin.shell: bash -c "ip6tables-save > /etc/iptables/rules.v6" +# changed_when: ubtu24cis_ip6tables_save.rc == 0 +# failed_when: ubtu24cis_ip6tables_save.rc > 0 +# register: ubtu24cis_ip6tables_save +# when: +# - ubtu24cis_firewall_package == "iptables" +# - ubtu24cis_ipv6_required +# - not ubtu24cis_ipv4_required +# - ubtu24cis_save_iptables_cis_rules +# - ubtu24cis_rule_4_4_1_1 or +# ubtu24cis_rule_4_4_1_2 or +# ubtu24cis_rule_4_4_1_3 or +# ubtu24cis_rule_4_4_1_4 diff --git a/tasks/section_4/cis_4.4.2.x.yml b/tasks/section_4/cis_4.4.2.x.yml new file mode 100644 index 0000000..ff5338f --- /dev/null +++ b/tasks/section_4/cis_4.4.2.x.yml @@ -0,0 +1,188 @@ +--- + +- name: "4.4.2.1 | PATCH | Ensure iptables default deny firewall policy" + when: + - ubtu24cis_rule_4_4_2_1 + - ubtu24cis_ipv4_required + - not system_is_ec2 + tags: + - level1-server + - level1-workstation + - patch + - rule_4.4.2.1 + - 
NIST800-53R5_CA-9 + - NIST800-53R5_SC-7 + - iptables + block: + - name: "4.4.2.1 | PATCH | Ensure iptables default deny firewall policy | Configure SSH to be allowed in" + ansible.builtin.iptables: + chain: INPUT + protocol: tcp + destination_port: 22 + jump: ACCEPT + ctstate: 'NEW,ESTABLISHED' + notify: Iptables persistent + + - name: "4.4.2.1 | PATCH | Ensure iptables default deny firewall policy | Configure SSH to be allowed out" + ansible.builtin.iptables: + chain: OUTPUT + protocol: tcp + source_port: 22 + jump: ACCEPT + ctstate: 'NEW,ESTABLISHED' + notify: Iptables persistent + + - name: "4.4.2.1 | PATCH | Ensure iptables default deny firewall policy | Enable apt traffic" + ansible.builtin.iptables: + chain: INPUT + ctstate: 'ESTABLISHED' + jump: ACCEPT + notify: Iptables persistent + + - name: "4.4.2.1 | PATCH | Ensure iptables default deny firewall policy | Set drop items" + ansible.builtin.iptables: + policy: DROP + chain: "{{ item }}" + notify: Iptables persistent + loop: + - INPUT + - FORWARD + - OUTPUT + +- name: "4.4.2.2 | PATCH | Ensure iptables loopback traffic is configured" + when: + - ubtu24cis_rule_4_4_2_2 + - ubtu24cis_firewall_package == "iptables" + - ubtu24cis_ipv4_required + tags: + - level1-server + - level1-workstation + - patch + - rule_4.4.2.2 + - NIST800-53R5_CA-9 + - NIST800-53R5_SC-7 + - iptables + block: + - name: "4.4.2.2 | PATCH | Ensure iptables loopback traffic is configured | INPUT loopback ACCEPT" + ansible.builtin.iptables: + action: append + chain: INPUT + in_interface: lo + jump: ACCEPT + notify: Iptables persistent + + - name: "4.4.2.2 | PATCH | Ensure iptables loopback traffic is configured | OUTPUT loopback ACCEPT" + ansible.builtin.iptables: + action: append + chain: OUTPUT + out_interface: lo + jump: ACCEPT + notify: Iptables persistent + + - name: "4.4.2.2 | PATCH | Ensure iptables loopback traffic is configured | OUTPUT loopback ACCEPT" + ansible.builtin.iptables: + action: append + chain: INPUT + source: 127.0.0.0/8 
+ jump: DROP + notify: Iptables persistent + +- name: "4.4.2.3 | PATCH | Ensure iptables outbound and established connections are configured" + when: + - ubtu24cis_rule_4_4_2_3 + - ubtu24cis_firewall_package == "iptables" + - ubtu24cis_ipv4_required + tags: + - level1-server + - level1-workstation + - patch + - rule_4.4.2.3 + - NIST800-53R5_CA-9 + - NIST800-53R5_SC-7 + - iptables + ansible.builtin.iptables: + action: append + chain: '{{ item.chain }}' + protocol: '{{ item.protocol }}' + match: state + ctstate: '{{ item.ctstate }}' + jump: ACCEPT + notify: Iptables persistent + with_items: + - { chain: OUTPUT, protocol: tcp, ctstate: 'NEW,ESTABLISHED' } + - { chain: OUTPUT, protocol: udp, ctstate: 'NEW,ESTABLISHED' } + - { chain: OUTPUT, protocol: icmp, ctstate: 'NEW,ESTABLISHED' } + - { chain: INPUT, protocol: tcp, ctstate: 'ESTABLISHED' } + - { chain: INPUT, protocol: udp, ctstate: 'ESTABLISHED' } + - { chain: INPUT, protocol: icmp, ctstate: 'ESTABLISHED' } + +- name: "4.4.2.4 | AUDIT | Ensure iptables firewall rules exist for all open ports" + when: + - ubtu24cis_rule_4_4_2_4 + - ubtu24cis_firewall_package == "iptables" + - ubtu24cis_ipv4_required + tags: + - level1-server + - level1-workstation + - audit + - rule_4.4.2.4 + - NIST800-53R5_CA-9 + - NIST800-53R5_SC-7 + - iptables + vars: + warn_control_id: '4.4.2.4' + block: + - name: "4.4.2.4 | AUDIT | Ensure iptables firewall rules exist for all open ports | Get list of open ports" + ansible.builtin.shell: ss -4tuln + changed_when: false + failed_when: false + check_mode: false + register: ubtu24cis_4_4_1_4_open_ports + + - name: "4.4.2.4 | AUDIT | Ensure iptables firewall rules exist for all open ports | Get list of rules" + ansible.builtin.shell: iptables -L INPUT -v -n + changed_when: false + failed_when: false + check_mode: false + register: ubtu24cis_4_4_2_4_current_rules + + - name: "4.4.2.4 | AUDIT | Ensure iptables firewall rules exist for all open ports | Warn about settings" + ansible.builtin.debug: + 
msg: + - "Warning!! Below is the list the open ports and current rules" + - "Please create a rule for any open port that does not have a current rule" + - "Open Ports:" + - "{{ ubtu24cis_4_4_2_4_open_ports.stdout_lines }}" + - "Current Rules:" + - "{{ ubtu24cis_4_4_2_4_current_rules.stdout_lines }}" + + - name: "4.4.2.4 | AUDIT | Ensure iptables firewall rules exist for all open ports | Set warning count" + ansible.builtin.import_tasks: + file: warning_facts.yml + +# --------------- +# --------------- +# This is not a control however using the iptables module only writes to memory +# if a reboot occurs that means changes can revert. This task will make the +# above iptables settings permanent +# --------------- +# --------------- +# - name: "Make IPTables persistent | Not a control" +# block: +# - name: "Make IPTables persistent | Install iptables-persistent" +# ansible.builtin.package: +# name: iptables-persistent +# state: present + +# - name: "Make IPTables persistent | Save to persistent files" +# ansible.builtin.shell: bash -c "iptables-save > /etc/iptables/rules.v4" +# changed_when: ubtu24cis_iptables_save.rc == 0 +# failed_when: ubtu24cis_iptables_save.rc > 0 +# register: ubtu24cis_iptables_save +# when: +# - ubtu24cis_firewall_package == "iptables" +# - ubtu24cis_save_iptables_cis_rules +# - ubtu24cis_rule_4_4_2_1 or +# ubtu24cis_rule_4_4_2_2 or +# ubtu24cis_rule_4_4_2_3 or +# ubtu24cis_rule_4_4_2_4 diff --git a/tasks/section_4/cis_4.4.3.x.yml b/tasks/section_4/cis_4.4.3.x.yml new file mode 100644 index 0000000..b963698 --- /dev/null +++ b/tasks/section_4/cis_4.4.3.x.yml @@ -0,0 +1,185 @@ +--- + +- name: "4.4.3.1 | PATCH | Ensure ip6tables default deny firewall policy" + when: + - ubtu24cis_rule_4_4_3_1 + tags: + - level1-server + - level1-workstationå + - patch + - rule_4.4.3.1 + - NIST800-53R5_CA-9 + - NIST800-53R5_SC-7 + - ip6tables + block: + - name: "4.4.3.1 | PATCH | Ensure ip6tables default deny firewall policy | Configure SSH to be allowed out" + 
ansible.builtin.iptables: + chain: OUTPUT + protocol: tcp + source_port: 22 + jump: ACCEPT + ctstate: 'NEW,ESTABLISHED' + ip_version: ipv6 + notify: Ip6tables persistent + + - name: "4.4.3.1 | PATCH | Ensure ip6tables default deny firewall policy | Enable apt traffic" + ansible.builtin.iptables: + chain: INPUT + ctstate: 'ESTABLISHED' + jump: ACCEPT + ip_version: ipv6 + notify: Ip6tables persistent + + - name: "4.4.3.1 | PATCH | Ensure ip6tables default deny firewall policy | Set drop items" + ansible.builtin.iptables: + policy: DROP + chain: "{{ item }}" + ip_version: ipv6 + notify: Ip6tables persistent + loop: + - INPUT + - FORWARD + - OUTPUT + +- name: "4.4.3.2 | PATCH | Ensure ip6tables loopback traffic is configured" + when: + - ubtu24cis_rule_4_4_3_2 + - not ubtu24cis_ipv4_required + tags: + - level1-server + - level1-workstation + - patch + - rule_4.4.3.2 + - NIST800-53R5_CA-9 + - NIST800-53R5_SC-7 + - ip6tables + block: + - name: "4.4.3.2 | PATCH | Ensure ip6tables loopback traffic is configured | INPUT loopback ACCEPT" + ansible.builtin.iptables: + action: append + chain: INPUT + in_interface: lo + jump: ACCEPT + ip_version: ipv6 + notify: Ip6tables persistent + + - name: "4.4.3.2 | PATCH | Ensure ip6tables loopback traffic is configured | OUTPUT loopback ACCEPT" + ansible.builtin.iptables: + action: append + chain: OUTPUT + out_interface: lo + jump: ACCEPT + ip_version: ipv6 + notify: Ip6tables persistent + + - name: "4.4.3.2 | PATCH | Ensure ip6tables loopback traffic is configured | INPUT loopback drop" + ansible.builtin.iptables: + action: append + chain: INPUT + source: ::1 + jump: DROP + ip_version: ipv6 + notify: Ip6tables persistent + +- name: "4.4.3.3 | PATCH | Ensure ip6tables outbound and established connections are configured" + when: + - ubtu24cis_rule_4_4_3_3 + - not ubtu24cis_ipv4_required + tags: + - level1-server + - level1-workstation + - patch + - rule_4.4.3.3 + - NIST800-53R5_CA-9 + - NIST800-53R5_SC-7 + - ip6tables + 
ansible.builtin.iptables: + action: append + chain: '{{ item.chain }}' + protocol: '{{ item.protocol }}' + match: state + ctstate: '{{ item.ctstate }}' + jump: ACCEPT + ip_version: ipv6 + notify: Ip6tables persistent + loop: + - { chain: OUTPUT, protocol: tcp, ctstate: 'NEW,ESTABLISHED' } + - { chain: OUTPUT, protocol: udp, ctstate: 'NEW,ESTABLISHED' } + - { chain: OUTPUT, protocol: icmp, ctstate: 'NEW,ESTABLISHED' } + - { chain: INPUT, protocol: tcp, ctstate: 'ESTABLISHED' } + - { chain: INPUT, protocol: udp, ctstate: 'ESTABLISHED' } + - { chain: INPUT, protocol: icmp, ctstate: 'ESTABLISHED' } + +- name: "4.4.3.4 | AUDIT | Ensure ip6tables firewall rules exist for all open ports" + when: + - ubtu24cis_rule_4_4_3_4 + - not ubtu24cis_ipv4_required + tags: + - level1-server + - level1-workstation + - audit + - rule_4.4.3.4 + - NIST800-53R5_CA-9 + - NIST800-53R5_SC-7 + - ip6tables + vars: + warn_control_id: '4.4.3.4' + block: + - name: "4.4.3.4 | AUDIT | Ensure ip6tables firewall rules exist for all open ports | Get list of open ports" + ansible.builtin.shell: ss -6tuln + changed_when: false + failed_when: false + check_mode: false + register: ubtu24cis_4_4_3_4_open_ports + + - name: "4.4.3.4 | AUDIT | Ensure ip6tables firewall rules exist for all open ports | Get list of rules" + ansible.builtin.shell: ip6tables -L INPUT -v -n + changed_when: false + failed_when: false + check_mode: false + register: ubtu24cis_4_4_3_4_current_rules + + - name: "4.4.3.4 | AUDIT | Ensure ip6tables firewall rules exist for all open ports | Warn about settings" + ansible.builtin.debug: + msg: + - "Warning!! 
Below is the list the open ports and current rules" + - "Please create a rule for any open port that does not have a current rule" + - "Open Ports:" + - "{{ ubtu24cis_4_4_3_4_open_ports.stdout_lines }}" + - "Current Rules:" + - "{{ ubtu24cis_4_4_3_4_current_rules.stdout_lines }}" + + - name: "4.4.3.4 | AUDIT | Ensure ip6tables firewall rules exist for all open ports | Set warning count" + ansible.builtin.import_tasks: + file: warning_facts.yml + +# --------------- +# --------------- +# This is not a control however using the ip6tables module only writes to memory +# if a reboot occurs that means changes can revert. This task will make the +# above ip6tables settings permanent +# --------------- +# --------------- +# via handler +# - name: "Make IP6Tables persistent | Not a control" +# block: +# - name: "Make IP6Tables persistent | Install iptables-persistent" +# ansible.builtin.package: +# name: iptables-persistent +# state: present +# when: "'iptables-persistent' not in ansible_facts.packages" + +# - name: "Make IP6Tables persistent | Save to persistent files" +# ansible.builtin.shell: bash -c "ip6tables-save > /etc/iptables/rules.v6" +# changed_when: ubtu24cis_ip6tables_save.rc == 0 +# failed_when: ubtu24cis_ip6tables_save.rc > 0 +# register: ubtu24cis_ip6tables_save +# when: +# - ubtu24cis_firewall_package == "iptables" +# - ubtu24cis_ipv6_required +# - not ubtu24cis_ipv4_required +# - ubtu24cis_save_iptables_cis_rules +# - ubtu24cis_rule_4_4_1_1 or +# ubtu24cis_rule_4_4_1_2 or +# ubtu24cis_rule_4_4_1_3 or +# ubtu24cis_rule_4_4_1_4 diff --git a/tasks/section_4/main.yml b/tasks/section_4/main.yml new file mode 100644 index 0000000..c8ba0ea --- /dev/null +++ b/tasks/section_4/main.yml @@ -0,0 +1,32 @@ +--- + +- name: "SECTION | 4.1 | Single firewall installed" + ansible.builtin.import_tasks: + file: cis_4.1.1.yml + +- name: "SECTION | 4.2 | Configure UnComplicatedFirewall" + when: ubtu24cis_firewall_package == "ufw" + ansible.builtin.import_tasks: + file: 
cis_4.2.x.yml + +- name: "SECTION | 4.3 | Configure nftables software" + when: ubtu24cis_firewall_package == "nftables" + ansible.builtin.import_tasks: + file: cis_4.3.x.yml + +- name: "SECTION | 4.4.1.x | Configure iptables software" + when: ubtu24cis_firewall_package == "nftables" + ansible.builtin.import_tasks: + file: cis_4.4.1.x.yml + +- name: "SECTION | 4.4.2.x | Configure ipv4 iptables" + when: ubtu24cis_firewall_package == "iptables" + ansible.builtin.import_tasks: + file: cis_4.4.2.x.yml + +- name: "SECTION | 4.4.3.x | Configure ipv6 iptables" + when: + - ubtu24cis_firewall_package == "iptables" + - ubtu24cis_ipv6_required + ansible.builtin.import_tasks: + file: cis_4.4.3.x.yml diff --git a/tasks/section_5/cis_5.1.x.yml b/tasks/section_5/cis_5.1.x.yml new file mode 100644 index 0000000..18a7eef --- /dev/null +++ b/tasks/section_5/cis_5.1.x.yml @@ -0,0 +1,508 @@ +--- + +- name: "5.1.1 | PATCH | Ensure permissions on /etc/ssh/sshd_config are configured" + when: + - ubtu24cis_rule_5_1_1 + tags: + - level1-server + - level1-workstation + - patch + - rule_5.1.1 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + - sshd + block: + - name: "5.1.1 | PATCH | Ensure permissions on /etc/ssh/sshd_config are configured" + ansible.builtin.file: + path: /etc/ssh/sshd_config + owner: root + group: root + mode: 'u-x,go-rwx' + + - name: "5.1.1 | PATCH | Ensure permissions on /etc/ssh/sshd_config are configured | find conf files" + ansible.builtin.find: + paths: /etc/ssh/sshd_config.d + file_type: file + patterns: '*.conf' + register: discovered_sshd_confs + + - name: "5.1.1 | PATCH | Ensure permissions on /etc/ssh/sshd_config are configured" + ansible.builtin.file: + path: "{{ item.path }}" + recurse: false + owner: root + group: root + mode: 'u-x,go-rwx' + loop: "{{ discovered_sshd_confs.files }}" + +- name: "5.1.2 | PATCH | Ensure permissions on SSH private host key files are configured" + when: + - ubtu24cis_rule_5_1_2 + tags: + - level1-server + - level1-workstation + - patch 
+ - rule_5.1.2 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + - sshd + block: + - name: "5.1.2 | AUDIT | Ensure permissions on SSH private host key files are configured | Find ssh_host private keys" + ansible.builtin.find: + paths: /etc/ssh + patterns: 'ssh_host_*_key' + register: ubtu24cis_5_1_2_ssh_host_priv_keys + + - name: "5.1.2 | PATCH | Ensure permissions on SSH private host key files are configured | Set permissions" + ansible.builtin.file: + path: "{{ item.path }}" + owner: root + group: root + mode: 'u-x,go-rwx' + with_items: + - "{{ ubtu24cis_5_1_2_ssh_host_priv_keys.files }}" + loop_control: + label: "{{ item.path }}" + +- name: "5.1.3 | PATCH | Ensure permissions on SSH public host key files are configured" + when: + - ubtu24cis_rule_5_1_3 + tags: + - level1-server + - level1-workstation + - patch + - rule_5.1.3 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + - sshd + block: + - name: "5.1.3 | AUDIT | Ensure permissions on SSH public host key files are configured | Find ssh_host public keys" + ansible.builtin.find: + paths: /etc/ssh + patterns: 'ssh_host_*_key.pub' + register: ubtu24cis_5_1_3_ssh_host_pub_keys + + - name: "5.1.3 | PATCH | Ensure permissions on SSH public host key files are configured | Set permissions" + ansible.builtin.file: + path: "{{ item.path }}" + owner: root + group: root + mode: '0644' + with_items: + - "{{ ubtu24cis_5_1_3_ssh_host_pub_keys.files }}" + loop_control: + label: "{{ item.path }}" + +- name: "5.1.4 | PATCH | Ensure sshd access is configured" + when: + - ubtu24cis_rule_5_1_4 + tags: + - level1-server + - level1-workstation + - patch + - rule_5.1.4 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + - sshd + block: + - name: "5.1.4 | PATCH | Ensure sshd access is configured | Add allowed users" + when: "ubtu24cis_sshd['allow_users']| default('') | length > 0 " + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: '(?i)^(#|)\s*AllowUsers' + line: 'AllowUsers {{ ubtu24cis_sshd.allow_users }}' + validate: 'sshd -t 
-f %s' + notify: Restart sshd + + - name: "5.1.4 | PATCH | Ensure sshd access is configured | Add allowed groups" + when: "ubtu24cis_sshd['allow_groups']| default('') | length > 0" + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: '(?i)^(#|)\s*AllowGroups' + line: 'AllowGroups {{ ubtu24cis_sshd.allow_groups }}' + validate: 'sshd -t -f %s' + notify: Restart sshd + + - name: "5.1.4 | PATCH | Ensure sshd access is configured | Add deny users" + when: "ubtu24cis_sshd['deny_users']| default('') | length > 0" + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: '(?i)^(#|)\s*DenyUsers' + line: 'DenyUsers {{ ubtu24cis_sshd.deny_users }}' + validate: 'sshd -t -f %s' + notify: Restart sshd + + - name: "5.1.4 | PATCH | Ensure sshd access is configured | Add deny groups" + when: "ubtu24cis_sshd['deny_groups']| default('') | length > 0" + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: '^DenyGroups|^#DenyGroups' + line: 'DenyGroups {{ ubtu24cis_sshd.deny_groups }}' + validate: 'sshd -t -f %s' + notify: Restart sshd + +- name: "5.1.5| PATCH | Ensure sshd Banner is configured" + when: + - ubtu24cis_rule_5_1_5 + tags: + - level1-server + - level1-workstation + - patch + - rule_5.1.5 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - sshd + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: ^Banner + line: Banner /etc/issue.net + insertafter: '^# no default banner path' + validate: 'sshd -t -f %s' + notify: Restart sshd + +- name: "5.1.6 | PATCH | Ensure sshd Ciphers are configured" + when: + - ubtu24cis_rule_5_1_6 + tags: + - level1-server + - level1-workstation + - patch + - rule_5.1.6 + - NIST800-53R5_SC-8 + - sshd + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: ^Ciphers + line: "Ciphers {{ ubtu24cis_sshd.ciphers | join(',') }}" + insertafter: '^# Ciphers and keying' + validate: 'sshd -t -f %s' + notify: Restart sshd + +- name: 
"5.1.7 | PATCH | Ensure sshd ClientAliveInterval and ClientAliveCountMax are configured" + when: + - ubtu24cis_rule_5_1_7 + tags: + - level1-server + - level1-workstation + - patch + - rule_5.1.7 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - sshd + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: "{{ item.regexp }}" + line: "{{ item.line }}" + validate: 'sshd -t -f %s' + with_items: + - { regexp: '^ClientAliveInterval', line: 'ClientAliveInterval {{ ubtu24cis_sshd.client_alive_interval | default(ubtu24cis_sshd_default_client_alive_interval) }}' } + - { regexp: '^ClientAliveCountMax', line: 'ClientAliveCountMax {{ ubtu24cis_sshd.client_alive_count_max | default(ubtu24cis_sshd_default_client_alive_count_max) }}' } + notify: Restart sshd + +- name: "5.1.8 | PATCH | Ensure sshd DisableForwarding is enabled" + when: + - ubtu24cis_rule_5_1_8 + tags: + - level2-server + - level1-workstation + - patch + - rule_5.1.8 + - NIST800-53R5_CM-7 + - sshd + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: (?i)^(#|)\s*DisableForwarding + line: 'DisableForwarding yes' + validate: 'sshd -t -f %s' + notify: Restart sshd + +- name: "5.1.9 | PATCH | Ensure sshd GSSAPIAuthentication is disabled" + when: + - ubtu24cis_rule_5_1_9 + tags: + - level2-server + - level1-workstation + - patch + - rule_5.1.9 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - sshd + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: (?i)^(#|)\s*GSSAPIAuthentication + line: 'GSSAPIAuthentication no' + validate: 'sshd -t -f %s' + notify: Restart sshd + +- name: "5.1.10 | PATCH | Ensure SSH HostbasedAuthentication is disabled" + when: + - ubtu24cis_rule_5_1_10 + tags: + - level1-server + - level1-workstation + - patch + - rule_5.1.10 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - 
NIST800-53R5_IA-5 + - sshd + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: (?i)^(#|)\s*HostbasedAuthentication + line: 'HostbasedAuthentication no' + validate: 'sshd -t -f %s' + notify: Restart sshd + +- name: "5.1.11 | PATCH | Ensure SSH IgnoreRhosts is enabled" + when: + - ubtu24cis_rule_5_1_11 + tags: + - level1-server + - level1-workstation + - patch + - rule_5.1.11 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - sshd + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: (?i)^(#|)\s*IgnoreRhosts + line: 'IgnoreRhosts yes' + validate: 'sshd -t -f %s' + notify: Restart sshd + +- name: "5.1.12 | PATCH | Ensure only strong Key Exchange algorithms are used" + when: + - ubtu24cis_rule_5_1_12 + tags: + - level1-server + - level1-workstation + - patch + - rule_5.1.12 + - NIST800-53R5_SC-8 + - sshd + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: (?i)^(#|)\s*KexAlgorithms + line: "KexAlgorithms {{ ubtu24cis_sshd.kex_algorithms | default(ubtu24cis_sshd_default_kex_algorithms) | join(',') }}" + insertafter: '^# Ciphers and keying' + validate: 'sshd -t -f %s' + notify: Restart sshd + +- name: "5.1.13 | PATCH | Ensure SSH LoginGraceTime is configured" + when: + - ubtu24cis_rule_5_1_13 + tags: + - level1-server + - level1-workstation + - patch + - rule_5.1.13 + - NIST800-53R5_CM-6 + - sshd + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: (?i)^(#|)\s*LoginGraceTime + line: 'LoginGraceTime {{ ubtu24cis_sshd.login_grace_time | default(ubtu24cis_sshd_default_login_grace_time) }}' + insertafter: '^# Authentication' + validate: 'sshd -t -f %s' + notify: Restart sshd + +- name: "5.1.14 | PATCH | Ensure SSH LogLevel is configured" + when: + - ubtu24cis_rule_5_1_14 + tags: + - level1-server + - level1-workstation + - patch + - rule_5.1.14 + - NIST800-53R5_AU-2 + - NIST800-53R5_AU-12 + - NIST800-53R5_SI-5 + - sshd + ansible.builtin.lineinfile: + 
path: /etc/ssh/sshd_config + regexp: (?i)^(#|)\s*LogLevel + line: 'LogLevel {{ ubtu24cis_sshd.log_level | default(ubtu24cis_sshd_default_log_level) }}' + insertafter: '^# Logging' + validate: 'sshd -t -f %s' + notify: Restart sshd + +- name: "5.1.15 | PATCH | Ensure only strong MAC algorithms are used" + when: + - ubtu24cis_rule_5_1_15 + tags: + - level1-server + - level1-workstation + - patch + - rule_5.1.15 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - sshd + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: (?i)^(#|)\s*MACs + line: "MACs {{ ubtu24cis_sshd.macs | default(ubtu24cis_sshd_default_macs) | join(',') }}" + insertafter: '^# Ciphers and keying' + validate: 'sshd -t -f %s' + notify: Restart sshd + +- name: "5.1.16 | PATCH | Ensure SSH MaxAuthTries is set to 4 or less" + when: + - ubtu24cis_rule_5_1_16 + tags: + - level1-server + - level1-workstation + - patch + - rule_5.1.16 + - NIST800-53R5_AU-3 + - sshd + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: (?i)^(#|)\s*MaxAuthTries + line: 'MaxAuthTries {{ ubtu24cis_sshd.max_auth_tries | default(ubtu24cis_sshd_default_max_auth_tries) }}' + insertafter: '^# Authentication' + validate: 'sshd -t -f %s' + notify: Restart sshd + +- name: "5.1.17 | PATCH | Ensure sshd MaxSessions is configured" + when: + - ubtu24cis_rule_5_1_17 + tags: + - level1-server + - level1-workstation + - patch + - rule_5.1.17 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - sshd + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: (?i)^(#|)\s*MaxSessions + line: 'MaxSessions {{ ubtu24cis_sshd.max_sessions | default(ubtu24cis_sshd_default_max_sessions) }}' + insertafter: '^# Authentication' + validate: 'sshd -t -f %s' + notify: Restart sshd + +- name: "5.1.18 | PATCH | Ensure SSH MaxStartups is configured" + when: + - ubtu24cis_rule_5_1_18 + tags: + - 
level1-server + - level1-workstation + - patch + - rule_5.1.18 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - sshd + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: (?i)^(#|)\s*MaxStartups + line: 'MaxStartups 10:30:60' + validate: 'sshd -t -f %s' + notify: Restart sshd + +- name: "5.1.19 | PATCH | Ensure SSH PermitEmptyPasswords is disabled" + when: + - ubtu24cis_rule_5_1_19 + tags: + - level1-server + - level1-workstation + - patch + - rule_5.1.19 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - sshd + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: (?i)^(#|)\s*PermitEmptyPasswords + line: 'PermitEmptyPasswords no' + insertafter: '# To disable tunneled clear text passwords' + validate: 'sshd -t -f %s' + notify: Restart sshd + +- name: "5.1.20 | PATCH | Ensure sshd PermitRootLogin is disabled" + when: + - ubtu24cis_rule_5_1_20 + tags: + - level1-server + - level1-workstation + - patch + - rule_5.1.20 + - NIST800-53R5_AC-6 + - sshd + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: (?i)^(#|)\s*PermitRootLogin + line: 'PermitRootLogin no' + validate: 'sshd -t -f %s' + notify: Restart sshd + +- name: "5.1.21 | PATCH | Ensure SSH PermitUserEnvironment is disabled" + when: + - ubtu24cis_rule_5_1_21 + tags: + - level1-server + - level1-workstation + - patch + - rule_5.1.21 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - sshd + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: (?i)^(#|)\s*PermitUserEnvironment + line: 'PermitUserEnvironment no' + validate: 'sshd -t -f %s' + notify: Restart sshd + +- name: "5.1.22 | PATCH | Ensure sshd UsePAM is enabled" + when: + - ubtu24cis_rule_5_1_22 + tags: + - level1-server + - level1-workstation + - patch + - rule_5.1.22 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + 
- NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - sshd + - pam + ansible.builtin.lineinfile: + path: /etc/ssh/sshd_config + regexp: (?i)^(#|)\s*UsePAM + line: 'UsePAM yes' + insertafter: '^# and ChallengeResponseAuthentication' + validate: 'sshd -t -f %s' + notify: Restart sshd diff --git a/tasks/section_5/cis_5.2.x.yml b/tasks/section_5/cis_5.2.x.yml new file mode 100644 index 0000000..224c28e --- /dev/null +++ b/tasks/section_5/cis_5.2.x.yml @@ -0,0 +1,148 @@ +--- + +- name: "5.2.1 | PATCH | Ensure sudo is installed" + when: + - ubtu24cis_rule_5_2_1 + tags: + - level1-server + - level1-workstation + - patch + - rule_5.2.1 + - NIST800-53R5_AC-6 + - sudo + ansible.builtin.package: + name: "{{ ubtu24cis_sudo_package }}" + state: present + +- name: "5.2.2 | PATCH | Ensure sudo commands use pty" + when: + - ubtu24cis_rule_5_2_2 + tags: + - level1-server + - level1-workstation + - patch + - rule_5.2.2 + - NIST800-53R5_AC-6 + - sudo + ansible.builtin.lineinfile: + path: /etc/sudoers + regexp: '^Defaults\s+use_' + line: 'Defaults use_pty' + insertafter: '^\s*Defaults' + +- name: "5.2.3 | PATCH | Ensure sudo log file exists" + when: + - ubtu24cis_rule_5_2_3 + tags: + - level1-server + - level1-workstation + - patch + - rule_5.2.3 + - NIST800-53R5_AU-3 + - NIST800-53R5_AU-12 + - sudo + ansible.builtin.lineinfile: + path: /etc/sudoers + regexp: '^Defaults\s+logfile' + line: 'Defaults logfile="{{ ubtu24cis_sudo_logfile }}"' + insertafter: '^\s*Defaults' + +- name: "5.2.4 | PATCH | Ensure users must provide password for escalation" + when: + - ubtu24cis_rule_5_2_4 + tags: + - level2-server + - level2-workstation + - patch + - sudo + - rule_5.2.4 + - NIST800-53R5_AC-6 + ansible.builtin.replace: + path: "{{ item }}" + regexp: '^([^#|{% if system_is_ec2 %}ec2-user{% endif %}].*)NOPASSWD(.*)' + replace: '\1PASSWD\2' + validate: '/usr/sbin/visudo -cf %s' + loop: "{{ prelim_sudoers_files.stdout_lines }}" + +- name: "5.2.5 | PATCH | Ensure re-authentication for 
privilege escalation is not disabled globally" + when: + - ubtu24cis_rule_5_2_5 + tags: + - level1-server + - level1-workstation + - patch + - sudo + - rule_5.2.5 + - NIST800-53R5_AC-6 + ansible.builtin.replace: + path: "{{ item }}" + regexp: '^([^#].*)!authenticate(.*)' + replace: '\1authenticate\2' + validate: '/usr/sbin/visudo -cf %s' + loop: "{{ prelim_sudoers_files.stdout_lines }}" + +- name: "5.2.6 | PATCH | Ensure sudo authentication timeout is configured correctly" + when: + - ubtu24cis_rule_5_2_6 + tags: + - level1-server + - level1-workstation + - patch + - sudo + - rule_5.2.6 + - NIST800-53R5_AC-6 + block: + - name: "5.2.6 | AUDIT | Ensure sudo authentication timeout is configured correctly | Get files with timeout set" + ansible.builtin.shell: grep -is 'timestamp_timeout' /etc/sudoers /etc/sudoers.d/* | cut -d":" -f1 | uniq | sort + changed_when: false + failed_when: false + register: ubtu24cis_5_2_6_timeout_files + + - name: "5.2.6 | PATCH | Ensure sudo authentication timeout is configured correctly | Set value if no results" + when: ubtu24cis_5_2_6_timeout_files.stdout | length == 0 + ansible.builtin.lineinfile: + path: /etc/sudoers + regexp: '^\s*Defaults\s+timestamp_timeout=' + line: "Defaults timestamp_timeout={{ ubtu24cis_sudo_timestamp_timeout }}" + insertafter: '^\s*Defaults' + validate: '/usr/sbin/visudo -cf %s' + + - name: "5.2.6 | PATCH | Ensure sudo authentication timeout is configured correctly | Set value if has results" + when: ubtu24cis_5_2_6_timeout_files.stdout | length > 0 + ansible.builtin.replace: + path: "{{ item }}" + regexp: 'timestamp_timeout=(\d+)' + replace: "timestamp_timeout={{ ubtu24cis_sudo_timestamp_timeout }}" + validate: '/usr/sbin/visudo -cf %s' + loop: "{{ ubtu24cis_5_2_6_timeout_files.stdout_lines }}" + +- name: "5.2.7 | PATCH | Ensure access to the su command is restricted" + when: + - ubtu24cis_rule_5_2_7 + tags: + - level1-server + - level1-workstation + - patch + - sudo + - rule_5.2.7 + - NIST800-53R5_AC-3 + - 
NIST800-53R5_MP-2 + block: + - name: "5.2.7 | PATCH | Ensure access to the su command is restricted | Ensure sugroup exists" + ansible.builtin.group: + name: "{{ ubtu24cis_sugroup }}" + state: present + register: ubtu24cis_5_2_7_sugroup + + - name: "5.2.7 | PATCH | Ensure access to the su command is restricted | remove users from group" + ansible.builtin.lineinfile: + path: /etc/group + regexp: '^{{ ubtu24cis_sugroup }}(:.:.*:).*$' + line: '{{ ubtu24cis_sugroup }}\g<1>' + backrefs: true + + - name: "5.2.7 | PATCH | Ensure access to the su command is restricted | Setting pam_wheel to use_uid" + ansible.builtin.lineinfile: + path: /etc/pam.d/su + regexp: '^(#)?auth\s+required\s+pam_wheel\.so' + line: 'auth required pam_wheel.so use_uid group={{ ubtu24cis_sugroup }}' diff --git a/tasks/section_5/cis_5.3.1.x.yml b/tasks/section_5/cis_5.3.1.x.yml new file mode 100644 index 0000000..5f87935 --- /dev/null +++ b/tasks/section_5/cis_5.3.1.x.yml @@ -0,0 +1,48 @@ +--- + +- name: "5.3.1.1 | PATCH | Ensure latest version of pam is installed" + when: + - ubtu24cis_rule_5_3_1_1 + - ansible_facts.packages['libpam-runtime'][0]['version'] is version('1.5.3-5', '<=') or + "'libpam-runtime' not in ansible_facts.packages" + tags: + - level1-server + - level1-workstation + - patch + - pam + - rule_5.3.1.1 + - NIST800-53R5_NA + ansible.builtin.package: + name: libpam-runtime + state: latest + +- name: "5.3.1.2 | PATCH | Ensure libpam-modules is installed" + when: + - ubtu24cis_rule_5_3_1_2 + - ansible_facts.packages['libpam-modules'][0]['version'] is version('1.5.3-5', '<=') or + "'libpam-modules' not in ansible_facts.packages" + tags: + - level1-server + - level1-workstation + - patch + - pam + - rule_5.3.1.2 + - NIST800-53R5_NA + ansible.builtin.package: + name: libpam-modules + state: latest + +- name: "5.3.1.3 | PATCH | Ensure libpam-pwquality is installed" + when: + - ubtu24cis_rule_5_3_1_3 + - "'libpam-pwquality' not in ansible_facts.packages" + tags: + - level1-server + - 
level1-workstation + - patch + - pam + - rule_5.3.1.3 + - NIST800-53R5_NA + ansible.builtin.package: + name: libpam-pwquality + state: latest diff --git a/tasks/section_5/cis_5.3.2.x.yml b/tasks/section_5/cis_5.3.2.x.yml new file mode 100644 index 0000000..89a431f --- /dev/null +++ b/tasks/section_5/cis_5.3.2.x.yml @@ -0,0 +1,92 @@ +--- + +- name: "5.3.2.1 | PATCH | Ensure pam_unix module is enabled" + when: + - ubtu24cis_rule_5_3_2_1 + - ubtu24cis_disruption_high + - ubtu24cis_pam_auth_unix + - ubtu24cis_pam_create_pamunix_file + tags: + - level1-server + - level1-workstation + - patch + - rule_5.3.2.1 + - NIST800-53R5_IA-5 + - pam_auth_update + - pam_unix + ansible.builtin.template: + src: "{{ ubtu24cis_pam_confd_dir }}{{ ubtu24cis_pam_pwunix_file }}.j2" + dest: "/{{ ubtu24cis_pam_confd_dir }}{{ ubtu24cis_pam_pwunix_file }}" + owner: root + group: root + mode: '0600' + notify: Pam_auth_update_pwunix + +- name: "5.3.2.2 | PATCH | Ensure pam_faillock module is enabled" + when: + - ubtu24cis_rule_5_3_2_2 + - ubtu24cis_disruption_high + - ubtu24cis_pam_auth_faillock + - ubtu24cis_pam_create_faillock_files + tags: + - level1-server + - level1-workstation + - patch + - rule_5.3.2.2 + - NIST800-53R5_NA + - pam_auth_update + - pam_faillock + ansible.builtin.template: + src: "{{ ubtu24cis_pam_confd_dir }}{{ item }}.j2" + dest: "/{{ ubtu24cis_pam_confd_dir }}{{ item }}" + owner: root + group: root + mode: '0600' + loop: + - "{{ ubtu24cis_pam_faillock_file }}" + - "{{ ubtu24cis_pam_faillock_notify_file }}" + notify: + - Pam_auth_update_pwfaillock + - Pam_auth_update_pwfaillock_notify + +- name: "5.3.2.3 | PATCH | Ensure pam_pwquality module is enabled" + when: + - ubtu24cis_rule_5_3_2_3 + - ubtu24cis_disruption_high + - ubtu24cis_pam_create_pwquality_files + tags: + - level1-server + - level1-workstation + - patch + - rule_5.3.2.3 + - NIST800-53R5_NA + - pam_auth_update + - pam_quality + ansible.builtin.template: + src: "{{ ubtu24cis_pam_confd_dir }}{{ 
ubtu24cis_pam_pwquality_file }}.j2" + dest: "/{{ ubtu24cis_pam_confd_dir }}{{ ubtu24cis_pam_pwquality_file }}" + owner: root + group: root + mode: '0600' + notify: Pam_auth_update_pwquality + +- name: "5.3.2.4 | PATCH | Ensure pam_pwhistory module is enabled" + when: + - ubtu24cis_rule_5_3_2_4 + - ubtu24cis_disruption_high + - ubtu24cis_pam_create_pwhistory_files + tags: + - level1-server + - level1-workstation + - patch + - rule_5.3.2.4 + - NIST800-53R5_NA + - pam_auth_update + - pam_history + ansible.builtin.template: + src: "{{ ubtu24cis_pam_confd_dir }}{{ ubtu24cis_pam_pwhistory_file }}.j2" + dest: "/{{ ubtu24cis_pam_confd_dir }}{{ ubtu24cis_pam_pwhistory_file }}" + owner: root + group: root + mode: '0600' + notify: Pam_auth_update_pwhistory diff --git a/tasks/section_5/cis_5.3.3.1.x.yml b/tasks/section_5/cis_5.3.3.1.x.yml new file mode 100644 index 0000000..de650f3 --- /dev/null +++ b/tasks/section_5/cis_5.3.3.1.x.yml @@ -0,0 +1,106 @@ +--- + +- name: "5.3.3.1.1 | PATCH | Ensure password failed attempts lockout is configured" + when: + - ubtu24cis_rule_5_3_3_1_1 + tags: + - level1-server + - level1-workstation + - patch + - rule_5.3.3.1.1 + - NIST800-53R5_NA + - pam + block: + - name: "5.3.3.1.1 | PATCH | Ensure password failed attempts lockout is configured | configure faillock.conf" + ansible.builtin.lineinfile: + path: /etc/security/faillock.conf + regexp: '^deny' + line: "deny = {{ ubtu24cis_faillock_deny }}" + insertafter: '^# end of pam-auth-update config' + create: true + + - name: "5.3.3.1.1 | AUDIT | Ensure password failed attempts lockout is configured | discover pam config with deny" + ansible.builtin.shell: grep -Pl -- '\bpam_faillock\.so\h+([^#\n\r]+\h+)?deny\b' /usr/share/pam-configs/* + register: ubtu24cis_faillock_deny_files + changed_when: false + failed_when: ubtu24cis_faillock_deny_files.rc not in [ 0, 1 ] + + - name: "5.3.3.1.1 | PATCH | Ensure password failed attempts lockout is configured | if exists remove deny from faillock line in 
pam-auth conf files" + when: ubtu24cis_faillock_deny_files.stdout | length > 0 + ansible.builtin.replace: + path: "{{ item }}" + regexp: '(.*pam_faillock\.so\s*)deny\s*=\s*\d+\b(.*)' + replace: \1\2 + with_fileglob: + - '/usr/share/pam-configs/*' + - '/etc/pam.d/*' + +- name: "5.3.3.1.2 | PATCH | Ensure password unlock time is configured" + when: + - ubtu24cis_rule_5_3_3_1_2 + tags: + - level1-server + - level1-workstation + - patch + - rule_5.3.3.1.2 + - NIST800-53R5_NA + - pam + block: + - name: "5.3.3.1.2 | PATCH | Ensure password unlock time is configured | configure faillock.conf" + ansible.builtin.lineinfile: + path: /etc/security/faillock.conf + regexp: '^unlock_time' + line: "unlock_time = {{ ubtu24cis_faillock_unlock_time }}" + insertafter: '^# end of pam-auth-update config' + create: true + + - name: "5.3.3.1.2 | AUDIT | Ensure password unlock time is configured | discover pam config with unlock_time" + ansible.builtin.shell: grep -Pl -- '\bpam_faillock\.so\h+([^#\n\r]+\h+)?unlock_time\b' /usr/share/pam-configs/* + register: ubtu24cis_faillock_unlock_files + changed_when: false + failed_when: ubtu24cis_faillock_unlock_files.rc not in [ 0, 1 ] + + - name: "5.3.3.1.2 | PATCH | Ensure password unlock time is configured | if exists remove unlock_time from faillock line in pam-auth conf files" + when: ubtu24cis_faillock_unlock_files.stdout | length > 0 + ansible.builtin.replace: + path: "{{ item }}" + regexp: '(.*pam_faillock\.so\s*)unlock_time\s*=\s*\d+\b(.*)' + replace: \1\2 + with_fileglob: + - '/usr/share/pam-configs/*' + - '/etc/pam.d/*' + +- name: "5.3.3.1.3 | PATCH | Ensure password failed attempts lockout includes root account" + when: + - ubtu24cis_rule_5_3_3_1_3 + tags: + - level2-server + - level2-workstation + - patch + - rule_5.3.3.1.3 + - NIST800-53R5_NA + - pam + block: + - name: "5.3.3.1.3 | PATCH | Ensure password failed attempts lockout includes root account | configure faillock.conf" + ansible.builtin.lineinfile: + path: 
/etc/security/faillock.conf + regexp: '^{{ ubtu24cis_pamroot_lock_option }}' + line: "{{ ubtu24cis_pamroot_lock_string }}" + insertafter: '^# end of pam-auth-update config' + create: true + + - name: "5.3.3.1.3 | AUDIT | Ensure password failed attempts lockout includes root account | discover pam config with unlock_time" + ansible.builtin.shell: grep -Pl -- '\bpam_faillock\.so\h+([^#\n\r]+\h+)?(even_deny_root\b|root_unlock_time\s*=\s*\d+\b)' /usr/share/pam-configs/* + register: ubtu24cis_faillock_rootlock_files + changed_when: false + failed_when: ubtu24cis_faillock_rootlock_files.rc not in [ 0, 1 ] + + - name: "5.3.3.1.3 | PATCH | Ensure password failed attempts lockout includes root account | if exists remove unlock_time from faillock line in pam-auth conf files" + when: ubtu24cis_faillock_rootlock_files.stdout | length > 0 + ansible.builtin.replace: + path: "{{ item }}" + regexp: '(.*pam_faillock\.so\s*)(even_deny_root\b|root_unlock_time\s*=\s*\d+\b)(.*)' + replace: \1\3 + with_fileglob: + - '/usr/share/pam-configs/*' + - '/etc/pam.d/*' diff --git a/tasks/section_5/cis_5.3.3.2.x.yml b/tasks/section_5/cis_5.3.3.2.x.yml new file mode 100644 index 0000000..b11d4dd --- /dev/null +++ b/tasks/section_5/cis_5.3.3.2.x.yml @@ -0,0 +1,235 @@ +--- + +- name: "5.3.3.2.1 | PATCH | Ensure password number of changed characters is configured" + when: + - ubtu24cis_rule_5_3_3_2_1 + tags: + - level1-server + - level1-workstation + - patch + - rule_5.3.3.2.1 + - NIST800-53R5_IA-5 + - pam + block: + - name: "5.3.3.2.1 | PATCH | Ensure password number of changed characters is configured | Remove difok from conf files except expected file" + when: + - item != ubtu24cis_passwd_difok_file + ansible.builtin.replace: + path: "{{ item }}" + regexp: 'difok\s*=\s*\d+\b' + replace: '' + with_fileglob: + - '/etc/security/pwquality.conf' + - '/etc/security/pwquality.conf.d/*.conf' + - '/etc/pam.d/common-password' + + - name: "5.3.3.2.1 | PATCH | Ensure password number of changed characters is 
configured | Ensure difok file exists" + ansible.builtin.template: + src: "{{ ubtu24cis_passwd_difok_file }}.j2" + dest: "/{{ ubtu24cis_passwd_difok_file }}" + owner: root + group: root + mode: '0600' + +- name: "5.3.3.2.2 | PATCH | Ensure minimum password length is configured" + when: + - ubtu24cis_rule_5_3_3_2_2 + tags: + - level1-server + - level1-workstation + - patch + - rule_5.3.3.2.2 + - NIST800-53R5_IA-5 + - pam + block: + - name: "5.3.3.2.2 | PATCH | Ensure minimum password length is configured | Remove minlen from conf files except expected file" + when: + - item != ubtu24cis_passwd_minlen_file + ansible.builtin.replace: + path: "{{ item }}" + regexp: 'minlen\s*=\s*\d+\b' + replace: '' + with_fileglob: + - '/etc/security/pwquality.conf' + - '/etc/security/pwquality.conf.d/*.conf' + - '/etc/pam.d/common-password' + + - name: "5.3.3.2.2 | PATCH | Ensure minimum password length is configured | Ensure minlen file exists" + ansible.builtin.template: + src: "{{ ubtu24cis_passwd_minlen_file }}.j2" + dest: "/{{ ubtu24cis_passwd_minlen_file }}" + owner: root + group: root + mode: '0600' + +- name: "5.3.3.2.3 | PATCH | Ensure password complexity is configured" + when: + - ubtu24cis_rule_5_3_3_2_3 + tags: + - level1-server + - level1-workstation + - patch + - rule_5.3.3.2.3 + - NIST800-53R5_IA-5 + - pam + block: + - name: "5.3.3.2.3 | PATCH | Ensure password complexity is configured | Remove pwd complex settings from conf files except expected file" + when: + - item != ubtu24cis_passwd_complex_file + ansible.builtin.replace: + path: "{{ item }}" + regexp: '(minclass|[dulo]credit)\s*=\s*(-\d|\d+)\b' + replace: '' + with_fileglob: + - '/etc/security/pwquality.conf' + - '/etc/security/pwquality.conf.d/*.conf' + - '/etc/pam.d/common-password' + + - name: "5.3.3.2.3 | PATCH | Ensure password complexity is configured | Ensure complexity file exists" + ansible.builtin.template: + src: "{{ ubtu24cis_passwd_complex_file }}.j2" + dest: "/{{ ubtu24cis_passwd_complex_file }}" + 
owner: root + group: root + mode: '0600' + +- name: "5.3.3.2.4 | PATCH | Ensure password same consecutive characters is configured" + when: + - ubtu24cis_rule_5_3_3_2_4 + tags: + - level1-server + - level1-workstation + - patch + - rule_5.3.3.2.4 + - NIST800-53R5_IA-5 + - pam + block: + - name: "5.3.3.2.4 | PATCH | Ensure password same consecutive characters is configured | Remove maxrepeat settings from conf files except expected file" + when: + - item != ubtu24cis_passwd_maxrepeat_file + ansible.builtin.replace: + path: "{{ item }}" + regexp: 'maxrepeat\s*=\s*\d+\b' + replace: '' + with_fileglob: + - '/etc/security/pwquality.conf' + - '/etc/security/pwquality.conf.d/*.conf' + - '/etc/pam.d/common-password' + + - name: "5.3.3.2.4 | PATCH | Ensure password same consecutive characters is configured | Ensure maxrepeat file exists" + ansible.builtin.template: + src: "{{ ubtu24cis_passwd_maxrepeat_file }}.j2" + dest: "/{{ ubtu24cis_passwd_maxrepeat_file }}" + owner: root + group: root + mode: '0600' + +- name: "5.3.3.2.5 | PATCH | Ensure password maximum sequential characters is is configured" + when: + - ubtu24cis_rule_5_3_3_2_5 + tags: + - level1-server + - level1-workstation + - patch + - rule_5.3.3.2.5 + - NIST800-53R5_IA-5 + - pam + block: + - name: "5.3.3.2.5 | PATCH | Ensure password maximum sequential characters is configured | Remove maxsequence settings from conf files except expected file" + when: + - item != ubtu24cis_passwd_maxsequence_file + ansible.builtin.replace: + path: "{{ item }}" + regexp: 'maxsequence\s*=\s*\d+\b' + replace: '' + with_fileglob: + - '/etc/security/pwquality.conf' + - '/etc/security/pwquality.conf.d/*.conf' + - '/etc/pam.d/common-password' + + - name: "5.3.3.2.5 | PATCH | Ensure password maximum sequential characters is configured | Ensure maxsequence file exists" + ansible.builtin.template: + src: "{{ ubtu24cis_passwd_maxsequence_file }}.j2" + dest: "/{{ ubtu24cis_passwd_maxsequence_file }}" + owner: root + group: root + mode: 
'0600' + +- name: "5.3.3.2.6 | PATCH | Ensure password dictionary check is enabled" + when: + - ubtu24cis_rule_5_3_3_2_6 + tags: + - level1-server + - level1-workstation + - patch + - rule_5.3.3.2.6 + - NIST800-53R5_IA-5 + - pam + block: + - name: "5.3.3.2.6 | PATCH | Ensure password dictionary check is enabled | Remove dictcheck settings from conf files except expected file" + when: + - item != ubtu24cis_passwd_dictcheck_file + ansible.builtin.replace: + path: "{{ item }}" + regexp: 'dictcheck\s*=\s*\d+\b' + replace: '' + with_fileglob: + - '/etc/security/pwquality.conf' + - '/etc/security/pwquality.conf.d/*.conf' + - '/etc/pam.d/common-password' + + - name: "5.3.3.2.6 | PATCH | Ensure password dictionary check is enabled | Ensure dictcheck file exists" + ansible.builtin.template: + src: "{{ ubtu24cis_passwd_dictcheck_file }}.j2" + dest: "/{{ ubtu24cis_passwd_dictcheck_file }}" + owner: root + group: root + mode: '0600' + +- name: "5.3.3.2.7 | PATCH | Ensure password quality checking is enforced" + when: + - ubtu24cis_rule_5_3_3_2_7 + tags: + - level1-server + - level1-workstation + - patch + - rule_5.3.3.2.7 + - NIST800-53R5_IA-5 + - pam + block: + - name: "5.3.3.2.7 | PATCH | Ensure password quality checking is enforced | Remove quality enforcement settings from conf files except expected file" + when: + - item != ubtu24cis_passwd_quality_enforce_file + ansible.builtin.replace: + path: "{{ item }}" + regexp: 'enforcing\s*=\s*\d+\b' + replace: '' + with_fileglob: + - '/etc/security/pwquality.conf' + - '/etc/security/pwquality.conf.d/*.conf' + - '/etc/pam.d/common-password' + + - name: "5.3.3.2.7 | PATCH | Ensure password quality checking is enforced | Ensure quality enforcement file exists" + ansible.builtin.template: + src: "{{ ubtu24cis_passwd_quality_enforce_file }}.j2" + dest: "/{{ ubtu24cis_passwd_quality_enforce_file }}" + owner: root + group: root + mode: '0600' + +- name: "5.3.3.2.8 | PATCH | Ensure password quality is enforced for the root user" + when: 
+ - ubtu24cis_rule_5_3_3_2_8 + tags: + - level1-server + - level1-workstation + - patch + - rule_5.3.3.2.8 + - NIST800-53R5_IA-5 + - pam + ansible.builtin.template: + src: "{{ ubtu24cis_passwd_quality_enforce_root_file }}.j2" + dest: "/{{ ubtu24cis_passwd_quality_enforce_root_file }}" + owner: root + group: root + mode: '0600' diff --git a/tasks/section_5/cis_5.3.3.3.x.yml b/tasks/section_5/cis_5.3.3.3.x.yml new file mode 100644 index 0000000..8f05a73 --- /dev/null +++ b/tasks/section_5/cis_5.3.3.3.x.yml @@ -0,0 +1,82 @@ +--- + +- name: "5.3.3.3.1 | PATCH | Ensure password history remember is configured" + when: + - ubtu24cis_rule_5_3_3_3_1 + - ubtu24cis_disruption_high + tags: + - level1-server + - level1-workstation + - patch + - rule_5.3.3.3.1 + - NIST800-53R5_IA-5 + - pam + block: + - name: "5.3.3.3.1 | AUDIT | Ensure password history remember is configured | Check existing files" + ansible.builtin.shell: grep -Psi -- '^\s*password\s+[^#\n\r]+\s+pam_pwhistory\.so\s+([^#\n\r]+\s+)?remember=\d+\b' /etc/pam.d/common-password + register: ubtu24_pwhistory_remember + changed_when: false + failed_when: ubtu24_pwhistory_remember.rc not in [0, 1] + + - name: "5.3.3.3.1 | PATCH | Ensure password number of changed characters is configured | Ensure remember is set" + when: ubtu24_pwhistory_remember.stdout | length > 0 + ansible.builtin.lineinfile: + path: "/{{ ubtu24cis_pam_confd_dir }}{{ ubtu24cis_pam_pwhistory_file }}" + regexp: ^(password\s+[^#\n\r]+\s+pam_pwhistory\.so\s+)(.*)(remember=\d+) + line: '\1\2\3 remember={{ ubtu24cis_pamd_pwhistory_remember }}' + backrefs: true + notify: Pam_auth_update_pwhistory + +- name: "5.3.3.3.2 | PATCH | Ensure password history is enforced for the root user" + when: + - ubtu24cis_rule_5_3_3_3_2 + - ubtu24cis_disruption_high + tags: + - level1-server + - level1-workstation + - patch + - rule_5.3.3.3.2 + - NIST800-53R5_IA-5 + - pam + block: + - name: "5.3.3.3.2 | AUDIT | Ensure password history is enforced for the root user | Check 
existing files"
+      ansible.builtin.shell: grep -Psi -- '^\s*password\s+[^#\n\r]+\s+pam_pwhistory\.so\s+([^#\n\r]+\s+)?enforce_for_root\b' /etc/pam.d/common-password
+      register: ubtu24_pwhistory_enforce_for_root
+      changed_when: false
+      failed_when: ubtu24_pwhistory_enforce_for_root.rc not in [0, 1]
+
+    # Fixed copy-paste task name ("Ensure remember is set" belonged to 5.3.3.3.1)
+    - name: "5.3.3.3.2 | PATCH | Ensure password history is enforced for the root user | Ensure enforce_for_root is set"
+      when: ubtu24_pwhistory_enforce_for_root.stdout | length > 0
+      ansible.builtin.lineinfile:
+        path: "/{{ ubtu24cis_pam_confd_dir }}{{ ubtu24cis_pam_pwhistory_file }}"
+        # Bug fix: the old pattern captured an existing 'enforce_for_root' as \3 and
+        # the line appended another one, doubling the token. The negative lookahead
+        # only adds the option when it is not already present.
+        regexp: ^(password\s+[^#\n\r]+\s+pam_pwhistory\.so\s+)(?!.*\benforce_for_root\b)(.*)$
+        line: '\1\2 enforce_for_root'
+        backrefs: true
+      notify: Pam_auth_update_pwhistory
+
+- name: "5.3.3.3.3 | PATCH | Ensure pam_pwhistory includes use_authtok"
+  when:
+    - ubtu24cis_rule_5_3_3_3_3
+    - ubtu24cis_disruption_high
+  tags:
+    - level1-server
+    - level1-workstation
+    - patch
+    # Bug fix: tag was 'rule_5.3.3.3.2' (copy-paste from the previous rule)
+    - rule_5.3.3.3.3
+    - NIST800-53R5_IA-5
+    - pam
+  block:
+    - name: "5.3.3.3.3 | AUDIT | Ensure pam_pwhistory includes use_authtok | Check existing files"
+      ansible.builtin.shell: grep -Psi -- '^\s*password\s+[^#\n\r]+\s+pam_pwhistory\.so\s+([^#\n\r]+\s+)?use_authtok\b' /etc/pam.d/common-password
+      register: ubtu24_pwhistory_use_authtok
+      changed_when: false
+      failed_when: ubtu24_pwhistory_use_authtok.rc not in [0, 1]
+
+    - name: "5.3.3.3.3 | PATCH | Ensure pam_pwhistory includes use_authtok | Ensure use_authtok is set"
+      when: ubtu24_pwhistory_use_authtok.stdout | length > 0
+      ansible.builtin.lineinfile:
+        path: "/{{ ubtu24cis_pam_confd_dir }}{{ ubtu24cis_pam_pwhistory_file }}"
+        # Same doubled-token fix as 5.3.3.3.2: only add when missing
+        regexp: ^(password\s+[^#\n\r]+\s+pam_pwhistory\.so\s+)(?!.*\buse_authtok\b)(.*)$
+        line: '\1\2 use_authtok'
+        backrefs: true
+      notify: Pam_auth_update_pwhistory
diff --git a/tasks/section_5/cis_5.3.3.4.x.yml b/tasks/section_5/cis_5.3.3.4.x.yml
new file mode 100644
index 0000000..6d2b0fc
--- /dev/null
+++ b/tasks/section_5/cis_5.3.3.4.x.yml
@@ -0,0 +1,105 @@
+---
+
+- 
name: "5.3.3.4.1 | PATCH | Ensure pam_unix does not include nullok" + when: + - ubtu24cis_rule_5_3_3_4_1 + - ubtu24cis_disruption_high + tags: + - level1-server + - level1-workstation + - patch + - rule_5.3.3.4.1 + - NIST800-53R5_NA + - pam + block: + - name: "5.3.3.4.1 | PATCH | Ensure pam_unix does not include nullok | capture state" + ansible.builtin.shell: grep -E "pam_unix.so.*nullok" /etc/pam.d/common-* /usr/share/pam-configs/* | cut -d ':' -f1 | uniq + changed_when: false + failed_when: ubtu24cis_pam_nullok.rc not in [ 0, 1 ] + register: ubtu24cis_pam_nullok + + - name: "5.3.3.4.1 | PATCH | Ensure pam_unix does not include nullok | Ensure nullok removed" + when: ubtu24cis_pam_nullok.stdout | length > 0 + ansible.builtin.replace: + path: "{{ item }}" + regexp: nullok + replace: '' + loop: "{{ ubtu24cis_pam_nullok.stdout_lines }}" + notify: Pam_auth_update_pwunix + +- name: "5.3.3.4.2 | PATCH | Ensure pam_unix does not include remember" + when: + - ubtu24cis_rule_5_3_3_4_2 + tags: + - level1-server + - level1-workstation + - patch + - pam + - rule_5.3.3.4.2 + - NIST800-53R5_NA + block: + - name: "5.3.3.4.2 | AUDIT | Ensure pam_unix does not include remember | capture state" + ansible.builtin.shell: grep -PH -- '^\h*^\h*[^#\n\r]+\h+pam_unix\.so\b' /etc/pam.d/common-{password,auth,account,session,session-noninteractive} | grep -Pv -- '\bremember=\d\b' + changed_when: false + failed_when: ubtu24cis_pam_remember.rc not in [ 0, 1 ] + register: ubtu24cis_pam_remember + + - name: "5.3.3.4.2 | PATCH | Ensure pam_unix does not include remember | Ensure remember removed" + when: ubtu24cis_pam_remember.stdout | length > 0 + ansible.builtin.replace: + path: "/{{ ubtu24cis_pam_confd_dir }}{{ ubtu24cis_pam_pwunix_file }}" + regexp: remember=\d+ + replace: '' + notify: Pam_auth_update_pwunix + +- name: "5.3.3.4.3 | PATCH | Ensure pam_unix includes a strong password hashing algorithm" + when: + - ubtu24cis_rule_5_3_3_4_3 + tags: + - level1-server + - level1-workstation + - 
patch
+    - pam
+    - rule_5.3.3.4.3
+    - NIST800-53R5_IA-5
+  block:
+    - name: "5.3.3.4.3 | AUDIT | Ensure pam_unix includes a strong password hashing algorithm | capture state"
+      # Bug fix: the Jinja expression was wrapped in literal double quotes inside
+      # the grep pattern ('..."sha512"...'), which can never match a pam line
+      ansible.builtin.shell: grep -PH -- '^\h*password\h+([^#\n\r]+)\h+pam_unix\.so\h+([^#\n\r]+\h+)?({{ ubtu24cis_passwd_hash_algo }})\b' /etc/pam.d/common-password
+      changed_when: false
+      failed_when: ubtu24cis_pam_pwhash.rc not in [ 0, 1 ]
+      register: ubtu24cis_pam_pwhash
+
+    - name: "5.3.3.4.3 | PATCH | Ensure pam_unix includes a strong password hashing algorithm | Ensure hash algorithm set"
+      # Bug fix: the condition previously tested ubtu24cis_pam_remember (the
+      # register from rule 5.3.3.4.2), not the audit result captured above
+      when: ubtu24cis_pam_pwhash.stdout | length > 0
+      ansible.builtin.replace:
+        path: "/{{ ubtu24cis_pam_confd_dir }}{{ ubtu24cis_pam_pwunix_file }}"
+        regexp: "(md5|bigcrypt|sha256|blowfish|gost_yescrypt|sha512|yescrypt)"
+        replace: '{{ ubtu24cis_passwd_hash_algo }}'
+      notify: Pam_auth_update_pwunix
+
+- name: "5.3.3.4.4 | PATCH | Ensure pam_unix includes use_authtok"
+  when:
+    - ubtu24cis_rule_5_3_3_4_4
+  tags:
+    - level1-server
+    - level1-workstation
+    - patch
+    - pam
+    - rule_5.3.3.4.4
+    - NIST800-53R5_IA-5
+  block:
+    - name: "5.3.3.4.4 | AUDIT | Ensure pam_unix includes use_authtok | capture state"
+      ansible.builtin.shell: grep -PH -- '^\h*password\h+([^#\n\r]+)\h+pam_unix\.so\h+([^#\n\r]+\h+)?use_authtok\b' /etc/pam.d/common-password
+      changed_when: false
+      failed_when: ubtu24cis_pam_authtok.rc not in [ 0, 1 ]
+      register: ubtu24cis_pam_authtok
+
+    - name: "5.3.3.4.4 | PATCH | Ensure pam_unix includes use_authtok | pam_files"
+      # Bug fix: 'ubtu24cis_pam_authtok | length > 0' measured the size of the
+      # register dict (always > 0); test the grep output instead
+      when:
+        - ubtu24cis_pam_authtok is defined
+        - ubtu24cis_pam_authtok.stdout | length > 0
+      ansible.builtin.lineinfile:
+        path: "/etc/pam.d/common-password"
+        # Bug fix: '[success=end.*]' was parsed as a regex character class, not the
+        # literal bracketed pam control field; escape the brackets, and only add
+        # use_authtok when it is not already present on the line
+        regexp: ^(\s*password\s+\[success=end[^\]]*\]\s+pam_unix\.so)(?!.*\buse_authtok\b)(.*)$
+        line: '\1\2 use_authtok'
+        backrefs: true
diff --git a/tasks/section_5/cis_5.4.1.x.yml b/tasks/section_5/cis_5.4.1.x.yml
new file mode 100644
index 0000000..c0846a0
--- /dev/null
+++ b/tasks/section_5/cis_5.4.1.x.yml
@@ -0,0 +1,208 @@
+--- 
+ +- name: "5.4.1.1 | PATCH | Ensure password expiration is configured" + when: + - ubtu24cis_rule_5_4_1_1 + tags: + - level1-server + - level1-workstation + - patch + - rule_5.4.1.1 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - user + - login + block: + - name: "5.4.1.1 | PATCH | Ensure password expiration is configured | Set /etc/login.defs PASS_MAX_DAYS" + ansible.builtin.lineinfile: + path: /etc/login.defs + regexp: '^PASS_MAX_DAYS|^#PASS_MAX_DAYS' + line: 'PASS_MAX_DAYS {{ ubtu24cis_pass.max_days }}' + insertafter: '# Password aging controls' + + - name: "5.4.1.1 | PATCH | Ensure password expiration is configured | Get existing users PASS_MAX_DAYS" + ansible.builtin.shell: "awk -F: '(/^[^:]+:[^!*]/ && ($5>{{ ubtu24cis_pass.max_days }} || $5<{{ ubtu24cis_pass.min_days }} || $5 == -1)){print $1}' /etc/shadow" + changed_when: false + failed_when: false + register: ubtu24cis_max_days + + - name: "5.4.1.1 | PATCH | Ensure password expiration is configured | Set existing users PASS_MAX_DAYS" + when: + - ubtu24cis_disruption_high + - (item != 'root') or (not ubtu24cis_uses_root) + ansible.builtin.shell: chage --maxdays {{ ubtu24cis_pass.max_days }} {{ item }} + failed_when: false + changed_when: ubtu24cis_max_days.stdout | length > 0 + loop: "{{ ubtu24cis_max_days.stdout_lines }}" + +- name: "5.4.1.2 | PATCH | Ensure minimum password age is configured" + when: + - ubtu24cis_rule_5_4_1_2 + tags: + - level2-server + - level2-workstation + - patch + - rule_5.4.1.2 + - NIST800-53R5_NA + - user + - login + block: + - name: "5.4.1.2 | PATCH | Ensure minimum password age is configured | Set /etc/login.defs PASS_MIN_DAYS" + ansible.builtin.lineinfile: + path: /etc/login.defs + regexp: '^PASS_MIN_DAYS|^#PASS_MIN_DAYS' + line: 'PASS_MIN_DAYS {{ ubtu24cis_pass.min_days }}' + + - name: "5.4.1.2 | PATCH | Ensure minimum password age is configured | Get existing users PASS_MIN_DAYS" + ansible.builtin.shell: "awk 
-F: '(/^[^:]+:[^!*]/ && ($4<{{ ubtu24cis_pass.min_days }})) {print $1}' /etc/shadow"
+      changed_when: false
+      failed_when: false
+      register: ubtu24cis_passwd_min_days
+
+    - name: "5.4.1.2 | PATCH | Ensure minimum password age is configured | Set existing users PASS_MIN_DAYS"
+      when:
+        - ubtu24cis_disruption_high
+        - (item != 'root') or (not ubtu24cis_uses_root)
+      ansible.builtin.shell: chage --mindays {{ ubtu24cis_pass.min_days }} {{ item }}
+      failed_when: false
+      changed_when: ubtu24cis_passwd_min_days.stdout | length > 0
+      loop: "{{ ubtu24cis_passwd_min_days.stdout_lines }}"
+
+- name: "5.4.1.3 | PATCH | Ensure password expiration warning days is configured"
+  when:
+    - ubtu24cis_rule_5_4_1_3
+  tags:
+    - level1-server
+    - level1-workstation
+    - patch
+    - rule_5.4.1.3
+    - NIST800-53R5_NA
+    - user
+    - login
+  block:
+    - name: "5.4.1.3 | PATCH | Ensure password expiration warning days is configured | Set /etc/login.defs PASS_WARN_AGE"
+      ansible.builtin.lineinfile:
+        path: /etc/login.defs
+        regexp: '^PASS_WARN_AGE|^#PASS_WARN_AGE'
+        line: 'PASS_WARN_AGE {{ ubtu24cis_pass.warn_age }}'
+
+    - name: "5.4.1.3 | PATCH | Ensure password expiration warning days is configured | Get existing users PASS_WARN_AGE"
+      ansible.builtin.shell: "awk -F: '(/^[^:]+:[^!*]/ && $6<{{ ubtu24cis_pass.warn_age }}){print $1}' /etc/shadow"
+      changed_when: false
+      failed_when: false
+      register: ubtu24cis_passwd_warn_days
+
+    - name: "5.4.1.3 | PATCH | Ensure password expiration warning days is configured | Set existing users PASS_WARN_AGE"
+      when:
+        - ubtu24cis_disruption_high
+        - (item != 'root') or (not ubtu24cis_uses_root)
+      # Bug fix: this previously ran 'chage --maxdays' with the warn-age value,
+      # silently clobbering each user's PASS_MAX_DAYS; warning days are set
+      # with --warndays
+      ansible.builtin.shell: chage --warndays {{ ubtu24cis_pass.warn_age }} {{ item }}
+      failed_when: false
+      changed_when: ubtu24cis_passwd_warn_days.stdout | length > 0
+      loop: "{{ ubtu24cis_passwd_warn_days.stdout_lines }}"
+
+- name: "5.4.1.4 | PATCH | Ensure strong password hashing algorithm is configured"
+  when:
+    - ubtu24cis_rule_5_4_1_4
+  tags:
+    - level1-server
+    - 
level1-workstation + - patch + - rule_5.4.1.4 + - NIST800-53R5_IA-5 + - pam + ansible.builtin.lineinfile: + path: /etc/login.defs + regexp: '^ENCRYPT_METHOD' + line: 'ENCRYPT_METHOD {{ ubtu24cis_passwd_hash_algo | upper }}' + +- name: "5.4.1.5 | PATCH | Ensure inactive password lock is configured" + when: + - ubtu24cis_rule_5_4_1_5 + tags: + - level1-server + - level1-workstation + - patch + - rule_5.4.1.5 + - NIST800-53R5_NA + - user + - login + block: + - name: "5.4.1.5 | AUDIT | Ensure inactive password lock is configured | General setting" + ansible.builtin.shell: useradd -D | grep INACTIVE | cut -d= -f2 + changed_when: false + failed_when: false + register: ubtu24cis_passwd_inactive_setting + + - name: "5.4.1.5 | PATCH | Ensure inactive password lock is configured| Set inactive period for new users" + when: ubtu24cis_passwd_inactive_setting.stdout != ubtu24cis_pass.inactive | string + ansible.builtin.shell: useradd -D -f {{ ubtu24cis_pass.inactive }} + failed_when: false + + - name: "5.4.1.5 | AUDIT | Ensure inactive password lock is configured | Get Individual users" + ansible.builtin.shell: "awk -F: '(/^[^:]+:[^!*]/ && ($7~/(\\s*|-1)/ || ( $7>1 && $7<{{ ubtu24cis_pass.inactive }}))) {print $1}' /etc/shadow" + changed_when: false + failed_when: false + register: ubtu24cis_passwd_inactive_users + + - name: "5.4.1.5 | PATCH | Ensure inactive password lock is configured | Set inactive period for existing users" + when: + - ubtu24cis_disruption_high + - ubtu24cis_passwd_inactive_users.stdout | length > 0 + - (item != 'root') and (not ubtu24cis_uses_root) + ansible.builtin.shell: chage --inactive {{ ubtu24cis_pass.inactive }} {{ item }} + failed_when: false + with_items: + - "{{ ubtu24cis_passwd | map(attribute='id') | list | intersect(ubtu24cis_passwd_inactive_users.stdout_lines) | list }}" + +- name: "5.4.1.6 | PATCH | Ensure all users last password change date is in the past" + when: + - ubtu24cis_rule_5_4_1_6 + tags: + - level1-server + - level1-workstation + 
- patch + - rule_5.4.1.6 + - NIST800-53R5_NA + - user + - login + vars: + warn_control_id: '5.4.1.6' + block: + - name: "5.4.1.6 | AUDIT | Ensure all users last password change date is in the past | Get current date in Unix Time" + ansible.builtin.shell: echo $(($(date --utc --date "$1" +%s)/86400)) + changed_when: false + failed_when: false + check_mode: false + register: ubtu24cis_current_time + + - name: "5.4.1.6 | AUDIT | Ensure all users last password change date is in the past | Get list of users with last changed PW date in future" + ansible.builtin.shell: "cat /etc/shadow | awk -F: '{if($3>{{ ubtu24cis_current_time.stdout }})print$1}'" + changed_when: false + failed_when: false + check_mode: false + register: ubtu24cis_passwd_future_user_list + + - name: "5.4.1.6 | PATCH | Ensure all users last password change date is in the past | Warn about users" + when: ubtu24cis_passwd_future_user_list.stdout | length > 0 + ansible.builtin.debug: + msg: + - "WARNING!! The following accounts have the last PW change date in the future" + - "{{ ubtu24cis_passwd_future_user_list.stdout_lines }}" + + - name: "5.4.1.6 | WARN | Ensure all users last password change date is in the past | warn_count" + when: ubtu24cis_passwd_future_user_list.stdout | length > 0 + ansible.builtin.import_tasks: + file: warning_facts.yml + + - name: "5.4.1.6 | PATCH | Ensure all users last password change date is in the past | Lock accounts with future PW changed dates" + when: + - ubtu24cis_disruption_high + - ubtu24cis_passwd_future_user_list.stdout | length > 0 + ansible.builtin.shell: passwd --expire {{ item }} + failed_when: false + with_items: + - "{{ ubtu24cis_passwd_future_user_list.stdout_lines }}" diff --git a/tasks/section_5/cis_5.4.2.x.yml b/tasks/section_5/cis_5.4.2.x.yml new file mode 100644 index 0000000..d071408 --- /dev/null +++ b/tasks/section_5/cis_5.4.2.x.yml @@ -0,0 +1,244 @@ +--- + +- name: "5.4.2.1 | PATCH | Ensure root is the only UID 0 account" + when: + - 
ubtu24cis_rule_5_4_2_1
+    - prelim_uid_zero_accounts_except_root.rc
+    - ubtu24cis_disruption_high
+  tags:
+    - level1-server
+    - level1-workstation
+    - patch
+    - accounts
+    - users
+    - rule_5.4.2.1
+    - NIST800-53R5_CM-1
+    - NIST800-53R5_CM-2
+    - NIST800-53R5_CM-6
+    - NIST800-53R5_CM-7
+    - NIST800-53R5_IA-5
+  ansible.builtin.shell: passwd -l {{ item }}
+  changed_when: false
+  failed_when: false
+  loop: "{{ prelim_uid_zero_accounts_except_root.stdout_lines }}"
+
+- name: "5.4.2.2 | PATCH | Ensure root is the only GID 0 account"
+  when:
+    - ubtu24cis_rule_5_4_2_2
+    - ubtu24cis_disruption_high
+  tags:
+    - level1-server
+    - level1-workstation
+    - patch
+    - rule_5.4.2.2
+    - NIST800-53R5_CM-1
+    - NIST800-53R5_CM-2
+    - NIST800-53R5_CM-6
+    - NIST800-53R5_CM-7
+    - NIST800-53R5_IA-5
+    - user
+    - system
+  block:
+    - name: "5.4.2.2 | AUDIT | Ensure root is the only GID 0 account | Get members of gid 0"
+      ansible.builtin.shell: "awk -F: '($1 !~ /^(sync|shutdown|halt|operator)/ && $4==\"0\") {print $1}' /etc/passwd | grep -wv 'root'"
+      register: discovered_gid0_members
+      changed_when: false
+      failed_when: discovered_gid0_members.rc not in [ 0, 1 ]
+
+    - name: "5.4.2.2 | PATCH | Ensure root is the only GID 0 account | Remove users not root from gid 0"
+      when:
+        - discovered_gid0_members is defined
+        - discovered_gid0_members.stdout | length > 0
+      # NOTE(review): 'state: absent' deletes the whole account, which goes well
+      # beyond moving it out of GID 0 - confirm this matches site policy before
+      # running with ubtu24cis_disruption_high
+      ansible.builtin.user:
+        name: "{{ item }}"
+        gid: 0
+        state: absent
+      # Bug fix: the loop previously iterated over the literal string
+      # 'discovered_gid0_members.stdout_lines' instead of the variable's contents
+      loop: "{{ discovered_gid0_members.stdout_lines }}"
+
+- name: "5.4.2.3 | AUDIT | Ensure group root is the only GID 0 group"
+  when:
+    - ubtu24cis_rule_5_4_2_3
+  tags:
+    - level1-server
+    - level1-workstation
+    - patch
+    - rule_5.4.2.3
+    - NIST800-53R5_CM-1
+    - NIST800-53R5_CM-2
+    - NIST800-53R5_CM-6
+    - NIST800-53R5_CM-7
+    - NIST800-53R5_IA-5
+    - user
+    - system
+  block:
+    - name: "5.4.2.3 | AUDIT | Ensure group root is the only GID 0 group | Get groups with gid 0"
+      ansible.builtin.shell: "awk -F: '$3==\"0\"{print $1}' /etc/group | grep -vw 'root'"
+      register: 
discovered_gid0_groups + changed_when: false + failed_when: discovered_gid0_groups.rc not in [ 0, 1 ] + + - name: "5.4.2.3 | AUDIT | Ensure group root is the only GID 0 group | Warning if others gid 0 groups" + when: + - discovered_gid0_groups is defined + - discovered_gid0_groups.stdout | length > 0 + ansible.builtin.debug: + msg: + - "Warning!! You have other groups assigned to GID 0 - Please resolve" + - "{{ discovered_gid0_groups.stdout_lines }}" + + - name: "5.4.2.3 | WARN | Ensure group root is the only GID 0 group | warn_count" + when: + - discovered_gid0_groups is defined + - discovered_gid0_groups.stdout | length > 0 + ansible.builtin.import_tasks: + file: warning_facts.yml + vars: + warn_control_id: '5.4.2.3' + +- name: "5.4.2.4 | PATCH | Ensure root password is set" + when: + - ubtu24cis_rule_5_4_2_4 + tags: + - level1-server + - level1-workstation + - patch + - shadow_suite + - rule_5.4.2.4 + - NIST800-53R5_NA + ansible.builtin.debug: + msg: "This is set as an assert in tasks/main" + +- name: "5.4.2.5 | PATCH | Ensure root PATH Integrity" + when: + - ubtu24cis_rule_5_4_2_5 + tags: + - level1-server + - level1-workstation + - patch + - paths + - rule_5.4.2.5 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + block: + - name: "5.4.2.5 | AUDIT | Ensure root PATH Integrity | Get root paths" + ansible.builtin.shell: sudo -Hiu root env | grep '^PATH' | cut -d= -f2 + changed_when: false + register: discovered_root_paths + + - name: "5.4.2.5 | AUDIT | Ensure root PATH Integrity | Get root paths" + when: discovered_root_paths is defined + ansible.builtin.shell: sudo -Hiu root env | grep '^PATH' | cut -d= -f2 | tr ":" "\n" + changed_when: false + register: discovered_root_paths_split + + - name: "5.4.2.5 | AUDIT | Ensure root PATH Integrity | Set fact" + when: discovered_root_paths is defined + ansible.builtin.set_fact: + root_paths: "{{ discovered_root_paths.stdout }}" + + - name: "5.4.2.5 | AUDIT | 
Ensure root PATH Integrity | Check for empty dirs"
+      when: discovered_root_paths is defined
+      ansible.builtin.shell: 'echo {{ root_paths }} | grep -q "::" && echo "roots path contains a empty directory (::)"'
+      changed_when: false
+      failed_when: discovered_root_path_empty_dir.rc not in [ 0, 1 ]
+      register: discovered_root_path_empty_dir
+
+    - name: "5.4.2.5 | AUDIT | Ensure root PATH Integrity | Check for trailing ':'"
+      when: discovered_root_paths is defined
+      # Bug fix: the PATH string itself was previously executed as a command
+      # (missing 'echo'); also dropped the leftover 'cut -d= -f2' - root_paths
+      # already holds only the PATH value
+      ansible.builtin.shell: 'echo {{ root_paths }} | grep -q ":$" && echo "roots path contains a trailing (:)"'
+      changed_when: false
+      failed_when: discovered_root_path_trailing_colon.rc not in [ 0, 1 ]
+      register: discovered_root_path_trailing_colon
+
+    - name: "5.4.2.5 | AUDIT | Ensure root PATH Integrity | Check for owner and permissions"
+      when: discovered_root_paths is defined
+      block:
+        - name: "5.4.2.5 | AUDIT | Ensure root PATH Integrity | Check for owner and permissions"
+          ansible.builtin.stat:
+            path: "{{ item }}"
+          register: discovered_root_path_perms
+          loop: "{{ discovered_root_paths_split.stdout_lines }}"
+
+        - name: "5.4.2.5 | AUDIT | Ensure root PATH Integrity | Set permissions"
+          when:
+            - item.stat.exists
+            - item.stat.isdir
+            - item.stat.pw_name != 'root' or item.stat.gr_name != 'root' or item.stat.woth or item.stat.wgrp
+            # Bug fix: "(item != 'root') and ..." compared a stat result dict to a
+            # string (always true), so only the root-login guard is meaningful
+            - not ubtu24cis_uses_root
+          ansible.builtin.file:
+            path: "{{ item.stat.path }}"
+            state: directory
+            owner: root
+            group: root
+            mode: '0755'
+            follow: false
+          loop: "{{ discovered_root_path_perms.results }}"
+          loop_control:
+            # Label the path being checked, not the full stat result blob
+            label: "{{ item.item }}"
+
+- name: "5.4.2.6 | PATCH | Ensure root user umask is configured"
+  when:
+    - ubtu24cis_rule_5_4_2_6
+  tags:
+    - level1-server
+    - level1-workstation
+    - patch
+    - shadow_suite
+    - rule_5.4.2.6
+    - NIST800-53R5_AC-3
+    - NIST800-53R5_MP-2
+  ansible.builtin.lineinfile:
+    path: /root/.bash_profile
+    regexp: \s*umask
+    line: "umask {{ ubtu24cis_root_umask }}"
+    create: true
+
+- name: "5.4.2.7 | PATCH | 
Ensure system accounts do not have a valid login shell" + when: + - ubtu24cis_rule_5_4_2_7 + - "item.id not in prelim_interactive_usernames.stdout" + - "'root' not in item.id" + - ubtu24cis_disruption_high + tags: + - level1-server + - level1-workstation + - patch + - shadow_suite + - rule_5.4.2.7 + - NIST800-53R5_AC-2 + - NIST800-53R5_AC-3 + - NIST800-53R5_AC-11 + - NIST800-53R5_MP-2 + ansible.builtin.user: + name: "{{ item.id }}" + shell: /usr/sbin/nologin + loop: "{{ ubtu24cis_passwd }}" + loop_control: + label: "{{ item.id }}" + +- name: "5.4.2.8 | PATCH | Ensure accounts without a valid login shell are locked | Lock accounts" + when: + - ubtu24cis_rule_5_4_2_8 + - ubtu24cis_disruption_high + - "item.id not in prelim_interactive_usernames.stdout" + - "'root' not in item.id" + tags: + - level1-server + - level1-workstation + - patch + - shadow_suite + - rule_5.4.2.8 + - NIST800-53R5_AC-2 + - NIST800-53R5_AC-3 + - NIST800-53R5_AC-11 + - NIST800-53R5_MP-2 + ansible.builtin.user: + name: "{{ item.id }}" + password_lock: true + loop: "{{ ubtu24cis_passwd }}" + loop_control: + label: "{{ item.id }}" diff --git a/tasks/section_5/cis_5.4.3.x.yml b/tasks/section_5/cis_5.4.3.x.yml new file mode 100644 index 0000000..38fe20a --- /dev/null +++ b/tasks/section_5/cis_5.4.3.x.yml @@ -0,0 +1,63 @@ +--- + +- name: "5.4.3.1 | PATCH | Ensure nologin is not listed in /etc/shells" + when: + - ubtu24cis_rule_5_4_3_1 + tags: + - level2-server + - level2-workstation + - patch + - shells + - rule_5.4.3.1 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + ansible.builtin.replace: + path: /etc/shells + regexp: nologin + replace: "" + +- name: "5.4.3.2 | PATCH | Ensure default user shell timeout is configured" + when: + - ubtu24cis_rule_5_4_3_2 + tags: + - level1-server + - level1-workstation + - patch + - shell + - rule_5.4.3.2 + - NIST800-53R5_NA + ansible.builtin.blockinfile: + path: "{{ item.path }}" + state: "{{ item.state 
}}" + marker: "# {mark} - CIS benchmark - Ansible-lockdown" + create: true + mode: '0644' + block: | + TMOUT={{ ubtu24cis_shell_session_timeout }} + readonly TMOUT + export TMOUT + loop: + - { path: "{{ ubtu24cis_shell_session_file }}", state: present } + - { path: /etc/profile, state: "{{ (ubtu24cis_shell_session_file == '/etc/profile') | ternary('present', 'absent') }}" } + +- name: "5.4.3.3 | PATCH | Ensure default user umask is configured" + when: + - ubtu24cis_rule_5_4_3_3 + tags: + - level1-server + - level1-workstation + - patch + - umask + - rule_5.4.3.3 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + ansible.builtin.replace: + path: "{{ item.path }}" + regexp: (?i)(umask\s+\d\d\d) + replace: '{{ item.line }} {{ ubtu24cis_bash_umask }}' + loop: + - { path: '/etc/profile', line: 'umask' } + - { path: '/etc/login.defs', line: 'UMASK' } diff --git a/tasks/section_5/main.yml b/tasks/section_5/main.yml new file mode 100644 index 0000000..e8d9d95 --- /dev/null +++ b/tasks/section_5/main.yml @@ -0,0 +1,55 @@ +--- + +- name: "SECTION | 5.1 | Configure SSH Server" + ansible.builtin.import_tasks: + file: cis_5.1.x.yml + +- name: "SECTION | 5.2 | Configure privilege escalation" + when: not system_is_container + ansible.builtin.import_tasks: + file: cis_5.2.x.yml + +- name: "SECTION | 5.3.1.x | Configure PAM software packages" + when: not system_is_container + ansible.builtin.import_tasks: + file: cis_5.3.1.x.yml + +- name: "SECTION | 5.3.2.x | Configure pam-auth-update" + when: not system_is_container + ansible.builtin.import_tasks: + file: cis_5.3.2.x.yml + +- name: "SECTION | 5.3.3.1.x | Configure pam_faillock module" + when: not system_is_container + ansible.builtin.import_tasks: + file: cis_5.3.3.1.x.yml + +- name: "SECTION | 5.3.3.2.x | Configure pam_pwquality module" + when: not system_is_container + ansible.builtin.import_tasks: + file: cis_5.3.3.2.x.yml + +- name: "SECTION | 5.3.3.3.x | Configure pam_pwhistory module" + when: not system_is_container + 
ansible.builtin.import_tasks: + file: cis_5.3.3.3.x.yml + +- name: "SECTION | 5.3.3.4.x | Configure pam_unix module" + when: not system_is_container + ansible.builtin.import_tasks: + file: cis_5.3.3.4.x.yml + +- name: "SECTION | 5.4.1.x | Configure shadow password suite parameters" + when: not system_is_container + ansible.builtin.import_tasks: + file: cis_5.4.1.x.yml + +- name: "SECTION | 5.4.2.x | Configure root and system accounts and environment" + when: not system_is_container + ansible.builtin.import_tasks: + file: cis_5.4.2.x.yml + +- name: "SECTION | 5.4.3.x | Configure user default environment" + when: not system_is_container + ansible.builtin.import_tasks: + file: cis_5.4.3.x.yml diff --git a/tasks/section_6/cis_6.1.1.x.yml b/tasks/section_6/cis_6.1.1.x.yml new file mode 100644 index 0000000..c3365e2 --- /dev/null +++ b/tasks/section_6/cis_6.1.1.x.yml @@ -0,0 +1,101 @@ +--- + +- name: "6.1.1.1 | PATCH | Ensure journald service is enabled and active" + when: + - ubtu24cis_rule_6_1_1_1 + tags: + - level1-server + - level1-workstation + - audit + - journald + - rule_6.1.1.1 + - NIST800-53R5_AU-2 + - NIST800-53R5_AU-7 + - NIST800-53R5_AU-12 + ansible.builtin.systemd: + name: systemd-journald.service + masked: false + state: started + +- name: "6.1.1.2 | PATCH | Ensure journald log file access is configured" + when: + - ubtu24cis_rule_6_1_1_2 + tags: + - level1-server + - level1-workstation + - audit + - journald + - rule_6.1.1.2 + - NIST800-53R5_AC-3 + - NIST800-53R5_AU-2 + - NIST800-53R5_AU-12 + - NIST800-53R5_MP-2 + - NIST800-53R5_SI-5 + block: + - name: "6.1.1.2 | PATCH | Ensure journald log file access is configured | Default file permissions" + ansible.builtin.file: + path: /usr/lib/tmpfiles.d/systemd.conf + mode: '0640' + + - name: "6.1.1.2 | AUDIT | Ensure journald log file access is configured | Check for override file" + ansible.builtin.stat: + path: /etc/tmpfiles.d/systemd.conf + register: tmpfile_override + + - name: "6.1.1.2 | AUDIT | Ensure 
journald log file access is configured | If override file check for journal"
+      when: tmpfile_override.stat.exists
+      # Bug fix: this audit is about the override file /etc/tmpfiles.d/systemd.conf
+      # (stat'ed above) but previously grepped /usr/lib/tmpfiles.d/systemd.conf;
+      # also '\d' is not valid in an ERE, so use [0-9]
+      ansible.builtin.shell: grep -E 'z /var/log/journal/%m/system.journal [0-9]*' /etc/tmpfiles.d/systemd.conf
+      register: journald_fileperms_override
+      changed_when: false
+      failed_when: journald_fileperms_override.rc not in [ 0, 1 ]
+
+    - name: "6.1.1.2 | AUDIT | Ensure journald log file access is configured | Warning if override found"
+      when:
+        - tmpfile_override.stat.exists
+        - journald_fileperms_override.stdout | length > 0
+      ansible.builtin.debug:
+        msg: "Warning!! - tmpfiles override found /etc/tmpfiles.d/systemd.conf affecting journald files please confirm matches site policy"
+
+    - name: "6.1.1.2 | AUDIT | Ensure journald log file access is configured | Warning if override found"
+      when:
+        - tmpfile_override.stat.exists
+        - journald_fileperms_override.stdout | length > 0
+      ansible.builtin.import_tasks:
+        file: warning_facts.yml
+      vars:
+        warn_control_id: '6.1.1.2'
+
+- name: "6.1.1.3 | PATCH | Ensure journald log file rotation is configured"
+  when:
+    - ubtu24cis_rule_6_1_1_3
+  tags:
+    - level1-server
+    - level1-workstation
+    - patch
+    - journald
+    - rule_6.1.1.3
+    - NIST800-53R5_AU-2
+    - NIST800-53R5_AU-7
+    - NIST800-53R5_AU-12
+  notify: Restart journald
+  block:
+    - name: "6.1.1.3 | PATCH | Ensure journald log file rotation is configured | Add file"
+      ansible.builtin.template:
+        src: etc/systemd/journald.conf.d/rotation.conf.j2
+        dest: /etc/systemd/journald.conf.d/rotation.conf
+        owner: root
+        group: root
+        mode: '0640'
+
+    - name: "6.1.1.3 | PATCH | Ensure journald log file rotation is configured | comment out current entries"
+      ansible.builtin.replace:
+        path: /etc/systemd/journald.conf
+        regexp: "{{ item }}"
+        replace: '#\1'
+      loop:
+        - '^(\s*SystemMaxUse\s*=.*)'
+        - '^(\s*SystemKeepFree\s*=.*)'
+        - '^(\s*RuntimeMaxUse\s*=)'
+        - '^(\s*RuntimeKeepFree\s*=.*)'
+        - '^(\s*MaxFileSec\s*=.*)'
+
diff --git a/tasks/section_6/cis_6.1.2.x.yml 
b/tasks/section_6/cis_6.1.2.x.yml new file mode 100644 index 0000000..754bae8 --- /dev/null +++ b/tasks/section_6/cis_6.1.2.x.yml @@ -0,0 +1,155 @@ +--- + +- name: "6.1.2.1.1 | PATCH | Ensure systemd-journal-remote is installed" + when: + - ubtu24cis_rule_6_1_2_1_1 + - not ubtu24cis_system_is_log_server + tags: + - level1-server + - level1-workstation + - patch + - journald + - rule_6.1.2.1.1 + - NIST800-53R5_AU-2 + - NIST800-53R5_AU-7 + - NIST800-53R5_AU-12 + ansible.builtin.package: + name: systemd-journal-remote + state: present + +- name: "6.1.2.1.2 | PATCH | Ensure systemd-journal-upload authentication is configured" + when: + - ubtu24cis_rule_6_1_2_1_2 + - not ubtu24cis_system_is_log_server + tags: + - level1-server + - level1-workstation + - patch + - journald + - rule_6.1.2.1.2 + - NIST800-53R5_AU-2 + - NIST800-53R5_AU-12 + notify: Restart journald + ansible.builtin.lineinfile: + path: /etc/systemd/journal-upload.conf + regexp: "{{ item.regexp }}" + line: "{{ item.line }}" + loop: + - { regexp: 'URL=', line: 'URL={{ ubtu24cis_remote_log_server }}'} + - { regexp: 'ServerKeyFile=', line: 'ServerKeyFile={{ ubtu24cis_journal_upload_serverkeyfile }}'} + - { regexp: 'ServerCertificateFile=', line: 'ServerCertificateFile={{ ubtu24cis_journal_servercertificatefile }}'} + - { regexp: 'TrustedCertificateFile=', line: 'TrustedCertificateFile={{ ubtu24cis_journal_trustedcertificatefile }}'} + +- name: "6.1.2.1.3 | PATCH | Ensure systemd-journal-upload is enabled and active" + when: + - not ubtu24cis_system_is_log_server + - ubtu24cis_rule_6_1_2_1_3 + tags: + - level1-server + - level1-workstation + - patch + - journald + - rule_6.1.2.1.3 + - NIST800-53R5_AU-2 + - NIST800-53R5_AU-12 + ansible.builtin.systemd: + name: systemd-journal-upload + masked: false + enabled: true + +- name: "6.1.2.1.4 | PATCH | Ensure systemd-journal-remote service is not in use" + when: + - not ubtu24cis_system_is_log_server + - ubtu24cis_rule_6_1_2_1_4 + tags: + - level1-server + - 
level1-workstation + - patch + - journald + - rule_6.1.2.1.4 + - NIST800-53R5_AU-2 + - NIST800-53R5_AU-7 + - NIST800-53R5_AU-12 + ansible.builtin.systemd: + name: "{{ item }}" + state: stopped + enabled: false + masked: true + loop: + - systemd-journal-remote.socket + - systemd-journal-remote.service + +- name: "6.1.2.2 | PATCH | Ensure journald ForwardToSyslog is disabled" + when: + - ubtu24cis_rule_6_1_2_2 + tags: + - level1-server + - level2-workstation + - patch + - journald + - rule_6.1.2.2 + notify: Restart journald + block: + - name: "6.1.2.2 | PATCH | Ensure journald ForwardToSyslog is disabled | Add file" + ansible.builtin.template: + src: etc/systemd/journald.conf.d/forwardtosyslog.conf.j2 + dest: /etc/systemd/journald.conf.d/forwardtosyslog.conf + owner: root + group: root + mode: '0640' + + - name: "6.1.2.2 | PATCH | Ensure journald ForwardToSyslog is disabled | comment out current entries" + ansible.builtin.replace: + path: /etc/systemd/journald.conf + regexp: ^(\s*ForwardToSyslog) + replace: '#\1' + +- name: "6.1.2.3 | PATCH | Ensure journald Compress is configured" + when: + - ubtu24cis_rule_6_1_2_3 + tags: + - level1-server + - level1-workstation + - patch + - journald + - rule_6.1.2.3 + notify: Restart journald + block: + - name: "6.1.2.3 | PATCH | Ensure journald Compress is configured | Add file" + ansible.builtin.template: + src: etc/systemd/journald.conf.d/storage.conf.j2 # Added to the same file as 6.2.1.5 + dest: /etc/systemd/journald.conf.d/storage.conf + owner: root + group: root + mode: '0640' + + - name: "6.1.2.3 | PATCH | Ensure journald Compress is configured | comment out current entries" + ansible.builtin.replace: + path: /etc/systemd/journald.conf + regexp: (?i)^(\s*compress=) + replace: '#\1' + +- name: "6.1.2.4 | PATCH | Ensure journald Storage is configured" + when: + - ubtu24cis_rule_6_1_2_4 + tags: + - level1-server + - level1-workstation + - patch + - journald + - rule_6.1.2.4 + notify: Restart journald + block: + - name: 
"6.1.2.4 | PATCH | Ensure journald Storage is configured | Add file" + ansible.builtin.template: + src: etc/systemd/journald.conf.d/storage.conf.j2 + dest: /etc/systemd/journald.conf.d/storage.conf + owner: root + group: root + mode: '0640' + + - name: "6.1.2.4 | PATCH | Ensure journald Storage is configured | comment out current entries" + ansible.builtin.replace: + path: /etc/systemd/journald.conf + regexp: (?i)^(\s*storage=) + replace: '#\1' diff --git a/tasks/section_6/cis_6.1.3.8.yml b/tasks/section_6/cis_6.1.3.8.yml new file mode 100644 index 0000000..3f4e49c --- /dev/null +++ b/tasks/section_6/cis_6.1.3.8.yml @@ -0,0 +1,31 @@ +--- + +- name: "6.1.3.8 | PATCH | Ensure logrotate is configured" + when: + - ubtu24cis_rule_6_1_3_8 + tags: + - level1-server + - level1-workstation + - manual + - patch + - logrotate + - rule_6.1.3.8 + - NIST800-53R5_AU-8 + block: + - name: "6.1.3.8 | AUDIT | Ensure logrotate is configured | Get logrotate settings" + ansible.builtin.find: + paths: /etc/logrotate.d/ + register: ubtu24cis_log_rotate_conf + + - name: "6.1.3.8 | PATCH | Ensure logrotate is configured | conf files" + ansible.builtin.replace: + path: "{{ item.path }}" + regexp: '^(\s*)(daily|weekly|monthly|yearly)$' + replace: "\\1{{ ubtu24cis_logrotate }}" + loop: "{{ ubtu24cis_log_rotate_conf.files }}" + + - name: "6.1.3.8 | PATCH | Ensure logrotate is configured | logrotate.conf" + ansible.builtin.replace: + path: /etc/logrotate.conf + regexp: '^(\s*)(daily|weekly|monthly|yearly)$' + replace: "\\1{{ ubtu24cis_logrotate }}" diff --git a/tasks/section_6/cis_6.1.3.x.yml b/tasks/section_6/cis_6.1.3.x.yml new file mode 100644 index 0000000..bb46529 --- /dev/null +++ b/tasks/section_6/cis_6.1.3.x.yml @@ -0,0 +1,201 @@ +--- + +- name: "6.1.3.1 | PATCH | Ensure rsyslog is installed" + when: + - ubtu24cis_rule_6_1_3_1 + - "'rsyslog' not in ansible_facts.packages" + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_6.1.3.1 + - NIST800-53R5_AU-2 + - 
NIST800-53R5_AU-3 + - NIST800-53R5_AU-12 + - rsyslog + - apt + ansible.builtin.package: + name: rsyslog + state: present + +- name: "6.1.3.2 | PATCH | Ensure rsyslog service is enabled" + when: + - ubtu24cis_rule_6_1_3_2 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_6.1.3.2 + - NIST800-53R5_AU-2 + - NIST800-53R5_AU-7 + - NIST800-53R5_AU-12 + - rsyslog + ansible.builtin.systemd: + name: rsyslog + enabled: true + +- name: "6.1.3.3 | PATCH | Ensure journald is configured to send logs to rsyslog" + when: + - ubtu24cis_rule_6_1_3_3 + tags: + - level1-server + - level1-workstation + - manual + - patch + - journald + - rule_6.1.3.3 + - NIST800-53R5_AC-3 + - NIST800-53R5_AU-2 + - NIST800-53R5_AU-4 + - NIST800-53R5_AU-12 + - NIST800-53R5_MP-2 + ansible.builtin.lineinfile: + path: /etc/systemd/journald.conf + regexp: ^ForwardToSyslog= + line: ForwardToSyslog=yes + notify: Restart syslog service + +- name: "6.1.3.4 | PATCH | Ensure rsyslog log file creation mode is configured" + when: + - ubtu24cis_rule_6_1_3_4 + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_6.1.3.4 + - NIST800-53R5_AC-3 + - NIST800-53R5_AC-6 + - NIST800-53R5_MP-2 + - rsyslog + ansible.builtin.lineinfile: + path: /etc/rsyslog.conf + regexp: '^\$FileCreateMode|^#\$FileCreateMode' + line: '$FileCreateMode 0640' + notify: Restart syslog service + +- name: "6.1.3.5 | PATCH | Ensure logging is configured" + when: + - ubtu24cis_rule_6_1_3_5 + tags: + - level1-server + - level1-workstation + - manual + - patch + - rule_6.1.3.5 + - NIST800-53R5_AU-2 + - NIST800-53R5_AU-7 + - NIST800-53R5_AU-12 + - rsyslog + vars: + warn_control_id: '6.1.3.5' + block: + - name: "6.1.3.5 | AUDIT | Ensure logging is configured | Find configuration file" + ansible.builtin.shell: grep -r "*.emerg" /etc/* | cut -f1 -d":" + changed_when: false + failed_when: false + check_mode: false + register: ubtu24cis_6_1_3_5_rsyslog_config_path + + - name: "6.1.3.5 | AUDIT | Ensure 
logging is configured | Gather rsyslog current config" + ansible.builtin.shell: "cat {{ ubtu24cis_6_1_3_5_rsyslog_config_path.stdout }}" + changed_when: false + failed_when: false + check_mode: false + register: ubtu24cis_6_1_3_5_rsyslog_config + + - name: "6.1.3.5 | AUDIT | Ensure logging is configured | Message out config" + when: not ubtu24cis_rsyslog_ansible_managed + ansible.builtin.debug: + msg: + - "Warning!! Below is the current logging configurations for rsyslog, please review" + - "{{ ubtu24cis_6_1_3_5_rsyslog_config.stdout_lines }}" + + - name: "6.1.3.5 | PATCH | Ensure logging is configured | Set warning count" + when: not ubtu24cis_rsyslog_ansible_managed + ansible.builtin.import_tasks: + file: warning_facts.yml + + - name: "6.1.3.5 | PATCH | Ensure logging is configured | Automated rsyslog configuration" + when: ubtu24cis_rsyslog_ansible_managed + ansible.builtin.lineinfile: + path: "{{ ubtu24cis_6_1_3_5_rsyslog_config_path.stdout }}" + regexp: "{{ item.regexp }}" + line: "{{ item.line }}" + insertafter: "{{ item.insertafter }}" + with_items: + - { regexp: '^\*.emerg', line: '*.emerg :omusrmsg:*', insertafter: '^# Emergencies are sent to everybody logged in' } + - { regexp: '^auth,authpriv.\*', line: 'auth,authpriv.* /var/log/secure', insertafter: '^# First some standard log files. Log by facility' } + - { regexp: '^mail.\*|^#mail.\*', line: 'mail.* -/var/log/mail', insertafter: '^# First some standard log files' } + - { regexp: '^cron.\*|^#cron.\*', line: 'cron.* /var/log/cron', insertafter: '^# First some standard log files' } + - { regexp: '^mail.info|^#mail.info', line: 'mail.info -/var/log/mail.info', insertafter: '^# Logging for the mail system' } + - { regexp: '^mail.warn|^#mail.warn', line: 'mail.warning -/var/log/mail.warn', insertafter: '^# Logging for the mail system.' } + - { regexp: '^mail.err|^#mail.err', line: 'mail.err /var/log/mail.err', insertafter: '^# Logging for the mail system.' 
} + - { regexp: '^\*.=warning;\*.=err|^#\*.=warning;\*.=err', line: '*.=warning;*.=err -/var/log/warn', insertafter: '^# First some standard log files' } + - { regexp: '^\*.crit|^#\*.crit', line: '*.crit /var/log/warn', insertafter: '^# First some standard log files' } + - { regexp: '^\*.\*;mail.none;news.none|^#\*.\*;mail.none;news.none', line: '*.*;mail.none;news.none -/var/log/messages', insertafter: '^# First some standard log files' } + - { regexp: '^local0,local1.\*|^#local0,local1.\*', line: 'local0,local1.* -/var/log/localmessages', insertafter: '^# First some standard log files' } + - { regexp: '^local2,local3.\*|^#local2,local3.\*', line: 'local2,local3.* -/var/log/localmessages', insertafter: '^# First some standard log files' } + - { regexp: '^local4,local5.\*|^#local4,local5.\*', line: 'local4,local5.* -/var/log/localmessages', insertafter: '^# First some standard log files' } + - { regexp: '^local6,local7.\*|^#local6,local7.\*', line: 'local6,local7.* -/var/log/localmessages', insertafter: '^# First some standard log files' } + loop_control: + label: "{{ item.line }}" + notify: Restart syslog service + +- name: "6.1.3.6 | PATCH | Ensure rsyslog is configured to send logs to a remote log host" + when: + - ubtu24cis_rule_6_1_3_6 + - not ubtu24cis_system_is_log_server + tags: + - level1-server + - level1-workstation + - automated + - patch + - rule_6.1.3.6 + - NIST800-53R5_AU-6 + - rsyslog + ansible.builtin.blockinfile: + path: /etc/rsyslog.conf + block: | + ##Enable sending of logs over TCP add the following line: + *.* @@{{ ubtu24cis_remote_log_server }} + insertafter: EOF + +- name: "6.1.3.7 | PATCH | Ensure rsyslog is not configured to receive logs from a remote client" + when: + - ubtu24cis_rule_6_1_3_7 + tags: + - level1-server + - level1-workstation + - manual + - patch + - rule_6.1.3.7 + - NIST800-53R5_AU-2 + - NIST800-53R5_AU-7 + - NIST800-53R5_AU-12 + - NIST800-53R5_CM-6 + - rsyslog + block: + - name: "6.1.3.7 | PATCH | Ensure rsyslog is not 
configured to receive logs from a remote client | When not a log host" + when: not ubtu24cis_system_is_log_server + ansible.builtin.replace: + path: /etc/rsyslog.conf + regexp: '({{ item }})' + replace: '#\1' + with_items: + - '^(\$ModLoad|module(load="imtcp"))' + - '^(\$(InputTCP|InputRELP|UDP)ServerRun|input(type="imtcp" port=".*"))' + notify: Restart syslog service + + - name: "6.1.3.7 | PATCH | Ensure rsyslog is not configured to receive logs from a remote client | When a log server" + when: ubtu24cis_system_is_log_server + ansible.builtin.lineinfile: + path: /etc/rsyslog.conf + regexp: "{{ item.regexp }}" + line: "{{ item.line }}" + with_items: + - { regexp: '^\$ModLoad|^#\$ModLoad', line: '$ModLoad imtcp' } + - { regexp: '^\$InputTCPServerRun|^#\$InputTCPServerRun', line: '$InputTCPServerRun 514' } + notify: Restart syslog service diff --git a/tasks/section_6/cis_6.1.4.1.yml b/tasks/section_6/cis_6.1.4.1.yml new file mode 100644 index 0000000..14e2942 --- /dev/null +++ b/tasks/section_6/cis_6.1.4.1.yml @@ -0,0 +1,40 @@ +--- + +- name: "6.1.4.1 | PATCH | Ensure access to all logfiles has been configured" + when: + - ubtu24cis_rule_6_1_4_1 + tags: + - level1-server + - level1-workstation + - patch + - logfiles + - rule_6.1.4.1 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + block: + - name: "6.1.4.1 | AUDIT | Ensure access to all logfiles has been configured | find files" + ansible.builtin.shell: find /var/log/ -type f -exec ls {} \; + changed_when: false + failed_when: false + register: discovered_logfiles + + - name: "6.1.4.1 | PATCH | Ensure access to all logfiles has been configured | change permissions" + when: + - item != "/var/log/btmp" + - item != "/var/log/utmp" + - item != "/var/log/wtmp" + - item != "/var/log/lastlog" + ansible.builtin.file: + path: "{{ item }}" + mode: u-x,g-wx,o-rwx + loop: "{{ discovered_logfiles.stdout_lines }}" + + - name: "6.1.4.1 | PATCH | Ensure access to all logfiles has been configured | change permissions" + 
ansible.builtin.file: + path: "{{ item }}" + mode: 'ug-x,o-wx' + with_fileglob: + - "/var/log/*tmp" + - "/var/log/lastlog*" + - "/var/log/sssd*" + - "/var/log/SSSD*" diff --git a/tasks/section_6/cis_6.2.1.x.yml b/tasks/section_6/cis_6.2.1.x.yml new file mode 100644 index 0000000..5c5df2b --- /dev/null +++ b/tasks/section_6/cis_6.2.1.x.yml @@ -0,0 +1,113 @@ +--- + +- name: "6.2.1.1 | PATCH | Ensure auditd packages are installed" + when: + - ubtu24cis_rule_6_2_1_1 + - "'auditd' not in ansible_facts.packages or + 'audispd-plugins' not in ansible_facts.packages" + tags: + - level2-server + - level2-workstation + - patch + - rule_6.2.1.1 + - NIST800-53R5_AU-2 + - NIST800-53R5_AU-3 + - NIST800-53R5_AU-12 + - NIST800-53R5_SI-5 + - auditd + ansible.builtin.package: + name: ['auditd', 'audispd-plugins'] + state: present + +- name: "6.2.1.2 | PATCH | Ensure auditd service is enabled and active" + when: + - ubtu24cis_rule_6_2_1_2 + tags: + - level2-server + - level2-workstation + - patch + - rule_6.2.1.2 + - NIST800-53R5_AU-2 + - NIST800-53R5_AU-3 + - NIST800-53R5_AU-12 + - auditd + ansible.builtin.service: + name: auditd + state: started + enabled: true + masked: false + +- name: "6.2.1.3 | PATCH | Ensure auditing for processes that start prior to auditd is enabled" + when: + - ubtu24cis_rule_6_2_1_3 + tags: + - level2-server + - level2-workstation + - patch + - rule_6.2.1.3 + - NIST800-53R5_AU-2 + - NIST800-53R5_AU-3 + - NIST800-53R5_AU-12 + - auditd + block: + - name: "6.2.1.3 | AUDIT | Ensure auditing for processes that start prior to auditd is enabled | Get GRUB_CMDLINE_LINUX" + ansible.builtin.shell: grep "GRUB_CMDLINE_LINUX=" /etc/default/grub | cut -f2 -d'"' + changed_when: false + failed_when: false + check_mode: false + register: ubtu24cis_6_2_1_3_cmdline_settings + + - name: "6.2.1.3 | PATCH | Ensure auditing for processes that start prior to auditd is enabled | Add setting if doesn't exist" + when: "'audit=' not in ubtu24cis_6_2_1_3_cmdline_settings.stdout" + 
ansible.builtin.lineinfile: + path: /etc/default/grub + regexp: '^GRUB_CMDLINE_LINUX=' + line: 'GRUB_CMDLINE_LINUX="{{ ubtu24cis_6_2_1_3_cmdline_settings.stdout }} audit=1"' + notify: Grub update + + - name: "6.2.1.3 | PATCH | Ensure auditing for processes that start prior to auditd is enabled | Update setting if exists" + when: "'audit=' in ubtu24cis_6_2_1_3_cmdline_settings.stdout" + ansible.builtin.replace: + dest: /etc/default/grub + regexp: 'audit=([0-9]+)' + replace: 'audit=1' + after: '^GRUB_CMDLINE_LINUX="' + before: '"' + notify: Grub update + +- name: "6.2.1.4 | PATCH | Ensure audit_backlog_limit is sufficient" + when: + - ubtu24cis_rule_6_2_1_4 + tags: + - level2-server + - level2-workstation + - patch + - rule_6.2.1.4 + - NIST800-53R5_AU-2 + - NIST800-53R5_AU-3 + - NIST800-53R5_AU-12 + - auditd + block: + - name: "6.2.1.4 | PATCH | Ensure audit_backlog_limit is sufficient | Get current GRUB_CMDLINE_LINUX" + ansible.builtin.shell: grep "GRUB_CMDLINE_LINUX=" /etc/default/grub | cut -f2 -d'"' + changed_when: false + failed_when: false + check_mode: false + register: ubtu24cis_6_2_1_4_cmdline_settings + + - name: "6.2.1.4 | PATCH | Ensure audit_backlog_limit is sufficient | Add setting if doesn't exist" + when: "'audit_backlog_limit=' not in ubtu24cis_6_2_1_4_cmdline_settings.stdout" + ansible.builtin.lineinfile: + path: /etc/default/grub + regexp: '^GRUB_CMDLINE_LINUX=' + line: 'GRUB_CMDLINE_LINUX="{{ ubtu24cis_6_2_1_4_cmdline_settings.stdout }} audit_backlog_limit={{ ubtu24cis_audit_back_log_limit }}"' + notify: Grub update + + - name: "6.2.1.4 | PATCH | Ensure audit_backlog_limit is sufficient | Update setting if exists" + ansible.builtin.replace: + dest: /etc/default/grub + regexp: 'audit_backlog_limit=([0-9]+)' + replace: 'audit_backlog_limit={{ ubtu24cis_audit_back_log_limit }}' + after: '^GRUB_CMDLINE_LINUX="' + before: '"' + notify: Grub update diff --git a/tasks/section_6/cis_6.2.2.x.yml b/tasks/section_6/cis_6.2.2.x.yml new file mode 100644 index 
0000000..039ea57 --- /dev/null +++ b/tasks/section_6/cis_6.2.2.x.yml @@ -0,0 +1,75 @@ +--- + +- name: "6.2.2.1 | PATCH | Ensure audit log storage size is configured" + when: + - ubtu24cis_rule_6_2_2_1 + tags: + - level2-server + - level2-workstation + - patch + - rule_6.2.2.1 + - NIST800-53R5_AU-8 + - auditd + ansible.builtin.lineinfile: + dest: /etc/audit/auditd.conf + regexp: "^max_log_file( |=)" + line: "max_log_file = {{ ubtu24cis_max_log_file_size }}" + state: present + notify: Restart auditd + +- name: "6.2.2.2 | PATCH | Ensure audit logs are not automatically deleted" + when: + - ubtu24cis_rule_6_2_2_2 + tags: + - level2-server + - level2-workstation + - patch + - rule_6.2.2.2 + - NIST800-53R5_AU-8 + - auditd + ansible.builtin.lineinfile: + path: /etc/audit/auditd.conf + regexp: '^max_log_file_action' + line: "max_log_file_action = {{ ubtu24cis_auditd_max_log_file_action }}" + notify: Restart auditd + +- name: "6.2.2.3 | PATCH | Ensure system is disabled when audit logs are full" + when: + - ubtu24cis_rule_6_2_2_3 + tags: + - level2-server + - level2-workstation + - patch + - rule_6.2.2.3 + - NIST800-53R5_AU-8 + - auditd + ansible.builtin.lineinfile: + path: /etc/audit/auditd.conf + regexp: "{{ item.regexp }}" + line: "{{ item.line }}" + with_items: + - { regexp: '^disk_full_action', line: "disk_full_action = {{ ubtu24cis_auditd_disk_full_action }}" } + - { regexp: '^disk_error_action', line: "disk_error_action = {{ ubtu24cis_auditd_disk_error_action }}" } + notify: Restart auditd + +- name: "6.2.2.4 | PATCH | Ensure system warns when audit logs are low on space" + when: + - ubtu24cis_rule_6_2_2_4 + tags: + - level2-server + - level2-workstation + - patch + - auditd + - rule_6.2.2.4 + - NIST800-53R5_AU-2 + - NIST800-53R5_AU-3 + - NIST800-53R5_AU-12 + - NIST800-53R5_SI-5 + ansible.builtin.lineinfile: + path: /etc/audit/auditd.conf + regexp: "{{ item.regexp }}" + line: "{{ item.line }}" + loop: + - { regexp: '^admin_space_left_action', line: 
'admin_space_left_action = {{ ubtu24cis_auditd_admin_space_left_action }}' } + - { regexp: '^space_left_action', line: 'space_left_action = {{ ubtu24cis_auditd_space_left_action }}' } + notify: Restart auditd diff --git a/tasks/section_6/cis_6.2.3.x.yml b/tasks/section_6/cis_6.2.3.x.yml new file mode 100644 index 0000000..979819f --- /dev/null +++ b/tasks/section_6/cis_6.2.3.x.yml @@ -0,0 +1,301 @@ +--- + +- name: "6.2.3.1 | PATCH | Ensure changes to system administration scope (sudoers) is collected" + when: + - ubtu24cis_rule_6_2_3_1 + tags: + - level2-server + - level2-workstation + - patch + - rule_6.2.3.1 + - NIST800-53R5_AU-3 + - auditd + ansible.builtin.set_fact: + update_audit_template: true + +- name: "6.2.3.2 | PATCH | Ensure actions as another user are always logged" + when: + - ubtu24cis_rule_6_2_3_2 + tags: + - level2-server + - level2-workstation + - patch + - rule_6.2.3.2 + - NIST800-53R5_AU-3 + - auditd + ansible.builtin.set_fact: + update_audit_template: true + +- name: "6.2.3.3 | PATCH | Ensure events that modify the sudo log file are collected" + when: + - ubtu24cis_rule_6_2_3_3 + tags: + - level2-server + - level2-workstation + - patch + - rule_6.2.3.3 + - NIST800-53R5_AU-3 + - auditd + ansible.builtin.set_fact: + update_audit_template: true + +- name: "6.2.3.4 | PATCH | Ensure events that modify date and time information are collected" + when: + - ubtu24cis_rule_6_2_3_4 + tags: + - level2-server + - level2-workstation + - patch + - rule_6.2.3.4 + - NIST800-53R5_AU-3 + - NIST800-53R5_CM-6 + - auditd + ansible.builtin.set_fact: + update_audit_template: true + +- name: "6.2.3.5 | PATCH | Ensure events that modify the system's network environment are collected" + when: + - ubtu24cis_rule_6_2_3_5 + tags: + - level2-server + - level2-workstation + - patch + - rule_6.2.3.5 + - NIST800-53R5_AU-3 + - NIST800-53R5_CM-6 + - auditd + ansible.builtin.set_fact: + update_audit_template: true + +- name: "6.2.3.6 | PATCH | Ensure use of privileged commands is 
collected" + when: + - ubtu24cis_rule_6_2_3_6 + tags: + - level2-server + - level2-workstation + - patch + - rule_6.2.3.6 + - NIST800-53R5_AU-3 + - auditd + block: + - name: "6.2.3.6 | AUDIT | Ensure use of privileged commands is collected | Get list of privileged programs" + ansible.builtin.shell: for i in $(findmnt -n -l -k -it $(awk '/nodev/ { print $2 }' /proc/filesystems | paste -sd,) | grep -Pv "noexec|nosuid" | awk '{print $1}'); do find $i -xdev -type f -perm -4000 -o -type f -perm -2000 2>/dev/null; done + register: priv_procs + changed_when: false + check_mode: false + + - name: "6.2.3.6 | PATCH | Ensure use of privileged commands is collected | Set privileged rules" + ansible.builtin.set_fact: + update_audit_template: true + +- name: "6.2.3.7 | PATCH | Ensure unsuccessful file access attempts are collected" + when: + - ubtu24cis_rule_6_2_3_7 + tags: + - level2-server + - level2-workstation + - patch + - rule_6.2.3.7 + - NIST800-53R5_AU-3 + - auditd + ansible.builtin.set_fact: + update_audit_template: true + +- name: "6.2.3.8 | PATCH | Ensure events that modify user/group information are collected" + when: + - ubtu24cis_rule_6_2_3_8 + tags: + - level2-server + - level2-workstation + - patch + - rule_6.2.3.8 + - NIST800-53R5_AU-3 + - auditd + ansible.builtin.set_fact: + update_audit_template: true + +- name: "6.2.3.9 | PATCH | Ensure discretionary access control permission modification events are collected" + when: + - ubtu24cis_rule_6_2_3_9 + tags: + - level2-server + - level2-workstation + - patch + - rule_6.2.3.9 + - NIST800-53R5_AU-3 + - NIST800-53R5_CM-6 + - auditd + ansible.builtin.set_fact: + update_audit_template: true + +- name: "6.2.3.10 | PATCH | Ensure successful file system mounts are collected" + when: + - ubtu24cis_rule_6_2_3_10 + tags: + - level2-server + - level2-workstation + - patch + - rule_6.2.3.10 + - NIST800-53R5_CM-6 + - auditd + ansible.builtin.set_fact: + update_audit_template: true + +- name: "6.2.3.11 | PATCH | Ensure session 
initiation information is collected" + when: + - ubtu24cis_rule_6_2_3_11 + tags: + - level2-server + - level2-workstation + - patch + - rule_6.2.3.11 + - NIST800-53R5_AU-3 + - auditd + ansible.builtin.set_fact: + update_audit_template: true + +- name: "6.2.3.12 | PATCH | Ensure login and logout events are collected" + when: + - ubtu24cis_rule_6_2_3_12 + tags: + - level2-server + - level2-workstation + - patch + - rule_6.2.3.12 + - NIST800-53R5_AU-3 + - auditd + ansible.builtin.set_fact: + update_audit_template: true + +- name: "6.2.3.13 | PATCH | Ensure file deletion events by users are collected" + when: + - ubtu24cis_rule_6_2_3_13 + tags: + - level2-server + - level2-workstation + - patch + - rule_6.2.3.13 + - NIST800-53R5_AU-12 + - NIST800-53R5_SC-7 + - auditd + ansible.builtin.set_fact: + update_audit_template: true + +- name: "6.2.3.14 | PATCH | Ensure events that modify the system's Mandatory Access Controls are collected" + when: + - ubtu24cis_rule_6_2_3_14 + tags: + - level2-server + - level2-workstation + - patch + - rule_6.2.3.14 + - NIST800-53R5_AU-3 + - NIST800-53R5_CM-6 + - auditd + ansible.builtin.set_fact: + update_audit_template: true + +- name: "6.2.3.15 | PATCH | Ensure successful and unsuccessful attempts to use the chcon command are recorded" + when: + - ubtu24cis_rule_6_2_3_15 + tags: + - level2-server + - level2-workstation + - patch + - rule_6.2.3.15 + - NIST800-53R5_AU-2 + - NIST800-53R5_AU-12 + - NIST800-53R5_SI-5 + - auditd + ansible.builtin.set_fact: + update_audit_template: true + +- name: "6.2.3.16 | PATCH | Ensure successful and unsuccessful attempts to use the setfacl command are recorded" + when: + - ubtu24cis_rule_6_2_3_16 + tags: + - level2-server + - level2-workstation + - patch + - rule_6.2.3.16 + - NIST800-53R5_AU-2 + - NIST800-53R5_AU-12 + - NIST800-53R5_SI-5 + - auditd + ansible.builtin.set_fact: + update_audit_template: true + +- name: "6.2.3.17 | PATCH | Ensure successful and unsuccessful attempts to use the chacl command 
are recorded" + when: + - ubtu24cis_rule_6_2_3_17 + tags: + - level2-server + - level2-workstation + - patch + - rule_6.2.3.17 + - NIST800-53R5_AU-2 + - NIST800-53R5_AU-12 + - NIST800-53R5_SI-5 + - auditd + ansible.builtin.set_fact: + update_audit_template: true + +- name: "6.2.3.18 | PATCH | Ensure successful and unsuccessful attempts to use the usermod command are recorded" + when: + - ubtu24cis_rule_6_2_3_18 + tags: + - level2-server + - level2-workstation + - patch + - rule_6.2.3.18 + - NIST800-53R5_AU-2 + - NIST800-53R5_AU-12 + - NIST800-53R5_SI-5 + - auditd + ansible.builtin.set_fact: + update_audit_template: true + +- name: "6.2.3.19 | PATCH | Ensure kernel module loading and unloading is collected" + when: + - ubtu24cis_rule_6_2_3_19 + tags: + - level2-server + - level2-workstation + - patch + - rule_6.2.3.19 + - NIST800-53R5_AU-3 + - NIST800-53R5_CM-6 + - auditd + ansible.builtin.set_fact: + update_audit_template: true + +- name: "6.2.3.20 | PATCH | Ensure the audit configuration is immutable" + when: + - ubtu24cis_rule_6_2_3_20 + tags: + - level2-server + - level2-workstation + - patch + - rule_6.2.3.20 + - NIST800-53R5_AC-3 + - NIST800-53R5_AU-3 + - NIST800-53R5_AU-12 + - NIST800-53R5_MP-2 + - auditd + ansible.builtin.set_fact: + update_audit_template: true + +- name: "6.2.3.21 | PATCH | Ensure the running and on disk configuration is the same" + when: + - ubtu24cis_rule_6_2_3_21 + tags: + - level2-server + - level2-workstation + - scored + - patch + - rule_6.2.3.21 + - NIST800-53R5_AU-3 + - auditd + ansible.builtin.shell: augenrules --check + changed_when: false + register: ubtu24cis_rule_6_2_3_21_augen_check diff --git a/tasks/section_6/cis_6.2.4.x.yml b/tasks/section_6/cis_6.2.4.x.yml new file mode 100644 index 0000000..10bd3da --- /dev/null +++ b/tasks/section_6/cis_6.2.4.x.yml @@ -0,0 +1,172 @@ +--- + +- name: | + "6.2.4.1 | PATCH | Ensure audit log files mode is configured" + "6.2.4.2 | PATCH | Ensure audit log files owner is configured" + "6.2.4.3 
| PATCH | Ensure audit log files group owner is configured" + when: + - ubtu24cis_rule_6_2_4_1 or + ubtu24cis_rule_6_2_4_2 or + ubtu24cis_rule_6_2_4_3 + tags: + - level1-server + - level1-workstation + - patch + - auditd + - rule_6.2.4.1 + - rule_6.2.4.2 + - rule_6.2.4.3 + - NIST800-53R5_AU-3 + ansible.builtin.file: + path: "{{ prelim_auditd_logfile.stdout }}" + owner: root + group: root + mode: 'u-x,g-wx,o-rwx' + +- name: "6.2.4.4 | PATCH | Ensure the audit log file directory mode is configured" + when: + - ubtu24cis_rule_6_2_4_4 + tags: + - level1-server + - level1-workstation + - patch + - auditd + - rule_6.2.4.4 + - NIST800-53R5_AU-3 + block: + - name: "6.2.4.4 | AUDIT | Ensure the audit log file directory mode is configured | get current permissions" + ansible.builtin.stat: + path: "{{ prelim_auditd_logfile.stdout | dirname }}" + register: auditlog_dir + + - name: "6.2.4.4 | PATCH | Ensure the audit log file directory mode is configured | set permissions" + ansible.builtin.file: + path: "{{ auditlog_dir.stat.path }}" + state: directory + mode: 'g-w,o-rwx' + +- name: "6.2.4.5 | PATCH | Ensure audit configuration files mode is configured" + when: + - ubtu24cis_rule_6_2_4_5 + tags: + - level1-server + - level1-workstation + - patch + - auditd + - rule_6.2.4.5 + - NIST800-53R5_AU-3 + ansible.builtin.file: + path: "{{ item.path }}" + mode: 'u-x,g-wx,o-rwx' + loop: "{{ prelim_auditd_conf_files.files }}" + loop_control: + label: "{{ item.path }}" + +- name: "6.2.4.6 | PATCH | Ensure audit configuration files owner is configured" + when: + - ubtu24cis_rule_6_2_4_6 + tags: + - level1-server + - level1-workstation + - patch + - auditd + - rule_6.2.4.6 + - NIST800-53R5_AU-3 + ansible.builtin.file: + path: "{{ item.path }}" + owner: root + loop: "{{ prelim_auditd_conf_files.files }}" + loop_control: + label: "{{ item.path }}" + +- name: "6.2.4.7 | PATCH | Ensure audit configuration files group owner is configured" + when: + - ubtu24cis_rule_6_2_4_7 + tags: + - 
level1-server + - level1-workstation + - patch + - auditd + - rule_6.2.4.7 + - NIST800-53R5_AU-3 + ansible.builtin.file: + path: "{{ item.path }}" + group: root + loop: "{{ prelim_auditd_conf_files.files }}" + loop_control: + label: "{{ item.path }}" + +- name: "6.2.4.8 | PATCH | Ensure audit tools mode is configured" + when: + - ubtu24cis_rule_6_2_4_8 + tags: + - level1-server + - level1-workstation + - patch + - auditd + - rule_6.2.4.8 + - NIST800-53R5_AU-3 + block: + - name: "6.2.4.8 | AUDIT | Ensure audit tools mode is configured | get current mode" + ansible.builtin.stat: + path: "{{ item }}" + register: "audit_bins" + loop: + - /sbin/auditctl + - /sbin/aureport + - /sbin/ausearch + - /sbin/autrace + - /sbin/auditd + - /sbin/augenrules + + - name: "6.2.4.8 | PATCH | Ensure audit tools mode is configured | set if required" + when: not item.stat.mode is match('07(0|5)0') + ansible.builtin.file: + path: "{{ item.item }}" + mode: '0750' + loop: "{{ audit_bins.results }}" + loop_control: + label: "{{ item.item }}" + +- name: "6.2.4.9 | PATCH | Ensure audit tools owner is configured" + when: + - ubtu24cis_rule_6_2_4_9 + tags: + - level1-server + - level1-workstation + - patch + - auditd + - rule_6.2.4.9 + - NIST800-53R5_AU-3 + ansible.builtin.file: + path: "{{ item }}" + owner: root + group: root + loop: + - /sbin/auditctl + - /sbin/aureport + - /sbin/ausearch + - /sbin/autrace + - /sbin/auditd + - /sbin/augenrules + +- name: "6.2.4.10 | PATCH | Ensure audit tools group owner is configured" + when: + - ubtu24cis_rule_6_2_4_10 + tags: + - level1-server + - level1-workstation + - patch + - auditd + - rule_6.2.4.10 + - NIST800-53R5_AU-3 + ansible.builtin.file: + path: "{{ item }}" + group: root + loop: + - /sbin/auditctl + - /sbin/aureport + - /sbin/ausearch + - /sbin/autrace + - /sbin/auditd + - /sbin/augenrules diff --git a/tasks/section_6/cis_6.3.x.yml b/tasks/section_6/cis_6.3.x.yml new file mode 100644 index 0000000..a32a0ae --- /dev/null +++ 
b/tasks/section_6/cis_6.3.x.yml @@ -0,0 +1,110 @@ +--- + +- name: "6.3.1 | PATCH | Ensure AIDE is installed" + when: + - ubtu24cis_rule_6_3_1 + - ubtu24cis_config_aide + tags: + - level1-server + - level1-workstation + - patch + - rule_6.3.1 + - NIST800-53R5_AU-2 + - aide + block: + - name: "6.3.1 | PATCH | Ensure AIDE is installed" + when: + - "'aide' not in ansible_facts.packages or + 'aide-common' not in ansible_facts.packages" + ansible.builtin.package: + name: ['aide', 'aide-common'] + state: present + update_cache: true + register: ubtu24cis_rule_6_3_1_aide_added + + - name: "6.3.1 | PATCH | Ensure AIDE is installed | Recapture packages" + when: ubtu24cis_rule_6_3_1_aide_added.skipped is not defined + ansible.builtin.package_facts: + manager: auto + + - name: "6.3.1 | PATCH | Ensure AIDE is installed | Configure AIDE" + ansible.builtin.shell: aideinit && mv /var/lib/aide/aide.db.new /var/lib/aide/aide.db + args: + creates: /var/lib/aide/aide.db + changed_when: false + failed_when: false + async: "{{ ubtu24cis_aide_init.async }}" + poll: "{{ ubtu24cis_aide_init.poll }}" + when: not ansible_check_mode + +- name: "6.3.2 | PATCH | Ensure filesystem integrity is regularly checked" + when: + - ubtu24cis_config_aide + - ubtu24cis_rule_6_3_2 + tags: + - level1-server + - level1-workstation + - patch + - rule_6.3.2 + - NIST800-53R5_AU-2 + - cron + - aide + block: + - name: "6.3.2 | PATCH | Ensure filesystem integrity is regularly checked | cron" + when: ubtu24cis_aide_scan == 'cron' + ansible.builtin.cron: + name: Run AIDE integrity check + cron_file: "{{ ubtu24cis_aide_cron['cron_file'] }}" + user: "{{ ubtu24cis_aide_cron['cron_user'] }}" + minute: "{{ ubtu24cis_aide_cron['aide_minute'] | default('0') }}" + hour: "{{ ubtu24cis_aide_cron['aide_hour'] | default('5') }}" + day: "{{ ubtu24cis_aide_cron['aide_day'] | default('*') }}" + month: "{{ ubtu24cis_aide_cron['aide_month'] | default('*') }}" + weekday: "{{ ubtu24cis_aide_cron['aide_weekday'] | default('*') }}" + 
job: "{{ ubtu24cis_aide_cron['aide_job'] }}" + + - name: "6.3.2 | PATCH | Ensure filesystem integrity is regularly checked | timer template" + when: ubtu24cis_aide_scan == 'timer' + ansible.builtin.template: + src: "{{ item }}.j2" + dest: "/{{ item }}" + owner: root + group: root + mode: '0644' + loop: + - etc/systemd/system/aidecheck.service + - etc/systemd/system/aidecheck.timer + + - name: "6.3.2 | PATCH | Ensure filesystem integrity is regularly checked | timer service" + when: ubtu24cis_aide_scan == 'timer' + ansible.builtin.systemd: + name: "{{ item }}" + state: started + enabled: true + daemon_reload: true + loop: + - aidecheck.service + - aidecheck.timer + +- name: "6.3.3 | Ensure cryptographic mechanisms are used to protect the integrity of audit tools" + when: + - ubtu24cis_rule_6_3_3 + tags: + - level1-server + - level1-workstation + - aide + - file_integrity + - patch + - rule_6.3.3 + - NIST800-53R5_NA + ansible.builtin.blockinfile: + path: /etc/aide/aide.conf + marker: "# {mark} Audit tools - CIS benchmark - Ansible-lockdown" + block: | + /usr/sbin/auditctl p+i+n+u+g+s+b+acl+xattrs+sha512 + /usr/sbin/auditd p+i+n+u+g+s+b+acl+xattrs+sha512 + /usr/sbin/ausearch p+i+n+u+g+s+b+acl+xattrs+sha512 + /usr/sbin/aureport p+i+n+u+g+s+b+acl+xattrs+sha512 + /usr/sbin/autrace p+i+n+u+g+s+b+acl+xattrs+sha512 + /usr/sbin/augenrules p+i+n+u+g+s+b+acl+xattrs+sha512 + validate: aide -D --config %s diff --git a/tasks/section_6/main.yml b/tasks/section_6/main.yml new file mode 100644 index 0000000..5a3983b --- /dev/null +++ b/tasks/section_6/main.yml @@ -0,0 +1,44 @@ +--- + +- name: "SECTION | 6.1.1.x | Configure systemd-journald service" + when: ubtu24cis_syslog_service == 'journald' + ansible.builtin.import_tasks: + file: cis_6.1.1.x.yml + +- name: "SECTION | 6.1.2.x | Configure journald" + when: ubtu24cis_syslog_service == 'journald' + ansible.builtin.import_tasks: + file: cis_6.1.2.x.yml + +- name: "SECTION | 6.1.3.x | Configure rsyslog" + when: 
ubtu24cis_syslog_service == 'rsyslog' + ansible.builtin.import_tasks: + file: cis_6.1.3.x.yml + +- name: "SECTION | 6.1.3.8 | Logrotate" + ansible.builtin.import_tasks: + file: cis_6.1.3.8.yml + +- name: "SECTION | 6.1.4.1 | Configure Logfiles" + ansible.builtin.import_tasks: + file: cis_6.1.4.1.yml + +- name: "SECTION | 6.2.1 | Configure auditd Service" + ansible.builtin.import_tasks: + file: cis_6.2.1.x.yml + +- name: "SECTION | 6.2.2 | Configure data retention" + ansible.builtin.import_tasks: + file: cis_6.2.2.x.yml + +- name: "SECTION | 6.2.3 | Configure auditd rules" + ansible.builtin.import_tasks: + file: cis_6.2.3.x.yml + +- name: "SECTION | 6.2.4 | Configure auditd file access" + ansible.builtin.import_tasks: + file: cis_6.2.4.x.yml + +- name: "SECTION | 6.3.x | Configure Filesystem Integrity Checking" + ansible.builtin.import_tasks: + file: cis_6.3.x.yml diff --git a/tasks/section_7/cis_7.1.x.yml b/tasks/section_7/cis_7.1.x.yml new file mode 100644 index 0000000..4655766 --- /dev/null +++ b/tasks/section_7/cis_7.1.x.yml @@ -0,0 +1,326 @@ +--- + +- name: "7.1.1 | PATCH | Ensure permissions on /etc/passwd are configured" + when: + - ubtu24cis_rule_7_1_1 + tags: + - level1-server + - level1-workstation + - patch + - permissions + - rule_7.1.1 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + ansible.builtin.file: + path: /etc/passwd + owner: root + group: root + mode: 'u-x,go-wx' + +- name: "7.1.2 | PATCH | Ensure permissions on /etc/passwd- are configured" + when: + - ubtu24cis_rule_7_1_2 + tags: + - level1-server + - level1-workstation + - patch + - permissions + - rule_7.1.2 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + ansible.builtin.file: + path: /etc/passwd- + owner: root + group: root + mode: 'u-x,go-wx' + +- name: "7.1.3 | PATCH | Ensure permissions on /etc/group are configured" + when: + - ubtu24cis_rule_7_1_3 + tags: + - level1-server + - level1-workstation + - patch + - permissions + - rule_7.1.3 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + 
ansible.builtin.file: + path: /etc/group + owner: root + group: root + mode: 'u-x,go-wx' + +- name: "7.1.4 | PATCH | Ensure permissions on /etc/group- are configured" + when: + - ubtu24cis_rule_7_1_4 + tags: + - level1-server + - level1-workstation + - patch + - permissionss + - rule_7.1.4 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + ansible.builtin.file: + path: /etc/group- + owner: root + group: root + mode: 'u-x,go-wx' + +- name: "7.1.5 | PATCH | Ensure permissions on /etc/shadow are configured" + when: + - ubtu24cis_rule_7_1_5 + tags: + - level1-server + - level1-workstation + - patch + - permissions + - rule_7.1.5 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + ansible.builtin.file: + path: /etc/shadow + owner: root + group: root + mode: 'u-x,g-wx,o-rwx' + +- name: "7.1.6 | PATCH | Ensure permissions on /etc/shadow- are configured" + when: + - ubtu24cis_rule_7_1_6 + tags: + - level1-server + - level1-workstation + - patch + - permissions + - rule_7.1.6 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + ansible.builtin.file: + path: /etc/shadow- + owner: root + group: root + mode: 'u-x,g-wx,o-rwx' + +- name: "7.1.7 | PATCH | Ensure permissions on /etc/gshadow are configured" + when: + - ubtu24cis_rule_7_1_7 + tags: + - level1-server + - level1-workstation + - patch + - permissions + - rule_7.1.7 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + ansible.builtin.file: + path: /etc/gshadow + owner: root + group: root + mode: 'u-x,g-wx,o-rwx' + +- name: "7.1.8 | PATCH | Ensure permissions on /etc/gshadow- are configured" + when: + - ubtu24cis_rule_7_1_8 + tags: + - level1-server + - level1-workstation + - patch + - permissions + - rule_7.1.8 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + ansible.builtin.file: + path: /etc/gshadow- + owner: root + group: root + mode: 'u-x,g-wx,o-rwx' + +- name: "7.1.9 | PATCH | Ensure permissions on /etc/shells are configured" + when: + - ubtu24cis_rule_7_1_9 + tags: + - level1-server + - level1-workstation + - patch + - permissions + - 
rule_7.1.9 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + ansible.builtin.file: + path: /etc/shells + owner: root + group: root + mode: 'u-x,go-wx' + +- name: "7.1.10 | PATCH | Ensure permissions on /etc/security/opasswd are configured" + loop: + - /etc/security/opasswd + - /etc/security/opasswd.old + when: + - ubtu24cis_rule_7_1_10 + tags: + - level1-server + - level1-workstation + - patch + - permissions + - rule_7.1.10 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + ansible.builtin.file: + path: "{{ item }}" + owner: root + group: root + mode: 'u-x,go-rwx' + +- name: "7.1.11 | PATCH | Ensure world writable files and directories are secured" + when: + - ubtu24cis_rule_7_1_11 + tags: + - level1-server + - level1-workstation + - patch + - files + - permissions + - rule_7.1.11 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + block: + - name: "7.1.11 | AUDIT | Ensure world writable files and directories are secured | Get list of world-writable files" + ansible.builtin.shell: df --local -P | awk {'if (NR!=1) print $6'} | xargs -I '{}' find '{}' -xdev -type f -perm -0002 + failed_when: false + changed_when: false + register: ubtu24cis_worldwriteable + + - name: "7.1.11 | PATCH | Ensure world writable files and directories are secured | Adjust world-writable files if they exist (Configurable)" + ansible.builtin.file: + path: '{{ item }}' + mode: o-w + state: touch + loop: "{{ ubtu24cis_worldwriteable.stdout_lines }}" + when: + - ubtu24cis_worldwriteable.stdout_lines is defined + - ubtu24cis_no_world_write_adjust + + - name: "7.1.11 | PATCH | Ensure world writable files and directories are secured | sticky bit set on world-writable directories" + ansible.builtin.shell: df --local -P | awk {'if (NR!=1) print $6'} | xargs -I '{}' find '{}' -xdev -type d -perm -0002 2>/dev/null | xargs chmod a+t + changed_when: false + failed_when: false + +- name: "7.1.12 | PATCH | Ensure no files or directories without an owner and a group exist" + when: + - ubtu24cis_rule_7_1_12 + 
tags: + - level1-server + - level1-workstation + - patch + - rule_7.1.12 + - NIST800-53R5_AC-3 + - NIST800-53R5_MP-2 + - permissions + vars: + warn_control_id: '7.1.12' + block: + - name: "7.1.12 | AUDIT | Ensure no files or directories without an owner and a group exist | Get list files or directories" + ansible.builtin.shell: find {{ ubtu24cis_exclude_unowned_search_path }} {{ item.mount }} -xdev \( -nouser -o -nogroup \) -not -fstype nfs + changed_when: false + failed_when: false + check_mode: false + register: discovered_unowned_files + with_items: + - "{{ ansible_facts.mounts }}" + loop_control: + label: "{{ item.mount }}" + + - name: "7.1.12 | AUDIT | Ensure no files or directories without an owner and a group exist | Flatten no_user_items results for easier use" + ansible.builtin.set_fact: + discovered_unowned_files_flatten: "{{ discovered_unowned_files.results | map(attribute='stdout_lines') | flatten }}" + + - name: "7.1.12 | AUDIT | Ensure no files or directories without an owner and a group exist | Alert on unowned files and directories" + when: + - not ubtu24cis_ownership_adjust + - discovered_unowned_files_flatten | length > 0 + ansible.builtin.debug: + msg: + - "Warning!! 
You have unowned files and are configured to not auto-remediate for this task" + - "Please review the files/directories below and assign an owner" + - "{{ discovered_unowned_files_flatten }}" + + - name: "7.1.12 | PATCH | Ensure no files or directories without an owner and a group exist | Set files/directories to configured owner and group" + when: + - ubtu24cis_ownership_adjust + - discovered_unowned_files_flatten | length > 0 + ansible.builtin.file: + path: "{{ item }}" + owner: "{{ ubtu24cis_unowned_owner }}" + group: "{{ ubtu24cis_unowned_group }}" + with_items: + - "{{ discovered_unowned_files_flatten }}" + + - name: "7.1.12 | AUDIT | Ensure no files or directories without an owner and a group exist | Warn Count" + when: + - not ubtu24cis_ownership_adjust + - discovered_unowned_files_flatten | length > 0 + ansible.builtin.import_tasks: + file: warning_facts.yml + +- name: "7.1.13 | AUDIT | Ensure SUID and SGID files are reviewed" + when: + - ubtu24cis_rule_7_1_13 + tags: + - level1-server + - level1-workstation + - audit + - rule_7.1.13 + - NIST800-53R5_AC-3 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - NIST800-53R5_MP-2 + - permissions + vars: + warn_control_id: '7.1.13' + block: + - name: "7.1.13 | AUDIT | Ensure SUID and SGID files are reviewed | Find SUID and SGID" + ansible.builtin.shell: find {{ item.mount }} -xdev -type f \( -perm -02000 -o -perm -04000 \) -not -fstype nfs + changed_when: false + failed_when: false + check_mode: false + register: discovered_suid_sgid_files + with_items: + - "{{ ansible_facts.mounts }}" + loop_control: + label: "{{ item.mount }}" + + - name: "7.1.13 | AUDIT | Audit SUID executables | Flatten suid_executables results for easier use" + ansible.builtin.set_fact: + discovered_suid_sgid_files_flatten: "{{ discovered_suid_sgid_files.results | map(attribute='stdout_lines') | flatten }}" + + - name: "7.1.13 | AUDIT | Audit SUID executables | Alert SUID executables 
exist" + when: + - discovered_suid_sgid_files_flatten | length > 0 + - not ubtu24cis_suid_sgid_adjust + ansible.builtin.debug: + msg: + - "Warning!! You have SUID executables" + - "The files are listed below, please confirm the integrity of these binaries" + - "{{ discovered_suid_sgid_files_flatten }}" + + - name: "7.1.13 | PATCH | Audit SUID executables | Remove SUID bit" + when: + - ubtu24cis_suid_sgid_adjust + - discovered_suid_sgid_files_flatten | length > 0 + ansible.builtin.file: + path: "{{ item }}" + mode: 'u-s' + with_items: + - "{{ discovered_suid_sgid_files_flatten }}" + + - name: "7.1.13 | AUDIT | Audit SUID executables | Warn Count" + ansible.builtin.import_tasks: + file: warning_facts.yml + when: + - discovered_suid_sgid_files_flatten | length > 0 + - not ubtu24cis_suid_sgid_adjust diff --git a/tasks/section_7/cis_7.2.x.yml b/tasks/section_7/cis_7.2.x.yml new file mode 100644 index 0000000..895c0b2 --- /dev/null +++ b/tasks/section_7/cis_7.2.x.yml @@ -0,0 +1,348 @@ +--- + +- name: "7.2.1 | AUDIT | Ensure accounts in /etc/passwd use shadowed passwords" + when: + - ubtu24cis_rule_7_2_1 + tags: + - level1-server + - level1-workstation + - audit + - rule_7.2.1 + - NIST800-53R5_IA-5 + - user_accounts + vars: + warn_control_id: '7.2.1' + block: + - name: "7.2.1 | AUDIT | Ensure accounts in /etc/passwd use shadowed passwords | Get users not using shadowed passwords" + ansible.builtin.shell: awk -F':' '($2 != "x" ) { print $1}' /etc/passwd + changed_when: false + failed_when: false + register: discovered_nonshadowed_users + + - name: "7.2.1 | AUDIT | Ensure accounts in /etc/passwd use shadowed passwords | Warn on findings" + when: discovered_nonshadowed_users.stdout | length > 0 + ansible.builtin.debug: + msg: + - "Warning!! You have users that are not using a shadowed password. 
Please convert the below accounts to use a shadowed password" + - "{{ discovered_nonshadowed_users.stdout_lines }}" + + - name: "7.2.1 | WARNING | Ensure accounts in /etc/passwd use shadowed passwords | warn_count" + when: discovered_nonshadowed_users.stdout | length > 0 + ansible.builtin.import_tasks: + file: warning_facts.yml + +- name: "7.2.2 | PATCH | Ensure /etc/shadow password fields are not empty" + when: + - ubtu24cis_rule_7_2_2 + tags: + - level1-server + - level1-workstation + - patch + - rule_7.2.2 + - NIST800-53R5_IA-5 + - user + - permissions + block: + - name: "7.2.2 | AUDIT | Ensure /etc/shadow password fields are not empty | Find users with no password" + ansible.builtin.shell: awk -F":" '($2 == "" ) { print $1 }' /etc/shadow + changed_when: false + check_mode: false + register: discovered_empty_password_acct + + - name: "7.2.2 | PATCH | Ensure /etc/shadow password fields are not empty | Lock users with empty password" + when: discovered_empty_password_acct.stdout | length > 0 + ansible.builtin.user: + name: "{{ item }}" + password_lock: true + loop: "{{ discovered_empty_password_acct.stdout_lines }}" + +- name: "7.2.3 | AUDIT | Ensure all groups in /etc/passwd exist in /etc/group" + when: + - ubtu24cis_rule_7_2_3 + tags: + - level1-server + - level1-workstation + - audit + - rule_7.2.3 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - groups + vars: + warn_control_id: '7.2.3' + block: + - name: "7.2.3 | AUDIT | Ensure all groups in /etc/passwd exist in /etc/group | Check /etc/passwd entries" + ansible.builtin.shell: pwck -r | grep 'no group' | awk '{ gsub("[:\47]",""); print $2}' + changed_when: false + failed_when: false + check_mode: false + register: discovered_passwd_gid_check + + - name: "7.2.3 | AUDIT | Ensure all groups in /etc/passwd exist in /etc/group | Print warning about users with invalid GIDs missing GID entries in /etc/group" + when: 
discovered_passwd_gid_check.stdout | length > 0 + ansible.builtin.debug: + msg: "Warning!! The following users have non-existent GIDs (Groups): {{ discovered_passwd_gid_check.stdout_lines | join (', ') }}" + + - name: "7.2.3 | WARNING | Ensure all groups in /etc/passwd exist in /etc/group | warn_count" + when: discovered_passwd_gid_check.stdout | length > 0 + ansible.builtin.import_tasks: + file: warning_facts.yml + +- name: "7.2.4 | PATCH | Ensure shadow group is empty" + when: + - ubtu24cis_rule_7_2_4 + tags: + - level1-server + - level1-workstation + - patch + - rule_7.2.4 + - NIST800-53R5_IA-5 + - user + vars: + warn_control_id: '7.2.4' + block: + - name: "7.2.4 | AUDIT | Ensure shadow group is empty | check users in group" + ansible.builtin.getent: + database: group + split: ':' + key: shadow + + - name: "7.2.4 | AUDIT | Ensure shadow group is empty | check users in group" + ansible.builtin.debug: + msg: "Warning!! - You have users in the shadow group" + when: ansible_facts.getent_group.shadow[2] | length > 0 + + - name: "7.2.4 | AUDIT | Ensure shadow group is empty | check users in group" + ansible.builtin.import_tasks: + file: warning_facts.yml + when: ansible_facts.getent_group.shadow[2] | length > 0 + +- name: "7.2.5 | AUDIT | Ensure no duplicate UIDs exist" + when: + - ubtu24cis_rule_7_2_5 + tags: + - level1-server + - level1-workstation + - audit + - rule_7.2.5 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - user + vars: + warn_control_id: '7.2.5' + block: + - name: "7.2.5 | AUDIT | Ensure no duplicate UIDs exist | Check for duplicate UIDs" + ansible.builtin.shell: "pwck -r | awk -F: '{if ($3 in uid) print $1 ; else uid[$3]}' /etc/passwd" + changed_when: false + failed_when: false + check_mode: false + register: discovered_user_uid_check + + - name: "7.2.5 | AUDIT | Ensure no duplicate UIDs exist | Print warning about users with duplicate UIDs" + when: discovered_user_uid_check.stdout | 
length > 0 + ansible.builtin.debug: + msg: "Warning!! The following users have UIDs that are duplicates: {{ discovered_user_uid_check.stdout_lines }}" + + - name: "7.2.5 | AUDIT | Ensure no duplicate UIDs exist | Set warning count" + when: discovered_user_uid_check.stdout | length > 0 + ansible.builtin.import_tasks: + file: warning_facts.yml + +- name: "7.2.6 | AUDIT | Ensure no duplicate GIDs exist" + when: + - ubtu24cis_rule_7_2_6 + tags: + - level1-server + - level1-workstation + - audit + - rule_7.2.6 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - groups + vars: + warn_control_id: '7.2.6' + block: + - name: "7.2.6 | AUDIT | Ensure no duplicate GIDs exist | Check for duplicate GIDs" + ansible.builtin.shell: "pwck -r | awk -F: '{if ($3 in users) print $1 ; else users[$3]}' /etc/group" + changed_when: false + failed_when: false + check_mode: false + register: discovered_user_gid_check + + - name: "7.2.6 | AUDIT | Ensure no duplicate GIDs exist | Print warning about users with duplicate GIDs" + when: discovered_user_gid_check.stdout | length > 0 + ansible.builtin.debug: + msg: "Warning!! 
The following groups have duplicate GIDs: {{ discovered_user_gid_check.stdout_lines }}" + + - name: "7.2.6 | AUDIT | Ensure no duplicate GIDs exist | Set warning count" + when: discovered_user_gid_check.stdout | length > 0 + ansible.builtin.import_tasks: + file: warning_facts.yml + +- name: "7.2.7 | AUDIT | Ensure no duplicate user names exist" + vars: + warn_control_id: '7.2.7' + when: + - ubtu24cis_rule_7_2_7 + tags: + - level1-server + - level1-workstation + - audit + - rule_7.2.7 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - user + block: + - name: "7.2.7 | AUDIT | Ensure no duplicate user names exist | Check for duplicate User Names" + ansible.builtin.shell: "pwck -r | awk -F: '{if ($1 in users) print $1 ; else users[$1]}' /etc/passwd" + changed_when: false + failed_when: false + check_mode: false + register: discovered_username_check + + - name: "7.2.7 | WARNING | Ensure no duplicate user names exist | Print warning about users with duplicate User Names" + when: discovered_username_check.stdout | length > 0 + ansible.builtin.debug: + msg: "Warning!! 
The following user names are duplicates: {{ discovered_username_check.stdout_lines }}" + + - name: "7.2.7 | WARNING | Ensure no duplicate user names exist | Set warning count" + when: discovered_username_check.stdout | length > 0 + ansible.builtin.import_tasks: + file: warning_facts.yml + +- name: "7.2.8 | AUDIT | Ensure no duplicate group names exist" + when: + - ubtu24cis_rule_7_2_8 + tags: + - level1-server + - level1-workstation + - audit + - rule_7.2.8 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - groups + vars: + warn_control_id: '7.2.8' + block: + - name: "7.2.8 | AUDIT | Ensure no duplicate group names exist | Check for duplicate group names" + ansible.builtin.shell: 'getent group | cut -d: -f1 | sort -n | uniq -d' + changed_when: false + failed_when: false + check_mode: false + register: discovered_group_check + + - name: "7.2.8 | AUDIT | Ensure no duplicate group names exist | Print warning about users with duplicate group names" + when: discovered_group_check.stdout | length > 0 + ansible.builtin.debug: + msg: "Warning!! 
The following group names are duplicates: {{ discovered_group_check.stdout_lines }}" + + - name: "7.2.8 | AUDIT | Ensure no duplicate group names exist | Set warning count" + when: discovered_group_check.stdout | length > 0 + ansible.builtin.import_tasks: + file: warning_facts.yml + +- name: "7.2.9 | PATCH | Ensure local interactive user home directories are configured" + when: + - ubtu24cis_rule_7_2_9 + tags: + - level1-server + - level1-workstation + - patch + - users + - rule_7.2.9 + - NIST800-53R5_NA + block: + - name: "7.2.9 | PATCH | Ensure local interactive user home directories are configured | Create dir if absent" + ansible.builtin.file: + path: "{{ item.dir }}" + state: directory + owner: "{{ item.id }}" + group: "{{ item.gid }}" + loop: "{{ ubtu24cis_passwd | selectattr('uid', '>=', min_int_uid | int ) | selectattr('uid', '<=', max_int_uid | int ) | list }}" + loop_control: + label: "{{ item.id }}" + + # set default ACLs so the homedir has an effective umask of 0027 + - name: "7.2.9 | PATCH | Ensure local interactive user home directories are configured | Set group ACL" + when: not system_is_container + ansible.posix.acl: + path: "{{ item }}" + default: true + etype: group + permissions: rx + state: present + loop: "{{ prelim_interactive_users_home.stdout_lines }}" + + - name: "7.2.9 | PATCH | Ensure local interactive user home directories are configured | Set other ACL" + when: not system_is_container + ansible.posix.acl: + path: "{{ item }}" + default: true + etype: other + permissions: 0 + state: present + loop: "{{ prelim_interactive_users_home.stdout_lines }}" + +- name: "7.2.10 | PATCH | Ensure local interactive user dot files access is configured" + when: + - ubtu24cis_rule_7_2_10 + - ubtu24cis_disruption_high + tags: + - level1-server + - level1-workstation + - patch + - rule_7.2.10 + - NIST800-53R5_CM-1 + - NIST800-53R5_CM-2 + - NIST800-53R5_CM-6 + - NIST800-53R5_CM-7 + - NIST800-53R5_IA-5 + - user + vars: + warn_control_id: '7.2.10' + 
block: + - name: "7.2.10 | AUDIT | Ensure local interactive user dot files access is configured | Check for files" + ansible.builtin.shell: find /home/ -name "\.*" -perm /g+w,o+w + changed_when: false + failed_when: discovered_homedir_dot_files.rc not in [ 0, 1 ] + check_mode: false + register: discovered_homedir_dot_files + + - name: "7.2.10 | AUDIT | Ensure local interactive user dot files access is configured | Warning on files found" + when: + - discovered_homedir_dot_files.stdout | length > 0 + - not ubtu24cis_dotperm_ansiblemanaged + ansible.builtin.debug: + msg: + - "Warning!! We have discovered group or world-writable dot files on your system and this host is configured for manual intervention. Please investigate these files further." + + - name: "7.2.10 | PATCH | Ensure local interactive user dot files access is configured | Set warning count" + when: + - discovered_homedir_dot_files.stdout | length > 0 + - not ubtu24cis_dotperm_ansiblemanaged + ansible.builtin.import_tasks: + file: warning_facts.yml + + - name: "7.2.10 | PATCH | Ensure local interactive user dot files access is configured | Changes files if configured" + when: + - discovered_homedir_dot_files.stdout | length > 0 + - ubtu24cis_dotperm_ansiblemanaged + ansible.builtin.file: + path: '{{ item }}' + mode: go-w + with_items: "{{ discovered_homedir_dot_files.stdout_lines }}" diff --git a/tasks/section_7/main.yml b/tasks/section_7/main.yml new file mode 100644 index 0000000..2d1247a --- /dev/null +++ b/tasks/section_7/main.yml @@ -0,0 +1,9 @@ +--- + +- name: "SECTION | 7.1 | System File Permissions" + ansible.builtin.import_tasks: + file: cis_7.1.x.yml + +- name: "SECTION | 7.2 | Local User and Group Settings" + ansible.builtin.import_tasks: + file: cis_7.2.x.yml diff --git a/tasks/warning_facts.yml b/tasks/warning_facts.yml new file mode 100644 index 0000000..4549726 --- /dev/null +++ b/tasks/warning_facts.yml @@ -0,0 +1,20 @@ +--- + +# This task is used to create variables used in giving a warning 
summary for manual tasks +# that need attention +# +# The warn_control_list and warn_count vars start life in vars/main.yml but get updated +# as the tasks that have a warning complete +# +# Those two variables are used in the tasks/main.yml to display a list of warnings +# +# warn_control_id is set within the task itself and has the control ID as the value +# +# warn_control_list is the main variable to be used and is a list made up of the warn_control_id’s +# +# warn_count is the main variable for the number of warnings and each time a warn_control_id is added +# the count increases by a value of 1 +- name: "{{ warn_control_id }} | AUDIT | Set fact for manual task warning." + ansible.builtin.set_fact: + warn_control_list: "{{ warn_control_list }} [{{ warn_control_id }}]" + warn_count: "{{ warn_count | int + 1 }}" diff --git a/templates/ansible_vars_goss.yml.j2 b/templates/ansible_vars_goss.yml.j2 new file mode 100644 index 0000000..87864e5 --- /dev/null +++ b/templates/ansible_vars_goss.yml.j2 @@ -0,0 +1,743 @@ +## metadata for Audit benchmark +benchmark_version: {{ benchmark_version }} + +# timeout for each command to run where set - default = 10seconds/10000ms +timeout_ms: {{ audit_cmd_timeout }} + +ubtu24cis_section1: {{ ubtu24cis_section1 }} +ubtu24cis_section2: {{ ubtu24cis_section2 }} +ubtu24cis_section3: {{ ubtu24cis_section3 }} +ubtu24cis_section4: {{ ubtu24cis_section4 }} +ubtu24cis_section5: {{ ubtu24cis_section5 }} +ubtu24cis_section6: {{ ubtu24cis_section6 }} +ubtu24cis_section7: {{ ubtu24cis_section7 }} + +ubtu24cis_level_1: {{ ubtu24cis_level_1 }} +ubtu24cis_level_2: {{ ubtu24cis_level_2 }} + + +# to enable rules that may have IO impact on a system e.g. 
full filesystem scans or CPU heavy +run_heavy_tests: {{ audit_run_heavy_tests }} + +# True is BIOS based system else set to false +ubtu24_legacy_boot: true + +## +## Rule-specific switches +## +## Use the switches below to disable specific rules independently of the chosen profile +## + +## Section 1 Fixes +# Section 1 is Initial setup (FileSystem Configuration, Configure Software Updates, Filesystem Integrity Checking, Secure Boot Settings, +# Additional Process Hardening, Mandatory Access Control, Command Line Warning Banners, and GNOME Display Manager) + +# 1.1 Filesystems +# 1.1.1 Configure Filesystem Kernel Modules +ubtu24cis_rule_1_1_1_1: {{ ubtu24cis_rule_1_1_1_1 }} +ubtu24cis_rule_1_1_1_2: {{ ubtu24cis_rule_1_1_1_2 }} +ubtu24cis_rule_1_1_1_3: {{ ubtu24cis_rule_1_1_1_3 }} +ubtu24cis_rule_1_1_1_4: {{ ubtu24cis_rule_1_1_1_4 }} +ubtu24cis_rule_1_1_1_5: {{ ubtu24cis_rule_1_1_1_5 }} +ubtu24cis_rule_1_1_1_6: {{ ubtu24cis_rule_1_1_1_6 }} +ubtu24cis_rule_1_1_1_7: {{ ubtu24cis_rule_1_1_1_7 }} +ubtu24cis_rule_1_1_1_8: {{ ubtu24cis_rule_1_1_1_8 }} +ubtu24cis_rule_1_1_1_9: {{ ubtu24cis_rule_1_1_1_9 }} +ubtu24cis_rule_1_1_1_10: {{ ubtu24cis_rule_1_1_1_10 }} + +# 1.1.2 Configure Filesystem Partitions +# /tmp +ubtu24cis_rule_1_1_2_1_1: {{ ubtu24cis_rule_1_1_2_1_1 }} +ubtu24cis_rule_1_1_2_1_2: {{ ubtu24cis_rule_1_1_2_1_2 }} +ubtu24cis_rule_1_1_2_1_3: {{ ubtu24cis_rule_1_1_2_1_3 }} +ubtu24cis_rule_1_1_2_1_4: {{ ubtu24cis_rule_1_1_2_1_4 }} + +# /dev/shm +ubtu24cis_rule_1_1_2_2_1: {{ ubtu24cis_rule_1_1_2_2_1 }} +ubtu24cis_rule_1_1_2_2_2: {{ ubtu24cis_rule_1_1_2_2_2 }} +ubtu24cis_rule_1_1_2_2_3: {{ ubtu24cis_rule_1_1_2_2_3 }} +ubtu24cis_rule_1_1_2_2_4: {{ ubtu24cis_rule_1_1_2_2_4 }} + +# /home +ubtu24cis_rule_1_1_2_3_1: {{ ubtu24cis_rule_1_1_2_3_1 }} +ubtu24cis_rule_1_1_2_3_2: {{ ubtu24cis_rule_1_1_2_3_2 }} +ubtu24cis_rule_1_1_2_3_3: {{ ubtu24cis_rule_1_1_2_3_3 }} + +# /var +ubtu24cis_rule_1_1_2_4_1: {{ ubtu24cis_rule_1_1_2_4_1 }} +ubtu24cis_rule_1_1_2_4_2: {{ 
ubtu24cis_rule_1_1_2_4_2 }} +ubtu24cis_rule_1_1_2_4_3: {{ ubtu24cis_rule_1_1_2_4_3 }} + +# /var/tmp +ubtu24cis_rule_1_1_2_5_1: {{ ubtu24cis_rule_1_1_2_5_1 }} +ubtu24cis_rule_1_1_2_5_2: {{ ubtu24cis_rule_1_1_2_5_2 }} +ubtu24cis_rule_1_1_2_5_3: {{ ubtu24cis_rule_1_1_2_5_3 }} +ubtu24cis_rule_1_1_2_5_4: {{ ubtu24cis_rule_1_1_2_5_4 }} + +# /var/log +ubtu24cis_rule_1_1_2_6_1: {{ ubtu24cis_rule_1_1_2_6_1 }} +ubtu24cis_rule_1_1_2_6_2: {{ ubtu24cis_rule_1_1_2_6_2 }} +ubtu24cis_rule_1_1_2_6_3: {{ ubtu24cis_rule_1_1_2_6_3 }} +ubtu24cis_rule_1_1_2_6_4: {{ ubtu24cis_rule_1_1_2_6_4 }} + +# /var/log/audit +ubtu24cis_rule_1_1_2_7_1: {{ ubtu24cis_rule_1_1_2_7_1 }} +ubtu24cis_rule_1_1_2_7_2: {{ ubtu24cis_rule_1_1_2_7_2 }} +ubtu24cis_rule_1_1_2_7_3: {{ ubtu24cis_rule_1_1_2_7_3 }} +ubtu24cis_rule_1_1_2_7_4: {{ ubtu24cis_rule_1_1_2_7_4 }} + +# 1.2 Package mgmt +# 1.2.1 Configure Package repositories +ubtu24cis_rule_1_2_1_1: {{ ubtu24cis_rule_1_2_1_1 }} +ubtu24cis_rule_1_2_1_2: {{ ubtu24cis_rule_1_2_1_2 }} +# 1.2.2 Configure Package updates +ubtu24cis_rule_1_2_2_1: {{ ubtu24cis_rule_1_2_2_1 }} + +# 1.3 Mandatory Access Control +## 1.3.1 Configure AppArmor +ubtu24cis_rule_1_3_1_1: {{ ubtu24cis_rule_1_3_1_1 }} +ubtu24cis_rule_1_3_1_2: {{ ubtu24cis_rule_1_3_1_2 }} +ubtu24cis_rule_1_3_1_3: {{ ubtu24cis_rule_1_3_1_3 }} +ubtu24cis_rule_1_3_1_4: {{ ubtu24cis_rule_1_3_1_4 }} + +# 1.4 Configure Bootloader +ubtu24cis_rule_1_4_1: {{ ubtu24cis_rule_1_4_1 }} +ubtu24cis_rule_1_4_2: {{ ubtu24cis_rule_1_4_2 }} + +# 1.5 Configure additional Process Hardening +ubtu24cis_rule_1_5_1: {{ ubtu24cis_rule_1_5_1 }} +ubtu24cis_rule_1_5_2: {{ ubtu24cis_rule_1_5_2 }} +ubtu24cis_rule_1_5_3: {{ ubtu24cis_rule_1_5_3 }} +ubtu24cis_rule_1_5_4: {{ ubtu24cis_rule_1_5_4 }} +ubtu24cis_rule_1_5_5: {{ ubtu24cis_rule_1_5_5 }} + +# 1.6 Configure Command Line Warning Banners +ubtu24cis_rule_1_6_1: {{ ubtu24cis_rule_1_6_1 }} +ubtu24cis_rule_1_6_2: {{ ubtu24cis_rule_1_6_2 }} +ubtu24cis_rule_1_6_3: {{ ubtu24cis_rule_1_6_3 }} 
+ubtu24cis_rule_1_6_4: {{ ubtu24cis_rule_1_6_4 }} +ubtu24cis_rule_1_6_5: {{ ubtu24cis_rule_1_6_5 }} +ubtu24cis_rule_1_6_6: {{ ubtu24cis_rule_1_6_6 }} + +# 1.7 Configure GNOME Display Manager +ubtu24cis_rule_1_7_1: {{ ubtu24cis_rule_1_7_1 }} +ubtu24cis_rule_1_7_2: {{ ubtu24cis_rule_1_7_2 }} +ubtu24cis_rule_1_7_3: {{ ubtu24cis_rule_1_7_3 }} +ubtu24cis_rule_1_7_4: {{ ubtu24cis_rule_1_7_4 }} +ubtu24cis_rule_1_7_5: {{ ubtu24cis_rule_1_7_5 }} +ubtu24cis_rule_1_7_6: {{ ubtu24cis_rule_1_7_6 }} +ubtu24cis_rule_1_7_7: {{ ubtu24cis_rule_1_7_7 }} +ubtu24cis_rule_1_7_8: {{ ubtu24cis_rule_1_7_8 }} +ubtu24cis_rule_1_7_9: {{ ubtu24cis_rule_1_7_9 }} +ubtu24cis_rule_1_7_10: {{ ubtu24cis_rule_1_7_10 }} + +## Section 2 Fixes +# Section 2 is Services (Special Purpose Services, and service clients) + +# 2.1 Configure Server Services +ubtu24cis_rule_2_1_1: {{ ubtu24cis_rule_2_1_1 }} +ubtu24cis_rule_2_1_2: {{ ubtu24cis_rule_2_1_2 }} +ubtu24cis_rule_2_1_3: {{ ubtu24cis_rule_2_1_3 }} +ubtu24cis_rule_2_1_4: {{ ubtu24cis_rule_2_1_4 }} +ubtu24cis_rule_2_1_5: {{ ubtu24cis_rule_2_1_5 }} +ubtu24cis_rule_2_1_6: {{ ubtu24cis_rule_2_1_6 }} +ubtu24cis_rule_2_1_7: {{ ubtu24cis_rule_2_1_7 }} +ubtu24cis_rule_2_1_8: {{ ubtu24cis_rule_2_1_8 }} +ubtu24cis_rule_2_1_9: {{ ubtu24cis_rule_2_1_9 }} +ubtu24cis_rule_2_1_10: {{ ubtu24cis_rule_2_1_10 }} +ubtu24cis_rule_2_1_11: {{ ubtu24cis_rule_2_1_11 }} +ubtu24cis_rule_2_1_12: {{ ubtu24cis_rule_2_1_12 }} +ubtu24cis_rule_2_1_13: {{ ubtu24cis_rule_2_1_13 }} +ubtu24cis_rule_2_1_14: {{ ubtu24cis_rule_2_1_14 }} +ubtu24cis_rule_2_1_15: {{ ubtu24cis_rule_2_1_15 }} +ubtu24cis_rule_2_1_16: {{ ubtu24cis_rule_2_1_16 }} +ubtu24cis_rule_2_1_17: {{ ubtu24cis_rule_2_1_17 }} +ubtu24cis_rule_2_1_18: {{ ubtu24cis_rule_2_1_18 }} +ubtu24cis_rule_2_1_19: {{ ubtu24cis_rule_2_1_19 }} +ubtu24cis_rule_2_1_20: {{ ubtu24cis_rule_2_1_20 }} +ubtu24cis_rule_2_1_21: {{ ubtu24cis_rule_2_1_21 }} +ubtu24cis_rule_2_1_22: {{ ubtu24cis_rule_2_1_22 }} + +# 2.2 Configure client services 
+ubtu24cis_rule_2_2_1: {{ ubtu24cis_rule_2_2_1 }} +ubtu24cis_rule_2_2_2: {{ ubtu24cis_rule_2_2_2 }} +ubtu24cis_rule_2_2_3: {{ ubtu24cis_rule_2_2_3 }} +ubtu24cis_rule_2_2_4: {{ ubtu24cis_rule_2_2_4 }} +ubtu24cis_rule_2_2_5: {{ ubtu24cis_rule_2_2_5 }} +ubtu24cis_rule_2_2_6: {{ ubtu24cis_rule_2_2_6 }} + +# Ensure time synchronization is in use +ubtu24cis_rule_2_3_1_1: {{ ubtu24cis_rule_2_3_1_1 }} +# Configure systemd-timesyncd +ubtu24cis_rule_2_3_2_1: {{ ubtu24cis_rule_2_3_2_1 }} +ubtu24cis_rule_2_3_2_2: {{ ubtu24cis_rule_2_3_2_2 }} +# Configure Chrony +ubtu24cis_rule_2_3_3_1: {{ ubtu24cis_rule_2_3_3_1 }} +ubtu24cis_rule_2_3_3_2: {{ ubtu24cis_rule_2_3_3_2 }} +ubtu24cis_rule_2_3_3_3: {{ ubtu24cis_rule_2_3_3_3 }} + +# 2.4 Job Schedulers +# 2.4.1 Configure Cron +ubtu24cis_rule_2_4_1_1: {{ ubtu24cis_rule_2_4_1_1 }} +ubtu24cis_rule_2_4_1_2: {{ ubtu24cis_rule_2_4_1_2 }} +ubtu24cis_rule_2_4_1_3: {{ ubtu24cis_rule_2_4_1_3 }} +ubtu24cis_rule_2_4_1_4: {{ ubtu24cis_rule_2_4_1_4 }} +ubtu24cis_rule_2_4_1_5: {{ ubtu24cis_rule_2_4_1_5 }} +ubtu24cis_rule_2_4_1_6: {{ ubtu24cis_rule_2_4_1_6 }} +ubtu24cis_rule_2_4_1_7: {{ ubtu24cis_rule_2_4_1_7 }} +ubtu24cis_rule_2_4_1_8: {{ ubtu24cis_rule_2_4_1_8 }} +# Configure At +ubtu24cis_rule_2_4_2_1: {{ ubtu24cis_rule_2_4_2_1 }} + +## Section 3 Network Configuration +# 3.1 Configure Network Devices +ubtu24cis_rule_3_1_1: {{ ubtu24cis_rule_3_1_1 }} +ubtu24cis_rule_3_1_2: {{ ubtu24cis_rule_3_1_2 }} +ubtu24cis_rule_3_1_3: {{ ubtu24cis_rule_3_1_3 }} +# 3.2 Configure Network Kernel Modules (Host Only) +ubtu24cis_rule_3_2_1: {{ ubtu24cis_rule_3_2_1 }} +ubtu24cis_rule_3_2_2: {{ ubtu24cis_rule_3_2_2 }} +ubtu24cis_rule_3_2_3: {{ ubtu24cis_rule_3_2_3 }} +ubtu24cis_rule_3_2_4: {{ ubtu24cis_rule_3_2_4 }} +# 3.3 Configure Network Kernel Parameters (Host and Router) +ubtu24cis_rule_3_3_1: {{ ubtu24cis_rule_3_3_1 }} +ubtu24cis_rule_3_3_2: {{ ubtu24cis_rule_3_3_2 }} +ubtu24cis_rule_3_3_3: {{ ubtu24cis_rule_3_3_3 }} +ubtu24cis_rule_3_3_4: {{ ubtu24cis_rule_3_3_4 
}} +ubtu24cis_rule_3_3_5: {{ ubtu24cis_rule_3_3_5 }} +ubtu24cis_rule_3_3_6: {{ ubtu24cis_rule_3_3_6 }} +ubtu24cis_rule_3_3_7: {{ ubtu24cis_rule_3_3_7 }} +ubtu24cis_rule_3_3_8: {{ ubtu24cis_rule_3_3_8 }} +ubtu24cis_rule_3_3_9: {{ ubtu24cis_rule_3_3_9 }} +ubtu24cis_rule_3_3_10: {{ ubtu24cis_rule_3_3_10 }} +ubtu24cis_rule_3_3_11: {{ ubtu24cis_rule_3_3_11 }} + +## Section 4 Host Based Firewall +# 4.1 Single firewall +ubtu24cis_rule_4_1_1: {{ ubtu24cis_rule_4_1_1 }} +# 4.2 Configure Uncomplicated Firewall +ubtu24cis_rule_4_2_1: {{ ubtu24cis_rule_4_2_1 }} +ubtu24cis_rule_4_2_2: {{ ubtu24cis_rule_4_2_2 }} +ubtu24cis_rule_4_2_3: {{ ubtu24cis_rule_4_2_3 }} +ubtu24cis_rule_4_2_4: {{ ubtu24cis_rule_4_2_4 }} +ubtu24cis_rule_4_2_5: {{ ubtu24cis_rule_4_2_5 }} +ubtu24cis_rule_4_2_6: {{ ubtu24cis_rule_4_2_6 }} +ubtu24cis_rule_4_2_7: {{ ubtu24cis_rule_4_2_7 }} +# 4.3 nftables +ubtu24cis_rule_4_3_1: {{ ubtu24cis_rule_4_3_1 }} +ubtu24cis_rule_4_3_2: {{ ubtu24cis_rule_4_3_2 }} +ubtu24cis_rule_4_3_3: {{ ubtu24cis_rule_4_3_3 }} +ubtu24cis_rule_4_3_4: {{ ubtu24cis_rule_4_3_4 }} +ubtu24cis_rule_4_3_5: {{ ubtu24cis_rule_4_3_5 }} +ubtu24cis_rule_4_3_6: {{ ubtu24cis_rule_4_3_6 }} +ubtu24cis_rule_4_3_7: {{ ubtu24cis_rule_4_3_7 }} +ubtu24cis_rule_4_3_8: {{ ubtu24cis_rule_4_3_8 }} +ubtu24cis_rule_4_3_9: {{ ubtu24cis_rule_4_3_9 }} +ubtu24cis_rule_4_3_10: {{ ubtu24cis_rule_4_3_10 }} + +# Configure iptables software +ubtu24cis_rule_4_4_1_1: {{ ubtu24cis_rule_4_4_1_1 }} +ubtu24cis_rule_4_4_1_2: {{ ubtu24cis_rule_4_4_1_2 }} +ubtu24cis_rule_4_4_1_3: {{ ubtu24cis_rule_4_4_1_3 }} + +# Configure IPv4 iptables +ubtu24cis_rule_4_4_2_1: {{ ubtu24cis_rule_4_4_2_1 }} +ubtu24cis_rule_4_4_2_2: {{ ubtu24cis_rule_4_4_2_2 }} +ubtu24cis_rule_4_4_2_3: {{ ubtu24cis_rule_4_4_2_3 }} +ubtu24cis_rule_4_4_2_4: {{ ubtu24cis_rule_4_4_2_4 }} + +# Configure IPv6 iptables +ubtu24cis_rule_4_4_3_1: {{ ubtu24cis_rule_4_4_3_1 }} +ubtu24cis_rule_4_4_3_2: {{ ubtu24cis_rule_4_4_3_2 }} +ubtu24cis_rule_4_4_3_3: {{ 
ubtu24cis_rule_4_4_3_3 }} +ubtu24cis_rule_4_4_3_4: {{ ubtu24cis_rule_4_4_3_4 }} + +## Section 5 Access Control +# 5.1 Configure SSH Server +ubtu24cis_rule_5_1_1: {{ ubtu24cis_rule_5_1_1 }} +ubtu24cis_rule_5_1_2: {{ ubtu24cis_rule_5_1_2 }} +ubtu24cis_rule_5_1_3: {{ ubtu24cis_rule_5_1_3 }} +ubtu24cis_rule_5_1_4: {{ ubtu24cis_rule_5_1_4 }} +ubtu24cis_rule_5_1_5: {{ ubtu24cis_rule_5_1_5 }} +ubtu24cis_rule_5_1_6: {{ ubtu24cis_rule_5_1_6 }} +ubtu24cis_rule_5_1_7: {{ ubtu24cis_rule_5_1_7 }} +ubtu24cis_rule_5_1_8: {{ ubtu24cis_rule_5_1_8 }} +ubtu24cis_rule_5_1_9: {{ ubtu24cis_rule_5_1_9 }} +ubtu24cis_rule_5_1_10: {{ ubtu24cis_rule_5_1_10 }} +ubtu24cis_rule_5_1_11: {{ ubtu24cis_rule_5_1_11 }} +ubtu24cis_rule_5_1_12: {{ ubtu24cis_rule_5_1_12 }} +ubtu24cis_rule_5_1_13: {{ ubtu24cis_rule_5_1_13 }} +ubtu24cis_rule_5_1_14: {{ ubtu24cis_rule_5_1_14 }} +ubtu24cis_rule_5_1_15: {{ ubtu24cis_rule_5_1_15 }} +ubtu24cis_rule_5_1_16: {{ ubtu24cis_rule_5_1_16 }} +ubtu24cis_rule_5_1_17: {{ ubtu24cis_rule_5_1_17 }} +ubtu24cis_rule_5_1_18: {{ ubtu24cis_rule_5_1_18 }} +ubtu24cis_rule_5_1_19: {{ ubtu24cis_rule_5_1_19 }} +ubtu24cis_rule_5_1_20: {{ ubtu24cis_rule_5_1_20 }} +ubtu24cis_rule_5_1_21: {{ ubtu24cis_rule_5_1_21 }} +ubtu24cis_rule_5_1_22: {{ ubtu24cis_rule_5_1_22 }} +# 5.2 Configure privilege escalation +ubtu24cis_rule_5_2_1: {{ ubtu24cis_rule_5_2_1 }} +ubtu24cis_rule_5_2_2: {{ ubtu24cis_rule_5_2_2 }} +ubtu24cis_rule_5_2_3: {{ ubtu24cis_rule_5_2_3 }} +ubtu24cis_rule_5_2_4: {{ ubtu24cis_rule_5_2_4 }} +ubtu24cis_rule_5_2_5: {{ ubtu24cis_rule_5_2_5 }} +ubtu24cis_rule_5_2_6: {{ ubtu24cis_rule_5_2_6 }} +ubtu24cis_rule_5_2_7: {{ ubtu24cis_rule_5_2_7 }} +# 5.3.1 Configure PAM software packages +ubtu24cis_rule_5_3_1_1: {{ ubtu24cis_rule_5_3_1_1 }} +ubtu24cis_rule_5_3_1_2: {{ ubtu24cis_rule_5_3_1_2 }} +ubtu24cis_rule_5_3_1_3: {{ ubtu24cis_rule_5_3_1_3 }} +# 5.3.2 Configure pam-auth-update profiles +ubtu24cis_rule_5_3_2_1: {{ ubtu24cis_rule_5_3_2_1 }} +ubtu24cis_rule_5_3_2_2: {{ 
# These are added as part of 5.3.2.4 using jinja2 template
# 6.1.1.x Configure systemd-journald service
+ubtu24cis_rule_6_2_1_4: {{ ubtu24cis_rule_6_2_1_4 }} +# 6.2.2 Configure auditd data retention +ubtu24cis_rule_6_2_2_1: {{ ubtu24cis_rule_6_2_2_1 }} +ubtu24cis_rule_6_2_2_2: {{ ubtu24cis_rule_6_2_2_2 }} +ubtu24cis_rule_6_2_2_3: {{ ubtu24cis_rule_6_2_2_3 }} +ubtu24cis_rule_6_2_2_4: {{ ubtu24cis_rule_6_2_2_4 }} +# 6.2.3 Configure auditd rules +ubtu24cis_rule_6_2_3_1: {{ ubtu24cis_rule_6_2_3_1 }} +ubtu24cis_rule_6_2_3_2: {{ ubtu24cis_rule_6_2_3_2 }} +ubtu24cis_rule_6_2_3_3: {{ ubtu24cis_rule_6_2_3_3 }} +ubtu24cis_rule_6_2_3_4: {{ ubtu24cis_rule_6_2_3_4 }} +ubtu24cis_rule_6_2_3_5: {{ ubtu24cis_rule_6_2_3_5 }} +ubtu24cis_rule_6_2_3_6: {{ ubtu24cis_rule_6_2_3_6 }} +ubtu24cis_rule_6_2_3_7: {{ ubtu24cis_rule_6_2_3_7 }} +ubtu24cis_rule_6_2_3_8: {{ ubtu24cis_rule_6_2_3_8 }} +ubtu24cis_rule_6_2_3_9: {{ ubtu24cis_rule_6_2_3_9 }} +ubtu24cis_rule_6_2_3_10: {{ ubtu24cis_rule_6_2_3_10 }} +ubtu24cis_rule_6_2_3_11: {{ ubtu24cis_rule_6_2_3_11 }} +ubtu24cis_rule_6_2_3_12: {{ ubtu24cis_rule_6_2_3_12 }} +ubtu24cis_rule_6_2_3_13: {{ ubtu24cis_rule_6_2_3_13 }} +ubtu24cis_rule_6_2_3_14: {{ ubtu24cis_rule_6_2_3_14 }} +ubtu24cis_rule_6_2_3_15: {{ ubtu24cis_rule_6_2_3_15 }} +ubtu24cis_rule_6_2_3_16: {{ ubtu24cis_rule_6_2_3_16 }} +ubtu24cis_rule_6_2_3_17: {{ ubtu24cis_rule_6_2_3_17 }} +ubtu24cis_rule_6_2_3_18: {{ ubtu24cis_rule_6_2_3_18 }} +ubtu24cis_rule_6_2_3_19: {{ ubtu24cis_rule_6_2_3_19 }} +ubtu24cis_rule_6_2_3_20: {{ ubtu24cis_rule_6_2_3_20 }} +ubtu24cis_rule_6_2_3_21: {{ ubtu24cis_rule_6_2_3_21 }} +# 6.2.4 Configure audit file access +ubtu24cis_rule_6_2_4_1: {{ ubtu24cis_rule_6_2_4_1 }} +ubtu24cis_rule_6_2_4_2: {{ ubtu24cis_rule_6_2_4_2 }} +ubtu24cis_rule_6_2_4_3: {{ ubtu24cis_rule_6_2_4_3 }} +ubtu24cis_rule_6_2_4_4: {{ ubtu24cis_rule_6_2_4_4 }} +ubtu24cis_rule_6_2_4_5: {{ ubtu24cis_rule_6_2_4_5 }} +ubtu24cis_rule_6_2_4_6: {{ ubtu24cis_rule_6_2_4_6 }} +ubtu24cis_rule_6_2_4_7: {{ ubtu24cis_rule_6_2_4_7 }} +ubtu24cis_rule_6_2_4_8: {{ ubtu24cis_rule_6_2_4_8 }} +ubtu24cis_rule_6_2_4_9: {{ 
ubtu24cis_rule_6_2_4_9 }} +ubtu24cis_rule_6_2_4_10: {{ ubtu24cis_rule_6_2_4_10 }} +# 6.3 Configure Filesystem Integrity Checking +ubtu24cis_rule_6_3_1: {{ ubtu24cis_rule_6_3_1 }} +ubtu24cis_rule_6_3_2: {{ ubtu24cis_rule_6_3_2 }} +ubtu24cis_rule_6_3_3: {{ ubtu24cis_rule_6_3_3 }} + +## Section 7 +# 7.1 System File Permissions +ubtu24cis_rule_7_1_1: {{ ubtu24cis_rule_7_1_1 }} +ubtu24cis_rule_7_1_2: {{ ubtu24cis_rule_7_1_2 }} +ubtu24cis_rule_7_1_3: {{ ubtu24cis_rule_7_1_3 }} +ubtu24cis_rule_7_1_4: {{ ubtu24cis_rule_7_1_4 }} +ubtu24cis_rule_7_1_5: {{ ubtu24cis_rule_7_1_5 }} +ubtu24cis_rule_7_1_6: {{ ubtu24cis_rule_7_1_6 }} +ubtu24cis_rule_7_1_7: {{ ubtu24cis_rule_7_1_7 }} +ubtu24cis_rule_7_1_8: {{ ubtu24cis_rule_7_1_8 }} +ubtu24cis_rule_7_1_9: {{ ubtu24cis_rule_7_1_9 }} +ubtu24cis_rule_7_1_10: {{ ubtu24cis_rule_7_1_10 }} +ubtu24cis_rule_7_1_11: {{ ubtu24cis_rule_7_1_11 }} +ubtu24cis_rule_7_1_12: {{ ubtu24cis_rule_7_1_12 }} +ubtu24cis_rule_7_1_13: {{ ubtu24cis_rule_7_1_13 }} +# 7.2 Local User and Group Settings +ubtu24cis_rule_7_2_1: {{ ubtu24cis_rule_7_2_1 }} +ubtu24cis_rule_7_2_2: {{ ubtu24cis_rule_7_2_2 }} +ubtu24cis_rule_7_2_3: {{ ubtu24cis_rule_7_2_3 }} +ubtu24cis_rule_7_2_4: {{ ubtu24cis_rule_7_2_4 }} +ubtu24cis_rule_7_2_5: {{ ubtu24cis_rule_7_2_5 }} +ubtu24cis_rule_7_2_6: {{ ubtu24cis_rule_7_2_6 }} +ubtu24cis_rule_7_2_7: {{ ubtu24cis_rule_7_2_7 }} +ubtu24cis_rule_7_2_8: {{ ubtu24cis_rule_7_2_8 }} +ubtu24cis_rule_7_2_9: {{ ubtu24cis_rule_7_2_9 }} +ubtu24cis_rule_7_2_10: {{ ubtu24cis_rule_7_2_10 }} + + +## System functionality configuration variables +## +## There are certain functionalities of a system +## that may require either to skip certain CIS rules +## or install certain packages. +## Set the respective variable to `true` in order to +## enable a certain functionality on the system + +# This variable governs whether specific CIS rules +# concerned with acceptance and routing of packages +# are skipped. 
# If system uses squashfs e.g. snap package manager set true
# mask - if a dependency for product so cannot be removed
# This variable chooses the tool used for time synchronization
# This control manages how the bluetooth service is managed
+# Its default value is `/etc/sysctl.conf`. +ubtu24cis_sysctl_network_conf: {{ ubtu24cis_sysctl_network_conf }} + +# +### Section 4 +# +## Controls 4.1.x, 4.2.x, and 4.3.x - Firewall configuration +# This variable represents the toggle for which firewall package is used. +# The options that have an effect on the system are `ufw` and `iptables`. +# The option `nftables` is also possible, but will only result in a message, +# that `nftables` has been chosen; all settings have to be carried out manually. +# Any other value, e.g. `none` will skip all firewall-related controls. +ubtu24cis_firewall_package: {{ ubtu24cis_firewall_package }} + +## auditd settings +ubtu24cis_auditd: + space_left_action: {{ ubtu24cis_auditd_space_left_action }} + admin_space_left_action: {{ ubtu24cis_auditd_admin_space_left_action }} + max_log_file_action: {{ ubtu24cis_auditd_max_log_file_action }} + auditd_backlog_limit: {{ ubtu24cis_audit_back_log_limit }} + +## syslog +# Set which syslog service +# journald or rsyslog +ubtu24cis_syslog_service: {{ ubtu24cis_syslog_service }} +ubtu24cis_is_syslog_server: {{ ubtu24cis_system_is_log_server }} + +### Section 5 + +# Note the following to understand precedence and layout +ubtu24cis_sshd_access: + - AllowUser {{ ubtu24cis_sshd.allow_users }} + - AllowGroup {{ ubtu24cis_sshd.allow_groups }} + - DenyUser {{ ubtu24cis_sshd.deny_users }} + - DenyGroup {{ ubtu24cis_sshd.deny_groups }} + +ubtu24cis_ssh_strong_ciphers: + - aes256-gcm@openssh.com + - aes128-gcm@openssh.com + - aes256-ctr + - aes192-ctr + - aes128-ctr +ubtu24cis_ssh_weak_ciphers: + - 3des-cbc + - aes128-cbc + - aes192-cbc + - aes256-cbc + - arcfour + - chacha20-poly1305@openssh.com + - arcfour128 + - arcfour256 + - blowfish-cbc + - cast128-cbc + - rijndael-cbc@lysator.liu.se + +ubtu24cis_ssh_strong_macs: + - HMAC-SHA1 + - hmac-sha2-256 + - hmac-sha2-512 +ubtu24cis_ssh_weak_macs: + - hmac-md5 + - hmac-md5-96 + - hmac-ripemd160 + - hmac-sha1-96 + - umac-64@openssh.com + - 
# login.defs password settings
# provided by MindPointGroup LLC YOUR CHANGES WILL BE LOST!
priv_procs is defined %} +{% for proc in priv_procs.stdout_lines -%} +-a always,exit -F path={{ proc }} -F perm=x -F auid>=1000 -F auid!=unset -k privileged +{% endfor %} +{% endif %} +{% endif %} +{% if ubtu24cis_rule_6_2_3_7 %} +-a always,exit -F arch=b64 -S creat,open,openat,truncate,ftruncate -F exit=-EACCES -F auid>=1000 -F auid!=unset -k access +-a always,exit -F arch=b64 -S creat,open,openat,truncate,ftruncate -F exit=-EPERM -F auid>=1000 -F auid!=unset -k access +-a always,exit -F arch=b32 -S creat,open,openat,truncate,ftruncate -F exit=-EACCES -F auid>=1000 -F auid!=unset -k access +-a always,exit -F arch=b32 -S creat,open,openat,truncate,ftruncate -F exit=-EPERM -F auid>=1000 -F auid!=unset -k access +{% endif %} +{% if ubtu24cis_rule_6_2_3_8 %} +-w /etc/group -p wa -k identity +-w /etc/passwd -p wa -k identity +-w /etc/gshadow -p wa -k identity +-w /etc/shadow -p wa -k identity +-w /etc/security/opasswd -p wa -k identity +-w /etc/nsswitch.conf -p wa -k identity +-w /etc/pam.conf -p wa -k identity +-w /etc/pam.d -p wa -k identity +{% endif %} +{% if ubtu24cis_rule_6_2_3_9 %} +-a always,exit -F arch=b64 -S chmod,fchmod,fchmodat -F auid>=1000 -F auid!=unset -k perm_mod +-a always,exit -F arch=b64 -S chown,fchown,lchown,fchownat -F auid>=1000 -F auid!=unset -k perm_mod +-a always,exit -F arch=b64 -S setxattr,lsetxattr,fsetxattr,removexattr,lremovexattr,fremovexattr -F auid>=1000 -F auid!=unset -k perm_mod +-a always,exit -F arch=b32 -S chmod,fchmod,fchmodat -F auid>=1000 -F auid!=unset -k perm_mod +-a always,exit -F arch=b32 -S chown,fchown,lchown,fchownat -F auid>=1000 -F auid!=unset -k perm_mod +-a always,exit -F arch=b32 -S setxattr,lsetxattr,fsetxattr,removexattr,lremovexattr,fremovexattr -F auid>=1000 -F auid!=unset -k perm_mod +{% endif %} +{% if ubtu24cis_rule_6_2_3_10 %} +-a always,exit -F arch=b64 -S mount -F auid>=1000 -F auid!=unset -k mounts +-a always,exit -F arch=b32 -S mount -F auid>=1000 -F auid!=unset -k mounts +{% endif %} +{% if 
# information about usable directives.
+ +# This will use (up to): +# - 4 sources from ntp.ubuntu.com which some are ipv6 enabled +# - 2 sources from 2.ubuntu.pool.ntp.org which is ipv6 enabled as well +# - 1 source from [01].ubuntu.pool.ntp.org each (ipv4 only atm) +# This means by default, up to 6 dual-stack and up to 2 additional IPv4-only +# sources will be used. +# At the same time it retains some protection against one of the entries being +# down (compare to just using one of the lines). See (LP: #1754358) for the +# discussion. +# +# About using servers from the NTP Pool Project in general see (LP: #104525). +# Approved by Ubuntu Technical Board on 2011-02-08. +# See http://www.pool.ntp.org/join.html for more information. + +{% for server in ubtu24cis_time_synchronization_servers -%} +server {{ server }} {{ ubtu24cis_chrony_server_options }} +{% endfor %} + +# This directive specify the location of the file containing ID/key pairs for +# NTP authentication. +keyfile /etc/chrony/chrony.keys + +# Set runtime command key. Note that if you change the key (not the +# password) to anything other than 1 you will need to edit +# /etc/ppp/ip-up.d/chrony, /etc/ppp/ip-down.d/chrony, /etc/init.d/chrony +# and /etc/cron.weekly/chrony as these scripts use it to get the password. + +#commandkey 1 + +# This directive specify the file into which chronyd will store the rate +# information. +driftfile /var/lib/chrony/chrony.drift + +# Uncomment the following line to turn logging on. +#log tracking measurements statistics + +# Log files location. +logdir /var/log/chrony + +# Stop bad estimates upsetting machine clock. +maxupdateskew 100.0 + +# This directive enables kernel synchronisation (every 11 minutes) of the +# real-time clock. Note that it can’t be used along with the 'rtcfile' directive. +rtcsync + +# Dump measurements when daemon exits. +dumponexit + +# Specify directory for dumping measurements. + +dumpdir /var/lib/chrony + +# Let computer be a server when it is unsynchronised. 
+ +local stratum 10 + +# Allow computers on the unrouted nets to use the server. + +#allow 10/8 +#allow 192.168/16 +#allow 172.16/12 + +# This directive forces `chronyd' to send a message to syslog if it +# makes a system clock adjustment larger than a threshold value in seconds. + +logchange 0.5 + +# This directive defines an email address to which mail should be sent +# if chronyd applies a correction exceeding a particular threshold to the +# system clock. + +# mailonchange root@localhost 0.5 + +# This directive tells chrony to regulate the real-time clock and tells it +# Where to store related data. It may not work on some newer motherboards +# that use the HPET real-time clock. It requires enhanced real-time +# support in the kernel. I've commented it out because with certain +# combinations of motherboard and kernel it is reported to cause lockups. + +# rtcfile /var/lib/chrony/chrony.rtc + +# If the last line of this file reads 'rtconutc' chrony will assume that +# the CMOS clock is on UTC (GMT). If it reads '# rtconutc' or is absent +# chrony will assume local time. The line (if any) was written by the +# chrony postinst based on what it found in /etc/default/rcS. You may +# change it if necessary. 
+rtconutc + +user {{ ubtu24cis_chrony_user }} diff --git a/templates/etc/chrony/sources.d/pool.sources.j2 b/templates/etc/chrony/sources.d/pool.sources.j2 new file mode 100644 index 0000000..ffb4b27 --- /dev/null +++ b/templates/etc/chrony/sources.d/pool.sources.j2 @@ -0,0 +1,7 @@ +## Ansible controlled file +# Added as part of ansible-lockdown CIS baseline +# provided by MindPointGroup LLC + +{% for pool in ubtu24cis_time_pool %} +pool {{ pool.name }} {{ pool.options }} +{% endfor %} diff --git a/templates/etc/chrony/sources.d/server.sources.j2 b/templates/etc/chrony/sources.d/server.sources.j2 new file mode 100644 index 0000000..adf2758 --- /dev/null +++ b/templates/etc/chrony/sources.d/server.sources.j2 @@ -0,0 +1,7 @@ +## Ansible controlled file +# Added as part of ansible-lockdown CIS baseline +# provided by MindPointGroup LLC + +{% for server in ubtu24cis_time_servers %} +server {{ server.name }} {{ server.options }} +{% endfor %} diff --git a/templates/etc/dconf/db/00-automount_lock.j2 b/templates/etc/dconf/db/00-automount_lock.j2 new file mode 100644 index 0000000..3534474 --- /dev/null +++ b/templates/etc/dconf/db/00-automount_lock.j2 @@ -0,0 +1,9 @@ +## Ansible controlled file +# Added as part of CIS +# provided by MindPointGroup LLC + +# Lock desktop media-handling automount setting +/org/gnome/desktop/media-handling/automount + +# Lock desktop media-handling automount-open +/org/gnome/desktop/media-handling/automount-open diff --git a/templates/etc/dconf/db/00-autorun_lock.j2 b/templates/etc/dconf/db/00-autorun_lock.j2 new file mode 100644 index 0000000..392af74 --- /dev/null +++ b/templates/etc/dconf/db/00-autorun_lock.j2 @@ -0,0 +1,6 @@ +## Ansible controlled file +# Added as part of CIS +# provided by MindPointGroup LLC + +# Lock desktop media-handling settings +/org/gnome/desktop/media-handling/autorun-never diff --git a/templates/etc/dconf/db/00-media-automount.j2 b/templates/etc/dconf/db/00-media-automount.j2 new file mode 100644 index 
0000000..227498e --- /dev/null +++ b/templates/etc/dconf/db/00-media-automount.j2 @@ -0,0 +1,7 @@ +## Ansible controlled file +# Added as part of CIS +# provided by MindPointGroup LLC + +[org/gnome/desktop/media-handling] +automount=false +automount-open=false diff --git a/templates/etc/dconf/db/00-media-autorun.j2 b/templates/etc/dconf/db/00-media-autorun.j2 new file mode 100644 index 0000000..a8c297f --- /dev/null +++ b/templates/etc/dconf/db/00-media-autorun.j2 @@ -0,0 +1,6 @@ +## Ansible controlled file +# Added as part of CIS +# provided by MindPointGroup LLC + +[org/gnome/desktop/media-handling] +autorun-never=true diff --git a/templates/etc/dconf/db/00-screensaver.j2 b/templates/etc/dconf/db/00-screensaver.j2 new file mode 100644 index 0000000..56dd494 --- /dev/null +++ b/templates/etc/dconf/db/00-screensaver.j2 @@ -0,0 +1,17 @@ +## Ansible controlled file +# Added as part of CIS +# provided by MindPointGroup LLC + + +# Specify the dconf path +[org/gnome/desktop/session] + +# Number of seconds of inactivity before the screen goes blank +# Set to 0 seconds if you want to deactivate the screensaver. 
+idle-delay=uint32 {{ ubtu24cis_screensaver_idle_delay }} + +# Specify the dconf path +[org/gnome/desktop/screensaver] + +# Number of seconds after the screen is blank before locking the screen +lock-delay=uint32 {{ ubtu24cis_screensaver_lock_delay }} diff --git a/templates/etc/dconf/db/00-screensaver_lock.j2 b/templates/etc/dconf/db/00-screensaver_lock.j2 new file mode 100644 index 0000000..5d5869f --- /dev/null +++ b/templates/etc/dconf/db/00-screensaver_lock.j2 @@ -0,0 +1,9 @@ +## Ansible controlled file +# Added as part of CIS +# provided by MindPointGroup LLC + +# Lock desktop screensaver idle-delay setting +/org/gnome/desktop/session/idle-delay + +# Lock desktop screensaver lock-delay setting +/org/gnome/desktop/screensaver/lock-delay diff --git a/templates/etc/grub.d/00_user.j2 b/templates/etc/grub.d/00_user.j2 new file mode 100644 index 0000000..c531ac5 --- /dev/null +++ b/templates/etc/grub.d/00_user.j2 @@ -0,0 +1,8 @@ +## Ansible controlled file +# Added as part of ansible-lockdown CIS baseline +# provided by MindPointGroup LLC + +cat < /dev/null; then # Check if the module is currently loaded + l_output2+=(" - Kernel module: \"$l_mod_name\" is loaded" "") + fi + } + while IFS= read -r -d $'\0' l_module_dir; do + a_available_modules+=("$(basename "$l_module_dir")") + done < <(find "$(readlink -f /lib/modules/"$(uname -r)"/kernel/fs)" -mindepth 1 -maxdepth 1 -type d ! -empty -print0) + while IFS= read -r l_exclude; do + if grep -Pq -- "\b$l_exclude\b" <<< "${a_cve_exists[*]}"; then + a_output2+=(" - ** WARNING: kernel module: \"$l_exclude\" has a CVE and is currently mounted! **") + elif + grep -Pq -- "\b$l_exclude\b" <<< "${a_available_modules[*]}"; then + a_output+=(" - Kernel module: \"$l_exclude\" is currently mounted - do NOT unload or disable") + fi + ! 
grep -Pq -- "\b$l_exclude\b" <<< "${a_ignore[*]}" && a_ignore+=("$l_exclude") + done < <(findmnt -knD | awk '{print $2}' | sort -u) + while IFS= read -r l_config; do + a_modprope_config+=("$l_config") + done < <(modprobe --showconfig | grep -P '^\h*(blacklist|install)') + for l_mod_name in "${a_available_modules[@]}"; do # Iterate over all filesystem modules + [[ "$l_mod_name" =~ overlay ]] && l_mod_name="${l_mod_name::-2}" + if grep -Pq -- "\b$l_mod_name\b" <<< "${a_ignore[*]}"; then + a_excluded+=(" - Kernel module: \"$l_mod_name\"") + else + f_module_chk + fi + done +# Output findings + + echo "### Script can be found at ${BASH_SOURCE} ##" + if [ "${#a_output2[@]}" -le 0 ]; then + printf '%s\n' "" " - No unused filesystem kernel modules are enabled" "${a_output[@]}" "" + else + printf '%s\n' "" "-- Audit Result: --" " ** REVIEW the following **" "${a_output2[@]}" + # Changed return value to capture error + exit 99 + #[ "${#a_output[@]}" -gt 0 ] && printf '%s\n' "" "-- Correctly set: --" "${a_output[@]}" "" + fi +} + +{% endraw %} diff --git a/templates/usr/share/pam-configs/faillock.j2 b/templates/usr/share/pam-configs/faillock.j2 new file mode 100644 index 0000000..738eff5 --- /dev/null +++ b/templates/usr/share/pam-configs/faillock.j2 @@ -0,0 +1,6 @@ +Name: Enable pam_faillock to deny access +Default: yes +Priority: 0 +Auth-Type: Primary +Auth: + [default=die] pam_faillock.so authfail diff --git a/templates/usr/share/pam-configs/faillock_notify.j2 b/templates/usr/share/pam-configs/faillock_notify.j2 new file mode 100644 index 0000000..287839d --- /dev/null +++ b/templates/usr/share/pam-configs/faillock_notify.j2 @@ -0,0 +1,9 @@ +Name: Notify of failed login attempts and reset count upon success +Default: yes +Priority: 1024 +Auth-Type: Primary +Auth: + requisite pam_faillock.so preauth +Account-Type: Primary +Account: + required pam_faillock.so diff --git a/templates/usr/share/pam-configs/pam_unix.j2 b/templates/usr/share/pam-configs/pam_unix.j2 new file mode 
100644 index 0000000..bffa0e9 --- /dev/null +++ b/templates/usr/share/pam-configs/pam_unix.j2 @@ -0,0 +1,23 @@ +Name: Unix authentication +Default: yes +Priority: 256 +Auth-Type: Primary +Auth: + [success=end default=ignore] pam_unix.so try_first_pass +Auth-Initial: + [success=end default=ignore] pam_unix.so +Account-Type: Primary +Account: + [success=end new_authtok_reqd=done default=ignore] pam_unix.so +Account-Initial: + [success=end new_authtok_reqd=done default=ignore] pam_unix.so +Session-Type: Additional +Session: + required pam_unix.so +Session-Initial: + required pam_unix.so +Password-Type: Primary +Password: + [success=end default=ignore] pam_unix.so obscure{% if ubtu24cis_rule_5_3_3_4_4 %} use_authtok{% endif %} try_first_pass{% if ubtu24cis_rule_5_3_3_4_3 %} {{ ubtu24cis_passwd_hash_algo }}{% endif %} +Password-Initial: + [success=end default=ignore] pam_unix.so obscure{% if ubtu24cis_rule_5_3_3_4_3 %} {{ ubtu24cis_passwd_hash_algo }}{% endif %} diff --git a/templates/usr/share/pam-configs/pwhistory.j2 b/templates/usr/share/pam-configs/pwhistory.j2 new file mode 100644 index 0000000..aa4cbaf --- /dev/null +++ b/templates/usr/share/pam-configs/pwhistory.j2 @@ -0,0 +1,6 @@ +Name: pwhistory password history checking +Default: yes +Priority: 1024 +Password-Type: Primary +Password: + requisite pam_pwhistory.so try_first_pass{% if ubtu24cis_rule_5_3_3_3_1 %} remember={{ ubtu24cis_pamd_pwhistory_remember }}{% endif %}{% if ubtu24cis_rule_5_3_3_3_2 %} enforce_for_root{% endif %}{% if ubtu24cis_rule_5_3_3_3_3 %} use_authtok{% endif %} diff --git a/templates/usr/share/pam-configs/pwquality.j2 b/templates/usr/share/pam-configs/pwquality.j2 new file mode 100644 index 0000000..18e8dd4 --- /dev/null +++ b/templates/usr/share/pam-configs/pwquality.j2 @@ -0,0 +1,8 @@ +Name: Pwquality password strength checking +Default: yes +Priority: 1024 +Conflicts: cracklib +Password-Type: Primary +Password: + requisite pam_pwquality.so retry=3 {# # pragma: 
allowlist secret #} +Password-Initial: requisite diff --git a/vars/audit.yml b/vars/audit.yml new file mode 100644 index 0000000..1dc1cf1 --- /dev/null +++ b/vars/audit.yml @@ -0,0 +1,41 @@ +--- + +#### Audit Configuration Settings #### + +# Timeout for those cmds that take longer to run where timeout set +audit_cmd_timeout: 120000 + +# if get_audit_binary_method == download change accordingly +audit_bin_url: "https://github.com/goss-org/goss/releases/download/{{ audit_bin_version.release }}/goss-linux-" + +### Goss Audit Benchmark file ### +## managed by the control audit_content +# git +audit_file_git: "https://github.com/ansible-lockdown/{{ benchmark }}-Audit.git" +audit_git_version: "benchmark_{{ benchmark_version }}" + +## Goss configuration information +# Where the goss audit configuration will be stored - NOTE benchmark-audit is expected +audit_conf_dir: "{{ audit_conf_dest | default('/opt') }}/{{ benchmark }}-Audit" + +# If changed these can affect other products +pre_audit_outfile: "{{ audit_log_dir }}/{{ ansible_facts.hostname }}-{{ benchmark }}-{{ benchmark_version }}_pre_scan_{{ ansible_facts.date_time.epoch }}.{{ audit_format }}" +post_audit_outfile: "{{ audit_log_dir }}/{{ ansible_facts.hostname }}-{{ benchmark }}-{{ benchmark_version }}_post_scan_{{ ansible_facts.date_time.epoch }}.{{ audit_format }}" + +## The following should not need changing + +### Audit binary settings ### +audit_bin_version: + release: v0.4.8 + AMD64_checksum: 'sha256:85d00b7bba5f175bec95de7dfe1f71f8f25204914aad4c6f03c8457868eb6e2f' + ARM64_checksum: 'sha256:bca8c898bfd35b94c51455ece6193c95e2cd7b2b183ac2047b2d76291e73e47d' +audit_bin_path: /usr/local/bin/ +audit_bin: "{{ audit_bin_path }}goss" +audit_format: json + +audit_vars_path: "{{ audit_conf_dir }}/vars/{{ ansible_facts.hostname }}.yml" +audit_results: | + The{% if not audit_only %} pre remediation{% endif %} audit results are: {{ pre_audit_results }} + {% if not audit_only %}The post remediation audit results are: {{ 
post_audit_results }}{% endif %} + + Full breakdown can be found in {{ audit_log_dir }} diff --git a/vars/is_container.yml b/vars/is_container.yml new file mode 100644 index 0000000..189d499 --- /dev/null +++ b/vars/is_container.yml @@ -0,0 +1,104 @@ +--- + +# File to skip controls if container +# Based on standard image no changes +# it is expected that all pkgs required for the container are already installed + +## controls + +# Firewall +ubtu24cis_firewall_package: None + +# Filesystems + +## Related individual rules +# Aide +ubtu24cis_rule_1_4_1: false +ubtu24cis_rule_1_4_2: false + +# AppArmor +ubtu24cis_rule_1_6_1: false +ubtu24cis_rule_1_6_2: false +ubtu24cis_rule_1_6_3: false +ubtu24cis_rule_1_6_4: false + +# time sync +ubtu24cis_rule_2_1_1_1: false +ubtu24cis_rule_2_2_1_2: false + +# Auditd +ubtu24cis_rule_4_1_1_1: false +ubtu24cis_rule_4_1_1_2: false +ubtu24cis_rule_4_1_1_3: false +ubtu24cis_rule_4_1_1_4: false +ubtu24cis_rule_4_1_2_1: false +ubtu24cis_rule_4_1_2_2: false +ubtu24cis_rule_4_1_2_3: false +# Auditd rules +ubtu24cis_rule_4_1_3_1: false +ubtu24cis_rule_4_1_3_2: false +ubtu24cis_rule_4_1_3_3: false +ubtu24cis_rule_4_1_3_4: false +ubtu24cis_rule_4_1_3_5: false +ubtu24cis_rule_4_1_3_6: false +ubtu24cis_rule_4_1_3_7: false +ubtu24cis_rule_4_1_3_8: false +ubtu24cis_rule_4_1_3_9: false +ubtu24cis_rule_4_1_3_10: false +ubtu24cis_rule_4_1_3_11: false +ubtu24cis_rule_4_1_3_12: false +ubtu24cis_rule_4_1_3_13: false +ubtu24cis_rule_4_1_3_14: false +ubtu24cis_rule_4_1_3_15: false +ubtu24cis_rule_4_1_3_16: false +ubtu24cis_rule_4_1_3_17: false +ubtu24cis_rule_4_1_3_18: false +ubtu24cis_rule_4_1_3_19: false +ubtu24cis_rule_4_1_3_20: false +ubtu24cis_rule_4_1_3_21: false +# Auditd file access +ubtu24cis_rule_4_1_4_1: false +ubtu24cis_rule_4_1_4_2: false +ubtu24cis_rule_4_1_4_3: false +ubtu24cis_rule_4_1_4_4: false +ubtu24cis_rule_4_1_4_5: false +ubtu24cis_rule_4_1_4_6: false +ubtu24cis_rule_4_1_4_7: false +ubtu24cis_rule_4_1_4_8: false +ubtu24cis_rule_4_1_4_9: false 
+ubtu24cis_rule_4_1_4_10: false +ubtu24cis_rule_4_1_4_11: false + +# cron +ubtu24cis_rule_5_1_1: false +ubtu24cis_rule_5_1_2: false +ubtu24cis_rule_5_1_3: false +ubtu24cis_rule_5_1_4: false +ubtu24cis_rule_5_1_5: false +ubtu24cis_rule_5_1_6: false +ubtu24cis_rule_5_1_7: false +ubtu24cis_rule_5_1_8: false + +# ssh +ubtu24cis_rule_5_2_1: false +ubtu24cis_rule_5_2_2: false +ubtu24cis_rule_5_2_3: false +ubtu24cis_rule_5_2_4: false +ubtu24cis_rule_5_2_5: false +ubtu24cis_rule_5_2_6: false +ubtu24cis_rule_5_2_7: false +ubtu24cis_rule_5_2_8: false +ubtu24cis_rule_5_2_9: false +ubtu24cis_rule_5_2_10: false +ubtu24cis_rule_5_2_11: false +ubtu24cis_rule_5_2_12: false +ubtu24cis_rule_5_2_13: false +ubtu24cis_rule_5_2_14: false +ubtu24cis_rule_5_2_15: false +ubtu24cis_rule_5_2_16: false +ubtu24cis_rule_5_2_17: false +ubtu24cis_rule_5_2_18: false +ubtu24cis_rule_5_2_19: false +ubtu24cis_rule_5_2_20: false +ubtu24cis_rule_5_2_21: false +ubtu24cis_rule_5_2_22: false diff --git a/vars/main.yml b/vars/main.yml new file mode 100644 index 0000000..f39cd1f --- /dev/null +++ b/vars/main.yml @@ -0,0 +1,14 @@ +--- + +min_ansible_version: 2.12.1 +# Set default value for reboot value +change_requires_reboot: false +# The role discovers dynamically (in tasks/main.yml) whether it +# is executed on a container image and sets the variable +# system_is_container to true. Otherwise, the default value +# 'false' is left unchanged. +system_is_container: false + +# Used to control warning summary +warn_control_list: "" +warn_count: 0