mirror of https://github.com/ovh/debian-cis.git
IMP(test): Add feature to run functional tests in docker instance
Add usecase in basename
Add test files for checks with find command
Always show logs
FIX: run void script to generate config and avoid sed failure
Update README with functional test description
Add skeleton for functional test
Add argument to launch only specific test suite
Add support for debian8 and compulsory mention of debian version at launch
Improve README
Simplify test file syntax to avoid copy/paste mistake
Add script that runs tests on all debian targets
Improve run_all_target script with nowait and nodel options
Add dockerfile for Buster pre-version
Chore: Use getopt for options and reviewed code by shellcheck
Add trap to ensure cleanup on exit/interrupt
Remove quotes that lead to `less` misinterpretation of the filenames
Set `local` for variables inside `test_audit` func
Move functional assertion functions to dedicated file
Add cleanup for logs and containers
Improve cleanup, and now exits
Apply shellcheck recommendations
FIX: allow script to be run from anywhere (dirname $0)

Changes to be committed:
    modified:   README.md
    new file:   src/skel.test
    new file:   tests/docker/Dockerfile.debian10_20181226
    new file:   tests/docker/Dockerfile.debian8
    new file:   tests/docker/Dockerfile.debian9
    new file:   tests/docker_build_and_run_tests.sh
    new file:   tests/hardening/12.10_find_suid_files.sh
    new file:   tests/hardening/12.11_find_sgid_files.sh
    new file:   tests/hardening/12.7_find_world_writable_file.sh
    new file:   tests/hardening/12.8_find_unowned_files.sh
    new file:   tests/hardening/12.9_find_ungrouped_files.sh
    new file:   tests/hardening/2.17_sticky_bit_world_writable_folder.sh
    new file:   tests/launch_tests.sh
    new file:   tests/lib.sh
    new file:   tests/run_all_targets.sh
This commit is contained in:
parent 843ce3efc3
commit 18693200dc
33 README.md
@@ -113,6 +113,39 @@ Code your check explaining what it does then if you want to test
$ sed -i "s/status=.+/status=enabled/" etc/conf.d/99.99_custom_script.cfg
$ ./bin/hardening/99.99_custom_script.sh
```

## Functional testing

Functional tests are available. They are to be run in a Docker environment.

```console
$ ./tests/docker_build_and_run_tests.sh <target> [name of test script...]
```

Where `target` is a Debian version such as `debian8` or `debian9`.

Running without script arguments will run all tests in the `./tests/hardening/` directory.
Alternatively, you can specify one or several test scripts to be run.
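
For example, the following runs only the sticky-bit test scenario against the `debian9` target:

```console
$ ./tests/docker_build_and_run_tests.sh debian9 2.17_sticky_bit_world_writable_folder.sh
```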

This will build a new Docker image from the current state of the project and run
a container that will assess the compliance of a blank Debian system for each check.
For hardening audit points, the audit is expected to fail, then be fixed, so that
running the audit a second time will succeed.
For vulnerable items, the audit is expected to succeed on a blank
system; the functional tests then introduce a weak point, which is expected
to be detected when running the audit a second time. Finally, running the `apply`
part of the debian-cis script restores a compliant state, which is expected to be
assessed by running the audit check a third time.

Functional tests can make use of the following helper functions:

* `describe <test description>`
* `run <usecase> <audit_script> <audit_script_options>`
* `register_test <test content (see below)>`
* `retvalshouldbe <integer>` checks the script return value
* `contain "<SAMPLE TEXT>"` checks that the output contains the given text
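
For instance, a test typically registers its assertions and then calls `run`; this is the
pattern used by the scripts in `./tests/hardening/` (the expected message below is a placeholder):

```bash
describe Running on blank host
register_test retvalshouldbe 0
register_test contain "<EXPECTED COMPLIANT MESSAGE>"
run blank /opt/debian-cis/bin/hardening/"${script}".sh --audit-all
```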

In order to write your own functional test, you will find a code skeleton in
`./src/skel.test`.

## Disclaimer
35 src/skel.test Normal file
@@ -0,0 +1,35 @@
test_audit() {
    # Make all variables local to the function by using `local`

    # Optional part, only here if you need to change the audit script's default configuration
    describe Running void to generate the conf file that will later be edited
    /opt/debian-cis/bin/hardening/"${script}".sh || true
    # for instance
    echo 'EXCEPTIONS="$EXCEPTIONS <some file to treat as exception>"' >> /opt/debian-cis/etc/conf.d/"${script}".cfg

    # if your blank system is expected to be compliant
    describe Running on blank host
    register_test retvalshouldbe 0
    register_test contain "<SAMPLE MESSAGE>"
    run blank /opt/debian-cis/bin/hardening/"${script}".sh --audit-all

    # Perform an operation that will leave the system non-compliant
    describe Tests purposely failing
    register_test retvalshouldbe 1
    register_test contain "<SAMPLE TEXT SHOWING BAD CONFIG>"
    register_test contain "$targetfile"
    run noncompliant /opt/debian-cis/bin/hardening/"${script}".sh --audit-all

    describe Correcting situation
    # if the audit script provides an "apply" option, enable it in the configuration and run it
    sed -i 's/disabled/enabled/' /opt/debian-cis/etc/conf.d/"${script}".cfg
    /opt/debian-cis/bin/hardening/"${script}".sh || true
    # otherwise, perform an action that makes the system compliant again

    # Finally, check that your corrective actions result in a compliant system
    describe Checking resolved state
    register_test retvalshouldbe 0
    register_test contain "<SAMPLE MESSAGE>"
    run resolved /opt/debian-cis/bin/hardening/"${script}".sh --audit-all
}
17 tests/docker/Dockerfile.debian10_20181226 Normal file
@@ -0,0 +1,17 @@
FROM debian:buster-20181226

RUN groupadd -g 500 secaudit && useradd -u 500 -g 500 -s /bin/bash secaudit && mkdir -m 700 /home/secaudit && chown secaudit:secaudit /home/secaudit

RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y bc openssh-server sudo

COPY --chown=500:500 . /opt/debian-cis/

COPY debian/default /etc/default/cis-hardening
RUN sed -i 's#cis-hardening#debian-cis#' /etc/default/cis-hardening

COPY cisharden.sudoers /etc/sudoers.d/secaudit
RUN sed -i 's#cisharden#secaudit#' /etc/sudoers.d/secaudit

ENTRYPOINT ["/opt/debian-cis/tests/launch_tests.sh"]
17 tests/docker/Dockerfile.debian8 Normal file
@@ -0,0 +1,17 @@
FROM debian:jessie

RUN groupadd -g 500 secaudit && useradd -u 500 -g 500 -s /bin/bash secaudit && mkdir -m 700 /home/secaudit && chown secaudit:secaudit /home/secaudit

RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y bc openssh-server sudo

COPY --chown=500:500 . /opt/debian-cis/

COPY debian/default /etc/default/cis-hardening
RUN sed -i 's#cis-hardening#debian-cis#' /etc/default/cis-hardening

COPY cisharden.sudoers /etc/sudoers.d/secaudit
RUN sed -i 's#cisharden#secaudit#' /etc/sudoers.d/secaudit

ENTRYPOINT ["/opt/debian-cis/tests/launch_tests.sh"]
17 tests/docker/Dockerfile.debian9 Normal file
@@ -0,0 +1,17 @@
FROM debian:stretch

RUN groupadd -g 500 secaudit && useradd -u 500 -g 500 -s /bin/bash secaudit && mkdir -m 700 /home/secaudit && chown secaudit:secaudit /home/secaudit

RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y bc openssh-server sudo

COPY --chown=500:500 . /opt/debian-cis/

COPY debian/default /etc/default/cis-hardening
RUN sed -i 's#cis-hardening#debian-cis#' /etc/default/cis-hardening

COPY cisharden.sudoers /etc/sudoers.d/secaudit
RUN sed -i 's#cisharden#secaudit#' /etc/sudoers.d/secaudit

ENTRYPOINT ["/opt/debian-cis/tests/launch_tests.sh"]
34 tests/docker_build_and_run_tests.sh Executable file
@@ -0,0 +1,34 @@
#! /bin/bash
# This script builds a Docker image for the targeted Debian version and runs the functional tests in it
set -e

target=""
regex="debian[[:digit:]]+"

if [ $# -gt 0 ]; then
    if [[ $1 =~ $regex ]]; then
        target=$1
        shift
    fi
fi
if [ -z "$target" ] ; then
    echo "Usage: $0 <TARGET> [test_script...]" >&2
    echo -n "Supported targets are: " >&2
    #ls -1v "$(dirname "$0")"/docker/Dockerfile.* | sed -re 's=^.+/Dockerfile\.==' | tr "\n" " " >&2
    find "$(dirname "$0")"/docker -name "*Dockerfile.*" | sort -V | sed -re 's=^.+/Dockerfile\.==' | tr "\n" " " >&2
    echo >&2
    exit 1
fi

dockerfile="$(dirname "$0")"/docker/Dockerfile.${target}
if [ ! -f "$dockerfile" ] ; then
    echo "ERROR: No target available for $target" >&2
    exit 1
fi

trap 'docker rm debian_cis_test_${target}' EXIT HUP INT
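
# Build the image from the repository root and run the functional tests in a
# container; any extra arguments are passed through to tests/launch_tests.sh (the image ENTRYPOINT)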
docker build -f "$dockerfile" -t "debian_cis_test:${target}" "$(dirname "$0")"/../

docker run --name debian_cis_test_"${target}" debian_cis_test:"${target}" "$@"
29 tests/hardening/12.10_find_suid_files.sh Executable file
@@ -0,0 +1,29 @@
test_audit() {
    describe Running void to generate the conf file that will later be edited
    # shellcheck disable=2154
    /opt/debian-cis/bin/hardening/"${script}".sh || true
    echo 'EXCEPTIONS="$EXCEPTIONS /usr/lib/dbus-1.0/dbus-daemon-launch-helper"' >> /opt/debian-cis/etc/conf.d/"${script}".cfg

    describe Running on blank host
    register_test retvalshouldbe 0
    register_test contain "No unknown suid files found"
    run blank /opt/debian-cis/bin/hardening/"${script}".sh --audit-all

    describe Tests purposely failing
    local targetfile="/home/secaudit/suid_file"
    touch $targetfile
    chmod 4700 $targetfile
    register_test retvalshouldbe 1
    register_test contain "Some suid files are present"
    register_test contain "$targetfile"
    run noncompliant /opt/debian-cis/bin/hardening/"${script}".sh --audit-all

    describe correcting situation
    chmod 700 $targetfile

    describe Checking resolved state
    register_test retvalshouldbe 0
    register_test contain "No unknown suid files found"
    run resolved /opt/debian-cis/bin/hardening/"${script}".sh --audit-all
}
25 tests/hardening/12.11_find_sgid_files.sh Executable file
@@ -0,0 +1,25 @@
test_audit() {
    describe Running on blank host
    register_test retvalshouldbe 0
    register_test contain "No unknown sgid files found"
    # shellcheck disable=2154
    run blank /opt/debian-cis/bin/hardening/"${script}".sh --audit-all

    describe Tests purposely failing
    local targetfile="/home/secaudit/sgid_file"
    touch $targetfile
    chmod 2700 $targetfile
    register_test retvalshouldbe 1
    register_test contain "Some sgid files are present"
    register_test contain "$targetfile"
    run noncompliant /opt/debian-cis/bin/hardening/"${script}".sh --audit-all

    describe correcting situation
    chmod 700 $targetfile

    describe Checking resolved state
    register_test retvalshouldbe 0
    register_test contain "No unknown sgid files found"
    run resolved /opt/debian-cis/bin/hardening/"${script}".sh --audit-all
}
25 tests/hardening/12.7_find_world_writable_file.sh Executable file
@@ -0,0 +1,25 @@
test_audit() {
    describe Running on blank host
    register_test retvalshouldbe 0
    register_test contain "No world writable files found"
    # shellcheck disable=2154
    run blank /opt/debian-cis/bin/hardening/"${script}".sh --audit-all

    describe Tests purposely failing
    local targetfile="/home/secaudit/worldwritable"
    touch $targetfile
    chmod 777 $targetfile
    register_test retvalshouldbe 1
    register_test contain "Some world writable files are present"
    run noncompliant /opt/debian-cis/bin/hardening/"${script}".sh --audit-all

    describe correcting situation
    sed -i 's/disabled/enabled/' /opt/debian-cis/etc/conf.d/"${script}".cfg
    /opt/debian-cis/bin/hardening/"${script}".sh --apply || true

    describe Checking resolved state
    register_test retvalshouldbe 0
    register_test contain "No world writable files found"
    run resolved /opt/debian-cis/bin/hardening/"${script}".sh --audit-all
}
25 tests/hardening/12.8_find_unowned_files.sh Executable file
@@ -0,0 +1,25 @@
test_audit() {
    describe Running on blank host
    register_test retvalshouldbe 0
    register_test contain "No unowned files found"
    # shellcheck disable=2154
    run blank /opt/debian-cis/bin/hardening/"${script}".sh --audit-all

    describe Tests purposely failing
    local targetfile="/home/secaudit/unowned"
    touch $targetfile
    chown 1200 $targetfile
    register_test retvalshouldbe 1
    register_test contain "Some unowned files are present"
    run noncompliant /opt/debian-cis/bin/hardening/"${script}".sh --audit-all

    describe correcting situation
    sed -i 's/disabled/enabled/' /opt/debian-cis/etc/conf.d/"${script}".cfg
    /opt/debian-cis/bin/hardening/"${script}".sh || true

    describe Checking resolved state
    register_test retvalshouldbe 0
    register_test contain "No unowned files found"
    run resolved /opt/debian-cis/bin/hardening/"${script}".sh --audit-all
}
25 tests/hardening/12.9_find_ungrouped_files.sh Executable file
@@ -0,0 +1,25 @@
test_audit() {
    describe Running on blank host
    register_test retvalshouldbe 0
    register_test contain "No ungrouped files found"
    # shellcheck disable=2154
    run blank /opt/debian-cis/bin/hardening/"${script}".sh --audit-all

    describe Tests purposely failing
    local targetfile="/home/secaudit/ungrouped"
    touch $targetfile
    chown 1200:1200 $targetfile
    register_test retvalshouldbe 1
    register_test contain "Some ungrouped files are present"
    run noncompliant /opt/debian-cis/bin/hardening/"${script}".sh --audit-all

    describe correcting situation
    sed -i 's/disabled/enabled/' /opt/debian-cis/etc/conf.d/"${script}".cfg
    /opt/debian-cis/bin/hardening/"${script}".sh --apply || true

    describe Checking resolved state
    register_test retvalshouldbe 0
    register_test contain "No ungrouped files found"
    run resolved /opt/debian-cis/bin/hardening/"${script}".sh --audit-all
}
24 tests/hardening/2.17_sticky_bit_world_writable_folder.sh Executable file
@@ -0,0 +1,24 @@
test_audit() {
    describe Running on blank host
    register_test retvalshouldbe 0
    register_test contain "All world writable directories have a sticky bit"
    # shellcheck disable=2154
    run blank /opt/debian-cis/bin/hardening/"${script}".sh --audit-all

    describe Tests purposely failing
    local targetdir="/home/secaudit/world_writable_folder"
    mkdir $targetdir || true
    chmod 777 $targetdir
    register_test retvalshouldbe 1
    register_test contain "Some world writable directories are not on sticky bit mode"
    run noncompliant /opt/debian-cis/bin/hardening/"${script}".sh --audit-all

    describe correcting situation
    sed -i 's/disabled/enabled/' /opt/debian-cis/etc/conf.d/"${script}".cfg
    /opt/debian-cis/bin/hardening/"${script}".sh --apply || true

    describe Checking resolved state
    register_test retvalshouldbe 0
    register_test contain "All world writable directories have a sticky bit"
    run resolved /opt/debian-cis/bin/hardening/"${script}".sh --audit-all
}
199 tests/launch_tests.sh Executable file
@@ -0,0 +1,199 @@
#!/bin/bash
# stop on any error
set -e
# stop on undefined variable
set -u
# debug
#set -x

mytmpdir=$(mktemp -d -t debian-cis-test.XXXXXX)

cleanup_and_exit() {
    rm -rf "$mytmpdir"
    exit 255
}
trap "cleanup_and_exit" EXIT HUP INT

outdir="$mytmpdir/out"
mkdir -p "$outdir" || exit 1

tests_list=""
testno=0
testcount=0

dismiss_count=0
nbfailedret=0
nbfailedgrep=0
nbfailedconsist=0
listfailedret=""
listfailedgrep=""
listfailedconsist=""

usecase=""
usecase_name=""
usecase_name_root=""
usecase_name_sudo=""
declare -a REGISTERED_TESTS

#####################
# Utility functions #
#####################
# in case a fatal event occurs, fatal logs and exits with return code 1
fatal() {
    printf "%b %b\n" "\033[1;91mFATAL\033[0m" "$*" >&2
    printf "%b \n" "\033[1;91mEXIT TEST SUITE WITH FAILURE\033[0m" >&2
    exit 1
}
# prints that a test failed
fail() {
    printf "%b %b\n" "\033[1;30m\033[41m[FAIL]\033[0m" "$*" >&2
}
# prints that a test succeeded
ok() {
    printf "%b %b\n" "\033[30m\033[42m[ OK ]\033[0m" "$*" >&2
}

# retrieves the audit script logfile
get_stdout()
{
    cat "$outdir"/"$usecase_name".log
}

# Reset the list of test assertions
clear_registered_tests() {
    unset REGISTERED_TESTS
    declare -a REGISTERED_TESTS
    dismiss_count=0
}

# Generates a formatted test name
make_usecase_name() {
    usecase=$1
    shift
    role=$1
    usecase_name=$(printf '%03d-%s-%s-%s' "$testno" "$name" "$usecase" "$role" | sed -re "s=/=_=g")
    echo -n "$usecase_name"
}

# Plays the registered test suite
play_registered_tests() {
    usecase_name=$1
    if [[ "${REGISTERED_TESTS[*]}" ]]; then
        export numtest=${#REGISTERED_TESTS[@]}
        for t in "${!REGISTERED_TESTS[@]}"; do
            ${REGISTERED_TESTS[$t]}
        done
    fi
}

# Plays comparison tests to ensure that root and sudo execution have the same output
play_consistency_tests() {
    consist_test=0
    printf "\033[34m*** [%03d] %s::%s Root/Sudo Consistency Tests\033[0m\n" "$testno" "$test_file" "$usecase"
    retfile_root=$outdir/${usecase_name_root}.retval
    retfile_sudo=$outdir/${usecase_name_sudo}.retval
    cmp "$retfile_root" "$retfile_sudo" && ret=0 || ret=1
    if [[ ! 0 -eq $ret ]] ; then
        fail "$name" return values differ
        diff "$retfile_root" "$retfile_sudo" || true
        consist_test=1
    else
        ok "$name return values are equal"

    fi
    retfile_root=$outdir/${usecase_name_root}.log
    retfile_sudo=$outdir/${usecase_name_sudo}.log
    cmp "$retfile_root" "$retfile_sudo" && ret=0 || ret=1
    if [[ ! 0 -eq $ret ]] ; then
        fail "$name" logs differ
        diff "$retfile_root" "$retfile_sudo" || true
        consist_test=1
    else
        ok "$name logs are identical"
    fi

    if [ 1 -eq $consist_test ]; then
        if [ 0 -eq $dismiss_count ]; then
            nbfailedconsist=$(( nbfailedconsist + 1 ))
            listfailedconsist="$listfailedconsist $(make_usecase_name consist)"
        fi
    fi
}

# Actually runs one single audit script
_run()
{
    usecase_name=$1
    shift
    printf "\033[34m*** [%03d] %s \033[0m(%s)\n" "$testno" "$usecase_name" "$*"
    bash -c "$*" >"$outdir/$usecase_name.log" && true; echo $? > "$outdir/$usecase_name.retval"
    ret=$(< "$outdir"/"$usecase_name".retval)
    get_stdout
}

# Load assertion functions for functional tests
if [ ! -f "$(dirname "$0")"/lib.sh ]; then
    fatal "Cannot locate lib.sh"
fi
# shellcheck source=/opt/debian-cis/tests/lib.sh
. "$(dirname "$0")"/lib.sh

###################
# Execution start #
###################
printf "\033[1;36m###\n### %s\n### \033[0m\n" "Starting debian-cis functional testing"

# if no scripts were passed as arguments, list all available test scenarios to be played
if [ $# -eq 0 ]; then
    tests_list=$(ls -v "$(dirname "$0")"/hardening/)
    testcount=$(wc -l <<< "$tests_list")
else
    tests_list="$*"
    testcount=$#
fi


for test_file in $tests_list; do
    test_file_path=$(dirname "$0")/hardening/"$test_file"
    if [ ! -f "$test_file_path" ]; then
        fatal "Test file \"$test_file\" does not exist"
    fi
    # script var is used inside test files
    # shellcheck disable=2034
    script="$(basename "$test_file" .sh)"
    # source test scenario file to add `test_audit` func
    # shellcheck disable=1090
    . "$test_file_path"
    testno=$(( testno + 1 ))
    # shellcheck disable=2001
    name="$(echo "${test_file%%.sh}" | sed -E 's/^[0-9]+\.[0-9]+_//' )"
    printf "\033[1;36m### [%03d/%03d] %s \033[0m\n" "$testno" "$testcount" "$test_file"
    # test_audit is the function defined in $test_file, that carries the actual functional tests for this script
    test_audit
    # reset var names
    usecase_name=""
    usecase_name_root=""
    usecase_name_sudo=""
    unset -f test_audit
    echo ""
done

printf "\033[1;36m###\n### %s \033[0m\n" "Test report"
if [ $((nbfailedret + nbfailedgrep + nbfailedconsist )) -eq 0 ] ; then
    echo -e "\033[42m\033[30mAll tests succeeded :)\033[0m"
else
    (
        echo -e "\033[41mOne or more tests failed :(\033[0m"
        echo -e "- $nbfailedret unexpected return values ${listfailedret}"
        echo -e "- $nbfailedgrep unexpected text values $listfailedgrep"
        echo -e "- $nbfailedconsist root/sudo consistency $listfailedconsist"
    ) | tee "$outdir"/summary
fi
echo

set +e
set +u
totalerrors=$((nbfailedret + nbfailedgrep + nbfailedconsist ))
# leave `exit 255` for runtime errors
[ $totalerrors -ge 255 ] && totalerrors=254
exit $totalerrors
91 tests/lib.sh Normal file
@@ -0,0 +1,91 @@
# shellcheck shell=bash
############################################
# Assertion functions for functional tests #
############################################

# sugar to add a description of the test suite
# describe <STRING>
describe() {
    # shellcheck disable=2154
    printf "\033[36mxxx %s::%s \033[0m\n" "$name" "$*"
}

# Register an assertion on an audit before running it
# May be used several times
# See below assertion functions
# register_test <TEST_STRING>
register_test() {
    export numtest=0
    if [[ "notempty" == "${REGISTERED_TESTS[*]:+notempty}" ]]; then
        numtest=${#REGISTERED_TESTS[@]}
    fi
    REGISTERED_TESTS[numtest]="$*"
}

# retvalshouldbe checks that the audit return value equals the one passed as parameter
# retvalshouldbe <NUMBER>
retvalshouldbe()
{
    # shellcheck disable=2154
    retfile=$outdir/${usecase_name}.retval
    shouldbe=$1
    got=$(< "$retfile")
    if [ "$got" = "$shouldbe" ] ; then
        ok "RETURN VALUE" "($shouldbe)"
    else
        if [ 0 -eq "$dismiss_count" ]; then
            nbfailedret=$(( nbfailedret + 1 ))
            listfailedret="$listfailedret $usecase_name"
        fi
        fail "RETURN VALUE" "(got $got instead of $shouldbe)"
    fi
}

# contain looks for a string in the audit logfile
# contain [REGEX] <STRING|regexSTRING>
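# e.g.: contain "No unknown suid files found"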
contain()
{
    local specialoption=''
    if [ "$1" != "REGEX" ] ; then
        specialoption='-F'
    else
        specialoption='-E'
        shift
    fi
    file=$outdir/${usecase_name}.log
    pattern=$*
    if grep -q $specialoption -- "$pattern" "$file"; then
        ok "MUST CONTAIN" "($pattern)"
    else
        if [ 0 -eq "$dismiss_count" ]; then
            nbfailedgrep=$(( nbfailedgrep + 1 ))
            listfailedgrep="$listfailedgrep $usecase_name"
        fi
        fail "MUST CONTAIN" "($pattern)"
    fi
}

# the test is expected to fail (for instance on a blank system),
# so it won't be taken into account for the test suite's success
dismiss_count_for_test() {
    dismiss_count=1
}

# Run the audit script in both root and sudo mode, then play the assertion tests and
# the sudo/root consistency tests
# run <USECASE> <AUDIT_SCRIPT>
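# e.g.: run blank /opt/debian-cis/bin/hardening/"${script}".sh --audit-all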
run() {
    usecase=$1
    shift
    usecase_name_root=$(make_usecase_name "$usecase" "root")
    _run "$usecase_name_root" "$@"
    play_registered_tests "$usecase_name_root"

    usecase_name_sudo=$(make_usecase_name "$usecase" "sudo")
    _run "$usecase_name_sudo" "sudo -u secaudit" "$@" "--sudo"
    play_registered_tests "$usecase_name_sudo"

    play_consistency_tests
    clear_registered_tests
}
77 tests/run_all_targets.sh Executable file
@@ -0,0 +1,77 @@
#!/bin/bash
# usage: $0 [--nodel|--nowait] [1.1_script-to-test.sh...]
# --nodel will keep logs
# --nowait will not wait for you to see logs
# exits 0 if all docker test targets passed, otherwise 1 (some tests failed)

tmpdir=$(mktemp -d -t debcistest.XXXXXX)
failedtarget=""

cleanup() {
    if [ "$nodel" -eq 0 ]; then
        rm -rf "$tmpdir"
    fi
}

# `exit 255` for runtime error
trap "cleanup; exit 255" EXIT HUP INT

nodel=0
nowait=0
if [ ! -t 0 ]; then
    echo -e "\e[34mNo stdin \e[0m"
    nodel=1
    nowait=1
fi

OPTIONS=$(getopt --long nodel,nowait -- "$0" "$@")
eval set -- "$OPTIONS"
# Treating options
while true; do
    case "$1" in
        --nodel ) nodel=1; shift ;;
        --nowait ) nowait=1; shift ;;
        -- ) shift; break ;;
        * ) break ;;
    esac
done

# Execution summary
if [ "$nodel" -eq 1 ]; then
    echo -e "\e[34mLog directory: $tmpdir \e[0m"
fi
if [ "$nowait" -eq 1 ]; then
    echo -e "\e[34mRunning in non-interactive mode\e[0m"
fi

# Actual execution
# Loops over found targets and runs docker_build_and_run_tests
for target in $("$(dirname "$0")"/docker_build_and_run_tests.sh 2>&1 | grep "Supported" | cut -d ':' -f 2); do
    echo "Running $target $*"
    "$(dirname "$0")"/docker_build_and_run_tests.sh "$target" "$@" 2>&1 | \
        tee "${tmpdir}"/"${target}" | \
        grep -q "All tests succeeded"
    ret=$?
    if [[ 0 -eq $ret ]]; then
        echo -e "\e[92mOK\e[0m $target"
    else
        echo -e "\e[91mKO\e[0m $target"
        failedtarget="$failedtarget ${tmpdir}/${target}"
    fi
done

if [[ ! -z "$failedtarget" && "$nowait" -eq 0 ]]; then
    echo -e "\nPress \e[1mENTER\e[0m to display failed test logs"
    echo -e "Use \e[1m:n\e[0m (next) and \e[1m:p\e[0m (previous) to navigate between log files"
    echo -e "and \e[1mq\e[0m to quit"
    # shellcheck disable=2015,2162,2034
    test -t 0 && read _wait || true
    # disable shellcheck to allow expansion of logfiles list
    # shellcheck disable=2086
    less -R $failedtarget
fi

trap - EXIT HUP INT
cleanup
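
# exit 1 if at least one target failed, 0 otherwise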
exit ${failedtarget:+1}