fix: Avoid find failures on too many files (#144)

Signed-off-by: Tarik Megzari <tarik.megzari@corp.ovh.com>

Co-authored-by: Tarik Megzari <tarik.megzari@corp.ovh.com>
Co-authored-by: GoldenKiwi <thibault.dewailly@corp.ovh.com>
Author: Tarik Megzari
Date: 2022-03-02 17:49:28 +01:00
Committed by: GitHub
Parent: 20bf51f65b
Commit: b962155a3c
7 changed files with 25 additions and 25 deletions


@@ -24,9 +24,9 @@ audit() {
     FS_NAMES=$(df --local -P | awk '{ if (NR!=1) print $6 }')
     # shellcheck disable=2086
     if [ -n "$IGNORED_PATH" ]; then
-        FOUND_BINARIES=$($SUDO_CMD find $FS_NAMES -xdev -type f -perm -2000 -regextype 'egrep' ! -regex $IGNORED_PATH -print)
+        FOUND_BINARIES=$($SUDO_CMD find $FS_NAMES -xdev -ignore_readdir_race -type f -perm -2000 -regextype 'egrep' ! -regex $IGNORED_PATH -print)
     else
-        FOUND_BINARIES=$($SUDO_CMD find $FS_NAMES -xdev -type f -perm -2000 -print)
+        FOUND_BINARIES=$($SUDO_CMD find $FS_NAMES -xdev -ignore_readdir_race -type f -perm -2000 -print)
     fi
     BAD_BINARIES=""
     for BINARY in $FOUND_BINARIES; do
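
For context, the sketch below shows the same pattern in isolation: GNU find's
-ignore_readdir_race suppresses the error (and the non-zero exit status) that
occurs when an entry disappears between being listed and being examined, so
the audit no longer aborts on busy filesystems. This is a minimal standalone
illustration, not code from the repository; SUDO_CMD is omitted and the
variable names mirror the diff only for readability.

    #!/bin/sh
    # Local mount points, one per line (same df/awk idiom as in the diff above).
    FS_NAMES=$(df --local -P | awk '{ if (NR!=1) print $6 }')

    # Scan local filesystems for SGID files while tolerating entries that
    # vanish mid-scan; -ignore_readdir_race is GNU find specific.
    # shellcheck disable=2086
    FOUND_BINARIES=$(find $FS_NAMES -xdev -ignore_readdir_race -type f -perm -2000 -print)

    echo "$FOUND_BINARIES"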