Add new validation test for keyslot digest bond

Authored by Ondrej Kozina on 2018-04-20 13:53:24 +02:00
Committed by Milan Broz
parent f6be62ac5f
commit a702b7ccc5
3 changed files with 144 additions and 0 deletions

@@ -0,0 +1,72 @@
#!/bin/bash
. lib.sh
#
# *** Description ***
#
# Generate a primary header with a LUKS2 keyslot that is not
# assigned to any digest.
#
# The secondary header is corrupted on purpose as well.
#
# $1 full target dir
# $2 full source luks2 image
function prepare()
{
	cp $SRC_IMG $TGT_IMG
	test -d $TMPDIR || mkdir $TMPDIR
	read_luks2_json0 $TGT_IMG $TMPDIR/json0
	read_luks2_bin_hdr0 $TGT_IMG $TMPDIR/hdr0
	read_luks2_bin_hdr1 $TGT_IMG $TMPDIR/hdr1
}

function generate()
{
	read -r json_str_orig < $TMPDIR/json0
	arr_len=$(jq -c -M '.digests."0".keyslots | length' $TMPDIR/json0)
	# remove first element from digests."0".keyslots array
	json_str=$(jq -r -c -M 'del(.digests."0".keyslots[0])' $TMPDIR/json0)
	test ${#json_str} -lt $((LUKS2_JSON_SIZE*512)) || exit 2

	write_luks2_json "$json_str" $TMPDIR/json0

	# rebuild primary header with the modified json and a fresh checksum
	merge_bin_hdr_with_json $TMPDIR/hdr0 $TMPDIR/json0 $TMPDIR/area0
	erase_checksum $TMPDIR/area0
	chks0=$(calc_sha256_checksum_file $TMPDIR/area0)
	write_checksum $chks0 $TMPDIR/area0
	write_luks2_hdr0 $TMPDIR/area0 $TGT_IMG

	# wipe the secondary binary header on purpose
	kill_bin_hdr $TMPDIR/hdr1
	write_luks2_hdr1 $TMPDIR/hdr1 $TGT_IMG
}

function check()
{
	# secondary header must be wiped (VACUUM pattern)
	read_luks2_bin_hdr1 $TGT_IMG $TMPDIR/hdr_res1
	local str_res1=$(head -c 6 $TMPDIR/hdr_res1)
	test "$str_res1" = "VACUUM" || exit 2

	# primary header must carry the expected checksum
	read_luks2_json0 $TGT_IMG $TMPDIR/json_res0
	chks_res0=$(read_sha256_checksum $TGT_IMG)
	test "$chks0" = "$chks_res0" || exit 2

	# digest "0" must reference one keyslot fewer than before
	new_arr_len=$(jq -c -M '.digests."0".keyslots | length' $TMPDIR/json_res0)
	test $((arr_len-1)) -eq $new_arr_len || exit 2
}

function cleanup()
{
	rm -f $TMPDIR/*
	rm -fd $TMPDIR
}
test $# -eq 2 || exit 1
TGT_IMG=$1/$(test_img_name $0)
SRC_IMG=$2
prepare
generate
check
cleanup
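
For illustration, here is the effect of the generator's jq filter on a toy digests object (a minimal sketch; a real LUKS2 digest object carries additional fields such as hash, iterations, salt and segment bindings). Deleting the first entry of the digest's keyslots array leaves that keyslot defined in the keyslots section but bound to no digest, which validation must reject:

echo '{"digests":{"0":{"type":"pbkdf2","keyslots":["0","1"]}}}' \
	| jq -r -c -M 'del(.digests."0".keyslots[0])'
# prints: {"digests":{"0":{"type":"pbkdf2","keyslots":["1"]}}}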

@@ -0,0 +1,70 @@
#!/bin/bash
. lib.sh
#
# *** Description ***
#
# Generate a primary header with a LUKS2 keyslot assigned
# to more than one digest.
#
# The secondary header is corrupted on purpose as well.
#
# $1 full target dir
# $2 full source luks2 image
function prepare()
{
	cp $SRC_IMG $TGT_IMG
	test -d $TMPDIR || mkdir $TMPDIR
	read_luks2_json0 $TGT_IMG $TMPDIR/json0
	read_luks2_bin_hdr0 $TGT_IMG $TMPDIR/hdr0
	read_luks2_bin_hdr1 $TGT_IMG $TMPDIR/hdr1
}

function generate()
{
	# add keyslot 1 to a second digest
	json_str=$(jq -r -c -M '.digests."1" = .digests."0" | .digests."1".keyslots = ["1"]' $TMPDIR/json0)
	test ${#json_str} -lt $((LUKS2_JSON_SIZE*512)) || exit 2

	write_luks2_json "$json_str" $TMPDIR/json0

	# rebuild primary header with the modified json and a fresh checksum
	merge_bin_hdr_with_json $TMPDIR/hdr0 $TMPDIR/json0 $TMPDIR/area0
	erase_checksum $TMPDIR/area0
	chks0=$(calc_sha256_checksum_file $TMPDIR/area0)
	write_checksum $chks0 $TMPDIR/area0
	write_luks2_hdr0 $TMPDIR/area0 $TGT_IMG

	# wipe the secondary binary header on purpose
	kill_bin_hdr $TMPDIR/hdr1
	write_luks2_hdr1 $TMPDIR/hdr1 $TGT_IMG
}

function check()
{
	# secondary header must be wiped (VACUUM pattern)
	read_luks2_bin_hdr1 $TGT_IMG $TMPDIR/hdr_res1
	local str_res1=$(head -c 6 $TMPDIR/hdr_res1)
	test "$str_res1" = "VACUUM" || exit 2

	# primary header must carry the expected checksum
	read_luks2_json0 $TGT_IMG $TMPDIR/json_res0
	chks_res0=$(read_sha256_checksum $TGT_IMG)
	test "$chks0" = "$chks_res0" || exit 2

	# digest "1" must reference exactly one keyslot
	new_arr_len=$(jq -c -M '.digests."1".keyslots | length' $TMPDIR/json_res0)
	test 1 -eq $new_arr_len || exit 2
}

function cleanup()
{
	rm -f $TMPDIR/*
	rm -fd $TMPDIR
}
test $# -eq 2 || exit 1
TGT_IMG=$1/$(test_img_name $0)
SRC_IMG=$2
prepare
generate
check
cleanup
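
Again for illustration, the second generator's jq filter applied to the same toy digests object (a sketch only; real metadata has more fields). Copying digest "0" into a new digest "1" and binding keyslot "1" to it leaves keyslot "1" referenced by two digests, which validation must reject:

echo '{"digests":{"0":{"type":"pbkdf2","keyslots":["0","1"]}}}' \
	| jq -r -c -M '.digests."1" = .digests."0" | .digests."1".keyslots = ["1"]'
# prints: {"digests":{"0":{"type":"pbkdf2","keyslots":["0","1"]},"1":{"type":"pbkdf2","keyslots":["1"]}}}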

@@ -158,6 +158,8 @@ RUN luks2-invalid-keyslots-size-c1.img "F" "Failed to detect unaligned keyslot
RUN luks2-invalid-keyslots-size-c2.img "F" "Failed to detect too small keyslots_size config section"
RUN luks2-invalid-json-size-c0.img "F" "Failed to detect invalid json_size config section"
RUN luks2-invalid-json-size-c1.img "F" "Failed to detect invalid json_size config section"
RUN luks2-keyslot-missing-digest.img "F" "Failed to detect missing keyslot digest."
RUN luks2-keyslot-too-many-digests.img "F" "Failed to detect keyslot has too many digests."
echo "[4] Test integers value limits"
RUN luks2-uint64-max-segment-size.img "R" "Validation rejected correct value"
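
As a manual sanity check outside the harness (illustrative only, not part of this commit), cryptsetup should refuse to dump either generated image, e.g.:

cryptsetup luksDump luks2-keyslot-missing-digest.img
# expected to fail header validation, since keyslot 0 is not bound to any digest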