selftests/bpf: validate zero preservation for sub-slot loads
author Andrii Nakryiko <andrii@kernel.org>
Tue, 5 Dec 2023 18:42:45 +0000 (10:42 -0800)
committer Alexei Starovoitov <ast@kernel.org>
Tue, 5 Dec 2023 21:40:21 +0000 (13:40 -0800)
Validate that 1-, 2-, and 4-byte loads from stack slots not aligned on
an 8-byte boundary still preserve zero, both when loading from
all-STACK_ZERO sub-slots and when the stack sub-slots are covered by a
spilled register with a known constant zero value.
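
The test adds the loaded value to a pointer to a 1-byte global buffer
before storing through it, so the verifier accepts the program only if
it proves the loaded value is exactly zero. A minimal sketch of the
pattern each case in the diff repeats (register names as in the asm
below):

    r1 = %[single_byte_buf];   /* pointer to a 1-byte buffer */
    r2 = *(u8 *)(r10 -1);      /* sub-slot load from a known-zero slot */
    r1 += r2;                  /* offset the pointer by the loaded value */
    *(u8 *)(r1 + 0) = r2;      /* in bounds only if r2 is provably zero */

If a sub-slot load were to lose the zero, r2 would become an unknown
scalar and the final store would be rejected as out of bounds, failing
the __success expectation.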

Signed-off-by: Andrii Nakryiko <andrii@kernel.org>
Link: https://lore.kernel.org/r/20231205184248.1502704-8-andrii@kernel.org
Signed-off-by: Alexei Starovoitov <ast@kernel.org>
tools/testing/selftests/bpf/progs/verifier_spill_fill.c

index d9dabae81176764d47bacf439b02bad185a3fe7f..41fd61299eab0ca1364ce5d9ee952ee988f689fe 100644
@@ -490,4 +490,75 @@ __naked void spill_subregs_preserve_stack_zero(void)
        : __clobber_all);
 }
 
+char single_byte_buf[1] SEC(".data.single_byte_buf");
+
+SEC("raw_tp")
+__log_level(2)
+__success
+__naked void partial_stack_load_preserves_zeros(void)
+{
+       asm volatile (
+               /* fp-8 is all STACK_ZERO */
+               ".8byte %[fp8_st_zero];" /* LLVM-18+: *(u64 *)(r10 -8) = 0; */
+
+               /* fp-16 is const zero register */
+               "r0 = 0;"
+               "*(u64 *)(r10 -16) = r0;"
+
+               /* load single U8 from non-aligned STACK_ZERO slot */
+               "r1 = %[single_byte_buf];"
+               "r2 = *(u8 *)(r10 -1);"
+               "r1 += r2;"
+               "*(u8 *)(r1 + 0) = r2;" /* this should be fine */
+
+               /* load single U8 from non-aligned ZERO REG slot */
+               "r1 = %[single_byte_buf];"
+               "r2 = *(u8 *)(r10 -9);"
+               "r1 += r2;"
+               "*(u8 *)(r1 + 0) = r2;" /* this should be fine */
+
+               /* load single U16 from non-aligned STACK_ZERO slot */
+               "r1 = %[single_byte_buf];"
+               "r2 = *(u16 *)(r10 -2);"
+               "r1 += r2;"
+               "*(u8 *)(r1 + 0) = r2;" /* this should be fine */
+
+               /* load single U16 from non-aligned ZERO REG slot */
+               "r1 = %[single_byte_buf];"
+               "r2 = *(u16 *)(r10 -10);"
+               "r1 += r2;"
+               "*(u8 *)(r1 + 0) = r2;" /* this should be fine */
+
+               /* load single U32 from non-aligned STACK_ZERO slot */
+               "r1 = %[single_byte_buf];"
+               "r2 = *(u32 *)(r10 -4);"
+               "r1 += r2;"
+               "*(u8 *)(r1 + 0) = r2;" /* this should be fine */
+
+               /* load single U32 from non-aligned ZERO REG slot */
+               "r1 = %[single_byte_buf];"
+               "r2 = *(u32 *)(r10 -12);"
+               "r1 += r2;"
+               "*(u8 *)(r1 + 0) = r2;" /* this should be fine */
+
+               /* for completeness, load U64 from STACK_ZERO slot */
+               "r1 = %[single_byte_buf];"
+               "r2 = *(u64 *)(r10 -8);"
+               "r1 += r2;"
+               "*(u8 *)(r1 + 0) = r2;" /* this should be fine */
+
+               /* for completeness, load U64 from ZERO REG slot */
+               "r1 = %[single_byte_buf];"
+               "r2 = *(u64 *)(r10 -16);"
+               "r1 += r2;"
+               "*(u8 *)(r1 + 0) = r2;" /* this should be fine */
+
+               "r0 = 0;"
+               "exit;"
+       :
+       : __imm_ptr(single_byte_buf),
+         __imm_insn(fp8_st_zero, BPF_ST_MEM(BPF_DW, BPF_REG_FP, -8, 0))
+       : __clobber_common);
+}
+
 char _license[] SEC("license") = "GPL";