bpf: Minor improvements for bpf_cmp.
author Alexei Starovoitov <ast@kernel.org>
Fri, 12 Jan 2024 22:01:34 +0000 (14:01 -0800)
committer Alexei Starovoitov <ast@kernel.org>
Tue, 23 Jan 2024 22:40:23 +0000 (14:40 -0800)
A few minor improvements to the bpf_cmp() macro:
. reduce number of args in __bpf_cmp()
. rename NOFLIP to UNLIKELY
. add a comment about 64-bit truncation in "i" constraint
. use "ri" constraint for sizeof(rhs) <= 4
. improve error message for bpf_cmp_likely()

Before:
progs/iters_task_vma.c:31:7: error: variable 'ret' is uninitialized when used here [-Werror,-Wuninitialized]
   31 |                 if (bpf_cmp_likely(seen, <==, 1000))
      |                     ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
../bpf/bpf_experimental.h:325:3: note: expanded from macro 'bpf_cmp_likely'
  325 |                 ret;
      |                 ^~~
progs/iters_task_vma.c:31:7: note: variable 'ret' is declared here
../bpf/bpf_experimental.h:310:3: note: expanded from macro 'bpf_cmp_likely'
  310 |                 bool ret;
      |                 ^

After:
progs/iters_task_vma.c:31:7: error: invalid operand for instruction
   31 |                 if (bpf_cmp_likely(seen, <==, 1000))
      |                     ^
../bpf/bpf_experimental.h:324:17: note: expanded from macro 'bpf_cmp_likely'
  324 |                         asm volatile("r0 " #OP " invalid compare");
      |                                      ^
<inline asm>:1:5: note: instantiated into assembly here
    1 |         r0 <== invalid compare
      |            ^
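
For reference, with a valid C relational operator (e.g. "<=" instead of the
mistyped "<==") the macro expands cleanly. A minimal hypothetical sketch, not
part of this patch (only bpf_cmp_likely() itself comes from
bpf_experimental.h):

	int seen = 0;

	/* bpf_cmp_likely() inverts the test and emits
	 * "if rX s> 1000 goto ..." (a signed compare, since 'seen' is int),
	 * so the likely "<=" outcome is the straight-line fallthrough.
	 */
	if (bpf_cmp_likely(seen, <=, 1000))
		return 0;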

Signed-off-by: Alexei Starovoitov <ast@kernel.org>
Signed-off-by: Daniel Borkmann <daniel@iogearbox.net>
Acked-by: Yonghong Song <yonghong.song@linux.dev>
Link: https://lore.kernel.org/bpf/20240112220134.71209-1-alexei.starovoitov@gmail.com
Signed-off-by: Alexei Starovoitov <ast@kernel.org>
tools/testing/selftests/bpf/bpf_experimental.h

index f44875f8b36780aba49f19700bac9562a4e06f4f..0d749006d107577877e56a89f62af5e4492a585f 100644
@@ -260,11 +260,11 @@ extern void bpf_throw(u64 cookie) __ksym;
 
 #define __is_signed_type(type) (((type)(-1)) < (type)1)
 
-#define __bpf_cmp(LHS, OP, SIGN, PRED, RHS, DEFAULT)                                           \
+#define __bpf_cmp(LHS, OP, PRED, RHS, DEFAULT)                                         \
        ({                                                                                      \
                __label__ l_true;                                                               \
                bool ret = DEFAULT;                                                             \
-               asm volatile goto("if %[lhs] " SIGN #OP " %[rhs] goto %l[l_true]"               \
+               asm volatile goto("if %[lhs] " OP " %[rhs] goto %l[l_true]"             \
                                  :: [lhs] "r"((short)LHS), [rhs] PRED (RHS) :: l_true);        \
                ret = !DEFAULT;                                                                 \
 l_true:                                                                                                \
@@ -276,7 +276,7 @@ l_true:                                                                                             \
  * __lhs OP __rhs below will catch the mistake.
  * Be aware that we check only __lhs to figure out the sign of compare.
  */
-#define _bpf_cmp(LHS, OP, RHS, NOFLIP)                                                         \
+#define _bpf_cmp(LHS, OP, RHS, UNLIKELY)                                                               \
        ({                                                                                      \
                typeof(LHS) __lhs = (LHS);                                                      \
                typeof(RHS) __rhs = (RHS);                                                      \
@@ -285,14 +285,17 @@ l_true:                                                                                           \
                (void)(__lhs OP __rhs);                                                         \
                if (__cmp_cannot_be_signed(OP) || !__is_signed_type(typeof(__lhs))) {           \
                        if (sizeof(__rhs) == 8)                                                 \
-                               ret = __bpf_cmp(__lhs, OP, "", "r", __rhs, NOFLIP);             \
+                               /* "i" will truncate 64-bit constant into s32,                  \
+                                * so we have to use extra register via "r".                    \
+                                */                                                             \
+                               ret = __bpf_cmp(__lhs, #OP, "r", __rhs, UNLIKELY);              \
                        else                                                                    \
-                               ret = __bpf_cmp(__lhs, OP, "", "i", __rhs, NOFLIP);             \
+                               ret = __bpf_cmp(__lhs, #OP, "ri", __rhs, UNLIKELY);             \
                } else {                                                                        \
                        if (sizeof(__rhs) == 8)                                                 \
-                               ret = __bpf_cmp(__lhs, OP, "s", "r", __rhs, NOFLIP);            \
+                               ret = __bpf_cmp(__lhs, "s"#OP, "r", __rhs, UNLIKELY);           \
                        else                                                                    \
-                               ret = __bpf_cmp(__lhs, OP, "s", "i", __rhs, NOFLIP);            \
+                               ret = __bpf_cmp(__lhs, "s"#OP, "ri", __rhs, UNLIKELY);          \
                }                                                                               \
                ret;                                                                            \
        })
@@ -304,7 +307,7 @@ l_true:                                                                                             \
 #ifndef bpf_cmp_likely
 #define bpf_cmp_likely(LHS, OP, RHS)                                                           \
        ({                                                                                      \
-               bool ret;                                                                       \
+               bool ret = 0;                                                                   \
                if (__builtin_strcmp(#OP, "==") == 0)                                           \
                        ret = _bpf_cmp(LHS, !=, RHS, false);                                    \
                else if (__builtin_strcmp(#OP, "!=") == 0)                                      \
@@ -318,7 +321,7 @@ l_true:                                                                                             \
                else if (__builtin_strcmp(#OP, ">=") == 0)                                      \
                        ret = _bpf_cmp(LHS, <, RHS, false);                                     \
                else                                                                            \
-                       (void) "bug";                                                           \
+                       asm volatile("r0 " #OP " invalid compare");                             \
                ret;                                                                            \
        })
 #endif
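
To illustrate the constraint choice: the immediate field of a BPF jump
instruction is a signed 32-bit value, so an "i" operand would silently
truncate a 64-bit constant, while "ri" on narrower operands lets the compiler
pick an immediate for a compile-time constant and fall back to a register
otherwise. A hypothetical sketch, not part of this patch (bpf_cmp_unlikely()
is defined alongside bpf_cmp_likely() in the same header; types assume
vmlinux.h):

	static __always_inline int cmp_demo(__u64 x)
	{
		/* 8-byte rhs -> "r" constraint: the 64-bit constant is
		 * loaded into a register first, avoiding s32 truncation
		 * of the jump immediate.
		 */
		if (bpf_cmp_unlikely(x, ==, 0x123456789ULL))
			return 1;

		/* 4-byte rhs -> "ri" constraint: the compiler may encode
		 * 1000 directly in the jump's s32 immediate, or use a
		 * register when the rhs is not a compile-time constant.
		 */
		if (bpf_cmp_unlikely(x, ==, 1000))
			return 2;

		return 0;
	}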