arm64/sve: Explicitly load vector length when restoring SVE state
author Mark Brown <broonie@kernel.org>
Tue, 19 Oct 2021 17:22:13 +0000 (18:22 +0100)
committer Will Deacon <will@kernel.org>
Thu, 21 Oct 2021 09:18:17 +0000 (10:18 +0100)
Currently when restoring the SVE state we supply the SVE vector length
as an argument to sve_load_state() and the underlying macros. This becomes
inconvenient with the addition of SME since we may need to restore any
combination of SVE and SME vector lengths, and we already separately
restore the vector length in the KVM code. We don't need to know the vector
length during the actual register load since the SME load instructions can
index into the data array for us.

Refactor the interface so that the vector length is set explicitly and
separately from restoring the SVE registers, in preparation for adding SME
support. No functional change is intended.
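
As a sketch of what the change means for callers (mirroring the
task_fpsimd_load() hunk below), the vector length is now programmed with a
separate sve_set_vq() call rather than being passed to sve_load_state():

  Before:

	sve_load_state(sve_pffr(&current->thread),
		       &current->thread.uw.fpsimd_state.fpsr, true,
		       sve_vq_from_vl(task_get_sve_vl(current)) - 1);

  After:

	sve_set_vq(sve_vq_from_vl(task_get_sve_vl(current)) - 1);
	sve_load_state(sve_pffr(&current->thread),
		       &current->thread.uw.fpsimd_state.fpsr, true);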

Signed-off-by: Mark Brown <broonie@kernel.org>
Link: https://lore.kernel.org/r/20211019172247.3045838-9-broonie@kernel.org
Signed-off-by: Will Deacon <will@kernel.org>
arch/arm64/include/asm/fpsimd.h
arch/arm64/include/asm/fpsimdmacros.h
arch/arm64/kernel/entry-fpsimd.S
arch/arm64/kernel/fpsimd.c
arch/arm64/kvm/hyp/fpsimd.S

index 5a1f79a4a500843cf2ddf2baf31bb4aa868ad58a..1d0b5fa253a0cbcea8c83e31336902d4e75d794c 100644 (file)
@@ -67,7 +67,7 @@ static inline void *sve_pffr(struct thread_struct *thread)
 
 extern void sve_save_state(void *state, u32 *pfpsr, int save_ffr);
 extern void sve_load_state(void const *state, u32 const *pfpsr,
-                          int restore_ffr, unsigned long vq_minus_1);
+                          int restore_ffr);
 extern void sve_flush_live(bool flush_ffr, unsigned long vq_minus_1);
 extern unsigned int sve_get_vl(void);
 extern void sve_set_vq(unsigned long vq_minus_1);
index e5ffd8b265b6760a91163fba1842fa8808d7fadf..2509d7dde55a78a08a41433cdf51730f24313573 100644 (file)
                str             w\nxtmp, [\xpfpsr, #4]
 .endm
 
-.macro __sve_load nxbase, xpfpsr, restore_ffr, nxtmp
+.macro sve_load nxbase, xpfpsr, restore_ffr, nxtmp
  _for n, 0, 31,        _sve_ldr_v      \n, \nxbase, \n - 34
                cbz             \restore_ffr, 921f
                _sve_ldr_p      0, \nxbase
                ldr             w\nxtmp, [\xpfpsr, #4]
                msr             fpcr, x\nxtmp
 .endm
-
-.macro sve_load nxbase, xpfpsr, restore_ffr, xvqminus1, nxtmp, xtmp2
-               sve_load_vq     \xvqminus1, x\nxtmp, \xtmp2
-               __sve_load      \nxbase, \xpfpsr, \restore_ffr, \nxtmp
-.endm
index f588c214d44bd067b60e411486cec11a7c9678c8..dc242e269f9aae94854fb6a6116ceb8575ac4bd5 100644 (file)
@@ -51,10 +51,9 @@ SYM_FUNC_END(sve_save_state)
  * x0 - pointer to buffer for state
  * x1 - pointer to storage for FPSR
  * x2 - Restore FFR if non-zero
- * x3 - VQ-1
  */
 SYM_FUNC_START(sve_load_state)
-       sve_load 0, x1, x2, x3, 4, x5
+       sve_load 0, x1, x2, 4
        ret
 SYM_FUNC_END(sve_load_state)
 
index 9248c9efe5fdf744c81bac6acaaf8e5c3ecab369..e75dd20a40cfaf0444924ec6a074c4719db533af 100644 (file)
@@ -318,12 +318,13 @@ static void task_fpsimd_load(void)
        WARN_ON(!system_supports_fpsimd());
        WARN_ON(!have_cpu_fpsimd_context());
 
-       if (IS_ENABLED(CONFIG_ARM64_SVE) && test_thread_flag(TIF_SVE))
+       if (IS_ENABLED(CONFIG_ARM64_SVE) && test_thread_flag(TIF_SVE)) {
+               sve_set_vq(sve_vq_from_vl(task_get_sve_vl(current)) - 1);
                sve_load_state(sve_pffr(&current->thread),
-                              &current->thread.uw.fpsimd_state.fpsr, true,
-                              sve_vq_from_vl(task_get_sve_vl(current)) - 1);
-       else
+                              &current->thread.uw.fpsimd_state.fpsr, true);
+       } else {
                fpsimd_load_state(&current->thread.uw.fpsimd_state);
+       }
 }
 
 /*
@@ -1423,10 +1424,10 @@ void __efi_fpsimd_end(void)
                    likely(__this_cpu_read(efi_sve_state_used))) {
                        char const *sve_state = this_cpu_ptr(efi_sve_state);
 
+                       sve_set_vq(sve_vq_from_vl(sve_get_vl()) - 1);
                        sve_load_state(sve_state + sve_ffr_offset(sve_max_vl()),
                                       &this_cpu_ptr(&efi_fpsimd_state)->fpsr,
-                                      true,
-                                      sve_vq_from_vl(sve_get_vl()) - 1);
+                                      true);
 
                        __this_cpu_write(efi_sve_state_used, false);
                } else {
index 1bb3b04b84e6a4383fcde3308c24c295b166d7d2..e950875e31cee4df58d041519b7584356463c91b 100644 (file)
@@ -22,7 +22,7 @@ SYM_FUNC_END(__fpsimd_restore_state)
 
 SYM_FUNC_START(__sve_restore_state)
        mov     x2, #1
-       __sve_load 0, x1, x2, 3
+       sve_load 0, x1, x2, 3
        ret
 SYM_FUNC_END(__sve_restore_state)