arm64: entry: Save some nops when CONFIG_ARM64_PSEUDO_NMI is not set
author: He Ying <heying24@huawei.com>
Wed, 12 Jan 2022 03:24:10 +0000 (22:24 -0500)
committer: Will Deacon <will@kernel.org>
Tue, 15 Feb 2022 15:15:06 +0000 (15:15 +0000)
The arm64 pseudo-NMI feature code introduces some additional nops
when CONFIG_ARM64_PSEUDO_NMI is not set, which are not
necessary. So add the necessary ifdeffery to avoid them.

Signed-off-by: He Ying <heying24@huawei.com>
Acked-by: Mark Rutland <mark.rutland@arm.com>
Link: https://lore.kernel.org/r/20220112032410.29231-1-heying24@huawei.com
Signed-off-by: Will Deacon <will@kernel.org>
arch/arm64/kernel/entry.S

index 772ec2ecf48884f24148772ab854497b5eada522..eb59621d6c6a26d8e8109bd18c944998d2d894cc 100644 (file)
@@ -300,6 +300,7 @@ alternative_else_nop_endif
        str     w21, [sp, #S_SYSCALLNO]
        .endif
 
+#ifdef CONFIG_ARM64_PSEUDO_NMI
        /* Save pmr */
 alternative_if ARM64_HAS_IRQ_PRIO_MASKING
        mrs_s   x20, SYS_ICC_PMR_EL1
@@ -307,6 +308,7 @@ alternative_if ARM64_HAS_IRQ_PRIO_MASKING
        mov     x20, #GIC_PRIO_IRQON | GIC_PRIO_PSR_I_SET
        msr_s   SYS_ICC_PMR_EL1, x20
 alternative_else_nop_endif
+#endif
 
        /* Re-enable tag checking (TCO set on exception entry) */
 #ifdef CONFIG_ARM64_MTE
@@ -330,6 +332,7 @@ alternative_else_nop_endif
        disable_daif
        .endif
 
+#ifdef CONFIG_ARM64_PSEUDO_NMI
        /* Restore pmr */
 alternative_if ARM64_HAS_IRQ_PRIO_MASKING
        ldr     x20, [sp, #S_PMR_SAVE]
@@ -339,6 +342,7 @@ alternative_if ARM64_HAS_IRQ_PRIO_MASKING
        dsb     sy                              // Ensure priority change is seen by redistributor
 .L__skip_pmr_sync\@:
 alternative_else_nop_endif
+#endif
 
        ldp     x21, x22, [sp, #S_PC]           // load ELR, SPSR