arm64: move to ESR_ELx macros
author      Mark Rutland <mark.rutland@arm.com>
            Mon, 24 Nov 2014 12:31:40 +0000 (12:31 +0000)
committer   Mark Rutland <mark.rutland@arm.com>
            Thu, 15 Jan 2015 12:24:15 +0000 (12:24 +0000)
Now that we have common ESR_ELx_* macros, move the core arm64 code over
to them.

There should be no functional change as a result of this patch.

Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Acked-by: Catalin Marinas <catalin.marinas@arm.com>
Reviewed-by: Christoffer Dall <christoffer.dall@linaro.org>
Cc: Marc Zyngier <marc.zyngier@arm.com>
Cc: Peter Maydell <peter.maydell@linaro.org>
Cc: Will Deacon <will.deacon@arm.com>
arch/arm64/kernel/entry.S
arch/arm64/kernel/signal32.c
arch/arm64/mm/fault.c
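
For reference, the common ESR_ELx_* definitions used by the hunks below live in
arch/arm64/include/asm/esr.h. The excerpt that follows is an illustrative sketch,
not verbatim header contents: the names match the diff and the values reflect the
ARMv8 exception class encodings, but see the header itself for the authoritative
list. The main naming change visible in the diff is that the old _EL0/_EL1
suffixes become _LOW/_CUR ("taken from a lower EL" / "taken from the current EL"),
since the same syndrome layout is shared by ESR_EL1, ESR_EL2 and ESR_EL3.

	/* Illustrative subset of <asm/esr.h> (sketch, not verbatim) */
	#define ESR_ELx_EC_SHIFT	(26)
	#define ESR_ELx_EC_UNKNOWN	(0x00)
	#define ESR_ELx_EC_CP15_32	(0x03)
	#define ESR_ELx_EC_CP15_64	(0x04)
	#define ESR_ELx_EC_CP14_MR	(0x05)
	#define ESR_ELx_EC_CP14_LS	(0x06)
	#define ESR_ELx_EC_FP_ASIMD	(0x07)
	#define ESR_ELx_EC_CP14_64	(0x0C)
	#define ESR_ELx_EC_SVC32	(0x11)
	#define ESR_ELx_EC_SVC64	(0x15)
	#define ESR_ELx_EC_SYS64	(0x18)
	#define ESR_ELx_EC_IABT_LOW	(0x20)	/* instruction abort, lower EL */
	#define ESR_ELx_EC_PC_ALIGN	(0x22)
	#define ESR_ELx_EC_DABT_LOW	(0x24)	/* data abort, lower EL */
	#define ESR_ELx_EC_DABT_CUR	(0x25)	/* data abort, current EL */
	#define ESR_ELx_EC_SP_ALIGN	(0x26)
	#define ESR_ELx_EC_FP_EXC32	(0x28)
	#define ESR_ELx_EC_FP_EXC64	(0x2C)
	#define ESR_ELx_EC_BREAKPT_LOW	(0x30)
	#define ESR_ELx_EC_BREAKPT_CUR	(0x31)
	#define ESR_ELx_EC_BRK64	(0x3C)

	/* ISS bits for data aborts */
	#define ESR_ELx_CM		(1UL << 8)	/* cache maintenance */
	#define ESR_ELx_WNR		(1UL << 6)	/* write, not read */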

diff --git a/arch/arm64/kernel/entry.S b/arch/arm64/kernel/entry.S
index fd4fa374e5d2b276821e324eb6066f86bdb514c7..02e6af117762694389cf6298235a1593e859d7c0 100644
--- a/arch/arm64/kernel/entry.S
+++ b/arch/arm64/kernel/entry.S
@@ -269,18 +269,18 @@ ENDPROC(el1_error_invalid)
 el1_sync:
        kernel_entry 1
        mrs     x1, esr_el1                     // read the syndrome register
-       lsr     x24, x1, #ESR_EL1_EC_SHIFT      // exception class
-       cmp     x24, #ESR_EL1_EC_DABT_EL1       // data abort in EL1
+       lsr     x24, x1, #ESR_ELx_EC_SHIFT      // exception class
+       cmp     x24, #ESR_ELx_EC_DABT_CUR       // data abort in EL1
        b.eq    el1_da
-       cmp     x24, #ESR_EL1_EC_SYS64          // configurable trap
+       cmp     x24, #ESR_ELx_EC_SYS64          // configurable trap
        b.eq    el1_undef
-       cmp     x24, #ESR_EL1_EC_SP_ALIGN       // stack alignment exception
+       cmp     x24, #ESR_ELx_EC_SP_ALIGN       // stack alignment exception
        b.eq    el1_sp_pc
-       cmp     x24, #ESR_EL1_EC_PC_ALIGN       // pc alignment exception
+       cmp     x24, #ESR_ELx_EC_PC_ALIGN       // pc alignment exception
        b.eq    el1_sp_pc
-       cmp     x24, #ESR_EL1_EC_UNKNOWN        // unknown exception in EL1
+       cmp     x24, #ESR_ELx_EC_UNKNOWN        // unknown exception in EL1
        b.eq    el1_undef
-       cmp     x24, #ESR_EL1_EC_BREAKPT_EL1    // debug exception in EL1
+       cmp     x24, #ESR_ELx_EC_BREAKPT_CUR    // debug exception in EL1
        b.ge    el1_dbg
        b       el1_inv
 el1_da:
@@ -318,7 +318,7 @@ el1_dbg:
        /*
         * Debug exception handling
         */
-       cmp     x24, #ESR_EL1_EC_BRK64          // if BRK64
+       cmp     x24, #ESR_ELx_EC_BRK64          // if BRK64
        cinc    x24, x24, eq                    // set bit '0'
        tbz     x24, #0, el1_inv                // EL1 only
        mrs     x0, far_el1
@@ -375,26 +375,26 @@ el1_preempt:
 el0_sync:
        kernel_entry 0
        mrs     x25, esr_el1                    // read the syndrome register
-       lsr     x24, x25, #ESR_EL1_EC_SHIFT     // exception class
-       cmp     x24, #ESR_EL1_EC_SVC64          // SVC in 64-bit state
+       lsr     x24, x25, #ESR_ELx_EC_SHIFT     // exception class
+       cmp     x24, #ESR_ELx_EC_SVC64          // SVC in 64-bit state
        b.eq    el0_svc
-       cmp     x24, #ESR_EL1_EC_DABT_EL0       // data abort in EL0
+       cmp     x24, #ESR_ELx_EC_DABT_LOW       // data abort in EL0
        b.eq    el0_da
-       cmp     x24, #ESR_EL1_EC_IABT_EL0       // instruction abort in EL0
+       cmp     x24, #ESR_ELx_EC_IABT_LOW       // instruction abort in EL0
        b.eq    el0_ia
-       cmp     x24, #ESR_EL1_EC_FP_ASIMD       // FP/ASIMD access
+       cmp     x24, #ESR_ELx_EC_FP_ASIMD       // FP/ASIMD access
        b.eq    el0_fpsimd_acc
-       cmp     x24, #ESR_EL1_EC_FP_EXC64       // FP/ASIMD exception
+       cmp     x24, #ESR_ELx_EC_FP_EXC64       // FP/ASIMD exception
        b.eq    el0_fpsimd_exc
-       cmp     x24, #ESR_EL1_EC_SYS64          // configurable trap
+       cmp     x24, #ESR_ELx_EC_SYS64          // configurable trap
        b.eq    el0_undef
-       cmp     x24, #ESR_EL1_EC_SP_ALIGN       // stack alignment exception
+       cmp     x24, #ESR_ELx_EC_SP_ALIGN       // stack alignment exception
        b.eq    el0_sp_pc
-       cmp     x24, #ESR_EL1_EC_PC_ALIGN       // pc alignment exception
+       cmp     x24, #ESR_ELx_EC_PC_ALIGN       // pc alignment exception
        b.eq    el0_sp_pc
-       cmp     x24, #ESR_EL1_EC_UNKNOWN        // unknown exception in EL0
+       cmp     x24, #ESR_ELx_EC_UNKNOWN        // unknown exception in EL0
        b.eq    el0_undef
-       cmp     x24, #ESR_EL1_EC_BREAKPT_EL0    // debug exception in EL0
+       cmp     x24, #ESR_ELx_EC_BREAKPT_LOW    // debug exception in EL0
        b.ge    el0_dbg
        b       el0_inv
 
@@ -403,30 +403,30 @@ el0_sync:
 el0_sync_compat:
        kernel_entry 0, 32
        mrs     x25, esr_el1                    // read the syndrome register
-       lsr     x24, x25, #ESR_EL1_EC_SHIFT     // exception class
-       cmp     x24, #ESR_EL1_EC_SVC32          // SVC in 32-bit state
+       lsr     x24, x25, #ESR_ELx_EC_SHIFT     // exception class
+       cmp     x24, #ESR_ELx_EC_SVC32          // SVC in 32-bit state
        b.eq    el0_svc_compat
-       cmp     x24, #ESR_EL1_EC_DABT_EL0       // data abort in EL0
+       cmp     x24, #ESR_ELx_EC_DABT_LOW       // data abort in EL0
        b.eq    el0_da
-       cmp     x24, #ESR_EL1_EC_IABT_EL0       // instruction abort in EL0
+       cmp     x24, #ESR_ELx_EC_IABT_LOW       // instruction abort in EL0
        b.eq    el0_ia
-       cmp     x24, #ESR_EL1_EC_FP_ASIMD       // FP/ASIMD access
+       cmp     x24, #ESR_ELx_EC_FP_ASIMD       // FP/ASIMD access
        b.eq    el0_fpsimd_acc
-       cmp     x24, #ESR_EL1_EC_FP_EXC32       // FP/ASIMD exception
+       cmp     x24, #ESR_ELx_EC_FP_EXC32       // FP/ASIMD exception
        b.eq    el0_fpsimd_exc
-       cmp     x24, #ESR_EL1_EC_UNKNOWN        // unknown exception in EL0
+       cmp     x24, #ESR_ELx_EC_UNKNOWN        // unknown exception in EL0
        b.eq    el0_undef
-       cmp     x24, #ESR_EL1_EC_CP15_32        // CP15 MRC/MCR trap
+       cmp     x24, #ESR_ELx_EC_CP15_32        // CP15 MRC/MCR trap
        b.eq    el0_undef
-       cmp     x24, #ESR_EL1_EC_CP15_64        // CP15 MRRC/MCRR trap
+       cmp     x24, #ESR_ELx_EC_CP15_64        // CP15 MRRC/MCRR trap
        b.eq    el0_undef
-       cmp     x24, #ESR_EL1_EC_CP14_MR        // CP14 MRC/MCR trap
+       cmp     x24, #ESR_ELx_EC_CP14_MR        // CP14 MRC/MCR trap
        b.eq    el0_undef
-       cmp     x24, #ESR_EL1_EC_CP14_LS        // CP14 LDC/STC trap
+       cmp     x24, #ESR_ELx_EC_CP14_LS        // CP14 LDC/STC trap
        b.eq    el0_undef
-       cmp     x24, #ESR_EL1_EC_CP14_64        // CP14 MRRC/MCRR trap
+       cmp     x24, #ESR_ELx_EC_CP14_64        // CP14 MRRC/MCRR trap
        b.eq    el0_undef
-       cmp     x24, #ESR_EL1_EC_BREAKPT_EL0    // debug exception in EL0
+       cmp     x24, #ESR_ELx_EC_BREAKPT_LOW    // debug exception in EL0
        b.ge    el0_dbg
        b       el0_inv
 el0_svc_compat:
diff --git a/arch/arm64/kernel/signal32.c b/arch/arm64/kernel/signal32.c
index 5a1ba6e80d4e20b6d95b0152d99e2ebca53e7424..192d900c058ff57acd8dd58208036a69dc1ee827 100644
--- a/arch/arm64/kernel/signal32.c
+++ b/arch/arm64/kernel/signal32.c
@@ -501,7 +501,7 @@ static int compat_setup_sigframe(struct compat_sigframe __user *sf,
 
        __put_user_error((compat_ulong_t)0, &sf->uc.uc_mcontext.trap_no, err);
        /* set the compat FSR WnR */
-       __put_user_error(!!(current->thread.fault_code & ESR_EL1_WRITE) <<
+       __put_user_error(!!(current->thread.fault_code & ESR_ELx_WNR) <<
                         FSR_WRITE_SHIFT, &sf->uc.uc_mcontext.error_code, err);
        __put_user_error(current->thread.fault_address, &sf->uc.uc_mcontext.fault_address, err);
        __put_user_error(set->sig[0], &sf->uc.uc_mcontext.oldmask, err);
diff --git a/arch/arm64/mm/fault.c b/arch/arm64/mm/fault.c
index c11cd27ca8f580c1ab5cb6482e79c6d36f75af22..96da13167d4a5c77564952a6d3a0fdce35d6580f 100644
--- a/arch/arm64/mm/fault.c
+++ b/arch/arm64/mm/fault.c
@@ -219,7 +219,7 @@ static int __kprobes do_page_fault(unsigned long addr, unsigned int esr,
 
        if (esr & ESR_LNX_EXEC) {
                vm_flags = VM_EXEC;
-       } else if ((esr & ESR_EL1_WRITE) && !(esr & ESR_EL1_CM)) {
+       } else if ((esr & ESR_ELx_WNR) && !(esr & ESR_ELx_CM)) {
                vm_flags = VM_WRITE;
                mm_flags |= FAULT_FLAG_WRITE;
        }
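
Not part of the patch, just a reading aid for the last hunk: a minimal,
hypothetical helper expressing the same condition in C. A fault is treated as a
write fault only when the syndrome has the WnR bit set and the access was not a
cache maintenance operation, since cache maintenance is reported with WnR == 1
but must not require VM_WRITE on the mapping.

	#include <linux/types.h>
	#include <asm/esr.h>

	/* Hypothetical helper, for illustration only; mirrors the check in do_page_fault() above. */
	static bool esr_is_write_fault(unsigned int esr)
	{
		return (esr & ESR_ELx_WNR) && !(esr & ESR_ELx_CM);
	}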