ARM: Fix interrupt management
Interrupts were not correctly masked while running in the kernel, which breaks one of the kernel's current main assumptions. Also remove some duplication in the ARM asm files, and add a function to check the status of the ARM IRQs (not compiled in by default).

Change-Id: I3c25d2b388f93fd8fe423998b94b3c4f140ba831
parent eff37b8a8b
commit b36292e232

7 changed files with 169 additions and 109 deletions
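The rule this change enforces is that the kernel itself now runs with IRQs masked; the only place they are deliberately unmasked is around the idle wfi. A minimal sketch of that pattern, matching the shape of the halt_cpu() hunk below (GCC-style inline assembly, ARM only; the comments are explanatory and not part of the patch):

/* Sketch: kernel code keeps IRQs masked; the idle path unmasks them
 * only across the wfi, then masks them again before continuing. */
void halt_cpu(void)
{
        asm volatile("dsb");            /* drain outstanding memory accesses */
        asm volatile("cpsie i");        /* unmask IRQs so the wakeup interrupt is taken */
        asm volatile("wfi");            /* wait for interrupt */
        asm volatile("cpsid i");        /* mask IRQs again before running kernel code */
}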
@@ -64,7 +64,7 @@ CLEANFILES+= ${ORIG_UNPAGED_OBJS}

SRCS+= mpx.S arch_clock.c arch_do_vmctl.c arch_system.c \
        omap_serial.c omap_timer.c omap_intr.c exception.c \
        io_intr.S klib.S memory.c \
        klib.S memory.c \
        protect.c direct_tty_utils.c arch_reset.c \
        pg_utils.c phys_copy.S phys_memset.S exc.S
OBJS.kernel+= ${UNPAGED_OBJS}

@@ -25,6 +25,7 @@ halt_cpu(void)
        asm volatile("dsb");
        asm volatile("cpsie i");
        asm volatile("wfi");
        asm volatile("cpsid i");
}

void

@@ -159,10 +159,8 @@ struct proc * arch_finish_switch_to_user(void)
        p = get_cpulocal_var(proc_ptr);
        *((reg_t *)stk) = (reg_t) p;

        /* make sure I bit is clear in PSR so that interrupts won't be disabled
         * once p's context is restored. this should not be possible.
         */
        assert(!(p->p_reg.psr & PSR_I));
        /* turn interrupts on */
        p->p_reg.psr &= ~(PSR_I|PSR_F);

        return p;
}

@@ -1,6 +1,23 @@
#ifndef _ARM_CPUFUNC_H
#define _ARM_CPUFUNC_H

#if 0
/* check interrupt state */
static inline void check_int(unsigned int state, int line)
{
        unsigned int cpsr = 0;

        asm volatile("mrs %0, cpsr" : "=r" (cpsr));

        if ((cpsr & PSR_F) != (state & PSR_F))
                printf("%d: FIQs are unexpectedly %s\n", line, (cpsr & PSR_F) ? "MASKED" : "UNMASKED");

        if ((cpsr & PSR_I) != (state & PSR_I))
                printf("%d: IRQs are unexpectedly %s\n", line, (cpsr & PSR_I) ? "MASKED" : "UNMASKED");

}
#endif

/* Data memory barrier */
static inline void dmb(void)
{

@@ -28,8 +45,29 @@ static inline void barrier(void)
static inline void refresh_tlb(void)
{
        dsb();

        /* Invalidate entire unified TLB */
        asm volatile("mcr p15, 0, r0, c8, c7, 0 @ TLBIALL\n\t");
        asm volatile("mcr p15, 0, %[zero], c8, c7, 0 @ TLBIALL\n\t" : : [zero] "r" (0));

#if 0
        /* Invalidate entire data TLB */
        asm volatile("mcr p15, 0, %[zero], c8, c6, 0" : : [zero] "r" (0));

        /* Invalidate entire instruction TLB */
        asm volatile("mcr p15, 0, %[zero], c8, c5, 0" : : [zero] "r" (0));
#endif

#if 0
        /*
         * Invalidate all instruction caches to PoU.
         * Also flushes branch target cache.
         */
        asm volatile("mcr p15, 0, %[zero], c7, c5, 0" : : [zero] "r" (0));

        /* Invalidate entire branch predictor array */
        asm volatile("mcr p15, 0, %[zero], c7, c5, 6" : : [zero] "r" (0)); /* flush BTB */
#endif

        dsb();
        isb();
}

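The functional change in the hunk above is the TLBIALL operand: the old line used r0 directly without telling the compiler, whereas the new line passes an explicit zero through a named input operand so the compiler knows which value the instruction consumes. The same idiom as a stand-alone sketch (the function name here is illustrative, not from the patch):

/* Sketch only: same constrained-operand idiom as the TLBIALL line above. */
static inline void tlb_flush_all(void)
{
        /* The compiler materializes 0 in a register of its choice for [zero]. */
        asm volatile("mcr p15, 0, %[zero], c8, c7, 0 @ TLBIALL"
                : : [zero] "r" (0));
}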
@@ -42,6 +80,7 @@ static inline u32_t read_sctlr()

        asm volatile("mrc p15, 0, %[ctl], c1, c0, 0 @ Read SCTLR\n\t"
                : [ctl] "=r" (ctl));

        return ctl;
}

@@ -50,6 +89,7 @@ static inline void write_sctlr(u32_t ctl)
{
        asm volatile("mcr p15, 0, %[ctl], c1, c0, 0 @ Write SCTLR\n\t"
                : : [ctl] "r" (ctl));
        isb();
}

/* Read Translation Table Base Register 0 */

@@ -59,6 +99,7 @@ static inline u32_t read_ttbr0()

        asm volatile("mrc p15, 0, %[bar], c2, c0, 0 @ Read TTBR0\n\t"
                : [bar] "=r" (bar));

        return bar;
}

@@ -66,8 +107,10 @@ static inline u32_t read_ttbr0()
static inline void write_ttbr0(u32_t bar)
{
        barrier();

        asm volatile("mcr p15, 0, %[bar], c2, c0, 0 @ Write TTBR0\n\t"
                : : [bar] "r" (bar));

        refresh_tlb();
}

@@ -75,8 +118,8 @@ static inline void write_ttbr0(u32_t bar)
static inline void reload_ttbr0(void)
{
        reg_t ttbr = read_ttbr0();

        write_ttbr0(ttbr);
        refresh_tlb();
}

/* Read Translation Table Base Register 1 */

@@ -86,6 +129,7 @@ static inline u32_t read_ttbr1()

        asm volatile("mrc p15, 0, %[bar], c2, c0, 1 @ Read TTBR1\n\t"
                : [bar] "=r" (bar));

        return bar;
}

@@ -93,8 +137,10 @@ static inline u32_t read_ttbr1()
static inline void write_ttbr1(u32_t bar)
{
        barrier();

        asm volatile("mcr p15, 0, %[bar], c2, c0, 1 @ Write TTBR1\n\t"
                : : [bar] "r" (bar));

        refresh_tlb();
}

@@ -102,8 +148,8 @@ static inline void write_ttbr1(u32_t bar)
static inline void reload_ttbr1(void)
{
        reg_t ttbr = read_ttbr1();

        write_ttbr1(ttbr);
        refresh_tlb();
}

/* Read Translation Table Base Control Register */

@@ -113,6 +159,7 @@ static inline u32_t read_ttbcr()

        asm volatile("mrc p15, 0, %[bcr], c2, c0, 2 @ Read TTBCR\n\t"
                : [bcr] "=r" (bcr));

        return bcr;
}

@@ -121,6 +168,8 @@ static inline void write_ttbcr(u32_t bcr)
{
        asm volatile("mcr p15, 0, %[bcr], c2, c0, 2 @ Write TTBCR\n\t"
                : : [bcr] "r" (bcr));

        isb();
}

/* Read Domain Access Control Register */

@@ -130,6 +179,7 @@ static inline u32_t read_dacr()

        asm volatile("mrc p15, 0, %[dacr], c3, c0, 0 @ Read DACR\n\t"
                : [dacr] "=r" (dacr));

        return dacr;
}

@@ -138,6 +188,8 @@ static inline void write_dacr(u32_t dacr)
{
        asm volatile("mcr p15, 0, %[dacr], c3, c0, 0 @ Write DACR\n\t"
                : : [dacr] "r" (dacr));

        isb();
}

/* Read Data Fault Status Register */

@@ -147,6 +199,7 @@ static inline u32_t read_dfsr()

        asm volatile("mrc p15, 0, %[fsr], c5, c0, 0 @ Read DFSR\n\t"
                : [fsr] "=r" (fsr));

        return fsr;
}

@@ -155,6 +208,8 @@ static inline void write_dfsr(u32_t fsr)
{
        asm volatile("mcr p15, 0, %[fsr], c5, c0, 0 @ Write DFSR\n\t"
                : : [fsr] "r" (fsr));

        isb();
}

/* Read Instruction Fault Status Register */

@@ -164,6 +219,7 @@ static inline u32_t read_ifsr()

        asm volatile("mrc p15, 0, %[fsr], c5, c0, 1 @ Read IFSR\n\t"
                : [fsr] "=r" (fsr));

        return fsr;
}

@@ -172,6 +228,8 @@ static inline void write_ifsr(u32_t fsr)
{
        asm volatile("mcr p15, 0, %[fsr], c5, c0, 1 @ Write IFSR\n\t"
                : : [fsr] "r" (fsr));

        isb();
}

/* Read Data Fault Address Register */

@@ -181,6 +239,7 @@ static inline u32_t read_dfar()

        asm volatile("mrc p15, 0, %[far], c6, c0, 0 @ Read DFAR\n\t"
                : [far] "=r" (far));

        return far;
}

@@ -189,6 +248,8 @@ static inline void write_dfar(u32_t far)
{
        asm volatile("mcr p15, 0, %[far], c6, c0, 0 @ Write DFAR\n\t"
                : : [far] "r" (far));

        isb();
}

/* Read Instruction Fault Address Register */

@@ -198,6 +259,7 @@ static inline u32_t read_ifar()

        asm volatile("mrc p15, 0, %[far], c6, c0, 2 @ Read IFAR\n\t"
                : [far] "=r" (far));

        return far;
}

@@ -206,6 +268,8 @@ static inline void write_ifar(u32_t far)
{
        asm volatile("mcr p15, 0, %[far], c6, c0, 2 @ Write IFAR\n\t"
                : : [far] "r" (far));

        isb();
}

/* Read Vector Base Address Register */

@@ -215,6 +279,7 @@ static inline u32_t read_vbar()

        asm volatile("mrc p15, 0, %[vbar], c12, c0, 0 @ Read VBAR\n\t"
                : [vbar] "=r" (vbar));

        return vbar;
}

@@ -223,7 +288,8 @@ static inline void write_vbar(u32_t vbar)
{
        asm volatile("mcr p15, 0, %[vbar], c12, c0, 0 @ Write VBAR\n\t"
                : : [vbar] "r" (vbar));
        asm volatile("dsb");

        isb();
}

/* Read the Main ID Register */

@@ -233,6 +299,7 @@ static inline u32_t read_midr()

        asm volatile("mrc p15, 0, %[id], c0, c0, 0 @ read MIDR\n\t"
                : [id] "=r" (id));

        return id;
}

@@ -243,6 +310,7 @@ static inline u32_t read_actlr()

        asm volatile("mrc p15, 0, %[ctl], c1, c0, 1 @ Read ACTLR\n\t"
                : [ctl] "=r" (ctl));

        return ctl;
}

@@ -251,6 +319,8 @@ static inline void write_actlr(u32_t ctl)
{
        asm volatile("mcr p15, 0, %[ctl], c1, c0, 1 @ Write ACTLR\n\t"
                : : [ctl] "r" (ctl));

        isb();
}

/* Read Current Program Status Register */

@@ -260,6 +330,7 @@ static inline u32_t read_cpsr()

        asm volatile("mrs %[status], cpsr @ read CPSR"
                : [status] "=r" (status));

        return status;
}

@@ -1,14 +0,0 @@
/* intr_disable(), intr_enable - Disable/Enable hardware interrupts. */
/* void intr_disable(void); */
/* void intr_enable(void); */
#include <machine/asm.h>

ENTRY(intr_disable)
        dsb
        cpsid i
        bx lr

ENTRY(intr_enable)
        dsb
        cpsie i
        bx lr

@@ -82,12 +82,14 @@ ENTRY(__user_copy_msg_pointer_failure)
        mov r0, #-1
        bx lr

ENTRY(intr_enable)
ENTRY(interrupts_enable)
        dsb
        cpsie i
        cpsie if
        bx lr

ENTRY(intr_disable)
ENTRY(interrupts_disable)
        dsb
        cpsid i
        cpsid if
        bx lr

@@ -53,6 +53,8 @@ IMPORT(svc_stack)
.macro test_int_in_kernel, label
        push {r3}
        ldr r3, [sp, #8]        /* spsr */
        orr r3, r3, #(PSR_F | PSR_I)    /* mask interrupts on return */
        str r3, [sp, #8]        /* spsr */
        and r3, r3, #PSR_MODE_MASK
        cmp r3, #MODE_USR
        pop {r3}