/* at v2.6.16-rc4 — 177 lines, 4.1 kB (scraper metadata, not part of the original source) */
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994, 95, 96, 97, 98, 99, 2003 by Ralf Baechle
 * Copyright (C) 1996 by Paul M. Antoine
 * Copyright (C) 1999 Silicon Graphics
 * Copyright (C) 2000 MIPS Technologies, Inc.
 */
#ifndef _ASM_INTERRUPT_H
#define _ASM_INTERRUPT_H

#include <linux/config.h>
#include <asm/hazards.h>

/*
 * Define the assembler macro "local_irq_enable" once at file scope; the
 * inline function below then expands to a single macro invocation.
 * ".set noat" is required because the pre-R2 sequence uses $1 (the
 * assembler temporary register) as scratch.
 */
__asm__ (
	"	.macro	local_irq_enable				\n"
	"	.set	push						\n"
	"	.set	reorder						\n"
	"	.set	noat						\n"
#ifdef CONFIG_CPU_MIPSR2
	/* MIPS R2 has a dedicated instruction that sets Status.IE. */
	"	ei							\n"
#else
	/*
	 * Pre-R2: read CP0 register 12 (Status).  "ori 0x1f" forces
	 * bits 0-4 to one, "xori 0x1e" clears bits 1-4 again, so the
	 * value written back has only bit 0 (IE) newly set and bits
	 * 1-4 forced to zero.
	 */
	"	mfc0	$1,$12						\n"
	"	ori	$1,0x1f						\n"
	"	xori	$1,0x1e						\n"
	"	mtc0	$1,$12						\n"
#endif
	"	irq_enable_hazard					\n"
	"	.set	pop						\n"
	"	.endm");

/*
 * Enable interrupts on the current CPU.  The "memory" clobber makes this
 * a compiler barrier as well.
 */
static inline void local_irq_enable(void)
{
	__asm__ __volatile__(
		"local_irq_enable"
		: /* no outputs */
		: /* no inputs */
		: "memory");
}

/*
 * For cli() we have to insert nops to make sure that the new value
 * has actually arrived in the status register before the end of this
 * macro.
 * R4000/R4400 need three nops, the R4600 two nops and the R10000 needs
 * no nops at all.
 */
/*
 * For TX49, operating only IE bit is not enough.
 *
 * If mfc0 $12 follows store and the mfc0 is last instruction of a
 * page and fetching the next instruction causes TLB miss, the result
 * of the mfc0 might wrongly contain EXL bit.
 *
 * ERT-TX49H2-027, ERT-TX49H3-012, ERT-TX49HL3-006, ERT-TX49H4-008
 *
 * Workaround: mask EXL bit of the result or place a nop before mfc0.
*/
__asm__ (
	"	.macro	local_irq_disable\n"
	"	.set	push						\n"
	"	.set	noat						\n"
#ifdef CONFIG_CPU_MIPSR2
	/* MIPS R2: single instruction clears Status.IE. */
	"	di							\n"
#else
	/*
	 * "ori 0x1f" then "xori 0x1f" forces bits 0-4 of Status to
	 * zero; clearing bit 0 disables interrupts, and zeroing bits
	 * 1-4 also masks the possibly-bogus EXL bit from the TX49
	 * erratum described above.
	 */
	"	mfc0	$1,$12						\n"
	"	ori	$1,0x1f						\n"
	"	xori	$1,0x1f						\n"
	"	.set	noreorder					\n"
	"	mtc0	$1,$12						\n"
#endif
	"	irq_disable_hazard					\n"
	"	.set	pop						\n"
	"	.endm							\n");

/*
 * Disable interrupts on the current CPU.  The "memory" clobber makes
 * this a compiler barrier as well.
 */
static inline void local_irq_disable(void)
{
	__asm__ __volatile__(
		"local_irq_disable"
		: /* no outputs */
		: /* no inputs */
		: "memory");
}

/* Copy the raw CP0 Status register ($12) into \flags. */
__asm__ (
	"	.macro	local_save_flags flags				\n"
	"	.set	push						\n"
	"	.set	reorder						\n"
	"	mfc0	\\flags, $12					\n"
	"	.set	pop						\n"
	"	.endm							\n");

/* local_save_flags(x): x receives the Status word; bit 0 is IE. */
#define local_save_flags(x)						\
__asm__ __volatile__(							\
	"local_save_flags %0"						\
	: "=r" (x))

/*
 * Save the current interrupt state into \result and disable interrupts.
 * Note that only the R2 path masks \result down to the IE bit; the
 * pre-R2 path saves the whole Status word, which local_irq_restore
 * copes with by masking on its side.
 */
__asm__ (
	"	.macro	local_irq_save result				\n"
	"	.set	push						\n"
	"	.set	reorder						\n"
	"	.set	noat						\n"
#ifdef CONFIG_CPU_MIPSR2
	/* "di rt" reads the old Status into rt, then clears IE. */
	"	di	\\result					\n"
	"	andi	\\result, 1					\n"
#else
	/* Save Status, then write it back with bits 0-4 forced to 0. */
	"	mfc0	\\result, $12					\n"
	"	ori	$1, \\result, 0x1f				\n"
	"	xori	$1, 0x1f					\n"
	"	.set	noreorder					\n"
	"	mtc0	$1, $12						\n"
#endif
	"	irq_disable_hazard					\n"
	"	.set	pop						\n"
	"	.endm							\n");

/* local_irq_save(x): x receives the saved state for local_irq_restore(). */
#define local_irq_save(x)						\
__asm__ __volatile__(							\
	"local_irq_save\t%0"						\
	: "=r" (x)							\
	: /* no inputs */						\
	: "memory")

/*
 * Restore the interrupt state previously saved by local_irq_save.
 * \flags may be clobbered; the C macro below passes in a temporary.
 */
__asm__ (
	"	.macro	local_irq_restore flags				\n"
	"	.set	push						\n"
	"	.set	noreorder					\n"
	"	.set	noat						\n"
#if defined(CONFIG_CPU_MIPSR2) && defined(CONFIG_IRQ_CPU)
	/*
	 * Slow, but doesn't suffer from a relativly unlikely race
	 * condition we're having since days 1.
	 */
	"	beqz	\\flags, 1f					\n"
	"	di							\n"
	"	ei							\n"
	"1:								\n"
#elif defined(CONFIG_CPU_MIPSR2)
	/*
	 * Fast, dangerous.  Life is fun, life is good.
*/
	/* "ins" writes bit 0 of \flags straight into Status.IE. */
	"	mfc0	$1, $12						\n"
	"	ins	$1, \\flags, 0, 1				\n"
	"	mtc0	$1, $12						\n"
#else
	/*
	 * Pre-R2: "andi" keeps only the saved IE bit (the save path
	 * may have stored the whole Status word), "ori"/"xori" force
	 * bits 0-4 of the current Status to zero, and "or" merges the
	 * saved IE bit back in before writing Status.
	 */
	"	mfc0	$1, $12						\n"
	"	andi	\\flags, 1					\n"
	"	ori	$1, 0x1f					\n"
	"	xori	$1, 0x1f					\n"
	"	or	\\flags, $1					\n"
	"	mtc0	\\flags, $12					\n"
#endif
	"	irq_disable_hazard					\n"
	"	.set	pop						\n"
	"	.endm							\n");

/*
 * local_irq_restore(flags): restore the state saved by local_irq_save().
 * Only bit 0 (IE) of 'flags' is significant.  The asm macro may clobber
 * its register argument, so 'flags' is first copied into __tmp1 via the
 * matching "0" input constraint, leaving the caller's variable intact.
 */
#define local_irq_restore(flags)					\
do {									\
	unsigned long __tmp1;						\
									\
	__asm__ __volatile__(						\
		"local_irq_restore\t%0"					\
		: "=r" (__tmp1)						\
		: "0" (flags)						\
		: "memory");						\
} while(0)

/* Nonzero iff interrupts are disabled on this CPU (Status.IE clear). */
#define irqs_disabled()							\
({									\
	unsigned long flags;						\
	local_save_flags(flags);					\
	!(flags & 1);							\
})

#endif /* _ASM_INTERRUPT_H */