diff --git a/include/asm-mips/interrupt.h b/include/asm-mips/interrupt.h
index abdf54ee64cf306d35b0dd32702c5fed061d4f58..4bb9c06f44107006f4c6330df7f0aedb23bc96a2 100644
--- a/include/asm-mips/interrupt.h
+++ b/include/asm-mips/interrupt.h
@@ -19,7 +19,12 @@ __asm__ (
        "       .set    push                                            \n"
        "       .set    reorder                                         \n"
        "       .set    noat                                            \n"
-#ifdef CONFIG_CPU_MIPSR2
+#ifdef CONFIG_MIPS_MT_SMTC
+       "       mfc0    $1, $2, 1       # SMTC - clear TCStatus.IXMT    \n"
+       "       ori     $1, 0x400                                       \n"
+       "       xori    $1, 0x400                                       \n"
+       "       mtc0    $1, $2, 1                                       \n"
+#elif defined(CONFIG_CPU_MIPSR2)
        "       ei                                                      \n"
 #else
        "       mfc0    $1,$12                                          \n"
@@ -47,16 +52,32 @@ static inline void local_irq_enable(void)
  * R4000/R4400 need three nops, the R4600 two nops and the R10000 needs
  * no nops at all.
  */
+/*
+ * For TX49, operating on the IE bit alone is not enough.
+ *
+ * If an mfc0 $12 follows a store and the mfc0 is the last instruction
+ * of a page, and fetching the next instruction causes a TLB miss, the
+ * result of the mfc0 might wrongly contain the EXL bit.
+ *
+ * ERT-TX49H2-027, ERT-TX49H3-012, ERT-TX49HL3-006, ERT-TX49H4-008
+ *
+ * Workaround: mask the EXL bit of the result, or place a nop before mfc0.
+ */
 __asm__ (
        "       .macro  local_irq_disable\n"
        "       .set    push                                            \n"
        "       .set    noat                                            \n"
-#ifdef CONFIG_CPU_MIPSR2
+#ifdef CONFIG_MIPS_MT_SMTC
+       "       mfc0    $1, $2, 1                                       \n"
+       "       ori     $1, 0x400                                       \n"
+       "       .set    noreorder                                       \n"
+       "       mtc0    $1, $2, 1                                       \n"
+#elif defined(CONFIG_CPU_MIPSR2)
        "       di                                                      \n"
 #else
        "       mfc0    $1,$12                                          \n"
-       "       ori     $1,1                                            \n"
-       "       xori    $1,1                                            \n"
+       "       ori     $1,0x1f                                         \n"
+       "       xori    $1,0x1f                                         \n"
        "       .set    noreorder                                       \n"
        "       mtc0    $1,$12                                          \n"
 #endif
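
A note on the widened mask above: the pre-R2 sequence used to set and
clear only Status.IE (bit 0), but per the TX49 erratum the value read by
mfc0 may spuriously carry EXL (bit 1), and writing that back to
c0_status would wrongly raise exception level. Masking all of
Status[4:0] (IE, EXL, ERL and KSU, normally zero in kernel mode) with
the same ori/xori idiom discards a bogus EXL along with IE, since
(x | b) ^ b == x & ~b. A minimal C sketch of the difference, with plain
integers standing in for the CP0 register access:

    #include <assert.h>
    #include <stdint.h>

    #define ST0_IE   0x01u   /* Status[0]: global interrupt enable */
    #define ST0_EXL  0x02u   /* Status[1]: exception level         */
    #define ST0_LOW  0x1fu   /* Status[4:0]: IE, EXL, ERL, KSU     */

    /* Old sequence: clears IE only, so a spurious EXL is written back. */
    static uint32_t disable_old(uint32_t status)
    {
            return (status | ST0_IE) ^ ST0_IE;       /* ori 1; xori 1       */
    }

    /* New sequence: clears Status[4:0], swallowing a bogus EXL too. */
    static uint32_t disable_new(uint32_t status)
    {
            return (status | ST0_LOW) ^ ST0_LOW;     /* ori 0x1f; xori 0x1f */
    }

    int main(void)
    {
            uint32_t bogus = 0x10000001u | ST0_EXL;    /* erratum: EXL wrongly set */
            assert(disable_old(bogus) == 0x10000002u); /* EXL leaks through        */
            assert(disable_new(bogus) == 0x10000000u); /* EXL safely masked        */
            return 0;
    }
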
@@ -77,7 +98,11 @@ __asm__ (
        "       .macro  local_save_flags flags                          \n"
        "       .set    push                                            \n"
        "       .set    reorder                                         \n"
+#ifdef CONFIG_MIPS_MT_SMTC
+       "       mfc0    \\flags, $2, 1                                  \n"
+#else
        "       mfc0    \\flags, $12                                    \n"
+#endif
        "       .set    pop                                             \n"
        "       .endm                                                   \n");
 
@@ -91,13 +116,19 @@ __asm__ (
        "       .set    push                                            \n"
        "       .set    reorder                                         \n"
        "       .set    noat                                            \n"
-#ifdef CONFIG_CPU_MIPSR2
+#ifdef CONFIG_MIPS_MT_SMTC
+       "       mfc0    \\result, $2, 1                                 \n"
+       "       ori     $1, \\result, 0x400                             \n"
+       "       .set    noreorder                                       \n"
+       "       mtc0    $1, $2, 1                                       \n"
+       "       andi    \\result, \\result, 0x400                       \n"
+#elif defined(CONFIG_CPU_MIPSR2)
        "       di      \\result                                        \n"
        "       andi    \\result, 1                                     \n"
 #else
        "       mfc0    \\result, $12                                   \n"
-       "       ori     $1, \\result, 1                                 \n"
-       "       xori    $1, 1                                           \n"
+       "       ori     $1, \\result, 0x1f                              \n"
+       "       xori    $1, 0x1f                                        \n"
        "       .set    noreorder                                       \n"
        "       mtc0    $1, $12                                         \n"
 #endif
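
In the SMTC leg of local_irq_save above, \result ends up holding only
the old IXMT bit while the live TCStatus gets IXMT set, so a later
local_irq_restore can reinstate exactly the prior masking state. A small
C model of that contract (the tcstatus variable and helper name are
illustrative stand-ins for the CP0 TCStatus access):

    #include <assert.h>
    #include <stdint.h>

    #define TCSTATUS_IXMT 0x400u    /* bit 10: interrupts masked for this TC */

    static uint32_t tcstatus;       /* stand-in for CP0 TCStatus ($2, sel 1) */

    /* Model of the SMTC leg: save the old IXMT bit, then set IXMT. */
    static uint32_t model_irq_save(void)
    {
            uint32_t old = tcstatus;            /* mfc0 \result, $2, 1      */
            tcstatus = old | TCSTATUS_IXMT;     /* ori + mtc0: IXMT := 1    */
            return old & TCSTATUS_IXMT;         /* andi: keep only old IXMT */
    }

    int main(void)
    {
            tcstatus = 0;                              /* interrupts enabled */
            assert(model_irq_save() == 0);             /* "was enabled"      */
            assert(tcstatus & TCSTATUS_IXMT);          /* now masked         */
            assert(model_irq_save() == TCSTATUS_IXMT); /* nested save        */
            return 0;
    }
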
@@ -114,9 +145,17 @@ __asm__ __volatile__(                                                      \
 
 __asm__ (
        "       .macro  local_irq_restore flags                         \n"
+       "       .set    push                                            \n"
        "       .set    noreorder                                       \n"
        "       .set    noat                                            \n"
-#if defined(CONFIG_CPU_MIPSR2) && defined(CONFIG_IRQ_CPU)
+#ifdef CONFIG_MIPS_MT_SMTC
+       "       mfc0    $1, $2, 1                                       \n"
+       "       andi    \\flags, 0x400                                  \n"
+       "       ori     $1, 0x400                                       \n"
+       "       xori    $1, 0x400                                       \n"
+       "       or      \\flags, $1                                     \n"
+       "       mtc0    \\flags, $2, 1                                  \n"
+#elif defined(CONFIG_CPU_MIPSR2) && defined(CONFIG_IRQ_CPU)
        /*
         * Slow, but doesn't suffer from the relatively unlikely race
         * condition we've had since day 1.
@@ -135,14 +174,13 @@ __asm__ (
 #else
        "       mfc0    $1, $12                                         \n"
        "       andi    \\flags, 1                                      \n"
-       "       ori     $1, 1                                           \n"
-       "       xori    $1, 1                                           \n"
+       "       ori     $1, 0x1f                                        \n"
+       "       xori    $1, 0x1f                                        \n"
        "       or      \\flags, $1                                     \n"
        "       mtc0    \\flags, $12                                    \n"
 #endif
        "       irq_disable_hazard                                      \n"
-       "       .set    at                                              \n"
-       "       .set    reorder                                         \n"
+       "       .set    pop                                             \n"
        "       .endm                                                   \n");
 
 #define local_irq_restore(flags)                                       \
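Two things are worth noting in the restore paths above. First, the
pre-R2 fallback is inherently racy: between the mfc0 and the mtc0 an
interrupt may change c0_status, and the mtc0 then clobbers that change;
that is the long-standing race the MIPSR2 di/ei variant avoids. Second,
the SMTC leg merges the saved IXMT bit into the current TCStatus rather
than writing back the whole saved word, so unrelated TCStatus bits
modified in the interim survive. A C model of that merge, under the same
stand-in assumptions as the sketch above:

    #include <assert.h>
    #include <stdint.h>

    #define TCSTATUS_IXMT 0x400u

    static uint32_t tcstatus;   /* stand-in for CP0 TCStatus ($2, sel 1) */

    /* SMTC restore: current TCStatus with IXMT replaced by the saved bit. */
    static void model_irq_restore(uint32_t flags)
    {
            uint32_t cur = tcstatus;                     /* mfc0 $1, $2, 1     */
            flags &= TCSTATUS_IXMT;                      /* andi \flags, 0x400 */
            cur = (cur | TCSTATUS_IXMT) ^ TCSTATUS_IXMT; /* ori/xori: clear    */
            tcstatus = cur | flags;                      /* or; mtc0           */
    }

    int main(void)
    {
            tcstatus = TCSTATUS_IXMT | 0x1u;  /* masked, unrelated bit set */
            model_irq_restore(0);             /* saved state: was enabled  */
            assert(tcstatus == 0x1u);         /* IXMT cleared, bit 0 kept  */
            model_irq_restore(TCSTATUS_IXMT); /* saved state: was masked   */
            assert(tcstatus == (TCSTATUS_IXMT | 0x1u));
            return 0;
    }
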
@@ -156,11 +194,29 @@ do {                                                                      \
                : "memory");                                            \
 } while(0)
 
-#define irqs_disabled()                                                        \
-({                                                                     \
-       unsigned long flags;                                            \
-       local_save_flags(flags);                                        \
-       !(flags & 1);                                                   \
-})
+static inline int irqs_disabled(void)
+{
+#ifdef CONFIG_MIPS_MT_SMTC
+       /*
+        * The SMTC model uses TCStatus.IXMT to disable interrupts for a thread/CPU.
+        */
+       unsigned long __result;
+
+       __asm__ __volatile__(
+       "       .set    noreorder                                       \n"
+       "       mfc0    %0, $2, 1                                       \n"
+       "       andi    %0, 0x400                                       \n"
+       "       slt     %0, $0, %0                                      \n"
+       "       .set    reorder                                         \n"
+       : "=r" (__result));
+
+       return __result;
+#else
+       unsigned long flags;
+       local_save_flags(flags);
+
+       return !(flags & 1);
+#endif
+}
 
 #endif /* _ASM_INTERRUPT_H */
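
Finally, note the inverted polarity between the two irqs_disabled
branches: under SMTC a set TCStatus.IXMT (bit 10) means the thread is
masked, so the andi/slt pair yields 1 when interrupts are disabled,
whereas the classic path tests Status.IE, where a set bit means enabled,
hence the negation. A quick C check of both conventions (register values
simulated):

    #include <assert.h>

    /* SMTC: TCStatus.IXMT == 1 means interrupts are masked for this TC. */
    static int irqs_disabled_smtc(unsigned long tcstatus)
    {
            return (tcstatus & 0x400) != 0;  /* andi %0, 0x400; slt %0, $0, %0 */
    }

    /* Classic: Status.IE == 1 means interrupts are enabled, so invert. */
    static int irqs_disabled_classic(unsigned long status)
    {
            return !(status & 1);
    }

    int main(void)
    {
            assert(irqs_disabled_smtc(0x400) && !irqs_disabled_smtc(0));
            assert(irqs_disabled_classic(0) && !irqs_disabled_classic(1));
            return 0;
    }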