tss->io_bitmap_base = INVALID_IO_BITMAP_OFFSET_LAZY;
 }
 
+/*
+ * Decide whether the context switch from prev to next has to toggle
+ * the TSC-disable bit (TSD) in CR4.
+ */
+static inline void disable_tsc(struct task_struct *prev_p,
+                              struct task_struct *next_p)
+{
+       struct thread_info *prev, *next;
+
+       /*
+        * gcc should eliminate the ->thread_info dereference if
+        * has_secure_computing returns 0 at compile time (SECCOMP=n).
+        */
+       prev = prev_p->thread_info;
+       next = next_p->thread_info;
+
+       if (has_secure_computing(prev) || has_secure_computing(next)) {
+               /* slow path: at least one of the two tasks uses seccomp */
+               if (has_secure_computing(prev) &&
+                   !has_secure_computing(next)) {
+                       /* leaving a seccomp task: let rdtsc work again */
+                       write_cr4(read_cr4() & ~X86_CR4_TSD);
+               } else if (!has_secure_computing(prev) &&
+                          has_secure_computing(next)) {
+                       /* entering a seccomp task: make rdtsc trap */
+                       write_cr4(read_cr4() | X86_CR4_TSD);
+               }
+       }
+}
+
 /*
 *     switch_to(x,y) should switch tasks from x to y.
  *
        if (unlikely(prev->io_bitmap_ptr || next->io_bitmap_ptr))
                handle_io_bitmap(next, tss);
 
+       disable_tsc(prev_p, next_p);
+
        return prev_p;
 }
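
For reference, the user-visible effect of the bit being toggled above: with
X86_CR4_TSD set, a rdtsc executed at CPL 3 raises #GP, which the kernel
delivers as SIGSEGV. A minimal userspace sketch of that behaviour follows;
the prctl(PR_SET_SECCOMP) entry point is an assumption here (kernels of this
era enter mode 1 by writing to /proc/<pid>/seccomp instead), and note that
the bit only takes effect once the task has been switched in again:

#include <stdio.h>
#include <sys/prctl.h>

#ifndef PR_SET_SECCOMP
#define PR_SET_SECCOMP 22	/* assumed value, taken from later kernels */
#endif

int main(void)
{
	unsigned int lo, hi;

	if (prctl(PR_SET_SECCOMP, 1) != 0)	/* enter strict seccomp */
		return 1;
	/* after the next context switch into this task, this traps: */
	__asm__ __volatile__("rdtsc" : "=a" (lo), "=d" (hi));
	printf("tsc %u:%u\n", hi, lo);		/* not reached */
	return 0;
}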
 
 
        return err;
 }
 
+/*
+ * Decide whether the context switch from prev to next has to toggle
+ * the TSC-disable bit (TSD) in CR4.
+ */
+static inline void disable_tsc(struct task_struct *prev_p,
+                              struct task_struct *next_p)
+{
+       struct thread_info *prev, *next;
+
+       /*
+        * gcc should eliminate the ->thread_info dereference if
+        * has_secure_computing returns 0 at compile time (SECCOMP=n).
+        */
+       prev = prev_p->thread_info;
+       next = next_p->thread_info;
+
+       if (has_secure_computing(prev) || has_secure_computing(next)) {
+               /* slow path: at least one of the two tasks uses seccomp */
+               if (has_secure_computing(prev) &&
+                   !has_secure_computing(next)) {
+                       /* leaving a seccomp task: let rdtsc work again */
+                       write_cr4(read_cr4() & ~X86_CR4_TSD);
+               } else if (!has_secure_computing(prev) &&
+                          has_secure_computing(next)) {
+                       /* entering a seccomp task: make rdtsc trap */
+                       write_cr4(read_cr4() | X86_CR4_TSD);
+               }
+       }
+}
+
 /*
  * This special macro can be used to load a debugging register
  */
                }
        }
 
+       disable_tsc(prev_p, next_p);
+
        return prev_p;
 }
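
Both copies of disable_tsc() above go through the read_cr4()/write_cr4()
accessors. As a reminder of what those amount to, a sketch of the i386
flavour (the authoritative definitions live in the arch headers; the exact
spelling there may differ):

static inline unsigned long read_cr4(void)
{
	unsigned long cr4;

	__asm__ __volatile__("movl %%cr4, %0" : "=r" (cr4));
	return cr4;
}

static inline void write_cr4(unsigned long cr4)
{
	__asm__ __volatile__("movl %0, %%cr4" : : "r" (cr4));
}

Since CR4 is per-CPU state, the read-modify-write in disable_tsc() is only
safe with preemption off, which holds on the context-switch path where it
is called.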
 
 
  */
 #define __flush_tlb_global()                                           \
        do {                                                            \
-               unsigned int tmpreg;                                    \
+               unsigned int tmpreg, cr4, cr4_orig;                     \
                                                                        \
                __asm__ __volatile__(                                   \
-                       "movl %1, %%cr4;  # turn off PGE     \n"        \
+                       "movl %%cr4, %2;  # turn off PGE     \n"        \
+                       "movl %2, %1;                        \n"        \
+                       "andl %3, %1;                        \n"        \
+                       "movl %1, %%cr4;                     \n"        \
                        "movl %%cr3, %0;                     \n"        \
                        "movl %0, %%cr3;  # flush TLB        \n"        \
                        "movl %2, %%cr4;  # turn PGE back on \n"        \
-                       : "=&r" (tmpreg)                                \
-                       : "r" (mmu_cr4_features & ~X86_CR4_PGE),        \
-                         "r" (mmu_cr4_features)                        \
+                       : "=&r" (tmpreg), "=&r" (cr4), "=&r" (cr4_orig) \
+                       : "i" (~X86_CR4_PGE)                            \
                        : "memory");                                    \
        } while (0)
 
 
  */
 #define __flush_tlb_global()                                           \
        do {                                                            \
-               unsigned long tmpreg;                                   \
+               unsigned long tmpreg, cr4, cr4_orig;                    \
                                                                        \
                __asm__ __volatile__(                                   \
-                       "movq %1, %%cr4;  # turn off PGE     \n"        \
+                       "movq %%cr4, %2;  # turn off PGE     \n"        \
+                       "movq %2, %1;                        \n"        \
+                       "andq %3, %1;                        \n"        \
+                       "movq %1, %%cr4;                     \n"        \
                        "movq %%cr3, %0;  # flush TLB        \n"        \
                        "movq %0, %%cr3;                     \n"        \
                        "movq %2, %%cr4;  # turn PGE back on \n"        \
-                       : "=&r" (tmpreg)                                \
-                       : "r" (mmu_cr4_features & ~X86_CR4_PGE),        \
-                         "r" (mmu_cr4_features)                        \
+                       : "=&r" (tmpreg), "=&r" (cr4), "=&r" (cr4_orig) \
+                       : "i" (~X86_CR4_PGE)                            \
                        : "memory");                                    \
        } while (0)
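
The two __flush_tlb_global() rewrites above exist because CR4 stops being a
boot-time constant once TSD is flipped per task: writing mmu_cr4_features
back at the end of a flush could silently clear X86_CR4_TSD under a running
seccomp task (or set it for an unrestricted one). The asm therefore
snapshots the live CR4 and restores that snapshot. As a C-level sketch of
the same sequence (helper names illustrative; the real code stays in one
asm block so the compiler cannot reorder or separate the steps):

static inline void flush_tlb_global_sketch(void)
{
	unsigned long cr4 = read_cr4();	/* live value, TSD included */

	write_cr4(cr4 & ~X86_CR4_PGE);	/* drop PGE: global entries become flushable */
	write_cr3(read_cr3());		/* cr3 reload flushes the TLB */
	write_cr4(cr4);			/* PGE back on, TSD untouched */
}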
 
 
                __secure_computing(this_syscall);
 }
 
+static inline int has_secure_computing(struct thread_info *ti)
+{
+       return unlikely(test_ti_thread_flag(ti, TIF_SECCOMP));
+}
+
 #else /* CONFIG_SECCOMP */
 
 #if (__GNUC__ > 2)
 #endif
 
 #define secure_computing(x) do { } while (0)
+/* static inline so callers still get typechecking when SECCOMP=n */
+static inline int has_secure_computing(struct thread_info *ti)
+{
+       return 0;
+}
 
 #endif /* CONFIG_SECCOMP */
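
has_secure_computing() only ever observes TIF_SECCOMP; the flag is raised
once, when a task enters mode 1 (in kernels of this era, through the
/proc/<pid>/seccomp write handler). A hedged sketch of that side, with an
illustrative helper name:

#include <linux/sched.h>

/*
 * Illustrative only: mark @tsk as a seccomp task.  From the next
 * context switch into @tsk onwards, disable_tsc() will see the flag
 * and set X86_CR4_TSD for it.
 */
static int seccomp_enter_strict(struct task_struct *tsk)
{
	set_tsk_thread_flag(tsk, TIF_SECCOMP);
	return 0;
}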