ftrace_global_list_func(unsigned long ip, unsigned long parent_ip,
                        struct ftrace_ops *op, struct pt_regs *regs)
 {
-       if (unlikely(trace_recursion_test(TRACE_GLOBAL_BIT)))
+       int bit;
+
+       if (in_interrupt()) {
+               if (in_nmi())
+                       bit = TRACE_GLOBAL_NMI_BIT;
+
+               else if (in_irq())
+                       bit = TRACE_GLOBAL_IRQ_BIT;
+               else
+                       bit = TRACE_GLOBAL_SIRQ_BIT;
+       } else
+               bit = TRACE_GLOBAL_BIT;
+
+       if (unlikely(trace_recursion_test(bit)))
                return;
 
-       trace_recursion_set(TRACE_GLOBAL_BIT);
+       trace_recursion_set(bit);
        do_for_each_ftrace_op(op, ftrace_global_list) {
                op->func(ip, parent_ip, op, regs);
        } while_for_each_ftrace_op(op);
-       trace_recursion_clear(TRACE_GLOBAL_BIT);
+       trace_recursion_clear(bit);
 }
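
Both callbacks touched by this patch open-code the same context-selection ladder. A possible consolidation is sketched below; this is illustrative only and not part of the patch, and the helper name trace_context_bit() is hypothetical. It relies on the NMI/IRQ/SIRQ bits being defined directly after their process-context bit (see the defines later in this patch):

static __always_inline int trace_context_bit(int base_bit)
{
        /*
         * base_bit is the process-context bit (e.g. TRACE_GLOBAL_BIT).
         * The NMI, IRQ and softirq bits occupy the next three positions,
         * so shifting the mask selects the bit for the current context.
         */
        if (in_interrupt()) {
                if (in_nmi())
                        return base_bit << 1;   /* ..._NMI_BIT */
                if (in_irq())
                        return base_bit << 2;   /* ..._IRQ_BIT */
                return base_bit << 3;           /* ..._SIRQ_BIT */
        }
        return base_bit;
}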
 
static void __ftrace_ops_list_func(unsigned long ip, unsigned long parent_ip,
                       struct ftrace_ops *ignored, struct pt_regs *regs)
 {
        struct ftrace_ops *op;
+       unsigned int bit;
 
        if (function_trace_stop)
                return;
 
-       if (unlikely(trace_recursion_test(TRACE_INTERNAL_BIT)))
-               return;
+       if (in_interrupt()) {
+               if (in_nmi())
+                       bit = TRACE_INTERNAL_NMI_BIT;
+
+               else if (in_irq())
+                       bit = TRACE_INTERNAL_IRQ_BIT;
+               else
+                       bit = TRACE_INTERNAL_SIRQ_BIT;
+       } else
+               bit = TRACE_INTERNAL_BIT;
+
+       if (unlikely(trace_recursion_test(bit)))
+               return;
+
+       trace_recursion_set(bit);
 
-       trace_recursion_set(TRACE_INTERNAL_BIT);
        /*
         * Some of the ops may be dynamically allocated,
         * they must be freed after a synchronize_sched().
                        op->func(ip, parent_ip, op, regs);
        } while_for_each_ftrace_op(op);
        preempt_enable_notrace();
-       trace_recursion_clear(TRACE_INTERNAL_BIT);
+       trace_recursion_clear(bit);
 }
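
With a helper like the hypothetical trace_context_bit() sketched earlier, each callback reduces to the same test/set/clear pattern. The function below is a usage sketch only (example_list_func is not a real kernel function), not how the patch itself is written:

static void example_list_func(unsigned long ip, unsigned long parent_ip,
                              struct ftrace_ops *ignored, struct pt_regs *regs)
{
        int bit = trace_context_bit(TRACE_INTERNAL_BIT);

        if (unlikely(trace_recursion_test(bit)))
                return;

        trace_recursion_set(bit);
        /* ... iterate the registered ftrace_ops and call op->func() ... */
        trace_recursion_clear(bit);
}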
 
 
 
 /* for function tracing recursion */
 #define TRACE_INTERNAL_BIT             (1<<11)
-#define TRACE_GLOBAL_BIT               (1<<12)
-#define TRACE_CONTROL_BIT              (1<<13)
+#define TRACE_INTERNAL_NMI_BIT         (1<<12)
+#define TRACE_INTERNAL_IRQ_BIT         (1<<13)
+#define TRACE_INTERNAL_SIRQ_BIT        (1<<14)
+#define TRACE_GLOBAL_BIT               (1<<15)
+#define TRACE_GLOBAL_NMI_BIT           (1<<16)
+#define TRACE_GLOBAL_IRQ_BIT           (1<<17)
+#define TRACE_GLOBAL_SIRQ_BIT          (1<<18)
+#define TRACE_CONTROL_BIT              (1<<19)
 
 /*
 * Abuse of the trace_recursion.
 * As we need a way to maintain state if we are tracing the function
 * graph in irq because we want to trace a particular function that
 * was called in irq context but we have irq tracing off. Since this
  * can only be modified by current, we can reuse trace_recursion.
  */
-#define TRACE_IRQ_BIT                  (1<<13)
+#define TRACE_IRQ_BIT                  (1<<20)
 
 #define trace_recursion_set(bit)       do { (current)->trace_recursion |= (bit); } while (0)
 #define trace_recursion_clear(bit)     do { (current)->trace_recursion &= ~(bit); } while (0)
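
The value of one bit per context is that a callback interrupted between trace_recursion_set() and trace_recursion_clear() no longer blocks the callback run from the interrupting context, while genuine recursion within one context is still caught. The following stand-alone user-space program is a minimal sketch of that bitmask logic; it is not kernel code, the defines merely mirror the values above, and the trace_recursion variable stands in for current->trace_recursion:

#include <stdio.h>

#define TRACE_INTERNAL_BIT      (1 << 11)
#define TRACE_INTERNAL_IRQ_BIT  (1 << 13)

static unsigned long trace_recursion;  /* stands in for current->trace_recursion */

/* Returns 1 if the callback ran, 0 if it was dropped as recursion. */
static int callback(unsigned long bit)
{
        if (trace_recursion & bit)      /* trace_recursion_test() */
                return 0;
        trace_recursion |= bit;         /* trace_recursion_set() */
        /* ... the registered ftrace ops would run here ... */
        trace_recursion &= ~bit;        /* trace_recursion_clear() */
        return 1;
}

int main(void)
{
        /* Process-context callback is mid-flight: its bit is held. */
        trace_recursion |= TRACE_INTERNAL_BIT;

        /* An irq-context callback uses its own bit, so it still runs. */
        printf("irq context traced: %d\n", callback(TRACE_INTERNAL_IRQ_BIT));

        /* Genuine recursion in the same context is still rejected. */
        printf("process context recursion traced: %d\n",
               callback(TRACE_INTERNAL_BIT));

        trace_recursion &= ~TRACE_INTERNAL_BIT;
        return 0;
}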