#endif
 
 #ifdef CONFIG_STACKTRACE
-static void __ftrace_trace_stack(struct trace_buffer *buffer,
+static void __ftrace_trace_stack(struct trace_array *tr,
+                                struct trace_buffer *buffer,
                                 unsigned int trace_ctx,
                                 int skip, struct pt_regs *regs);
 static inline void ftrace_trace_stack(struct trace_array *tr,
                                      int skip, struct pt_regs *regs);
 
 #else
-static inline void __ftrace_trace_stack(struct trace_buffer *buffer,
+static inline void __ftrace_trace_stack(struct trace_array *tr,
+                                       struct trace_buffer *buffer,
                                        unsigned int trace_ctx,
                                        int skip, struct pt_regs *regs)
 {
 static DEFINE_PER_CPU(struct ftrace_stacks, ftrace_stacks);
 static DEFINE_PER_CPU(int, ftrace_stack_reserve);
 
-static void __ftrace_trace_stack(struct trace_buffer *buffer,
+static void __ftrace_trace_stack(struct trace_array *tr,
+                                struct trace_buffer *buffer,
                                 unsigned int trace_ctx,
                                 int skip, struct pt_regs *regs)
 {
                nr_entries = stack_trace_save(fstack->calls, size, skip);
        }
 
+#ifdef CONFIG_DYNAMIC_FTRACE
+       /*
+        * Mark stack trace entries that point into the ftrace trampoline.
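+        * The trampoline is dynamically allocated and can be freed once its
+        * ftrace_ops is unregistered, while the saved stack trace may be read
+        * from the ring buffer long after that. A raw trampoline address would
+        * then be stale, so store a fixed marker value instead.
+        */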
+       if (tr->ops && tr->ops->trampoline) {
+               unsigned long tramp_start = tr->ops->trampoline;
+               unsigned long tramp_end = tramp_start + tr->ops->trampoline_size;
+               unsigned long *calls = fstack->calls;
+
+               for (int i = 0; i < nr_entries; i++) {
+                       if (calls[i] >= tramp_start && calls[i] < tramp_end)
+                               calls[i] = FTRACE_TRAMPOLINE_MARKER;
+               }
+       }
+#endif
+
        event = __trace_buffer_lock_reserve(buffer, TRACE_STACK,
                                    struct_size(entry, caller, nr_entries),
                                    trace_ctx);
        if (!(tr->trace_flags & TRACE_ITER_STACKTRACE))
                return;
 
-       __ftrace_trace_stack(buffer, trace_ctx, skip, regs);
+       __ftrace_trace_stack(tr, buffer, trace_ctx, skip, regs);
 }
 
 void __trace_stack(struct trace_array *tr, unsigned int trace_ctx,
        struct trace_buffer *buffer = tr->array_buffer.buffer;
 
        if (rcu_is_watching()) {
-               __ftrace_trace_stack(buffer, trace_ctx, skip, NULL);
+               __ftrace_trace_stack(tr, buffer, trace_ctx, skip, NULL);
                return;
        }
 
                return;
 
        ct_irq_enter_irqson();
-       __ftrace_trace_stack(buffer, trace_ctx, skip, NULL);
+       __ftrace_trace_stack(tr, buffer, trace_ctx, skip, NULL);
        ct_irq_exit_irqson();
 }
 
        /* Skip 1 to skip this function. */
        skip++;
 #endif
-       __ftrace_trace_stack(printk_trace->array_buffer.buffer,
-                            tracing_gen_ctx(), skip, NULL);
+       __ftrace_trace_stack(printk_trace, printk_trace->array_buffer.buffer,
+                            tracing_gen_ctx(), skip, NULL);
 }
 EXPORT_SYMBOL_GPL(trace_dump_stack);