From: Xiao Wang
Date: Thu, 23 May 2024 03:18:35 +0000 (+0800)
Subject: riscv, bpf: Use STACK_ALIGN macro for size rounding up
X-Git-Url: https://www.infradead.org/git/?a=commitdiff_plain;h=e944fc8152744a41dc62e720995538e48b053bb9;p=linux.git

riscv, bpf: Use STACK_ALIGN macro for size rounding up

Use the macro STACK_ALIGN that is defined in asm/processor.h for stack
size rounding up, just like bpf_jit_comp32.c does.

Signed-off-by: Xiao Wang
Signed-off-by: Daniel Borkmann
Reviewed-by: Pu Lehui
Link: https://lore.kernel.org/bpf/20240523031835.3977713-1-xiao.w.wang@intel.com
---

diff --git a/arch/riscv/net/bpf_jit_comp64.c b/arch/riscv/net/bpf_jit_comp64.c
index 79a001d5533e..c21a0ff23415 100644
--- a/arch/riscv/net/bpf_jit_comp64.c
+++ b/arch/riscv/net/bpf_jit_comp64.c
@@ -868,7 +868,7 @@ static int __arch_prepare_bpf_trampoline(struct bpf_tramp_image *im,
 	stack_size += 8;
 	sreg_off = stack_size;
 
-	stack_size = round_up(stack_size, 16);
+	stack_size = round_up(stack_size, STACK_ALIGN);
 
 	if (!is_struct_ops) {
 		/* For the trampoline called from function entry,
@@ -1960,7 +1960,7 @@ void bpf_jit_build_prologue(struct rv_jit_context *ctx, bool is_subprog)
 {
 	int i, stack_adjust = 0, store_offset, bpf_stack_adjust;
 
-	bpf_stack_adjust = round_up(ctx->prog->aux->stack_depth, 16);
+	bpf_stack_adjust = round_up(ctx->prog->aux->stack_depth, STACK_ALIGN);
 	if (bpf_stack_adjust)
 		mark_fp(ctx);
 
@@ -1982,7 +1982,7 @@ void bpf_jit_build_prologue(struct rv_jit_context *ctx, bool is_subprog)
 	if (ctx->arena_vm_start)
 		stack_adjust += 8;
 
-	stack_adjust = round_up(stack_adjust, 16);
+	stack_adjust = round_up(stack_adjust, STACK_ALIGN);
 	stack_adjust += bpf_stack_adjust;
 
 	store_offset = stack_adjust - 8;
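
For context (not part of the patch): a minimal, stand-alone sketch of the
rounding this change touches. STACK_ALIGN is the asm/processor.h macro the
commit message refers to; the value 16 below matches the literal being
replaced in the diff, and the simplified round_up() is only a stand-in for
the kernel helper, valid for power-of-two alignments.

    /* Illustrative sketch only, not kernel code. */
    #include <stdio.h>

    #define STACK_ALIGN 16  /* matches the literal the patch replaces */
    /* simplified round_up() for power-of-two alignments */
    #define round_up(x, y)  (((x) + (y) - 1) & ~((y) - 1))

    int main(void)
    {
            unsigned int stack_depth = 24;  /* hypothetical BPF stack depth */

            /* 24 rounds up to 32, so the frame stays 16-byte aligned */
            printf("%u\n", round_up(stack_depth, STACK_ALIGN));
            return 0;
    }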