scratch = *raw_cpu_ptr(m->scratch);
if (unlikely(!scratch))
goto out;
+ __local_lock_nested_bh(&scratch->bh_lock);
map_index = scratch->map_index;
last);
if (b < 0) {
scratch->map_index = map_index;
+ __local_unlock_nested_bh(&scratch->bh_lock);
local_bh_enable();
return NULL;
* *next* bitmap (not initial) for the next packet.
*/
scratch->map_index = map_index;
+ __local_unlock_nested_bh(&scratch->bh_lock);
local_bh_enable();
return e;
}
data += NFT_PIPAPO_GROUPS_PADDING(f);
}
+ __local_unlock_nested_bh(&scratch->bh_lock);
out:
local_bh_enable();
return NULL;
}
pipapo_free_scratch(clone, i);
+ local_lock_init(&scratch->bh_lock);
*per_cpu_ptr(clone->scratch, i) = scratch;
}
/**
 * struct nft_pipapo_scratch - percpu data used for lookup and matching
+ * @bh_lock: PREEMPT_RT local spinlock; taken (nested under BH disable) around
+ *	     lookups to serialize access to @map_index and @__map on this CPU
+ *	     (presumably compiles to a no-op on !PREEMPT_RT — see local_lock docs)
 * @map_index: Current working bitmap index, toggled between field matches
 * @__map: store partial matching results during lookup
 */
struct nft_pipapo_scratch {
+	local_lock_t bh_lock;
	u8 map_index;
	unsigned long __map[];
};
if (unlikely(!scratch))
return NULL;
+ __local_lock_nested_bh(&scratch->bh_lock);
map_index = scratch->map_index;
map = NFT_PIPAPO_LT_ALIGN(&scratch->__map[0]);
res = map + (map_index ? m->bsize_max : 0);
if (ret < 0) {
scratch->map_index = map_index;
kernel_fpu_end();
+ __local_unlock_nested_bh(&scratch->bh_lock);
return NULL;
}
scratch->map_index = map_index;
kernel_fpu_end();
+ __local_unlock_nested_bh(&scratch->bh_lock);
return e;
}
}
kernel_fpu_end();
+ __local_unlock_nested_bh(&scratch->bh_lock);
return NULL;
}