pkt_sched: gen_estimator: add a new lock

gen_kill_estimator() / gen_new_estimator() are not always called with
RTNL held.

net/netfilter/xt_RATEEST.c is one user of this API that does not hold
RTNL, so random corruptions can occur between "tc" and "iptables".

Add a new fine-grained lock instead of trying to use RTNL in netfilter.

Signed-off-by: Eric Dumazet <eric.dumazet@gmail.com>
Signed-off-by: David S. Miller <davem@davemloft.net>


+12 -3
net/core/gen_estimator.c
···
 
 /* Protects against soft lockup during large deletion */
 static struct rb_root est_root = RB_ROOT;
+static DEFINE_SPINLOCK(est_tree_lock);
 
 static void est_timer(unsigned long arg)
 {
···
  *
  * Returns 0 on success or a negative error code.
  *
- * NOTE: Called under rtnl_mutex
  */
 int gen_new_estimator(struct gnet_stats_basic_packed *bstats,
 		      struct gnet_stats_rate_est *rate_est,
···
 	est->last_packets = bstats->packets;
 	est->avpps = rate_est->pps<<10;
 
+	spin_lock(&est_tree_lock);
 	if (!elist[idx].timer.function) {
 		INIT_LIST_HEAD(&elist[idx].list);
 		setup_timer(&elist[idx].timer, est_timer, idx);
···
 
 	list_add_rcu(&est->list, &elist[idx].list);
 	gen_add_node(est);
+	spin_unlock(&est_tree_lock);
 
 	return 0;
 }
···
  *
  * Removes the rate estimator specified by &bstats and &rate_est.
  *
- * NOTE: Called under rtnl_mutex
  */
 void gen_kill_estimator(struct gnet_stats_basic_packed *bstats,
 			struct gnet_stats_rate_est *rate_est)
 {
 	struct gen_estimator *e;
 
+	spin_lock(&est_tree_lock);
 	while ((e = gen_find_node(bstats, rate_est))) {
 		rb_erase(&e->node, &est_root);
 
···
 		list_del_rcu(&e->list);
 		call_rcu(&e->e_rcu, __gen_kill_estimator);
 	}
+	spin_unlock(&est_tree_lock);
 }
 EXPORT_SYMBOL(gen_kill_estimator);
···
 bool gen_estimator_active(const struct gnet_stats_basic_packed *bstats,
 			  const struct gnet_stats_rate_est *rate_est)
 {
+	bool res;
+
 	ASSERT_RTNL();
 
-	return gen_find_node(bstats, rate_est) != NULL;
+	spin_lock(&est_tree_lock);
+	res = gen_find_node(bstats, rate_est) != NULL;
+	spin_unlock(&est_tree_lock);
+
+	return res;
 }
 EXPORT_SYMBOL(gen_estimator_active);
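
For context, a minimal sketch of the locking pattern the patch introduces: a dedicated
spinlock serializes every update to a shared rb-tree, independently of whether the caller
holds RTNL. The names my_root, my_tree_lock, struct my_node and my_node_add() are
illustrative only; gen_estimator.c uses est_root, est_tree_lock and its own
gen_add_node()/gen_find_node() helpers.

#include <linux/spinlock.h>
#include <linux/rbtree.h>

/* One lock per shared structure, independent of RTNL. */
static struct rb_root my_root = RB_ROOT;
static DEFINE_SPINLOCK(my_tree_lock);

struct my_node {
	struct rb_node node;
	unsigned long key;
};

/* Insert while holding the dedicated lock; callers may or may not hold RTNL. */
static void my_node_add(struct my_node *n)
{
	struct rb_node **p, *parent = NULL;

	spin_lock(&my_tree_lock);
	p = &my_root.rb_node;
	while (*p) {
		struct my_node *e = rb_entry(*p, struct my_node, node);

		parent = *p;
		p = (n->key < e->key) ? &(*p)->rb_left : &(*p)->rb_right;
	}
	rb_link_node(&n->node, parent, p);
	rb_insert_color(&n->node, &my_root);
	spin_unlock(&my_tree_lock);
}

Lookups and removals must take the same lock, as gen_kill_estimator() and
gen_estimator_active() do in the patch, so that concurrent "tc" and "iptables" paths can
no longer corrupt the tree.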