#include <linux/module.h>
#include <linux/types.h>
#include <linux/slab.h>
#include <linux/spinlock.h>
#include <linux/random.h>
#include <linux/time.h>
#include <linux/kernel.h>
#include <linux/net.h>
#include <net/inetpeer.h>
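/* When a tree is flushed, its nodes may still be referenced; they are queued
 * for a delayed garbage-collection worker, which retries every gc_delay
 * jiffies until everything has been freed.
 */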
static const int gc_delay = 60 * HZ;
#define node_height(x) x->avl_height
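/* An empty subtree is represented by a pointer to this statically allocated
 * fake node (height 0) instead of NULL, so node_height() can be applied to
 * either child of any node without a NULL check.
 */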
#define peer_avl_empty ((struct inet_peer *)&peer_fake_node)
#define peer_avl_empty_rcu ((struct inet_peer __rcu __force *)&peer_fake_node)
static const struct inet_peer peer_fake_node = {
        .avl_left       = peer_avl_empty_rcu,
        .avl_right      = peer_avl_empty_rcu,
        .avl_height     = 0
};
static atomic_t *inetpeer_seq_ptr(int family)
{
        return (family == AF_INET ? &v4_seq : &v6_seq);
}
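/* Tree flushes are lazy: flushing bumps a per-family sequence counter, and
 * flush_check() throws the whole tree away on the next lookup that notices
 * base->flush_seq no longer matches it.
 */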
static inline void flush_check(struct inet_peer_base *base, int family)
{
        atomic_t *fp = inetpeer_seq_ptr(family);
        if (unlikely(base->flush_seq != atomic_read(fp))) {
                inetpeer_invalidate_tree(base);
                base->flush_seq = atomic_read(fp);
        }
}
#define PEER_MAXDEPTH 40        /* maximal depth of tree */
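/* Delayed-work garbage collector: detach the pending list under gc_lock, free
 * the entries whose refcount has dropped to zero, and put whatever is still
 * referenced back on gc_list for another pass after gc_delay.
 */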
        spin_lock_bh(&gc_lock);
        list_replace_init(&gc_list, &list);
        spin_unlock_bh(&gc_lock);

        if (list_empty(&list))
                return;

        /* ... walk the detached list, freeing entries whose refcount is zero ... */

        if (list_empty(&list))
                return;

        spin_lock_bh(&gc_lock);
        list_splice(&list, &gc_list);
        spin_unlock_bh(&gc_lock);
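/* addr_compare(): lexicographic comparison of the one (IPv4) or four (IPv6)
 * 32-bit words of an inetpeer_addr; returns -1, 0 or 1.
 */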
        for (i = 0; i < n; i++) {
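/* Writers hold base->lock (a seqlock); rcu_deref_locked() lets lockdep verify
 * that when a protected pointer is dereferenced on the write side.
 */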
#define rcu_deref_locked(X, BASE) \
        rcu_dereference_protected(X, lockdep_is_held(&(BASE)->lock.lock))
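/* lookup(): AVL-tree search run with the pool lock held. Every child pointer
 * visited on the way down is pushed on 'stack', so a subsequent insertion or
 * rebalance can reuse the path; evaluates to the matching node, or to
 * peer_avl_empty on a miss.
 */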
#define lookup(_daddr, _stack, _base) \
({ \
        struct inet_peer *u; \
        struct inet_peer __rcu **v; \
        stackptr = _stack; \
        *stackptr++ = &_base->root; \
        for (u = rcu_deref_locked(_base->root, _base); u != peer_avl_empty; ) { \
                int cmp = addr_compare(_daddr, &u->daddr); \
                if (cmp == 0) \
                        break; \
                v = (cmp == -1) ? &u->avl_left : &u->avl_right; \
                *stackptr++ = v; \
                u = rcu_deref_locked(*v, _base); \
        } \
        u; \
})
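/* lookup_rcu(): lockless lookup under rcu_read_lock(). A concurrent writer
 * may rotate the tree while we descend, so the walk is bounded by
 * PEER_MAXDEPTH links and may miss an entry that does exist; every pointer
 * followed is nevertheless valid thanks to RCU.
 */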
                int cmp = addr_compare(daddr, &u->daddr);
                if (cmp == 0) {
                        /* don't take a reference on an entry being deleted (refcnt == -1) */
                        if (!atomic_add_unless(&u->refcnt, 1, -1))
                                u = NULL;
                        return u;
                }
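/* lookup_rightempty(): descend to the rightmost node of start's left subtree
 * (its in-order predecessor), again recording the path in 'stack'; used when
 * unlinking a node that has a left child.
 */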
#define lookup_rightempty(start, base) \
({ \
        struct inet_peer *u; \
        struct inet_peer __rcu **v; \
        *stackptr++ = &start->avl_left; \
        v = &start->avl_left; \
        for (u = rcu_deref_locked(*v, base); \
             u->avl_right != peer_avl_empty_rcu; ) { \
                v = &u->avl_right; \
                *stackptr++ = v; \
                u = rcu_deref_locked(*v, base); \
        } \
        u; \
})
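/* peer_avl_rebalance(): walk back up the path recorded in stack[] and restore
 * the AVL invariant (children's heights differ by at most one) with single or
 * double rotations, updating avl_height as it goes.
 */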
        while (stackend > stack) {
                } else if (rh > lh + 1) {       /* right subtree is two levels higher */
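/* link_to_pool(): initialize a new leaf and publish it with
 * rcu_assign_pointer() into the slot left at the top of the lookup stack,
 * then rebalance along the recorded path. Runs with the pool lock held.
 */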
#define link_to_pool(n, base) \
do { \
        n->avl_height = 1; \
        n->avl_left = peer_avl_empty_rcu; \
        n->avl_right = peer_avl_empty_rcu; \
        rcu_assign_pointer(**--stackptr, n); \
        peer_avl_rebalance(stack, stackptr, base); \
} while (0)
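/* unlink_from_pool(): remove a node whose refcount has been switched to -1.
 * If it has a left child, its in-order predecessor (lookup_rightempty()) is
 * spliced into its place; the recorded path is then rebalanced.
 */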
        peer_avl_rebalance(stack, stackptr, base);
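/* inet_peer_gc(): called from inet_getpeer() when an exact lookup misses, to
 * reclaim unreferenced entries along the just-traversed path. The permitted
 * idle time (ttl) shrinks linearly from inet_peer_maxttl toward
 * inet_peer_minttl as base->total approaches inet_peer_threshold, and drops
 * to zero once the threshold is reached.
 */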
        if (base->total >= inet_peer_threshold)
                ttl = 0; /* be aggressive */
        else
                ttl = inet_peer_maxttl
                                - (inet_peer_maxttl - inet_peer_minttl) / HZ *
                                        base->total / inet_peer_threshold * HZ;
        stackptr--; /* last stack slot is peer_avl_empty */
        while (stackptr > stack) {
                /* ... chain entries with a zero refcount and an expired dtime
                 * on gchead, switching their refcnt to -1 ...
                 */
        }
        while ((p = gchead) != NULL) {
                gchead = p->gc_next;
                cnt++;
                unlink_from_pool(p, base, stack);
        }
        return cnt;
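/* inet_getpeer(): find (or, if create is non-zero, create) the inet_peer
 * entry for daddr. A lockless RCU lookup is tried first; the write side of
 * base->lock is taken only when that misses and either a writer may have
 * raced with us or a new entry has to be inserted.
 */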
        int invalidated, gccnt = 0;

        flush_check(base, daddr->family);

        /* Attempt a lockless lookup first; a concurrent writer can make us miss. */
        rcu_read_lock();
        sequence = read_seqbegin(&base->lock);
        p = lookup_rcu(daddr, base);
        invalidated = read_seqretry(&base->lock, sequence);
        rcu_read_unlock();

        if (p)
                return p;

        /* If no writer changed the tree during our lookup, the miss is real. */
        if (!create && !invalidated)
                return NULL;

        /* Retry an exact lookup, this time under the write side of the lock. */
        write_seqlock_bh(&base->lock);
relookup:
        p = lookup(daddr, stack, base);
        /* ... on a miss, reclaim old entries once before allocating ... */
        gccnt = inet_peer_gc(base, stack, stackptr);
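/* inet_peer_xrlim_allow(): token-bucket rate limiter shared by ICMPv4 and
 * ICMPv6. Tokens accrue at one per jiffy since rate_last, capped at
 * XRLIM_BURST_FACTOR * timeout; a transmission is allowed when at least
 * 'timeout' tokens are available and then consumes 'timeout' of them.
 */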
#define XRLIM_BURST_FACTOR 6
        unsigned long now, token;
        bool rc = false;
        /* ... add the jiffies elapsed since rate_last to token, capped at the burst limit ... */
        if (token >= timeout) {
                token -= timeout;
                rc = true;
        }
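/* When a tree is invalidated its nodes cannot be freed straight away, since
 * some may still be referenced; an RCU callback queues them on gc_list under
 * gc_lock and leaves the rest to the delayed gc worker.
 */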
        spin_lock_bh(&gc_lock);
        list_add_tail(&p->gc_list, &gc_list);
        spin_unlock_bh(&gc_lock);