	__free_pages(page, order);
}

-#define need_reserve_slab_rcu \
-	(sizeof(((struct page *)NULL)->lru) < sizeof(struct rcu_head))
-
static void rcu_free_slab(struct rcu_head *h)
{
-	struct page *page;
-
-	if (need_reserve_slab_rcu)
-		page = virt_to_head_page(h);
-	else
-		page = container_of((struct list_head *)h, struct page, lru);
+	struct page *page = container_of(h, struct page, rcu_head);
	__free_slab(page->slab_cache, page);
}
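/*
 * Illustrative sketch, not part of the patch: the rewritten rcu_free_slab()
 * above is the standard embedded-rcu_head idiom.  An rcu_head inside the
 * object is handed to call_rcu(); the callback recovers the containing
 * object with container_of() and frees it after a grace period.  The names
 * struct foo, foo_rcu_free() and foo_release() are hypothetical stand-ins
 * for struct page, rcu_free_slab() and free_slab().
 */
#include <linux/rcupdate.h>
#include <linux/slab.h>

struct foo {
	int data;
	struct rcu_head rcu;	/* storage reclaimed only after a grace period */
};

static void foo_rcu_free(struct rcu_head *head)
{
	struct foo *f = container_of(head, struct foo, rcu);

	kfree(f);
}

static void foo_release(struct foo *f)
{
	/* Readers inside rcu_read_lock() sections may still hold a pointer to f. */
	call_rcu(&f->rcu, foo_rcu_free);
}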

static void free_slab(struct kmem_cache *s, struct page *page)
{
	if (unlikely(s->flags & SLAB_TYPESAFE_BY_RCU)) {
-		struct rcu_head *head;
-
-		if (need_reserve_slab_rcu) {
-			int order = compound_order(page);
-			int offset = (PAGE_SIZE << order) - s->reserved;
-
-			VM_BUG_ON(s->reserved != sizeof(*head));
-			head = page_address(page) + offset;
-		} else {
-			head = &page->rcu_head;
-		}
-
-		call_rcu(head, rcu_free_slab);
+		call_rcu(&page->rcu_head, rcu_free_slab);
	} else
		__free_slab(s, page);
}
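/*
 * Usage sketch, not part of the patch: the call_rcu() path in free_slab()
 * above is taken only for caches created with SLAB_TYPESAFE_BY_RCU, which
 * guarantees the slab page is not handed back to the page allocator until
 * a grace period has elapsed (objects may still be reused within the cache
 * sooner).  "foo_cache", struct foo and foo_cache_init() are hypothetical.
 */
#include <linux/errno.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

struct foo {
	spinlock_t lock;
	int refcount;
};

static struct kmem_cache *foo_cache;

static int foo_cache_init(void)
{
	foo_cache = kmem_cache_create("foo", sizeof(struct foo), 0,
				      SLAB_TYPESAFE_BY_RCU, NULL);
	return foo_cache ? 0 : -ENOMEM;
}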
	s->random = get_random_long();
#endif

-	if (need_reserve_slab_rcu && (s->flags & SLAB_TYPESAFE_BY_RCU))
-		s->reserved = sizeof(struct rcu_head);
-
	if (!calculate_sizes(s, -1))
		goto error;
	if (disable_higher_order_debug) {