mm: Convert huge_memory to XArray
author     Matthew Wilcox <willy@infradead.org>
           Mon, 4 Dec 2017 15:16:10 +0000 (10:16 -0500)
committer  Matthew Wilcox <willy@infradead.org>
           Sun, 21 Oct 2018 14:46:38 +0000 (10:46 -0400)
Quite a straightforward conversion: replace the radix tree lookup in split_huge_page_to_list() with an XA_STATE and xas_load(), and update comments that still refer to the radix tree.

Signed-off-by: Matthew Wilcox <willy@infradead.org>
mm/huge_memory.c

index 533f9b00147d267644bcbf98da717329fb07c38f..9eb79c384616ea72d42c39c8f04013caf350d2f2 100644
@@ -2443,13 +2443,13 @@ static void __split_huge_page(struct page *page, struct list_head *list,
        ClearPageCompound(head);
        /* See comment in __split_huge_page_tail() */
        if (PageAnon(head)) {
-               /* Additional pin to radix tree of swap cache */
+               /* Additional pin to swap cache */
                if (PageSwapCache(head))
                        page_ref_add(head, 2);
                else
                        page_ref_inc(head);
        } else {
-               /* Additional pin to radix tree */
+               /* Additional pin to page cache */
                page_ref_add(head, 2);
                xa_unlock(&head->mapping->i_pages);
        }
@@ -2561,7 +2561,7 @@ bool can_split_huge_page(struct page *page, int *pextra_pins)
 {
        int extra_pins;
 
-       /* Additional pins from radix tree */
+       /* Additional pins from page cache */
        if (PageAnon(page))
                extra_pins = PageSwapCache(page) ? HPAGE_PMD_NR : 0;
        else
@@ -2657,17 +2657,14 @@ int split_huge_page_to_list(struct page *page, struct list_head *list)
        spin_lock_irqsave(zone_lru_lock(page_zone(head)), flags);
 
        if (mapping) {
-               void **pslot;
+               XA_STATE(xas, &mapping->i_pages, page_index(head));
 
-               xa_lock(&mapping->i_pages);
-               pslot = radix_tree_lookup_slot(&mapping->i_pages,
-                               page_index(head));
                /*
-                * Check if the head page is present in radix tree.
+                * Check if the head page is present in page cache.
                 * We assume all tail are present too, if head is there.
                 */
-               if (radix_tree_deref_slot_protected(pslot,
-                                       &mapping->i_pages.xa_lock) != head)
+               xa_lock(&mapping->i_pages);
+               if (xas_load(&xas) != head)
                        goto fail;
        }
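
The last hunk is the substantive change: an XA_STATE captures the array and index up front, and xas_load() under xa_lock() replaces the radix_tree_lookup_slot() / radix_tree_deref_slot_protected() pair. Below is a minimal sketch of that lookup pattern in isolation, assuming kernel context; the helper name is hypothetical and not part of this patch.

#include <linux/xarray.h>
#include <linux/mm.h>
#include <linux/fs.h>

/*
 * Hypothetical helper, not part of the patch: check whether @page is still
 * what the page cache holds at its index.  XA_STATE() records the array and
 * index; xas_load() does the lookup while the i_pages lock is held, which is
 * the same pattern split_huge_page_to_list() now uses.
 */
static bool page_still_in_cache(struct address_space *mapping,
				struct page *page)
{
	XA_STATE(xas, &mapping->i_pages, page_index(page));
	bool present;

	xa_lock(&mapping->i_pages);
	present = (xas_load(&xas) == page);	/* load under the lock */
	xa_unlock(&mapping->i_pages);

	return present;
}

Because the xa_state carries both the index and the walk state, there is no slot pointer to look up and dereference separately; the lock is simply taken around the load.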