[ARM] cache align destination pointer when copying memory for some processors
[linux-2.6-omap-h63xx.git] / mm / slub.c
index d379b782fc8325cec775bc1daca939bbe06d6d75..0987d1cd943cc88d626a7d1b1097ba1f83243591 100644
--- a/mm/slub.c
+++ b/mm/slub.c
@@ -2726,9 +2726,10 @@ size_t ksize(const void *object)
 
        page = virt_to_head_page(object);
 
-       if (unlikely(!PageSlab(page)))
+       if (unlikely(!PageSlab(page))) {
+               WARN_ON(!PageCompound(page));
                return PAGE_SIZE << compound_order(page);
-
+       }
        s = page->slab;
 
 #ifdef CONFIG_SLUB_DEBUG
@@ -3762,7 +3763,7 @@ static int any_slab_objects(struct kmem_cache *s)
                if (!n)
                        continue;
 
-               if (atomic_read(&n->total_objects))
+               if (atomic_long_read(&n->total_objects))
                        return 1;
        }
        return 0;
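
For context, a minimal sketch of what the two hunks above change, assuming the SLUB definitions in this tree; it is not part of the patch, and the example_* names are hypothetical.

#include <linux/mm.h>
#include <linux/slab.h>

/*
 * First hunk: the non-slab path of ksize().  kmalloc() requests too large
 * for the slab caches come straight from the page allocator as compound
 * pages, so a !PageSlab page reaching ksize() should always be a compound
 * page; the added WARN_ON() flags anything else, i.e. a pointer that was
 * never returned by kmalloc().
 */
static size_t example_ksize_nonslab(const void *object)
{
	struct page *page = virt_to_head_page(object);

	WARN_ON(!PageCompound(page));
	return PAGE_SIZE << compound_order(page);
}

/*
 * Second hunk: a type fix.  The per-node object counter in
 * struct kmem_cache_node (under CONFIG_SLUB_DEBUG) is an atomic_long_t,
 * so atomic_read(), which expects an atomic_t, was the wrong accessor;
 * atomic_long_read() matches the declared type.
 */
static long example_node_objects(atomic_long_t *total_objects)
{
	return atomic_long_read(total_objects);
}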