Now that vma_alloc_folio() aligns the address internally, drop the redundant HPAGE_PMD_MASK alignment at the callsite. Signed-off-by: Michael S. Tsirkin --- mm/huge_memory.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mm/huge_memory.c b/mm/huge_memory.c index 970e077019b7..d689e6491ddb 100644 --- a/mm/huge_memory.c +++ b/mm/huge_memory.c @@ -1337,7 +1337,7 @@ static struct folio *vma_alloc_anon_folio_pmd(struct vm_area_struct *vma, const int order = HPAGE_PMD_ORDER; struct folio *folio; - folio = vma_alloc_folio(gfp, order, vma, addr & HPAGE_PMD_MASK); + folio = vma_alloc_folio(gfp, order, vma, addr); if (unlikely(!folio)) { count_vm_event(THP_FAULT_FALLBACK); -- MST