agp_free_memory(new);
return NULL;
}
- new->memory[i] = virt_to_phys(addr);
+ new->memory[i] =
+ agp_bridge->driver->mask_memory(virt_to_phys(addr), type);
new->page_count++;
}
u32 tmp;
u32 agp3;
- for_each_pci_dev(device) {
+ while ((device = pci_find_device(PCI_ANY_ID, PCI_ANY_ID, device)) != NULL) {
cap_ptr = pci_find_capability(device, PCI_CAP_ID_AGP);
if (!cap_ptr)
continue;
if (agp_v3)
mode *= 4;
- for_each_pci_dev(device) {
+ while ((device = pci_find_device(PCI_ANY_ID, PCI_ANY_ID, device)) != NULL) {
u8 agp = pci_find_capability(device, PCI_CAP_ID_AGP);
if (!agp)
continue;
return NULL;
map_page_into_agp(page);
-
+
get_page(page);
SetPageLocked(page);
atomic_inc(&agp_bridge->current_memory_agp);
page = virt_to_page(addr);
unmap_page_from_agp(page);
-
put_page(page);
unlock_page(page);
free_page((unsigned long)addr);
EXPORT_SYMBOL(agp_enable);
+#ifdef CONFIG_SMP
static void ipi_handler(void *null)
{
flush_agp_cache();
}
+#endif /* CONFIG_SMP */
void global_cache_flush(void)
{
+#ifdef CONFIG_SMP
if (on_each_cpu(ipi_handler, NULL, 1, 1) != 0)
panic(PFX "timed out waiting for the other CPUs!\n");
+#else
+ flush_agp_cache(); /* UP: no other CPUs, a single local flush suffices */
+#endif /* CONFIG_SMP */
}
EXPORT_SYMBOL(global_cache_flush);