.type bcopy,@function
#ifdef __KERNEL__
+ .globl __memcpy_begin
+__memcpy_begin:
+
+ .globl __memcpy
+ .type __memcpy,@function
+
memcpy_private:
+__memcpy:
memcpy: mov ASI_P, asi_src ! IEU0 Group
brnz,pt %o2, __memcpy_entry ! CTI
mov ASI_P, asi_dest ! IEU1
or %g3, %lo(NOP), %g3; \
stw %g3, [%g2 + 0x4]; \
flush %g2;
+/*
+ * ULTRA3_PCACHE_DO_NOP(symbol): overwrite the P-cache optimization
+ * instructions at symbol##_nop_1_6 (six 4-byte slots, offsets
+ * 0x00-0x14) and symbol##_nop_2_3 (three slots, 0x00-0x08) with NOP.
+ * %g2 holds the NOP encoding (sethi %hi(NOP) -- low bits are zero).
+ * Stores are flushed from the I-cache one doubleword at a time,
+ * matching each pair of 4-byte stores with one flush of their line.
+ *
+ * NOTE(review): the second store of the third pair previously wrote
+ * [%g1 + 0x04] again, leaving slot 6 (offset 0x14) unpatched even
+ * though its doubleword was flushed; corrected to 0x14 below.
+ */
+#define ULTRA3_PCACHE_DO_NOP(symbol) \
+	sethi	%hi(symbol##_nop_1_6), %g1; \
+	or	%g1, %lo(symbol##_nop_1_6), %g1; \
+	sethi	%hi(NOP), %g2; \
+	stw	%g2, [%g1 + 0x00]; \
+	stw	%g2, [%g1 + 0x04]; \
+	flush	%g1 + 0x00; \
+	stw	%g2, [%g1 + 0x08]; \
+	stw	%g2, [%g1 + 0x0c]; \
+	flush	%g1 + 0x08; \
+	stw	%g2, [%g1 + 0x10]; \
+	stw	%g2, [%g1 + 0x14]; /* sixth slot; was 0x04 (typo) */ \
+	flush	%g1 + 0x10; \
+	sethi	%hi(symbol##_nop_2_3), %g1; \
+	or	%g1, %lo(symbol##_nop_2_3), %g1; \
+	stw	%g2, [%g1 + 0x00]; \
+	stw	%g2, [%g1 + 0x04]; \
+	flush	%g1 + 0x00; \
+	stw	%g2, [%g1 + 0x08]; \
+	flush	%g1 + 0x08;
+
+#include <asm/dcu.h>
/*
 * cheetah_patch_copyops: boot-time code patching for Cheetah
 * (UltraSPARC-III) CPUs.  Redirects the generic user-copy entry
 * points to their U3-optimized variants via ULTRA3_DO_PATCH
 * (defined elsewhere in this file -- presumably rewrites the entry
 * instructions; confirm against its definition), then unconditionally
 * NOPs out the P-cache prefetch optimization sites in the U3 copy
 * routines.  Clobbers %g1-%g3 (and %o3 if the #if 0 path is enabled).
 */
	.globl	cheetah_patch_copyops
cheetah_patch_copyops:
	ULTRA3_DO_PATCH(__copy_from_user, U3copy_from_user)
	ULTRA3_DO_PATCH(__copy_to_user, U3copy_to_user)
	ULTRA3_DO_PATCH(__copy_in_user, U3copy_in_user)
+#if 0 /* Causes data corruption, nop out the optimization
+       * for now -DaveM
+       */
+	ldxa	[%g0] ASI_DCU_CONTROL_REG, %g3	! read DCU control register
+	sethi	%uhi(DCU_PE), %o3		! build 64-bit DCU_PE mask
+	sllx	%o3, 32, %o3			! (P-cache enable lives in upper 32 bits)
+	andcc	%g3, %o3, %g0
+	be,pn	%xcc, pcache_disabled		! P-cache off: skip the NOP patching
+	 nop
+#endif
+	! The check above is disabled, so the P-cache optimization is
+	! always NOP'd out, whether or not the P-cache is enabled.
+	ULTRA3_PCACHE_DO_NOP(U3memcpy)
+	ULTRA3_PCACHE_DO_NOP(U3copy_from_user)
+	ULTRA3_PCACHE_DO_NOP(U3copy_to_user)
+	ULTRA3_PCACHE_DO_NOP(cheetah_copy_user_page)
+#if 0
+pcache_disabled:
+#endif
	retl
	 nop
#undef BRANCH_ALWAYS
FPU_RETL
#ifdef __KERNEL__
+ .globl __memcpy_end
+__memcpy_end:
+
.section .fixup
.align 4
VIScopyfixup_reto2: