Diffstat (limited to 'include/asm-ia64/sn/rw_mmr.h')
-rw-r--r-- | include/asm-ia64/sn/rw_mmr.h | 56
1 file changed, 5 insertions, 51 deletions
diff --git a/include/asm-ia64/sn/rw_mmr.h b/include/asm-ia64/sn/rw_mmr.h
index f40fd1a..2d78f4c 100644
--- a/include/asm-ia64/sn/rw_mmr.h
+++ b/include/asm-ia64/sn/rw_mmr.h
@@ -3,15 +3,14 @@
  * License. See the file "COPYING" in the main directory of this archive
  * for more details.
  *
- * Copyright (C) 2002-2004 Silicon Graphics, Inc. All Rights Reserved.
+ * Copyright (C) 2002-2006 Silicon Graphics, Inc. All Rights Reserved.
  */
 #ifndef _ASM_IA64_SN_RW_MMR_H
 #define _ASM_IA64_SN_RW_MMR_H
 
 
 /*
- * This file contains macros used to access MMR registers via
- * uncached physical addresses.
+ * This file contains functions that access MMRs via uncached physical addresses.
  *	pio_phys_read_mmr  - read an MMR
  *	pio_phys_write_mmr - write an MMR
  *	pio_atomic_phys_write_mmrs - atomically write 1 or 2 MMRs with psr.ic=0
@@ -22,53 +21,8 @@
  */
 
 
-extern inline long
-pio_phys_read_mmr(volatile long *mmr)
-{
-	long val;
-	asm volatile
-	    ("mov r2=psr;;"
-	     "rsm psr.i | psr.dt;;"
-	     "srlz.i;;"
-	     "ld8.acq %0=[%1];;"
-	     "mov psr.l=r2;;"
-	     "srlz.i;;"
-	     : "=r"(val)
-	     : "r"(mmr)
-	     : "r2");
-	return val;
-}
-
-
-
-extern inline void
-pio_phys_write_mmr(volatile long *mmr, long val)
-{
-	asm volatile
-	    ("mov r2=psr;;"
-	     "rsm psr.i | psr.dt;;"
-	     "srlz.i;;"
-	     "st8.rel [%0]=%1;;"
-	     "mov psr.l=r2;;"
-	     "srlz.i;;"
-	     :: "r"(mmr), "r"(val)
-	     : "r2", "memory");
-}
-
-extern inline void
-pio_atomic_phys_write_mmrs(volatile long *mmr1, long val1, volatile long *mmr2, long val2)
-{
-	asm volatile
-	    ("mov r2=psr;;"
-	     "rsm psr.i | psr.dt | psr.ic;;"
-	     "cmp.ne p9,p0=%2,r0;"
-	     "srlz.i;;"
-	     "st8.rel [%0]=%1;"
-	     "(p9) st8.rel [%2]=%3;;"
-	     "mov psr.l=r2;;"
-	     "srlz.i;;"
-	     :: "r"(mmr1), "r"(val1), "r"(mmr2), "r"(val2)
-	     : "p9", "r2", "memory");
-}
+extern long pio_phys_read_mmr(volatile long *mmr);
+extern void pio_phys_write_mmr(volatile long *mmr, long val);
+extern void pio_atomic_phys_write_mmrs(volatile long *mmr1, long val1, volatile long *mmr2, long val2);
 
 #endif /* _ASM_IA64_SN_RW_MMR_H */
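
Usage sketch (not part of the commit): callers use the three routines exactly as they did when they were inline; only the definitions moved out of the header. The wrapper function and MMR pointers below are hypothetical placeholders, and real callers must pass uncached physical addresses (0x8000..., as the header comment notes).

#include <asm/sn/rw_mmr.h>

/* Hypothetical example: read-modify-write one MMR, then update a pair atomically. */
static void example_mmr_usage(volatile long *mmr_a, volatile long *mmr_b)
{
	long val;

	val = pio_phys_read_mmr(mmr_a);		/* read an MMR */
	pio_phys_write_mmr(mmr_a, val | 0x1L);	/* write an MMR */

	/*
	 * Atomically write two MMRs with psr.ic=0; pass NULL as the
	 * second pointer to write only the first MMR.
	 */
	pio_atomic_phys_write_mmrs(mmr_a, 0L, mmr_b, 0L);
}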