 arch/arm/cpu/armv8/cache.S    | 24 ++++++++++++++++++++++++
 arch/arm/cpu/armv8/cache_v8.c |  2 +-
 arch/arm/include/asm/system.h | 15 +++++++++++++++
 3 files changed, 40 insertions(+), 1 deletion(-)
diff --git a/arch/arm/cpu/armv8/cache.S b/arch/arm/cpu/armv8/cache.S
index f1deaa7..7cba308 100644
--- a/arch/arm/cpu/armv8/cache.S
+++ b/arch/arm/cpu/armv8/cache.S
@@ -138,6 +138,30 @@ ENTRY(__asm_flush_dcache_range)
dsb sy
ret
ENDPROC(__asm_flush_dcache_range)
+/*
+ * void __asm_invalidate_dcache_range(start, end)
+ *
+ * Invalidate the data cache in the range [start, end)
+ *
+ * x0: start address
+ * x1: end address
+ */
+ENTRY(__asm_invalidate_dcache_range)
+ mrs x3, ctr_el0
+	ubfm	x3, x3, #16, #19	/* CTR_EL0.DminLine, bits [19:16] */
+ mov x2, #4
+ lsl x2, x2, x3 /* cache line size */
+
+ /* x2 <- minimal cache line size in cache system */
+ sub x3, x2, #1
+	bic	x0, x0, x3		/* align start down to a line boundary */
+1: dc ivac, x0 /* invalidate data or unified cache */
+ add x0, x0, x2
+ cmp x0, x1
+ b.lo 1b
+ dsb sy
+ ret
+ENDPROC(__asm_invalidate_dcache_range)
/*
* void __asm_invalidate_icache_all(void)
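
For reference, here is a minimal C sketch of what the new assembly routine does (illustrative only, not part of the patch; the function name and inline asm are this note's own). CTR_EL0.DminLine (bits 19:16) is the log2 of the smallest data cache line size in words, so the line size in bytes is 4 << DminLine, matching the ubfm/lsl pair above:

/* Illustrative C equivalent of __asm_invalidate_dcache_range() above;
 * a sketch for this note, not code from the patch.
 */
static void invalidate_dcache_range_sketch(unsigned long start,
					   unsigned long end)
{
	unsigned long ctr, line, addr;

	/* CTR_EL0.DminLine = log2(smallest D-cache line size in words) */
	asm volatile("mrs %0, ctr_el0" : "=r" (ctr));
	line = 4UL << ((ctr >> 16) & 0xf);	/* line size in bytes */

	addr = start & ~(line - 1);	/* round start down to a line */
	do {
		/* invalidate one line by VA to the point of coherency */
		asm volatile("dc ivac, %0" : : "r" (addr) : "memory");
		addr += line;
	} while (addr < end);		/* b.lo: unsigned compare */

	asm volatile("dsb sy" : : : "memory");	/* wait for completion */
}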
diff --git a/arch/arm/cpu/armv8/cache_v8.c b/arch/arm/cpu/armv8/cache_v8.c
index bd1c3e0..adc7e17 100644
--- a/arch/arm/cpu/armv8/cache_v8.c
+++ b/arch/arm/cpu/armv8/cache_v8.c
@@ -446,7 +446,7 @@ inline void flush_dcache_all(void)
*/
void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
- __asm_flush_dcache_range(start, stop);
+ __asm_invalidate_dcache_range(start, stop);
}
/*
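
This one-line change is the substance of the fix: __asm_flush_dcache_range() cleans (writes back) each line before invalidating it, so stale dirty lines could be written out over data a DMA engine had just placed in memory, while a pure invalidate discards the cached copies instead. A minimal usage sketch follows; dev_start_dma_rx(), dev_dma_done(), process() and DEV_RX_BUF_SIZE are hypothetical names for illustration (ARCH_DMA_MINALIGN and __aligned() are existing U-Boot helpers):

/* Hypothetical DMA-receive pattern built on the corrected API. */
static char rx_buf[DEV_RX_BUF_SIZE] __aligned(ARCH_DMA_MINALIGN);

static void receive_one_buffer(void)
{
	dev_start_dma_rx(rx_buf, sizeof(rx_buf));	/* device writes to memory */
	while (!dev_dma_done())
		;

	/*
	 * Drop any stale cached copies so the CPU reads what the device
	 * wrote. A flush here could push old dirty lines back over the
	 * fresh DMA data.
	 */
	invalidate_dcache_range((unsigned long)rx_buf,
				(unsigned long)rx_buf + sizeof(rx_buf));

	process(rx_buf, sizeof(rx_buf));
}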
diff --git a/arch/arm/include/asm/system.h b/arch/arm/include/asm/system.h
index 9c3261c..79bd19a 100644
--- a/arch/arm/include/asm/system.h
+++ b/arch/arm/include/asm/system.h
@@ -180,6 +180,21 @@ static inline unsigned long read_mpidr(void)
void __asm_flush_dcache_all(void);
void __asm_invalidate_dcache_all(void);
void __asm_flush_dcache_range(u64 start, u64 end);
+
+/**
+ * __asm_invalidate_dcache_range() - Invalidate a range of virtual addresses
+ *
+ * This performs an invalidate from @start to @end - 1. Both addresses
+ * should be cache-line-aligned; otherwise this function rounds the
+ * start address down to a line boundary and may continue past the end
+ * address.
+ *
+ * Data in the address range is evicted from the cache and is not written back
+ * to memory.
+ *
+ * @start: Start address to invalidate
+ * @end: End address to invalidate up to (exclusive)
+ */
+void __asm_invalidate_dcache_range(u64 start, u64 end);
void __asm_invalidate_tlb_all(void);
void __asm_invalidate_icache_all(void);
int __asm_invalidate_l3_dcache(void);
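
The alignment caveat in the new comment is easiest to see with numbers; a worked example, assuming 64-byte cache lines purely for illustration:

/*
 * Example (64-byte lines assumed):
 *
 *	__asm_invalidate_dcache_range(0x1010, 0x1070);
 *
 * The start is rounded down to 0x1000, so the lines at 0x1000 and
 * 0x1040 are invalidated. Bytes 0x1000-0x100f (before @start) and
 * 0x1070-0x107f (past @end) are discarded along with the requested
 * range, so callers should keep unrelated data out of those lines or
 * pass line-aligned addresses.
 */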