author		Masahiro Yamada <yamada.masahiro@socionext.com>	2016-05-17 16:38:08 +0900
committer	Tom Rini <trini@konsulko.com>			2016-05-27 15:47:55 -0400
commit		ba9eb6c7eb3490f72f07bc712f7196fb4e0fe80c (patch)
tree		67323ee0d3ab3ff9e3b3708692a7e463f2576fa1 /arch/arm/cpu/armv8
parent		1a021230d37d4f87ec0ca9f4103b582e415f1b76 (diff)
arm64: rename __asm_flush_dcache_level to __asm_dcache_level
Since 1e6ad55c0582 ("armv8/cache: Change cache invalidate and flush
function"), this routine can be used for both cache flushing and
cache invalidation. So, it is better not to include "flush" in
the name of this routine.
Signed-off-by: Masahiro Yamada <yamada.masahiro@socionext.com>
Diffstat (limited to 'arch/arm/cpu/armv8')
-rw-r--r--	arch/arm/cpu/armv8/cache.S	12
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/arch/arm/cpu/armv8/cache.S b/arch/arm/cpu/armv8/cache.S
index 6aaecf3..46f25e6 100644
--- a/arch/arm/cpu/armv8/cache.S
+++ b/arch/arm/cpu/armv8/cache.S
@@ -14,15 +14,15 @@
 #include <linux/linkage.h>
 
 /*
- * void __asm_flush_dcache_level(level)
+ * void __asm_dcache_level(level)
  *
- * clean and invalidate one level cache.
+ * flush or invalidate one level cache.
  *
  * x0: cache level
  * x1: 0 clean & invalidate, 1 invalidate only
  * x2~x9: clobbered
  */
-ENTRY(__asm_flush_dcache_level)
+ENTRY(__asm_dcache_level)
 	lsl	x12, x0, #1
 	msr	csselr_el1, x12		/* select cache level */
 	isb				/* sync change of cssidr_el1 */
@@ -57,14 +57,14 @@ loop_way:
 	b.ge	loop_set
 
 	ret
-ENDPROC(__asm_flush_dcache_level)
+ENDPROC(__asm_dcache_level)
 
 /*
  * void __asm_flush_dcache_all(int invalidate_only)
  *
  * x0: 0 clean & invalidate, 1 invalidate only
  *
- * clean and invalidate all data cache by SET/WAY.
+ * flush or invalidate all data cache by SET/WAY.
  */
 ENTRY(__asm_dcache_all)
 	mov	x1, x0
@@ -87,7 +87,7 @@ loop_level:
 	and	x12, x12, #7		/* x12 <- cache type */
 	cmp	x12, #2
 	b.lt	skip			/* skip if no cache or icache */
-	bl	__asm_flush_dcache_level	/* x1 = 0 flush, 1 invalidate */
+	bl	__asm_dcache_level	/* x1 = 0 flush, 1 invalidate */
 skip:
 	add	x0, x0, #1		/* increment cache level */
 	cmp	x11, x0
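For context (not part of this patch): the renamed helper is only reached through __asm_dcache_all, and cache.S exposes two thin public entry points that choose between the two behaviours by setting x0 before branching into that common walk. The sketch below shows that dispatch as it typically appears in arch/arm/cpu/armv8/cache.S around this version; treat it as illustrative context rather than part of the diff above.

ENTRY(__asm_flush_dcache_all)
	mov	x0, #0			/* x0 = 0: clean & invalidate */
	b	__asm_dcache_all
ENDPROC(__asm_flush_dcache_all)

ENTRY(__asm_invalidate_dcache_all)
	mov	x0, #0x1		/* x0 = 1: invalidate only */
	b	__asm_dcache_all
ENDPROC(__asm_invalidate_dcache_all)

Because a single set/way loop serves both operations, selecting dc cisw versus dc isw from the flag handed down in x1, the old "flush"-only name was misleading, hence the rename in this commit.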