Diffstat (limited to 'arch')
-rw-r--r--  arch/arm/cpu/armv7/cache_v7.c   | 14
-rw-r--r--  arch/arm/include/asm/armv7.h    | 10
-rw-r--r--  arch/arm/include/asm/bitops.h   | 43
-rw-r--r--  arch/arm/include/asm/macro.h    |  3
-rw-r--r--  arch/arm/include/asm/system.h   | 37
-rw-r--r--  arch/arm/lib/cache-cp15.c       | 14
6 files changed, 111 insertions, 10 deletions
diff --git a/arch/arm/cpu/armv7/cache_v7.c b/arch/arm/cpu/armv7/cache_v7.c
index 0f9d837..e8ee875 100644
--- a/arch/arm/cpu/armv7/cache_v7.c
+++ b/arch/arm/cpu/armv7/cache_v7.c
@@ -68,7 +68,7 @@ static void v7_inval_dcache_level_setway(u32 level, u32 num_sets,
}
}
/* DSB to make sure the operation is complete */
- CP15DSB;
+ DSB;
}
static void v7_clean_inval_dcache_level_setway(u32 level, u32 num_sets,
@@ -96,7 +96,7 @@ static void v7_clean_inval_dcache_level_setway(u32 level, u32 num_sets,
}
}
/* DSB to make sure the operation is complete */
- CP15DSB;
+ DSB;
}
static void v7_maint_dcache_level_setway(u32 level, u32 operation)
@@ -215,7 +215,7 @@ static void v7_dcache_maint_range(u32 start, u32 stop, u32 range_op)
}
/* DSB to make sure the operation is complete */
- CP15DSB;
+ DSB;
}
/* Invalidate TLB */
@@ -228,9 +228,9 @@ static void v7_inval_tlb(void)
/* Invalidate entire instruction TLB */
asm volatile ("mcr p15, 0, %0, c8, c5, 0" : : "r" (0));
/* Full system DSB - make sure that the invalidation is complete */
- CP15DSB;
+ DSB;
/* Full system ISB - make sure the instruction stream sees it */
- CP15ISB;
+ ISB;
}
void invalidate_dcache_all(void)
@@ -343,10 +343,10 @@ void invalidate_icache_all(void)
asm volatile ("mcr p15, 0, %0, c7, c5, 6" : : "r" (0));
/* Full system DSB - make sure that the invalidation is complete */
- CP15DSB;
+ DSB;
/* ISB - make sure the instruction stream sees it */
- CP15ISB;
+ ISB;
}
#else
void invalidate_icache_all(void)
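Note on the hunks above: the maintenance loops are untouched; only the trailing barrier macro changes. The pattern being preserved is maintenance-then-DSB, so that later accesses observe the completed cache operations. A minimal sketch of the by-MVA flavour using the new macro (the function name and the use of CONFIG_SYS_CACHELINE_SIZE here are illustrative, not part of this patch):

	/* Clean+invalidate a buffer by MVA, e.g. before handing it to DMA */
	static void flush_range_example(u32 start, u32 stop)
	{
		u32 mva;

		for (mva = start & ~(CONFIG_SYS_CACHELINE_SIZE - 1);
		     mva < stop; mva += CONFIG_SYS_CACHELINE_SIZE)
			/* DCCIMVAC: clean & invalidate D-cache line by MVA */
			asm volatile ("mcr p15, 0, %0, c7, c14, 1" : : "r" (mva));

		/* Completion barrier, as in the hunks above */
		DSB;
	}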
diff --git a/arch/arm/include/asm/armv7.h b/arch/arm/include/asm/armv7.h
index 58d8b16..cbe7dc1 100644
--- a/arch/arm/include/asm/armv7.h
+++ b/arch/arm/include/asm/armv7.h
@@ -70,6 +70,16 @@
#define CP15DSB asm volatile ("mcr p15, 0, %0, c7, c10, 4" : : "r" (0))
#define CP15DMB asm volatile ("mcr p15, 0, %0, c7, c10, 5" : : "r" (0))
+#ifdef __ARM_ARCH_7A__
+#define ISB asm volatile ("isb" : : : "memory")
+#define DSB asm volatile ("dsb" : : : "memory")
+#define DMB asm volatile ("dmb" : : : "memory")
+#else
+#define ISB CP15ISB
+#define DSB CP15DSB
+#define DMB CP15DMB
+#endif
+
/*
* Workaround for ARM errata # 798870
* Set L2ACTLR[7] to reissue any memory transaction in the L2 that has been
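The new macros select the dedicated barrier instructions when the compiler targets ARMv7-A (it predefines __ARM_ARCH_7A__) and fall back to the CP15 c7 encodings above on older cores; the "memory" clobber additionally stops the compiler from reordering accesses across the barrier. A hedged usage sketch (the descriptor layout and doorbell register are hypothetical):

	#include <asm/armv7.h>
	#include <asm/io.h>

	struct dma_desc {		/* illustrative descriptor layout */
		u32 addr;
		u32 len;
	};

	static void kick_dma(struct dma_desc *desc, void *doorbell,
			     u32 buf, u32 len)
	{
		desc->addr = buf;
		desc->len = len;
		/* DSB: descriptor writes must be visible before the doorbell */
		DSB;
		writel(1, doorbell);
	}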
diff --git a/arch/arm/include/asm/bitops.h b/arch/arm/include/asm/bitops.h
index 597dafb..9b78043 100644
--- a/arch/arm/include/asm/bitops.h
+++ b/arch/arm/include/asm/bitops.h
@@ -95,9 +95,6 @@ static inline int __test_and_change_bit(int nr, volatile void *addr)
return (old & mask) != 0;
}
-extern int find_first_zero_bit(void * addr, unsigned size);
-extern int find_next_zero_bit(void * addr, int size, int offset);
-
/*
* This routine doesn't need to be atomic.
*/
@@ -129,6 +126,43 @@ static inline unsigned long ffz(unsigned long word)
return k;
}
+static inline int find_next_zero_bit(void *addr, int size, int offset)
+{
+ unsigned long *p = ((unsigned long *)addr) + (offset >> 5);
+ unsigned long result = offset & ~31UL;
+ unsigned long tmp;
+
+ if (offset >= size)
+ return size;
+ size -= result;
+ offset &= 31UL;
+ if (offset) {
+ tmp = *(p++);
+ tmp |= ~0UL >> (32-offset);
+ if (size < 32)
+ goto found_first;
+ if (~tmp)
+ goto found_middle;
+ size -= 32;
+ result += 32;
+ }
+ while (size & ~31UL) {
+ tmp = *(p++);
+ if (~tmp)
+ goto found_middle;
+ result += 32;
+ size -= 32;
+ }
+ if (!size)
+ return result;
+ tmp = *p;
+
+found_first:
+ tmp |= ~0UL << size;
+found_middle:
+ return result + ffz(tmp);
+}
+
/*
* hweightN: returns the hamming weight (i.e. the number
* of bits set) of a N-bit word
@@ -138,6 +172,9 @@ static inline unsigned long ffz(unsigned long word)
#define hweight16(x) generic_hweight16(x)
#define hweight8(x) generic_hweight8(x)
+#define find_first_zero_bit(addr, size) \
+ find_next_zero_bit((addr), (size), 0)
+
#define ext2_set_bit test_and_set_bit
#define ext2_clear_bit test_and_clear_bit
#define ext2_test_bit test_bit
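find_next_zero_bit() walks the bitmap a 32-bit word at a time, forcing bits below the starting offset and beyond the final size to 1 so that ffz() only reports a zero inside the valid range; find_first_zero_bit() is now simply the offset-0 case. Typical use (the slot map here is illustrative):

	static unsigned long slot_map[2];	/* 64 slots, bit set = in use */

	static int alloc_slot(void)
	{
		int slot = find_first_zero_bit(slot_map, 64);

		if (slot >= 64)
			return -1;		/* bitmap full */
		slot_map[slot >> 5] |= 1UL << (slot & 31);
		return slot;
	}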
diff --git a/arch/arm/include/asm/macro.h b/arch/arm/include/asm/macro.h
index 3cf3307..9bb0efa 100644
--- a/arch/arm/include/asm/macro.h
+++ b/arch/arm/include/asm/macro.h
@@ -143,6 +143,9 @@ lr .req x30
mov \xreg1, #0x33ff
msr cptr_el2, \xreg1 /* Disable coprocessor traps to EL2 */
+ /* Initialize Generic Timers */
+ msr cntvoff_el2, xzr
+
/* Initialize SCTLR_EL2
*
* setting RES1 bits (29,28,23,22,18,16,11,5,4) to 1
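Zeroing CNTVOFF_EL2 matters because the architecture defines the virtual counter as CNTVCT = CNTPCT - CNTVOFF; clearing the offset while still in EL2 ensures that EL1 software reading the virtual counter sees the physical count. Illustrative read from C (not part of this patch):

	static inline unsigned long read_vct(void)
	{
		unsigned long cnt;

		/* AArch64 virtual counter; equals CNTPCT once CNTVOFF_EL2 == 0 */
		asm volatile("mrs %0, cntvct_el0" : "=r" (cnt));
		return cnt;
	}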
diff --git a/arch/arm/include/asm/system.h b/arch/arm/include/asm/system.h
index 2a5bed2..9cd2f1e 100644
--- a/arch/arm/include/asm/system.h
+++ b/arch/arm/include/asm/system.h
@@ -196,6 +196,28 @@ static inline void set_dacr(unsigned int val)
isb();
}
+#ifdef CONFIG_ARMV7
+/* Short-Descriptor Translation Table Level 1 Bits */
+#define TTB_SECT_NS_MASK (1 << 19)
+#define TTB_SECT_NG_MASK (1 << 17)
+#define TTB_SECT_S_MASK (1 << 16)
+/* Note: TTB AP bits are set elsewhere */
+#define TTB_SECT_TEX(x) ((x & 0x7) << 12)
+#define TTB_SECT_DOMAIN(x) ((x & 0xf) << 5)
+#define TTB_SECT_XN_MASK (1 << 4)
+#define TTB_SECT_C_MASK (1 << 3)
+#define TTB_SECT_B_MASK (1 << 2)
+#define TTB_SECT (2 << 0)
+
+/* options available for data cache on each page */
+enum dcache_option {
+ DCACHE_OFF = TTB_SECT_S_MASK | TTB_SECT_DOMAIN(0) |
+ TTB_SECT_XN_MASK | TTB_SECT,
+ DCACHE_WRITETHROUGH = DCACHE_OFF | TTB_SECT_C_MASK,
+ DCACHE_WRITEBACK = DCACHE_WRITETHROUGH | TTB_SECT_B_MASK,
+ DCACHE_WRITEALLOC = DCACHE_WRITEBACK | TTB_SECT_TEX(1),
+};
+#else
/* options available for data cache on each page */
enum dcache_option {
DCACHE_OFF = 0x12,
@@ -203,6 +225,7 @@ enum dcache_option {
DCACHE_WRITEBACK = 0x1e,
DCACHE_WRITEALLOC = 0x16,
};
+#endif
/* Size of an MMU section */
enum {
@@ -210,6 +233,20 @@ enum {
MMU_SECTION_SIZE = 1 << MMU_SECTION_SHIFT,
};
+#ifdef CONFIG_ARMV7
+/* TTBR0 bits */
+#define TTBR0_BASE_ADDR_MASK 0xFFFFC000
+#define TTBR0_RGN_NC (0 << 3)
+#define TTBR0_RGN_WBWA (1 << 3)
+#define TTBR0_RGN_WT (2 << 3)
+#define TTBR0_RGN_WB (3 << 3)
+/* TTBR0[6] is IRGN[0] and TTBR0[0] is IRGN[1] */
+#define TTBR0_IRGN_NC (0 << 0 | 0 << 6)
+#define TTBR0_IRGN_WBWA (0 << 0 | 1 << 6)
+#define TTBR0_IRGN_WT (1 << 0 | 0 << 6)
+#define TTBR0_IRGN_WB (1 << 0 | 1 << 6)
+#endif
+
/**
* Change the cache settings for a region.
*
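For reference, the new ARMv7 enum values decompose as follows (my arithmetic, in hex):

	DCACHE_OFF          = S | XN | SECT             = 0x10012
	DCACHE_WRITETHROUGH = DCACHE_OFF | C            = 0x1001a
	DCACHE_WRITEBACK    = DCACHE_WRITETHROUGH | B   = 0x1001e
	DCACHE_WRITEALLOC   = DCACHE_WRITEBACK | TEX(1) = 0x1101e

For the first three options the low bits match the legacy constants (0x12/0x1a/0x1e); the ARMv7 variants additionally set the Shareable bit (bit 16) explicitly, and write-allocate now uses TEX=001 with C and B set rather than the legacy 0x16 encoding.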
diff --git a/arch/arm/lib/cache-cp15.c b/arch/arm/lib/cache-cp15.c
index 0291afa..c65e068 100644
--- a/arch/arm/lib/cache-cp15.c
+++ b/arch/arm/lib/cache-cp15.c
@@ -96,9 +96,23 @@ static inline void mmu_setup(void)
dram_bank_mmu_setup(i);
}
+#ifdef CONFIG_ARMV7
+ /* Set TTBR0 */
+ reg = gd->arch.tlb_addr & TTBR0_BASE_ADDR_MASK;
+#if defined(CONFIG_SYS_ARM_CACHE_WRITETHROUGH)
+ reg |= TTBR0_RGN_WT | TTBR0_IRGN_WT;
+#elif defined(CONFIG_SYS_ARM_CACHE_WRITEALLOC)
+ reg |= TTBR0_RGN_WBWA | TTBR0_IRGN_WBWA;
+#else
+ reg |= TTBR0_RGN_WB | TTBR0_IRGN_WB;
+#endif
+ asm volatile("mcr p15, 0, %0, c2, c0, 0"
+ : : "r" (reg) : "memory");
+#else
/* Copy the page table address to cp15 */
asm volatile("mcr p15, 0, %0, c2, c0, 0"
: : "r" (gd->arch.tlb_addr) : "memory");
+#endif
/* Set the access control to all-supervisor */
asm volatile("mcr p15, 0, %0, c3, c0, 0"
: : "r" (~0));
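Worked example of the TTBR0 value this builds for the default write-back case, assuming the page table sits at a 16 KiB-aligned address such as 0x80004000 (the address is illustrative):

	reg = (0x80004000 & TTBR0_BASE_ADDR_MASK)	/* table base       */
	    | TTBR0_RGN_WB				/* 3 << 3  = 0x18   */
	    | TTBR0_IRGN_WB				/* 0x41 (bits 6, 0) */
	    = 0x80004059;

The RGN/IRGN fields mark the hardware page-table walks themselves as outer/inner write-back cacheable, matching how the table memory is actually mapped.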