author    Tom Rini <trini@konsulko.com>   2015-03-01 22:05:54 -0500
committer Tom Rini <trini@konsulko.com>   2015-03-01 22:05:54 -0500
commit    6fa361903cdb673ec325a56125391088da257e81 (patch)
tree      f098e32c64f58070f1d69955806b14dbda571ad6  /arch/arm/include
parent    1da7ce4155d0839d9d56525379493bb0f80b5330 (diff)
parent    306f527eff269e48a98c9d83016df6d6877dbb6a (diff)
Merge branch 'master' of git://git.denx.de/u-boot-samsung
Diffstat (limited to 'arch/arm/include')
-rw-r--r--  arch/arm/include/asm/arch-exynos/cpu.h     |  5
-rw-r--r--  arch/arm/include/asm/arch-exynos/system.h  | 88
-rw-r--r--  arch/arm/include/asm/armv7.h               | 44
3 files changed, 137 insertions(+), 0 deletions(-)
diff --git a/arch/arm/include/asm/arch-exynos/cpu.h b/arch/arm/include/asm/arch-exynos/cpu.h
index 29674ad4da..e7395201ad 100644
--- a/arch/arm/include/asm/arch-exynos/cpu.h
+++ b/arch/arm/include/asm/arch-exynos/cpu.h
@@ -153,6 +153,10 @@
#define EXYNOS5420_CLOCK_BASE 0x10010000
#define EXYNOS5420_POWER_BASE 0x10040000
#define EXYNOS5420_SWRESET 0x10040400
+#define EXYNOS5420_INFORM_BASE 0x10040800
+#define EXYNOS5420_SPARE_BASE 0x10040900
+#define EXYNOS5420_CPU_CONFIG_BASE 0x10042000
+#define EXYNOS5420_CPU_STATUS_BASE 0x10042004
#define EXYNOS5420_SYSREG_BASE 0x10050000
#define EXYNOS5420_TZPC_BASE 0x100E0000
#define EXYNOS5420_WATCHDOG_BASE 0x101D0000
@@ -186,6 +190,7 @@
#define EXYNOS5420_USB3PHY_BASE DEVICE_NOT_AVAILABLE
#define EXYNOS5420_USB_HOST_XHCI_BASE DEVICE_NOT_AVAILABLE
+
#ifndef __ASSEMBLY__
#include <asm/io.h>
/* CPU detection macros */
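
Not part of the commit: a minimal sketch of how the new Exynos5420 PMU
register bases added above might be used to release a secondary core,
assuming the usual Exynos pattern of writing a boot address to a spare
register, setting the core's local-power configuration, and polling its
status. The function name, the per-core stride, and the power-on value
are illustrative assumptions, not taken from this diff.

	#include <asm/io.h>
	#include <asm/arch/cpu.h>

	#define CORE_CONFIG_STRIDE	0x80	/* assumed per-core register spacing */
	#define CORE_LOCAL_PWR_EN	0x3	/* assumed "power on" request value */

	static void example_power_up_core(unsigned int core, unsigned long boot_addr)
	{
		/* Tell the core where to jump once it is released. */
		writel(boot_addr, EXYNOS5420_SPARE_BASE);

		/* Request power-up of the selected core... */
		writel(CORE_LOCAL_PWR_EN,
		       EXYNOS5420_CPU_CONFIG_BASE + core * CORE_CONFIG_STRIDE);

		/* ...and wait until the PMU reports it as powered. */
		while ((readl(EXYNOS5420_CPU_STATUS_BASE + core * CORE_CONFIG_STRIDE) &
			CORE_LOCAL_PWR_EN) != CORE_LOCAL_PWR_EN)
			;
	}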
diff --git a/arch/arm/include/asm/arch-exynos/system.h b/arch/arm/include/asm/arch-exynos/system.h
index 4968d3dd2e..3ffb296a57 100644
--- a/arch/arm/include/asm/arch-exynos/system.h
+++ b/arch/arm/include/asm/arch-exynos/system.h
@@ -37,6 +37,94 @@ struct exynos5_sysreg {
#define USB20_PHY_CFG_HOST_LINK_EN (1 << 0)
+/*
+ * Data Synchronization Barrier acts as a special kind of memory barrier.
+ * No instruction in program order after this instruction executes until
+ * this instruction completes. This instruction completes when:
+ * - All explicit memory accesses before this instruction complete.
+ * - All Cache, Branch predictor and TLB maintenance operations before
+ * this instruction complete.
+ */
+#define dsb() __asm__ __volatile__ ("dsb\n\t" : : );
+
+/*
+ * This instruction causes an event to be signaled to all cores
+ * within a multiprocessor system. If SEV is implemented,
+ * WFE must also be implemented.
+ */
+#define sev() __asm__ __volatile__ ("sev\n\t" : : );
+/*
+ * If the Event Register is not set, WFE suspends execution until
+ * one of the following events occurs:
+ * - an IRQ interrupt, unless masked by the CPSR I-bit
+ * - an FIQ interrupt, unless masked by the CPSR F-bit
+ * - an Imprecise Data abort, unless masked by the CPSR A-bit
+ * - a Debug Entry request, if Debug is enabled
+ * - an Event signaled by another processor using the SEV instruction.
+ * If the Event Register is set, WFE clears it and returns immediately.
+ * If WFE is implemented, SEV must also be implemented.
+ */
+#define wfe() __asm__ __volatile__ ("wfe\n\t" : : );
+
+/* Move 0xd3 value to CPSR register to enable SVC mode */
+#define svc32_mode_en() __asm__ __volatile__ \
+ ("@ I&F disable, Mode: 0x13 - SVC\n\t" \
+ "msr cpsr_c, #0x13|0xC0\n\t" : : )
+
+/* Set program counter with the given value */
+#define set_pc(x) __asm__ __volatile__ ("mov pc, %0\n\t" : : "r"(x))
+
+/* Branch to the given location */
+#define branch_bx(x) __asm__ __volatile__ ("bx %0\n\t" : : "r"(x))
+
+/* Read Main Id register */
+#define mrc_midr(x) __asm__ __volatile__ \
+ ("mrc p15, 0, %0, c0, c0, 0\n\t" : "=r"(x) : )
+
+/* Read Multiprocessor Affinity Register */
+#define mrc_mpafr(x) __asm__ __volatile__ \
+ ("mrc p15, 0, %0, c0, c0, 5\n\t" : "=r"(x) : )
+
+/* Read System Control Register */
+#define mrc_sctlr(x) __asm__ __volatile__ \
+ ("mrc p15, 0, %0, c1, c0, 0\n\t" : "=r"(x) : )
+
+/* Read Auxiliary Control Register */
+#define mrc_auxr(x) __asm__ __volatile__ \
+ ("mrc p15, 0, %0, c1, c0, 1\n\t" : "=r"(x) : )
+
+/* Read L2 Control register */
+#define mrc_l2_ctlr(x) __asm__ __volatile__ \
+ ("mrc p15, 1, %0, c9, c0, 2\n\t" : "=r"(x) : )
+
+/* Read L2 Auxiliary Control register */
+#define mrc_l2_aux_ctlr(x) __asm__ __volatile__ \
+ ("mrc p15, 1, %0, c15, c0, 0\n\t" : "=r"(x) : )
+
+/* Write System Control Register */
+#define mcr_sctlr(x) __asm__ __volatile__ \
+ ("mcr p15, 0, %0, c1, c0, 0\n\t" : : "r"(x))
+
+/* Write Auxiliary Control Register */
+#define mcr_auxr(x) __asm__ __volatile__ \
+ ("mcr p15, 0, %0, c1, c0, 1\n\t" : : "r"(x))
+
+/* Invalidate all instruction caches to PoU */
+#define mcr_icache(x) __asm__ __volatile__ \
+ ("mcr p15, 0, %0, c7, c5, 0\n\t" : : "r"(x))
+
+/* Invalidate unified TLB */
+#define mcr_tlb(x) __asm__ __volatile__ \
+ ("mcr p15, 0, %0, c8, c7, 0\n\t" : : "r"(x))
+
+/* Write L2 Control register */
+#define mcr_l2_ctlr(x) __asm__ __volatile__ \
+ ("mcr p15, 1, %0, c9, c0, 2\n\t" : : "r"(x))
+
+/* Write L2 Auxiliary Control register */
+#define mcr_l2_aux_ctlr(x) __asm__ __volatile__ \
+ ("mcr p15, 1, %0, c15, c0, 0\n\t" : : "r"(x))
+
void set_usbhost_mode(unsigned int mode);
void set_system_display_ctrl(void);
int exynos_lcd_early_init(const void *blob);
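
Not part of the commit: a minimal sketch of how the barrier and CP15
accessor macros introduced in system.h could be combined in low-level
SMP bring-up code. The hold-pen logic, the mailbox variable, and the
name secondary_hold_pen() are illustrative assumptions.

	#include <asm/arch/system.h>

	/* Assumed mailbox written by the boot core before it issues sev(). */
	static volatile unsigned long secondary_boot_addr;

	static void secondary_hold_pen(void)
	{
		unsigned int mpidr;

		/* Identify this core: MPIDR[1:0] is the CPU number within the cluster. */
		mrc_mpafr(mpidr);
		mpidr &= 0x3;

		/* Sleep until the boot core publishes an entry point and signals us. */
		while (!secondary_boot_addr)
			wfe();

		dsb();				/* order the mailbox read before the jump */
		branch_bx(secondary_boot_addr);	/* jump to the released entry point */
	}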
diff --git a/arch/arm/include/asm/armv7.h b/arch/arm/include/asm/armv7.h
index a13da23cf1..dc9561cd2b 100644
--- a/arch/arm/include/asm/armv7.h
+++ b/arch/arm/include/asm/armv7.h
@@ -69,6 +69,50 @@
#define CP15DSB asm volatile ("mcr p15, 0, %0, c7, c10, 4" : : "r" (0))
#define CP15DMB asm volatile ("mcr p15, 0, %0, c7, c10, 5" : : "r" (0))
+/*
+ * Workaround for ARM errata # 798870
+ * Set L2ACTLR[7] to reissue any memory transaction in the L2 that has been
+ * stalled for 1024 cycles to verify that its hazard condition still exists.
+ */
+static inline void v7_enable_l2_hazard_detect(void)
+{
+ uint32_t val;
+
+ /* L2ACTLR[7]: Enable hazard detect timeout */
+ asm volatile ("mrc p15, 1, %0, c15, c0, 0\n\t" : "=r"(val));
+ val |= (1 << 7);
+ asm volatile ("mcr p15, 1, %0, c15, c0, 0\n\t" : : "r"(val));
+}
+
+/*
+ * Workaround for ARM errata # 799270
+ * Ensure that the L2 logic has been used within the previous 256 cycles
+ * before modifying the ACTLR.SMP bit. This is required during boot before
+ * MMU has been enabled, or during a specified reset or power down sequence.
+ */
+static inline void v7_enable_smp(uint32_t address)
+{
+ uint32_t temp, val;
+
+ /* Read auxiliary control register */
+ asm volatile ("mrc p15, 0, %0, c1, c0, 1\n\t" : "=r"(val));
+
+ /* Enable SMP */
+ val |= (1 << 6);
+
+ /* Dummy read to assure L2 access */
+ temp = readl(address);
+ temp &= 0;
+ val |= temp;
+
+ /* Write auxiliary control register */
+ asm volatile ("mcr p15, 0, %0, c1, c0, 1\n\t" : : "r"(val));
+
+ CP15DSB;
+ CP15ISB;
+}
+
+void v7_en_l2_hazard_detect(void);
void v7_outer_cache_enable(void);
void v7_outer_cache_disable(void);
void v7_outer_cache_flush_all(void);
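
Not part of the commit: a hedged sketch of calling the new erratum
workaround and SMP-enable helpers from early CPU setup. The function
name early_cpu_setup() and the choice of 0x10010000 (the Exynos5420
clock controller base from this diff) as the L2-backed dummy-read
address are illustrative assumptions; the address only needs to be
readable and covered by the L2.

	#include <asm/io.h>
	#include <asm/armv7.h>

	void early_cpu_setup(void)
	{
		/* Erratum 798870: make stalled L2 transactions re-check their hazard. */
		v7_enable_l2_hazard_detect();

		/*
		 * Erratum 799270: touch an L2-backed location just before flipping
		 * ACTLR.SMP, then set the bit with the required barriers.
		 */
		v7_enable_smp(0x10010000);	/* assumed readable, L2-covered address */
	}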