Diffstat (limited to 'arch/arm/boot')
-rw-r--r--   arch/arm/boot/compressed/Makefile        |  2
-rw-r--r--   arch/arm/boot/compressed/head.S          | 61
-rw-r--r--   arch/arm/boot/compressed/misc.c          | 12
-rw-r--r--   arch/arm/boot/compressed/vmlinux.lds.in  |  1
4 files changed, 44 insertions(+), 32 deletions(-)
diff --git a/arch/arm/boot/compressed/Makefile b/arch/arm/boot/compressed/Makefile
index 8ebbb511c783..0c6852d93506 100644
--- a/arch/arm/boot/compressed/Makefile
+++ b/arch/arm/boot/compressed/Makefile
@@ -74,7 +74,7 @@ ZTEXTADDR	:= $(CONFIG_ZBOOT_ROM_TEXT)
 ZBSSADDR	:= $(CONFIG_ZBOOT_ROM_BSS)
 else
 ZTEXTADDR	:= 0
-ZBSSADDR	:= ALIGN(4)
+ZBSSADDR	:= ALIGN(8)
 endif
 
 SEDFLAGS	= s/TEXT_START/$(ZTEXTADDR)/;s/BSS_START/$(ZBSSADDR)/
diff --git a/arch/arm/boot/compressed/head.S b/arch/arm/boot/compressed/head.S
index 84ac4d656310..6fdf4abb718b 100644
--- a/arch/arm/boot/compressed/head.S
+++ b/arch/arm/boot/compressed/head.S
@@ -21,20 +21,12 @@
 
 #if defined(CONFIG_DEBUG_ICEDCC)
 
-#if defined(CONFIG_CPU_V6) || defined(CONFIG_CPU_V6K)
+#if defined(CONFIG_CPU_V6) || defined(CONFIG_CPU_V6K) || defined(CONFIG_CPU_V7)
 		.macro	loadsp, rb, tmp
 		.endm
 		.macro	writeb, ch, rb
 		mcr	p14, 0, \ch, c0, c5, 0
 		.endm
-#elif defined(CONFIG_CPU_V7)
-		.macro	loadsp, rb, tmp
-		.endm
-		.macro	writeb, ch, rb
-wait:		mrc	p14, 0, pc, c0, c1, 0
-		bcs	wait
-		mcr	p14, 0, \ch, c0, c5, 0
-		.endm
 #elif defined(CONFIG_CPU_XSCALE)
 		.macro	loadsp, rb, tmp
 		.endm
@@ -187,15 +179,14 @@ not_angel:
 		bl	cache_on
 
 restart:	adr	r0, LC0
-		ldmia	r0, {r1, r2, r3, r5, r6, r9, r11, r12}
-		ldr	sp, [r0, #32]
+		ldmia	r0, {r1, r2, r3, r6, r9, r11, r12}
+		ldr	sp, [r0, #28]
 
 		/*
 		 * We might be running at a different address.  We need
 		 * to fix up various pointers.
 		 */
 		sub	r0, r0, r1		@ calculate the delta offset
-		add	r5, r5, r0		@ _start
 		add	r6, r6, r0		@ _edata
 
 #ifndef CONFIG_ZBOOT_ROM
@@ -214,31 +205,40 @@ restart:	adr	r0, LC0
 /*
  * Check to see if we will overwrite ourselves.
  *   r4  = final kernel address
- *   r5  = start of this image
  *   r9  = size of decompressed image
  *   r10 = end of this image, including bss/stack/malloc space if non XIP
  * We basically want:
- *   r4 >= r10 -> OK
- *   r4 + image length <= r5 -> OK
+ *   r4 - 16k page directory >= r10 -> OK
+ *   r4 + image length <= current position (pc) -> OK
  */
+		add	r10, r10, #16384
 		cmp	r4, r10
 		bhs	wont_overwrite
 		add	r10, r4, r9
-		cmp	r10, r5
+ ARM(		cmp	r10, pc		)
+ THUMB(		mov	lr, pc		)
+ THUMB(		cmp	r10, lr		)
 		bls	wont_overwrite
 
 /*
  * Relocate ourselves past the end of the decompressed kernel.
- *   r5  = start of this image
  *   r6  = _edata
  *   r10 = end of the decompressed kernel
  * Because we always copy ahead, we need to do it from the end and go
  * backward in case the source and destination overlap.
  */
-		/* Round up to next 256-byte boundary. */
-		add	r10, r10, #256
+		/*
+		 * Bump to the next 256-byte boundary with the size of
+		 * the relocation code added. This avoids overwriting
+		 * ourself when the offset is small.
+		 */
+		add	r10, r10, #((reloc_code_end - restart + 256) & ~255)
 		bic	r10, r10, #255
 
+		/* Get start of code we want to copy and align it down. */
+		adr	r5, restart
+		bic	r5, r5, #31
+
 		sub	r9, r6, r5		@ size to copy
 		add	r9, r9, #31		@ rounded up to a multiple
 		bic	r9, r9, #31		@ ... of 32 bytes
@@ -253,6 +253,11 @@ restart:	adr	r0, LC0
 		/* Preserve offset to relocated code. */
 		sub	r6, r9, r6
 
+#ifndef CONFIG_ZBOOT_ROM
+		/* cache_clean_flush may use the stack, so relocate it */
+		add	sp, sp, r6
+#endif
+
 		bl	cache_clean_flush
 
 		adr	r0, BSYM(restart)
@@ -341,7 +346,6 @@ not_relocated:	mov	r0, #0
 LC0:		.word	LC0			@ r1
 		.word	__bss_start		@ r2
 		.word	_end			@ r3
-		.word	_start			@ r5
 		.word	_edata			@ r6
 		.word	_image_size		@ r9
 		.word	_got_start		@ r11
@@ -455,7 +459,11 @@ __setup_mmu:	sub	r3, r4, #16384		@ Page directory size
 		orr	r1, r1, #3 << 10
 		add	r2, r3, #16384
 1:		cmp	r1, r9			@ if virt > start of RAM
+#ifdef CONFIG_CPU_DCACHE_WRITETHROUGH
+		orrhs	r1, r1, #0x08		@ set cacheable
+#else
 		orrhs	r1, r1, #0x0c		@ set cacheable, bufferable
+#endif
 		cmp	r1, r10			@ if virt > end of RAM
 		bichs	r1, r1, #0x0c		@ clear cacheable, bufferable
 		str	r1, [r0], #4		@ 1:1 mapping
@@ -480,6 +488,12 @@ __setup_mmu:	sub	r3, r4, #16384		@ Page directory size
 		mov	pc, lr
 ENDPROC(__setup_mmu)
 
+__arm926ejs_mmu_cache_on:
+#ifdef CONFIG_CPU_DCACHE_WRITETHROUGH
+		mov	r0, #4			@ put dcache in WT mode
+		mcr	p15, 7, r0, c15, c0, 0
+#endif
+
 __armv4_mmu_cache_on:
 		mov	r12, lr
 #ifdef CONFIG_MMU
@@ -661,6 +675,12 @@ proc_types:
 		W(b)	__armv4_mpu_cache_off
 		W(b)	__armv4_mpu_cache_flush
 
+		.word	0x41069260		@ ARM926EJ-S (v5TEJ)
+		.word	0xff0ffff0
+		b	__arm926ejs_mmu_cache_on
+		b	__armv4_mmu_cache_off
+		b	__armv5tej_mmu_cache_flush
+
 		.word	0x00007000		@ ARM7 IDs
 		.word	0x0000f000
 		mov	pc, lr
@@ -1070,6 +1090,7 @@ memdump:	mov	r12, r0
 #endif
 
 		.ltorg
+reloc_code_end:
 
 		.align
 		.section ".stack", "aw", %nobits
diff --git a/arch/arm/boot/compressed/misc.c b/arch/arm/boot/compressed/misc.c
index 4657e877bf8f..2df38263124c 100644
--- a/arch/arm/boot/compressed/misc.c
+++ b/arch/arm/boot/compressed/misc.c
@@ -36,7 +36,7 @@ extern void error(char *x);
 
 #ifdef CONFIG_DEBUG_ICEDCC
 
-#if defined(CONFIG_CPU_V6) || defined(CONFIG_CPU_V6K)
+#if defined(CONFIG_CPU_V6) || defined(CONFIG_CPU_V6K) || defined(CONFIG_CPU_V7)
 
 static void icedcc_putc(int ch)
 {
@@ -52,16 +52,6 @@ static void icedcc_putc(int ch)
 	asm("mcr p14, 0, %0, c0, c5, 0" : : "r" (ch));
 }
 
-#elif defined(CONFIG_CPU_V7)
-
-static void icedcc_putc(int ch)
-{
-	asm(
-	"wait:	mrc	p14, 0, pc, c0, c1, 0		\n\
-		bcs	wait				\n\
-		mcr	p14, 0, %0, c0, c5, 0		"
-	: : "r" (ch));
-}
 #elif defined(CONFIG_CPU_XSCALE)
 
diff --git a/arch/arm/boot/compressed/vmlinux.lds.in b/arch/arm/boot/compressed/vmlinux.lds.in
index 5309909d7282..ea80abe78844 100644
--- a/arch/arm/boot/compressed/vmlinux.lds.in
+++ b/arch/arm/boot/compressed/vmlinux.lds.in
@@ -54,6 +54,7 @@ SECTIONS
   .bss			: { *(.bss) }
   _end = .;
 
+  . = ALIGN(8);		/* the stack must be 64-bit aligned */
   .stack		: { *(.stack) }
 
   .stab 0		: { *(.stab) }
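
A note on the relocation arithmetic introduced in head.S: before this change the decompressor only rounded the copy destination (r10) up to the next 256-byte boundary, which could place the relocated copy on top of the code doing the copying when the relocation offset was small. The new sequence first adds the size of the relocation code (reloc_code_end - restart), rounded up to whole 256-byte blocks, and only then aligns down. The C sketch below reproduces that computation for illustration only; the names relocation_destination, image_end and reloc_code_size are invented for the example and do not appear in the kernel sources.

#include <stdint.h>
#include <inttypes.h>
#include <stdio.h>

/*
 * Illustrative-only model of the destination address picked by the new
 * head.S code:
 *
 *	add	r10, r10, #((reloc_code_end - restart + 256) & ~255)
 *	bic	r10, r10, #255
 *
 * image_end stands in for r10 (end of the decompressed image plus
 * bss/stack/malloc space) and reloc_code_size for the assemble-time
 * constant (reloc_code_end - restart).
 */
static uint32_t relocation_destination(uint32_t image_end,
				       uint32_t reloc_code_size)
{
	/* Round the extra headroom up to whole 256-byte blocks ... */
	uint32_t headroom = (reloc_code_size + 256) & ~255u;

	/* ... add it, then align the result down to a 256-byte boundary. */
	return (image_end + headroom) & ~255u;
}

int main(void)
{
	/* Example: image ends at 0x00501234, relocation code is 0x180 bytes. */
	printf("copy destination: 0x%08" PRIx32 "\n",
	       relocation_destination(0x00501234, 0x180));	/* 0x00501400 */
	return 0;
}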