/*
 * armboot - Startup Code for S3C6400/ARM1176 CPU-core
 *
 * Copyright (c) 2007 Samsung Electronics
 *
 * Copyright (C) 2008
 * Guennadi Liakhovetski, DENX Software Engineering, <lg@denx.de>
 *
 * See file CREDITS for list of people who contributed to this
 * project.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 *
 * 2007-09-21 - Restructured code by jsgood (jsgood.yang@samsung.com)
 * 2007-09-21 - Added MoviNAND and OneNAND boot code by
 *              jsgood (jsgood.yang@samsung.com)
 *              Base code by scsuh (sc.suh)
 */

#include <config.h>
#include <version.h>
#ifdef CONFIG_ENABLE_MMU
#include <asm/proc/domain.h>
#endif
#include <s3c6400.h>

#if !defined(CONFIG_ENABLE_MMU) && !defined(CFG_PHY_UBOOT_BASE)
#define CFG_PHY_UBOOT_BASE      CFG_UBOOT_BASE
#endif

/*
 *************************************************************************
 *
 * Jump vector table as in table 3.1 in [1]
 *
 *************************************************************************
 */
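/*
 * The reset entry branches directly; the other entries load absolute
 * handler addresses from the literal words below, so exceptions always
 * enter the handlers at their linked addresses.
 */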

.globl _start
_start: b       reset
#ifndef CONFIG_NAND_SPL
        ldr     pc, _undefined_instruction
        ldr     pc, _software_interrupt
        ldr     pc, _prefetch_abort
        ldr     pc, _data_abort
        ldr     pc, _not_used
        ldr     pc, _irq
        ldr     pc, _fiq

_undefined_instruction:
        .word undefined_instruction
_software_interrupt:
        .word software_interrupt
_prefetch_abort:
        .word prefetch_abort
_data_abort:
        .word data_abort
_not_used:
        .word not_used
_irq:
        .word irq
_fiq:
        .word fiq
_pad:
        .word 0x12345678                /* now 16*4=64 */
#else
        . = _start + 64
#endif

.global _end_vect
_end_vect:
        .balignl 16, 0xdeadbeef
/*
 *************************************************************************
 *
 * Startup Code (reset vector)
 *
 * do important init only if we don't start from memory!
 * setup Memory and board specific bits prior to relocation.
 * relocate armboot to ram
 * setup stack
 *
 *************************************************************************
 */

_TEXT_BASE:
        .word TEXT_BASE

/*
 * The variable below is essential because U-Boot runs with the MMU
 * enabled: before the MMU is turned on, code must use this physical
 * address rather than the virtual link address. (by scsuh)
 */
_TEXT_PHY_BASE:
        .word CFG_PHY_UBOOT_BASE

.globl _armboot_start
_armboot_start:
        .word _start

/*
 * These are defined in the board-specific linker script.
 */
.globl _bss_start
_bss_start:
        .word __bss_start

.globl _bss_end
_bss_end:
        .word _end

/*
 * the actual reset code
 */

reset:
        /*
         * set the cpu to SVC32 mode
         */
        mrs     r0, cpsr
        bic     r0, r0, #0x3f
        orr     r0, r0, #0xd3
        msr     cpsr, r0
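        /* CPSR is now 0xd3: SVC32 mode with IRQ and FIQ masked (I and F bits set) */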

/*
 *************************************************************************
 *
 * CPU_init_critical registers
 *
 * setup important registers
 * setup memory timing
 *
 *************************************************************************
 */
/*
 * we do sys-critical inits only at reboot,
 * not when booting from ram!
 */
cpu_init_crit:
        /*
         * When booting from NAND - it has definitely been a reset, so, no need
         * to flush caches and disable the MMU
         */
#ifndef CONFIG_NAND_SPL
        /*
         * flush v4 I/D caches
         */
        mov     r0, #0
        mcr     p15, 0, r0, c7, c7, 0   /* flush v3/v4 cache */
        mcr     p15, 0, r0, c8, c7, 0   /* flush v4 TLB */
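        /*
         * On the ARM1176 these legacy CP15 operations invalidate both
         * caches (c7, c7, 0) and the entire unified TLB (c8, c7, 0).
         */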

        /*
         * disable MMU stuff and caches
         */
        mrc     p15, 0, r0, c1, c0, 0
        bic     r0, r0, #0x00002300     @ clear bits 13, 9:8 (--V- --RS)
        bic     r0, r0, #0x00000087     @ clear bits 7, 2:0 (B--- -CAM)
        orr     r0, r0, #0x00000002     @ set bit 2 (A) Align
        orr     r0, r0, #0x00001000     @ set bit 12 (I) I-Cache
        /* Prepare to disable the MMU */
        adr     r1, mmu_disable_phys
        /* We presume we're within the first 1024 bytes */
        and     r1, r1, #0x3fc
        ldr     r2, _TEXT_PHY_BASE
        ldr     r3, =0xfff00000
        and     r2, r2, r3
        orr     r2, r2, r1
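        /*
         * r2 now holds the physical address of mmu_disable_phys: the 1 MiB
         * aligned physical U-Boot base plus the label's offset within the
         * first 1 KiB of the image.
         */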
        b       mmu_disable

        .align 5
        /* Run in a single cache-line */
mmu_disable:
        mcr     p15, 0, r0, c1, c0, 0
        nop
        nop
        mov     pc, r2
#endif

mmu_disable_phys:
        /* Peri port setup */
        ldr     r0, =0x70000000
        orr     r0, r0, #0x13
        mcr     p15, 0, r0, c15, c2, 4  @ 256M (0x70000000 - 0x7fffffff)
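        /*
         * c15, c2, 4 is the ARM1176 peripheral port remap register; the value
         * 0x70000013 is assumed here to select a 256 MiB non-cacheable region
         * at 0x70000000 covering the S3C6400 special-function registers.
         */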

        /*
         * Go setup Memory and board specific bits prior to relocation.
         */
        bl      lowlevel_init           /* go setup pll, mux, memory */

after_copy:
#ifdef CONFIG_ENABLE_MMU
enable_mmu:
        /* enable domain access */
        ldr     r5, =0x0000ffff
        mcr     p15, 0, r5, c3, c0, 0   /* load domain access register */
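        /* 0x0000ffff gives manager access to domains 0-7, so page-table
           permission bits are not checked while U-Boot runs */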

        /* Set the TTB register */
        ldr     r0, _mmu_table_base
        ldr     r1, =CFG_PHY_UBOOT_BASE
        ldr     r2, =0xfff00000
        bic     r0, r0, r2
        orr     r1, r0, r1
        mcr     p15, 0, r1, c2, c0, 0
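        /*
         * TTBR0 now holds the physical address of mmu_table: the table's
         * offset within its 1 MiB section combined with CFG_PHY_UBOOT_BASE.
         */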

        /* Enable the MMU */
        mrc     p15, 0, r0, c1, c0, 0
        orr     r0, r0, #1              /* Set CR_M to enable MMU */

        /* Prepare to enable the MMU */
        adr     r1, skip_hw_init
        and     r1, r1, #0x3fc
        ldr     r2, _TEXT_BASE
        ldr     r3, =0xfff00000
        and     r2, r2, r3
        orr     r2, r2, r1
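        /*
         * r2 now holds the virtual (linked) address of skip_hw_init, so the
         * jump below lands in the MMU-mapped copy once translation is on.
         */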
        b       mmu_enable

        .align 5
        /* Run in a single cache-line */
mmu_enable:
        mcr     p15, 0, r0, c1, c0, 0
        nop
        nop
        mov     pc, r2
#endif

skip_hw_init:
        /* Set up the stack */
stack_setup:
#ifdef CONFIG_MEMORY_UPPER_CODE
        ldr     sp, =(CFG_UBOOT_BASE + CFG_UBOOT_SIZE - 0xc)
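        /*
         * Stack at the top of the region reserved for U-Boot; the last 0xc
         * bytes are presumably kept free for the same 3-word abort-stack
         * reservation used in the non-upper-code path below.
         */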
#else
        ldr     r0, _TEXT_BASE          /* upper 128 KiB: relocated uboot */
        sub     r0, r0, #CFG_MALLOC_LEN /* malloc area */
        sub     r0, r0, #CFG_GBL_DATA_SIZE /* bdinfo */
        sub     sp, r0, #12             /* leave 3 words for abort-stack */
#endif

clear_bss:
        ldr     r0, _bss_start          /* find start of bss segment */
        ldr     r1, _bss_end            /* stop here */
        mov     r2, #0                  /* clear */

clbss_l:
        str     r2, [r0]                /* clear loop... */
        add     r0, r0, #4
        cmp     r0, r1
        blo     clbss_l                 /* stop before _bss_end (unsigned compare) */

#ifndef CONFIG_NAND_SPL
        ldr     pc, _start_armboot

_start_armboot:
        .word start_armboot
#else
        b       nand_boot
/*      .word nand_boot */
#endif

#ifdef CONFIG_ENABLE_MMU
_mmu_table_base:
        .word mmu_table
#endif

#ifndef CONFIG_NAND_SPL
/*
 * We assume the caches have already been dealt with (e.g. by
 * cleanup_before_linux()). Since U-Boot does not use the D-cache,
 * nothing needs to be done about the caches here; this function
 * only disables the MMU. (by scsuh)
 *
 * void theLastJump(void *kernel, int arch_num, uint boot_params);
 */
#ifdef CONFIG_ENABLE_MMU
.globl theLastJump
theLastJump:
        mov     r9, r0
        ldr     r3, =0xfff00000
        ldr     r4, _TEXT_PHY_BASE
        adr     r5, phy_last_jump
        bic     r5, r5, r3
        orr     r5, r5, r4
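        /*
         * r5 = physical address of phy_last_jump, so execution continues
         * from the physical alias while the MMU is disabled below.
         */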
        mov     pc, r5
phy_last_jump:
        /*
         * disable MMU stuff
         */
        mrc     p15, 0, r0, c1, c0, 0
        bic     r0, r0, #0x00002300     /* clear bits 13, 9:8 (--V- --RS) */
        bic     r0, r0, #0x00000087     /* clear bits 7, 2:0 (B--- -CAM) */
        orr     r0, r0, #0x00000002     /* set bit 2 (A) Align */
        orr     r0, r0, #0x00001000     /* set bit 12 (I) I-Cache */
        mcr     p15, 0, r0, c1, c0, 0

        mcr     p15, 0, r0, c8, c7, 0   /* flush v4 TLB */

        mov     r0, #0
        mov     pc, r9
#endif
/*
 *************************************************************************
 *
 * Interrupt handling
 *
 *************************************************************************
 */
@
@ IRQ stack frame.
@
#define S_FRAME_SIZE    72

#define S_OLD_R0        68
#define S_PSR           64
#define S_PC            60
#define S_LR            56
#define S_SP            52

#define S_IP            48
#define S_FP            44
#define S_R10           40
#define S_R9            36
#define S_R8            32
#define S_R7            28
#define S_R6            24
#define S_R5            20
#define S_R4            16
#define S_R3            12
#define S_R2            8
#define S_R1            4
#define S_R0            0
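
/*
 * These offsets describe the register frame handed to the C exception
 * handlers and are expected to match struct pt_regs in asm/proc-armv/ptrace.h.
 */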

#define MODE_SVC        0x13
#define I_BIT           0x80

/*
 * use bad_save_user_regs for abort/prefetch/undef/swi ...
 */

        .macro  bad_save_user_regs
        /* carve out a frame on current user stack */
        sub     sp, sp, #S_FRAME_SIZE
        /* Save user registers (now in SVC mode) r0-r12 */
        stmia   sp, {r0 - r12}

        ldr     r2, _armboot_start
        sub     r2, r2, #(CFG_MALLOC_LEN)
        /* set base 2 words into abort stack */
        sub     r2, r2, #(CFG_GBL_DATA_SIZE + 8)
        /* get values for "aborted" pc and cpsr (into parm regs) */
        ldmia   r2, {r2 - r3}
        /* grab pointer to old stack */
        add     r0, sp, #S_FRAME_SIZE

        add     r5, sp, #S_SP
        mov     r1, lr
        /* save sp_SVC, lr_SVC, pc, cpsr */
        stmia   r5, {r0 - r3}
        /* save current stack into r0 (param register) */
        mov     r0, sp
        .endm

        .macro  get_bad_stack
        /* setup our mode stack (enter in banked mode) */
        ldr     r13, _armboot_start
        /* move past malloc pool */
        sub     r13, r13, #(CFG_MALLOC_LEN)
        /* move past global data; a couple of words are reserved for the abort stack */
        sub     r13, r13, #(CFG_GBL_DATA_SIZE + 8)

        /* save caller lr in position 0 of saved stack */
        str     lr, [r13]
        /* get the spsr */
        mrs     lr, spsr
        /* save spsr in position 1 of saved stack */
        str     lr, [r13, #4]

        /* prepare SVC-Mode */
        mov     r13, #MODE_SVC
        @ msr   spsr_c, r13
        /* set SPSR so the mode switch below lands in SVC mode */
        msr     spsr, r13
        /* capture return pc */
        mov     lr, pc
        /* jump to next instruction & switch modes */
        movs    pc, lr
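        /*
         * "movs pc, lr" copies the SPSR (set to SVC above) into the CPSR while
         * jumping to lr, i.e. it drops from the exception mode back into SVC
         * mode and continues at the instruction following this macro.
         */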
        .endm

        .macro  get_bad_stack_swi
        /* space on current stack for scratch reg. */
        sub     r13, r13, #4
        /* save R0's value */
        str     r0, [r13]
        /* get start of data region */
        ldr     r0, _armboot_start
        /* move past malloc pool */
        sub     r0, r0, #(CFG_MALLOC_LEN)
        /* move past global data; a couple of words are reserved for the abort stack */
        sub     r0, r0, #(CFG_GBL_DATA_SIZE + 8)
        /* save caller lr in position 0 of saved stack */
        str     lr, [r0]
        /* get the spsr */
        mrs     lr, spsr
        /* save spsr in position 1 of saved stack */
        str     lr, [r0, #4]
        /* restore caller lr */
        ldr     lr, [r0]
        /* restore r0 */
        ldr     r0, [r13]
        /* pop stack entry */
        add     r13, r13, #4
        .endm

/*
 * exception handlers
 */
        .align 5
undefined_instruction:
        get_bad_stack
        bad_save_user_regs
        bl      do_undefined_instruction

        .align 5
software_interrupt:
        get_bad_stack_swi
        bad_save_user_regs
        bl      do_software_interrupt

        .align 5
prefetch_abort:
        get_bad_stack
        bad_save_user_regs
        bl      do_prefetch_abort

        .align 5
data_abort:
        get_bad_stack
        bad_save_user_regs
        bl      do_data_abort

        .align 5
not_used:
        get_bad_stack
        bad_save_user_regs
        bl      do_not_used

        .align 5
irq:
        get_bad_stack
        bad_save_user_regs
        bl      do_irq

        .align 5
fiq:
        get_bad_stack
        bad_save_user_regs
        bl      do_fiq
#endif /* CONFIG_NAND_SPL */