@@ -144,7 +144,9 @@ reset_exceptions:
144144 mttbl r0 /* prevent FIT and WDT exceptions */
145145 mttbu r0
146146 mtspr SPRN_TSR, r1 /* clear all timer exception status */
147+ #ifdef PLATFORM_nxp_p1021
147148 mtspr SPRN_TCR, r0 /* disable all timers */
149+ #endif
148150 mtspr SPRN_ESR, r0 /* clear exception syndrome register */
149151 mtspr SPRN_MCSR, r0 /* clear machine check syndrome register */
150152 mtxer r0 /* clear integer exception register */
@@ -155,7 +157,7 @@ hardware_reg:
155157 ori r0, r0, (HID0_EMCP | HID0_TBEN | HID0_ENMAS7)@l
156158 mtspr SPRN_HID0, r0
157159
158- #ifndef BUILD_LOADER_STAGE1
160+ #if defined(PLATFORM_nxp_p1021) && !defined( BUILD_LOADER_STAGE1)
159161 /* Set addr streaming & broadcast
160162 * and optimized sync instruction (if rev 5.0 or greater) */
161163 li r0, (HID1_ASTME | HID1_ABE)@l
@@ -223,6 +225,7 @@ find_pc:
223225 andc r2, r2, r3
224226 or r2, r2, r1
225227 mtspr MAS2, r2 /* EPN */
228+
226229 mfspr r2, MAS3
227230 andc r2, r2, r3
228231 or r2, r2, r1
@@ -242,7 +245,7 @@ find_pc:
242245 li r3, 0
243246 mtspr MAS1, r3
2442471: cmpw r3, r14
245- rlwinm r5, r5 , 16 , MAS0_ESEL_MSK
248+ rlwinm r5, r3 , 16 , MAS0_ESEL_MSK
246249 addi r3, r3, 1
247250 beq 2f /* skip the TLB in R14 */
248251
@@ -397,78 +400,118 @@ flash_tlb:
397400 BOOKE_PAGESZ_256M, 0 , r3);
398401#endif
399402
400- #ifdef ENABLE_L2_CACHE
401-
402- #ifdef MMU_V2
403+ #if defined(ENABLE_L2_CACHE) && defined(MMU_V2)
403404
404- /* e6500 - must have L2 initialized before L1 */
405- /* E6500RM 5.6.2 Enabling and disabling the L1 caches:
406- * "Note that enabling either L1 cache without first enabling the L2 cache
407- * is not supported."
408- */
409405create_ccsr_l2_tlb:
410- /* L2 0xFEC20000: TLB 1, Entry 9, Supervisor X/R/W, G , TS=0, 256KB , IPROT */
406+ /* L2 0xFEC20000: TLB 1, Entry 9, Supervisor X/R/W, IG , TS=0, 512KB , IPROT */
411407 set_tlb(1 , 9 ,
412408 L2SRAM_ADDR, L2SRAM_ADDR, 0 ,
413- MAS3_SX | MAS3_SW | MAS3_SR, MAS2_G, 0 ,
414- BOOKE_PAGESZ_256K, 1 , r3);
415-
416- /* CRM 11.7 */
417- setup_l2:
418-
419- /* L2 data cache invalidation & unlocking
420- * create flash invalidate & unlock bit mask (see Table 2-19)
421- */
422- lis r4, 0x0020
423- ori r4, r4, 0x0400
424- /* get base address of memory mapped registers */
425- mfspr r5, SCCSRBAR
426- li r7, 24 /* get shift count */
427- sld r5, r5, r7
428- lis r6, 0x00C2 /* block offset for desired cluster (see Table 2-4) */
429- /* subsequent cluster L2 caches may be invalidated & unlocked by adding 0x40000 to 6 */
430- add r6, r6, r5
431- /* L2SC0 offset (see Table 2-5), included here only for example */
432- /*addi r6, r6, r0 */
433- /* ensure prior memory transactions are performed */
409+ MAS3_SX | MAS3_SW | MAS3_SR, MAS2_I | MAS2_G, 0 ,
410+ BOOKE_PAGESZ_512K, 1 , r3);
411+
412+ setup_l2_sram:
413+ /* 8.2 CoreNet Platform Cache (CPC) Memory Map */
414+ #define CPCCSR0 (0x000 )
415+ #define CPCSRCR1 (0x100 )
416+ #define CPCSRCR0 (0x104 )
417+ #define CPCHDBCR0 (0xF00 )
418+
419+ #define CPCCSR0_CPCE (0x80000000 >> 0 )
420+ #define CPCCSR0_CPCPE (0x80000000 >> 1 )
421+ #define CPCCSR0_CPCFI (0x80000000 >> 10 )
422+ #define CPCCSR0_CPCLFC (0x80000000 >> 21 )
423+ #define CPCCSR0_SRAM_ENABLE (CPCCSR0_CPCE | CPCCSR0_CPCPE)
424+
425+ #define CPCSRCR0_SRAMSZ_64 (0x1 << 1 ) /* ways 14-15 */
426+ #define CPCSRCR0_SRAMSZ_256 (0x3 << 1 ) /* ways 8-15 */
427+ #define CPCSRCR0_SRAMSZ_512 (0x4 << 1 ) /* ways 0-15 */
428+ #define CPCSRCR0_SRAMEN (0x1 )
429+
430+ #define CPCHDBCR0_SPEC_DIS (0x80000000 >> 4 )
431+
432+ /* T2080RM: 8.4.2.2 Enabling the CPC after Power-On Reset */
433+ /* R1 = CPC base */
434+ lis r1, CPC_BASE@h
435+ ori r1, r1, CPC_BASE@l
436+
437+ /* Set CPC SRAM control register */
438+ /* SRAM high addrress 0x0 */
439+ li r0, 0
440+ stw r0, CPCSRCR1(r1)
441+ /* SRAM low address */
442+ lis r0, L2SRAM_ADDR@h
443+ ori r0, r0, L2SRAM_ADDR@l
444+ /* Enable SRAM and set size (must match L2SRAM_SIZE) */
445+ ori r0, r0, (CPCSRCR0_SRAMSZ_256 | CPCSRCR0_SRAMEN)
446+ stw r0, CPCSRCR0(r1)
447+
448+ /* Enable memory mapped SRAM */
449+ lis r0, CPCCSR0_SRAM_ENABLE@h
450+ mbar
451+ isync
452+ stw r0, CPCCSR0(r1)
453+ mbar
454+
455+ /* Disable speculation */
456+ lwz r0, CPCHDBCR0(r1)
457+ oris r0, r0, CPCHDBCR0_SPEC_DIS@h
458+ stw r0, CPCHDBCR0(r1)
459+
460+ setup_l2_cache:
461+ /* L2 Cache Control - E6500CORERM 2.2.3 Memory-mapped registers (MMRs) */
462+ #define L2_CLUSTER_BASE(n) (CCSRBAR + 0xC20000 + (n * 0x40000 ))
463+ #define L2CSR0 (0x000 ) /* L2 Cache Control and Status 0 */
464+ #define L2CSR1 (0x004 ) /* L2 Cache Control and Status 1 */
465+ #define L2CFG0 (0x008 ) /* L2 Cache Configuration */
466+ #define L2PID(n) (0x200 + (n * 0x10 )) /* L2 Cache Partitioning ID */
467+ #define L2PIR(n) (0x208 + (n * 0x10 )) /* L2 Cache Partitioning Allocation */
468+ #define L2PWR(n) (0x20C + (n * 0x10 )) /* L2 Cache Partitioning Way */
469+
470+ #define L2CSR0_L2FI 0x00200000 /* L2 Cache Flash Invalidate */
471+ #define L2CSR0_L2FL 0x00000800 /* L2 Cache Flush */
472+ #define L2CSR0_L2LFC 0x00000400 /* L2 Cache Lock Flash Clear */
473+ #define L2CSR0_L2PE 0x40000000 /* L2 Cache Parity/ECC Enable */
474+ #define L2CSR0_L2E 0x80000000 /* L2 Cache Enable */
475+
476+ /* E6500CORERM: 11.7 L2 cache state */
477+ /* R5 = L2 cluster 1 base */
478+ lis r5, L2_CLUSTER_BASE(0 )@h
479+ ori r5, r5, L2_CLUSTER_BASE(0 )@l
480+ /* Invalidate and clear locks */
481+ lis r1, (L2CSR0_L2FI | L2CSR0_L2LFC)@h
482+ ori r1, r1, (L2CSR0_L2FI | L2CSR0_L2LFC)@l
434483 sync
484+ stw r1, L2CSR0(r5)
435485
436- // TODO is this required?
437- li r5, 33
438- stw r5, 4 (r6)
439-
440- sync
441- stw r4, 0 (r6) /* write L2SC0 MMR to flash invalidate L2 cache and locks */
442- l2loop:
443- sync
444- lwz r5, 0 (r6) /* get current L2SC0 MMR value */
445- and . r5, r5, r4 /* compare to mask to see if complete */
446- bne l2loop
486+ /* poll till invalidate and lock bits are cleared */
487+ poll_l2_invclear:
488+ lwz r4, L2CSR0(r5)
489+ and . r4, r1, r4
490+ bne poll_l2_invclear
447491 isync
448492
449- enable_l2_pe:
450- lis r5, 0x4000
451- sync
452- stw r5, 0 (r6)
453- l2_pe_loop:
493+ /* enable L2 with parity */
454494 sync
455- lwz r4, 0 (r6)
456- cmplw r4, r5
457- bne l2_pe_loop
458495 isync
459-
460- enable_l2e:
461- lis r5, 0xC000
462- sync
463- stw r5, 0 (r6)
464- l2e_loop:
465- sync
466- lwz r4, 0 (r6)
467- cmplw r4, r5
468- bne l2e_loop
496+ lis r4, (L2CSR0_L2E | L2CSR0_L2PE)@h
497+ stw r4, L2CSR0(r5)
469498 isync
470- #endif /* MMU_V2 */
471- #endif /* ENABLE_L2_CACHE */
499+
500+ /* set stash id = 32 */
501+ li r4, 32
502+ stw r4, L2CSR1(r5)
503+
504+ l2_sram_init:
505+ /* clear 8 bytes at a time */
506+ lis r2, (L2SRAM_ADDR - 8 )@h
507+ ori r2, r2, (L2SRAM_ADDR - 8 )@l
508+ lis r3, (L2SRAM_SIZE / 8 )@h
509+ mtctr r3
510+ li r3, 0
511+ l2_sram_init_loop:
512+ stdu r3, 8 (r2)
513+ bdnz l2_sram_init_loop
514+ #endif /* ENABLE_L2_CACHE && MMU_V2 */
472515
473516#ifdef MMU_V2
474517set_stack_as: