Skip to content
Snippets Groups Projects
Commit dd7c7200 authored by Paul Burton's avatar Paul Burton Committed by Daniel Schwierzeck
Browse files

MIPS: allow systems to skip loads during cache init


Current MIPS systems do not require that loads be performed to force the
parity of cache lines; a simple invalidate by clearing the tag for each
line will suffice. Thus this patch makes the loads & subsequent second
invalidation conditional upon the CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD
option, and defines that for existing mips32 targets. Exceptions are
malta where this is known to be unnecessary, and qemu-mips where caches
are not implemented.

Signed-off-by: Paul Burton <paul.burton@imgtec.com>
Cc: Daniel Schwierzeck <daniel.schwierzeck@gmail.com>
parent ca4e833c
Branches
Tags
No related merge requests found
...@@ -36,6 +36,7 @@ config TARGET_VCT ...@@ -36,6 +36,7 @@ config TARGET_VCT
select SUPPORTS_BIG_ENDIAN select SUPPORTS_BIG_ENDIAN
select SUPPORTS_CPU_MIPS32_R1 select SUPPORTS_CPU_MIPS32_R1
select SUPPORTS_CPU_MIPS32_R2 select SUPPORTS_CPU_MIPS32_R2
select SYS_MIPS_CACHE_INIT_RAM_LOAD
config TARGET_DBAU1X00 config TARGET_DBAU1X00
bool "Support dbau1x00" bool "Support dbau1x00"
...@@ -43,12 +44,14 @@ config TARGET_DBAU1X00 ...@@ -43,12 +44,14 @@ config TARGET_DBAU1X00
select SUPPORTS_LITTLE_ENDIAN select SUPPORTS_LITTLE_ENDIAN
select SUPPORTS_CPU_MIPS32_R1 select SUPPORTS_CPU_MIPS32_R1
select SUPPORTS_CPU_MIPS32_R2 select SUPPORTS_CPU_MIPS32_R2
select SYS_MIPS_CACHE_INIT_RAM_LOAD
config TARGET_PB1X00 config TARGET_PB1X00
bool "Support pb1x00" bool "Support pb1x00"
select SUPPORTS_LITTLE_ENDIAN select SUPPORTS_LITTLE_ENDIAN
select SUPPORTS_CPU_MIPS32_R1 select SUPPORTS_CPU_MIPS32_R1
select SUPPORTS_CPU_MIPS32_R2 select SUPPORTS_CPU_MIPS32_R2
select SYS_MIPS_CACHE_INIT_RAM_LOAD
endchoice endchoice
...@@ -185,6 +188,9 @@ config 64BIT ...@@ -185,6 +188,9 @@ config 64BIT
config SWAP_IO_SPACE config SWAP_IO_SPACE
bool bool
config SYS_MIPS_CACHE_INIT_RAM_LOAD
bool
endif endif
endmenu endmenu
...@@ -113,6 +113,8 @@ LEAF(mips_cache_reset) ...@@ -113,6 +113,8 @@ LEAF(mips_cache_reset)
l1_info t3, t9, MIPS_CONF1_DA_SHIFT l1_info t3, t9, MIPS_CONF1_DA_SHIFT
#endif #endif
#ifdef CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD
/* Determine the largest L1 cache size */ /* Determine the largest L1 cache size */
#if defined(CONFIG_SYS_ICACHE_SIZE) && defined(CONFIG_SYS_DCACHE_SIZE) #if defined(CONFIG_SYS_ICACHE_SIZE) && defined(CONFIG_SYS_DCACHE_SIZE)
#if CONFIG_SYS_ICACHE_SIZE > CONFIG_SYS_DCACHE_SIZE #if CONFIG_SYS_ICACHE_SIZE > CONFIG_SYS_DCACHE_SIZE
...@@ -134,14 +136,15 @@ LEAF(mips_cache_reset) ...@@ -134,14 +136,15 @@ LEAF(mips_cache_reset)
f_fill64 a0, -64, zero f_fill64 a0, -64, zero
bne a0, a1, 2b bne a0, a1, 2b
/* #endif /* CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD */
* The caches are probably in an indeterminate state,
* so we force good parity into them by doing an
* invalidate, load/fill, invalidate for each line.
*/
/* /*
* Assume bottom of RAM will generate good parity for the cache. * The caches are probably in an indeterminate state, so we force good
* parity into them by doing an invalidate for each line. If
* CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD is set then we'll proceed to
* perform a load/fill & a further invalidate for each line, assuming
* that the bottom of RAM (having just been cleared) will generate good
* parity for the cache.
*/ */
/* /*
...@@ -153,12 +156,14 @@ LEAF(mips_cache_reset) ...@@ -153,12 +156,14 @@ LEAF(mips_cache_reset)
PTR_ADDU t1, t0, t2 PTR_ADDU t1, t0, t2
/* clear tag to invalidate */ /* clear tag to invalidate */
cache_loop t0, t1, t8, INDEX_STORE_TAG_I cache_loop t0, t1, t8, INDEX_STORE_TAG_I
#ifdef CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD
/* fill once, so data field parity is correct */ /* fill once, so data field parity is correct */
PTR_LI t0, INDEX_BASE PTR_LI t0, INDEX_BASE
cache_loop t0, t1, t8, FILL cache_loop t0, t1, t8, FILL
/* invalidate again - prudent but not strictly neccessary */ /* invalidate again - prudent but not strictly neccessary */
PTR_LI t0, INDEX_BASE PTR_LI t0, INDEX_BASE
cache_loop t0, t1, t8, INDEX_STORE_TAG_I cache_loop t0, t1, t8, INDEX_STORE_TAG_I
#endif
/* /*
* then initialize D-cache. * then initialize D-cache.
...@@ -169,6 +174,7 @@ LEAF(mips_cache_reset) ...@@ -169,6 +174,7 @@ LEAF(mips_cache_reset)
PTR_ADDU t1, t0, t3 PTR_ADDU t1, t0, t3
/* clear all tags */ /* clear all tags */
cache_loop t0, t1, t9, INDEX_STORE_TAG_D cache_loop t0, t1, t9, INDEX_STORE_TAG_D
#ifdef CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD
/* load from each line (in cached space) */ /* load from each line (in cached space) */
PTR_LI t0, INDEX_BASE PTR_LI t0, INDEX_BASE
2: LONG_L zero, 0(t0) 2: LONG_L zero, 0(t0)
...@@ -177,6 +183,7 @@ LEAF(mips_cache_reset) ...@@ -177,6 +183,7 @@ LEAF(mips_cache_reset)
/* clear all tags */ /* clear all tags */
PTR_LI t0, INDEX_BASE PTR_LI t0, INDEX_BASE
cache_loop t0, t1, t9, INDEX_STORE_TAG_D cache_loop t0, t1, t9, INDEX_STORE_TAG_D
#endif
3: jr ra 3: jr ra
END(mips_cache_reset) END(mips_cache_reset)
......
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment