/*
 * This file is part of the coreboot project.
 *
 * Copyright (c) 2015, The Linux Foundation. All rights reserved.
 * Copyright 2013 Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * cache.h: Cache maintenance API for ARM
 */

#ifndef ARM_CACHE_H
#define ARM_CACHE_H

#include <stddef.h>
#include <stdint.h>
David Hendricks | 2fba5e2 | 2013-03-14 19:06:11 -0700 | [diff] [blame] | 39 | /* SCTLR bits */ |
| 40 | #define SCTLR_M (1 << 0) /* MMU enable */ |
| 41 | #define SCTLR_A (1 << 1) /* Alignment check enable */ |
| 42 | #define SCTLR_C (1 << 2) /* Data/unified cache enable */ |
| 43 | /* Bits 4:3 are reserved */ |
| 44 | #define SCTLR_CP15BEN (1 << 5) /* CP15 barrier enable */ |
| 45 | /* Bit 6 is reserved */ |
| 46 | #define SCTLR_B (1 << 7) /* Endianness */ |
| 47 | /* Bits 9:8 */ |
| 48 | #define SCTLR_SW (1 << 10) /* SWP and SWPB enable */ |
| 49 | #define SCTLR_Z (1 << 11) /* Branch prediction enable */ |
| 50 | #define SCTLR_I (1 << 12) /* Instruction cache enable */ |
| 51 | #define SCTLR_V (1 << 13) /* Low/high exception vectors */ |
| 52 | #define SCTLR_RR (1 << 14) /* Round Robin select */ |
| 53 | /* Bits 16:15 are reserved */ |
| 54 | #define SCTLR_HA (1 << 17) /* Hardware Access flag enable */ |
| 55 | /* Bit 18 is reserved */ |
| 56 | /* Bits 20:19 reserved virtualization not supported */ |
| 57 | #define SCTLR_WXN (1 << 19) /* Write permission implies XN */ |
| 58 | #define SCTLR_UWXN (1 << 20) /* Unprivileged write permission |
| 59 | implies PL1 XN */ |
| 60 | #define SCTLR_FI (1 << 21) /* Fast interrupt config enable */ |
| 61 | #define SCTLR_U (1 << 22) /* Unaligned access behavior */ |
| 62 | #define SCTLR_VE (1 << 24) /* Interrupt vectors enable */ |
| 63 | #define SCTLR_EE (1 << 25) /* Exception endianness */ |
| 64 | /* Bit 26 is reserved */ |
| 65 | #define SCTLR_NMFI (1 << 27) /* Non-maskable FIQ support */ |
| 66 | #define SCTLR_TRE (1 << 28) /* TEX remap enable */ |
| 67 | #define SCTLR_AFE (1 << 29) /* Access flag enable */ |
| 68 | #define SCTLR_TE (1 << 30) /* Thumb exception enable */ |
| 69 | /* Bit 31 is reserved */ |
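
/*
 * Illustrative sketch (not part of this API): bringing up the MMU and
 * caches typically sets SCTLR_M, SCTLR_C and SCTLR_I together once the
 * translation tables are in place:
 *
 *	uint32_t sctlr = read_sctlr();
 *	sctlr |= SCTLR_M | SCTLR_C | SCTLR_I;
 *	write_sctlr(sctlr);
 */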

/*
 * Sync primitives
 */

/* data memory barrier */
static inline void dmb(void)
{
	asm volatile ("dmb" : : : "memory");
}

/* data sync barrier */
static inline void dsb(void)
{
	asm volatile ("dsb" : : : "memory");
}

/* instruction sync barrier */
static inline void isb(void)
{
	asm volatile ("isb" : : : "memory");
}
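
/*
 * Illustrative sketch: a common use of these barriers is ordering a
 * memory update against subsequent maintenance operations:
 *
 *	*ptr = val;	// update memory ('ptr'/'val' are hypothetical)
 *	dsb();		// ensure the write has completed
 *	...issue cache/TLB maintenance here...
 *	dsb();		// ensure the maintenance has completed
 *	isb();		// resynchronize the instruction stream
 */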

/*
 * Low-level TLB maintenance operations
 */

/* invalidate entire unified TLB */
static inline void tlbiall(void)
{
	asm volatile ("mcr p15, 0, %0, c8, c7, 0" : : "r" (0) : "memory");
}

/* invalidate unified TLB by MVA, all ASID */
static inline void tlbimvaa(unsigned long mva)
{
	asm volatile ("mcr p15, 0, %0, c8, c7, 3" : : "r" (mva) : "memory");
}
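
/*
 * Illustrative sketch (hypothetical page-table update): after changing
 * a page table entry, the stale TLB entry must be dropped before the
 * new mapping can be relied upon:
 *
 *	pte[idx] = new_entry;	// 'pte'/'idx'/'new_entry' are hypothetical
 *	dsb();			// make the PTE write visible to the walker
 *	tlbimvaa(vaddr);	// invalidate the stale entry for this address
 *	dsb();
 *	isb();
 */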

/* write domain access control register (DACR) */
static inline void write_dacr(uint32_t val)
{
	asm volatile ("mcr p15, 0, %0, c3, c0, 0" : : "r" (val));
}

/* read memory model feature register 0 (MMFR0) */
static inline uint32_t read_mmfr0(void)
{
	uint32_t mmfr;
	asm volatile ("mrc p15, 0, %0, c0, c1, 4" : "=r" (mmfr));
	return mmfr;
}

/* read MAIR0 (memory attribute indirection register 0) */
static inline uint32_t read_mair0(void)
{
	uint32_t mair;
	asm volatile ("mrc p15, 0, %0, c10, c2, 0" : "=r" (mair));
	return mair;
}

/* write MAIR0 (memory attribute indirection register 0) */
static inline void write_mair0(uint32_t val)
{
	asm volatile ("mcr p15, 0, %0, c10, c2, 0" : : "r" (val));
}
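
/*
 * Illustrative sketch: with TEX remap enabled (SCTLR_TRE), MAIR0 holds
 * four 8-bit memory attribute fields. Commonly cited encodings (verify
 * against the ARM ARM before relying on them) are 0x00 for
 * Strongly-ordered, 0x04 for Device and 0xff for Normal write-back
 * cacheable, e.g.:
 *
 *	write_mair0(0x00 | (0x04 << 8) | (0xff << 16));
 */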

/* write translation table base register 0 (TTBR0) */
static inline void write_ttbr0(uint32_t val)
{
	if (IS_ENABLED(CONFIG_ARM_LPAE))
		asm volatile ("mcrr p15, 0, %[val], %[zero], c2" : :
			      [val] "r" (val), [zero] "r" (0));
	else
		asm volatile ("mcr p15, 0, %0, c2, c0, 0" : : "r" (val) : "memory");
}

/* read translation table base control register (TTBCR) */
static inline uint32_t read_ttbcr(void)
{
	uint32_t val = 0;
	asm volatile ("mrc p15, 0, %0, c2, c0, 2" : "=r" (val));
	return val;
}

/* write translation table base control register (TTBCR) */
static inline void write_ttbcr(uint32_t val)
{
	asm volatile ("mcr p15, 0, %0, c2, c0, 2" : : "r" (val) : "memory");
}
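
/*
 * Illustrative sketch (hypothetical table base 'ttb'): pointing the
 * walker at a new first-level table usually pairs a TTBCR write with a
 * TTBR0 write:
 *
 *	write_ttbcr(0);			// use TTBR0 for the whole space
 *	write_ttbr0((uint32_t)ttb);	// table base, suitably aligned
 *	dsb();
 *	isb();
 */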

/*
 * Low-level cache maintenance operations
 */

/* branch predictor invalidate all */
static inline void bpiall(void)
{
	asm volatile ("mcr p15, 0, %0, c7, c5, 6" : : "r" (0));
}

/* data cache clean and invalidate by MVA to PoC */
static inline void dccimvac(unsigned long mva)
{
	asm volatile ("mcr p15, 0, %0, c7, c14, 1" : : "r" (mva) : "memory");
}

/* data cache clean and invalidate by set/way */
static inline void dccisw(uint32_t val)
{
	asm volatile ("mcr p15, 0, %0, c7, c14, 2" : : "r" (val) : "memory");
}

/* data cache clean by MVA to PoC */
static inline void dccmvac(unsigned long mva)
{
	asm volatile ("mcr p15, 0, %0, c7, c10, 1" : : "r" (mva) : "memory");
}

/* data cache clean by set/way */
static inline void dccsw(uint32_t val)
{
	asm volatile ("mcr p15, 0, %0, c7, c10, 2" : : "r" (val) : "memory");
}

/* data cache invalidate by MVA to PoC */
static inline void dcimvac(unsigned long mva)
{
	asm volatile ("mcr p15, 0, %0, c7, c6, 1" : : "r" (mva) : "memory");
}

/* data cache invalidate by set/way */
static inline void dcisw(uint32_t val)
{
	asm volatile ("mcr p15, 0, %0, c7, c6, 2" : : "r" (val) : "memory");
}

/* instruction cache invalidate all to PoU */
static inline void iciallu(void)
{
	asm volatile ("mcr p15, 0, %0, c7, c5, 0" : : "r" (0));
}
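
/*
 * Illustrative sketch: the set/way operand for dcisw()/dccsw()/dccisw()
 * packs the way into the top bits, the set above the line-size bits and
 * the zero-based cache level into bits [3:1]. With L = log2(line size
 * in bytes) and A = 32 - log2(number of ways), one set/way word would
 * be built as:
 *
 *	val = (way << A) | (set << L) | (level << 1);
 *
 * dcache_clean_invalidate_all() below performs this loop internally.
 */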

/*
 * Cache co-processor (CP15) access functions
 */

/* read cache level ID register (CLIDR) */
static inline uint32_t read_clidr(void)
{
	uint32_t val = 0;
	asm volatile ("mrc p15, 1, %0, c0, c0, 1" : "=r" (val));
	return val;
}

/* read cache size ID register (CCSIDR) */
static inline uint32_t read_ccsidr(void)
{
	uint32_t val = 0;
	asm volatile ("mrc p15, 1, %0, c0, c0, 0" : "=r" (val));
	return val;
}

/* read cache size selection register (CSSELR) */
static inline uint32_t read_csselr(void)
{
	uint32_t val = 0;
	asm volatile ("mrc p15, 2, %0, c0, c0, 0" : "=r" (val));
	return val;
}

/* write to cache size selection register (CSSELR) */
static inline void write_csselr(uint32_t val)
{
	/*
	 * Bits [3:1] - Cache level + 1 (0b000 = L1, 0b110 = L7, 0b111 is rsvd)
	 * Bit 0 - 0 = data or unified cache, 1 = instruction cache
	 */
	asm volatile ("mcr p15, 2, %0, c0, c0, 0" : : "r" (val));
	isb();	/* ISB to sync the change to CCSIDR */
}
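
/*
 * Illustrative sketch: to query the geometry of the L1 data cache,
 * select it via CSSELR and then read CCSIDR. CCSIDR[2:0] encodes
 * log2(words per line) - 2, so the line size in bytes would be:
 *
 *	write_csselr((0 << 1) | 0);	// level 1, data/unified cache
 *	uint32_t ccsidr = read_ccsidr();
 *	size_t line = 1 << ((ccsidr & 0x7) + 2 + 2);	// bytes per line
 */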

/* read L2 control register (L2CTLR) */
static inline uint32_t read_l2ctlr(void)
{
	uint32_t val = 0;
	asm volatile ("mrc p15, 1, %0, c9, c0, 2" : "=r" (val));
	return val;
}

/* write L2 control register (L2CTLR) */
static inline void write_l2ctlr(uint32_t val)
{
	/*
	 * Note: L2CTLR can only be written when the L2 memory system
	 * is idle, i.e. before the MMU is enabled.
	 */
	asm volatile ("mcr p15, 1, %0, c9, c0, 2" : : "r" (val) : "memory");
	isb();
}

/* read L2 auxiliary control register (L2ACTLR) */
static inline uint32_t read_l2actlr(void)
{
	uint32_t val = 0;
	asm volatile ("mrc p15, 1, %0, c15, c0, 0" : "=r" (val));
	return val;
}

/* write L2 auxiliary control register (L2ACTLR) */
static inline void write_l2actlr(uint32_t val)
{
	asm volatile ("mcr p15, 1, %0, c15, c0, 0" : : "r" (val) : "memory");
	isb();
}

/* read system control register (SCTLR) */
static inline uint32_t read_sctlr(void)
{
	uint32_t val;
	asm volatile ("mrc p15, 0, %0, c1, c0, 0" : "=r" (val));
	return val;
}

/* write system control register (SCTLR) */
static inline void write_sctlr(uint32_t val)
{
	asm volatile ("mcr p15, 0, %0, c1, c0, 0" : : "r" (val) : "cc");
	isb();
}

/* read data fault address register (DFAR) */
static inline uint32_t read_dfar(void)
{
	uint32_t val;
	asm volatile ("mrc p15, 0, %0, c6, c0, 0" : "=r" (val));
	return val;
}

/* read data fault status register (DFSR) */
static inline uint32_t read_dfsr(void)
{
	uint32_t val;
	asm volatile ("mrc p15, 0, %0, c5, c0, 0" : "=r" (val));
	return val;
}

/* read instruction fault address register (IFAR) */
static inline uint32_t read_ifar(void)
{
	uint32_t val;
	asm volatile ("mrc p15, 0, %0, c6, c0, 2" : "=r" (val));
	return val;
}

/* read instruction fault status register (IFSR) */
static inline uint32_t read_ifsr(void)
{
	uint32_t val;
	asm volatile ("mrc p15, 0, %0, c5, c0, 1" : "=r" (val));
	return val;
}

/* read auxiliary data fault status register (ADFSR) */
static inline uint32_t read_adfsr(void)
{
	uint32_t val;
	asm volatile ("mrc p15, 0, %0, c5, c1, 0" : "=r" (val));
	return val;
}

/* read auxiliary instruction fault status register (AIFSR) */
static inline uint32_t read_aifsr(void)
{
	uint32_t val;
	asm volatile ("mrc p15, 0, %0, c5, c1, 1" : "=r" (val));
	return val;
}
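
/*
 * Illustrative sketch: a data abort handler would typically combine the
 * fault address and status registers to report what went wrong
 * (printk/BIOS_ERR assumed from coreboot's <console/console.h>):
 *
 *	uint32_t far = read_dfar();	// faulting address
 *	uint32_t fsr = read_dfsr();	// fault status/type
 *	printk(BIOS_ERR, "data abort at %#x, DFSR %#x\n", far, fsr);
 */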

/*
 * Cache maintenance API
 */

/* dcache clean and invalidate all (on current level given by CSSELR) */
void dcache_clean_invalidate_all(void);

/* dcache clean by modified virtual address to PoC */
void dcache_clean_by_mva(void const *addr, size_t len);

/* dcache clean and invalidate by modified virtual address to PoC */
void dcache_clean_invalidate_by_mva(void const *addr, size_t len);

/* dcache invalidate by modified virtual address to PoC */
void dcache_invalidate_by_mva(void const *addr, size_t len);

/* dcache clean all (on current level given by CSSELR) */
void dcache_clean_all(void);

/* dcache invalidate all (on current level given by CSSELR) */
void dcache_invalidate_all(void);
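
/*
 * Illustrative sketch (hypothetical DMA buffer 'buf'): before a device
 * reads from memory, clean the range so the device sees the CPU's data;
 * before the CPU reads data a device wrote, invalidate the range:
 *
 *	dcache_clean_by_mva(buf, len);		// CPU -> device
 *	...start DMA, wait for completion...
 *	dcache_invalidate_by_mva(buf, len);	// device -> CPU
 */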

/* returns number of bytes per cache line */
unsigned int dcache_line_bytes(void);

/* dcache and MMU disable */
void dcache_mmu_disable(void);

/* dcache and MMU enable */
void dcache_mmu_enable(void);

/* perform all icache/dcache maintenance needed after loading new code */
void cache_sync_instructions(void);
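
/*
 * Illustrative sketch: after copying code into RAM (e.g. loading a new
 * stage), synchronize the caches before jumping to it:
 *
 *	memcpy(load_addr, src, size);	// 'load_addr'/'src'/'size' are
 *					// hypothetical
 *	cache_sync_instructions();
 *	((void (*)(void))load_addr)();
 */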

/* tlb invalidate all */
void tlb_invalidate_all(void);

/*
 * Generalized setup/init functions
 */

/* MMU initialization (set page table base, permissions, initialize subtable
 * buffer, etc.). Must only be called ONCE PER BOOT, before any mappings. */
void mmu_init(void);

enum dcache_policy {
	DCACHE_OFF,
	DCACHE_WRITEBACK,
	DCACHE_WRITETHROUGH,
};

/* disable the MMU for a range. Primarily useful to lock out address 0. */
void mmu_disable_range(u32 start_mb, u32 size_mb);

/* MMU range configuration (set dcache policy) */
void mmu_config_range(u32 start_mb, u32 size_mb, enum dcache_policy policy);
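
/*
 * Illustrative sketch (addresses are hypothetical): one common boot-time
 * pattern maps the whole address space uncached by default, then marks
 * DRAM write-back cacheable before enabling the MMU:
 *
 *	mmu_init();
 *	mmu_config_range(0, 4096, DCACHE_OFF);
 *	mmu_config_range(dram_base_mb, dram_size_mb, DCACHE_WRITEBACK);
 *	dcache_mmu_enable();
 */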

/* Reconfigure memory mappings at the fine-grained (4K) page level. Must be
 * called on a range contained within a single, already mapped block/superpage.
 * Careful: Do NOT map over this address range with mmu_config_range() again
 * later, or you will leak resources and may desync your TLB! */
void mmu_config_range_kb(u32 start_kb, u32 size_kb, enum dcache_policy policy);
void mmu_disable_range_kb(u32 start_kb, u32 size_kb);

#endif /* ARM_CACHE_H */