cpu/mtrr.h: Fix macro names for MTRR registers

We use UNDERSCORE_CASE. For the fixed-range and variable-range MTRR
macros that refer to an MSR, we also remove the _MSR suffix, as they
are, by definition, MSRs; MTRR_DEF_TYPE_MSR keeps its suffix.
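
For illustration only (not part of this patch), the renamed macros are
used from C the same way as before. A minimal sketch, assuming
coreboot's msr_t/rdmsr()/wrmsr() helpers from <cpu/x86/msr.h>; the
helper name, the 1 MiB range, and the 32-bit physical-address
assumption in the mask are purely hypothetical:

  #include <cpu/x86/msr.h>
  #include <cpu/x86/mtrr.h>

  /* Hypothetical example, not taken from this patch. */
  static void example_program_var_mtrr0(void)
  {
          msr_t msr;

          /* Variable MTRR 0: write-back over the low 1 MiB. */
          msr.hi = 0;
          msr.lo = 0x00000000 | MTRR_TYPE_WRBACK;
          wrmsr(MTRR_PHYS_BASE(0), msr);

          /* Mask assumes a 32-bit physical address space, so the
           * upper mask bits stay zero. */
          msr.hi = 0;
          msr.lo = ~(0x100000 - 1) | MTRR_PHYS_MASK_VALID;
          wrmsr(MTRR_PHYS_MASK(0), msr);

          /* Enable MTRRs; the default-type register keeps its
           * _MSR suffix. */
          msr = rdmsr(MTRR_DEF_TYPE_MSR);
          msr.lo |= MTRR_DEF_TYPE_EN;
          wrmsr(MTRR_DEF_TYPE_MSR, msr);
  }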

Change-Id: Id4483a75d62cf1b478a9105ee98a8f55140ce0ef
Signed-off-by: Alexandru Gagniuc <mr.nuke.me@gmail.com>
Reviewed-on: http://review.coreboot.org/11761
Reviewed-by: Aaron Durbin <adurbin@chromium.org>
Tested-by: build bot (Jenkins)
diff --git a/src/cpu/intel/car/cache_as_ram.inc b/src/cpu/intel/car/cache_as_ram.inc
index f9be6e8..6ef8604 100644
--- a/src/cpu/intel/car/cache_as_ram.inc
+++ b/src/cpu/intel/car/cache_as_ram.inc
@@ -54,7 +54,7 @@
 	 */
 	xorl	%eax, %eax
 	xorl	%edx, %edx
-	movl	$MTRRfix64K_00000_MSR, %ecx
+	movl	$MTRR_FIX_64K_00000, %ecx
 	wrmsr
 
 	/*
@@ -102,16 +102,16 @@
 
 	/* Wait for the Logical AP to complete initialization. */
 LogicalAP_SIPINotdone:
-	movl	$MTRRfix64K_00000_MSR, %ecx
+	movl	$MTRR_FIX_64K_00000, %ecx
 	rdmsr
 	orl	%eax, %eax
 	jz	LogicalAP_SIPINotdone
 
 NotHtProcessor:
 	/* Set the default memory type and enable fixed and variable MTRRs. */
-	movl	$MTRRdefType_MSR, %ecx
+	movl	$MTRR_DEF_TYPE_MSR, %ecx
 	xorl	%edx, %edx
-	movl	$(MTRRdefTypeEn | MTRRdefTypeFixEn), %eax
+	movl	$(MTRR_DEF_TYPE_EN | MTRR_DEF_TYPE_FIX_EN), %eax
 	wrmsr
 
 	/* Clear all MTRRs. */
@@ -131,35 +131,35 @@
 
 all_mtrr_msrs:
 	/* fixed MTRR MSRs */
-	.long	MTRRfix64K_00000_MSR
-	.long	MTRRfix16K_80000_MSR
-	.long	MTRRfix16K_A0000_MSR
-	.long	MTRRfix4K_C0000_MSR
-	.long	MTRRfix4K_C8000_MSR
-	.long	MTRRfix4K_D0000_MSR
-	.long	MTRRfix4K_D8000_MSR
-	.long	MTRRfix4K_E0000_MSR
-	.long	MTRRfix4K_E8000_MSR
-	.long	MTRRfix4K_F0000_MSR
-	.long	MTRRfix4K_F8000_MSR
+	.long	MTRR_FIX_64K_00000
+	.long	MTRR_FIX_16K_80000
+	.long	MTRR_FIX_16K_A0000
+	.long	MTRR_FIX_4K_C0000
+	.long	MTRR_FIX_4K_C8000
+	.long	MTRR_FIX_4K_D0000
+	.long	MTRR_FIX_4K_D8000
+	.long	MTRR_FIX_4K_E0000
+	.long	MTRR_FIX_4K_E8000
+	.long	MTRR_FIX_4K_F0000
+	.long	MTRR_FIX_4K_F8000
 
 	/* var MTRR MSRs */
-	.long	MTRRphysBase_MSR(0)
-	.long	MTRRphysMask_MSR(0)
-	.long	MTRRphysBase_MSR(1)
-	.long	MTRRphysMask_MSR(1)
-	.long	MTRRphysBase_MSR(2)
-	.long	MTRRphysMask_MSR(2)
-	.long	MTRRphysBase_MSR(3)
-	.long	MTRRphysMask_MSR(3)
-	.long	MTRRphysBase_MSR(4)
-	.long	MTRRphysMask_MSR(4)
-	.long	MTRRphysBase_MSR(5)
-	.long	MTRRphysMask_MSR(5)
-	.long	MTRRphysBase_MSR(6)
-	.long	MTRRphysMask_MSR(6)
-	.long	MTRRphysBase_MSR(7)
-	.long	MTRRphysMask_MSR(7)
+	.long	MTRR_PHYS_BASE(0)
+	.long	MTRR_PHYS_MASK(0)
+	.long	MTRR_PHYS_BASE(1)
+	.long	MTRR_PHYS_MASK(1)
+	.long	MTRR_PHYS_BASE(2)
+	.long	MTRR_PHYS_MASK(2)
+	.long	MTRR_PHYS_BASE(3)
+	.long	MTRR_PHYS_MASK(3)
+	.long	MTRR_PHYS_BASE(4)
+	.long	MTRR_PHYS_MASK(4)
+	.long	MTRR_PHYS_BASE(5)
+	.long	MTRR_PHYS_MASK(5)
+	.long	MTRR_PHYS_BASE(6)
+	.long	MTRR_PHYS_MASK(6)
+	.long	MTRR_PHYS_BASE(7)
+	.long	MTRR_PHYS_MASK(7)
 
 	.long	0x000 /* NULL, end of table */
 
@@ -219,13 +219,13 @@
 
 #if CacheSize > 0x8000
 	/* Enable caching for 32K-64K using fixed MTRR. */
-	movl	$MTRRfix4K_C0000_MSR, %ecx
+	movl	$MTRR_FIX_4K_C0000, %ecx
 	simplemask CacheSize, 0x8000
 	wrmsr
 #endif
 
 	/* Enable caching for 0-32K using fixed MTRR. */
-	movl	$MTRRfix4K_C8000_MSR, %ecx
+	movl	$MTRR_FIX_4K_C8000, %ecx
 	simplemask CacheSize, 0
 	wrmsr
 
@@ -235,7 +235,7 @@
 	 * Enable write base caching so we can do execute in place (XIP)
 	 * on the flash ROM.
 	 */
-	movl	$MTRRphysBase_MSR(1), %ecx
+	movl	$MTRR_PHYS_BASE(1), %ecx
 	xorl	%edx, %edx
 	/*
 	 * IMPORTANT: The following calculation _must_ be done at runtime. See
@@ -246,9 +246,9 @@
 	orl	$MTRR_TYPE_WRBACK, %eax
 	wrmsr
 
-	movl	$MTRRphysMask_MSR(1), %ecx
+	movl	$MTRR_PHYS_MASK(1), %ecx
 	movl	$0x0000000f, %edx
-	movl	$(~(CONFIG_XIP_ROM_SIZE - 1) | MTRRphysMaskValid), %eax
+	movl	$(~(CONFIG_XIP_ROM_SIZE - 1) | MTRR_PHYS_MASK_VALID), %eax
 	wrmsr
 #endif /* CONFIG_XIP_ROM_SIZE */
 
@@ -332,13 +332,13 @@
 	movl	%eax, %cr0
 
 	/* Clear sth. */
-	movl	$MTRRfix4K_C8000_MSR, %ecx
+	movl	$MTRR_FIX_4K_C8000, %ecx
 	xorl	%edx, %edx
 	xorl	%eax, %eax
 	wrmsr
 
 #if CONFIG_DCACHE_RAM_SIZE > 0x8000
-	movl	$MTRRfix4K_C0000_MSR, %ecx
+	movl	$MTRR_FIX_4K_C0000, %ecx
 	wrmsr
 #endif
 
@@ -346,9 +346,9 @@
 	 * Set the default memory type and disable fixed
 	 * and enable variable MTRRs.
 	 */
-	movl	$MTRRdefType_MSR, %ecx
+	movl	$MTRR_DEF_TYPE_MSR, %ecx
 	xorl	%edx, %edx
-	movl	$MTRRdefTypeEn, %eax /* Enable variable and disable fixed MTRRs. */
+	movl	$MTRR_DEF_TYPE_EN, %eax /* Enable variable and disable fixed MTRRs. */
 	wrmsr
 
 	/* Enable cache. */
diff --git a/src/cpu/intel/car/cache_as_ram_ht.inc b/src/cpu/intel/car/cache_as_ram_ht.inc
index 193ad41..6eb50ba 100644
--- a/src/cpu/intel/car/cache_as_ram_ht.inc
+++ b/src/cpu/intel/car/cache_as_ram_ht.inc
@@ -61,7 +61,7 @@
 	post_code(0x21)
 
 	/* Configure the default memory type to uncacheable. */
-	movl	$MTRRdefType_MSR, %ecx
+	movl	$MTRR_DEF_TYPE_MSR, %ecx
 	rdmsr
 	andl	$(~0x00000cff), %eax
 	wrmsr
@@ -95,9 +95,9 @@
 	 */
 addrsize_set_high:
 	xorl	%eax, %eax
-	movl	$MTRRphysMask_MSR(0), %ecx
+	movl	$MTRR_PHYS_MASK(0), %ecx
 	wrmsr
-	movl	$MTRRphysMask_MSR(1), %ecx
+	movl	$MTRR_PHYS_MASK(1), %ecx
 	wrmsr
 	movl	$LAPIC_BASE_MSR, %ecx
 	not	%edx
@@ -188,7 +188,7 @@
 	post_code(0x26)
 
 	/* Wait for sibling CPU to start. */
-1:	movl	$(MTRRphysBase_MSR(0)), %ecx
+1:	movl	$(MTRR_PHYS_BASE(0)), %ecx
 	rdmsr
 	andl	%eax, %eax
 	jnz	sipi_complete
@@ -211,7 +211,7 @@
 	post_code(0x28)
 
 	/* MTRR registers are shared between HT siblings. */
-	movl	$(MTRRphysBase_MSR(0)), %ecx
+	movl	$(MTRR_PHYS_BASE(0)), %ecx
 	movl	$(1<<12), %eax
 	xorl	%edx, %edx
 	wrmsr
@@ -230,21 +230,21 @@
 	post_code(0x2a)
 
 	/* Set Cache-as-RAM base address. */
-	movl	$(MTRRphysBase_MSR(0)), %ecx
+	movl	$(MTRR_PHYS_BASE(0)), %ecx
 	movl	$(CACHE_AS_RAM_BASE | MTRR_TYPE_WRBACK), %eax
 	xorl	%edx, %edx
 	wrmsr
 
 	/* Set Cache-as-RAM mask. */
-	movl	$(MTRRphysMask_MSR(0)), %ecx
+	movl	$(MTRR_PHYS_MASK(0)), %ecx
 	rdmsr
-	movl	$(~(CACHE_AS_RAM_SIZE - 1) | MTRRphysMaskValid), %eax
+	movl	$(~(CACHE_AS_RAM_SIZE - 1) | MTRR_PHYS_MASK_VALID), %eax
 	wrmsr
 
 	/* Enable MTRR. */
-	movl	$MTRRdefType_MSR, %ecx
+	movl	$MTRR_DEF_TYPE_MSR, %ecx
 	rdmsr
-	orl	$MTRRdefTypeEn, %eax
+	orl	$MTRR_DEF_TYPE_EN, %eax
 	wrmsr
 
 	post_code(0x2b)
@@ -308,7 +308,7 @@
 
 #if CONFIG_XIP_ROM_SIZE
 	/* Enable cache for our code in Flash because we do XIP here */
-	movl	$MTRRphysBase_MSR(1), %ecx
+	movl	$MTRR_PHYS_BASE(1), %ecx
 	xorl	%edx, %edx
 	/*
 	 * IMPORTANT: The following calculation _must_ be done at runtime. See
@@ -319,9 +319,9 @@
 	orl	$MTRR_TYPE_WRBACK, %eax
 	wrmsr
 
-	movl	$MTRRphysMask_MSR(1), %ecx
+	movl	$MTRR_PHYS_MASK(1), %ecx
 	rdmsr
-	movl	$(~(CONFIG_XIP_ROM_SIZE - 1) | MTRRphysMaskValid), %eax
+	movl	$(~(CONFIG_XIP_ROM_SIZE - 1) | MTRR_PHYS_MASK_VALID), %eax
 	wrmsr
 #endif /* CONFIG_XIP_ROM_SIZE */
 
@@ -356,9 +356,9 @@
 	post_code(0x34)
 
 	/* Disable MTRR. */
-	movl	$MTRRdefType_MSR, %ecx
+	movl	$MTRR_DEF_TYPE_MSR, %ecx
 	rdmsr
-	andl	$(~MTRRdefTypeEn), %eax
+	andl	$(~MTRR_DEF_TYPE_EN), %eax
 	wrmsr
 
 	post_code(0x35)
@@ -382,24 +382,24 @@
 	post_code(0x38)
 
 	/* Enable Write Back and Speculative Reads for low RAM. */
-	movl	$MTRRphysBase_MSR(0), %ecx
+	movl	$MTRR_PHYS_BASE(0), %ecx
 	movl	$(0x00000000 | MTRR_TYPE_WRBACK), %eax
 	xorl	%edx, %edx
 	wrmsr
-	movl	$MTRRphysMask_MSR(0), %ecx
+	movl	$MTRR_PHYS_MASK(0), %ecx
 	rdmsr
-	movl	$(~(CONFIG_RAMTOP - 1) | MTRRphysMaskValid), %eax
+	movl	$(~(CONFIG_RAMTOP - 1) | MTRR_PHYS_MASK_VALID), %eax
 	wrmsr
 
 #if CACHE_ROM_SIZE
 	/* Enable caching and Speculative Reads for Flash ROM device. */
-	movl	$MTRRphysBase_MSR(1), %ecx
+	movl	$MTRR_PHYS_BASE(1), %ecx
 	movl	$(CACHE_ROM_BASE | MTRR_TYPE_WRPROT), %eax
 	xorl	%edx, %edx
 	wrmsr
-	movl	$MTRRphysMask_MSR(1), %ecx
+	movl	$MTRR_PHYS_MASK(1), %ecx
 	rdmsr
-	movl	$(~(CACHE_ROM_SIZE - 1) | MTRRphysMaskValid), %eax
+	movl	$(~(CACHE_ROM_SIZE - 1) | MTRR_PHYS_MASK_VALID), %eax
 	wrmsr
 #endif
 
@@ -413,9 +413,9 @@
 	post_code(0x3a)
 
 	/* Enable MTRR. */
-	movl	$MTRRdefType_MSR, %ecx
+	movl	$MTRR_DEF_TYPE_MSR, %ecx
 	rdmsr
-	orl	$MTRRdefTypeEn, %eax
+	orl	$MTRR_DEF_TYPE_EN, %eax
 	wrmsr
 
 	post_code(0x3b)