cpu/x86: Move fls() and fms() to mtrr.h

Move the function to find the most significant bit set (fms)
and the function to find the least significant bit set (fls) to a
common place, and remove the duplicates.
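
For illustration, the intended semantics of the two helpers (a minimal
sketch with example values, not part of the patch; fms()/fls() are the
inline functions added below, and the zero-input results follow from the
fallback moves in their asm):

	/* fms() returns the index of the highest set bit,
	 * fls() the index of the lowest set bit. */
	assert(fms(0x1) == 0);
	assert(fls(0x1) == 0);
	assert(fms(0x18) == 4);  /* bits 3 and 4 set -> highest is 4 */
	assert(fls(0x18) == 3);  /* lowest set bit is 3 */
	assert(fms(0) == 0);     /* bsr leaves ZF set for 0; asm forces 0 */
	assert(fls(0) == 32);    /* bsf leaves ZF set for 0; asm forces 32 */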

Change-Id: Ia821038b622d93e7f719c18e5ee3e8112de66a53
Signed-off-by: Rizwan Qureshi <rizwan.qureshi@intel.com>
Reviewed-on: https://review.coreboot.org/16525
Tested-by: build bot (Jenkins)
Reviewed-by: Aaron Durbin <adurbin@chromium.org>
diff --git a/src/include/cpu/x86/mtrr.h b/src/include/cpu/x86/mtrr.h
index f32bece..79f9890 100644
--- a/src/include/cpu/x86/mtrr.h
+++ b/src/include/cpu/x86/mtrr.h
@@ -84,6 +84,33 @@
 int get_free_var_mtrr(void);
 #endif
 
+#if !defined(__ASSEMBLER__) && !defined(__ROMCC__)
+
+/* fms: find most significant bit set, stolen from Linux Kernel Source. */
+static inline unsigned int fms(unsigned int x)
+{
+	int r;
+
+	__asm__("bsrl %1,%0\n\t"
+	        "jnz 1f\n\t"
+	        "movl $0,%0\n"
+	        "1:" : "=r" (r) : "g" (x));
+	return r;
+}
+
+/* fls: find least significant bit set */
+static inline unsigned int fls(unsigned int x)
+{
+	int r;
+
+	__asm__("bsfl %1,%0\n\t"
+	        "jnz 1f\n\t"
+	        "movl $32,%0\n"
+	        "1:" : "=r" (r) : "g" (x));
+	return r;
+}
+#endif
+
 /* Align up to next power of 2, suitable for ROMCC and assembler too.
  * Range of result 256kB to 128MB is good enough here.
  */