arch/x86/include/asm/msr.h
changeset 2 d1f6d8b6f81c
parent 0 aa628870c1d3
equal deleted inserted replaced
1:0056487c491e 2:d1f6d8b6f81c
    20 		     : "=a" (low), "=d" (high), "=c" (*aux));
    21 	return low | ((u64)high << 32);
    22 }
    23 
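The hunk above is cut off at the top; it appears to be the tail of native_read_tscp(), where rdtscp returns the TSC in edx:eax and the TSC_AUX MSR (usually the CPU number) in ecx. A minimal userspace sketch of the same pattern, assuming an rdtscp-capable CPU (read_tscp is an illustrative name, not the kernel function):

#include <stdint.h>
#include <stdio.h>

/* Userspace stand-in for the kernel's native_read_tscp(). */
static inline uint64_t read_tscp(unsigned int *aux)
{
	unsigned int low, high;

	/* rdtscp: edx:eax = TSC value, ecx = TSC_AUX (CPU/node id) */
	asm volatile("rdtscp"
		     : "=a" (low), "=d" (high), "=c" (*aux));
	return low | ((uint64_t)high << 32);
}

int main(void)
{
	unsigned int aux;
	uint64_t tsc = read_tscp(&aux);

	printf("tsc=%llu aux=%u\n", (unsigned long long)tsc, aux);
	return 0;
}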
    24 /*
    25  * i386 calling convention returns 64-bit value in edx:eax, while
    26  * x86_64 returns at rax. Also, the "A" constraint does not really
    27  * mean rdx:rax in x86_64, so we need specialized behaviour for each
    28  * architecture
    29  */
    24 /*
    25  * both i386 and x86_64 return a 64-bit value in edx:eax, but gcc's "A"
    26  * constraint has different meanings. For i386, "A" means exactly
    27  * edx:eax, while for x86_64 it doesn't mean rdx:rax or edx:eax. Instead,
    28  * it means rax *or* rdx.
    29  */
    30 #ifdef CONFIG_X86_64
    31 #define DECLARE_ARGS(val, low, high)	unsigned low, high
    32 #define EAX_EDX_VAL(val, low, high)	((low) | ((u64)(high) << 32))
    33 #define EAX_EDX_ARGS(val, low, high)	"a" (low), "d" (high)
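These three macros are the x86_64 half of the workaround the new comment describes; the i386 half is not in this hunk. A self-contained userspace sketch of how the pattern is used, with i386 definitions assumed analogous to the kernel's (EAX_EDX_RET is the companion output-constraint macro, inferred from context rather than shown here):

#include <stdint.h>
#include <stdio.h>

#ifdef __x86_64__
/* Read the halves into separate 32-bit outputs and recombine in C,
 * so gcc never has to interpret "A" as rdx:rax. */
#define DECLARE_ARGS(val, low, high)	unsigned low, high
#define EAX_EDX_VAL(val, low, high)	((low) | ((uint64_t)(high) << 32))
#define EAX_EDX_RET(val, low, high)	"=a" (low), "=d" (high)
#else
/* i386: "A" really does mean edx:eax, so one u64 output works. */
#define DECLARE_ARGS(val, low, high)	uint64_t val
#define EAX_EDX_VAL(val, low, high)	(val)
#define EAX_EDX_RET(val, low, high)	"=A" (val)
#endif

static inline uint64_t read_tsc(void)
{
	DECLARE_ARGS(val, low, high);

	asm volatile("rdtsc" : EAX_EDX_RET(val, low, high));
	return EAX_EDX_VAL(val, low, high);
}

int main(void)
{
	printf("tsc = %llu\n", (unsigned long long)read_tsc());
	return 0;
}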
    83 				    unsigned low, unsigned high)
    84 {
    85 	asm volatile("wrmsr" : : "c" (msr), "a"(low), "d" (high) : "memory");
    86 }
    87 
    88 static inline int native_write_msr_safe(unsigned int msr,
    88 /* Can be uninlined because referenced by paravirt */
    89 notrace static inline int native_write_msr_safe(unsigned int msr,
    90 					unsigned low, unsigned high)
    91 {
    92 	int err;
    93 	asm volatile("2: wrmsr ; xor %[err],%[err]\n"
    94 		     "1:\n\t"
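The asm body is truncated here. In msr.h of this era the statement continues with a .fixup stanza and an exception-table entry, so a wrmsr that faults (e.g. on an unimplemented MSR) branches to the fixup and returns an error code instead of oopsing. A hedged reconstruction of the whole function, kernel-context only (_ASM_EXTABLE comes from asm/asm.h, EFAULT from errno.h; the fixup details below are assumed from contemporaneous sources, not shown in this hunk):

notrace static inline int native_write_msr_safe(unsigned int msr,
					unsigned low, unsigned high)
{
	int err;

	asm volatile("2: wrmsr ; xor %[err],%[err]\n"
		     "1:\n\t"
		     ".section .fixup,\"ax\"\n\t"	/* assumed */
		     "3:  mov %[fault],%[err] ; jmp 1b\n\t"
		     ".previous\n\t"
		     _ASM_EXTABLE(2b, 3b)		/* assumed */
		     : [err] "=a" (err)
		     : "c" (msr), "0" (low), "d" (high),
		       [fault] "i" (-EFAULT)		/* assumed */
		     : "memory");
	return err;
}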
   180 	*p = native_read_msr_amd_safe(msr, &err);
   181 	return err;
   182 }
   183 
   184 #define rdtscl(low)						\
   184 	((low) = (u32)native_read_tsc())
   185 	((low) = (u32)__native_read_tsc())
   186 
   187 #define rdtscll(val)						\
   187 	((val) = native_read_tsc())
   188 	((val) = __native_read_tsc())
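The only change in these two hunks is the rename native_read_tsc() -> __native_read_tsc(); callers still get the low 32 bits from rdtscl() and the full 64 from rdtscll(). A self-contained userspace analog (my_read_tsc is a stand-in for __native_read_tsc, not kernel API):

#include <stdint.h>
#include <stdio.h>

/* Stand-in for the kernel's __native_read_tsc(). */
static inline uint64_t my_read_tsc(void)
{
	uint32_t low, high;

	asm volatile("rdtsc" : "=a" (low), "=d" (high));
	return low | ((uint64_t)high << 32);
}

#define rdtscl(low)	((low) = (uint32_t)my_read_tsc())
#define rdtscll(val)	((val) = my_read_tsc())

int main(void)
{
	uint64_t t0, t1;
	uint32_t lo;

	rdtscll(t0);
	rdtscll(t1);
	rdtscl(lo);
	printf("delta=%llu low32=%u\n",
	       (unsigned long long)(t1 - t0), lo);
	return 0;
}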
   189 
   190 #define rdpmc(counter, low, high)			\
   191 do {							\
   192 	u64 _l = native_read_pmc((counter));		\
   193 	(low)  = (u32)_l;				\
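The final hunk ends mid-macro, before the high half is stored. The mechanics are just one 64-bit read split by truncation and a 32-bit shift. A userspace sketch with a dummy counter read (fake_read_pmc is hypothetical; a real rdpmc faults in user mode unless CR4.PCE is set):

#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-in for native_read_pmc(); real rdpmc needs CR4.PCE. */
static uint64_t fake_read_pmc(int counter)
{
	(void)counter;
	return 0x1122334455667788ULL;	/* made-up counter value */
}

#define rdpmc(counter, low, high)			\
do {							\
	uint64_t _l = fake_read_pmc((counter));		\
	(low)  = (uint32_t)_l;				\
	(high) = (uint32_t)(_l >> 32);			\
} while (0)

int main(void)
{
	uint32_t lo, hi;

	rdpmc(0, lo, hi);
	printf("low=%#x high=%#x\n", lo, hi);
	return 0;
}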