1 // Basic x86 asm functions.
// Control register 0 (CR0) flag bits.  Unsigned literals are used so
// that bit 31 is not left-shifted into the sign bit of a signed int
// (undefined behavior in C).
#define CR0_PG (1U<<31) // Paging
#define CR0_CD (1U<<30) // Cache disable
#define CR0_NW (1U<<29) // Not Write-through
#define CR0_PE (1U<<0)  // Protection enable

// PS/2 system control port A - bit 1 gates the A20 address line.
#define PORT_A20 0x0092
#define A20_ENABLE_BIT 0x02
23 #include "types.h" // u32
// Disable maskable hardware interrupts on this cpu.
static inline void irq_disable(void)
{
    __asm__ __volatile__("cli" : : : "memory");
}
// Re-enable maskable hardware interrupts on this cpu.
static inline void irq_enable(void)
{
    __asm__ __volatile__("sti" : : : "memory");
}
35 static inline u32 save_flags(void)
38 asm volatile("pushfl ; popl %0" : "=rm" (flags));
42 static inline void restore_flags(u32 flags)
44 asm volatile("pushl %0 ; popfl" : : "g" (flags) : "memory", "cc");
// Busy-wait hint; "rep ; nop" is the encoding of the 'pause' instruction.
static inline void cpu_relax(void)
{
    __asm__ __volatile__("rep ; nop" : : : "memory");
}
// Execute a single no-op instruction.
// NOTE(review): the body is not visible in this chunk - presumably
// asm volatile("nop"); confirm against the full file.
static inline void nop(void)
// Halt the cpu until the next interrupt arrives.
static inline void hlt(void)
{
    __asm__ __volatile__("hlt" : : : "memory");
}
// Write back and invalidate all cpu caches.
static inline void wbinvd(void)
{
    __asm__ __volatile__("wbinvd" : : : "memory");
}
// Feature bits reported by the cpuid instruction.
#define CPUID_TSC (1 << 4)
#define CPUID_MSR (1 << 5)
#define CPUID_APIC (1 << 9)
#define CPUID_MTRR (1 << 12)
// Run 'cpuid' for leaf 'index' and store the four result registers.
// NOTE(review): the asm statement's instruction and input lines are not
// visible in this chunk; only the output-constraint line is shown.
static inline void __cpuid(u32 index, u32 *eax, u32 *ebx, u32 *ecx, u32 *edx)
    // Outputs: the cpuid results in eax/ebx/ecx/edx.
    : "=a" (*eax), "=b" (*ebx), "=c" (*ecx), "=d" (*edx)
78 static inline u32 cr0_read(void) {
80 asm("movl %%cr0, %0" : "=r"(cr0));
83 static inline void cr0_write(u32 cr0) {
84 asm("movl %0, %%cr0" : : "r"(cr0));
86 static inline void cr0_mask(u32 off, u32 on) {
87 cr0_write((cr0_read() & ~off) | on);
89 static inline u16 cr0_vm86_read(void) {
91 asm("smsww %0" : "=r"(cr0));
95 static inline u64 rdmsr(u32 index)
98 asm ("rdmsr" : "=A"(ret) : "c"(index));
102 static inline void wrmsr(u32 index, u64 val)
104 asm volatile ("wrmsr" : : "c"(index), "A"(val));
107 static inline u64 rdtscll(void)
110 asm volatile("rdtsc" : "=A" (val));
// Find the index of the first (least significant) set bit in 'word'.
// NOTE(review): body not visible in this chunk - presumably 'bsf';
// behavior for word==0 cannot be determined here, confirm in full file.
static inline u32 __ffs(u32 word)
// Find the index of the last (most significant) set bit in 'word'.
// NOTE(review): body not visible in this chunk - presumably 'bsr';
// behavior for word==0 cannot be determined here, confirm in full file.
static inline u32 __fls(u32 word)
129 static inline u32 getesp(void) {
131 asm("movl %%esp, %0" : "=rm"(esp));
135 static inline u32 rol(u32 val, u16 rol) {
137 asm volatile("roll %%cl, %%eax"
138 : "=a" (res) : "a" (val), "c" (rol));
142 static inline void outb(u8 value, u16 port) {
143 __asm__ __volatile__("outb %b0, %w1" : : "a"(value), "Nd"(port));
145 static inline void outw(u16 value, u16 port) {
146 __asm__ __volatile__("outw %w0, %w1" : : "a"(value), "Nd"(port));
148 static inline void outl(u32 value, u16 port) {
149 __asm__ __volatile__("outl %0, %w1" : : "a"(value), "Nd"(port));
151 static inline u8 inb(u16 port) {
153 __asm__ __volatile__("inb %w1, %b0" : "=a"(value) : "Nd"(port));
156 static inline u16 inw(u16 port) {
158 __asm__ __volatile__("inw %w1, %w0" : "=a"(value) : "Nd"(port));
161 static inline u32 inl(u16 port) {
163 __asm__ __volatile__("inl %w1, %0" : "=a"(value) : "Nd"(port));
167 static inline void insb(u16 port, u8 *data, u32 count) {
168 asm volatile("rep insb (%%dx), %%es:(%%edi)"
169 : "+c"(count), "+D"(data) : "d"(port) : "memory");
171 static inline void insw(u16 port, u16 *data, u32 count) {
172 asm volatile("rep insw (%%dx), %%es:(%%edi)"
173 : "+c"(count), "+D"(data) : "d"(port) : "memory");
175 static inline void insl(u16 port, u32 *data, u32 count) {
176 asm volatile("rep insl (%%dx), %%es:(%%edi)"
177 : "+c"(count), "+D"(data) : "d"(port) : "memory");
// XXX - the outs helpers hard-code an %es segment override; 'outs'
// defaults to %ds:(%esi) and need not be limited to %es.
180 static inline void outsb(u16 port, u8 *data, u32 count) {
181 asm volatile("rep outsb %%es:(%%esi), (%%dx)"
182 : "+c"(count), "+S"(data) : "d"(port) : "memory");
184 static inline void outsw(u16 port, u16 *data, u32 count) {
185 asm volatile("rep outsw %%es:(%%esi), (%%dx)"
186 : "+c"(count), "+S"(data) : "d"(port) : "memory");
188 static inline void outsl(u16 port, u32 *data, u32 count) {
189 asm volatile("rep outsl %%es:(%%esi), (%%dx)"
190 : "+c"(count), "+S"(data) : "d"(port) : "memory");
/* Compiler barrier is enough as an x86 CPU does not reorder reads or writes */
// Read memory barrier for SMP code.
// NOTE(review): body not visible in this chunk - presumably just a
// compiler barrier per the comment above; confirm in full file.
static inline void smp_rmb(void) {
// Write memory barrier for SMP code (same note as smp_rmb above).
static inline void smp_wmb(void) {
201 static inline void writel(void *addr, u32 val) {
203 *(volatile u32 *)addr = val;
205 static inline void writew(void *addr, u16 val) {
207 *(volatile u16 *)addr = val;
209 static inline void writeb(void *addr, u8 val) {
211 *(volatile u8 *)addr = val;
213 static inline u32 readl(const void *addr) {
214 u32 val = *(volatile const u32 *)addr;
218 static inline u16 readw(const void *addr) {
219 u16 val = *(volatile const u16 *)addr;
223 static inline u8 readb(const void *addr) {
224 u8 val = *(volatile const u8 *)addr;
// Segment descriptor constants for building a flat 32 bit GDT.
#define GDT_CODE (0x9bULL << 40) // Code segment - P,R,A bits also set
#define GDT_DATA (0x93ULL << 40) // Data segment - W,A bits also set
#define GDT_B (0x1ULL << 54) // Big flag
#define GDT_G (0x1ULL << 55) // Granularity flag
// GDT bits for segment base
// (base[31:24] -> descriptor bits 56-63; base[23:0] -> bits 16-39)
#define GDT_BASE(v) ((((u64)(v) & 0xff000000) << 32) \
                     | (((u64)(v) & 0x00ffffff) << 16))
// GDT bits for segment limit (0-1Meg)
// (limit[19:16] -> descriptor bits 48-51; limit[15:0] -> bits 0-15)
#define GDT_LIMIT(v) ((((u64)(v) & 0x000f0000) << 32) \
                      | (((u64)(v) & 0x0000ffff) << 0))
// GDT bits for segment limit (0-4Gig in 4K chunks)
#define GDT_GRANLIMIT(v) (GDT_G | GDT_LIMIT((v) >> 12))
248 static inline void sgdt(struct descloc_s *desc) {
249 asm("sgdtl %0" : "=m"(*desc));
251 static inline void lgdt(struct descloc_s *desc) {
252 asm("lgdtl %0" : : "m"(*desc) : "memory");
255 static inline u8 get_a20(void) {
256 return (inb(PORT_A20) & A20_ENABLE_BIT) != 0;
259 static inline u8 set_a20(u8 cond) {
260 u8 val = inb(PORT_A20);
261 outb((val & ~A20_ENABLE_BIT) | (cond ? A20_ENABLE_BIT : 0), PORT_A20);
262 return (val & A20_ENABLE_BIT) != 0;
// Out-of-line cpuid wrapper - presumably calls __cpuid() after checking
// cpuid availability; definition not visible in this chunk, confirm.
void cpuid(u32 index, u32 *eax, u32 *ebx, u32 *ecx, u32 *edx);
268 #endif // !__ASSEMBLY__