Lab exercise code for the Operating Systems (《操作系统》) course.

302 lines · 8.8 KiB · last updated 12 years ago
  1. #ifndef __LIBS_X86_H__
  2. #define __LIBS_X86_H__
  3. #include <defs.h>
/* do_div - 64-by-32 unsigned division for i386, in the style of Linux's
 * asm/div64.h.  Divides the 64-bit lvalue (n) in place by the 32-bit (base)
 * and evaluates to the 32-bit remainder.
 * The high word is pre-divided so that the final `divl` cannot overflow
 * (its edx:eax dividend stays below __base * 2^32). */
#define do_div(n, base) ({                                          \
    unsigned long __upper, __low, __high, __mod, __base;            \
    __base = (base);                                                \
    /* split n into edx:eax via the i386 "A" register pair */       \
    asm ("" : "=a" (__low), "=d" (__high) : "A" (n));               \
    __upper = __high;                                               \
    if (__high != 0) {                                              \
        /* divide the high word first; keep its remainder as the */ \
        /* high part of the dividend for the low-word division   */ \
        __upper = __high % __base;                                  \
        __high = __high / __base;                                   \
    }                                                               \
    /* divl: edx:eax / __base -> quotient in eax, remainder in edx */ \
    asm ("divl %2" : "=a" (__low), "=d" (__mod)                     \
        : "rm" (__base), "0" (__low), "1" (__upper));               \
    /* recombine the quotient words back into n */                  \
    asm ("" : "=A" (n) : "a" (__low), "d" (__high));                \
    __mod;                                                          \
})
/* barrier - compiler-level memory barrier: the "memory" clobber stops the
 * compiler from reordering or caching memory accesses across this point.
 * (No CPU fence instruction is emitted.) */
#define barrier() __asm__ __volatile__ ("" ::: "memory")
/* Forward declarations: port I/O, stack and debug-register helpers.
 * All are forced inline (always_inline); definitions follow below. */
static inline uint8_t inb(uint16_t port) __attribute__((always_inline));
static inline void insl(uint32_t port, void *addr, int cnt) __attribute__((always_inline));
static inline void outb(uint16_t port, uint8_t data) __attribute__((always_inline));
static inline void outw(uint16_t port, uint16_t data) __attribute__((always_inline));
static inline void outsl(uint32_t port, const void *addr, int cnt) __attribute__((always_inline));
static inline uint32_t read_ebp(void) __attribute__((always_inline));
static inline void breakpoint(void) __attribute__((always_inline));
static inline uint32_t read_dr(unsigned regnum) __attribute__((always_inline));
static inline void write_dr(unsigned regnum, uint32_t value) __attribute__((always_inline));
/* Pseudo-descriptors used for LGDT, LLDT (not used) and LIDT instructions.
 * Packed so the CPU sees the 16-bit limit immediately followed by the base
 * address, with no padding in between. */
struct pseudodesc {
    uint16_t pd_lim;        // Limit
    uintptr_t pd_base;      // Base address
} __attribute__ ((packed));
/* Forward declarations: descriptor tables, interrupt flag, task register,
 * EFLAGS, control registers and TLB maintenance. */
static inline void lidt(struct pseudodesc *pd) __attribute__((always_inline));
static inline void sti(void) __attribute__((always_inline));
static inline void cli(void) __attribute__((always_inline));
static inline void ltr(uint16_t sel) __attribute__((always_inline));
static inline uint32_t read_eflags(void) __attribute__((always_inline));
static inline void write_eflags(uint32_t eflags) __attribute__((always_inline));
static inline void lcr0(uintptr_t cr0) __attribute__((always_inline));
static inline void lcr3(uintptr_t cr3) __attribute__((always_inline));
static inline uintptr_t rcr0(void) __attribute__((always_inline));
static inline uintptr_t rcr1(void) __attribute__((always_inline));
static inline uintptr_t rcr2(void) __attribute__((always_inline));
static inline uintptr_t rcr3(void) __attribute__((always_inline));
static inline void invlpg(void *addr) __attribute__((always_inline));
/* inb - read one byte from I/O port `port`. */
static inline uint8_t
inb(uint16_t port) {
    uint8_t data;
    asm volatile ("inb %1, %0" : "=a" (data) : "d" (port) : "memory");
    return data;
}
  52. static inline void
  53. insl(uint32_t port, void *addr, int cnt) {
  54. asm volatile (
  55. "cld;"
  56. "repne; insl;"
  57. : "=D" (addr), "=c" (cnt)
  58. : "d" (port), "0" (addr), "1" (cnt)
  59. : "memory", "cc");
  60. }
/* outb - write the byte `data` to I/O port `port`. */
static inline void
outb(uint16_t port, uint8_t data) {
    asm volatile ("outb %0, %1" :: "a" (data), "d" (port) : "memory");
}
/* outw - write the 16-bit word `data` to I/O port `port`. */
static inline void
outw(uint16_t port, uint16_t data) {
    asm volatile ("outw %0, %1" :: "a" (data), "d" (port) : "memory");
}
  69. static inline void
  70. outsl(uint32_t port, const void *addr, int cnt) {
  71. asm volatile (
  72. "cld;"
  73. "repne; outsl;"
  74. : "=S" (addr), "=c" (cnt)
  75. : "d" (port), "0" (addr), "1" (cnt)
  76. : "memory", "cc");
  77. }
/* read_ebp - return the current value of the EBP (frame pointer) register.
 * Used e.g. to walk stack frames for backtraces. */
static inline uint32_t
read_ebp(void) {
    uint32_t ebp;
    asm volatile ("movl %%ebp, %0" : "=r" (ebp));
    return ebp;
}
/* breakpoint - trigger a debug breakpoint trap (int $3 -> vector 3). */
static inline void
breakpoint(void) {
    asm volatile ("int $3");
}
/* read_dr - read x86 debug register `regnum`.
 * Only DR0-DR3 (breakpoint addresses), DR6 (status) and DR7 (control)
 * exist; any other regnum returns 0. */
static inline uint32_t
read_dr(unsigned regnum) {
    uint32_t value = 0;
    switch (regnum) {
    case 0: asm volatile ("movl %%db0, %0" : "=r" (value)); break;
    case 1: asm volatile ("movl %%db1, %0" : "=r" (value)); break;
    case 2: asm volatile ("movl %%db2, %0" : "=r" (value)); break;
    case 3: asm volatile ("movl %%db3, %0" : "=r" (value)); break;
    case 6: asm volatile ("movl %%db6, %0" : "=r" (value)); break;
    case 7: asm volatile ("movl %%db7, %0" : "=r" (value)); break;
    }
    return value;
}
  101. static void
  102. write_dr(unsigned regnum, uint32_t value) {
  103. switch (regnum) {
  104. case 0: asm volatile ("movl %0, %%db0" :: "r" (value)); break;
  105. case 1: asm volatile ("movl %0, %%db1" :: "r" (value)); break;
  106. case 2: asm volatile ("movl %0, %%db2" :: "r" (value)); break;
  107. case 3: asm volatile ("movl %0, %%db3" :: "r" (value)); break;
  108. case 6: asm volatile ("movl %0, %%db6" :: "r" (value)); break;
  109. case 7: asm volatile ("movl %0, %%db7" :: "r" (value)); break;
  110. }
  111. }
/* lidt - load the interrupt descriptor table register from *pd. */
static inline void
lidt(struct pseudodesc *pd) {
    asm volatile ("lidt (%0)" :: "r" (pd) : "memory");
}
/* sti - enable maskable hardware interrupts (set EFLAGS.IF). */
static inline void
sti(void) {
    asm volatile ("sti");
}
/* cli - disable maskable hardware interrupts (clear EFLAGS.IF).
 * The "memory" clobber keeps the compiler from moving memory accesses
 * across the critical-section boundary. */
static inline void
cli(void) {
    asm volatile ("cli" ::: "memory");
}
/* ltr - load the task register with TSS segment selector `sel`. */
static inline void
ltr(uint16_t sel) {
    asm volatile ("ltr %0" :: "r" (sel) : "memory");
}
/* read_eflags - return the current EFLAGS register (via pushfl/popl). */
static inline uint32_t
read_eflags(void) {
    uint32_t eflags;
    asm volatile ("pushfl; popl %0" : "=r" (eflags));
    return eflags;
}
/* write_eflags - load `eflags` into the EFLAGS register (via pushl/popfl). */
static inline void
write_eflags(uint32_t eflags) {
    asm volatile ("pushl %0; popfl" :: "r" (eflags));
}
/* lcr0 - load `cr0` into control register CR0 (paging/protection bits). */
static inline void
lcr0(uintptr_t cr0) {
    asm volatile ("mov %0, %%cr0" :: "r" (cr0) : "memory");
}
/* lcr3 - load `cr3` into control register CR3 (page-directory base);
 * as a side effect this flushes the non-global TLB entries. */
static inline void
lcr3(uintptr_t cr3) {
    asm volatile ("mov %0, %%cr3" :: "r" (cr3) : "memory");
}
/* rcr0 - read control register CR0. */
static inline uintptr_t
rcr0(void) {
    uintptr_t cr0;
    asm volatile ("mov %%cr0, %0" : "=r" (cr0) :: "memory");
    return cr0;
}
/* rcr1 - "read" control register CR1.
 * NOTE(review): CR1 is reserved on x86; executing this mov raises an
 * invalid-opcode exception (#UD).  Presumably kept for symmetry or to
 * deliberately trigger a fault in a lab exercise — confirm intended use. */
static inline uintptr_t
rcr1(void) {
    uintptr_t cr1;
    asm volatile ("mov %%cr1, %0" : "=r" (cr1) :: "memory");
    return cr1;
}
/* rcr2 - read control register CR2 (faulting linear address after a
 * page fault). */
static inline uintptr_t
rcr2(void) {
    uintptr_t cr2;
    asm volatile ("mov %%cr2, %0" : "=r" (cr2) :: "memory");
    return cr2;
}
/* rcr3 - read control register CR3 (page-directory base). */
static inline uintptr_t
rcr3(void) {
    uintptr_t cr3;
    asm volatile ("mov %%cr3, %0" : "=r" (cr3) :: "memory");
    return cr3;
}
/* invlpg - invalidate the TLB entry for the page containing `addr`. */
static inline void
invlpg(void *addr) {
    asm volatile ("invlpg (%0)" :: "r" (addr) : "memory");
}
/* Forward declarations: architecture-optimized string/memory primitives
 * (x86 string instructions), guarded below by __HAVE_ARCH_* macros. */
static inline int __strcmp(const char *s1, const char *s2) __attribute__((always_inline));
static inline char *__strcpy(char *dst, const char *src) __attribute__((always_inline));
static inline void *__memset(void *s, char c, size_t n) __attribute__((always_inline));
static inline void *__memmove(void *dst, const void *src, size_t n) __attribute__((always_inline));
static inline void *__memcpy(void *dst, const void *src, size_t n) __attribute__((always_inline));
  179. #ifndef __HAVE_ARCH_STRCMP
  180. #define __HAVE_ARCH_STRCMP
  181. static inline int
  182. __strcmp(const char *s1, const char *s2) {
  183. int d0, d1, ret;
  184. asm volatile (
  185. "1: lodsb;"
  186. "scasb;"
  187. "jne 2f;"
  188. "testb %%al, %%al;"
  189. "jne 1b;"
  190. "xorl %%eax, %%eax;"
  191. "jmp 3f;"
  192. "2: sbbl %%eax, %%eax;"
  193. "orb $1, %%al;"
  194. "3:"
  195. : "=a" (ret), "=&S" (d0), "=&D" (d1)
  196. : "1" (s1), "2" (s2)
  197. : "memory");
  198. return ret;
  199. }
  200. #endif /* __HAVE_ARCH_STRCMP */
  201. #ifndef __HAVE_ARCH_STRCPY
  202. #define __HAVE_ARCH_STRCPY
  203. static inline char *
  204. __strcpy(char *dst, const char *src) {
  205. int d0, d1, d2;
  206. asm volatile (
  207. "1: lodsb;"
  208. "stosb;"
  209. "testb %%al, %%al;"
  210. "jne 1b;"
  211. : "=&S" (d0), "=&D" (d1), "=&a" (d2)
  212. : "0" (src), "1" (dst) : "memory");
  213. return dst;
  214. }
  215. #endif /* __HAVE_ARCH_STRCPY */
  216. #ifndef __HAVE_ARCH_MEMSET
  217. #define __HAVE_ARCH_MEMSET
  218. static inline void *
  219. __memset(void *s, char c, size_t n) {
  220. int d0, d1;
  221. asm volatile (
  222. "rep; stosb;"
  223. : "=&c" (d0), "=&D" (d1)
  224. : "0" (n), "a" (c), "1" (s)
  225. : "memory");
  226. return s;
  227. }
  228. #endif /* __HAVE_ARCH_MEMSET */
#ifndef __HAVE_ARCH_MEMMOVE
#define __HAVE_ARCH_MEMMOVE
/* __memmove - copy n bytes from src to dst, correct even when the
 * regions overlap.  When dst lies below src a forward copy (__memcpy)
 * is safe; otherwise copy backwards from the last byte by setting the
 * direction flag.  Returns dst.
 * NOTE(review): for n == 0 the backward path still computes src - 1 /
 * dst - 1 (never dereferenced, since ECX = 0) — formally out-of-range
 * pointer arithmetic; harmless on this target. */
static inline void *
__memmove(void *dst, const void *src, size_t n) {
    if (dst < src) {
        return __memcpy(dst, src, n);
    }
    int d0, d1, d2;
    asm volatile (
        "std;"                  /* DF = 1: movsb decrements ESI/EDI */
        "rep; movsb;"           /* copy n bytes, last byte first */
        "cld;"                  /* restore DF for subsequent string ops */
        : "=&c" (d0), "=&S" (d1), "=&D" (d2)
        : "0" (n), "1" (n - 1 + src), "2" (n - 1 + dst)  /* void* arithmetic: GNU extension */
        : "memory");
    return dst;
}
#endif /* __HAVE_ARCH_MEMMOVE */
  247. #ifndef __HAVE_ARCH_MEMCPY
  248. #define __HAVE_ARCH_MEMCPY
  249. static inline void *
  250. __memcpy(void *dst, const void *src, size_t n) {
  251. int d0, d1, d2;
  252. asm volatile (
  253. "rep; movsl;"
  254. "movl %4, %%ecx;"
  255. "andl $3, %%ecx;"
  256. "jz 1f;"
  257. "rep; movsb;"
  258. "1:"
  259. : "=&c" (d0), "=&D" (d1), "=&S" (d2)
  260. : "0" (n / 4), "g" (n), "1" (dst), "2" (src)
  261. : "memory");
  262. return dst;
  263. }
  264. #endif /* __HAVE_ARCH_MEMCPY */
  265. #endif /* !__LIBS_X86_H__ */