/*
 * SH-4A LL/SC-based atomic bit operations (movli.l / movco.l).
 */
#ifndef __ASM_SH_BITOPS_LLSC_H
#define __ASM_SH_BITOPS_LLSC_H
/*
 * set_bit - atomically set bit @nr in the bitmap at @addr.
 *
 * LL/SC retry loop: movli.l loads the word and opens the reserved
 * sequence, movco.l stores it back only if the reservation still holds;
 * on failure the T flag is clear and "bf 1b" retries.  The "=&z"
 * constraint pins @tmp to r0, which movli.l/movco.l require.
 */
static inline void set_bit(int nr, volatile void *addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;			/* word index: 32 bits per word */
	mask = 1 << (nr & 0x1f);	/* bit position within that word */

	__asm__ __volatile__ (
		"1: \n\t"
		"movli.l @%1, %0 ! set_bit \n\t"
		"or %2, %0 \n\t"
		"movco.l %0, @%1 \n\t"
		"bf 1b \n\t"
		: "=&z" (tmp)
		: "r" (a), "r" (mask)
		: "t", "memory"	/* clobbers T flag; orders against memory */
	);
}
/*
 * clear_bit - atomically clear bit @nr in the bitmap at @addr.
 *
 * Same movli.l/movco.l retry loop as set_bit; the inverted mask is
 * computed in C (~mask in the input operands) and ANDed in asm so the
 * target bit is cleared while all other bits are preserved.
 */
static inline void clear_bit(int nr, volatile void *addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;			/* word index: 32 bits per word */
	mask = 1 << (nr & 0x1f);	/* bit position within that word */

	__asm__ __volatile__ (
		"1: \n\t"
		"movli.l @%1, %0 ! clear_bit \n\t"
		"and %2, %0 \n\t"
		"movco.l %0, @%1 \n\t"
		"bf 1b \n\t"
		: "=&z" (tmp)
		: "r" (a), "r" (~mask)	/* inverted mask: clears only bit nr */
		: "t", "memory"
	);
}
/*
 * change_bit - atomically toggle bit @nr in the bitmap at @addr.
 *
 * Same movli.l/movco.l retry loop as set_bit, with XOR as the update
 * so the target bit is flipped regardless of its current value.
 */
static inline void change_bit(int nr, volatile void *addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;			/* word index: 32 bits per word */
	mask = 1 << (nr & 0x1f);	/* bit position within that word */

	__asm__ __volatile__ (
		"1: \n\t"
		"movli.l @%1, %0 ! change_bit \n\t"
		"xor %2, %0 \n\t"
		"movco.l %0, @%1 \n\t"
		"bf 1b \n\t"
		: "=&z" (tmp)
		: "r" (a), "r" (mask)
		: "t", "memory"
	);
}
  57. static inline int test_and_set_bit(int nr, volatile void *addr)
  58. {
  59. int mask, retval;
  60. volatile unsigned int *a = addr;
  61. unsigned long tmp;
  62. a += nr >> 5;
  63. mask = 1 << (nr & 0x1f);
  64. __asm__ __volatile__ (
  65. "1: \n\t"
  66. "movli.l @%2, %0 ! test_and_set_bit \n\t"
  67. "mov %0, %1 \n\t"
  68. "or %3, %0 \n\t"
  69. "movco.l %0, @%2 \n\t"
  70. "bf 1b \n\t"
  71. "and %3, %1 \n\t"
  72. : "=&z" (tmp), "=&r" (retval)
  73. : "r" (a), "r" (mask)
  74. : "t", "memory"
  75. );
  76. return retval != 0;
  77. }
/*
 * test_and_clear_bit - atomically clear bit @nr and return its old value.
 *
 * The old word is snapshotted into @retval inside the LL/SC window
 * before the AND with ~mask (operand %4) clears the bit.  After the
 * store succeeds, "and %3, %1" isolates the tested bit and "synco"
 * provides a full memory barrier, as required of value-returning
 * atomic operations.
 */
static inline int test_and_clear_bit(int nr, volatile void *addr)
{
	int mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;			/* word index: 32 bits per word */
	mask = 1 << (nr & 0x1f);	/* bit position within that word */

	__asm__ __volatile__ (
		"1: \n\t"
		"movli.l @%2, %0 ! test_and_clear_bit \n\t"
		"mov %0, %1 \n\t"
		"and %4, %0 \n\t"
		"movco.l %0, @%2 \n\t"
		"bf 1b \n\t"
		"and %3, %1 \n\t"
		"synco \n\t"
		: "=&z" (tmp), "=&r" (retval)
		: "r" (a), "r" (mask), "r" (~mask)	/* %3 = mask, %4 = ~mask */
		: "t", "memory"
	);

	return retval != 0;
}
/*
 * test_and_change_bit - atomically toggle bit @nr and return its old value.
 *
 * The old word is snapshotted into @retval inside the LL/SC window
 * before the XOR flips the bit.  After the store succeeds,
 * "and %3, %1" isolates the tested bit and "synco" provides a full
 * memory barrier, as required of value-returning atomic operations.
 */
static inline int test_and_change_bit(int nr, volatile void *addr)
{
	int mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;			/* word index: 32 bits per word */
	mask = 1 << (nr & 0x1f);	/* bit position within that word */

	__asm__ __volatile__ (
		"1: \n\t"
		"movli.l @%2, %0 ! test_and_change_bit \n\t"
		"mov %0, %1 \n\t"
		"xor %3, %0 \n\t"
		"movco.l %0, @%2 \n\t"
		"bf 1b \n\t"
		"and %3, %1 \n\t"
		"synco \n\t"
		: "=&z" (tmp), "=&r" (retval)
		: "r" (a), "r" (mask)
		: "t", "memory"
	);

	return retval != 0;
}
#include <asm-generic/bitops/non-atomic.h>

#endif /* __ASM_SH_BITOPS_LLSC_H */