atomic-irq.h — SuperH interrupt-masking atomic operation helpers (1.6 KB)
  1. #ifndef __ASM_SH_ATOMIC_IRQ_H
  2. #define __ASM_SH_ATOMIC_IRQ_H
  3. #include <linux/irqflags.h>
  4. /*
  5. * To get proper branch prediction for the main line, we must branch
  6. * forward to code at the end of this object's .text section, then
  7. * branch back to restart the operation.
  8. */
  9. #define ATOMIC_OP(op, c_op) \
  10. static inline void atomic_##op(int i, atomic_t *v) \
  11. { \
  12. unsigned long flags; \
  13. \
  14. raw_local_irq_save(flags); \
  15. v->counter c_op i; \
  16. raw_local_irq_restore(flags); \
  17. }
  18. #define ATOMIC_OP_RETURN(op, c_op) \
  19. static inline int atomic_##op##_return(int i, atomic_t *v) \
  20. { \
  21. unsigned long temp, flags; \
  22. \
  23. raw_local_irq_save(flags); \
  24. temp = v->counter; \
  25. temp c_op i; \
  26. v->counter = temp; \
  27. raw_local_irq_restore(flags); \
  28. \
  29. return temp; \
  30. }
  31. #define ATOMIC_FETCH_OP(op, c_op) \
  32. static inline int atomic_fetch_##op(int i, atomic_t *v) \
  33. { \
  34. unsigned long temp, flags; \
  35. \
  36. raw_local_irq_save(flags); \
  37. temp = v->counter; \
  38. v->counter c_op i; \
  39. raw_local_irq_restore(flags); \
  40. \
  41. return temp; \
  42. }
  43. #define ATOMIC_OPS(op, c_op) \
  44. ATOMIC_OP(op, c_op) \
  45. ATOMIC_OP_RETURN(op, c_op) \
  46. ATOMIC_FETCH_OP(op, c_op)
  47. ATOMIC_OPS(add, +=)
  48. ATOMIC_OPS(sub, -=)
  49. #undef ATOMIC_OPS
  50. #define ATOMIC_OPS(op, c_op) \
  51. ATOMIC_OP(op, c_op) \
  52. ATOMIC_FETCH_OP(op, c_op)
  53. ATOMIC_OPS(and, &=)
  54. ATOMIC_OPS(or, |=)
  55. ATOMIC_OPS(xor, ^=)
  56. #undef ATOMIC_OPS
  57. #undef ATOMIC_FETCH_OP
  58. #undef ATOMIC_OP_RETURN
  59. #undef ATOMIC_OP
  60. #endif /* __ASM_SH_ATOMIC_IRQ_H */