123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188 |
- #include <linux/export.h>
- #include <linux/lockref.h>
#if USE_CMPXCHG_LOCKREF

/*
 * Lockless fast path: speculatively update the whole 64-bit
 * { lock, count } word with cmpxchg64 while the spinlock half is
 * observed unlocked.  CODE runs on a private copy ("new", with the
 * pre-update value in "old"); SUCCESS runs only when the cmpxchg
 * committed.  Note that the cmpxchg() reloads the "old" value for
 * the failure case, so the loop retries against fresh state.
 *
 * Bound the number of retries: under heavy contention an unbounded
 * loop can livelock, so after 100 failed attempts we break out and
 * let the caller fall back to the spinlock slow path.
 */
#define CMPXCHG_LOOP(CODE, SUCCESS) do {					\
	int retry = 100;							\
	struct lockref old;							\
	BUILD_BUG_ON(sizeof(old) != 8);						\
	old.lock_count = READ_ONCE(lockref->lock_count);			\
	while (likely(arch_spin_value_unlocked(old.lock.rlock.raw_lock))) {	\
		struct lockref new = old, prev = old;				\
		CODE								\
		old.lock_count = cmpxchg64_relaxed(&lockref->lock_count,	\
						   old.lock_count,		\
						   new.lock_count);		\
		if (likely(old.lock_count == prev.lock_count)) {		\
			SUCCESS;						\
		}								\
		if (!--retry)							\
			break;							\
		cpu_relax_lowlatency();						\
	}									\
} while (0)

#else

/* No usable 64-bit cmpxchg: always fall through to the spinlock path. */
#define CMPXCHG_LOOP(CODE, SUCCESS) do { } while (0)

#endif
/**
 * lockref_get - Increments reference count unconditionally
 * @lockref: pointer to lockref structure
 *
 * This operation is only valid if you already hold a reference
 * to the object, so you know the count cannot be zero.
 */
void lockref_get(struct lockref *lockref)
{
	/* Lockless fast path; returns directly on cmpxchg success. */
	CMPXCHG_LOOP(
		new.count++;
	,
		return;
	);

	/* Slow path: take the spinlock and bump the count under it. */
	spin_lock(&lockref->lock);
	lockref->count++;
	spin_unlock(&lockref->lock);
}
EXPORT_SYMBOL(lockref_get);
- int lockref_get_not_zero(struct lockref *lockref)
- {
- int retval;
- CMPXCHG_LOOP(
- new.count++;
- if (old.count <= 0)
- return 0;
- ,
- return 1;
- );
- spin_lock(&lockref->lock);
- retval = 0;
- if (lockref->count > 0) {
- lockref->count++;
- retval = 1;
- }
- spin_unlock(&lockref->lock);
- return retval;
- }
- EXPORT_SYMBOL(lockref_get_not_zero);
/**
 * lockref_get_or_lock - Increments count unless the count is 0 or dead
 * @lockref: pointer to lockref structure
 *
 * Return: 1 if the count was incremented (spinlock NOT held on return),
 * or 0 if the count was <= 0 — in that case the spinlock IS still held
 * on return, and the caller is responsible for releasing it.
 */
int lockref_get_or_lock(struct lockref *lockref)
{
	/* Fast path; "break" drops to the locked path on count <= 0. */
	CMPXCHG_LOOP(
		new.count++;
		if (old.count <= 0)
			break;
	,
		return 1;
	);

	spin_lock(&lockref->lock);
	if (lockref->count <= 0)
		return 0;	/* note: lock deliberately left held */
	lockref->count++;
	spin_unlock(&lockref->lock);
	return 1;
}
EXPORT_SYMBOL(lockref_get_or_lock);
/**
 * lockref_put_return - Decrement reference count if possible
 * @lockref: pointer to lockref structure
 *
 * Decrement the reference count and return the new value.
 * Return: the new count on success, or -1 if the count was <= 0 or the
 * lockless update could not be performed (on builds without cmpxchg
 * support, CMPXCHG_LOOP is a no-op and this always returns -1).
 */
int lockref_put_return(struct lockref *lockref)
{
	CMPXCHG_LOOP(
		new.count--;
		if (old.count <= 0)
			return -1;
	,
		return new.count;
	);
	return -1;
}
EXPORT_SYMBOL(lockref_put_return);
/**
 * lockref_put_or_lock - decrements count unless count <= 1 before decrement
 * @lockref: pointer to lockref structure
 *
 * Return: 1 if the count was decremented (spinlock NOT held on return),
 * or 0 if the count was <= 1 — in that case the spinlock IS still held
 * on return, and the caller is responsible for releasing it.
 */
int lockref_put_or_lock(struct lockref *lockref)
{
	/* Fast path; "break" drops to the locked path on count <= 1. */
	CMPXCHG_LOOP(
		new.count--;
		if (old.count <= 1)
			break;
	,
		return 1;
	);

	spin_lock(&lockref->lock);
	if (lockref->count <= 1)
		return 0;	/* note: lock deliberately left held */
	lockref->count--;
	spin_unlock(&lockref->lock);
	return 1;
}
EXPORT_SYMBOL(lockref_put_or_lock);
/**
 * lockref_mark_dead - mark lockref dead
 * @lockref: pointer to lockref structure
 *
 * The caller must already hold lockref->lock (enforced by the assert).
 * The count is forced to a large negative value so subsequent
 * "not dead" checks (count < 0) fail.
 */
void lockref_mark_dead(struct lockref *lockref)
{
	assert_spin_locked(&lockref->lock);
	lockref->count = -128;	/* any negative count means dead */
}
EXPORT_SYMBOL(lockref_mark_dead);
- int lockref_get_not_dead(struct lockref *lockref)
- {
- int retval;
- CMPXCHG_LOOP(
- new.count++;
- if (old.count < 0)
- return 0;
- ,
- return 1;
- );
- spin_lock(&lockref->lock);
- retval = 0;
- if (lockref->count >= 0) {
- lockref->count++;
- retval = 1;
- }
- spin_unlock(&lockref->lock);
- return retval;
- }
- EXPORT_SYMBOL(lockref_get_not_dead);
|