Skip to content
Snippets Groups Projects
Select Git revision
  • d472d9d98b463dd7a04f2bcdeafe4261686ce6ab
  • vme-testing default
  • ci-test
  • master
  • remoteproc
  • am625-sk-ov5640
  • pcal6534-upstreaming
  • lps22df-upstreaming
  • msc-upstreaming
  • imx8mp
  • iio/noa1305
  • vme-next
  • vme-next-4.14-rc4
  • v4.14-rc4
  • v4.14-rc3
  • v4.14-rc2
  • v4.14-rc1
  • v4.13
  • vme-next-4.13-rc7
  • v4.13-rc7
  • v4.13-rc6
  • v4.13-rc5
  • v4.13-rc4
  • v4.13-rc3
  • v4.13-rc2
  • v4.13-rc1
  • v4.12
  • v4.12-rc7
  • v4.12-rc6
  • v4.12-rc5
  • v4.12-rc4
  • v4.12-rc3
32 results

lockref.c

Blame
  • lockref.c 2.66 KiB
    #include <linux/export.h>
    #include <linux/lockref.h>
    
    #ifdef CONFIG_CMPXCHG_LOCKREF
    
    /*
     * CMPXCHG_LOOP - lockless update attempt on a struct lockref.
     *
     * Snapshots the combined spinlock+count word and, as long as the
     * embedded spinlock is observed unlocked, applies CODE to a scratch
     * copy ("new") and tries to publish it with a 64-bit cmpxchg().  On
     * success, SUCCESS runs (typically a "return").  Note that the
     * "cmpxchg()" reloads the "old" value for the failure case, so every
     * retry operates on fresh data.  The loop is abandoned (falling
     * through to the caller's spinlock slow path) once the lock is seen
     * held.
     *
     * READ_ONCE() replaces the deprecated ACCESS_ONCE() for the initial
     * snapshot: it prevents the compiler from tearing or refetching the
     * load of the shared word.
     */
    #define CMPXCHG_LOOP(CODE, SUCCESS) do {					\
    	struct lockref old;							\
    	BUILD_BUG_ON(sizeof(old) != 8);						\
    	old.lock_count = READ_ONCE(lockref->lock_count);			\
    	while (likely(arch_spin_value_unlocked(old.lock.rlock.raw_lock))) {  	\
    		struct lockref new = old, prev = old;				\
    		CODE								\
    		old.lock_count = cmpxchg(&lockref->lock_count,			\
    					 old.lock_count, new.lock_count);	\
    		if (likely(old.lock_count == prev.lock_count)) {		\
    			SUCCESS;						\
    		}								\
    		cpu_relax();							\
    	}									\
    } while (0)
    
    #else
    
    /* No usable 64-bit cmpxchg: callers always take the spinlock path. */
    #define CMPXCHG_LOOP(CODE, SUCCESS) do { } while (0)
    
    #endif
    
    /**
     * lockref_get - Increments reference count unconditionally
     * @lockref: pointer to lockref structure
     *
     * This operation is only valid if you already hold a reference
     * to the object, so you know the count cannot be zero.
     */
    void lockref_get(struct lockref *lockref)
    {
    	/*
    	 * Fast path: lockless increment via cmpxchg.  Expands to a
    	 * no-op without CONFIG_CMPXCHG_LOCKREF, in which case we fall
    	 * straight through to the spinlock path below.
    	 */
    	CMPXCHG_LOOP(
    		new.count++;
    	,
    		return;
    	);
    
    	/* Slow path: the lock was observed held, or cmpxchg kept failing. */
    	spin_lock(&lockref->lock);
    	lockref->count++;
    	spin_unlock(&lockref->lock);
    }
    EXPORT_SYMBOL(lockref_get);
    
    /**
     * lockref_get_not_zero - Increments count unless the count is 0
     * @lockref: pointer to lockref structure
     * Return: 1 if count updated successfully or 0 if count was zero
     */
    int lockref_get_not_zero(struct lockref *lockref)
    {
    	int success = 0;
    
    	CMPXCHG_LOOP(
    		new.count++;
    		if (!old.count)
    			return 0;
    	,
    		return 1;
    	);
    
    	/* Lockless path unavailable or contended: fall back to the lock. */
    	spin_lock(&lockref->lock);
    	if (lockref->count) {
    		lockref->count++;
    		success = 1;
    	}
    	spin_unlock(&lockref->lock);
    
    	return success;
    }
    EXPORT_SYMBOL(lockref_get_not_zero);
    
    /**
     * lockref_get_or_lock - Increments count unless the count is 0
     * @lockref: pointer to lockref structure
     * Return: 1 if count updated successfully or 0 if count was zero
     * and we got the lock instead.
     */
    int lockref_get_or_lock(struct lockref *lockref)
    {
    	CMPXCHG_LOOP(
    		new.count++;
    		if (!old.count)
    			break;
    	,
    		return 1;
    	);
    
    	spin_lock(&lockref->lock);
    	if (lockref->count) {
    		lockref->count++;
    		spin_unlock(&lockref->lock);
    		return 1;
    	}
    	/* Count was zero: hand the (still held) spinlock to the caller. */
    	return 0;
    }
    EXPORT_SYMBOL(lockref_get_or_lock);
    
    /**
     * lockref_put_or_lock - decrements count unless count <= 1 before decrement
     * @lockref: pointer to lockref structure
     * Return: 1 if count updated successfully or 0 if count <= 1 and lock taken
     */
    int lockref_put_or_lock(struct lockref *lockref)
    {
    	CMPXCHG_LOOP(
    		new.count--;
    		if (old.count <= 1)
    			break;
    	,
    		return 1;
    	);
    
    	spin_lock(&lockref->lock);
    	if (lockref->count > 1) {
    		lockref->count--;
    		spin_unlock(&lockref->lock);
    		return 1;
    	}
    	/* Count is <= 1: leave it untouched, return with the lock held. */
    	return 0;
    }
    EXPORT_SYMBOL(lockref_put_or_lock);