Reference-count design for elements of lists/arrays protected by RCU.

Reference counting on elements of lists which are protected by traditional
reader/writer spinlocks or semaphores is straightforward:

1.                                      2.
add()                                   search_and_reference()
{                                       {
    alloc_object                            read_lock(&list_lock);
    ...                                     search_for_element
    atomic_set(&el->rc, 1);                 atomic_inc(&el->rc);
    write_lock(&list_lock);                 ...
    add_element                             read_unlock(&list_lock);
    ...                                     ...
    write_unlock(&list_lock);           }
}

3.                                      4.
release_referenced()                    delete()
{                                       {
    ...                                     write_lock(&list_lock);
    if (atomic_dec_and_test(&el->rc))       ...
        kfree(el);                          delete_element
    ...                                     write_unlock(&list_lock);
}                                           ...
                                            if (atomic_dec_and_test(&el->rc))
                                                kfree(el);
                                            ...
                                        }
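
For concreteness, the element and the locking that these snippets assume
might be declared roughly as follows.  This is only an illustrative sketch;
struct el, el_list and the key field are made-up names, not an existing
kernel interface:

struct el {
    struct list_head list;          /* links the element into el_list */
    long key;                       /* whatever identifies the element */
    atomic_t rc;                    /* reference count, set to 1 by add() */
};

static LIST_HEAD(el_list);          /* the list being protected */
static DEFINE_RWLOCK(list_lock);    /* reader/writer lock used above */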

If this list/array is made lock-free using RCU, as by changing the
write_lock() in add() and delete() to spin_lock() and changing the
read_lock() in search_and_reference() to rcu_read_lock(), the atomic_inc()
in search_and_reference() could potentially take a reference to an element
which has already been deleted from the list/array.  Use
atomic_inc_not_zero() in this scenario as follows:

1.                                      2.
add()                                   search_and_reference()
{                                       {
    alloc_object                            rcu_read_lock();
    ...                                     search_for_element
    atomic_set(&el->rc, 1);                 if (!atomic_inc_not_zero(&el->rc)) {
    spin_lock(&list_lock);                      rcu_read_unlock();
                                                return FAIL;
    add_element                             }
    ...                                     ...
    spin_unlock(&list_lock);                rcu_read_unlock();
}                                       }

3.                                      4.
release_referenced()                    delete()
{                                       {
    ...                                     spin_lock(&list_lock);
    if (atomic_dec_and_test(&el->rc))       ...
        call_rcu(&el->head, el_free);       delete_element
    ...                                     spin_unlock(&list_lock);
}                                           ...
                                            if (atomic_dec_and_test(&el->rc))
                                                call_rcu(&el->head, el_free);
                                            ...
                                        }
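
The RCU version additionally assumes that the element carries a
struct rcu_head, that list_lock is now a spinlock, and that el_free()
reclaims the memory once a grace period has elapsed.  A minimal sketch
along the lines of the declarations above (the names are again
illustrative, not part of any kernel API):

struct el {
    struct list_head list;
    struct rcu_head head;           /* handed to call_rcu() above */
    atomic_t rc;
    long key;
};

static LIST_HEAD(el_list);
static DEFINE_SPINLOCK(list_lock);  /* plain spinlock, per the RCU version */

static void el_free(struct rcu_head *rh)
{
    /* Runs only after a grace period, so no RCU reader still sees the element. */
    kfree(container_of(rh, struct el, head));
}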

Sometimes, a reference to the element needs to be obtained in the
update (write) stream.  In such cases, atomic_inc_not_zero() might be
overkill, since we hold the update-side spinlock.  One might instead
use atomic_inc() in such cases.
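
For example, an updater that already holds list_lock could take its
reference along these lines; search_and_reference_locked() is a
hypothetical helper built on the declarations sketched earlier, not an
existing kernel function:

struct el *search_and_reference_locked(long key)
{
    struct el *el;

    spin_lock(&list_lock);
    list_for_each_entry(el, &el_list, list) {
        if (el->key == key) {
            /*
             * The element is still on the list, so it still holds the
             * list's reference; delete() cannot drop that reference
             * without first acquiring list_lock.  Plain atomic_inc()
             * is therefore sufficient here.
             */
            atomic_inc(&el->rc);
            spin_unlock(&list_lock);
            return el;
        }
    }
    spin_unlock(&list_lock);
    return NULL;
}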