Reference-count design for elements of lists/arrays protected by RCU.

Reference counting on elements of lists which are protected by traditional
reader/writer spinlocks or semaphores is straightforward:

1.                              2.
add()                           search_and_reference()
{                               {
    alloc_object                    read_lock(&list_lock);
    ...                             search_for_element
    atomic_set(&el->rc, 1);         atomic_inc(&el->rc);
    write_lock(&list_lock);         ...
    add_element                     read_unlock(&list_lock);
    ...                             ...
    write_unlock(&list_lock);   }
}

3.                                      4.
release_referenced()                    delete()
{                                       {
    ...                                     write_lock(&list_lock);
    if (atomic_dec_and_test(&el->rc))       ...
        kfree(el);                          delete_element
    ...                                     write_unlock(&list_lock);
}                                           ...
                                            if (atomic_dec_and_test(&el->rc))
                                                kfree(el);
                                            ...
                                        }

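For concreteness, the locking-based pattern above might be written as
ordinary kernel C roughly as follows.  This is only an illustrative
sketch: "struct element" and the functions element_add(), element_get(),
element_put() and element_del() are names made up for this document, not
part of any kernel API.

#include <linux/atomic.h>
#include <linux/errno.h>
#include <linux/list.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

struct element {
        int key;
        atomic_t rc;                    /* reference count */
        struct list_head list;
};

static LIST_HEAD(element_list);
static DEFINE_RWLOCK(list_lock);

/* 1. add() */
static int element_add(int key)
{
        struct element *el = kmalloc(sizeof(*el), GFP_KERNEL);

        if (!el)
                return -ENOMEM;
        el->key = key;
        atomic_set(&el->rc, 1);         /* the list's reference */
        write_lock(&list_lock);
        list_add(&el->list, &element_list);
        write_unlock(&list_lock);
        return 0;
}

/* 2. search_and_reference() */
static struct element *element_get(int key)
{
        struct element *el;

        read_lock(&list_lock);
        list_for_each_entry(el, &element_list, list) {
                if (el->key == key) {
                        atomic_inc(&el->rc);    /* the caller's reference */
                        read_unlock(&list_lock);
                        return el;
                }
        }
        read_unlock(&list_lock);
        return NULL;
}

/* 3. release_referenced() */
static void element_put(struct element *el)
{
        if (atomic_dec_and_test(&el->rc))
                kfree(el);
}

/* 4. delete() */
static void element_del(struct element *el)
{
        write_lock(&list_lock);
        list_del(&el->list);
        write_unlock(&list_lock);
        element_put(el);                /* drop the list's reference */
}

The list itself holds one reference, established by the atomic_set() in
element_add() and dropped by element_del(), and every successful
element_get() must be balanced by a call to element_put().
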
If this list/array is made lock free using RCU as in changing the
write_lock() in add() and delete() to spin_lock() and changing read_lock()
in search_and_reference() to rcu_read_lock(), the atomic_inc() in
search_and_reference() could potentially hold a reference to an element
which has already been deleted from the list/array.  Use
atomic_inc_not_zero() in this scenario as follows:

1.                                      2.
add()                                   search_and_reference()
{                                       {
    alloc_object                            rcu_read_lock();
    ...                                     search_for_element
    atomic_set(&el->rc, 1);                 if (!atomic_inc_not_zero(&el->rc)) {
    spin_lock(&list_lock);                      rcu_read_unlock();
                                                return FAIL;
    add_element                             }
    ...                                     ...
    spin_unlock(&list_lock);                rcu_read_unlock();
}                                       }

3.                                      4.
release_referenced()                    delete()
{                                       {
    ...                                     spin_lock(&list_lock);
    if (atomic_dec_and_test(&el->rc))       ...
        call_rcu(&el->head, el_free);       delete_element
    ...                                     spin_unlock(&list_lock);
}                                           ...
                                            if (atomic_dec_and_test(&el->rc))
                                                call_rcu(&el->head, el_free);
                                            ...
                                        }

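Again as an illustrative sketch only, using the same made-up struct
element and function names as before (with a struct rcu_head added so
the element can be freed after a grace period), the RCU version might
look like this:

#include <linux/atomic.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/rculist.h>
#include <linux/rcupdate.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

struct element {
        int key;
        atomic_t rc;                    /* reference count */
        struct list_head list;
        struct rcu_head rcu;            /* for deferred freeing */
};

static LIST_HEAD(element_list);
static DEFINE_SPINLOCK(list_lock);      /* serializes updaters only */

static void element_free_rcu(struct rcu_head *rcu)
{
        kfree(container_of(rcu, struct element, rcu));
}

/* 1. add() */
static int element_add(int key)
{
        struct element *el = kmalloc(sizeof(*el), GFP_KERNEL);

        if (!el)
                return -ENOMEM;
        el->key = key;
        atomic_set(&el->rc, 1);         /* the list's reference */
        spin_lock(&list_lock);
        list_add_rcu(&el->list, &element_list);
        spin_unlock(&list_lock);
        return 0;
}

/* 2. search_and_reference() */
static struct element *element_get(int key)
{
        struct element *el;

        rcu_read_lock();
        list_for_each_entry_rcu(el, &element_list, list) {
                if (el->key == key) {
                        /* Fails if a deleter already dropped rc to zero. */
                        if (!atomic_inc_not_zero(&el->rc))
                                break;
                        rcu_read_unlock();
                        return el;
                }
        }
        rcu_read_unlock();
        return NULL;
}

/* 3. release_referenced() */
static void element_put(struct element *el)
{
        if (atomic_dec_and_test(&el->rc))
                call_rcu(&el->rcu, element_free_rcu);
}

/* 4. delete() */
static void element_del(struct element *el)
{
        spin_lock(&list_lock);
        list_del_rcu(&el->list);
        spin_unlock(&list_lock);
        element_put(el);                /* drop the list's reference */
}

Note that element_put() defers the kfree() with call_rcu() just as
delete() does: even after the count has reached zero, a concurrent
reader may still hold a pointer to the element and be about to attempt
atomic_inc_not_zero() on it, so the memory must not be reused until a
grace period has elapsed.
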
Sometimes, a reference to the element needs to be obtained in the
update (write) stream.  In such cases, atomic_inc_not_zero() might be
overkill, since we hold the update-side spinlock: delete() cannot remove
the element and drop the list's reference while we hold that lock, so
the count cannot fall to zero out from under us.  One might instead use
atomic_inc() in such cases.

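As a sketch, again reusing the made-up names from the examples above,
such an update-side lookup might look like this:

/*
 * Update-side lookup; the caller must already hold list_lock.  Because
 * delete() also needs list_lock, the list's reference cannot be dropped
 * while we hold it, so el->rc cannot be zero here and a plain
 * atomic_inc() suffices.
 */
static struct element *element_get_locked(int key)
{
        struct element *el;

        list_for_each_entry(el, &element_list, list) {
                if (el->key == key) {
                        atomic_inc(&el->rc);
                        return el;
                }
        }
        return NULL;
}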