a6eb9fe105
Now each architecture has its own dma_get_cache_alignment implementation. dma_get_cache_alignment returns the minimum DMA alignment. Architectures define it as ARCH_KMALLOC_MINALIGN (it's used to make sure that a malloc'ed buffer is DMA-safe; the buffer doesn't share a cache line with others). So we can unify the dma_get_cache_alignment implementations.

This patch: dma_get_cache_alignment() needs to know whether an architecture defines ARCH_KMALLOC_MINALIGN or not (i.e. whether the architecture has a DMA alignment restriction). However, slab.h defines ARCH_KMALLOC_MINALIGN if an architecture doesn't define it. Let's rename ARCH_KMALLOC_MINALIGN to ARCH_DMA_MINALIGN. ARCH_KMALLOC_MINALIGN is used only in the internals of slab/slob/slub (except for crypto).

Signed-off-by: FUJITA Tomonori <fujita.tomonori@lab.ntt.co.jp>
Cc: <linux-arch@vger.kernel.org>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
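With every architecture that has a DMA alignment restriction exporting ARCH_DMA_MINALIGN from its cache.h (as xtensa does below), the per-architecture dma_get_cache_alignment implementations can be replaced by one generic helper keyed off that macro. The following is only a minimal sketch of what such a unified helper could look like; the exact location and the fallback value of 1 are assumptions, not shown by this page:

/*
 * Sketch of a unified generic helper, assuming ARCH_DMA_MINALIGN is the
 * only signal an architecture provides about its DMA alignment needs.
 */
static inline int dma_get_cache_alignment(void)
{
#ifdef ARCH_DMA_MINALIGN
	return ARCH_DMA_MINALIGN;	/* arch has a DMA alignment restriction */
#endif
	return 1;			/* no restriction: any alignment is DMA-safe */
}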
		
			
				
	
	
		
35 lines · 969 B · C
/*
 * include/asm-xtensa/cache.h
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * (C) 2001 - 2005 Tensilica Inc.
 */

#ifndef _XTENSA_CACHE_H
#define _XTENSA_CACHE_H

#include <variant/core.h>

#define L1_CACHE_SHIFT	XCHAL_DCACHE_LINEWIDTH
#define L1_CACHE_BYTES	XCHAL_DCACHE_LINESIZE
#define SMP_CACHE_BYTES	L1_CACHE_BYTES

#define DCACHE_WAY_SIZE	(XCHAL_DCACHE_SIZE/XCHAL_DCACHE_WAYS)
#define ICACHE_WAY_SIZE	(XCHAL_ICACHE_SIZE/XCHAL_ICACHE_WAYS)
#define DCACHE_WAY_SHIFT (XCHAL_DCACHE_SETWIDTH + XCHAL_DCACHE_LINEWIDTH)
#define ICACHE_WAY_SHIFT (XCHAL_ICACHE_SETWIDTH + XCHAL_ICACHE_LINEWIDTH)

/* Maximum cache size per way. */
#if DCACHE_WAY_SIZE >= ICACHE_WAY_SIZE
# define CACHE_WAY_SIZE DCACHE_WAY_SIZE
#else
# define CACHE_WAY_SIZE ICACHE_WAY_SIZE
#endif

#define ARCH_DMA_MINALIGN	L1_CACHE_BYTES

#endif	/* _XTENSA_CACHE_H */
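The way-size macros above come in two equivalent forms: SIZE/WAYS, and 2^(SETWIDTH + LINEWIDTH), since a way holds (number of sets) × (line size) bytes. The small standalone check below uses hypothetical XCHAL_* values (purely illustrative, not taken from any real Xtensa variant/core.h) to show that the two computations agree:

#include <stdio.h>

/* Hypothetical core parameters -- illustrative only, not a real variant/core.h. */
#define XCHAL_DCACHE_SIZE	16384	/* 16 KiB data cache          */
#define XCHAL_DCACHE_WAYS	2	/* 2-way set associative      */
#define XCHAL_DCACHE_LINEWIDTH	5	/* log2(line size) -> 32 B    */
#define XCHAL_DCACHE_SETWIDTH	8	/* log2(number of sets) = 256 */

#define DCACHE_WAY_SIZE	 (XCHAL_DCACHE_SIZE/XCHAL_DCACHE_WAYS)
#define DCACHE_WAY_SHIFT (XCHAL_DCACHE_SETWIDTH + XCHAL_DCACHE_LINEWIDTH)

int main(void)
{
	/* 16384 / 2 = 8192 bytes per way ... */
	printf("DCACHE_WAY_SIZE      = %d\n", DCACHE_WAY_SIZE);
	/* ... matching sets * line size = 2^(8 + 5) = 8192. */
	printf("1 << DCACHE_WAY_SHIFT = %d\n", 1 << DCACHE_WAY_SHIFT);
	return 0;
}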