/*	$NetBSD: cache_ls2.h,v 1.4 2020/07/26 08:08:41 simonb Exp $	*/

/*-
 * Copyright (c) 2009 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Matt Thomas <matt (at) 3am-software.com>.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef _MIPS_CACHE_LS2_H_
#define	_MIPS_CACHE_LS2_H_

/*
 * Cache definitions/operations for Loongson-style caches.
 */
#define	CACHEOP_LS2_I_INDEX_INV		0
#define	CACHEOP_LS2_D_INDEX_WB_INV	1
#define	CACHEOP_LS2_S_INDEX_WB_INV	3
#define	CACHEOP_LS2_D_HIT_INV		17
#define	CACHEOP_LS2_S_HIT_INV		19
#define	CACHEOP_LS2_D_HIT_WB_INV	21
#define	CACHEOP_LS2_S_HIT_WB_INV	23
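
/*
 * These values follow the standard MIPS cache instruction encoding:
 * the low two bits of the op select the cache (0 = primary I,
 * 1 = primary D, 3 = secondary) and the upper three bits select the
 * operation (0 = index invalidate/writeback-invalidate, 4 = hit
 * invalidate, 5 = hit writeback-invalidate).  For example,
 * CACHEOP_LS2_D_HIT_WB_INV = 21 = (5 << 2) | 1.
 */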

#if !defined(_LOCORE)
/*
 * For the index operations, the way is encoded in the bottom 2 bits
 * of the VA; the "_4way" macros below therefore issue the op at byte
 * offsets 0 through 3 within a line to touch all four ways.
 */

/* Apply "op" to eight 32-byte lines in each of the four ways. */
#define	cache_op_ls2_8line_4way(va, op)					\
	__asm volatile(							\
                ".set noreorder					\n\t"	\
                "cache %1, 0x00(%0); cache %1, 0x20(%0)		\n\t"	\
                "cache %1, 0x40(%0); cache %1, 0x60(%0)		\n\t"	\
                "cache %1, 0x80(%0); cache %1, 0xa0(%0)		\n\t"	\
                "cache %1, 0xc0(%0); cache %1, 0xe0(%0)		\n\t"	\
                "cache %1, 0x01(%0); cache %1, 0x21(%0)		\n\t"	\
                "cache %1, 0x41(%0); cache %1, 0x61(%0)		\n\t"	\
                "cache %1, 0x81(%0); cache %1, 0xa1(%0)		\n\t"	\
                "cache %1, 0xc1(%0); cache %1, 0xe1(%0)		\n\t"	\
                "cache %1, 0x02(%0); cache %1, 0x22(%0)		\n\t"	\
                "cache %1, 0x42(%0); cache %1, 0x62(%0)		\n\t"	\
                "cache %1, 0x82(%0); cache %1, 0xa2(%0)		\n\t"	\
                "cache %1, 0xc2(%0); cache %1, 0xe2(%0)		\n\t"	\
                "cache %1, 0x03(%0); cache %1, 0x23(%0)		\n\t"	\
                "cache %1, 0x43(%0); cache %1, 0x63(%0)		\n\t"	\
                "cache %1, 0x83(%0); cache %1, 0xa3(%0)		\n\t"	\
                "cache %1, 0xc3(%0); cache %1, 0xe3(%0)		\n\t"	\
                ".set reorder"						\
            :								\
            : "r" (va), "i" (op)					\
            : "memory");

/* Apply "op" to one line in each of the four ways. */
#define	cache_op_ls2_line_4way(va, op)					\
	__asm volatile(							\
                ".set noreorder					\n\t"	\
                "cache %1, 0(%0); cache %1, 1(%0)		\n\t"	\
                "cache %1, 2(%0); cache %1, 3(%0)		\n\t"	\
                ".set reorder"						\
            :								\
            : "r" (va), "i" (op)					\
            : "memory");

/* Apply "op" to eight consecutive 32-byte lines (single way). */
#define	cache_op_ls2_8line(va, op)					\
	__asm volatile(							\
                ".set noreorder					\n\t"	\
                "cache %1, 0x00(%0); cache %1, 0x20(%0)		\n\t"	\
                "cache %1, 0x40(%0); cache %1, 0x60(%0)		\n\t"	\
                "cache %1, 0x80(%0); cache %1, 0xa0(%0)		\n\t"	\
                "cache %1, 0xc0(%0); cache %1, 0xe0(%0)		\n\t"	\
                ".set reorder"						\
            :								\
            : "r" (va), "i" (op)					\
            : "memory");

/* Apply "op" to a single line. */
#define	cache_op_ls2_line(va, op)					\
	__asm volatile(							\
                ".set noreorder					\n\t"	\
                "cache %1, 0(%0)				\n\t"	\
                ".set reorder"						\
            :								\
            : "r" (va), "i" (op)					\
            : "memory");

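/*
 * Illustrative sketch only (not part of this header): one way the
 * index macros above could drive a whole-cache writeback-invalidate.
 * "LS2_EXAMPLE_WAY_SIZE" is a hypothetical constant for the size of
 * one way (total D-cache size / 4); the 32-byte line size is taken
 * from the 0x20 stride in the macros above, and MIPS_KSEG0_START is
 * assumed to come from <mips/cpuregs.h>.
 */
#if 0	/* example only, not compiled */
static void
example_ls2_pdcache_wbinv_all(void)
{
	vaddr_t va = MIPS_KSEG0_START;
	const vaddr_t eva = va + LS2_EXAMPLE_WAY_SIZE;

	/* Each call covers 8 lines (256 bytes) in all 4 ways. */
	for (; va < eva; va += 8 * 32)
		cache_op_ls2_8line_4way(va, CACHEOP_LS2_D_INDEX_WB_INV);
}
#endif
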
/* Primary instruction cache. */
void	ls2_icache_sync_all(void);
void	ls2_icache_sync_range(register_t, vsize_t);
void	ls2_icache_sync_range_index(vaddr_t, vsize_t);

/* Primary data cache. */
void	ls2_pdcache_wbinv_all(void);
void	ls2_pdcache_wbinv_range(register_t, vsize_t);
void	ls2_pdcache_wbinv_range_index(vaddr_t, vsize_t);

void	ls2_pdcache_inv_range(register_t, vsize_t);
void	ls2_pdcache_wb_range(register_t, vsize_t);

/* Secondary cache. */
void	ls2_sdcache_wbinv_all(void);
void	ls2_sdcache_wbinv_range(register_t, vsize_t);
void	ls2_sdcache_wbinv_range_index(vaddr_t, vsize_t);

void	ls2_sdcache_inv_range(register_t, vsize_t);
void	ls2_sdcache_wb_range(register_t, vsize_t);

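/*
 * Usage sketch (hypothetical caller, not from this header): after
 * writing instructions into memory, e.g. when loading code at
 * runtime, the affected range would be synchronized before execution
 * with something like
 *
 *	ls2_icache_sync_range((register_t)va, len);
 */
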
#endif /* !_LOCORE */
#endif /* !_MIPS_CACHE_LS2_H_ */