/* cpufunc.h, revision 1.13 */
      1 /*	$NetBSD: cpufunc.h,v 1.13 2009/11/29 10:08:10 skrll Exp $	*/
      2 
      3 /*	$OpenBSD: cpufunc.h,v 1.17 2000/05/15 17:22:40 mickey Exp $	*/
      4 
      5 /*
      6  * Copyright (c) 1998-2004 Michael Shalayeff
      7  * All rights reserved.
      8  *
      9  * Redistribution and use in source and binary forms, with or without
     10  * modification, are permitted provided that the following conditions
     11  * are met:
     12  * 1. Redistributions of source code must retain the above copyright
     13  *    notice, this list of conditions and the following disclaimer.
     14  * 2. Redistributions in binary form must reproduce the above copyright
     15  *    notice, this list of conditions and the following disclaimer in the
     16  *    documentation and/or other materials provided with the distribution.
     17  *
     18  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
     19  * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
     20  * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
     21  * IN NO EVENT SHALL THE AUTHOR OR HIS RELATIVES BE LIABLE FOR ANY DIRECT,
     22  * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
     23  * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
     24  * SERVICES; LOSS OF MIND, USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
     25  * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
     26  * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
     27  * IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
     28  * THE POSSIBILITY OF SUCH DAMAGE.
     29  */
     30 /*
     31  *  (c) Copyright 1988 HEWLETT-PACKARD COMPANY
     32  *
     33  *  To anyone who acknowledges that this file is provided "AS IS"
     34  *  without any express or implied warranty:
     35  *      permission to use, copy, modify, and distribute this file
     36  *  for any purpose is hereby granted without fee, provided that
     37  *  the above copyright notice and this notice appears in all
     38  *  copies, and that the name of Hewlett-Packard Company not be
     39  *  used in advertising or publicity pertaining to distribution
     40  *  of the software without specific, written prior permission.
     41  *  Hewlett-Packard Company makes no representations about the
     42  *  suitability of this software for any purpose.
     43  */
     44 /*
     45  * Copyright (c) 1990,1994 The University of Utah and
     46  * the Computer Systems Laboratory (CSL).  All rights reserved.
     47  *
     48  * THE UNIVERSITY OF UTAH AND CSL PROVIDE THIS SOFTWARE IN ITS "AS IS"
     49  * CONDITION, AND DISCLAIM ANY LIABILITY OF ANY KIND FOR ANY DAMAGES
     50  * WHATSOEVER RESULTING FROM ITS USE.
     51  *
     52  * CSL requests users of this software to return to csl-dist (at) cs.utah.edu any
     53  * improvements that they make and grant CSL redistribution rights.
     54  *
     55  * 	Utah $Hdr: c_support.s 1.8 94/12/14$
     56  *	Author: Bob Wheeler, University of Utah CSL
     57  */
     58 
     59 #ifndef _HPPA_CPUFUNC_H_
     60 #define _HPPA_CPUFUNC_H_
     61 
     62 #include <machine/psl.h>
     63 #include <machine/pte.h>
     64 
/*
 * Convert a byte/page value to the page-frame format used when
 * inserting TLB entries, and back (page number shifted down by a
 * further 5 bits relative to PGSHIFT).
 * NOTE(review): presumably matches the PA-RISC TLB insertion-format
 * layout — confirm against the architecture manual.
 */
#define tlbbtop(b) ((b) >> (PGSHIFT - 5))
#define tlbptob(p) ((p) << (PGSHIFT - 5))

/* Convert a byte address to a hashed-page-table (HPT) bucket index. */
#define hptbtop(b) ((b) >> 17)
     69 
     70 /* Get space register for an address */
     71 static __inline register_t
     72 ldsid(vaddr_t p) {
     73 	register_t ret;
     74 	__asm volatile("ldsid (%1),%0" : "=r" (ret) : "r" (p));
     75 	return ret;
     76 }
     77 
/* Move to / from control register r (r must be a compile-time constant). */
#define mtctl(v,r) __asm volatile("mtctl %0,%1":: "r" (v), "i" (r))
#define mfctl(r,v) __asm volatile("mfctl %1,%0": "=r" (v): "i" (r))

/*
 * Read CPU diagnostic register r into v via a hand-encoded DIAG
 * instruction, staging the result through %r22 (the assembler has no
 * mnemonic for this operation, hence the raw operand encoding).
 */
#define mfcpu(r,v)	/* XXX for the lack of the mnemonics */		\
	__asm volatile("diag  %1\n\t"					\
			 "copy  %%r22, %0"				\
	: "=r" (v) : "i" ((0x1400 | ((r) << 21) | (22))) : "r22")

/* Move to / from space register r (r must be a compile-time constant). */
#define mtsp(v,r) __asm volatile("mtsp %0,%1":: "r" (v), "i" (r))
#define mfsp(r,v) __asm volatile("mfsp %1,%0": "=r" (v): "i" (r))

/* Set (ssm) / reset (rsm) PSW system-mask bits v; old PSW bits land in r. */
#define ssm(v,r) __asm volatile("ssm %1,%0": "=r" (r): "i" (v))
#define rsm(v,r) __asm volatile("rsm %1,%0": "=r" (r): "i" (v))
     91 
     92 /* Move to system mask. Old value of system mask is returned. */
     93 static __inline register_t mtsm(register_t mask) {
     94 	register_t ret;
     95 	__asm volatile(
     96 	    "ssm 0,%0\n\t"
     97 	    "mtsm %1": "=&r" (ret) : "r" (mask));
     98 	return ret;
     99 }
    100 
/* Flush a single data (fdce) / instruction (fice) cache entry at sp:off. */
#define	fdce(sp,off) __asm volatile("fdce 0(%0,%1)":: "i" (sp), "r" (off))
#define	fice(sp,off) __asm volatile("fice 0(%0,%1)":: "i" (sp), "r" (off))
/*
 * SYNC followed by seven NOPs — delay slots to let outstanding cache
 * operations complete before subsequent accesses; also a compiler
 * memory barrier via the "memory" clobber.
 */
#define sync_caches() \
    __asm volatile("sync\n\tnop\n\tnop\n\tnop\n\tnop\n\tnop\n\tnop\n\tnop":::"memory")
    105 
/*
 * Insert the address portion (page frame pg) of an instruction-TLB
 * entry for virtual address va in space sp (sp is loaded into %sr1).
 */
static __inline void
iitlba(u_int pg, pa_space_t sp, vaddr_t va)
{
	mtsp(sp, 1);
	__asm volatile("iitlba %0,(%%sr1, %1)":: "r" (pg), "r" (va));
}
    112 
/*
 * Insert the address portion (page frame pg) of a data-TLB entry for
 * virtual address va in space sp (sp is loaded into %sr1).
 */
static __inline void
idtlba(u_int pg, pa_space_t sp, vaddr_t va)
{
	mtsp(sp, 1);
	__asm volatile("idtlba %0,(%%sr1, %1)":: "r" (pg), "r" (va));
}
    119 
/*
 * Insert the protection/access-rights portion (prot) of an
 * instruction-TLB entry for va in space sp; pairs with iitlba().
 */
static __inline void
iitlbp(u_int prot, pa_space_t sp, vaddr_t va)
{
	mtsp(sp, 1);
	__asm volatile("iitlbp %0,(%%sr1, %1)":: "r" (prot), "r" (va));
}
    126 
/*
 * Insert the protection/access-rights portion (prot) of a data-TLB
 * entry for va in space sp; pairs with idtlba().
 */
static __inline void
idtlbp(u_int prot, pa_space_t sp, vaddr_t va)
{
	mtsp(sp, 1);
	__asm volatile("idtlbp %0,(%%sr1, %1)":: "r" (prot), "r" (va));
}
    133 
/* Purge the instruction-TLB entry (if any) translating sp:va. */
static __inline void
pitlb(pa_space_t sp, vaddr_t va)
{
	mtsp(sp, 1);
	__asm volatile("pitlb %%r0(%%sr1, %0)":: "r" (va));
}
    140 
/* Purge the data-TLB entry (if any) translating sp:va. */
static __inline void
pdtlb(pa_space_t sp, vaddr_t va)
{
	mtsp(sp, 1);
	__asm volatile("pdtlb %%r0(%%sr1, %0)":: "r" (va));
}
    147 
/*
 * Purge instruction-TLB entry, PITLBE variant for sp:va.
 * NOTE(review): the "E" form differs from pitlb() per the PA-RISC
 * architecture (entry-granular purge) — confirm exact semantics
 * against the architecture manual.
 */
static __inline void
pitlbe(pa_space_t sp, vaddr_t va)
{
	mtsp(sp, 1);
	__asm volatile("pitlbe %%r0(%%sr1, %0)":: "r" (va));
}
    154 
/*
 * Purge data-TLB entry, PDTLBE variant for sp:va.
 * NOTE(review): the "E" form differs from pdtlb() per the PA-RISC
 * architecture (entry-granular purge) — confirm exact semantics
 * against the architecture manual.
 */
static __inline void
pdtlbe(pa_space_t sp, vaddr_t va)
{
	mtsp(sp, 1);
	__asm volatile("pdtlbe %%r0(%%sr1, %0)":: "r" (va));
}
    161 
#ifdef _KERNEL
/* Per-CPU-type hook to set up the hashed page table (HPT), if any. */
extern int (*cpu_hpt_init)(vaddr_t, vsize_t);

/*
 * Cache/TLB maintenance over (space, va, size) ranges; defined
 * elsewhere.  By PA-RISC naming convention "f" = flush (write back
 * and invalidate), "p" = purge (invalidate) — confirm in the
 * implementations.
 */
void ficache(pa_space_t, vaddr_t, vsize_t);
void fdcache(pa_space_t, vaddr_t, vsize_t);
void pdcache(pa_space_t, vaddr_t, vsize_t);
void fcacheall(void);
void ptlball(void);
hppa_hpa_t cpu_gethpa(int);

/*
 * PCXL2 accelerated-I/O window [0xf4000000, 0xfc000000) and the
 * mapping from an address in that window to its enable-mask bit
 * (one of four 128MB quadrants, selected by address bits 26:25).
 */
#define PCXL2_ACCEL_IO_START		0xf4000000
#define PCXL2_ACCEL_IO_END		(0xfc000000 - 1)
#define PCXL2_ACCEL_IO_ADDR2MASK(a)	(0x8 >> ((((a) >> 25) - 2) & 3))
void eaio_l2(int);
    176 
    177 /*
    178  * These flush or purge the data cache for a item whose total
    179  * size is <= the size of a data cache line, however they don't
    180  * check this constraint.
    181  */
    182 static __inline void
    183 fdcache_small(pa_space_t sp, vaddr_t va, vsize_t size)
    184 {
    185 	__asm volatile(
    186 		"	mtsp	%0,%%sr1		\n"
    187 		"	fdc	%%r0(%%sr1, %1)		\n"
    188 		"	fdc	%2(%%sr1, %1)		\n"
    189 		"	sync				\n"
    190 		"	syncdma				\n"
    191 		:
    192 		: "r" (sp), "r" (va), "r" (size - 1));
    193 }
    194 static __inline void
    195 pdcache_small(pa_space_t sp, vaddr_t va, vsize_t size)
    196 {
    197 	__asm volatile(
    198 		"	mtsp	%0,%%sr1		\n"
    199 		"	pdc	%%r0(%%sr1, %1)		\n"
    200 		"	pdc	%2(%%sr1, %1)		\n"
    201 		"	sync				\n"
    202 		"	syncdma				\n"
    203 		:
    204 		: "r" (sp), "r" (va), "r" (size - 1));
    205 }
    206 
    207 #endif /* _KERNEL */
    208 
    209 #endif /* _HPPA_CPUFUNC_H_ */
    210