/*	$NetBSD: mdreloc.c,v 1.13 2002/09/06 03:05:37 mycroft Exp $	*/

/*-
 * Copyright (c) 2000 Eduardo Horvath.
 * Copyright (c) 1999 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Paul Kranenburg.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *        This product includes software developed by the NetBSD
 *        Foundation, Inc. and its contributors.
 * 4. Neither the name of The NetBSD Foundation nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <errno.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <sys/stat.h>

#include "rtldenv.h"
#include "debug.h"
#include "rtld.h"

/*
 * The following table holds for each relocation type:
 *	- the width in bits of the memory location the relocation
 *	  applies to (not currently used)
 *	- the number of bits the relocation value must be shifted to the
 *	  right (i.e. discard least significant bits) to fit into
 *	  the appropriate field in the instruction word.
 *	- flags indicating whether
 *		* the relocation involves a symbol
 *		* the relocation is relative to the current position
 *		* the relocation is for a GOT entry
 *		* the relocation is relative to the load address
 *
 */
#define _RF_S		0x80000000		/* Resolve symbol */
#define _RF_A		0x40000000		/* Use addend */
#define _RF_P		0x20000000		/* Location relative */
#define _RF_G		0x10000000		/* GOT offset */
#define _RF_B		0x08000000		/* Load address relative */
#define _RF_U		0x04000000		/* Unaligned */
#define _RF_SZ(s)	(((s) & 0xff) << 8)	/* memory target size */
#define _RF_RS(s)	( (s) & 0xff)		/* right shift */
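/*
 * For example, the HI22 entry below resolves the symbol, adds the
 * addend, shifts the result right by 10 and stores the low 22 bits
 * of it in a 32-bit word (see reloc_target_bitmask[] further down).
 */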
static int reloc_target_flags[] = {
	0,							/* NONE */
	_RF_S|_RF_A|		_RF_SZ(8)  | _RF_RS(0),		/* RELOC_8 */
	_RF_S|_RF_A|		_RF_SZ(16) | _RF_RS(0),		/* RELOC_16 */
	_RF_S|_RF_A|		_RF_SZ(32) | _RF_RS(0),		/* RELOC_32 */
	_RF_S|_RF_A|_RF_P|	_RF_SZ(8)  | _RF_RS(0),		/* DISP_8 */
	_RF_S|_RF_A|_RF_P|	_RF_SZ(16) | _RF_RS(0),		/* DISP_16 */
	_RF_S|_RF_A|_RF_P|	_RF_SZ(32) | _RF_RS(0),		/* DISP_32 */
	_RF_S|_RF_A|_RF_P|	_RF_SZ(32) | _RF_RS(2),		/* WDISP_30 */
	_RF_S|_RF_A|_RF_P|	_RF_SZ(32) | _RF_RS(2),		/* WDISP_22 */
	_RF_S|_RF_A|		_RF_SZ(32) | _RF_RS(10),	/* HI22 */
	_RF_S|_RF_A|		_RF_SZ(32) | _RF_RS(0),		/* 22 */
	_RF_S|_RF_A|		_RF_SZ(32) | _RF_RS(0),		/* 13 */
	_RF_S|_RF_A|		_RF_SZ(32) | _RF_RS(0),		/* LO10 */
	_RF_G|			_RF_SZ(32) | _RF_RS(0),		/* GOT10 */
	_RF_G|			_RF_SZ(32) | _RF_RS(0),		/* GOT13 */
	_RF_G|			_RF_SZ(32) | _RF_RS(10),	/* GOT22 */
	_RF_S|_RF_A|_RF_P|	_RF_SZ(32) | _RF_RS(0),		/* PC10 */
	_RF_S|_RF_A|_RF_P|	_RF_SZ(32) | _RF_RS(10),	/* PC22 */
	      _RF_A|_RF_P|	_RF_SZ(32) | _RF_RS(2),		/* WPLT30 */
				_RF_SZ(32) | _RF_RS(0),		/* COPY */
	_RF_S|_RF_A|		_RF_SZ(64) | _RF_RS(0),		/* GLOB_DAT */
				_RF_SZ(32) | _RF_RS(0),		/* JMP_SLOT */
	      _RF_A|	_RF_B|	_RF_SZ(64) | _RF_RS(0),		/* RELATIVE */
	_RF_S|_RF_A|	_RF_U|	_RF_SZ(32) | _RF_RS(0),		/* UA_32 */

	      _RF_A|		_RF_SZ(32) | _RF_RS(0),		/* PLT32 */
	      _RF_A|		_RF_SZ(32) | _RF_RS(10),	/* HIPLT22 */
	      _RF_A|		_RF_SZ(32) | _RF_RS(0),		/* LOPLT10 */
	      _RF_A|_RF_P|	_RF_SZ(32) | _RF_RS(0),		/* PCPLT32 */
	      _RF_A|_RF_P|	_RF_SZ(32) | _RF_RS(10),	/* PCPLT22 */
	      _RF_A|_RF_P|	_RF_SZ(32) | _RF_RS(0),		/* PCPLT10 */
	_RF_S|_RF_A|		_RF_SZ(32) | _RF_RS(0),		/* 10 */
	_RF_S|_RF_A|		_RF_SZ(32) | _RF_RS(0),		/* 11 */
	_RF_S|_RF_A|		_RF_SZ(64) | _RF_RS(0),		/* 64 */
	_RF_S|_RF_A|/*extra*/	_RF_SZ(32) | _RF_RS(0),		/* OLO10 */
	_RF_S|_RF_A|		_RF_SZ(32) | _RF_RS(42),	/* HH22 */
	_RF_S|_RF_A|		_RF_SZ(32) | _RF_RS(32),	/* HM10 */
	_RF_S|_RF_A|		_RF_SZ(32) | _RF_RS(10),	/* LM22 */
	_RF_S|_RF_A|_RF_P|	_RF_SZ(32) | _RF_RS(42),	/* PC_HH22 */
	_RF_S|_RF_A|_RF_P|	_RF_SZ(32) | _RF_RS(32),	/* PC_HM10 */
	_RF_S|_RF_A|_RF_P|	_RF_SZ(32) | _RF_RS(10),	/* PC_LM22 */
	_RF_S|_RF_A|_RF_P|	_RF_SZ(32) | _RF_RS(2),		/* WDISP16 */
	_RF_S|_RF_A|_RF_P|	_RF_SZ(32) | _RF_RS(2),		/* WDISP19 */
	_RF_S|_RF_A|		_RF_SZ(32) | _RF_RS(0),		/* GLOB_JMP */
	_RF_S|_RF_A|		_RF_SZ(32) | _RF_RS(0),		/* 7 */
	_RF_S|_RF_A|		_RF_SZ(32) | _RF_RS(0),		/* 5 */
	_RF_S|_RF_A|		_RF_SZ(32) | _RF_RS(0),		/* 6 */
	_RF_S|_RF_A|_RF_P|	_RF_SZ(64) | _RF_RS(0),		/* DISP64 */
	      _RF_A|		_RF_SZ(64) | _RF_RS(0),		/* PLT64 */
	_RF_S|_RF_A|		_RF_SZ(32) | _RF_RS(10),	/* HIX22 */
	_RF_S|_RF_A|		_RF_SZ(32) | _RF_RS(0),		/* LOX10 */
	_RF_S|_RF_A|		_RF_SZ(32) | _RF_RS(22),	/* H44 */
	_RF_S|_RF_A|		_RF_SZ(32) | _RF_RS(12),	/* M44 */
	_RF_S|_RF_A|		_RF_SZ(32) | _RF_RS(0),		/* L44 */
	_RF_S|_RF_A|		_RF_SZ(64) | _RF_RS(0),		/* REGISTER */
	_RF_S|_RF_A|	_RF_U|	_RF_SZ(64) | _RF_RS(0),		/* UA64 */
	_RF_S|_RF_A|	_RF_U|	_RF_SZ(16) | _RF_RS(0),		/* UA16 */
};

#ifdef RTLD_DEBUG_RELOC
static const char *reloc_names[] = {
	"NONE", "RELOC_8", "RELOC_16", "RELOC_32", "DISP_8",
	"DISP_16", "DISP_32", "WDISP_30", "WDISP_22", "HI22",
	"22", "13", "LO10", "GOT10", "GOT13",
	"GOT22", "PC10", "PC22", "WPLT30", "COPY",
	"GLOB_DAT", "JMP_SLOT", "RELATIVE", "UA_32", "PLT32",
	"HIPLT22", "LOPLT10", "PCPLT32", "PCPLT22", "PCPLT10",
	"10", "11", "64", "OLO10", "HH22",
	"HM10", "LM22", "PC_HH22", "PC_HM10", "PC_LM22",
	"WDISP16", "WDISP19", "GLOB_JMP", "7", "5", "6",
	"DISP64", "PLT64", "HIX22", "LOX10", "H44", "M44",
	"L44", "REGISTER", "UA64", "UA16"
};
#endif

#define RELOC_RESOLVE_SYMBOL(t)		((reloc_target_flags[t] & _RF_S) != 0)
#define RELOC_PC_RELATIVE(t)		((reloc_target_flags[t] & _RF_P) != 0)
#define RELOC_BASE_RELATIVE(t)		((reloc_target_flags[t] & _RF_B) != 0)
#define RELOC_UNALIGNED(t)		((reloc_target_flags[t] & _RF_U) != 0)
#define RELOC_USE_ADDEND(t)		((reloc_target_flags[t] & _RF_A) != 0)
#define RELOC_TARGET_SIZE(t)		((reloc_target_flags[t] >> 8) & 0xff)
#define RELOC_VALUE_RIGHTSHIFT(t)	(reloc_target_flags[t] & 0xff)

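/*
 * Mask of the bits that actually get patched into the target for each
 * relocation type; _BM(n) expands to a mask of the low n bits, and -1
 * means all 64 bits.
 */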
static long reloc_target_bitmask[] = {
#define _BM(x)	(~(-(1ULL << (x))))
	0,				/* NONE */
	_BM(8), _BM(16), _BM(32),	/* RELOC_8, _16, _32 */
	_BM(8), _BM(16), _BM(32),	/* DISP8, DISP16, DISP32 */
	_BM(30), _BM(22),		/* WDISP30, WDISP22 */
	_BM(22), _BM(22),		/* HI22, _22 */
	_BM(13), _BM(10),		/* RELOC_13, _LO10 */
	_BM(10), _BM(13), _BM(22),	/* GOT10, GOT13, GOT22 */
	_BM(10), _BM(22),		/* _PC10, _PC22 */
	_BM(30), 0,			/* _WPLT30, _COPY */
	_BM(32), _BM(32), _BM(32),	/* _GLOB_DAT, JMP_SLOT, _RELATIVE */
	_BM(32), _BM(32),		/* _UA32, PLT32 */
	_BM(22), _BM(10),		/* _HIPLT22, LOPLT10 */
	_BM(32), _BM(22), _BM(10),	/* _PCPLT32, _PCPLT22, _PCPLT10 */
	_BM(10), _BM(11), -1,		/* _10, _11, _64 */
	_BM(10), _BM(22),		/* _OLO10, _HH22 */
	_BM(10), _BM(22),		/* _HM10, _LM22 */
	_BM(22), _BM(10), _BM(22),	/* _PC_HH22, _PC_HM10, _PC_LM22 */
	_BM(16), _BM(19),		/* _WDISP16, _WDISP19 */
	-1,				/* GLOB_JMP */
	_BM(7), _BM(5), _BM(6),		/* _7, _5, _6 */
	-1, -1,				/* DISP64, PLT64 */
	_BM(22), _BM(13),		/* HIX22, LOX10 */
	_BM(22), _BM(10), _BM(13),	/* H44, M44, L44 */
	-1, -1, _BM(16),		/* REGISTER, UA64, UA16 */
#undef _BM
};
#define RELOC_VALUE_BITMASK(t)	(reloc_target_bitmask[t])

/*
 * Instruction templates:
 */
#define	BAA	0x10400000	/*	ba,a	%xcc, 0 */
#define	SETHI	0x03000000	/*	sethi	%hi(0), %g1 */
#define	JMP	0x81c06000	/*	jmpl	%g1+%lo(0), %g0 */
#define	NOP	0x01000000	/*	sethi	%hi(0), %g0 */
#define	OR	0x82806000	/*	or	%g1, 0, %g1 */
#define	XOR	0x82c06000	/*	xor	%g1, 0, %g1 */
#define	MOV71	0x8283a000	/*	or	%o7, 0, %g1 */
#define	MOV17	0x9c806000	/*	or	%g1, 0, %o7 */
#define	CALL	0x40000000	/*	call	0 */
#define	SLLX	0x8b407000	/*	sllx	%g1, 0, %g1 */
#define	SETHIG5	0x0b000000	/*	sethi	%hi(0), %g5 */
#define	ORG5	0x82804005	/*	or	%g1, %g5, %g1 */


/* %hi(v) with variable shift */
#define	HIVAL(v, s)	(((v) >> (s)) &  0x003fffff)
#define LOVAL(v)	((v) & 0x000003ff)

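/*
 * Bind one JMP_SLOT relocation: resolve the symbol and rewrite the PLT
 * entry at r_offset with the cheapest instruction sequence that reaches
 * the target, returning the resolved address through addrp.
 */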
int
_rtld_relocate_plt_object(obj, rela, addrp, dodebug)
	Obj_Entry *obj;
	const Elf_Rela *rela;
	caddr_t *addrp;
	bool dodebug;
{
	const Elf_Sym *def;
	const Obj_Entry *defobj;
	Elf_Word *where = (Elf_Word *)((Elf_Addr)obj->relocbase + rela->r_offset);
	Elf_Addr value, offset;

	/* Fully resolve procedure addresses now */

	assert(ELF_R_TYPE(rela->r_info) == R_TYPE(JMP_SLOT));

	def = _rtld_find_symdef(ELF_R_SYM(rela->r_info), obj, &defobj, true);
	if (def == NULL)
		return (-1);

	value = (Elf_Addr) (defobj->relocbase + def->st_value);
	rdbg(dodebug, ("bind now/fixup in %s --> old=%lx new=%lx",
	    defobj->strtab + def->st_name,
	    (u_long)*where, (u_long)value));

	/*
	 * At the PLT entry pointed at by `where', we now construct
	 * a direct transfer to the now fully resolved function
	 * address.
	 *
	 * A PLT entry is supposed to start by looking like this:
	 *
	 *	sethi	%hi(. - .PLT0), %g1
	 *	ba,a	%xcc, .PLT1
	 *	nop
	 *	nop
	 *	nop
	 *	nop
	 *	nop
	 *	nop
	 *
	 * When we replace these entries we start from the second
	 * word and work in reverse order, so the last thing we
	 * do is replace the branch.  That lets us change the entry
	 * atomically.
	 *
	 * We now need to find out how far we need to jump.  We
	 * have a choice of several different relocation techniques
	 * which are increasingly expensive.
	 */

	offset = ((Elf_Addr)where) - value;
	if (rela->r_addend) {
		Elf_Addr *ptr = (Elf_Addr *)where;
		/*
		 * This entry is beyond the first 32768; just replace the pointer.
		 */
		ptr[0] = value;

	} else if (offset <= (1L<<20) && offset >= -(1L<<20)) {
		/*
		 * We're within 1MB -- we can use a direct branch insn.
		 *
		 * We can generate this pattern:
		 *
		 *	sethi	%hi(. - .PLT0), %g1
		 *	ba,a	%xcc, addr
		 *	nop
		 *	nop
		 *	nop
		 *	nop
		 *	nop
		 *	nop
		 *
		 */
		where[1] = BAA | ((offset >> 2) &0x3fffff);
		__asm __volatile("iflush %0+4" : : "r" (where));
	} else if (value >= 0 && value < (1L<<32)) {
		/*
		 * We're within 32 bits of address zero.
		 *
		 * The resulting code in the jump slot is:
		 *
		 *	sethi	%hi(. - .PLT0), %g1
		 *	sethi	%hi(addr), %g1
		 *	jmp	%g1+%lo(addr)
		 *	nop
		 *	nop
		 *	nop
		 *	nop
		 *	nop
		 *
		 */
		where[2] = JMP   | LOVAL(value);
		where[1] = SETHI | HIVAL(value, 10);
		__asm __volatile("iflush %0+8" : : "r" (where));
		__asm __volatile("iflush %0+4" : : "r" (where));

	} else if (value <= 0 && value > -(1L<<32)) {
		/*
		 * We're within 32 bits of address -1.
		 *
		 * The resulting code in the jump slot is:
		 *
		 *	sethi	%hi(. - .PLT0), %g1
		 *	sethi	%hix(addr), %g1
		 *	xor	%g1, %lox(addr), %g1
		 *	jmp	%g1
		 *	nop
		 *	nop
		 *	nop
		 *	nop
		 *
		 */
		where[3] = JMP;
		where[2] = XOR | ((~value) & 0x00001fff);
		where[1] = SETHI | HIVAL(~value, 10);
		__asm __volatile("iflush %0+12" : : "r" (where));
		__asm __volatile("iflush %0+8" : : "r" (where));
		__asm __volatile("iflush %0+4" : : "r" (where));

	} else if (offset <= (1L<<32) && offset >= -((1L<<32) - 4)) {
		/*
		 * We're within 32 bits -- we can use a direct call insn.
		 *
		 * The resulting code in the jump slot is:
		 *
		 *	sethi	%hi(. - .PLT0), %g1
		 *	mov	%o7, %g1
		 *	call	(.+offset)
		 *	 mov	%g1, %o7
		 *	nop
		 *	nop
		 *	nop
		 *	nop
		 *
		 */
		where[3] = MOV17;
		where[2] = CALL	  | ((offset >> 4) & 0x3fffffff);
		where[1] = MOV71;
		__asm __volatile("iflush %0+12" : : "r" (where));
		__asm __volatile("iflush %0+8" : : "r" (where));
		__asm __volatile("iflush %0+4" : : "r" (where));

	} else if (offset >= 0 && offset < (1L<<44)) {
		/*
		 * We're within 44 bits.  We can generate this pattern:
		 *
		 * The resulting code in the jump slot is:
		 *
		 *	sethi	%hi(. - .PLT0), %g1
		 *	sethi	%h44(addr), %g1
		 *	or	%g1, %m44(addr), %g1
		 *	sllx	%g1, 12, %g1
		 *	jmp	%g1+%l44(addr)
		 *	nop
		 *	nop
		 *	nop
		 *
		 */
		where[4] = JMP   | LOVAL(offset);
		where[3] = SLLX  | 12;
		where[2] = OR    | (((offset) >> 12) & 0x00001fff);
		where[1] = SETHI | HIVAL(offset, 22);
		__asm __volatile("iflush %0+16" : : "r" (where));
		__asm __volatile("iflush %0+12" : : "r" (where));
		__asm __volatile("iflush %0+8" : : "r" (where));
		__asm __volatile("iflush %0+4" : : "r" (where));

	} else if (offset < 0 && offset > -(1L<<44)) {
		/*
		 * We're within 44 bits.  We can generate this pattern:
		 *
		 * The resulting code in the jump slot is:
		 *
		 *	sethi	%hi(. - .PLT0), %g1
		 *	sethi	%h44(-addr), %g1
		 *	xor	%g1, %m44(-addr), %g1
		 *	sllx	%g1, 12, %g1
		 *	jmp	%g1+%l44(addr)
		 *	nop
		 *	nop
		 *	nop
		 *
		 */
		where[4] = JMP   | LOVAL(offset);
		where[3] = SLLX  | 12;
		where[2] = XOR   | (((~offset) >> 12) & 0x00001fff);
		where[1] = SETHI | HIVAL(~offset, 22);
		__asm __volatile("iflush %0+16" : : "r" (where));
		__asm __volatile("iflush %0+12" : : "r" (where));
		__asm __volatile("iflush %0+8" : : "r" (where));
		__asm __volatile("iflush %0+4" : : "r" (where));

	} else {
		/*
		 * We need to load all 64-bits
		 *
		 * The resulting code in the jump slot is:
		 *
		 *	sethi	%hi(. - .PLT0), %g1
		 *	sethi	%hh(addr), %g1
		 *	sethi	%lm(addr), %g5
		 *	or	%g1, %hm(addr), %g1
		 *	sllx	%g1, 32, %g1
		 *	or	%g1, %g5, %g1
		 *	jmp	%g1+%lo(addr)
		 *	nop
		 *
		 */
		where[6] = JMP     | LOVAL(value);
		where[5] = ORG5;
		where[4] = SLLX    | 32;
		where[3] = OR      | LOVAL((value) >> 32);
		where[2] = SETHIG5 | HIVAL(value, 10);
		where[1] = SETHI   | HIVAL(value, 42);
		__asm __volatile("iflush %0+24" : : "r" (where));
		__asm __volatile("iflush %0+20" : : "r" (where));
		__asm __volatile("iflush %0+16" : : "r" (where));
		__asm __volatile("iflush %0+12" : : "r" (where));
		__asm __volatile("iflush %0+8" : : "r" (where));
		__asm __volatile("iflush %0+4" : : "r" (where));

	}

	*addrp = (caddr_t)value;
	return (0);
}

/*
 * Install rtld function call into this PLT slot.
 */
#define	SAVE		0x9de3bf50
#define	SETHI_l0	0x21000000
#define	SETHI_l1	0x23000000
#define	OR_l0_l0	0xa0142000
#define	SLLX_l0_32_l0	0xa12c3020
#define	OR_l0_l1_l0	0xa0140011
#define	JMPL_l0_o1	0x93c42000
#define	MOV_g1_o0	0x90100001

void _rtld_install_plt __P((Elf_Word *pltgot,	Elf_Addr proc));

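/*
 * The eight-instruction sequence below gets a new register window
 * (save), builds the full 64-bit address of `proc' in %l0 (using %l1
 * for the low bits) and jumps to it; the delay-slot mov passes %g1,
 * as set up by the originating PLT entry's sethi, to the binder in %o0.
 */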
void
_rtld_install_plt(pltgot, proc)
	Elf_Word *pltgot;
	Elf_Addr proc;
{
	pltgot[0] = SAVE;
	pltgot[1] = SETHI_l0  | HIVAL(proc, 42);
	pltgot[2] = SETHI_l1  | HIVAL(proc, 10);
	pltgot[3] = OR_l0_l0  | LOVAL((proc) >> 32);
	pltgot[4] = SLLX_l0_32_l0;
	pltgot[5] = OR_l0_l1_l0;
	pltgot[6] = JMPL_l0_o1 | LOVAL(proc);
	pltgot[7] = MOV_g1_o0;
}

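/*
 * C model of the index computation done by the _rtld_bind_start_0
 * assembly stub: recover a PLT entry number from two PLT addresses.
 * Entries past the first 32768 (32 bytes each, hence the ~1MB bias in
 * `i') live in the far PLT, where each 5120-byte block holds 160
 * entries: 160 six-instruction (24-byte) stubs followed by 160
 * eight-byte pointers -- which is what the divisions below decode.
 */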
long _rtld_bind_start_0_stub __P((long x, long y));
long
_rtld_bind_start_0_stub(x, y)
	long x, y;
{
	long i;
	long n;

	i = x - y + 1048596;
	n = 32768 + (i/5120)*160 + (i%5120)/24;

	return (n);
}

void
_rtld_setup_pltgot(const Obj_Entry *obj)
{
	/*
	 * On sparc64 we have a problem.
	 *
	 * Instructions are 4 bytes long.
	 * Elf[64]_Addr is 8 bytes long, as are our pltgot[]
	 * array entries.
	 * Each PLT entry jumps to PLT0 to enter the dynamic
	 * linker.
	 * Loading an arbitrary 64-bit pointer takes 6
	 * instructions and 2 registers.
	 *
	 * Somehow we need to issue a save to get a new stack
	 * frame, load the address of the dynamic linker, and
	 * jump there, in 8 instructions or less.
	 *
	 * We also need to fill in both PLT0 and PLT1.
	 */
	{
		Elf_Word *entry = (Elf_Word *)obj->pltgot;
		extern void _rtld_bind_start_0 __P((long, long));
		extern void _rtld_bind_start_1 __P((long, long));

		/* Install in entries 0 and 1 */
		_rtld_install_plt(&entry[0], (Elf_Addr) &_rtld_bind_start_0);
		_rtld_install_plt(&entry[8], (Elf_Addr) &_rtld_bind_start_1);

		/*
		 * Install the object reference in the first slot
		 * of entry 2.
		 */
		obj->pltgot[8] = (Elf_Addr) obj;
	}
}

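/*
 * Process all non-PLT relocations for an object.  JMP_SLOT and COPY
 * relocations are handled elsewhere; RELATIVE relocations are applied
 * immediately since globals may not yet be accessible here.
 */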
int
_rtld_relocate_nonplt_objects(obj, dodebug)
	Obj_Entry *obj;
	bool dodebug;
{
	const Elf_Rela *rela;

	for (rela = obj->rela; rela < obj->relalim; rela++) {
		Elf_Addr *where;
		Elf_Word type;
		Elf_Addr value = 0, mask;
		const Elf_Sym *def = NULL;
		const Obj_Entry *defobj = NULL;
		unsigned long	 symnum;

		where = (Elf_Addr *) (obj->relocbase + rela->r_offset);
		symnum = ELF_R_SYM(rela->r_info);

		type = ELF_R_TYPE(rela->r_info);
		if (type == R_TYPE(NONE))
			continue;

		/* We do JMP_SLOTs in relocate_plt_object() below */
		if (type == R_TYPE(JMP_SLOT))
			continue;

		/* COPY relocs are also handled elsewhere */
		if (type == R_TYPE(COPY))
			continue;

		/*
		 * We use the fact that relocation types are an `enum'.
		 * Note: R_SPARC_UA16 is currently the numerically largest type.
		 */
		if (type > R_TYPE(UA16))
			return (-1);

		value = rela->r_addend;

		/*
		 * Handle relative relocs here, because we might not
		 * be able to access globals yet.
		 */
		if (!dodebug && type == R_TYPE(RELATIVE)) {
			/* XXXX -- apparently we ignore the preexisting value */
			*where = (Elf_Addr)(obj->relocbase + value);
			continue;
		}

		if (RELOC_RESOLVE_SYMBOL(type)) {

			/* Find the symbol */
			def = _rtld_find_symdef(symnum, obj, &defobj, false);
			if (def == NULL)
				return (-1);

			/* Add in the symbol's absolute address */
			value += (Elf_Addr)(defobj->relocbase + def->st_value);
		}

		if (RELOC_PC_RELATIVE(type)) {
			value -= (Elf_Addr)where;
		}

		if (RELOC_BASE_RELATIVE(type)) {
			/*
			 * Note that even though SPARC uses `Elf_Rela'
			 * exclusively, we still need the implicit memory addend
			 * in relocations referring to GOT entries.
			 * Undoubtedly, someone f*cked this up in the distant
			 * past, and now we're stuck with it in the name of
			 * compatibility for all eternity.
			 *
			 * In any case, the implicit and explicit should be
			 * mutually exclusive. We provide a check for that
			 * here.
			 */
#ifdef DIAGNOSTIC
			if (value != 0 && *where != 0) {
				xprintf("BASE_REL(%s): where=%p, *where 0x%lx, "
					"addend=0x%lx, base %p\n",
					obj->path, where, *where,
					rela->r_addend, obj->relocbase);
			}
#endif
			/* XXXX -- apparently we ignore the preexisting value */
			value += (Elf_Addr)(obj->relocbase);
		}

		mask = RELOC_VALUE_BITMASK(type);
		value >>= RELOC_VALUE_RIGHTSHIFT(type);
		value &= mask;

		if (RELOC_UNALIGNED(type)) {
			/* Handle unaligned relocations. */
			Elf_Addr tmp = 0;
			char *ptr = (char *)where;
			int i, size = RELOC_TARGET_SIZE(type)/8;

			/* Read it in one byte at a time. */
			for (i=0; i<size; i++)
				tmp = (tmp << 8) | ptr[i];

			tmp &= ~mask;
			tmp |= value;

			/* Write it back out. */
			for (i=0; i<size; i++)
				ptr[i] = ((tmp >> (8*i)) & 0xff);
#ifdef RTLD_DEBUG_RELOC
			value = (Elf_Addr)tmp;
#endif

		} else if (RELOC_TARGET_SIZE(type) > 32) {
			*where &= ~mask;
			*where |= value;
#ifdef RTLD_DEBUG_RELOC
			value = (Elf_Addr)*where;
#endif
		} else {
			Elf32_Addr *where32 = (Elf32_Addr *)where;

			*where32 &= ~mask;
			*where32 |= value;
#ifdef RTLD_DEBUG_RELOC
			value = (Elf_Addr)*where32;
#endif
		}

#ifdef RTLD_DEBUG_RELOC
		if (RELOC_RESOLVE_SYMBOL(type)) {
			rdbg(dodebug, ("%s %s in %s --> %p %s",
			    reloc_names[type],
			    obj->strtab + obj->symtab[symnum].st_name,
			    obj->path, (void *)value, defobj->path));
		} else {
			rdbg(dodebug, ("%s --> %p", reloc_names[type],
			    (void *)value));
		}
#endif
	}
	return (0);
}

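/*
 * Nothing to do for lazy PLT binding: unresolved PLT entries already
 * branch to PLT0/PLT1, which enter the binder through the
 * _rtld_bind_start_* trampolines installed above.
 */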
int
_rtld_relocate_plt_lazy(obj, dodebug)
	Obj_Entry *obj;
	bool dodebug;
{
	return (0);
}
    668