/* $NetBSD: rtld_start.S,v 1.7 2024/07/23 18:11:53 riastradh Exp $ */

/*-
 * Copyright (c) 2014 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Matt Thomas of 3am Software Foundry.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/*-
 * Copyright (c) 2014 The FreeBSD Foundation
 * All rights reserved.
 *
 * This software was developed by Andrew Turner under
 * sponsorship from the FreeBSD Foundation.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#include <machine/asm.h>

RCSID("$NetBSD: rtld_start.S,v 1.7 2024/07/23 18:11:53 riastradh Exp $")

/*
 * void _rtld_start(void (*cleanup)(void), const Obj_Entry *obj,
 *    struct ps_strings *ps_strings);
 *
 * X0		= NULL
 * X1		= NULL
 * X2		= ps_strings
 * X30 (LR)	= 0
 * X29 (FP)	= 0
 */
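
/*
 * A rough C-level sketch of what the stub below does (hedged; the exact
 * _rtld() prototype is defined in rtld.c, not here):
 *
 *	relocbase = &_DYNAMIC - GOT:_DYNAMIC;
 *	_rtld_relocate_nonplt_self(&_DYNAMIC, relocbase);
 *	Elf_Addr slots[2];		  (filled in by _rtld with
 *					   cleanup and obj_main)
 *	entry = _rtld(slots, relocbase);  (returns the program entry point)
 *	(*entry)(slots[0], slots[1], ps_strings);
 */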
ENTRY_NP(_rtld_start)
	mov	x24, x2			/* save ps_strings */

	adrp	x1, :got:_DYNAMIC	/* load _DYNAMIC offset from GOT */
	ldr	x1, [x1, #:got_lo12:_DYNAMIC]

	adrp	x0, _DYNAMIC		/* get &_DYNAMIC */
	add	x0, x0, #:lo12:_DYNAMIC

	sub	x25, x0, x1		/* relocbase = &_DYNAMIC - GOT:_DYNAMIC */
	mov	x1, x25			/* pass as 2nd argument */
	bl	_C_LABEL(_rtld_relocate_nonplt_self)

	sub	sp, sp, #16		/* reserve space for returns */
	mov	x0, sp			/* pointer to reserved space */
	mov	x1, x25			/* pass relocbase */
	bl	_C_LABEL(_rtld)
	mov	x17, x0			/* save entry point */

	ldp	x0, x1, [sp], #16	/* pop cleanup & obj_main */
	mov	x2, x24			/* restore ps_strings */

	br	x17			/* call saved entry point */
END(_rtld_start)

/*
 * Upon entry from plt0 entry:
 *
 * SP+0		= &PLTGOT[n + 3]
 * SP+8		= return addr
 * X16		= &PLTGOT[2]
 */
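
/*
 * A hedged sketch of the computation done below (the precise _rtld_bind()
 * prototype lives in the MD relocation code, not here):
 *
 *	obj = PLTGOT[1];			(filled in by ld.elf_so)
 *	n = (&PLTGOT[n + 3] - &PLTGOT[2] - 8) / sizeof(void *);
 *	new = _rtld_bind(obj, n);		(resolve the PLT slot)
 *	goto *new;
 *
 * All argument registers (x0-x8, q0-q7) are saved and restored around the
 * call so the eventual target sees its arguments unmodified.
 */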
ENTRY_NP(_rtld_bind_start)
	ldr	x9, [sp]		/* x9 = &PLTGOT[n+3] */

	/* save x0-x8 for arguments */
	stp	x0, x1, [sp, #-16]!
	stp	x2, x3, [sp, #-16]!
	stp	x4, x5, [sp, #-16]!
	stp	x6, x7, [sp, #-16]!
	stp	x8, xzr, [sp, #-16]!

	/* save q0-q7 for arguments */
	stp	q0, q1, [sp, #-32]!
	stp	q2, q3, [sp, #-32]!
	stp	q4, q5, [sp, #-32]!
	stp	q6, q7, [sp, #-32]!

	ldr	x0, [x16, #-8]	/* x0 = PLTGOT[1] */
	sub	x1, x9, x16	/* x1 = &PLTGOT[n+3] - &PLTGOT[2] = offset+8 */
	sub	x1, x1, #8	/* x1 = offset */
	lsr	x1, x1, #3	/* x1 /= sizeof(void *) */

	bl	_C_LABEL(_rtld_bind)
	mov	x17, x0		/* save result */

	/* restore q0-q7 for arguments */
	ldp	q6, q7, [sp], #32
	ldp	q4, q5, [sp], #32
	ldp	q2, q3, [sp], #32
	ldp	q0, q1, [sp], #32

	/* restore x0-x8 for arguments */
	ldp	x8, xzr, [sp], #16
	ldp	x6, x7, [sp], #16
	ldp	x4, x5, [sp], #16
	ldp	x2, x3, [sp], #16
	ldp	x0, x1, [sp], #16

	ldp	xzr, lr, [sp], #16	/* restore original lr pushed by plt0 */
	br	x17			/* call bound function */
END(_rtld_bind_start)

/*
 * Entry points used by _rtld_tlsdesc_fill.  They will be passed in x0
 * a pointer to:
 *
 *	struct rel_tlsdesc {
 *		uint64_t resolver_fnc;
 *		uint64_t resolver_arg;
 *	};
 *
 * They are called with a nonstandard calling convention and must
 * preserve all registers except x0.
 */
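
/*
 * For context (standard AArch64 TLSDESC convention, sketched here rather
 * than quoted from the ABI document): compiler-generated code loads
 * resolver_fnc from the GOT pair and calls it with x0 = &rel_tlsdesc,
 * then adds the result to the thread pointer, i.e. roughly
 *
 *	addr = tpidr_el0 + resolver_fnc(&rel_tlsdesc);
 *
 * which is why each resolver below returns an offset relative to the TCB.
 */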

/*
 * uint64_t@x0
 * _rtld_tlsdesc_static(struct rel_tlsdesc *rel_tlsdesc@x0);
 *
 *	Resolver function for TLS symbols resolved at load time.
 *
 *	rel_tlsdesc->resolver_arg is the offset of the static
 *	thread-local storage region, relative to the start of the TCB.
 *
 *	Nonstandard calling convention: Must preserve all registers
 *	except x0.
 */
ENTRY(_rtld_tlsdesc_static)
	.cfi_startproc
	ldr	x0, [x0, #8]		/* x0 := tcboffset */
	ret				/* return x0 = tcboffset */
	.cfi_endproc
END(_rtld_tlsdesc_static)

/*
 * uint64_t@x0
 * _rtld_tlsdesc_undef(struct rel_tlsdesc *rel_tlsdesc@x0);
 *
 *	Resolver function for weak and undefined TLS symbols.
 *
 *	rel_tlsdesc->resolver_arg is the Elf_Rela rela->r_addend.
 *
 *	Nonstandard calling convention: Must preserve all registers
 *	except x0.
 */
ENTRY(_rtld_tlsdesc_undef)
	.cfi_startproc
	str	x1, [sp, #-16]!		/* save x1 on stack */
	.cfi_adjust_cfa_offset	16

	mrs	x1, tpidr_el0		/* x1 := current thread tcb */
	ldr	x0, [x0, #8]		/* x0 := rela->r_addend */
	sub	x0, x0, x1		/* x0 := rela->r_addend - tcb */

	ldr	x1, [sp], #16		/* restore x1 from stack */
	.cfi_adjust_cfa_offset	-16
	.cfi_endproc
	ret				/* return x0 = rela->r_addend - tcb */
END(_rtld_tlsdesc_undef)

/*
 * uint64_t@x0
 * _rtld_tlsdesc_dynamic(struct rel_tlsdesc *tlsdesc@x0);
 *
 *	Resolver function for TLS symbols from dlopen().
 *
 *	rel_tlsdesc->resolver_arg is a pointer to a struct tls_data
 *	object allocated during relocation.
 *
 *	Nonstandard calling convention: Must preserve all registers
 *	except x0.
 */
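
/*
 * The loads below assume the following struct tls_data layout (field names
 * taken from the comments in this file; the authoritative definition lives
 * on the C side of ld.elf_so, so treat this as a sketch):
 *
 *	struct tls_data {
 *		uint64_t td_tlsindex;	(offset 0: DTV index)
 *		uint64_t td_tlsoffs;	(offset 8: offset within the block)
 *	};
 *
 * and the overall logic is approximately:
 *
 *	dtv = tcb->tcb_dtv;
 *	if (td->td_tlsindex <= DTV_MAX_INDEX(dtv) && dtv[td->td_tlsindex])
 *		return dtv[td->td_tlsindex] + td->td_tlsoffs - tcb;
 *	return _rtld_tls_get_addr(tcb, td->td_tlsindex, td->td_tlsoffs) - tcb;
 */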
ENTRY(_rtld_tlsdesc_dynamic)
	.cfi_startproc

	/* Save registers used in fast path */
	stp	x1, x2, [sp, #(-2 * 16)]!
	stp	x3, x4, [sp, #(1 * 16)]
	.cfi_adjust_cfa_offset	2 * 16
	.cfi_rel_offset		x1, 0
	.cfi_rel_offset		x2, 8
	.cfi_rel_offset		x3, 16
	.cfi_rel_offset		x4, 24

	/* Try for the fast path -- inlined version of __tls_get_addr. */

	ldr	x1, [x0, #8]		/* x1 := tlsdesc (struct tls_data *) */
	mrs	x4, tpidr_el0		/* x4 := tcb */
	ldr	x0, [x4]		/* x0 := dtv = tcb->tcb_dtv */

	ldr	x3, [x0, #-8]		/* x3 := max = DTV_MAX_INDEX(dtv) */
	ldr	x2, [x1, #0]		/* x2 := idx = tlsdesc->td_tlsindex */
	cmp	x2, x3
	b.gt	1f			/* Slow path if idx > max */

	ldr	x3, [x0, x2, lsl #3]	/* x3 := dtv[idx] */
	cbz	x3, 1f			/* Slow path if dtv[idx] is null */

	/*
	 * Fast path
	 *
	 * return (dtv[tlsdesc->td_tlsindex] + tlsdesc->td_tlsoffs - tcb)
	 */
	ldr	x2, [x1, #8]		/* x2 := offs = tlsdesc->td_tlsoffs */
	add	x2, x2, x3		/* x2 := addr = dtv[idx] + offs */
	sub	x0, x2, x4		/* x0 := addr - tcb */

	/* Restore fast path registers and return */
	ldp	x3, x4, [sp, #(1 * 16)]
	ldp	x1, x2, [sp], #(2 * 16)
	.cfi_adjust_cfa_offset	-2 * 16
	ret				/* return x0 = addr - tcb */

	/*
	 * Slow path
	 *
	 * return _rtld_tls_get_addr(tp, tlsdesc->td_tlsindex,
	 *     tlsdesc->td_tlsoffs);
	 *
	 */
1:
	/* Save all integer registers */
	stp	x29, x30, [sp, #-(8 * 16)]!
	.cfi_adjust_cfa_offset	8 * 16
	.cfi_rel_offset		x29, 0
	.cfi_rel_offset		x30, 8

	stp	x5, x6, [sp, #(1 * 16)]
	stp	x7, x8, [sp, #(2 * 16)]
	stp	x9, x10, [sp, #(3 * 16)]
	stp	x11, x12, [sp, #(4 * 16)]
	stp	x13, x14, [sp, #(5 * 16)]
	stp	x15, x16, [sp, #(6 * 16)]
	stp	x17, x18, [sp, #(7 * 16)]
	.cfi_rel_offset		x5, 16
	.cfi_rel_offset		x6, 24
	.cfi_rel_offset		x7, 32
	.cfi_rel_offset		x8, 40
	.cfi_rel_offset		x9, 48
	.cfi_rel_offset		x10, 56
	.cfi_rel_offset		x11, 64
	.cfi_rel_offset		x12, 72
	.cfi_rel_offset		x13, 80
	.cfi_rel_offset		x14, 88
	.cfi_rel_offset		x15, 96
	.cfi_rel_offset		x16, 104
	.cfi_rel_offset		x17, 112
	.cfi_rel_offset		x18, 120

	/* Find the tls offset */
	mov	x0, x4			/* x0 := tcb */
	mov	x3, x1			/* x3 := tlsdesc */
	ldr	x1, [x3, #0]		/* x1 := idx = tlsdesc->td_tlsindex */
	ldr	x2, [x3, #8]		/* x2 := offs = tlsdesc->td_tlsoffs */
	bl	_rtld_tls_get_addr	/* x0 := addr = _rtld_tls_get_addr(tcb,
					 *     idx, offs) */
	mrs	x1, tpidr_el0		/* x1 := tcb */
	sub	x0, x0, x1		/* x0 := addr - tcb */

	/* Restore slow path registers */
	ldp	x17, x18, [sp, #(7 * 16)]
	ldp	x15, x16, [sp, #(6 * 16)]
	ldp	x13, x14, [sp, #(5 * 16)]
	ldp	x11, x12, [sp, #(4 * 16)]
	ldp	x9, x10, [sp, #(3 * 16)]
	ldp	x7, x8, [sp, #(2 * 16)]
	ldp	x5, x6, [sp, #(1 * 16)]
	ldp	x29, x30, [sp], #(8 * 16)
	.cfi_adjust_cfa_offset	-8 * 16
	.cfi_restore		x29
	.cfi_restore		x30

	/* Restore fast path registers and return */
	ldp	x3, x4, [sp, #(1 * 16)]
	ldp	x1, x2, [sp], #(2 * 16)
	.cfi_adjust_cfa_offset	-2 * 16
	.cfi_endproc
	ret				/* return x0 = addr - tcb */
END(_rtld_tlsdesc_dynamic)
    325