/*	$NetBSD: lock_stubs.S,v 1.27 2018/01/07 13:15:23 maxv Exp $	*/

/*-
 * Copyright (c) 2006, 2007, 2008, 2009 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Andrew Doran.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/*
 * AMD64 lock stubs.  Calling convention:
 *
 * %rdi		arg 1
 * %rsi		arg 2
 * %rdx		arg 3
 * %rax		return value
 */

#include "opt_multiprocessor.h"
#include "opt_lockdebug.h"

#include <machine/asm.h>
#include <machine/frameasm.h>

#include "assym.h"

#define	ENDLABEL(name,a) .align	a; LABEL(name)
#define	LOCK(num)	\
	HOTPATCH(HP_NAME_NOLOCK, 1)	; \
	lock
#define	RET(num)	.Lret ## num: ret; nop; nop; ret
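
/*
 * LOCK() wraps the "lock" prefix in a HOTPATCH() marker so that the
 * prefix can be overwritten with a no-op at boot time on uniprocessor
 * kernels (see x86/patch.c).  RET() emits a "ret; nop; nop; ret"
 * sequence under a local label; those labels are collected in the
 * x86_retpatch table at the bottom of this file so the sequence can be
 * rewritten in place (for example to "lfence; ret") where an errata
 * workaround requires it.
 */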

#ifndef LOCKDEBUG

/*
 * void mutex_enter(kmutex_t *mtx);
 *
 * Acquire a mutex and post a load fence.
 */
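
/*
 * Roughly equivalent C, as a sketch: compare-and-swap curlwp into the
 * owner word, falling back to the slow path if the mutex was not free.
 * atomic_cas_ptr() is from <sys/atomic.h>; the "mtx_owner" field name
 * is illustrative, not a statement about the real kmutex_t layout.
 *
 *	void
 *	mutex_enter(kmutex_t *mtx)
 *	{
 *		if (atomic_cas_ptr(&mtx->mtx_owner, NULL, curlwp) != NULL)
 *			mutex_vector_enter(mtx);
 *	}
 */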
	.align	64

ENTRY(mutex_enter)
	movq	CPUVAR(CURLWP), %rcx
	xorq	%rax, %rax
	LOCK(1)
	cmpxchgq %rcx, (%rdi)
	jnz	1f
	RET(1)
1:
	jmp	_C_LABEL(mutex_vector_enter)
END(mutex_enter)

/*
 * void mutex_exit(kmutex_t *mtx);
 *
 * Release a mutex and post a store fence.
 *
 * See comments in mutex_vector_enter() about doing this operation unlocked
 * on multiprocessor systems, and comments in arch/x86/include/lock.h about
 * memory ordering on Intel x86 systems.
 */
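
/*
 * Roughly equivalent C, as a sketch (field naming as in the mutex_enter
 * sketch above).  Note that the stub's cmpxchg deliberately carries no
 * "lock" prefix, per the comments above, which a plain atomic_cas_ptr()
 * call does not express:
 *
 *	void
 *	mutex_exit(kmutex_t *mtx)
 *	{
 *		if (atomic_cas_ptr(&mtx->mtx_owner, curlwp, NULL) != curlwp)
 *			mutex_vector_exit(mtx);
 *	}
 */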
ENTRY(mutex_exit)
	movq	CPUVAR(CURLWP), %rax
	xorq	%rdx, %rdx
	cmpxchgq %rdx, (%rdi)
	jnz	1f
	ret
1:
	jmp	_C_LABEL(mutex_vector_exit)
END(mutex_exit)

/*
 * void mutex_spin_enter(kmutex_t *mtx);
 *
 * Acquire a spin mutex and post a load fence.
 */
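
/*
 * In rough C (a sketch; curcpu(), splraiseipl() and the ci_mtx_* fields
 * exist in the kernel, but the exact spelling of the mutex fields here
 * is illustrative):
 *
 *	void
 *	mutex_spin_enter(kmutex_t *mtx)
 *	{
 *		int s = splraiseipl(mtx->mtx_ipl);
 *		struct cpu_info *ci = curcpu();
 *
 *		if (ci->ci_mtx_count-- == 0)
 *			ci->ci_mtx_oldspl = s;
 *		if (!__cpu_simple_lock_try(&mtx->mtx_lock))
 *			mutex_spin_retry(mtx);
 *	}
 */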
ENTRY(mutex_spin_enter)
	movl	$1, %eax
	movl	CPUVAR(ILEVEL), %esi
	movzbl	MTX_IPL(%rdi), %ecx		/* new SPL */
	cmpl	%ecx, %esi			/* higher? */
	cmovgl	%esi, %ecx
	movl	%ecx, CPUVAR(ILEVEL)		/* splraiseipl() */
	subl	%eax, CPUVAR(MTX_COUNT)		/* decl doesn't set CF */
	cmovncl	CPUVAR(MTX_OLDSPL), %esi
	movl	%esi, CPUVAR(MTX_OLDSPL)
	xchgb	%al, MTX_LOCK(%rdi)		/* lock */
#ifdef MULTIPROCESSOR	/* XXX for xen */
	testb	%al, %al
	jnz	1f
#endif
	RET(2)
1:
	jmp	_C_LABEL(mutex_spin_retry)	/* failed; hard case */
END(mutex_spin_enter)

/*
 * void mutex_spin_exit(kmutex_t *mtx);
 *
 * Release a spin mutex and post a store fence.
 */
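
/*
 * In rough C (a sketch matching the !DIAGNOSTIC path below; field
 * spelling is illustrative as in the mutex_spin_enter sketch, and the
 * deferred-interrupt replay handled via Xspllower is elided):
 *
 *	void
 *	mutex_spin_exit(kmutex_t *mtx)
 *	{
 *		struct cpu_info *ci = curcpu();
 *		int s = ci->ci_mtx_oldspl;
 *
 *		__cpu_simple_unlock(&mtx->mtx_lock);
 *		if (++ci->ci_mtx_count == 0)
 *			splx(s);
 *	}
 */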
ENTRY(mutex_spin_exit)
#ifdef DIAGNOSTIC

	movl	$0x0001, %eax			/* new + expected value */
	movq	CPUVAR(SELF), %r8
	cmpxchgb %ah, MTX_LOCK(%rdi)		/* unlock */
	jnz	_C_LABEL(mutex_vector_exit)	/* hard case if problems */
	movl	CPU_INFO_MTX_OLDSPL(%r8), %edi
	incl	CPU_INFO_MTX_COUNT(%r8)
	jnz	1f
	cmpl	CPU_INFO_ILEVEL(%r8), %edi
	jae	1f
	movl	CPU_INFO_IUNMASK(%r8,%rdi,4), %esi
	CLI(ax)
	testl	CPU_INFO_IPENDING(%r8), %esi
	jnz	_C_LABEL(Xspllower)
	movl	%edi, CPU_INFO_ILEVEL(%r8)
	STI(ax)
1:	rep					/* double byte ret as branch */
	ret					/* target: see AMD docs */

#else	/* DIAGNOSTIC */

	movq	CPUVAR(SELF), %rsi
	movb	$0x00, MTX_LOCK(%rdi)
	movl	CPU_INFO_MTX_OLDSPL(%rsi), %ecx
	incl	CPU_INFO_MTX_COUNT(%rsi)
	movl	CPU_INFO_ILEVEL(%rsi),%edx
	cmovnzl	%edx,%ecx
	pushq	%rbx
	cmpl	%edx,%ecx			/* new level is lower? */
	jae	2f
1:
	movl	CPU_INFO_IPENDING(%rsi),%eax
	testl	%eax,CPU_INFO_IUNMASK(%rsi,%rcx,4) /* deferred interrupts? */
	jnz	3f
	movl	%eax,%ebx
	cmpxchg8b CPU_INFO_ISTATE(%rsi)		/* swap in new ilevel */
	jnz	4f
2:
	popq	%rbx
	ret
3:
	popq	%rbx
	movl	%ecx, %edi
	jmp	_C_LABEL(Xspllower)
4:
	jmp	1b

#endif	/* DIAGNOSTIC */

END(mutex_spin_exit)

/*
 * void	rw_enter(krwlock_t *rwl, krw_t op);
 *
 * Acquire one hold on a RW lock.
 */
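
/*
 * Rough C equivalent of the reader path (a sketch; the RW_* constants
 * and rw_vector_enter() are real, the "rw_owner" spelling is
 * illustrative):
 *
 *	uintptr_t owner, next;
 *
 *	for (owner = rwl->rw_owner;; owner = next) {
 *		if ((owner & (RW_WRITE_LOCKED | RW_WRITE_WANTED)) != 0) {
 *			rw_vector_enter(rwl, RW_READER);
 *			return;
 *		}
 *		next = atomic_cas_ulong(&rwl->rw_owner, owner,
 *		    owner + RW_READ_INCR);
 *		if (next == owner)
 *			return;
 *	}
 */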
ENTRY(rw_enter)
	cmpl	$RW_READER, %esi
	jne	2f

	/*
	 * Reader: this is the most common case.
	 */
	movq	(%rdi), %rax
0:
	testb	$(RW_WRITE_LOCKED|RW_WRITE_WANTED), %al
	jnz	3f
	leaq	RW_READ_INCR(%rax), %rdx
	LOCK(2)
	cmpxchgq %rdx, (%rdi)
	jnz	1f
	RET(3)
1:
	jmp	0b

	/*
	 * Writer: if the compare-and-set fails, don't bother retrying.
	 */
2:	movq	CPUVAR(CURLWP), %rcx
	xorq	%rax, %rax
	orq	$RW_WRITE_LOCKED, %rcx
	LOCK(3)
	cmpxchgq %rcx, (%rdi)
	jnz	3f
	RET(4)
3:
	jmp	_C_LABEL(rw_vector_enter)
END(rw_enter)

/*
 * void	rw_exit(krwlock_t *rwl);
 *
 * Release one hold on a RW lock.
 */
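
/*
 * In rough C (a sketch; naming as in the rw_enter sketch above).  The
 * writer path releases only if the word is exactly curlwp with the
 * write bit and no waiter bits; the reader path drops one hold unless
 * waiters must be woken or the count would underflow:
 *
 *	void
 *	rw_exit(krwlock_t *rwl)
 *	{
 *		uintptr_t owner, next;
 *
 *		owner = rwl->rw_owner;
 *		if ((owner & RW_WRITE_LOCKED) != 0) {
 *			if (owner == ((uintptr_t)curlwp | RW_WRITE_LOCKED) &&
 *			    atomic_cas_ulong(&rwl->rw_owner, owner, 0) == owner)
 *				return;
 *			rw_vector_exit(rwl);
 *			return;
 *		}
 *		for (;; owner = next) {
 *			if ((owner & RW_HAS_WAITERS) != 0 ||
 *			    owner < RW_READ_INCR) {
 *				rw_vector_exit(rwl);
 *				return;
 *			}
 *			next = atomic_cas_ulong(&rwl->rw_owner, owner,
 *			    owner - RW_READ_INCR);
 *			if (next == owner)
 *				return;
 *		}
 *	}
 */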
ENTRY(rw_exit)
	movq	(%rdi), %rax
	testb	$RW_WRITE_LOCKED, %al
	jnz	2f

	/*
	 * Reader
	 */
0:	testb	$RW_HAS_WAITERS, %al
	jnz	3f
	cmpq	$RW_READ_INCR, %rax
	jb	3f
	leaq	-RW_READ_INCR(%rax), %rdx
	LOCK(4)
	cmpxchgq %rdx, (%rdi)
	jnz	1f
	ret
1:
	jmp	0b

	/*
	 * Writer
	 */
2:	leaq	-RW_WRITE_LOCKED(%rax), %rdx
	subq	CPUVAR(CURLWP), %rdx
	jnz	3f
	LOCK(5)
	cmpxchgq %rdx, (%rdi)
	jnz	3f
	ret

3:	jmp	_C_LABEL(rw_vector_exit)
END(rw_exit)

/*
 * int	rw_tryenter(krwlock_t *rwl, krw_t op);
 *
 * Try to acquire one hold on a RW lock.
 */
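
/*
 * In rough C (a sketch; naming as in the rw_enter sketch above),
 * returning nonzero on success:
 *
 *	int
 *	rw_tryenter(krwlock_t *rwl, krw_t op)
 *	{
 *		uintptr_t owner, next;
 *
 *		if (op != RW_READER) {
 *			return atomic_cas_ulong(&rwl->rw_owner, 0,
 *			    (uintptr_t)curlwp | RW_WRITE_LOCKED) == 0;
 *		}
 *		for (owner = rwl->rw_owner;; owner = next) {
 *			if ((owner & (RW_WRITE_LOCKED | RW_WRITE_WANTED)) != 0)
 *				return 0;
 *			next = atomic_cas_ulong(&rwl->rw_owner, owner,
 *			    owner + RW_READ_INCR);
 *			if (next == owner)
 *				return 1;
 *		}
 *	}
 */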
ENTRY(rw_tryenter)
	cmpl	$RW_READER, %esi
	jne	2f

	/*
	 * Reader: this is the most common case.
	 */
	movq	(%rdi), %rax
0:
	testb	$(RW_WRITE_LOCKED|RW_WRITE_WANTED), %al
	jnz	4f
	leaq	RW_READ_INCR(%rax), %rdx
	LOCK(8)
	cmpxchgq %rdx, (%rdi)
	jnz	1f
	movl	%edx, %eax			/* nonzero */
	RET(5)
1:
	jmp	0b

	/*
	 * Writer: if the compare-and-set fails, don't bother retrying.
	 */
2:	movq	CPUVAR(CURLWP), %rcx
	xorq	%rax, %rax
	orq	$RW_WRITE_LOCKED, %rcx
	LOCK(9)
	cmpxchgq %rcx, (%rdi)
	movl	$0, %eax
	setz	%al
3:
	RET(6)
4:
	xorl	%eax, %eax
	jmp	3b
END(rw_tryenter)

#endif	/* LOCKDEBUG */

/*
 * Spinlocks.
 */
ENTRY(__cpu_simple_lock_init)
	movb	$0, (%rdi)
	ret
END(__cpu_simple_lock_init)

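/*
 * void __cpu_simple_lock(__cpu_simple_lock_t *lockp);
 *
 * Loading %eax with 0x0100 sets up both cmpxchgb operands at once:
 * %al holds the expected value (0, unlocked) and %ah the new value
 * (1, locked).  On failure, spin with "pause" on plain loads until the
 * lock looks free, and only then retry the locked operation
 * (test-and-test-and-set).
 */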
NENTRY(__cpu_simple_lock)
	movl	$0x0100, %eax
1:
	LOCK(6)
	cmpxchgb %ah, (%rdi)
	jnz	2f
	RET(7)
2:
	movl	$0x0100, %eax
	pause
	nop
	nop
	cmpb	$0, (%rdi)
	je	1b
	jmp	2b
END(__cpu_simple_lock)

NENTRY(__cpu_simple_unlock)
	movb	$0, (%rdi)
	ret
END(__cpu_simple_unlock)

ENTRY(__cpu_simple_lock_try)
	movl	$0x0100, %eax
	LOCK(7)
	cmpxchgb %ah, (%rdi)
	movl	$0, %eax
	setz	%al
	RET(8)
END(__cpu_simple_lock_try)

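/*
 * Zero-terminated table of the RET() return points above, consumed at
 * boot time by the patch code (x86/patch.c) when an errata workaround
 * requires rewriting the "ret; nop; nop; ret" sequences in place.
 */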
	.type	_C_LABEL(x86_retpatch), @object
LABEL(x86_retpatch)
#ifndef LOCKDEBUG
	.quad	.Lret1, .Lret2, .Lret3, .Lret4, .Lret5, .Lret6
#endif
	.quad	.Lret7, .Lret8
	.quad	0
END(x86_retpatch)