/*	$NetBSD: lock_stubs.S,v 1.26 2016/04/11 14:14:27 bouyer Exp $	*/

/*-
 * Copyright (c) 2006, 2007, 2008, 2009 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Andrew Doran.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/*
 * AMD64 lock stubs.  Calling convention:
 *
 * %rdi		arg 1
 * %rsi		arg 2
 * %rdx		arg 3
 * %rax		return value
 */

#include "opt_multiprocessor.h"
#include "opt_lockdebug.h"

#include <machine/asm.h>
#include <machine/frameasm.h>

#include "assym.h"

#define	ENDLABEL(name,a)	.align a; LABEL(name)
#define	LOCK(num)		.Lpatch ## num: lock
#define	RET(num)		.Lret ## num: ret; nop; nop; ret

#ifndef LOCKDEBUG

/*
 * void mutex_enter(kmutex_t *mtx);
 *
 * Acquire a mutex and post a load fence.
 */
	.align	64

ENTRY(mutex_enter)
	movq	CPUVAR(CURLWP), %rcx
	xorq	%rax, %rax
	LOCK(1)
	cmpxchgq %rcx, (%rdi)
	jnz	1f
	RET(1)
1:
	jmp	_C_LABEL(mutex_vector_enter)
END(mutex_enter)
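/*
 * Illustrative sketch, not the actual implementation: the fast path
 * above is roughly the following C, assuming an atomic_cas_ptr()
 * style primitive and a simplified owner word at the start of
 * kmutex_t.  The CAS installs curlwp as owner only if the mutex was
 * free (zero); any other value means contention and a call into the
 * slow path:
 *
 *	void
 *	mutex_enter(kmutex_t *mtx)
 *	{
 *		if (atomic_cas_ptr(&mtx->mtx_owner, NULL, curlwp) != NULL)
 *			mutex_vector_enter(mtx);
 *	}
 *
 * mutex_exit() below is the mirror image: swap curlwp back to NULL,
 * taking the slow path if the word has changed in the meantime
 * (for example, because waiters are recorded in it).
 */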
/*
 * void mutex_exit(kmutex_t *mtx);
 *
 * Release a mutex and post a load fence.
 *
 * See comments in mutex_vector_enter() about doing this operation unlocked
 * on multiprocessor systems, and comments in arch/x86/include/lock.h about
 * memory ordering on Intel x86 systems.
 */
ENTRY(mutex_exit)
	movq	CPUVAR(CURLWP), %rax
	xorq	%rdx, %rdx
	cmpxchgq %rdx, (%rdi)
	jnz	1f
	ret
1:
	jmp	_C_LABEL(mutex_vector_exit)
END(mutex_exit)

/*
 * void mutex_spin_enter(kmutex_t *mtx);
 *
 * Acquire a spin mutex and post a load fence.
 */
ENTRY(mutex_spin_enter)
	movl	$1, %eax
	movl	CPUVAR(ILEVEL), %esi
	movzbl	MTX_IPL(%rdi), %ecx		/* new SPL */
	cmpl	%ecx, %esi			/* higher? */
	cmovgl	%esi, %ecx
	movl	%ecx, CPUVAR(ILEVEL)		/* splraiseipl() */
	subl	%eax, CPUVAR(MTX_COUNT)		/* decl doesn't set CF */
	cmovncl	CPUVAR(MTX_OLDSPL), %esi
	movl	%esi, CPUVAR(MTX_OLDSPL)
	xchgb	%al, MTX_LOCK(%rdi)		/* lock */
#ifdef MULTIPROCESSOR	/* XXX for xen */
	testb	%al, %al
	jnz	1f
#endif
	RET(2)
1:
	jmp	_C_LABEL(mutex_spin_retry)	/* failed; hard case */
END(mutex_spin_enter)

/*
 * void mutex_spin_exit(kmutex_t *mtx);
 *
 * Release a spin mutex and post a load fence.
 */
ENTRY(mutex_spin_exit)
#ifdef DIAGNOSTIC

	movl	$0x0001, %eax			/* new + expected value */
	movq	CPUVAR(SELF), %r8
	cmpxchgb %ah, MTX_LOCK(%rdi)		/* unlock */
	jnz	_C_LABEL(mutex_vector_exit)	/* hard case if problems */
	movl	CPU_INFO_MTX_OLDSPL(%r8), %edi
	incl	CPU_INFO_MTX_COUNT(%r8)
	jnz	1f
	cmpl	CPU_INFO_ILEVEL(%r8), %edi
	jae	1f
	movl	CPU_INFO_IUNMASK(%r8,%rdi,4), %esi
	CLI(ax)
	testl	CPU_INFO_IPENDING(%r8), %esi
	jnz	_C_LABEL(Xspllower)
	movl	%edi, CPU_INFO_ILEVEL(%r8)
	STI(ax)
1:	rep					/* double byte ret as branch */
	ret					/* target: see AMD docs */

#else	/* DIAGNOSTIC */

	movq	CPUVAR(SELF), %rsi
	movb	$0x00, MTX_LOCK(%rdi)
	movl	CPU_INFO_MTX_OLDSPL(%rsi), %ecx
	incl	CPU_INFO_MTX_COUNT(%rsi)
	movl	CPU_INFO_ILEVEL(%rsi),%edx
	cmovnzl	%edx,%ecx
	pushq	%rbx
	cmpl	%edx,%ecx			/* new level is lower? */
	jae	2f
1:
	movl	CPU_INFO_IPENDING(%rsi),%eax
	testl	%eax,CPU_INFO_IUNMASK(%rsi,%rcx,4)/* deferred interrupts? */
	jnz	3f
	movl	%eax,%ebx
	cmpxchg8b CPU_INFO_ISTATE(%rsi)		/* swap in new ilevel */
	jnz	4f
2:
	popq	%rbx
	ret
3:
	popq	%rbx
	movl	%ecx, %edi
	jmp	_C_LABEL(Xspllower)
4:
	jmp	1b

#endif	/* DIAGNOSTIC */

END(mutex_spin_exit)
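/*
 * Illustrative sketch, not the actual implementation: ignoring the
 * inlined splx()/pending-interrupt handling and the DIAGNOSTIC
 * checks, the spin mutex pair above behaves roughly like the
 * following C.  Here ci stands for curcpu(), xchg8() for the xchgb
 * instruction, and the field names are simplified:
 *
 *	void
 *	mutex_spin_enter(kmutex_t *mtx)
 *	{
 *		int s = splraise(mtx->mtx_ipl);
 *		if (--ci->ci_mtx_count == -1)
 *			ci->ci_mtx_oldspl = s;
 *		if (xchg8(&mtx->mtx_lock, 1) != 0)
 *			mutex_spin_retry(mtx);
 *	}
 *
 *	void
 *	mutex_spin_exit(kmutex_t *mtx)
 *	{
 *		mtx->mtx_lock = 0;
 *		if (++ci->ci_mtx_count == 0)
 *			splx(ci->ci_mtx_oldspl);
 *	}
 *
 * ci_mtx_count is zero when no spin mutexes are held, so the old SPL
 * is recorded on the outermost acquire and restored on the outermost
 * release.
 */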
/*
 * void rw_enter(krwlock_t *rwl, krw_t op);
 *
 * Acquire one hold on a RW lock.
 */
ENTRY(rw_enter)
	cmpl	$RW_READER, %esi
	jne	2f

	/*
	 * Reader: this is the most common case.
	 */
	movq	(%rdi), %rax
0:
	testb	$(RW_WRITE_LOCKED|RW_WRITE_WANTED), %al
	jnz	3f
	leaq	RW_READ_INCR(%rax), %rdx
	LOCK(2)
	cmpxchgq %rdx, (%rdi)
	jnz	1f
	RET(3)
1:
	jmp	0b

	/*
	 * Writer: if the compare-and-set fails, don't bother retrying.
	 */
2:	movq	CPUVAR(CURLWP), %rcx
	xorq	%rax, %rax
	orq	$RW_WRITE_LOCKED, %rcx
	LOCK(3)
	cmpxchgq %rcx, (%rdi)
	jnz	3f
	RET(4)
3:
	jmp	_C_LABEL(rw_vector_enter)
END(rw_enter)

/*
 * void rw_exit(krwlock_t *rwl);
 *
 * Release one hold on a RW lock.
 */
ENTRY(rw_exit)
	movq	(%rdi), %rax
	testb	$RW_WRITE_LOCKED, %al
	jnz	2f

	/*
	 * Reader
	 */
0:	testb	$RW_HAS_WAITERS, %al
	jnz	3f
	cmpq	$RW_READ_INCR, %rax
	jb	3f
	leaq	-RW_READ_INCR(%rax), %rdx
	LOCK(4)
	cmpxchgq %rdx, (%rdi)
	jnz	1f
	ret
1:
	jmp	0b

	/*
	 * Writer
	 */
2:	leaq	-RW_WRITE_LOCKED(%rax), %rdx
	subq	CPUVAR(CURLWP), %rdx
	jnz	3f
	LOCK(5)
	cmpxchgq %rdx, (%rdi)
	jnz	3f
	ret

3:	jmp	_C_LABEL(rw_vector_exit)
END(rw_exit)

/*
 * int rw_tryenter(krwlock_t *rwl, krw_t op);
 *
 * Try to acquire one hold on a RW lock.
 */
ENTRY(rw_tryenter)
	cmpl	$RW_READER, %esi
	jne	2f

	/*
	 * Reader: this is the most common case.
	 */
	movq	(%rdi), %rax
0:
	testb	$(RW_WRITE_LOCKED|RW_WRITE_WANTED), %al
	jnz	4f
	leaq	RW_READ_INCR(%rax), %rdx
	LOCK(8)
	cmpxchgq %rdx, (%rdi)
	jnz	1f
	movl	%edx, %eax			/* nonzero */
	RET(5)
1:
	jmp	0b

	/*
	 * Writer: if the compare-and-set fails, don't bother retrying.
	 */
2:	movq	CPUVAR(CURLWP), %rcx
	xorq	%rax, %rax
	orq	$RW_WRITE_LOCKED, %rcx
	LOCK(9)
	cmpxchgq %rcx, (%rdi)
	movl	$0, %eax
	setz	%al
3:
	RET(6)
	ret
4:
	xorl	%eax, %eax
	jmp	3b
END(rw_tryenter)

#endif	/* LOCKDEBUG */
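/*
 * Illustrative sketch, not the actual implementation: every RW fast
 * path above is a single compare-and-swap of the lock word.  Read
 * holds are counted in the word in multiples of RW_READ_INCR, and a
 * writer stores curlwp | RW_WRITE_LOCKED.  The reader path of
 * rw_enter() is roughly the following C, assuming an
 * atomic_cas_ulong() style primitive and a simplified rw_owner field:
 *
 *	for (old = rwl->rw_owner;;) {
 *		if (old & (RW_WRITE_LOCKED | RW_WRITE_WANTED)) {
 *			rw_vector_enter(rwl);
 *			break;
 *		}
 *		cur = atomic_cas_ulong(&rwl->rw_owner, old,
 *		    old + RW_READ_INCR);
 *		if (cur == old)
 *			break;
 *		old = cur;
 *	}
 *
 * The writer paths give up after a single failed compare-and-swap
 * (falling into the slow path, or returning zero from rw_tryenter())
 * instead of looping.
 */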
/*
 * Spinlocks.
 */
ENTRY(__cpu_simple_lock_init)
	movb	$0, (%rdi)
	ret
END(__cpu_simple_lock_init)

NENTRY(__cpu_simple_lock)
	movl	$0x0100, %eax
1:
	LOCK(6)
	cmpxchgb %ah, (%rdi)
	jnz	2f
	RET(7)
2:
	movl	$0x0100, %eax
	pause
	nop
	nop
	cmpb	$0, (%rdi)
	je	1b
	jmp	2b
END(__cpu_simple_lock)

NENTRY(__cpu_simple_unlock)
	movb	$0, (%rdi)
	ret
END(__cpu_simple_unlock)

ENTRY(__cpu_simple_lock_try)
	movl	$0x0100, %eax
	LOCK(7)
	cmpxchgb %ah, (%rdi)
	movl	$0, %eax
	setz	%al
	RET(8)
END(__cpu_simple_lock_try)

/*
 * Patchpoints to replace with NOP when ncpu == 1.
 */
#ifndef LOCKDEBUG
	.type	_C_LABEL(x86_lockpatch), @object
LABEL(x86_lockpatch)
	.quad	.Lpatch1, .Lpatch2, .Lpatch3, .Lpatch4
	.quad	.Lpatch5, .Lpatch6, .Lpatch7, .Lpatch8
	.quad	.Lpatch9
	.quad	0
END(x86_lockpatch)
#endif

	.type	_C_LABEL(x86_retpatch), @object
LABEL(x86_retpatch)
#ifndef LOCKDEBUG
	.quad	.Lret1, .Lret2, .Lret3, .Lret4, .Lret5, .Lret6
#endif
	.quad	.Lret7, .Lret8
	.quad	0
END(x86_retpatch)
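/*
 * Illustrative sketch, not the actual implementation: each table
 * above is a zero-terminated list of code addresses that the kernel
 * rewrites in place early at boot (see x86/x86/patch.c).  On a
 * uniprocessor, roughly:
 *
 *	for (i = 0; x86_lockpatch[i] != 0; i++)
 *		*(uint8_t *)x86_lockpatch[i] = 0x90;
 *
 * which turns each recorded LOCK prefix into a NOP.  RET() emits
 * "ret; nop; nop; ret" so that an x86_retpatch entry leaves room to
 * overwrite the first three bytes with a longer instruction (such as
 * a fence, where a CPU erratum workaround calls for one), execution
 * then falling through to the final ret.
 */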