/*	$NetBSD: atomic_init_m68k.c,v 1.2 2025/12/20 16:25:09 thorpej Exp $	*/

/*-
 * Copyright (c) 2008, 2025 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Jason R. Thorpe.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <sys/cdefs.h>
__RCSID("$NetBSD: atomic_init_m68k.c,v 1.2 2025/12/20 16:25:09 thorpej Exp $");

#include "extern.h"
#include "../../../atomic/atomic_op_namespace.h"

#include <sys/types.h>
#include <sys/atomic.h>
#include <sys/ras.h>
#include <sys/sysctl.h>

/* __sysctl syscall stub */
#include "../../../../../../lib/libc/include/__sysctl.h"

#include <machine/cpu.h>

#include <stdlib.h>

/*
 * libc glue for m68k atomic operations.
 *
 * 68020 and later have a CAS instruction that can be used to implement
 * everything.  However, CAS (and TAS) are unusable on some 68020 systems
 * (see __HAVE_M68K_BROKEN_RMC).
 *
 * The 68010 has no CAS instruction.
 *
 * So, for BROKEN_RMC and 68010, we use a restartable atomic sequence.
 *
 * Whichever compare-and-swap implementation is chosen is used to implement
 * all of the other atomic operations:
 *
 * ==> The *_nv() variants generally require a compare-and-swap implementation
 *     anyway.
 *
 * ==> The other single-instruction operations (ADDx, ORx, etc.) are not
 *     truly atomic in that they do not generate a non-interruptible bus
 *     cycle, and thus would not work in a multi-processor environment.
 *     (We don't support any multiprocessor m68k systems today, but hey,
 *     it could happen!)
 */

extern uint32_t _atomic_cas_32_ras(volatile uint32_t *, uint32_t, uint32_t);
RAS_DECL(_atomic_cas_32_ras);

extern uint16_t _atomic_cas_16_ras(volatile uint16_t *, uint16_t, uint16_t);
RAS_DECL(_atomic_cas_16_ras);

extern uint8_t  _atomic_cas_8_ras(volatile uint8_t *, uint8_t, uint8_t);
RAS_DECL(_atomic_cas_8_ras);
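
/*
 * For illustration only: the _atomic_cas_*_ras sequences declared above are
 * provided in assembly elsewhere, but the 32-bit case would look roughly
 * like the (disabled) C sketch below.  If the thread is preempted between
 * RAS_START and RAS_END, the kernel restarts it at RAS_START, so the
 * load/compare/store behaves atomically on a uniprocessor without needing
 * a CAS instruction.  The real sequences are assembly in part so that the
 * conditional store is the last instruction before the end label; code
 * generated from C could be preempted and restarted after the store, which
 * would break the compare-and-swap contract.
 */
#if 0
uint32_t
_atomic_cas_32_ras(volatile uint32_t *ptr, uint32_t old, uint32_t new)
{
	uint32_t cur;

	RAS_START(_atomic_cas_32_ras);
	cur = *ptr;
	if (cur == old)
		*ptr = new;
	RAS_END(_atomic_cas_32_ras);

	return cur;
}
#endif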

#ifdef __mc68010__
#define	CAS32_DEFAULT	\
	((uint32_t (*)(volatile uint32_t *, uint32_t, uint32_t))abort)
#define	CAS16_DEFAULT	\
	((uint16_t (*)(volatile uint16_t *, uint16_t, uint16_t))abort)
#define	CAS8_DEFAULT	\
	((uint8_t (*)(volatile uint8_t *, uint8_t, uint8_t))abort)
#else
extern uint32_t _atomic_cas_32_casl(volatile uint32_t *, uint32_t, uint32_t);
extern uint16_t _atomic_cas_16_casw(volatile uint16_t *, uint16_t, uint16_t);
extern uint8_t  _atomic_cas_8_casb(volatile uint8_t *, uint8_t, uint8_t);

/* Default to CASx implementation, fall back on RAS only when necessary. */
#define	CAS32_DEFAULT	_atomic_cas_32_casl
#define	CAS16_DEFAULT	_atomic_cas_16_casw
#define	CAS8_DEFAULT	_atomic_cas_8_casb
#endif /* ! __mc68010__ */

static uint32_t (*_atomic_cas_32_fn)(volatile uint32_t *, uint32_t, uint32_t);
static uint16_t (*_atomic_cas_16_fn)(volatile uint16_t *, uint16_t, uint16_t);
static uint8_t (*_atomic_cas_8_fn)(volatile uint8_t *, uint8_t, uint8_t);
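
/*
 * For illustration only: as the block comment above notes, every other
 * atomic operation is layered on top of whichever compare-and-swap
 * implementation was selected.  The (disabled) sketch below shows the usual
 * pattern for an add-and-fetch built on the 32-bit CAS function pointer.
 * The name example_atomic_add_32_nv is hypothetical; the real entry points
 * are provided elsewhere.
 */
#if 0
static uint32_t
example_atomic_add_32_nv(volatile uint32_t *ptr, uint32_t delta)
{
	uint32_t old, new;

	do {
		old = *ptr;
		new = old + delta;
		/* Retry if another writer changed *ptr in the meantime. */
	} while ((*_atomic_cas_32_fn)(ptr, old, new) != old);

	return new;
}
#endif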

void *_atomic_cas_32_a0(volatile uint32_t *, uint32_t, uint32_t);

void *
_atomic_cas_32_a0(volatile uint32_t *ptr, uint32_t old, uint32_t new)
{
	/* Force return value to be duplicated into %a0. */
	return (void *)(*_atomic_cas_32_fn)(ptr, old, new);
}

#undef atomic_cas_32
#undef atomic_cas_uint
#undef atomic_cas_ulong
#undef atomic_cas_ptr
#undef atomic_cas_32_ni
#undef atomic_cas_uint_ni
#undef atomic_cas_ulong_ni
#undef atomic_cas_ptr_ni

__strong_alias(_atomic_cas_32,_atomic_cas_32_a0)
atomic_op_alias(atomic_cas_32,_atomic_cas_32_a0)
atomic_op_alias(atomic_cas_uint,_atomic_cas_32_a0)
__strong_alias(_atomic_cas_uint,_atomic_cas_32_a0)
atomic_op_alias(atomic_cas_ulong,_atomic_cas_32_a0)
__strong_alias(_atomic_cas_ulong,_atomic_cas_32_a0)
atomic_op_alias(atomic_cas_ptr,_atomic_cas_32_a0)
__strong_alias(_atomic_cas_ptr,_atomic_cas_32_a0)

atomic_op_alias(atomic_cas_32_ni,_atomic_cas_32_a0)
__strong_alias(_atomic_cas_32_ni,_atomic_cas_32_a0)
atomic_op_alias(atomic_cas_uint_ni,_atomic_cas_32_a0)
__strong_alias(_atomic_cas_uint_ni,_atomic_cas_32_a0)
atomic_op_alias(atomic_cas_ulong_ni,_atomic_cas_32_a0)
__strong_alias(_atomic_cas_ulong_ni,_atomic_cas_32_a0)
atomic_op_alias(atomic_cas_ptr_ni,_atomic_cas_32_a0)
__strong_alias(_atomic_cas_ptr_ni,_atomic_cas_32_a0)

crt_alias(__sync_val_compare_and_swap_4,_atomic_cas_32_a0)

uint16_t
_atomic_cas_16(volatile uint16_t *ptr, uint16_t old, uint16_t new)
{
	return (*_atomic_cas_16_fn)(ptr, old, new);
}

#undef atomic_cas_16
atomic_op_alias(atomic_cas_16,_atomic_cas_16)
crt_alias(__sync_val_compare_and_swap_2,_atomic_cas_16)

uint8_t
_atomic_cas_8(volatile uint8_t *ptr, uint8_t old, uint8_t new)
{
	return (*_atomic_cas_8_fn)(ptr, old, new);
}

#undef atomic_cas_8
atomic_op_alias(atomic_cas_8,_atomic_cas_8)
crt_alias(__sync_val_compare_and_swap_1,_atomic_cas_8)

void __section(".text.startup") __attribute__ ((__visibility__("hidden")))
__libc_atomic_init(void)
{
	_atomic_cas_32_fn = CAS32_DEFAULT;
	_atomic_cas_16_fn = CAS16_DEFAULT;
	_atomic_cas_8_fn = CAS8_DEFAULT;

#ifndef __mc68010__
	int mib[2];
	size_t len;
	bool broken_rmc;

	/*
	 * Check to see if this system has a non-working /RMC.  If
	 * the __sysctl() call fails, or if it indicates that /RMC
	 * works fine, then we have no further work to do because
	 * the stubs default to the CASx-using _atomic_cas_*()
	 * functions.
	 */
	mib[0] = CTL_MACHDEP;
	mib[1] = CPU_BROKEN_RMC;
	len = sizeof(broken_rmc);
	if (__sysctl(mib, 2, &broken_rmc, &len, NULL, 0) == -1 || !broken_rmc) {
		return;
	}
#endif /* ! __mc68010__ */

	/*
	 * If we get here, we either have a broken RMC system or a
	 * 68010.  In either case, we need to register the restartable
	 * atomic sequences with the kernel.
	 *
	 * XXX Should consider a lazy initialization of these.
	 */
	if (rasctl(RAS_ADDR(_atomic_cas_32_ras), RAS_SIZE(_atomic_cas_32_ras),
		   RAS_INSTALL) == 0) {
		_atomic_cas_32_fn = _atomic_cas_32_ras;
	}
	if (rasctl(RAS_ADDR(_atomic_cas_16_ras), RAS_SIZE(_atomic_cas_16_ras),
		   RAS_INSTALL) == 0) {
		_atomic_cas_16_fn = _atomic_cas_16_ras;
	}
	if (rasctl(RAS_ADDR(_atomic_cas_8_ras), RAS_SIZE(_atomic_cas_8_ras),
		   RAS_INSTALL) == 0) {
		_atomic_cas_8_fn = _atomic_cas_8_ras;
	}
}