/*	$NetBSD: membar_ops.S,v 1.6 2022/04/09 23:32:52 riastradh Exp $	*/

/*-
 * Copyright (c) 2007 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Jason R. Thorpe, and by Andrew Doran.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */
/* Shared ENTRY/END/alias macros for the atomic/membar stubs. */
#include "atomic_op_asm.h"

__RCSID("$NetBSD: membar_ops.S,v 1.6 2022/04/09 23:32:52 riastradh Exp $")

	.text
/*
 * void membar_acquire(void)
 *
 *	Order load-before-load/store: no load or store after the
 *	barrier may be performed before any load preceding it.
 */
ENTRY(_membar_acquire)
	/*
	 * It is tempting to use isync to order load-before-load/store.
	 * However, isync orders prior loads only if their value flows
	 * into a control-flow dependency prior to the isync:
	 *
	 *	`[I]f an isync follows a conditional Branch instruction
	 *	 that depends on the value returned by a preceding Load
	 *	 instruction, the load on which the Branch depends is
	 *	 performed before any loads caused by instructions
	 *	 following the isync. This applies even if the effects
	 *	 of the ``dependency'' are independent of the value
	 *	 loaded (e.g., the value is compared to itself and the
	 *	 Branch tests the EQ bit in the selected CR field), and
	 *	 even if the branch target is the sequentially next
	 *	 instruction.'
	 *
	 *	--PowerPC Virtual Environment Architecture, Book II,
	 *	  Version 2.01, December 2003, 1.7.1 `Storage Access
	 *	  Ordering', p. 7.
	 *
	 * We are required here, however, to order _all_ prior loads,
	 * even if they do not flow into any control flow dependency.
	 * For example:
	 *
	 *	x = *p;
	 *	membar_acquire();
	 *	if (x) goto foo;
	 *
	 * This can't be implemented by:
	 *
	 *	lwz	x, p
	 *	isync
	 *	cmpwi	x, 0
	 *	bne	foo
	 *
	 * isync doesn't work here because there's no conditional
	 * dependency on x between the lwz x, p and the isync.
	 *
	 * isync would only work if it followed the branch:
	 *
	 *	lwz	x, p
	 *	cmpwi	x, 0
	 *	bne	foo
	 *	isync
	 *	...
	 * foo:	isync
	 *	...
	 *
	 * lwsync orders everything except store-before-load, so it
	 * serves here -- see below in membar_release on lwsync.
	 * Except we can't use it on booke, so use sync for now.
	 */
	sync
	blr
END(_membar_acquire)
ATOMIC_OP_ALIAS(membar_acquire,_membar_acquire)

/*
 * void membar_release(void)
 *
 *	Order load/store-before-store: no store after the barrier may
 *	be performed before any load or store preceding it.
 */
ENTRY(_membar_release)
	/*
	 *	`The memory barrier provides an ordering function for
	 *	 the storage accesses caused by Load, Store, and dcbz
	 *	 instructions that are executed by the processor
	 *	 executing the [lwsync] instruction and for which the
	 *	 specified storage location is in storage that is
	 *	 Memory Coherence Required and is neither Write Through
	 *	 Required nor Caching Inhibited.  The applicable pairs
	 *	 are all pairs a_i, b_j of such accesses except those
	 *	 in which a_i is an access caused by a Store or dcbz
	 *	 instruction and b_j is an access caused by a Load
	 *	 instruction.'
	 *
	 *	--PowerPC Virtual Environment Architecture, Book II,
	 *	  Version 2.01, December 2003, 3.3.3 `Memory Barrier
	 *	  Instructions', p. 25.
	 *
	 * In brief, lwsync is an acquire-release barrier -- it orders
	 * load-before-load/store and load/store-before-store, but not
	 * store-before-load.  Except we can't use it on booke, so use
	 * sync for now.
	 */
	sync
	blr
END(_membar_release)
ATOMIC_OP_ALIAS(membar_release,_membar_release)

/*
 * void membar_sync(void)
 *
 *	Full barrier: no load or store on either side of the barrier
 *	may be reordered past a load or store on the other side.
 */
ENTRY(_membar_sync)
	/*
	 * sync, or `heavyweight sync', is a full sequential
	 * consistency barrier.
	 */
	sync
	blr
END(_membar_sync)
ATOMIC_OP_ALIAS(membar_sync,_membar_sync)

/*
 * Legacy membar names, mapped onto the acquire/release/sync
 * entry points above:
 *
 *	membar_producer -> membar_release
 *	membar_consumer -> membar_acquire
 *	membar_enter    -> membar_sync
 *	membar_exit     -> membar_release
 */
ATOMIC_OP_ALIAS(membar_producer,_membar_release)
STRONG_ALIAS(_membar_producer,_membar_release)
ATOMIC_OP_ALIAS(membar_consumer,_membar_acquire)
STRONG_ALIAS(_membar_consumer,_membar_acquire)
ATOMIC_OP_ALIAS(membar_enter,_membar_sync)
STRONG_ALIAS(_membar_enter,_membar_sync)
ATOMIC_OP_ALIAS(membar_exit,_membar_release)
STRONG_ALIAS(_membar_exit,_membar_release)