VirtualBox

source: vbox/trunk/src/VBox/Runtime/generic/critsectrw-generic.cpp

Last change on this file was 98103, checked in by vboxsync on 2023-01-17

Copyright year updates by scm.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 41.0 KB
[25426]1/* $Id: critsectrw-generic.cpp 98103 2023-01-17 14:15:46Z vboxsync $ */
2/** @file
[45110]3 * IPRT - Read/Write Critical Section, Generic.
[25426]4 */
5
6/*
[98103]7 * Copyright (C) 2009-2023 Oracle and/or its affiliates.
[25426]8 *
[96407]9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
[25426]11 *
[96407]12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
[25426]25 * The contents of this file may alternatively be used under the terms
26 * of the Common Development and Distribution License Version 1.0
[96407]27 * (CDDL), a copy of it is provided in the "COPYING.CDDL" file included
28 * in the VirtualBox distribution, in which case the provisions of the
[25426]29 * CDDL are applicable instead of those of the GPL.
30 *
31 * You may elect to license modified versions of this file under the
32 * terms and conditions of either the GPL or the CDDL or both.
[96407]33 *
34 * SPDX-License-Identifier: GPL-3.0-only OR CDDL-1.0
[25426]35 */
36
37
[57358]38/*********************************************************************************************************************************
39* Header Files *
40*********************************************************************************************************************************/
[45110]41#define RTCRITSECTRW_WITHOUT_REMAPPING
[25549]42#define RTASSERT_QUIET
[45110]43#include <iprt/critsect.h>
[25426]44#include "internal/iprt.h"
45
46#include <iprt/asm.h>
47#include <iprt/assert.h>
48#include <iprt/err.h>
[25663]49#include <iprt/lockvalidator.h>
[25431]50#include <iprt/mem.h>
[45110]51#include <iprt/semaphore.h>
[25426]52#include <iprt/thread.h>
53
54#include "internal/magics.h"
[25663]55#include "internal/strict.h"
[25426]56
[59036]57/* Two issues here, (1) the tracepoint generator uses IPRT, and (2) only one .d
58 file per module. */
59#ifdef IPRT_WITH_DTRACE
60# include IPRT_DTRACE_INCLUDE
[59039]61# ifdef IPRT_DTRACE_PREFIX
62# define IPRT_CRITSECTRW_EXCL_ENTERED RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_EXCL_ENTERED)
63# define IPRT_CRITSECTRW_EXCL_ENTERED_ENABLED RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_EXCL_ENTERED_ENABLED)
64# define IPRT_CRITSECTRW_EXCL_LEAVING RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_EXCL_LEAVING)
65# define IPRT_CRITSECTRW_EXCL_LEAVING_ENABLED RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_EXCL_LEAVING_ENABLED)
66# define IPRT_CRITSECTRW_EXCL_BUSY RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_EXCL_BUSY)
67# define IPRT_CRITSECTRW_EXCL_WAITING RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_EXCL_WAITING)
68# define IPRT_CRITSECTRW_EXCL_ENTERED_SHARED RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_EXCL_ENTERED_SHARED)
69# define IPRT_CRITSECTRW_EXCL_LEAVING_SHARED RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_EXCL_LEAVING_SHARED)
70# define IPRT_CRITSECTRW_SHARED_ENTERED RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_SHARED_ENTERED)
71# define IPRT_CRITSECTRW_SHARED_LEAVING RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_SHARED_LEAVING)
72# define IPRT_CRITSECTRW_SHARED_BUSY RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_SHARED_BUSY)
73# define IPRT_CRITSECTRW_SHARED_WAITING RT_CONCAT(IPRT_DTRACE_PREFIX,IPRT_CRITSECTRW_SHARED_WAITING)
74# endif
[59036]75#else
76# define IPRT_CRITSECTRW_EXCL_ENTERED(a_pvCritSect, a_pszName, a_cNestings, a_cWaitingReaders, a_cWriters) do {} while (0)
77# define IPRT_CRITSECTRW_EXCL_ENTERED_ENABLED() (false)
78# define IPRT_CRITSECTRW_EXCL_LEAVING(a_pvCritSect, a_pszName, a_cNestings, a_cWaitingReaders, a_cWriters) do {} while (0)
79# define IPRT_CRITSECTRW_EXCL_LEAVING_ENABLED() (false)
80# define IPRT_CRITSECTRW_EXCL_BUSY( a_pvCritSect, a_pszName, a_fWriteMode, a_cWaitingReaders, a_cReaders, cWriters, a_pvNativeOwnerThread) do {} while (0)
81# define IPRT_CRITSECTRW_EXCL_WAITING(a_pvCritSect, a_pszName, a_fWriteMode, a_cWaitingReaders, a_cReaders, cWriters, a_pvNativeOwnerThread) do {} while (0)
82# define IPRT_CRITSECTRW_EXCL_ENTERED_SHARED(a_pvCritSect, a_pszName, a_cNestings, a_cWaitingReaders, a_cWriters) do {} while (0)
83# define IPRT_CRITSECTRW_EXCL_LEAVING_SHARED(a_pvCritSect, a_pszName, a_cNestings, a_cWaitingReaders, a_cWriters) do {} while (0)
84# define IPRT_CRITSECTRW_SHARED_ENTERED(a_pvCritSect, a_pszName, a_cReaders, a_cWaitingWriters) do {} while (0)
85# define IPRT_CRITSECTRW_SHARED_LEAVING(a_pvCritSect, a_pszName, a_cReaders, a_cWaitingWriters) do {} while (0)
86# define IPRT_CRITSECTRW_SHARED_BUSY( a_pvCritSect, a_pszName, a_pvNativeOwnerThread, a_cWaitingReaders, a_cWriters) do {} while (0)
87# define IPRT_CRITSECTRW_SHARED_WAITING(a_pvCritSect, a_pszName, a_pvNativeOwnerThread, a_cWaitingReaders, a_cWriters) do {} while (0)
88#endif
[25426]89
[25431]90
[59036]91
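/* Implementation overview (summarising the code below): the whole lock state
 * lives in the 64-bit word u.s.u64State, which packs the reader count
 * (RTCSRW_CNT_RD), the writer count (RTCSRW_CNT_WR), the number of readers
 * waiting for the direction to flip back to read mode (RTCSRW_WAIT_CNT_RD),
 * and the current direction (RTCSRW_DIR, read or write).  Every state
 * transition is a compare-and-exchange loop on this word, while
 * u.s.hNativeWriter records the thread currently owning the section
 * exclusively. */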
[45110]92RTDECL(int) RTCritSectRwInit(PRTCRITSECTRW pThis)
[25426]93{
[45110]94 return RTCritSectRwInitEx(pThis, 0, NIL_RTLOCKVALCLASS, RTLOCKVAL_SUB_CLASS_NONE, "RTCritSectRw");
[25707]95}
[45110]96RT_EXPORT_SYMBOL(RTCritSectRwInit);
[25707]97
98
[45110]99RTDECL(int) RTCritSectRwInitEx(PRTCRITSECTRW pThis, uint32_t fFlags,
100 RTLOCKVALCLASS hClass, uint32_t uSubClass, const char *pszNameFmt, ...)
[25707]101{
[45110]102 int rc;
103 AssertReturn(!(fFlags & ~( RTCRITSECT_FLAGS_NO_NESTING | RTCRITSECT_FLAGS_NO_LOCK_VAL | RTCRITSECT_FLAGS_BOOTSTRAP_HACK
104 | RTCRITSECT_FLAGS_NOP )),
105 VERR_INVALID_PARAMETER);
[62556]106 RT_NOREF_PV(hClass); RT_NOREF_PV(uSubClass); RT_NOREF_PV(pszNameFmt);
[25707]107
[62556]108
[45110]109 /*
110 * Initialize the structure, allocate the lock validator stuff and sems.
111 */
112 pThis->u32Magic = RTCRITSECTRW_MAGIC_DEAD;
113 pThis->fNeedReset = false;
[58829]114#ifdef IN_RING0
115 pThis->fFlags = (uint16_t)(fFlags | RTCRITSECT_FLAGS_RING0);
116#else
117 pThis->fFlags = (uint16_t)(fFlags & ~RTCRITSECT_FLAGS_RING0);
118#endif
[90637]119 pThis->u.u128.s.Hi = 0;
120 pThis->u.u128.s.Lo = 0;
121 pThis->u.s.hNativeWriter= NIL_RTNATIVETHREAD;
122 AssertCompile(sizeof(pThis->u.u128) >= sizeof(pThis->u.s));
[45110]123 pThis->cWriterReads = 0;
124 pThis->cWriteRecursions = 0;
125 pThis->hEvtWrite = NIL_RTSEMEVENT;
126 pThis->hEvtRead = NIL_RTSEMEVENTMULTI;
127 pThis->pValidatorWrite = NULL;
128 pThis->pValidatorRead = NULL;
[25431]129
[45110]130#ifdef RTCRITSECTRW_STRICT
[46495]131 bool const fLVEnabled = !(fFlags & RTCRITSECT_FLAGS_NO_LOCK_VAL);
[45110]132 if (!pszNameFmt)
133 {
134 static uint32_t volatile s_iAnon = 0;
135 uint32_t i = ASMAtomicIncU32(&s_iAnon) - 1;
136 rc = RTLockValidatorRecExclCreate(&pThis->pValidatorWrite, hClass, uSubClass, pThis,
137 fLVEnabled, "RTCritSectRw-%u", i);
138 if (RT_SUCCESS(rc))
139 rc = RTLockValidatorRecSharedCreate(&pThis->pValidatorRead, hClass, uSubClass, pThis,
140 false /*fSignaller*/, fLVEnabled, "RTCritSectRw-%u", i);
141 }
142 else
143 {
144 va_list va;
145 va_start(va, pszNameFmt);
146 rc = RTLockValidatorRecExclCreateV(&pThis->pValidatorWrite, hClass, uSubClass, pThis,
147 fLVEnabled, pszNameFmt, va);
148 va_end(va);
149 if (RT_SUCCESS(rc))
150 {
151 va_start(va, pszNameFmt);
152 RTLockValidatorRecSharedCreateV(&pThis->pValidatorRead, hClass, uSubClass, pThis,
153 false /*fSignaller*/, fLVEnabled, pszNameFmt, va);
154 va_end(va);
155 }
156 }
[25431]157 if (RT_SUCCESS(rc))
[45110]158 rc = RTLockValidatorRecMakeSiblings(&pThis->pValidatorWrite->Core, &pThis->pValidatorRead->Core);
159
160 if (RT_SUCCESS(rc))
161#endif
[25431]162 {
[45110]163 rc = RTSemEventMultiCreate(&pThis->hEvtRead);
[25431]164 if (RT_SUCCESS(rc))
165 {
[45110]166 rc = RTSemEventCreate(&pThis->hEvtWrite);
167 if (RT_SUCCESS(rc))
[25831]168 {
[45110]169 pThis->u32Magic = RTCRITSECTRW_MAGIC;
170 return VINF_SUCCESS;
[25831]171 }
[45110]172 RTSemEventMultiDestroy(pThis->hEvtRead);
[25431]173 }
174 }
[25426]175
[45110]176#ifdef RTCRITSECTRW_STRICT
177 RTLockValidatorRecSharedDestroy(&pThis->pValidatorRead);
178 RTLockValidatorRecExclDestroy(&pThis->pValidatorWrite);
[25663]179#endif
[45110]180 return rc;
[25426]181}
[45110]182RT_EXPORT_SYMBOL(RTCritSectRwInitEx);
[25426]183
184
[45110]185RTDECL(uint32_t) RTCritSectRwSetSubClass(PRTCRITSECTRW pThis, uint32_t uSubClass)
[25707]186{
187 AssertPtrReturn(pThis, RTLOCKVAL_SUB_CLASS_INVALID);
[45110]188 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, RTLOCKVAL_SUB_CLASS_INVALID);
[58829]189#ifdef IN_RING0
190 Assert(pThis->fFlags & RTCRITSECT_FLAGS_RING0);
191#else
192 Assert(!(pThis->fFlags & RTCRITSECT_FLAGS_RING0));
193#endif
[45110]194#ifdef RTCRITSECTRW_STRICT
195 AssertReturn(!(pThis->fFlags & RTCRITSECT_FLAGS_NOP), RTLOCKVAL_SUB_CLASS_INVALID);
[25707]196
[45110]197 RTLockValidatorRecSharedSetSubClass(pThis->pValidatorRead, uSubClass);
198 return RTLockValidatorRecExclSetSubClass(pThis->pValidatorWrite, uSubClass);
[25707]199#else
[45110]200 NOREF(uSubClass);
[25707]201 return RTLOCKVAL_SUB_CLASS_INVALID;
202#endif
203}
[45110]204RT_EXPORT_SYMBOL(RTCritSectRwSetSubClass);
[25707]205
206
[45110]207static int rtCritSectRwEnterShared(PRTCRITSECTRW pThis, PCRTLOCKVALSRCPOS pSrcPos, bool fTryOnly)
[25431]208{
[25663]209 /*
210 * Validate input.
211 */
[45110]212 AssertPtr(pThis);
213 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, VERR_SEM_DESTROYED);
[58829]214#ifdef IN_RING0
215 Assert(pThis->fFlags & RTCRITSECT_FLAGS_RING0);
216#else
217 Assert(!(pThis->fFlags & RTCRITSECT_FLAGS_RING0));
218#endif
[62556]219 RT_NOREF_PV(pSrcPos);
[25549]220
[45110]221#ifdef RTCRITSECTRW_STRICT
[25663]222 RTTHREAD hThreadSelf = RTThreadSelfAutoAdopt();
[45110]223 if (!fTryOnly)
[25663]224 {
[25710]225 int rc9;
226 RTNATIVETHREAD hNativeWriter;
[90637]227 ASMAtomicUoReadHandle(&pThis->u.s.hNativeWriter, &hNativeWriter);
[25710]228 if (hNativeWriter != NIL_RTTHREAD && hNativeWriter == RTThreadNativeSelf())
[45110]229 rc9 = RTLockValidatorRecExclCheckOrder(pThis->pValidatorWrite, hThreadSelf, pSrcPos, RT_INDEFINITE_WAIT);
[25710]230 else
[45110]231 rc9 = RTLockValidatorRecSharedCheckOrder(pThis->pValidatorRead, hThreadSelf, pSrcPos, RT_INDEFINITE_WAIT);
[25663]232 if (RT_FAILURE(rc9))
233 return rc9;
234 }
235#endif
[25549]236
[25663]237 /*
238 * Get cracking...
239 */
[90637]240 uint64_t u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
[25663]241 uint64_t u64OldState = u64State;
242
[25431]243 for (;;)
244 {
[45110]245 if ((u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT))
[25431]246 {
[25549]247 /* It flows in the right direction, try follow it before it changes. */
[45110]248 uint64_t c = (u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT;
[25431]249 c++;
[45110]250 Assert(c < RTCSRW_CNT_MASK / 2);
251 u64State &= ~RTCSRW_CNT_RD_MASK;
252 u64State |= c << RTCSRW_CNT_RD_SHIFT;
[90637]253 if (ASMAtomicCmpXchgU64(&pThis->u.s.u64State, u64State, u64OldState))
[25549]254 {
[45110]255#ifdef RTCRITSECTRW_STRICT
256 RTLockValidatorRecSharedAddOwner(pThis->pValidatorRead, hThreadSelf, pSrcPos);
[25663]257#endif
[25549]258 break;
259 }
[25431]260 }
[45110]261 else if ((u64State & (RTCSRW_CNT_RD_MASK | RTCSRW_CNT_WR_MASK)) == 0)
[25431]262 {
263 /* Wrong direction, but we're alone here and can simply try switch the direction. */
[45110]264 u64State &= ~(RTCSRW_CNT_RD_MASK | RTCSRW_CNT_WR_MASK | RTCSRW_DIR_MASK);
265 u64State |= (UINT64_C(1) << RTCSRW_CNT_RD_SHIFT) | (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT);
[90637]266 if (ASMAtomicCmpXchgU64(&pThis->u.s.u64State, u64State, u64OldState))
[25549]267 {
[25663]268 Assert(!pThis->fNeedReset);
[45110]269#ifdef RTCRITSECTRW_STRICT
270 RTLockValidatorRecSharedAddOwner(pThis->pValidatorRead, hThreadSelf, pSrcPos);
[25663]271#endif
[25549]272 break;
273 }
[25431]274 }
275 else
276 {
[25663]277 /* Is the writer perhaps doing a read recursion? */
278 RTNATIVETHREAD hNativeSelf = RTThreadNativeSelf();
279 RTNATIVETHREAD hNativeWriter;
[90637]280 ASMAtomicUoReadHandle(&pThis->u.s.hNativeWriter, &hNativeWriter);
[25663]281 if (hNativeSelf == hNativeWriter)
282 {
[45110]283#ifdef RTCRITSECTRW_STRICT
284 int rc9 = RTLockValidatorRecExclRecursionMixed(pThis->pValidatorWrite, &pThis->pValidatorRead->Core, pSrcPos);
[25663]285 if (RT_FAILURE(rc9))
286 return rc9;
287#endif
288 Assert(pThis->cWriterReads < UINT32_MAX / 2);
[59036]289 uint32_t const cReads = ASMAtomicIncU32(&pThis->cWriterReads); NOREF(cReads);
290 IPRT_CRITSECTRW_EXCL_ENTERED_SHARED(pThis, NULL,
291 cReads + pThis->cWriteRecursions,
292 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
293 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
294
[25663]295 return VINF_SUCCESS; /* don't break! */
296 }
297
[45110]298 /* If we're only trying, return already. */
299 if (fTryOnly)
[59036]300 {
301 IPRT_CRITSECTRW_SHARED_BUSY(pThis, NULL,
[90637]302 (void *)pThis->u.s.hNativeWriter,
[59036]303 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
304 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
[45110]305 return VERR_SEM_BUSY;
[59036]306 }
[25663]307
[25549]308 /* Add ourselves to the queue and wait for the direction to change. */
[45110]309 uint64_t c = (u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT;
[25431]310 c++;
[45110]311 Assert(c < RTCSRW_CNT_MASK / 2);
[25549]312
[45110]313 uint64_t cWait = (u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT;
[25549]314 cWait++;
315 Assert(cWait <= c);
[45110]316 Assert(cWait < RTCSRW_CNT_MASK / 2);
[25549]317
[45110]318 u64State &= ~(RTCSRW_CNT_RD_MASK | RTCSRW_WAIT_CNT_RD_MASK);
319 u64State |= (c << RTCSRW_CNT_RD_SHIFT) | (cWait << RTCSRW_WAIT_CNT_RD_SHIFT);
[25549]320
[90637]321 if (ASMAtomicCmpXchgU64(&pThis->u.s.u64State, u64State, u64OldState))
[25431]322 {
[59036]323 IPRT_CRITSECTRW_SHARED_WAITING(pThis, NULL,
[90637]324 (void *)pThis->u.s.hNativeWriter,
[59036]325 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
326 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
[25431]327 for (uint32_t iLoop = 0; ; iLoop++)
328 {
[25663]329 int rc;
[45110]330#ifdef RTCRITSECTRW_STRICT
331 rc = RTLockValidatorRecSharedCheckBlocking(pThis->pValidatorRead, hThreadSelf, pSrcPos, true,
332 RT_INDEFINITE_WAIT, RTTHREADSTATE_RW_READ, false);
[25663]333 if (RT_SUCCESS(rc))
[58829]334#elif defined(IN_RING3)
[25666]335 RTTHREAD hThreadSelf = RTThreadSelf();
[25663]336 RTThreadBlocking(hThreadSelf, RTTHREADSTATE_RW_READ, false);
337#endif
338 {
[45110]339 rc = RTSemEventMultiWait(pThis->hEvtRead, RT_INDEFINITE_WAIT);
[58829]340#ifdef IN_RING3
[25663]341 RTThreadUnblocked(hThreadSelf, RTTHREADSTATE_RW_READ);
[58829]342#endif
[45110]343 if (pThis->u32Magic != RTCRITSECTRW_MAGIC)
[25663]344 return VERR_SEM_DESTROYED;
345 }
346 if (RT_FAILURE(rc))
347 {
348 /* Decrement the counts and return the error. */
349 for (;;)
350 {
[90637]351 u64OldState = u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
[45110]352 c = (u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT; Assert(c > 0);
[25663]353 c--;
[45110]354 cWait = (u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT; Assert(cWait > 0);
[25663]355 cWait--;
[45110]356 u64State &= ~(RTCSRW_CNT_RD_MASK | RTCSRW_WAIT_CNT_RD_MASK);
357 u64State |= (c << RTCSRW_CNT_RD_SHIFT) | (cWait << RTCSRW_WAIT_CNT_RD_SHIFT);
[90637]358 if (ASMAtomicCmpXchgU64(&pThis->u.s.u64State, u64State, u64OldState))
[25663]359 break;
360 }
361 return rc;
362 }
[25431]363
[25663]364 Assert(pThis->fNeedReset);
[90637]365 u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
[45110]366 if ((u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT))
[25549]367 break;
368 AssertMsg(iLoop < 1, ("%u\n", iLoop));
369 }
[25431]370
[25549]371 /* Decrement the wait count and maybe reset the semaphore (if we're last). */
372 for (;;)
373 {
374 u64OldState = u64State;
375
[45110]376 cWait = (u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT;
[25549]377 Assert(cWait > 0);
378 cWait--;
[45110]379 u64State &= ~RTCSRW_WAIT_CNT_RD_MASK;
380 u64State |= cWait << RTCSRW_WAIT_CNT_RD_SHIFT;
[25549]381
[90637]382 if (ASMAtomicCmpXchgU64(&pThis->u.s.u64State, u64State, u64OldState))
[25549]383 {
384 if (cWait == 0)
385 {
[25663]386 if (ASMAtomicXchgBool(&pThis->fNeedReset, false))
[25549]387 {
[25663]388 int rc = RTSemEventMultiReset(pThis->hEvtRead);
[25549]389 AssertRCReturn(rc, rc);
390 }
391 }
392 break;
393 }
[90637]394 u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
[25431]395 }
[25663]396
[45110]397#ifdef RTCRITSECTRW_STRICT
398 RTLockValidatorRecSharedAddOwner(pThis->pValidatorRead, hThreadSelf, pSrcPos);
[25663]399#endif
[25549]400 break;
[25431]401 }
402 }
403
[45110]404 if (pThis->u32Magic != RTCRITSECTRW_MAGIC)
[25431]405 return VERR_SEM_DESTROYED;
[25549]406
407 ASMNopPause();
[90637]408 u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
[25549]409 u64OldState = u64State;
[25431]410 }
[25549]411
412 /* got it! */
[90637]413 Assert((ASMAtomicReadU64(&pThis->u.s.u64State) & RTCSRW_DIR_MASK) == (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT));
[59036]414 IPRT_CRITSECTRW_SHARED_ENTERED(pThis, NULL,
415 (uint32_t)((u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT),
416 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
[25549]417 return VINF_SUCCESS;
[25431]418}
419
420
[45110]421RTDECL(int) RTCritSectRwEnterShared(PRTCRITSECTRW pThis)
[25431]422{
[45110]423#ifndef RTCRITSECTRW_STRICT
424 return rtCritSectRwEnterShared(pThis, NULL, false /*fTryOnly*/);
[25663]425#else
426 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_NORMAL_API();
[45110]427 return rtCritSectRwEnterShared(pThis, &SrcPos, false /*fTryOnly*/);
[25663]428#endif
429}
[45110]430RT_EXPORT_SYMBOL(RTCritSectRwEnterShared);
[25431]431
432
[45110]433RTDECL(int) RTCritSectRwEnterSharedDebug(PRTCRITSECTRW pThis, RTHCUINTPTR uId, RT_SRC_POS_DECL)
[25663]434{
435 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_DEBUG_API();
[45110]436 return rtCritSectRwEnterShared(pThis, &SrcPos, false /*fTryOnly*/);
[25663]437}
[45110]438RT_EXPORT_SYMBOL(RTCritSectRwEnterSharedDebug);
[25431]439
440
[45110]441RTDECL(int) RTCritSectRwTryEnterShared(PRTCRITSECTRW pThis)
[25663]442{
[45110]443#ifndef RTCRITSECTRW_STRICT
444 return rtCritSectRwEnterShared(pThis, NULL, true /*fTryOnly*/);
[25663]445#else
446 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_NORMAL_API();
[45110]447 return rtCritSectRwEnterShared(pThis, &SrcPos, true /*fTryOnly*/);
[25663]448#endif
449}
[45110]450RT_EXPORT_SYMBOL(RTCritSectRwTryEnterShared);
[25663]451
452
[45110]453RTDECL(int) RTCritSectRwTryEnterSharedDebug(PRTCRITSECTRW pThis, RTHCUINTPTR uId, RT_SRC_POS_DECL)
[25663]454{
455 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_DEBUG_API();
[45110]456 return rtCritSectRwEnterShared(pThis, &SrcPos, true /*fTryOnly*/);
[25663]457}
[45110]458RT_EXPORT_SYMBOL(RTCritSectRwTryEnterSharedDebug);
[25663]459
460
461
[45110]462RTDECL(int) RTCritSectRwLeaveShared(PRTCRITSECTRW pThis)
[25663]463{
464 /*
465 * Validate handle.
466 */
[45110]467 AssertPtr(pThis);
468 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, VERR_SEM_DESTROYED);
[58829]469#ifdef IN_RING0
470 Assert(pThis->fFlags & RTCRITSECT_FLAGS_RING0);
471#else
472 Assert(!(pThis->fFlags & RTCRITSECT_FLAGS_RING0));
473#endif
[25663]474
475 /*
476 * Check the direction and take action accordingly.
477 */
[90637]478 uint64_t u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
[25663]479 uint64_t u64OldState = u64State;
[45110]480 if ((u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT))
[25663]481 {
[45110]482#ifdef RTCRITSECTRW_STRICT
483 int rc9 = RTLockValidatorRecSharedCheckAndRelease(pThis->pValidatorRead, NIL_RTTHREAD);
[25663]484 if (RT_FAILURE(rc9))
485 return rc9;
486#endif
[59036]487 IPRT_CRITSECTRW_SHARED_LEAVING(pThis, NULL,
488 (uint32_t)((u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT) - 1,
489 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
490
[25663]491 for (;;)
[25431]492 {
[45110]493 uint64_t c = (u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT;
[25663]494 AssertReturn(c > 0, VERR_NOT_OWNER);
495 c--;
496
497 if ( c > 0
[45309]498 || (u64State & RTCSRW_CNT_WR_MASK) == 0)
[25549]499 {
[25663]500 /* Don't change the direction. */
[45110]501 u64State &= ~RTCSRW_CNT_RD_MASK;
502 u64State |= c << RTCSRW_CNT_RD_SHIFT;
[90637]503 if (ASMAtomicCmpXchgU64(&pThis->u.s.u64State, u64State, u64OldState))
[25663]504 break;
[25549]505 }
[25663]506 else
507 {
508 /* Reverse the direction and signal the reader threads. */
[45110]509 u64State &= ~(RTCSRW_CNT_RD_MASK | RTCSRW_DIR_MASK);
510 u64State |= RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT;
[90637]511 if (ASMAtomicCmpXchgU64(&pThis->u.s.u64State, u64State, u64OldState))
[25663]512 {
513 int rc = RTSemEventSignal(pThis->hEvtWrite);
514 AssertRC(rc);
515 break;
516 }
517 }
518
519 ASMNopPause();
[90637]520 u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
[25663]521 u64OldState = u64State;
[25431]522 }
[25663]523 }
524 else
525 {
526 RTNATIVETHREAD hNativeSelf = RTThreadNativeSelf();
527 RTNATIVETHREAD hNativeWriter;
[90637]528 ASMAtomicUoReadHandle(&pThis->u.s.hNativeWriter, &hNativeWriter);
[25663]529 AssertReturn(hNativeSelf == hNativeWriter, VERR_NOT_OWNER);
530 AssertReturn(pThis->cWriterReads > 0, VERR_NOT_OWNER);
[45110]531#ifdef RTCRITSECTRW_STRICT
532 int rc = RTLockValidatorRecExclUnwindMixed(pThis->pValidatorWrite, &pThis->pValidatorRead->Core);
[25663]533 if (RT_FAILURE(rc))
534 return rc;
535#endif
[59036]536 uint32_t cReads = ASMAtomicDecU32(&pThis->cWriterReads); NOREF(cReads);
537 IPRT_CRITSECTRW_EXCL_LEAVING_SHARED(pThis, NULL,
538 cReads + pThis->cWriteRecursions,
539 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
540 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
[25663]541 }
542
543 return VINF_SUCCESS;
544}
[45110]545RT_EXPORT_SYMBOL(RTCritSectRwLeaveShared);
[25663]546
547
[45110]548static int rtCritSectRwEnterExcl(PRTCRITSECTRW pThis, PCRTLOCKVALSRCPOS pSrcPos, bool fTryOnly)
[25663]549{
550 /*
551 * Validate input.
552 */
[45110]553 AssertPtr(pThis);
554 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, VERR_SEM_DESTROYED);
[58829]555#ifdef IN_RING0
556 Assert(pThis->fFlags & RTCRITSECT_FLAGS_RING0);
557#else
558 Assert(!(pThis->fFlags & RTCRITSECT_FLAGS_RING0));
559#endif
[62556]560 RT_NOREF_PV(pSrcPos);
[25663]561
[45110]562#ifdef RTCRITSECTRW_STRICT
[25663]563 RTTHREAD hThreadSelf = NIL_RTTHREAD;
[45110]564 if (!fTryOnly)
[25663]565 {
566 hThreadSelf = RTThreadSelfAutoAdopt();
[45110]567 int rc9 = RTLockValidatorRecExclCheckOrder(pThis->pValidatorWrite, hThreadSelf, pSrcPos, RT_INDEFINITE_WAIT);
[25663]568 if (RT_FAILURE(rc9))
569 return rc9;
570 }
571#endif
572
573 /*
574 * Check if we're already the owner and just recursing.
575 */
576 RTNATIVETHREAD hNativeSelf = RTThreadNativeSelf();
577 RTNATIVETHREAD hNativeWriter;
[90637]578 ASMAtomicUoReadHandle(&pThis->u.s.hNativeWriter, &hNativeWriter);
[25663]579 if (hNativeSelf == hNativeWriter)
580 {
[90637]581 Assert((ASMAtomicReadU64(&pThis->u.s.u64State) & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT));
[45110]582#ifdef RTCRITSECTRW_STRICT
583 int rc9 = RTLockValidatorRecExclRecursion(pThis->pValidatorWrite, pSrcPos);
[25663]584 if (RT_FAILURE(rc9))
585 return rc9;
586#endif
587 Assert(pThis->cWriteRecursions < UINT32_MAX / 2);
[59036]588 uint32_t cNestings = ASMAtomicIncU32(&pThis->cWriteRecursions); NOREF(cNestings);
589
[62448]590#ifdef IPRT_WITH_DTRACE
[59036]591 if (IPRT_CRITSECTRW_EXCL_ENTERED_ENABLED())
592 {
[90637]593 uint64_t u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
[59036]594 IPRT_CRITSECTRW_EXCL_ENTERED(pThis, NULL, cNestings + pThis->cWriterReads,
595 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
596 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
597 }
[62448]598#endif
[25663]599 return VINF_SUCCESS;
600 }
601
602 /*
603 * Get cracking.
604 */
[90637]605 uint64_t u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
[25663]606 uint64_t u64OldState = u64State;
607
608 for (;;)
609 {
[45110]610 if ( (u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT)
611 || (u64State & (RTCSRW_CNT_RD_MASK | RTCSRW_CNT_WR_MASK)) != 0)
[25663]612 {
613 /* It flows in the right direction, try follow it before it changes. */
[45110]614 uint64_t c = (u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT;
[25663]615 c++;
[45110]616 Assert(c < RTCSRW_CNT_MASK / 2);
617 u64State &= ~RTCSRW_CNT_WR_MASK;
618 u64State |= c << RTCSRW_CNT_WR_SHIFT;
[90637]619 if (ASMAtomicCmpXchgU64(&pThis->u.s.u64State, u64State, u64OldState))
[25663]620 break;
621 }
[45110]622 else if ((u64State & (RTCSRW_CNT_RD_MASK | RTCSRW_CNT_WR_MASK)) == 0)
[25663]623 {
624 /* Wrong direction, but we're alone here and can simply try switch the direction. */
[45110]625 u64State &= ~(RTCSRW_CNT_RD_MASK | RTCSRW_CNT_WR_MASK | RTCSRW_DIR_MASK);
626 u64State |= (UINT64_C(1) << RTCSRW_CNT_WR_SHIFT) | (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT);
[90637]627 if (ASMAtomicCmpXchgU64(&pThis->u.s.u64State, u64State, u64OldState))
[25663]628 break;
629 }
[45110]630 else if (fTryOnly)
[25663]631 /* Wrong direction and we're not supposed to wait, just return. */
[45110]632 return VERR_SEM_BUSY;
[25431]633 else
634 {
[25663]635 /* Add ourselves to the write count and break out to do the wait. */
[45110]636 uint64_t c = (u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT;
[25663]637 c++;
[45110]638 Assert(c < RTCSRW_CNT_MASK / 2);
639 u64State &= ~RTCSRW_CNT_WR_MASK;
640 u64State |= c << RTCSRW_CNT_WR_SHIFT;
[90637]641 if (ASMAtomicCmpXchgU64(&pThis->u.s.u64State, u64State, u64OldState))
[25663]642 break;
643 }
644
[45110]645 if (pThis->u32Magic != RTCRITSECTRW_MAGIC)
[25663]646 return VERR_SEM_DESTROYED;
647
648 ASMNopPause();
[90637]649 u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
[25663]650 u64OldState = u64State;
651 }
652
653 /*
654 * If we're in write mode now try grab the ownership. Play fair if there
655 * are threads already waiting.
656 */
[45110]657 bool fDone = (u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT)
658 && ( ((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT) == 1
659 || fTryOnly);
[25663]660 if (fDone)
[90637]661 ASMAtomicCmpXchgHandle(&pThis->u.s.hNativeWriter, hNativeSelf, NIL_RTNATIVETHREAD, fDone);
[25663]662 if (!fDone)
663 {
664 /*
[45309]665 * If only trying, undo the above writer incrementation and return.
666 */
667 if (fTryOnly)
668 {
669 for (;;)
670 {
[90637]671 u64OldState = u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
[45309]672 uint64_t c = (u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT; Assert(c > 0);
673 c--;
674 u64State &= ~RTCSRW_CNT_WR_MASK;
675 u64State |= c << RTCSRW_CNT_WR_SHIFT;
[90637]676 if (ASMAtomicCmpXchgU64(&pThis->u.s.u64State, u64State, u64OldState))
[45309]677 break;
678 }
[59036]679 IPRT_CRITSECTRW_EXCL_BUSY(pThis, NULL,
680 (u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT) /*fWrite*/,
681 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
682 (uint32_t)((u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT),
683 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT),
[90637]684 (void *)pThis->u.s.hNativeWriter);
[45309]685 return VERR_SEM_BUSY;
686 }
687
688 /*
[25663]689 * Wait for our turn.
690 */
[59036]691 IPRT_CRITSECTRW_EXCL_WAITING(pThis, NULL,
692 (u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT) /*fWrite*/,
693 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
694 (uint32_t)((u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT),
695 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT),
[90637]696 (void *)pThis->u.s.hNativeWriter);
[25663]697 for (uint32_t iLoop = 0; ; iLoop++)
698 {
699 int rc;
[45110]700#ifdef RTCRITSECTRW_STRICT
[45309]701 if (hThreadSelf == NIL_RTTHREAD)
702 hThreadSelf = RTThreadSelfAutoAdopt();
703 rc = RTLockValidatorRecExclCheckBlocking(pThis->pValidatorWrite, hThreadSelf, pSrcPos, true,
704 RT_INDEFINITE_WAIT, RTTHREADSTATE_RW_WRITE, false);
[25663]705 if (RT_SUCCESS(rc))
[58829]706#elif defined(IN_RING3)
[25666]707 RTTHREAD hThreadSelf = RTThreadSelf();
[25663]708 RTThreadBlocking(hThreadSelf, RTTHREADSTATE_RW_WRITE, false);
709#endif
710 {
[45110]711 rc = RTSemEventWait(pThis->hEvtWrite, RT_INDEFINITE_WAIT);
[58829]712#ifdef IN_RING3
[25663]713 RTThreadUnblocked(hThreadSelf, RTTHREADSTATE_RW_WRITE);
[58829]714#endif
[45110]715 if (pThis->u32Magic != RTCRITSECTRW_MAGIC)
[25663]716 return VERR_SEM_DESTROYED;
717 }
718 if (RT_FAILURE(rc))
719 {
720 /* Decrement the counts and return the error. */
721 for (;;)
722 {
[90637]723 u64OldState = u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
[45110]724 uint64_t c = (u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT; Assert(c > 0);
[25663]725 c--;
[45110]726 u64State &= ~RTCSRW_CNT_WR_MASK;
727 u64State |= c << RTCSRW_CNT_WR_SHIFT;
[90637]728 if (ASMAtomicCmpXchgU64(&pThis->u.s.u64State, u64State, u64OldState))
[25663]729 break;
730 }
731 return rc;
732 }
733
[90637]734 u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
[45110]735 if ((u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT))
[25663]736 {
[90637]737 ASMAtomicCmpXchgHandle(&pThis->u.s.hNativeWriter, hNativeSelf, NIL_RTNATIVETHREAD, fDone);
[25663]738 if (fDone)
739 break;
740 }
741 AssertMsg(iLoop < 1000, ("%u\n", iLoop)); /* may loop a few times here... */
[25431]742 }
[25663]743 }
[25431]744
[25663]745 /*
746 * Got it!
747 */
[90637]748 Assert((ASMAtomicReadU64(&pThis->u.s.u64State) & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT));
[25663]749 ASMAtomicWriteU32(&pThis->cWriteRecursions, 1);
750 Assert(pThis->cWriterReads == 0);
[45110]751#ifdef RTCRITSECTRW_STRICT
752 RTLockValidatorRecExclSetOwner(pThis->pValidatorWrite, hThreadSelf, pSrcPos, true);
[25663]753#endif
[59036]754 IPRT_CRITSECTRW_EXCL_ENTERED(pThis, NULL, 1,
755 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
756 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
[25663]757
758 return VINF_SUCCESS;
759}
760
761
[45110]762RTDECL(int) RTCritSectRwEnterExcl(PRTCRITSECTRW pThis)
[25663]763{
[45110]764#ifndef RTCRITSECTRW_STRICT
765 return rtCritSectRwEnterExcl(pThis, NULL, false /*fTryOnly*/);
[25663]766#else
767 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_NORMAL_API();
[45110]768 return rtCritSectRwEnterExcl(pThis, &SrcPos, false /*fTryOnly*/);
[25663]769#endif
770}
[45110]771RT_EXPORT_SYMBOL(RTCritSectRwEnterExcl);
[25663]772
773
[45110]774RTDECL(int) RTCritSectRwEnterExclDebug(PRTCRITSECTRW pThis, RTHCUINTPTR uId, RT_SRC_POS_DECL)
[25663]775{
776 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_DEBUG_API();
[45110]777 return rtCritSectRwEnterExcl(pThis, &SrcPos, false /*fTryOnly*/);
[25663]778}
[45110]779RT_EXPORT_SYMBOL(RTCritSectRwEnterExclDebug);
[25663]780
781
[45110]782RTDECL(int) RTCritSectRwTryEnterExcl(PRTCRITSECTRW pThis)
[25663]783{
[45110]784#ifndef RTCRITSECTRW_STRICT
785 return rtCritSectRwEnterExcl(pThis, NULL, true /*fTryOnly*/);
[25663]786#else
787 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_NORMAL_API();
[45110]788 return rtCritSectRwEnterExcl(pThis, &SrcPos, true /*fTryOnly*/);
[25663]789#endif
790}
[45110]791RT_EXPORT_SYMBOL(RTCritSectRwTryEnterExcl);
[25663]792
793
[45110]794RTDECL(int) RTCritSectRwTryEnterExclDebug(PRTCRITSECTRW pThis, RTHCUINTPTR uId, RT_SRC_POS_DECL)
[25663]795{
796 RTLOCKVALSRCPOS SrcPos = RTLOCKVALSRCPOS_INIT_DEBUG_API();
[45110]797 return rtCritSectRwEnterExcl(pThis, &SrcPos, true /*fTryOnly*/);
[25663]798}
[45110]799RT_EXPORT_SYMBOL(RTCritSectRwTryEnterExclDebug);
[25663]800
801
[45110]802RTDECL(int) RTCritSectRwLeaveExcl(PRTCRITSECTRW pThis)
[25663]803{
804 /*
805 * Validate handle.
806 */
[45110]807 AssertPtr(pThis);
808 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, VERR_SEM_DESTROYED);
[58829]809#ifdef IN_RING0
810 Assert(pThis->fFlags & RTCRITSECT_FLAGS_RING0);
811#else
812 Assert(!(pThis->fFlags & RTCRITSECT_FLAGS_RING0));
813#endif
[25663]814
815 RTNATIVETHREAD hNativeSelf = RTThreadNativeSelf();
816 RTNATIVETHREAD hNativeWriter;
[90637]817 ASMAtomicUoReadHandle(&pThis->u.s.hNativeWriter, &hNativeWriter);
[25663]818 AssertReturn(hNativeSelf == hNativeWriter, VERR_NOT_OWNER);
819
820 /*
821 * Unwind a recursion.
822 */
823 if (pThis->cWriteRecursions == 1)
824 {
825 AssertReturn(pThis->cWriterReads == 0, VERR_WRONG_ORDER); /* (must release all read recursions before the final write.) */
[45110]826#ifdef RTCRITSECTRW_STRICT
827 int rc9 = RTLockValidatorRecExclReleaseOwner(pThis->pValidatorWrite, true);
[25663]828 if (RT_FAILURE(rc9))
829 return rc9;
830#endif
831 /*
832 * Update the state.
833 */
834 ASMAtomicWriteU32(&pThis->cWriteRecursions, 0);
[90637]835 ASMAtomicWriteHandle(&pThis->u.s.hNativeWriter, NIL_RTNATIVETHREAD);
[25663]836
[90637]837 uint64_t u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
[59036]838 IPRT_CRITSECTRW_EXCL_LEAVING(pThis, NULL, 0,
839 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
840 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
841
[25663]842 for (;;)
843 {
844 uint64_t u64OldState = u64State;
845
[45110]846 uint64_t c = (u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT;
[25663]847 Assert(c > 0);
848 c--;
849
850 if ( c > 0
[45110]851 || (u64State & RTCSRW_CNT_RD_MASK) == 0)
[25663]852 {
853 /* Don't change the direction, wake up the next writer if any. */
[45110]854 u64State &= ~RTCSRW_CNT_WR_MASK;
855 u64State |= c << RTCSRW_CNT_WR_SHIFT;
[90637]856 if (ASMAtomicCmpXchgU64(&pThis->u.s.u64State, u64State, u64OldState))
[25663]857 {
858 if (c > 0)
859 {
860 int rc = RTSemEventSignal(pThis->hEvtWrite);
861 AssertRC(rc);
862 }
863 break;
864 }
865 }
866 else
867 {
868 /* Reverse the direction and signal the reader threads. */
[45110]869 u64State &= ~(RTCSRW_CNT_WR_MASK | RTCSRW_DIR_MASK);
870 u64State |= RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT;
[90637]871 if (ASMAtomicCmpXchgU64(&pThis->u.s.u64State, u64State, u64OldState))
[25663]872 {
873 Assert(!pThis->fNeedReset);
874 ASMAtomicWriteBool(&pThis->fNeedReset, true);
875 int rc = RTSemEventMultiSignal(pThis->hEvtRead);
876 AssertRC(rc);
877 break;
878 }
879 }
880
881 ASMNopPause();
[45110]882 if (pThis->u32Magic != RTCRITSECTRW_MAGIC)
[25663]883 return VERR_SEM_DESTROYED;
[90637]884 u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
[25663]885 }
[25431]886 }
[25663]887 else
888 {
889 Assert(pThis->cWriteRecursions != 0);
[45110]890#ifdef RTCRITSECTRW_STRICT
891 int rc9 = RTLockValidatorRecExclUnwind(pThis->pValidatorWrite);
[25663]892 if (RT_FAILURE(rc9))
893 return rc9;
894#endif
[59036]895 uint32_t cNestings = ASMAtomicDecU32(&pThis->cWriteRecursions); NOREF(cNestings);
[62448]896#ifdef IPRT_WITH_DTRACE
[59036]897 if (IPRT_CRITSECTRW_EXCL_LEAVING_ENABLED())
898 {
[90637]899 uint64_t u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
[59036]900 IPRT_CRITSECTRW_EXCL_LEAVING(pThis, NULL, cNestings + pThis->cWriterReads,
901 (uint32_t)((u64State & RTCSRW_WAIT_CNT_RD_MASK) >> RTCSRW_WAIT_CNT_RD_SHIFT),
902 (uint32_t)((u64State & RTCSRW_CNT_WR_MASK) >> RTCSRW_CNT_WR_SHIFT));
903 }
[62448]904#endif
[25663]905 }
906
907 return VINF_SUCCESS;
[25431]908}
[45151]909RT_EXPORT_SYMBOL(RTCritSectRwLeaveExcl);
[25431]910
911
[45110]912RTDECL(bool) RTCritSectRwIsWriteOwner(PRTCRITSECTRW pThis)
[25426]913{
[25431]914 /*
[25663]915 * Validate handle.
[25431]916 */
[45110]917 AssertPtr(pThis);
918 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, false);
[58829]919#ifdef IN_RING0
920 Assert(pThis->fFlags & RTCRITSECT_FLAGS_RING0);
921#else
922 Assert(!(pThis->fFlags & RTCRITSECT_FLAGS_RING0));
923#endif
[25431]924
[25663]925 /*
926 * Check ownership.
927 */
928 RTNATIVETHREAD hNativeSelf = RTThreadNativeSelf();
929 RTNATIVETHREAD hNativeWriter;
[90637]930 ASMAtomicUoReadHandle(&pThis->u.s.hNativeWriter, &hNativeWriter);
[25663]931 return hNativeWriter == hNativeSelf;
[25426]932}
[45110]933RT_EXPORT_SYMBOL(RTCritSectRwIsWriteOwner);
[25426]934
935
[45110]936RTDECL(bool) RTCritSectRwIsReadOwner(PRTCRITSECTRW pThis, bool fWannaHear)
[25908]937{
[62564]938 RT_NOREF_PV(fWannaHear);
939
[25908]940 /*
941 * Validate handle.
942 */
[45110]943 AssertPtr(pThis);
944 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, false);
[58829]945#ifdef IN_RING0
946 Assert(pThis->fFlags & RTCRITSECT_FLAGS_RING0);
947#else
948 Assert(!(pThis->fFlags & RTCRITSECT_FLAGS_RING0));
949#endif
[25908]950
951 /*
952 * Inspect the state.
953 */
[90637]954 uint64_t u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
[45110]955 if ((u64State & RTCSRW_DIR_MASK) == (RTCSRW_DIR_WRITE << RTCSRW_DIR_SHIFT))
[25908]956 {
957 /*
958 * It's in write mode, so we can only be a reader if we're also the
959 * current writer.
960 */
961 RTNATIVETHREAD hNativeSelf = RTThreadNativeSelf();
962 RTNATIVETHREAD hWriter;
[90637]963 ASMAtomicUoReadHandle(&pThis->u.s.hNativeWriter, &hWriter);
[25908]964 return hWriter == hNativeSelf;
965 }
966
967 /*
968 * Read mode. If there are no current readers, then we cannot be a reader.
969 */
[45110]970 if (!(u64State & RTCSRW_CNT_RD_MASK))
[25908]971 return false;
972
[45110]973#ifdef RTCRITSECTRW_STRICT
[25908]974 /*
975 * Ask the lock validator.
976 */
[45110]977 return RTLockValidatorRecSharedIsOwner(pThis->pValidatorRead, NIL_RTTHREAD);
[25908]978#else
979 /*
980 * Ok, we don't know, just tell the caller what he wants to hear.
981 */
982 return fWannaHear;
983#endif
984}
[45110]985RT_EXPORT_SYMBOL(RTCritSectRwIsReadOwner);
[25908]986
987
[45110]988RTDECL(uint32_t) RTCritSectRwGetWriteRecursion(PRTCRITSECTRW pThis)
[25426]989{
[25431]990 /*
[25663]991 * Validate handle.
[25431]992 */
[45110]993 AssertPtr(pThis);
994 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, 0);
[25431]995
[25663]996 /*
997 * Return the requested data.
998 */
999 return pThis->cWriteRecursions;
[25426]1000}
[45110]1001RT_EXPORT_SYMBOL(RTCritSectRwGetWriteRecursion);
[25426]1002
1003
[45110]1004RTDECL(uint32_t) RTCritSectRwGetWriterReadRecursion(PRTCRITSECTRW pThis)
[25426]1005{
[25431]1006 /*
[25663]1007 * Validate handle.
[25431]1008 */
[45110]1009 AssertPtr(pThis);
1010 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, 0);
[25431]1011
[25663]1012 /*
1013 * Return the requested data.
1014 */
1015 return pThis->cWriterReads;
[25426]1016}
[45151]1017RT_EXPORT_SYMBOL(RTCritSectRwGetWriterReadRecursion);
[25426]1018
1019
[45151]1020RTDECL(uint32_t) RTCritSectRwGetReadCount(PRTCRITSECTRW pThis)
[25426]1021{
[25431]1022 /*
1023 * Validate input.
1024 */
[45110]1025 AssertPtr(pThis);
1026 AssertReturn(pThis->u32Magic == RTCRITSECTRW_MAGIC, 0);
[25431]1027
[25663]1028 /*
1029 * Return the requested data.
1030 */
[90637]1031 uint64_t u64State = ASMAtomicReadU64(&pThis->u.s.u64State);
[45110]1032 if ((u64State & RTCSRW_DIR_MASK) != (RTCSRW_DIR_READ << RTCSRW_DIR_SHIFT))
[25663]1033 return 0;
[45110]1034 return (u64State & RTCSRW_CNT_RD_MASK) >> RTCSRW_CNT_RD_SHIFT;
[25426]1035}
[45151]1036RT_EXPORT_SYMBOL(RTCritSectRwGetReadCount);
[25426]1037
[45110]1038
1039RTDECL(int) RTCritSectRwDelete(PRTCRITSECTRW pThis)
1040{
1041 /*
1042 * Assert free waiters and so on.
1043 */
1044 AssertPtr(pThis);
1045 Assert(pThis->u32Magic == RTCRITSECTRW_MAGIC);
1046 //Assert(pThis->cNestings == 0);
1047 //Assert(pThis->cLockers == -1);
[90637]1048 Assert(pThis->u.s.hNativeWriter == NIL_RTNATIVETHREAD);
[58829]1049#ifdef IN_RING0
1050 Assert(pThis->fFlags & RTCRITSECT_FLAGS_RING0);
1051#else
1052 Assert(!(pThis->fFlags & RTCRITSECT_FLAGS_RING0));
1053#endif
[45110]1054
1055 /*
1056 * Invalidate the structure and free the semaphores.
1057 */
1058 if (!ASMAtomicCmpXchgU32(&pThis->u32Magic, RTCRITSECTRW_MAGIC_DEAD, RTCRITSECTRW_MAGIC))
1059 return VERR_INVALID_PARAMETER;
1060
1061 pThis->fFlags = 0;
[90637]1062 pThis->u.s.u64State = 0;
[45110]1063
1064 RTSEMEVENT hEvtWrite = pThis->hEvtWrite;
1065 pThis->hEvtWrite = NIL_RTSEMEVENT;
1066 RTSEMEVENTMULTI hEvtRead = pThis->hEvtRead;
1067 pThis->hEvtRead = NIL_RTSEMEVENTMULTI;
1068
1069 int rc1 = RTSemEventDestroy(hEvtWrite); AssertRC(rc1);
1070 int rc2 = RTSemEventMultiDestroy(hEvtRead); AssertRC(rc2);
1071
[58829]1072#ifndef IN_RING0
[45110]1073 RTLockValidatorRecSharedDestroy(&pThis->pValidatorRead);
1074 RTLockValidatorRecExclDestroy(&pThis->pValidatorWrite);
[58829]1075#endif
[45110]1076
1077 return RT_SUCCESS(rc1) ? rc2 : rc1;
1078}
1079RT_EXPORT_SYMBOL(RTCritSectRwDelete);
1080
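A minimal usage sketch of the read/write critical section API implemented above (ring-3, most error handling omitted). The RTCritSectRw* calls and <iprt/critsect.h> come from this file; the Example* wrappers, g_CritSect and g_uProtectedValue are illustrative names only.

#include <iprt/types.h>
#include <iprt/critsect.h>

static RTCRITSECTRW g_CritSect;        /* the read/write critical section */
static uint32_t     g_uProtectedValue; /* data it protects (illustrative) */

int ExampleInit(void)
{
    /* Creates the event semaphores and, in strict builds, the lock validator records. */
    return RTCritSectRwInit(&g_CritSect);
}

uint32_t ExampleRead(void)
{
    /* Shared (read) access: any number of readers may hold this at once. */
    RTCritSectRwEnterShared(&g_CritSect);
    uint32_t uValue = g_uProtectedValue;
    RTCritSectRwLeaveShared(&g_CritSect);
    return uValue;
}

void ExampleWrite(uint32_t uNewValue)
{
    /* Exclusive (write) access: waits until all readers have left. */
    RTCritSectRwEnterExcl(&g_CritSect);
    g_uProtectedValue = uNewValue;
    RTCritSectRwLeaveExcl(&g_CritSect);
}

void ExampleTerm(void)
{
    /* Must not be called while any thread still owns or waits on the section. */
    RTCritSectRwDelete(&g_CritSect);
}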