/*-
* Copyright (c) 2002-2018 The UbixOS Project.
* All rights reserved.
*
* This was developed by Christopher W. Olsen for the UbixOS Project.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted
* provided that the following conditions are met:
*
* 1) Redistributions of source code must retain the above copyright notice, this list of
* conditions, the following disclaimer and the list of authors.
* 2) Redistributions in binary form must reproduce the above copyright notice, this list of
* conditions, the following disclaimer and the list of authors in the documentation and/or
* other materials provided with the distribution.
* 3) Neither the name of the UbixOS Project nor the names of its contributors may be used to
* endorse or promote products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
* OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include <ubixos/spinlock.h>
#include <ubixos/sched.h>
#include <string.h>

#define atomic_xadd(P, V) __sync_fetch_and_add((P), (V))
#define cmpxchg(P, O, N) __sync_val_compare_and_swap((P), (O), (N))
#define atomic_inc(P) __sync_add_and_fetch((P), 1)
#define atomic_dec(P) __sync_add_and_fetch((P), -1)
#define atomic_add(P, V) __sync_add_and_fetch((P), (V))
#define atomic_set_bit(P, V) __sync_or_and_fetch((P), 1 << (V))
#define atomic_clear_bit(P, V) __sync_and_and_fetch((P), ~(1 << (V)))

/* Atomic exchange used by spinLock(); defined with the GCC __sync builtin
 * if the header does not already provide it */
#ifndef xchg_32
#define xchg_32(P, V) __sync_lock_test_and_set((P), (V))
#endif

/* Compiler barrier: prevent the compiler from reordering memory accesses */
#define barrier() asm volatile("": : :"memory")

/* Pause instruction to prevent excess processor bus usage */
#define cpu_relax() asm volatile("pause\n": : :"memory")
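
/*
 * spinLockInit() - clear the lock structure so the lock starts out unlocked.
 */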
void spinLockInit(spinLock_t lock) {
  memset(lock, 0x0, sizeof(struct spinLock));
}
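
/*
 * spinTryLock() - attempt to take the lock without blocking.
 * Returns 0 on success, LOCKED if the lock is already held.
 */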
int spinTryLock(spinLock_t lock) {
  /* cmpxchg() returns the previous value: 0 means the lock was free and is now ours */
  if (!cmpxchg(&lock->locked, 0x0, LLOCK_FLAG))
    return 0;

  /* Failure: the lock is already held */
  return LOCKED;
}
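
/*
 * spinUnlock() - release the lock.
 */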
void spinUnlock(spinLock_t lock) {
  /* Compiler barrier: keep critical-section accesses from being reordered
   * past the release store */
  barrier();
  lock->locked = 0x0;
}
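
/*
 * spinLock() - acquire the lock, yielding the CPU while it is held
 * by another thread.
 */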
void spinLock(spinLock_t lock) {
  while (1) {
    /* xchg_32() returns the previous value: 0 means the lock was free and is now ours */
    if (!xchg_32(&lock->locked, LOCKED))
      return;

    /* Lock is held by someone else - yield instead of hammering the bus */
    while (lock->locked != 0x0)
      sched_yield();
  }
}
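
/*
 * Illustrative usage sketch (hypothetical caller, assuming spinLock_t is a
 * pointer to struct spinLock as used by spinLockInit() above):
 *
 *   static struct spinLock fooLock;
 *
 *   spinLockInit(&fooLock);
 *
 *   spinLock(&fooLock);
 *   ... critical section ...
 *   spinUnlock(&fooLock);
 */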