diff options
author | Jeenu Viswambharan <jeenu.viswambharan@arm.com> | 2017-01-16 16:52:35 +0000 |
---|---|---|
committer | Jeenu Viswambharan <jeenu.viswambharan@arm.com> | 2017-02-14 09:26:11 +0000 |
commit | c877b414870bb3e421518caf9c7652a9807419b5 (patch) | |
tree | f7d2837cb46edd4f3578d39985c4d13fa97e419d /lib/locks/exclusive | |
parent | 4d07e7821e19dc1ebc640f5264c2a769354c8b2d (diff) |
Introduce locking primitives using CAS instruction
The ARMv8.1 architecture extension has introduced support for far
atomics, which includes compare-and-swap. Compare and Swap instruction
is only available for AArch64.
Introduce build options to choose the architecture versions to target
ARM Trusted Firmware:
- ARM_ARCH_MAJOR: selects the major version of target ARM
Architecture. Default value is 8.
- ARM_ARCH_MINOR: selects the minor version of target ARM
Architecture. Default value is 0.
When:
(ARM_ARCH_MAJOR > 8) || ((ARM_ARCH_MAJOR == 8) && (ARM_ARCH_MINOR >= 1)),
for AArch64, Compare and Swap instruction is used to implement spin
locks. Otherwise, the implementation falls back to using
load-/store-exclusive instructions.
Update user guide, and introduce a section in Firmware Design guide to
summarize support for features introduced in ARMv8 Architecture
Extensions.
Change-Id: I73096a0039502f7aef9ec6ab3ae36680da033f16
Signed-off-by: Jeenu Viswambharan <jeenu.viswambharan@arm.com>
Diffstat (limited to 'lib/locks/exclusive')
-rw-r--r-- | lib/locks/exclusive/aarch64/spinlock.S | 70 |
1 files changed, 69 insertions, 1 deletions
diff --git a/lib/locks/exclusive/aarch64/spinlock.S b/lib/locks/exclusive/aarch64/spinlock.S index 1ca59123..bdc9ea0f 100644 --- a/lib/locks/exclusive/aarch64/spinlock.S +++ b/lib/locks/exclusive/aarch64/spinlock.S @@ -1,5 +1,5 @@ /* - * Copyright (c) 2013-2016, ARM Limited and Contributors. All rights reserved. + * Copyright (c) 2013-2017, ARM Limited and Contributors. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: @@ -33,7 +33,66 @@ .globl spin_lock .globl spin_unlock +#if (ARM_ARCH_MAJOR > 8) || ((ARM_ARCH_MAJOR == 8) && (ARM_ARCH_MINOR >= 1)) +/* + * When compiled for ARMv8.1 or later, choose spin locks based on Compare and + * Swap instruction. + */ +# define USE_CAS 1 + +/* + * Lock contenders using CAS, upon failing to acquire the lock, wait with the + * monitor in open state. Therefore, a normal store upon unlocking won't + * generate an SEV. Use explicit SEV instruction with CAS unlock. + */ +# define COND_SEV() sev + +#else + +# define USE_CAS 0 + +/* + * Lock contenders using exclusive pairs, upon failing to acquire the lock, wait + * with the monitor in exclusive state. A normal store upon unlocking will + * implicitly generate an event; so, no explicit SEV with unlock is required. + */ +# define COND_SEV() + +#endif + +#if USE_CAS + + .arch armv8.1-a + +/* + * Acquire lock using Compare and Swap instruction. + * + * Compare for 0 with acquire semantics, and swap 1. Wait until CAS returns + * 0. + * + * void spin_lock(spinlock_t *lock); + */ +func spin_lock + mov w2, #1 + sevl +1: + wfe + mov w1, wzr + casa w1, w2, [x0] + cbnz w1, 1b + ret +endfunc spin_lock + + .arch armv8-a + +#else /* !USE_CAS */ + +/* + * Acquire lock using load-/store-exclusive instruction pair.
+ * + * void spin_lock(spinlock_t *lock); + */ func spin_lock mov w2, #1 sevl @@ -45,8 +104,17 @@ l2: ldaxr w1, [x0] ret endfunc spin_lock +#endif /* USE_CAS */ +/* + * Release lock previously acquired by spin_lock. + * + * Unconditionally write 0, and conditionally generate an event. + * + * void spin_unlock(spinlock_t *lock); + */ func spin_unlock stlr wzr, [x0] + COND_SEV() ret endfunc spin_unlock |