/****************************************************************************
 * arch/arm/include/spinlock.h
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership. The
 * ASF licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the
 * License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 *
 ****************************************************************************/

#ifndef __ARCH_ARM_INCLUDE_SPINLOCK_H
#define __ARCH_ARM_INCLUDE_SPINLOCK_H

/****************************************************************************
 * Included Files
 ****************************************************************************/

#ifndef __ASSEMBLY__
# include <stdint.h>
#endif /* __ASSEMBLY__ */

/****************************************************************************
 * Pre-processor Prototypes
 ****************************************************************************/

/* Spinlock states */

#define SP_UNLOCKED 0  /* The Un-locked state */
#define SP_LOCKED   1  /* The Locked state */

/* Memory barriers for use with NuttX spinlock logic
 *
 * Data Memory Barrier (DMB) acts as a memory barrier. It ensures that all
 * explicit memory accesses that appear in program order before the DMB
 * instruction are observed before any explicit memory accesses that appear
 * in program order after the DMB instruction. It does not affect the
 * ordering of any other instructions executing on the processor.
 *
 *   dmb st - Data memory barrier. Wait for stores to complete.
 *
 * Data Synchronization Barrier (DSB) acts as a special kind of memory
 * barrier. No instruction in program order after this instruction executes
 * until this instruction completes. This instruction completes when: (1)
 * all explicit memory accesses before this instruction complete, and (2)
 * all Cache, Branch predictor and TLB maintenance operations before this
 * instruction complete.
 *
 *   dsb sy - Data synchronization barrier. Ensures that the CPU waits
 *            until all memory accesses are complete.
 */

#define SP_DSB(n) __asm__ __volatile__ ("dsb sy" : : : "memory")
#define SP_DMB(n) __asm__ __volatile__ ("dmb st" : : : "memory")

#ifdef CONFIG_ARM_HAVE_WFE_SEV
#define SP_WFE() __asm__ __volatile__ ("wfe" : : : "memory")
#define SP_SEV() __asm__ __volatile__ ("sev" : : : "memory")
#endif

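/* Illustrative sketch (not compiled): how these barrier and event macros
 * are typically combined around a test-and-set spinlock. up_testset() is
 * the architecture-provided primitive declared in
 * nuttx/include/nuttx/spinlock.h; the helper names and exact loop structure
 * below are assumptions for illustration, not the NuttX implementation.
 */

#if 0
static inline void example_spin_lock(FAR volatile spinlock_t *lock)
{
  /* Spin until the previous value was SP_UNLOCKED, i.e. we took the lock */

  while (up_testset(lock) == SP_LOCKED)
    {
#ifdef CONFIG_ARM_HAVE_WFE_SEV
      SP_WFE();          /* Park this CPU until an event is signaled */
#endif
    }

  SP_DMB();              /* Order the lock acquisition before any accesses
                          * made inside the critical section */
}

static inline void example_spin_unlock(FAR volatile spinlock_t *lock)
{
  SP_DMB();              /* Make critical-section stores visible before
                          * the lock word is cleared */
  *lock = SP_UNLOCKED;
  SP_DSB();              /* Wait for the releasing store to complete */
#ifdef CONFIG_ARM_HAVE_WFE_SEV
  SP_SEV();              /* Wake any CPUs waiting in SP_WFE() */
#endif
}
#endif
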
/****************************************************************************
 * Public Types
 ****************************************************************************/

#ifndef __ASSEMBLY__

/* The Type of a spinlock.
 *
 * The ARMv6 architecture introduced the concept of exclusive accesses to
 * memory locations in the form of the Load-Exclusive (LDREX) and
 * Store-Exclusive (STREX) instructions in the ARM and Thumb instruction
 * sets. ARMv6K extended this to include byte, halfword, and doubleword
 * variants of LDREX and STREX. ARMv7-M supports byte and halfword, but not
 * the doubleword variant (ARMv6-M does not support exclusive access).
 *
 * ARM architectures prior to ARMv6 supported SWP and SWPB instructions
 * that atomically swap a 32-bit word or byte value between a register and
 * a memory location. From the ARMv6 architecture, ARM deprecates the use
 * of SWP and SWPB.
 */

typedef uint8_t spinlock_t;

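/* Illustrative sketch (not compiled): a byte-wide test-and-set built from
 * the LDREX/STREX exclusive-access instructions described above. The real
 * up_testset() is provided by architecture-specific logic (often in
 * assembly); the function name and exact operand constraints here are
 * assumptions for illustration only.
 */

#if 0
static inline spinlock_t example_testset(FAR volatile spinlock_t *lock)
{
  spinlock_t old;
  uint32_t   failed;

  do
    {
      __asm__ __volatile__
        (
          "ldrexb %0, [%2]\n"      /* Load the lock byte exclusively */
          "strexb %1, %3, [%2]\n"  /* Try to store SP_LOCKED; %1 = 0 on
                                    * success, 1 if exclusivity was lost */
          : "=&r" (old), "=&r" (failed)
          : "r" (lock), "r" (SP_LOCKED)
          : "memory"
        );
    }
  while (failed != 0);

  return old;                      /* SP_UNLOCKED means we now hold it */
}
#endif
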
/****************************************************************************
 * Public Function Prototypes
 ****************************************************************************/

/****************************************************************************
 * Name: up_testset
 *
 * Description:
 *   Perform an atomic test and set operation on the provided spinlock.
 *
 *   This function must be provided via the architecture-specific logic.
 *
 * Input Parameters:
 *   lock - The address of the spinlock object.
 *
 * Returned Value:
 *   The spinlock is always locked upon return. The previous value of the
 *   spinlock variable is returned: either SP_LOCKED if the spinlock was
 *   previously locked (meaning that the test-and-set operation failed to
 *   obtain the lock) or SP_UNLOCKED if the spinlock was previously
 *   unlocked (meaning that we successfully obtained the lock).
 *
 ****************************************************************************/

/* See prototype in nuttx/include/nuttx/spinlock.h */

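/* Illustrative usage (not compiled): because up_testset() returns the
 * previous state of the lock, a non-blocking "trylock" is simply a
 * comparison against SP_UNLOCKED. The helper name below is hypothetical.
 */

#if 0
static inline int example_spin_trylock(FAR volatile spinlock_t *lock)
{
  /* Returns 1 if the caller now owns the lock, 0 if it was already held */

  return up_testset(lock) == SP_UNLOCKED;
}
#endif
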
#endif /* __ASSEMBLY__ */
#endif /* __ARCH_ARM_INCLUDE_SPINLOCK_H */