/* Origin: ceph/src/spdk/dpdk/lib/librte_eal/arm/include/rte_pause_64.h
 * (mirrored via git.proxmox.com; HTML-viewer residue removed). */
1 /* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2017 Cavium, Inc
3 * Copyright(c) 2019 Arm Limited
6 #ifndef _RTE_PAUSE_ARM64_H_
7 #define _RTE_PAUSE_ARM64_H_
13 #include <rte_common.h>
15 #ifdef RTE_ARM_USE_WFE
16 #define RTE_WAIT_UNTIL_EQUAL_ARCH_DEFINED
19 #include "generic/rte_pause.h"
21 static inline void rte_pause(void)
23 asm volatile("yield" ::: "memory");
26 #ifdef RTE_WAIT_UNTIL_EQUAL_ARCH_DEFINED
/* Send an event to quit WFE: "sevl" sets the local event register so the
 * first WFE executed afterwards falls straight through instead of
 * sleeping.  NOTE: deliberately a bare statement block (not do/while(0))
 * because callers invoke it without a trailing semicolon. */
#define __SEVL() { asm volatile("sevl" : : : "memory"); }
/* Put processor into low power WFE (Wait For Event) state; it wakes when
 * an event is generated, e.g. a write to an exclusively-monitored
 * address.  Same bare-block calling convention as __SEVL(). */
#define __WFE() { asm volatile("wfe" : : : "memory"); }
34 static __rte_always_inline
void
35 rte_wait_until_equal_16(volatile uint16_t *addr
, uint16_t expected
,
40 assert(memorder
== __ATOMIC_ACQUIRE
|| memorder
== __ATOMIC_RELAXED
);
43 * Atomic exclusive load from addr, it returns the 16-bit content of
44 * *addr while making it 'monitored',when it is written by someone
45 * else, the 'monitored' state is cleared and a event is generated
46 * implicitly to exit WFE.
48 #define __LOAD_EXC_16(src, dst, memorder) { \
49 if (memorder == __ATOMIC_RELAXED) { \
50 asm volatile("ldxrh %w[tmp], [%x[addr]]" \
55 asm volatile("ldaxrh %w[tmp], [%x[addr]]" \
61 __LOAD_EXC_16(addr
, value
, memorder
)
62 if (value
!= expected
) {
66 __LOAD_EXC_16(addr
, value
, memorder
)
67 } while (value
!= expected
);
72 static __rte_always_inline
void
73 rte_wait_until_equal_32(volatile uint32_t *addr
, uint32_t expected
,
78 assert(memorder
== __ATOMIC_ACQUIRE
|| memorder
== __ATOMIC_RELAXED
);
81 * Atomic exclusive load from addr, it returns the 32-bit content of
82 * *addr while making it 'monitored',when it is written by someone
83 * else, the 'monitored' state is cleared and a event is generated
84 * implicitly to exit WFE.
86 #define __LOAD_EXC_32(src, dst, memorder) { \
87 if (memorder == __ATOMIC_RELAXED) { \
88 asm volatile("ldxr %w[tmp], [%x[addr]]" \
93 asm volatile("ldaxr %w[tmp], [%x[addr]]" \
99 __LOAD_EXC_32(addr
, value
, memorder
)
100 if (value
!= expected
) {
104 __LOAD_EXC_32(addr
, value
, memorder
)
105 } while (value
!= expected
);
110 static __rte_always_inline
void
111 rte_wait_until_equal_64(volatile uint64_t *addr
, uint64_t expected
,
116 assert(memorder
== __ATOMIC_ACQUIRE
|| memorder
== __ATOMIC_RELAXED
);
119 * Atomic exclusive load from addr, it returns the 64-bit content of
120 * *addr while making it 'monitored',when it is written by someone
121 * else, the 'monitored' state is cleared and a event is generated
122 * implicitly to exit WFE.
124 #define __LOAD_EXC_64(src, dst, memorder) { \
125 if (memorder == __ATOMIC_RELAXED) { \
126 asm volatile("ldxr %x[tmp], [%x[addr]]" \
127 : [tmp] "=&r" (dst) \
131 asm volatile("ldaxr %x[tmp], [%x[addr]]" \
132 : [tmp] "=&r" (dst) \
137 __LOAD_EXC_64(addr
, value
, memorder
)
138 if (value
!= expected
) {
142 __LOAD_EXC_64(addr
, value
, memorder
)
143 } while (value
!= expected
);
157 #endif /* _RTE_PAUSE_ARM64_H_ */