// Copyright (c) 2016-2017 Nuxi, https://nuxi.nl/
//
// SPDX-License-Identifier: BSD-2-Clause

#ifndef COMMON_TLS_H
#define COMMON_TLS_H

#include <assert.h>
#include <stdalign.h>
#include <stddef.h>
#include <stdint.h>
#include <wasi/api.h>

#if defined(__aarch64__)

#define TLS_VARIANT 1
#define TCB_SIZE 16

// Fetches the TCB from the CPU's registers.
static inline __wasi_tcb_t *tcb_get(void) {
  __wasi_tcb_t *tcb;
  asm volatile("mrs %0, tpidr_el0" : "=r"(tcb));
  return tcb;
}

// Changes the TCB in the CPU's registers.
static inline void tcb_set(__wasi_tcb_t *tcb) {
  asm volatile("msr tpidr_el0, %0" : : "r"(tcb));
}

#elif defined(__arm__)

#define TLS_VARIANT 1
#define TCB_SIZE 8

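// CP15 c13/c0/2 is TPIDRURW, the user read/write software thread ID
// register, which is assumed here to hold the TCB pointer.
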
// Fetches the TCB from the CPU's registers.
static inline __wasi_tcb_t *tcb_get(void) {
  __wasi_tcb_t *tcb;
  asm volatile("mrc p15, 0, %0, cr13, cr0, 2" : "=r"(tcb));
  return tcb;
}

// Changes the TCB in the CPU's registers.
static inline void tcb_set(__wasi_tcb_t *tcb) {
  asm volatile("mcr p15, 0, %0, cr13, cr0, 2" : : "r"(tcb));
}

#elif defined(__i386__)

#define TLS_VARIANT 2

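// On the x86 family the segment base (%gs here, %fs on x86_64) cannot
// be read directly from user code; the usual TLS convention, assumed
// here, is that the first word of the TCB is a self-pointer, so a load
// from offset 0 yields the TCB's own address.
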
// Fetches the TCB from the CPU's registers.
static inline __wasi_tcb_t *tcb_get(void) {
  __wasi_tcb_t *tcb;
  asm volatile("mov %%gs:0, %0" : "=r"(tcb));
  return tcb;
}

// Changes the TCB in the CPU's registers.
static inline void tcb_set(__wasi_tcb_t *tcb) {
  asm volatile("mov %0, %%gs:0" : : "r"(tcb));
}

#elif defined(__x86_64__)

#define TLS_VARIANT 2

// Fetches the TCB from the CPU's registers.
static inline __wasi_tcb_t *tcb_get(void) {
  __wasi_tcb_t *tcb;
  asm volatile("mov %%fs:0, %0" : "=r"(tcb));
  return tcb;
}

// Changes the TCB in the CPU's registers.
static inline void tcb_set(__wasi_tcb_t *tcb) {
  asm volatile("mov %0, %%fs:0" : : "r"(tcb));
}

#else
#error "Unsupported architecture"
#endif
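
// Every architecture branch above is expected to define TLS_VARIANT and
// the tcb_get()/tcb_set() accessors, plus TCB_SIZE on the Variant I
// targets; everything below is written purely in terms of those.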

#if TLS_VARIANT == 1

// TLS Variant I: TLS register points to the TCB. The TLS data is stored
// after the TCB. This approach has the disadvantage that the TCB size
// needs to be known.
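//
// A sketch of the combined area the functions below compute over (the
// padding depends on how buf itself happens to be aligned):
//
//   buf: [padding][TCB, TCB_SIZE bytes][TLS data, __pt_tls_memsz_aligned]
//                 ^ tcb_addr(buf)      ^ tls_addr(buf)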

static_assert(sizeof(__wasi_tcb_t) <= TCB_SIZE,
              "TCB does not fit in reserved space before TLS");

// Computes the total size needed to store a TCB with TLS data. The
// extra max(alignment) - 1 bytes of slack guarantee that a suitably
// aligned TCB/TLS pair fits regardless of buf's own alignment.
static inline size_t tls_size(void) {
  return TCB_SIZE + __pt_tls_memsz_aligned +
         (__pt_tls_align > alignof(__wasi_tcb_t) ? __pt_tls_align
                                                 : alignof(__wasi_tcb_t)) -
         1;
}
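
// For example, assuming hypothetical values TCB_SIZE == 16,
// __pt_tls_memsz_aligned == 64 and __pt_tls_align == 16, this reserves
// 16 + 64 + 16 - 1 = 95 bytes: enough to carve a 16-byte-aligned
// TCB/TLS pair out of buf wherever the allocator happened to place it.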

// Computes the address of the TCB in the combined TCB/TLS area.
static inline __wasi_tcb_t *tcb_addr(char *buf) {
  if (alignof(__wasi_tcb_t) < __pt_tls_align) {
    return (
        __wasi_tcb_t *)(__roundup((uintptr_t)buf + TCB_SIZE, __pt_tls_align) -
                        TCB_SIZE);
  } else {
    return (__wasi_tcb_t *)__roundup((uintptr_t)buf, alignof(__wasi_tcb_t));
  }
}

// Computes the address of the TLS data in the combined TCB/TLS area.
static inline char *tls_addr(char *buf) {
  return (char *)tcb_addr(buf) + TCB_SIZE;
}

// Fetches the TLS area of the currently running thread.
static inline char *tls_get(void) {
  return (char *)tcb_get() + TCB_SIZE;
}

#elif TLS_VARIANT == 2

// TLS Variant II: TLS register points to the TCB. The TLS data is
// stored before the TCB. This approach has the advantage that the TCB
// size does not need to be known.
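//
// A sketch of the mirrored layout (compare with Variant I above):
//
//   buf: [padding][TLS data, __pt_tls_memsz_aligned][TCB]
//                 ^ tls_addr(buf)                   ^ tcb_addr(buf)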

// Computes the total size needed to store a TCB with TLS data. As in
// Variant I, the slack is the larger of the two alignments minus one.
static inline size_t tls_size(void) {
  return __pt_tls_memsz_aligned + sizeof(__wasi_tcb_t) +
         (__pt_tls_align > alignof(__wasi_tcb_t) ? __pt_tls_align
                                                 : alignof(__wasi_tcb_t)) -
         1;
}

// Computes the address of the TLS data in the combined TCB/TLS area.
static inline char *tls_addr(char *buf) {
  if (alignof(__wasi_tcb_t) < __pt_tls_align) {
    return (char *)__roundup((uintptr_t)buf, __pt_tls_align);
  } else {
    return (char *)(__roundup((uintptr_t)buf + __pt_tls_memsz_aligned,
                              alignof(__wasi_tcb_t)) -
                    __pt_tls_memsz_aligned);
  }
}

// Computes the address of the TCB in the combined TCB/TLS area.
static inline __wasi_tcb_t *tcb_addr(char *buf) {
  return (__wasi_tcb_t *)(tls_addr(buf) + __pt_tls_memsz_aligned);
}

// Fetches the TLS area of the currently running thread.
static inline char *tls_get(void) {
  return (char *)tcb_get() - __pt_tls_memsz_aligned;
}

#else
#error "Unknown TLS variant"
#endif

// Changes the CPU's registers to point to a new TLS area.
//
// The TCB of the old TLS area is copied into the new one first, so that
// the runtime (kernel, emulator, etc.) retains access to its own
// private data.
static inline void tls_replace(char *buf) {
  __wasi_tcb_t *tcb = tcb_addr(buf);
  *tcb = *tcb_get();
  tcb_set(tcb);
}
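
// A usage sketch (hypothetical caller; copying the TLS initialization
// image into tls_addr(buf) is elided here):
//
//   char *buf = malloc(tls_size());
//   if (buf != NULL)
//     tls_replace(buf);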

#endif