ArmPkg/Library/BasePeCoffLib/Ipf/PeCoffLoaderEx.c
/** @file
  Fixes Intel Itanium(TM) specific relocation types.

  Copyright (c) 2006 - 2008, Intel Corporation. All rights reserved.<BR>
  This program and the accompanying materials
  are licensed and made available under the terms and conditions of the BSD License
  which accompanies this distribution.  The full text of the license may be found at
  http://opensource.org/licenses/bsd-license.php.

  THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
  WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.

**/

#include "BasePeCoffLibInternals.h"


#define EXT_IMM64(Value, Address, Size, InstPos, ValPos)  \
    Value |= (((UINT64)((*(Address) >> InstPos) & (((UINT64)1 << Size) - 1))) << ValPos)

#define INS_IMM64(Value, Address, Size, InstPos, ValPos)  \
    *(UINT32*)Address = (*(UINT32*)Address & ~(((1 << Size) - 1) << InstPos)) | \
          ((UINT32)((((UINT64)Value >> ValPos) & (((UINT64)1 << Size) - 1))) << InstPos)

#define IMM64_IMM7B_INST_WORD_X       3
#define IMM64_IMM7B_SIZE_X            7
#define IMM64_IMM7B_INST_WORD_POS_X   4
#define IMM64_IMM7B_VAL_POS_X         0

#define IMM64_IMM9D_INST_WORD_X       3
#define IMM64_IMM9D_SIZE_X            9
#define IMM64_IMM9D_INST_WORD_POS_X   18
#define IMM64_IMM9D_VAL_POS_X         7

#define IMM64_IMM5C_INST_WORD_X       3
#define IMM64_IMM5C_SIZE_X            5
#define IMM64_IMM5C_INST_WORD_POS_X   13
#define IMM64_IMM5C_VAL_POS_X         16

#define IMM64_IC_INST_WORD_X          3
#define IMM64_IC_SIZE_X               1
#define IMM64_IC_INST_WORD_POS_X      12
#define IMM64_IC_VAL_POS_X            21

#define IMM64_IMM41A_INST_WORD_X      1
#define IMM64_IMM41A_SIZE_X           10
#define IMM64_IMM41A_INST_WORD_POS_X  14
#define IMM64_IMM41A_VAL_POS_X        22

#define IMM64_IMM41B_INST_WORD_X      1
#define IMM64_IMM41B_SIZE_X           8
#define IMM64_IMM41B_INST_WORD_POS_X  24
#define IMM64_IMM41B_VAL_POS_X        32

#define IMM64_IMM41C_INST_WORD_X      2
#define IMM64_IMM41C_SIZE_X           23
#define IMM64_IMM41C_INST_WORD_POS_X  0
#define IMM64_IMM41C_VAL_POS_X        40

#define IMM64_SIGN_INST_WORD_X        3
#define IMM64_SIGN_SIZE_X             1
#define IMM64_SIGN_INST_WORD_POS_X    27
#define IMM64_SIGN_VAL_POS_X          63

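//
// Illustrative sketch (kept out of the build with #if 0): an IA-64 movl
// scatters its 64-bit immediate across the 16-byte instruction bundle, and
// each IMM64_*_X group above records which 32-bit word of the bundle the
// field lives in, its width in bits, its bit position within that word, and
// its bit position within the reassembled 64-bit value.  The snippet shows
// how one field (imm7b) is gathered and scattered with the macros; Bundle
// and Imm are hypothetical locals standing in for the bundle at the fixup
// address.
//
#if 0
  UINT32  Bundle[4];    // a 16-byte bundle viewed as four 32-bit words
  UINT64  Imm;

  Imm = 0;
  //
  // imm7b: bits 4..10 of bundle word 3 become bits 0..6 of Imm.
  //
  EXT_IMM64 (Imm,
             &Bundle[IMM64_IMM7B_INST_WORD_X],
             IMM64_IMM7B_SIZE_X,
             IMM64_IMM7B_INST_WORD_POS_X,
             IMM64_IMM7B_VAL_POS_X
             );

  //
  // After Imm has been adjusted, write the same field back into the bundle.
  //
  INS_IMM64 (Imm,
             &Bundle[IMM64_IMM7B_INST_WORD_X],
             IMM64_IMM7B_SIZE_X,
             IMM64_IMM7B_INST_WORD_POS_X,
             IMM64_IMM7B_VAL_POS_X
             );
#endif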
/**
  Performs an Itanium-specific relocation fixup.

  @param Reloc          The pointer to the relocation record.
  @param Fixup          The pointer to the address to fix up.
  @param FixupData      The pointer to a buffer to log the fixups.
  @param Adjust         The offset to adjust the fixup.

  @retval RETURN_SUCCESS      The relocation entry was fixed up successfully.
  @retval RETURN_UNSUPPORTED  Unrecognized relocation entry.

**/
RETURN_STATUS
PeCoffLoaderRelocateImageEx (
  IN UINT16      **Reloc,
  IN OUT CHAR8   *Fixup,
  IN OUT CHAR8   **FixupData,
  IN UINT64      Adjust
  )
{
  UINT64  *Fixup64;
  UINT64  FixupVal;

  switch ((**Reloc) >> 12) {
  case EFI_IMAGE_REL_BASED_IA64_IMM64:

    //
    // Align it to bundle address before fixing up the
    // 64-bit immediate value of the movl instruction.
    //

    Fixup    = (CHAR8 *)((UINTN) Fixup & (UINTN) ~(15));
    FixupVal = (UINT64)0;

    //
    // Extract the lower 32 bits of IMM64 from bundle
    //
    EXT_IMM64(FixupVal,
              (UINT32 *)Fixup + IMM64_IMM7B_INST_WORD_X,
              IMM64_IMM7B_SIZE_X,
              IMM64_IMM7B_INST_WORD_POS_X,
              IMM64_IMM7B_VAL_POS_X
              );

    EXT_IMM64(FixupVal,
              (UINT32 *)Fixup + IMM64_IMM9D_INST_WORD_X,
              IMM64_IMM9D_SIZE_X,
              IMM64_IMM9D_INST_WORD_POS_X,
              IMM64_IMM9D_VAL_POS_X
              );

    EXT_IMM64(FixupVal,
              (UINT32 *)Fixup + IMM64_IMM5C_INST_WORD_X,
              IMM64_IMM5C_SIZE_X,
              IMM64_IMM5C_INST_WORD_POS_X,
              IMM64_IMM5C_VAL_POS_X
              );

    EXT_IMM64(FixupVal,
              (UINT32 *)Fixup + IMM64_IC_INST_WORD_X,
              IMM64_IC_SIZE_X,
              IMM64_IC_INST_WORD_POS_X,
              IMM64_IC_VAL_POS_X
              );

    EXT_IMM64(FixupVal,
              (UINT32 *)Fixup + IMM64_IMM41A_INST_WORD_X,
              IMM64_IMM41A_SIZE_X,
              IMM64_IMM41A_INST_WORD_POS_X,
              IMM64_IMM41A_VAL_POS_X
              );

    //
    // Update 64-bit address
    //
    FixupVal += Adjust;

    //
    // Insert IMM64 into bundle
    //
    INS_IMM64(FixupVal,
              ((UINT32 *)Fixup + IMM64_IMM7B_INST_WORD_X),
              IMM64_IMM7B_SIZE_X,
              IMM64_IMM7B_INST_WORD_POS_X,
              IMM64_IMM7B_VAL_POS_X
              );

    INS_IMM64(FixupVal,
              ((UINT32 *)Fixup + IMM64_IMM9D_INST_WORD_X),
              IMM64_IMM9D_SIZE_X,
              IMM64_IMM9D_INST_WORD_POS_X,
              IMM64_IMM9D_VAL_POS_X
              );

    INS_IMM64(FixupVal,
              ((UINT32 *)Fixup + IMM64_IMM5C_INST_WORD_X),
              IMM64_IMM5C_SIZE_X,
              IMM64_IMM5C_INST_WORD_POS_X,
              IMM64_IMM5C_VAL_POS_X
              );

    INS_IMM64(FixupVal,
              ((UINT32 *)Fixup + IMM64_IC_INST_WORD_X),
              IMM64_IC_SIZE_X,
              IMM64_IC_INST_WORD_POS_X,
              IMM64_IC_VAL_POS_X
              );

    INS_IMM64(FixupVal,
              ((UINT32 *)Fixup + IMM64_IMM41A_INST_WORD_X),
              IMM64_IMM41A_SIZE_X,
              IMM64_IMM41A_INST_WORD_POS_X,
              IMM64_IMM41A_VAL_POS_X
              );

    INS_IMM64(FixupVal,
              ((UINT32 *)Fixup + IMM64_IMM41B_INST_WORD_X),
              IMM64_IMM41B_SIZE_X,
              IMM64_IMM41B_INST_WORD_POS_X,
              IMM64_IMM41B_VAL_POS_X
              );

    INS_IMM64(FixupVal,
              ((UINT32 *)Fixup + IMM64_IMM41C_INST_WORD_X),
              IMM64_IMM41C_SIZE_X,
              IMM64_IMM41C_INST_WORD_POS_X,
              IMM64_IMM41C_VAL_POS_X
              );

    INS_IMM64(FixupVal,
              ((UINT32 *)Fixup + IMM64_SIGN_INST_WORD_X),
              IMM64_SIGN_SIZE_X,
              IMM64_SIGN_INST_WORD_POS_X,
              IMM64_SIGN_VAL_POS_X
              );

    Fixup64 = (UINT64 *) Fixup;
    if (*FixupData != NULL) {
      *FixupData = ALIGN_POINTER(*FixupData, sizeof(UINT64));
      *(UINT64 *)(*FixupData) = *Fixup64;
      *FixupData = *FixupData + sizeof(UINT64);
    }
    break;

  default:
    return RETURN_UNSUPPORTED;
  }

  return RETURN_SUCCESS;
}

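//
// Illustrative sketch (kept out of the build with #if 0): the relocation
// entry that Reloc points at uses the standard PE/COFF base-relocation
// encoding, with the type in the upper 4 bits and the page offset in the
// lower 12 bits, which is why the switch above tests (**Reloc) >> 12.  The
// snippet shows the shape of the call a generic relocation loop would make
// for an IMM64 entry; RelocEntry, PageBase, FixupLog and LoadDelta are
// hypothetical locals assumed to be set up by that loop.
//
#if 0
  UINT16         *RelocEntry;    // current entry within a .reloc block
  CHAR8          *PageBase;      // loaded address of the page the block covers
  CHAR8          *FixupLog;      // optional fixup log for runtime relocation, may be NULL
  UINT64         LoadDelta;      // actual load address minus linked image base
  CHAR8          *FixupAddress;
  RETURN_STATUS  Status;

  FixupAddress = PageBase + (*RelocEntry & 0xFFF);
  if (((*RelocEntry) >> 12) == EFI_IMAGE_REL_BASED_IA64_IMM64) {
    Status = PeCoffLoaderRelocateImageEx (&RelocEntry, FixupAddress, &FixupLog, LoadDelta);
  }
#endif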
/**
  Returns TRUE if the machine type of the PE/COFF image is supported. Supported
  does not mean the image can be executed; it means the PE/COFF loader supports
  loading and relocating images of that machine type. It is up to the caller to
  support the entry point.

  The Itanium version of the PE/COFF loader/relocator supports Itanium and EBC
  images.

  @param  Machine   Machine type from the PE Header.

  @return TRUE   if this PE/COFF loader can load the image.
  @return FALSE  if the machine type of the image is unrecognized.

**/
BOOLEAN
PeCoffLoaderImageFormatSupported (
  IN UINT16  Machine
  )
{
  if ((Machine == IMAGE_FILE_MACHINE_IA64) || (Machine == IMAGE_FILE_MACHINE_EBC)) {
    return TRUE;
  }

  return FALSE;
}

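//
// Illustrative sketch (kept out of the build with #if 0): a caller would
// typically gate on the machine type from the PE file header before
// attempting to load the image.  MachineType is a hypothetical value read
// from the image's EFI_IMAGE_FILE_HEADER.Machine field.
//
#if 0
  UINT16  MachineType;

  if (!PeCoffLoaderImageFormatSupported (MachineType)) {
    return RETURN_UNSUPPORTED;
  }
#endif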

/**
  Performs an Itanium-specific relocation fixup and is a no-op on other
  instruction sets.

  @param  Reloc       Pointer to pointer to the relocation record.
  @param  Fixup       Pointer to the address to fix up.
  @param  FixupData   Pointer to a buffer to log the fixups.
  @param  Adjust      The offset to adjust the fixup.

  @return Status code.

**/
RETURN_STATUS
PeHotRelocateImageEx (
  IN UINT16      **Reloc,
  IN OUT CHAR8   *Fixup,
  IN OUT CHAR8   **FixupData,
  IN UINT64      Adjust
  )
{
  UINT64  *Fixup64;
  UINT64  FixupVal;

  switch ((**Reloc) >> 12) {
  case EFI_IMAGE_REL_BASED_DIR64:
    Fixup64    = (UINT64 *) Fixup;
    *FixupData = ALIGN_POINTER (*FixupData, sizeof (UINT64));
    if (*(UINT64 *) (*FixupData) == *Fixup64) {
      *Fixup64 = *Fixup64 + (UINT64) Adjust;
    }

    *FixupData = *FixupData + sizeof (UINT64);
    break;

  case EFI_IMAGE_REL_BASED_IA64_IMM64:
    Fixup64    = (UINT64 *) Fixup;
    *FixupData = ALIGN_POINTER (*FixupData, sizeof (UINT64));
    if (*(UINT64 *) (*FixupData) == *Fixup64) {
      //
      // Align it to bundle address before fixing up the
      // 64-bit immediate value of the movl instruction.
      //
      Fixup    = (CHAR8 *) ((UINT64) Fixup & (UINT64)~(15));
      FixupVal = (UINT64) 0;

      //
      // Extract the lower 32 bits of IMM64 from bundle
      //
      EXT_IMM64 (
        FixupVal,
        (UINT32 *) Fixup + IMM64_IMM7B_INST_WORD_X,
        IMM64_IMM7B_SIZE_X,
        IMM64_IMM7B_INST_WORD_POS_X,
        IMM64_IMM7B_VAL_POS_X
        );

      EXT_IMM64 (
        FixupVal,
        (UINT32 *) Fixup + IMM64_IMM9D_INST_WORD_X,
        IMM64_IMM9D_SIZE_X,
        IMM64_IMM9D_INST_WORD_POS_X,
        IMM64_IMM9D_VAL_POS_X
        );

      EXT_IMM64 (
        FixupVal,
        (UINT32 *) Fixup + IMM64_IMM5C_INST_WORD_X,
        IMM64_IMM5C_SIZE_X,
        IMM64_IMM5C_INST_WORD_POS_X,
        IMM64_IMM5C_VAL_POS_X
        );

      EXT_IMM64 (
        FixupVal,
        (UINT32 *) Fixup + IMM64_IC_INST_WORD_X,
        IMM64_IC_SIZE_X,
        IMM64_IC_INST_WORD_POS_X,
        IMM64_IC_VAL_POS_X
        );

      EXT_IMM64 (
        FixupVal,
        (UINT32 *) Fixup + IMM64_IMM41A_INST_WORD_X,
        IMM64_IMM41A_SIZE_X,
        IMM64_IMM41A_INST_WORD_POS_X,
        IMM64_IMM41A_VAL_POS_X
        );

      //
      // Update 64-bit address
      //
      FixupVal += Adjust;

      //
      // Insert IMM64 into bundle
      //
      INS_IMM64 (
        FixupVal,
        ((UINT32 *) Fixup + IMM64_IMM7B_INST_WORD_X),
        IMM64_IMM7B_SIZE_X,
        IMM64_IMM7B_INST_WORD_POS_X,
        IMM64_IMM7B_VAL_POS_X
        );

      INS_IMM64 (
        FixupVal,
        ((UINT32 *) Fixup + IMM64_IMM9D_INST_WORD_X),
        IMM64_IMM9D_SIZE_X,
        IMM64_IMM9D_INST_WORD_POS_X,
        IMM64_IMM9D_VAL_POS_X
        );

      INS_IMM64 (
        FixupVal,
        ((UINT32 *) Fixup + IMM64_IMM5C_INST_WORD_X),
        IMM64_IMM5C_SIZE_X,
        IMM64_IMM5C_INST_WORD_POS_X,
        IMM64_IMM5C_VAL_POS_X
        );

      INS_IMM64 (
        FixupVal,
        ((UINT32 *) Fixup + IMM64_IC_INST_WORD_X),
        IMM64_IC_SIZE_X,
        IMM64_IC_INST_WORD_POS_X,
        IMM64_IC_VAL_POS_X
        );

      INS_IMM64 (
        FixupVal,
        ((UINT32 *) Fixup + IMM64_IMM41A_INST_WORD_X),
        IMM64_IMM41A_SIZE_X,
        IMM64_IMM41A_INST_WORD_POS_X,
        IMM64_IMM41A_VAL_POS_X
        );

      INS_IMM64 (
        FixupVal,
        ((UINT32 *) Fixup + IMM64_IMM41B_INST_WORD_X),
        IMM64_IMM41B_SIZE_X,
        IMM64_IMM41B_INST_WORD_POS_X,
        IMM64_IMM41B_VAL_POS_X
        );

      INS_IMM64 (
        FixupVal,
        ((UINT32 *) Fixup + IMM64_IMM41C_INST_WORD_X),
        IMM64_IMM41C_SIZE_X,
        IMM64_IMM41C_INST_WORD_POS_X,
        IMM64_IMM41C_VAL_POS_X
        );

      INS_IMM64 (
        FixupVal,
        ((UINT32 *) Fixup + IMM64_SIGN_INST_WORD_X),
        IMM64_SIGN_SIZE_X,
        IMM64_SIGN_INST_WORD_POS_X,
        IMM64_SIGN_VAL_POS_X
        );

      *(UINT64 *) (*FixupData) = *Fixup64;
    }

    *FixupData = *FixupData + sizeof (UINT64);
    break;

  default:
    DEBUG ((EFI_D_ERROR, "PeHotRelocateEx: unknown fixup type\n"));
    return RETURN_UNSUPPORTED;
  }

  return RETURN_SUCCESS;
}