]> git.proxmox.com Git - mirror_edk2.git/blame - BaseTools/Source/C/Common/PeCoffLoaderEx.c
Sync EDKII BaseTools to BaseTools project r1971
[mirror_edk2.git] / BaseTools / Source / C / Common / PeCoffLoaderEx.c
CommitLineData
30fdf114
LG
1/** @file\r
2\r
40d841f6
LG
3Copyright (c) 2004 - 2008, Intel Corporation. All rights reserved.<BR>\r
4This program and the accompanying materials \r
30fdf114
LG
5are licensed and made available under the terms and conditions of the BSD License \r
6which accompanies this distribution. The full text of the license may be found at \r
7http://opensource.org/licenses/bsd-license.php \r
8 \r
9THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, \r
10WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. \r
11\r
12Module Name:\r
13\r
14 PeCoffLoaderEx.c\r
15\r
16Abstract:\r
17\r
18 IA32, X64 and IPF Specific relocation fixups\r
19\r
20Revision History\r
21\r
22--*/\r
23\r
24#include <Common/UefiBaseTypes.h>\r
25#include <IndustryStandard/PeImage.h>\r
26#include "PeCoffLib.h"\r
27\r
//
// EXT_IMM64: extract a Size-bit field located at bit InstPos of the 32-bit
// instruction word at Address, and OR it into Value at bit ValPos.  Used to
// gather the scattered IMM64 operand of an IPF movl instruction bundle.
//
#define EXT_IMM64(Value, Address, Size, InstPos, ValPos) \
  Value |= (((UINT64)((*(Address) >> InstPos) & (((UINT64)1 << Size) - 1))) << ValPos)

//
// INS_IMM64: insert bits [ValPos, ValPos + Size) of Value into the 32-bit
// instruction word at Address at bit position InstPos, preserving all other
// bits of the instruction word.
//
#define INS_IMM64(Value, Address, Size, InstPos, ValPos) \
  *(UINT32*)Address = (*(UINT32*)Address & ~(((1 << Size) - 1) << InstPos)) | \
    ((UINT32)((((UINT64)Value >> ValPos) & (((UINT64)1 << Size) - 1))) << InstPos)

//
// Field descriptors for the pieces of the 64-bit immediate of the IPF movl
// instruction.  Each field is described by four values:
//   *_INST_WORD_X     - which 32-bit word of the 16-byte bundle holds the field
//   *_SIZE_X          - width of the field in bits
//   *_INST_WORD_POS_X - bit position of the field within that instruction word
//   *_VAL_POS_X       - bit position of the field within the 64-bit value
//
#define IMM64_IMM7B_INST_WORD_X         3
#define IMM64_IMM7B_SIZE_X              7
#define IMM64_IMM7B_INST_WORD_POS_X     4
#define IMM64_IMM7B_VAL_POS_X           0

#define IMM64_IMM9D_INST_WORD_X         3
#define IMM64_IMM9D_SIZE_X              9
#define IMM64_IMM9D_INST_WORD_POS_X     18
#define IMM64_IMM9D_VAL_POS_X           7

#define IMM64_IMM5C_INST_WORD_X         3
#define IMM64_IMM5C_SIZE_X              5
#define IMM64_IMM5C_INST_WORD_POS_X     13
#define IMM64_IMM5C_VAL_POS_X           16

#define IMM64_IC_INST_WORD_X            3
#define IMM64_IC_SIZE_X                 1
#define IMM64_IC_INST_WORD_POS_X        12
#define IMM64_IC_VAL_POS_X              21

#define IMM64_IMM41a_INST_WORD_X        1
#define IMM64_IMM41a_SIZE_X             10
#define IMM64_IMM41a_INST_WORD_POS_X    14
#define IMM64_IMM41a_VAL_POS_X          22

#define IMM64_IMM41b_INST_WORD_X        1
#define IMM64_IMM41b_SIZE_X             8
#define IMM64_IMM41b_INST_WORD_POS_X    24
#define IMM64_IMM41b_VAL_POS_X          32

#define IMM64_IMM41c_INST_WORD_X        2
#define IMM64_IMM41c_SIZE_X             23
#define IMM64_IMM41c_INST_WORD_POS_X    0
#define IMM64_IMM41c_VAL_POS_X          40

#define IMM64_SIGN_INST_WORD_X          3
#define IMM64_SIGN_SIZE_X               1
#define IMM64_SIGN_INST_WORD_POS_X      27
#define IMM64_SIGN_VAL_POS_X            63
75RETURN_STATUS\r
76PeCoffLoaderRelocateIa32Image (\r
77 IN UINT16 *Reloc,\r
78 IN OUT CHAR8 *Fixup,\r
79 IN OUT CHAR8 **FixupData,\r
80 IN UINT64 Adjust\r
81 )\r
82/*++\r
83\r
84Routine Description:\r
85\r
86 Performs an IA-32 specific relocation fixup\r
87\r
88Arguments:\r
89\r
90 Reloc - Pointer to the relocation record\r
91\r
92 Fixup - Pointer to the address to fix up\r
93\r
94 FixupData - Pointer to a buffer to log the fixups\r
95\r
96 Adjust - The offset to adjust the fixup\r
97\r
98Returns:\r
99\r
100 EFI_UNSUPPORTED - Unsupported now\r
101\r
102--*/\r
103{\r
104 return RETURN_UNSUPPORTED;\r
105}\r
106\r
/**
  Performs an Itanium-based (IPF) specific relocation fixup.

  @param Reloc      Pointer to the relocation record.
  @param Fixup      Pointer to the address to fix up.
  @param FixupData  Pointer to a buffer to log the fixups.
  @param Adjust     The offset to adjust the fixup.

  @retval RETURN_SUCCESS      The relocation fixup was applied.
  @retval RETURN_UNSUPPORTED  The relocation type is not supported.
**/
RETURN_STATUS
PeCoffLoaderRelocateIpfImage (
  IN UINT16      *Reloc,
  IN OUT CHAR8   *Fixup,
  IN OUT CHAR8   **FixupData,
  IN UINT64      Adjust
  )
{
  UINT64      *F64;
  UINT64      FixupVal;

  //
  // The high 4 bits of the relocation record hold the relocation type.
  //
  switch ((*Reloc) >> 12) {

  case EFI_IMAGE_REL_BASED_DIR64:
    //
    // Plain 64-bit delta fixup.
    //
    F64 = (UINT64 *) Fixup;
    *F64 = *F64 + (UINT64) Adjust;
    if (*FixupData != NULL) {
      *FixupData = ALIGN_POINTER(*FixupData, sizeof(UINT64));
      *(UINT64 *)(*FixupData) = *F64;
      *FixupData = *FixupData + sizeof(UINT64);
    }
    break;

  case EFI_IMAGE_REL_BASED_IA64_IMM64:

    //
    // Align it to bundle address before fixing up the
    // 64-bit immediate value of the movl instruction.
    //

    Fixup = (CHAR8 *)((UINTN) Fixup & (UINTN) ~(15));
    FixupVal = (UINT64)0;

    //
    // Extract the lower 32 bits of IMM64 from bundle.
    // (IMM7B + IMM9D + IMM5C + IC + IMM41a = 7+9+5+1+10 = 32 bits.
    // NOTE(review): the upper fields (IMM41b/IMM41c/SIGN) are not
    // extracted, i.e. the pre-fixup immediate is assumed to have zero
    // upper bits; they are regenerated below from the adjusted value.)
    //
    EXT_IMM64(FixupVal,
              (UINT32 *)Fixup + IMM64_IMM7B_INST_WORD_X,
              IMM64_IMM7B_SIZE_X,
              IMM64_IMM7B_INST_WORD_POS_X,
              IMM64_IMM7B_VAL_POS_X
              );

    EXT_IMM64(FixupVal,
              (UINT32 *)Fixup + IMM64_IMM9D_INST_WORD_X,
              IMM64_IMM9D_SIZE_X,
              IMM64_IMM9D_INST_WORD_POS_X,
              IMM64_IMM9D_VAL_POS_X
              );

    EXT_IMM64(FixupVal,
              (UINT32 *)Fixup + IMM64_IMM5C_INST_WORD_X,
              IMM64_IMM5C_SIZE_X,
              IMM64_IMM5C_INST_WORD_POS_X,
              IMM64_IMM5C_VAL_POS_X
              );

    EXT_IMM64(FixupVal,
              (UINT32 *)Fixup + IMM64_IC_INST_WORD_X,
              IMM64_IC_SIZE_X,
              IMM64_IC_INST_WORD_POS_X,
              IMM64_IC_VAL_POS_X
              );

    EXT_IMM64(FixupVal,
              (UINT32 *)Fixup + IMM64_IMM41a_INST_WORD_X,
              IMM64_IMM41a_SIZE_X,
              IMM64_IMM41a_INST_WORD_POS_X,
              IMM64_IMM41a_VAL_POS_X
              );

    //
    // Update 64-bit address
    //
    FixupVal += Adjust;

    //
    // Insert IMM64 into bundle (all nine scattered fields, including the
    // upper ones and the sign bit).
    //
    INS_IMM64(FixupVal,
              ((UINT32 *)Fixup + IMM64_IMM7B_INST_WORD_X),
              IMM64_IMM7B_SIZE_X,
              IMM64_IMM7B_INST_WORD_POS_X,
              IMM64_IMM7B_VAL_POS_X
              );

    INS_IMM64(FixupVal,
              ((UINT32 *)Fixup + IMM64_IMM9D_INST_WORD_X),
              IMM64_IMM9D_SIZE_X,
              IMM64_IMM9D_INST_WORD_POS_X,
              IMM64_IMM9D_VAL_POS_X
              );

    INS_IMM64(FixupVal,
              ((UINT32 *)Fixup + IMM64_IMM5C_INST_WORD_X),
              IMM64_IMM5C_SIZE_X,
              IMM64_IMM5C_INST_WORD_POS_X,
              IMM64_IMM5C_VAL_POS_X
              );

    INS_IMM64(FixupVal,
              ((UINT32 *)Fixup + IMM64_IC_INST_WORD_X),
              IMM64_IC_SIZE_X,
              IMM64_IC_INST_WORD_POS_X,
              IMM64_IC_VAL_POS_X
              );

    INS_IMM64(FixupVal,
              ((UINT32 *)Fixup + IMM64_IMM41a_INST_WORD_X),
              IMM64_IMM41a_SIZE_X,
              IMM64_IMM41a_INST_WORD_POS_X,
              IMM64_IMM41a_VAL_POS_X
              );

    INS_IMM64(FixupVal,
              ((UINT32 *)Fixup + IMM64_IMM41b_INST_WORD_X),
              IMM64_IMM41b_SIZE_X,
              IMM64_IMM41b_INST_WORD_POS_X,
              IMM64_IMM41b_VAL_POS_X
              );

    INS_IMM64(FixupVal,
              ((UINT32 *)Fixup + IMM64_IMM41c_INST_WORD_X),
              IMM64_IMM41c_SIZE_X,
              IMM64_IMM41c_INST_WORD_POS_X,
              IMM64_IMM41c_VAL_POS_X
              );

    INS_IMM64(FixupVal,
              ((UINT32 *)Fixup + IMM64_SIGN_INST_WORD_X),
              IMM64_SIGN_SIZE_X,
              IMM64_SIGN_INST_WORD_POS_X,
              IMM64_SIGN_VAL_POS_X
              );

    //
    // Log the (bundle-aligned) fixed-up value if a log buffer was supplied.
    //
    F64 = (UINT64 *) Fixup;
    if (*FixupData != NULL) {
      *FixupData = ALIGN_POINTER(*FixupData, sizeof(UINT64));
      *(UINT64 *)(*FixupData) = *F64;
      *FixupData = *FixupData + sizeof(UINT64);
    }
    break;

  default:
    return RETURN_UNSUPPORTED;
  }

  return RETURN_SUCCESS;
}
277\r
278RETURN_STATUS\r
279PeCoffLoaderRelocateX64Image (\r
280 IN UINT16 *Reloc,\r
281 IN OUT CHAR8 *Fixup, \r
282 IN OUT CHAR8 **FixupData,\r
283 IN UINT64 Adjust\r
284 )\r
285/**\r
286 Performs an x64 specific relocation fixup\r
287\r
288 @param Reloc Pointer to the relocation record\r
289 @param Fixup Pointer to the address to fix up\r
290 @param FixupData Pointer to a buffer to log the fixups\r
291 @param Adjust The offset to adjust the fixup\r
292 \r
293 @retval RETURN_SUCCESS Success to perform relocation\r
294 @retval RETURN_UNSUPPORTED Unsupported.\r
295**/\r
296{\r
297 UINT64 *F64;\r
298\r
299 switch ((*Reloc) >> 12) {\r
300\r
301 case EFI_IMAGE_REL_BASED_DIR64:\r
302 F64 = (UINT64 *) Fixup;\r
303 *F64 = *F64 + (UINT64) Adjust;\r
304 if (*FixupData != NULL) {\r
305 *FixupData = ALIGN_POINTER(*FixupData, sizeof(UINT64));\r
306 *(UINT64 *)(*FixupData) = *F64;\r
307 *FixupData = *FixupData + sizeof(UINT64);\r
308 }\r
309 break;\r
310\r
311 default:\r
312 return RETURN_UNSUPPORTED;\r
313 }\r
314\r
315 return RETURN_SUCCESS;\r
316}\r
317\r
40d841f6
LG
318/**\r
319 Pass in a pointer to an ARM MOVT or MOVW immediate instruciton and \r
320 return the immediate data encoded in the instruction\r
321\r
322 @param Instruction Pointer to ARM MOVT or MOVW immediate instruction\r
323\r
324 @return Immediate address encoded in the instruction\r
325\r
326**/\r
327UINT16\r
328ThumbMovtImmediateAddress (\r
329 IN UINT16 *Instruction\r
330 )\r
331{\r
332 UINT32 Movt;\r
333 UINT16 Address;\r
334\r
335 // Thumb2 is two 16-bit instructions working together. Not a single 32-bit instruction\r
336 // Example MOVT R0, #0 is 0x0000f2c0 or 0xf2c0 0x0000\r
337 Movt = (*Instruction << 16) | (*(Instruction + 1)); \r
338\r
339 // imm16 = imm4:i:imm3:imm8\r
340 // imm4 -> Bit19:Bit16\r
341 // i -> Bit26\r
342 // imm3 -> Bit14:Bit12\r
343 // imm8 -> Bit7:Bit0\r
344 Address = (UINT16)(Movt & 0x000000ff); // imm8\r
345 Address |= (UINT16)((Movt >> 4) & 0x0000f700); // imm4 imm3\r
346 Address |= (((Movt & BIT26) != 0) ? BIT11 : 0); // i\r
347 return Address;\r
348}\r
349\r
350\r
351/**\r
352 Update an ARM MOVT or MOVW immediate instruction immediate data.\r
353\r
354 @param Instruction Pointer to ARM MOVT or MOVW immediate instruction\r
355 @param Address New addres to patch into the instruction\r
356**/\r
357VOID\r
358ThumbMovtImmediatePatch (\r
359 IN OUT UINT16 *Instruction,\r
360 IN UINT16 Address\r
361 )\r
362{\r
363 UINT16 Patch;\r
364\r
365 // First 16-bit chunk of instruciton\r
366 Patch = ((Address >> 12) & 0x000f); // imm4 \r
367 Patch |= (((Address & BIT11) != 0) ? BIT10 : 0); // i\r
368 *Instruction = (*Instruction & ~0x040f) | Patch;\r
369\r
370 // Second 16-bit chunk of instruction\r
371 Patch = Address & 0x000000ff; // imm8\r
372 Patch |= ((Address << 4) & 0x00007000); // imm3\r
373 Instruction++;\r
374 *Instruction = (*Instruction & ~0x70ff) | Patch;\r
375}\r
376\r
377/**\r
378 Performs an ARM-based specific relocation fixup and is a no-op on other\r
379 instruction sets.\r
380\r
381 @param Reloc Pointer to the relocation record.\r
382 @param Fixup Pointer to the address to fix up.\r
383 @param FixupData Pointer to a buffer to log the fixups.\r
384 @param Adjust The offset to adjust the fixup.\r
385\r
386 @return Status code.\r
387\r
388**/\r
389RETURN_STATUS\r
390PeCoffLoaderRelocateArmImage (\r
391 IN UINT16 **Reloc,\r
392 IN OUT CHAR8 *Fixup,\r
393 IN OUT CHAR8 **FixupData,\r
394 IN UINT64 Adjust\r
395 )\r
396{\r
397 UINT16 *Fixup16;\r
398 UINT16 FixupVal;\r
399 UINT16 *Addend;\r
400\r
401 Fixup16 = (UINT16 *) Fixup;\r
402\r
403 switch ((**Reloc) >> 12) {\r
404 case EFI_IMAGE_REL_BASED_ARM_THUMB_MOVW:\r
405 FixupVal = ThumbMovtImmediateAddress (Fixup16) + (UINT16)Adjust;\r
406 ThumbMovtImmediatePatch (Fixup16, FixupVal);\r
407\r
408 if (*FixupData != NULL) {\r
409 *FixupData = ALIGN_POINTER (*FixupData, sizeof (UINT16));\r
410 *(UINT16 *)*FixupData = *Fixup16;\r
411 *FixupData = *FixupData + sizeof (UINT16);\r
412 }\r
413 break;\r
414\r
415 case EFI_IMAGE_REL_BASED_ARM_THUMB_MOVT:\r
416 // For MOVT you need to know the lower 16-bits do do the math\r
417 // So this relocation entry is really two entries.\r
418 *Reloc = *Reloc + 1;\r
419 Addend = *Reloc; \r
420 FixupVal = (UINT16)(((ThumbMovtImmediateAddress (Fixup16) << 16) + Adjust + *Addend) >> 16);\r
421 ThumbMovtImmediatePatch (Fixup16, FixupVal);\r
422\r
423 if (*FixupData != NULL) {\r
424 *FixupData = ALIGN_POINTER (*FixupData, sizeof (UINT16));\r
425 *(UINT16 *)*FixupData = *Fixup16;\r
426 *FixupData = *FixupData + sizeof (UINT16);\r
427 }\r
428 break;\r
429 \r
430 default:\r
431 return RETURN_UNSUPPORTED;\r
432 }\r
433\r
434 return RETURN_SUCCESS;\r
435}\r