]>
Commit | Line | Data |
---|---|---|
30fdf114 LG |
1 | /** @file\r |
2 | \r | |
40d841f6 LG |
3 | Copyright (c) 2004 - 2008, Intel Corporation. All rights reserved.<BR>\r |
4 | This program and the accompanying materials \r | |
30fdf114 LG |
5 | are licensed and made available under the terms and conditions of the BSD License \r |
6 | which accompanies this distribution. The full text of the license may be found at \r | |
7 | http://opensource.org/licenses/bsd-license.php \r | |
8 | \r | |
9 | THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, \r | |
10 | WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. \r | |
11 | \r | |
12 | Module Name:\r | |
13 | \r | |
14 | PeCoffLoaderEx.c\r | |
15 | \r | |
16 | Abstract:\r | |
17 | \r | |
18 | IA32, X64 and IPF Specific relocation fixups\r | |
19 | \r | |
20 | Revision History\r | |
21 | \r | |
22 | --*/\r | |
23 | \r | |
24 | #include <Common/UefiBaseTypes.h>\r | |
25 | #include <IndustryStandard/PeImage.h>\r | |
26 | #include "PeCoffLib.h"\r | |
da92f276 LG |
27 | #include "CommonLib.h"\r |
28 | \r | |
30fdf114 LG |
29 | \r |
//
// EXT_IMM64 / INS_IMM64 read and write one bit field of the 64-bit
// immediate operand of the Itanium movl instruction, which is scattered
// across several slots of a 16-byte instruction bundle.
//

//
// OR Size bits, taken from bit InstPos of the instruction word at Address,
// into Value at bit ValPos.
//
#define EXT_IMM64(Value, Address, Size, InstPos, ValPos)  \
    Value |= (((UINT64)((*(Address) >> InstPos) & (((UINT64)1 << Size) - 1))) << ValPos)

//
// Inverse of EXT_IMM64: store Size bits of Value, taken from bit ValPos,
// at bit InstPos of the 32-bit instruction word at Address.
//
#define INS_IMM64(Value, Address, Size, InstPos, ValPos)  \
    *(UINT32*)Address = (*(UINT32*)Address & ~(((1 << Size) - 1) << InstPos)) | \
          ((UINT32)((((UINT64)Value >> ValPos) & (((UINT64)1 << Size) - 1))) << InstPos)

//
// For each field of the movl IMM64 operand: *_INST_WORD_X is the index of
// the 32-bit word inside the bundle, *_SIZE_X the field width in bits,
// *_INST_WORD_POS_X the bit offset of the field inside that word, and
// *_VAL_POS_X the bit offset of the field inside the 64-bit immediate.
//
#define IMM64_IMM7B_INST_WORD_X         3
#define IMM64_IMM7B_SIZE_X              7
#define IMM64_IMM7B_INST_WORD_POS_X     4
#define IMM64_IMM7B_VAL_POS_X           0

#define IMM64_IMM9D_INST_WORD_X         3
#define IMM64_IMM9D_SIZE_X              9
#define IMM64_IMM9D_INST_WORD_POS_X     18
#define IMM64_IMM9D_VAL_POS_X           7

#define IMM64_IMM5C_INST_WORD_X         3
#define IMM64_IMM5C_SIZE_X              5
#define IMM64_IMM5C_INST_WORD_POS_X     13
#define IMM64_IMM5C_VAL_POS_X           16

#define IMM64_IC_INST_WORD_X            3
#define IMM64_IC_SIZE_X                 1
#define IMM64_IC_INST_WORD_POS_X        12
#define IMM64_IC_VAL_POS_X              21

#define IMM64_IMM41a_INST_WORD_X        1
#define IMM64_IMM41a_SIZE_X             10
#define IMM64_IMM41a_INST_WORD_POS_X    14
#define IMM64_IMM41a_VAL_POS_X          22

#define IMM64_IMM41b_INST_WORD_X        1
#define IMM64_IMM41b_SIZE_X             8
#define IMM64_IMM41b_INST_WORD_POS_X    24
#define IMM64_IMM41b_VAL_POS_X          32

#define IMM64_IMM41c_INST_WORD_X        2
#define IMM64_IMM41c_SIZE_X             23
#define IMM64_IMM41c_INST_WORD_POS_X    0
#define IMM64_IMM41c_VAL_POS_X          40

#define IMM64_SIGN_INST_WORD_X          3
#define IMM64_SIGN_SIZE_X               1
#define IMM64_SIGN_INST_WORD_POS_X      27
#define IMM64_SIGN_VAL_POS_X            63
76 | \r | |
77 | RETURN_STATUS\r | |
78 | PeCoffLoaderRelocateIa32Image (\r | |
79 | IN UINT16 *Reloc,\r | |
80 | IN OUT CHAR8 *Fixup,\r | |
81 | IN OUT CHAR8 **FixupData,\r | |
82 | IN UINT64 Adjust\r | |
83 | )\r | |
84 | /*++\r | |
85 | \r | |
86 | Routine Description:\r | |
87 | \r | |
88 | Performs an IA-32 specific relocation fixup\r | |
89 | \r | |
90 | Arguments:\r | |
91 | \r | |
92 | Reloc - Pointer to the relocation record\r | |
93 | \r | |
94 | Fixup - Pointer to the address to fix up\r | |
95 | \r | |
96 | FixupData - Pointer to a buffer to log the fixups\r | |
97 | \r | |
98 | Adjust - The offset to adjust the fixup\r | |
99 | \r | |
100 | Returns:\r | |
101 | \r | |
102 | EFI_UNSUPPORTED - Unsupported now\r | |
103 | \r | |
104 | --*/\r | |
105 | {\r | |
106 | return RETURN_UNSUPPORTED;\r | |
107 | }\r | |
108 | \r | |
RETURN_STATUS
PeCoffLoaderRelocateIpfImage (
  IN UINT16      *Reloc,
  IN OUT CHAR8   *Fixup, 
  IN OUT CHAR8   **FixupData,
  IN UINT64      Adjust
  )
/*++

Routine Description:

  Performs an Itanium-based specific relocation fixup

Arguments:

  Reloc      - Pointer to the relocation record; its upper 4 bits select
               the relocation type

  Fixup      - Pointer to the address to fix up

  FixupData  - Pointer to a buffer to log the fixups; each applied fixup
               appends one UINT64 and advances the pointer

  Adjust     - The offset to adjust the fixup

Returns:

  RETURN_SUCCESS     - The fixup was applied
  RETURN_UNSUPPORTED - The relocation type is not handled

--*/
{
  UINT64      *F64;
  UINT64      FixupVal;

  switch ((*Reloc) >> 12) {

    case EFI_IMAGE_REL_BASED_DIR64:
      //
      // Plain 64-bit pointer fixup: add the image displacement in place.
      //
      F64 = (UINT64 *) Fixup;
      *F64 = *F64 + (UINT64) Adjust;
      if (*FixupData != NULL) {
        *FixupData = ALIGN_POINTER(*FixupData, sizeof(UINT64));
        *(UINT64 *)(*FixupData) = *F64;
        *FixupData = *FixupData + sizeof(UINT64);
      }
      break;

    case EFI_IMAGE_REL_BASED_IA64_IMM64:

      //
      // Align it to bundle address before fixing up the
      // 64-bit immediate value of the movl instruction.
      // (IPF instruction bundles are 16 bytes, 16-byte aligned.)
      //

      Fixup = (CHAR8 *)((UINTN) Fixup & (UINTN) ~(15));
      FixupVal = (UINT64)0;

      // 
      // Extract the lower 32 bits of IMM64 from bundle.
      // NOTE(review): only the fields covering bits 0-31 of the immediate
      // (imm7b, imm9d, imm5c, ic, imm41a) are read back; imm41b, imm41c
      // and the sign bit are assumed zero in the pre-relocation value --
      // confirm against the image producer.
      //
      EXT_IMM64(FixupVal,
                (UINT32 *)Fixup + IMM64_IMM7B_INST_WORD_X,
                IMM64_IMM7B_SIZE_X,
                IMM64_IMM7B_INST_WORD_POS_X,
                IMM64_IMM7B_VAL_POS_X
                );

      EXT_IMM64(FixupVal,
                (UINT32 *)Fixup + IMM64_IMM9D_INST_WORD_X,
                IMM64_IMM9D_SIZE_X,
                IMM64_IMM9D_INST_WORD_POS_X,
                IMM64_IMM9D_VAL_POS_X
                );

      EXT_IMM64(FixupVal,
                (UINT32 *)Fixup + IMM64_IMM5C_INST_WORD_X,
                IMM64_IMM5C_SIZE_X,
                IMM64_IMM5C_INST_WORD_POS_X,
                IMM64_IMM5C_VAL_POS_X
                );

      EXT_IMM64(FixupVal,
                (UINT32 *)Fixup + IMM64_IC_INST_WORD_X,
                IMM64_IC_SIZE_X,
                IMM64_IC_INST_WORD_POS_X,
                IMM64_IC_VAL_POS_X
                );

      EXT_IMM64(FixupVal,
                (UINT32 *)Fixup + IMM64_IMM41a_INST_WORD_X,
                IMM64_IMM41a_SIZE_X,
                IMM64_IMM41a_INST_WORD_POS_X,
                IMM64_IMM41a_VAL_POS_X
                );

      // 
      // Update 64-bit address
      //
      FixupVal += Adjust;

      // 
      // Insert IMM64 into bundle: every field of the movl immediate is
      // rewritten from the adjusted value, including the high fields and
      // the sign bit.
      //
      INS_IMM64(FixupVal,
                ((UINT32 *)Fixup + IMM64_IMM7B_INST_WORD_X),
                IMM64_IMM7B_SIZE_X,
                IMM64_IMM7B_INST_WORD_POS_X,
                IMM64_IMM7B_VAL_POS_X
                );

      INS_IMM64(FixupVal,
                ((UINT32 *)Fixup + IMM64_IMM9D_INST_WORD_X),
                IMM64_IMM9D_SIZE_X,
                IMM64_IMM9D_INST_WORD_POS_X,
                IMM64_IMM9D_VAL_POS_X
                );

      INS_IMM64(FixupVal,
                ((UINT32 *)Fixup + IMM64_IMM5C_INST_WORD_X),
                IMM64_IMM5C_SIZE_X,
                IMM64_IMM5C_INST_WORD_POS_X,
                IMM64_IMM5C_VAL_POS_X
                );

      INS_IMM64(FixupVal,
                ((UINT32 *)Fixup + IMM64_IC_INST_WORD_X),
                IMM64_IC_SIZE_X,
                IMM64_IC_INST_WORD_POS_X,
                IMM64_IC_VAL_POS_X
                );

      INS_IMM64(FixupVal,
                ((UINT32 *)Fixup + IMM64_IMM41a_INST_WORD_X),
                IMM64_IMM41a_SIZE_X,
                IMM64_IMM41a_INST_WORD_POS_X,
                IMM64_IMM41a_VAL_POS_X
                );

      INS_IMM64(FixupVal,
                ((UINT32 *)Fixup + IMM64_IMM41b_INST_WORD_X),
                IMM64_IMM41b_SIZE_X,
                IMM64_IMM41b_INST_WORD_POS_X,
                IMM64_IMM41b_VAL_POS_X
                );

      INS_IMM64(FixupVal,
                ((UINT32 *)Fixup + IMM64_IMM41c_INST_WORD_X),
                IMM64_IMM41c_SIZE_X,
                IMM64_IMM41c_INST_WORD_POS_X,
                IMM64_IMM41c_VAL_POS_X
                );

      INS_IMM64(FixupVal,
                ((UINT32 *)Fixup + IMM64_SIGN_INST_WORD_X),
                IMM64_SIGN_SIZE_X,
                IMM64_SIGN_INST_WORD_POS_X,
                IMM64_SIGN_VAL_POS_X
                );

      //
      // Log the first 64 bits of the (bundle-aligned) patched location.
      //
      F64 = (UINT64 *) Fixup;
      if (*FixupData != NULL) {
        *FixupData = ALIGN_POINTER(*FixupData, sizeof(UINT64));
        *(UINT64 *)(*FixupData) = *F64;
        *FixupData = *FixupData + sizeof(UINT64);
      }
      break;

    default:
      return RETURN_UNSUPPORTED;
  }

  return RETURN_SUCCESS;
}
279 | \r | |
280 | RETURN_STATUS\r | |
281 | PeCoffLoaderRelocateX64Image (\r | |
282 | IN UINT16 *Reloc,\r | |
283 | IN OUT CHAR8 *Fixup, \r | |
284 | IN OUT CHAR8 **FixupData,\r | |
285 | IN UINT64 Adjust\r | |
286 | )\r | |
287 | /**\r | |
288 | Performs an x64 specific relocation fixup\r | |
289 | \r | |
290 | @param Reloc Pointer to the relocation record\r | |
291 | @param Fixup Pointer to the address to fix up\r | |
292 | @param FixupData Pointer to a buffer to log the fixups\r | |
293 | @param Adjust The offset to adjust the fixup\r | |
294 | \r | |
295 | @retval RETURN_SUCCESS Success to perform relocation\r | |
296 | @retval RETURN_UNSUPPORTED Unsupported.\r | |
297 | **/\r | |
298 | {\r | |
299 | UINT64 *F64;\r | |
300 | \r | |
301 | switch ((*Reloc) >> 12) {\r | |
302 | \r | |
303 | case EFI_IMAGE_REL_BASED_DIR64:\r | |
304 | F64 = (UINT64 *) Fixup;\r | |
305 | *F64 = *F64 + (UINT64) Adjust;\r | |
306 | if (*FixupData != NULL) {\r | |
307 | *FixupData = ALIGN_POINTER(*FixupData, sizeof(UINT64));\r | |
308 | *(UINT64 *)(*FixupData) = *F64;\r | |
309 | *FixupData = *FixupData + sizeof(UINT64);\r | |
310 | }\r | |
311 | break;\r | |
312 | \r | |
313 | default:\r | |
314 | return RETURN_UNSUPPORTED;\r | |
315 | }\r | |
316 | \r | |
317 | return RETURN_SUCCESS;\r | |
318 | }\r | |
319 | \r | |
40d841f6 LG |
320 | /**\r |
321 | Pass in a pointer to an ARM MOVT or MOVW immediate instruciton and \r | |
322 | return the immediate data encoded in the instruction\r | |
323 | \r | |
324 | @param Instruction Pointer to ARM MOVT or MOVW immediate instruction\r | |
325 | \r | |
326 | @return Immediate address encoded in the instruction\r | |
327 | \r | |
328 | **/\r | |
329 | UINT16\r | |
330 | ThumbMovtImmediateAddress (\r | |
331 | IN UINT16 *Instruction\r | |
332 | )\r | |
333 | {\r | |
334 | UINT32 Movt;\r | |
335 | UINT16 Address;\r | |
336 | \r | |
337 | // Thumb2 is two 16-bit instructions working together. Not a single 32-bit instruction\r | |
338 | // Example MOVT R0, #0 is 0x0000f2c0 or 0xf2c0 0x0000\r | |
339 | Movt = (*Instruction << 16) | (*(Instruction + 1)); \r | |
340 | \r | |
341 | // imm16 = imm4:i:imm3:imm8\r | |
342 | // imm4 -> Bit19:Bit16\r | |
343 | // i -> Bit26\r | |
344 | // imm3 -> Bit14:Bit12\r | |
345 | // imm8 -> Bit7:Bit0\r | |
346 | Address = (UINT16)(Movt & 0x000000ff); // imm8\r | |
347 | Address |= (UINT16)((Movt >> 4) & 0x0000f700); // imm4 imm3\r | |
348 | Address |= (((Movt & BIT26) != 0) ? BIT11 : 0); // i\r | |
349 | return Address;\r | |
350 | }\r | |
351 | \r | |
352 | \r | |
353 | /**\r | |
354 | Update an ARM MOVT or MOVW immediate instruction immediate data.\r | |
355 | \r | |
356 | @param Instruction Pointer to ARM MOVT or MOVW immediate instruction\r | |
357 | @param Address New addres to patch into the instruction\r | |
358 | **/\r | |
359 | VOID\r | |
360 | ThumbMovtImmediatePatch (\r | |
361 | IN OUT UINT16 *Instruction,\r | |
362 | IN UINT16 Address\r | |
363 | )\r | |
364 | {\r | |
365 | UINT16 Patch;\r | |
366 | \r | |
367 | // First 16-bit chunk of instruciton\r | |
368 | Patch = ((Address >> 12) & 0x000f); // imm4 \r | |
369 | Patch |= (((Address & BIT11) != 0) ? BIT10 : 0); // i\r | |
370 | *Instruction = (*Instruction & ~0x040f) | Patch;\r | |
371 | \r | |
372 | // Second 16-bit chunk of instruction\r | |
373 | Patch = Address & 0x000000ff; // imm8\r | |
374 | Patch |= ((Address << 4) & 0x00007000); // imm3\r | |
375 | Instruction++;\r | |
376 | *Instruction = (*Instruction & ~0x70ff) | Patch;\r | |
377 | }\r | |
378 | \r | |
da92f276 LG |
379 | /**\r |
380 | Pass in a pointer to an ARM MOVW/MOVT instruciton pair and \r | |
381 | return the immediate data encoded in the two` instruction\r | |
382 | \r | |
383 | @param Instructions Pointer to ARM MOVW/MOVT insturction pair\r | |
384 | \r | |
385 | @return Immediate address encoded in the instructions\r | |
386 | \r | |
387 | **/\r | |
388 | UINT32\r | |
389 | EFIAPI\r | |
390 | ThumbMovwMovtImmediateAddress (\r | |
391 | IN UINT16 *Instructions\r | |
392 | )\r | |
393 | {\r | |
394 | UINT16 *Word;\r | |
395 | UINT16 *Top;\r | |
396 | \r | |
397 | Word = Instructions; // MOVW\r | |
398 | Top = Word + 2; // MOVT\r | |
399 | \r | |
400 | return (ThumbMovtImmediateAddress (Top) << 16) + ThumbMovtImmediateAddress (Word);\r | |
401 | }\r | |
402 | \r | |
403 | \r | |
404 | /**\r | |
405 | Update an ARM MOVW/MOVT immediate instruction instruction pair.\r | |
406 | \r | |
407 | @param Instructions Pointer to ARM MOVW/MOVT instruction pair\r | |
408 | @param Address New addres to patch into the instructions\r | |
409 | **/\r | |
410 | VOID\r | |
411 | EFIAPI\r | |
412 | ThumbMovwMovtImmediatePatch (\r | |
413 | IN OUT UINT16 *Instructions,\r | |
414 | IN UINT32 Address\r | |
415 | )\r | |
416 | {\r | |
417 | UINT16 *Word;\r | |
418 | UINT16 *Top;\r | |
419 | \r | |
420 | Word = (UINT16 *)Instructions; // MOVW\r | |
421 | Top = Word + 2; // MOVT\r | |
422 | \r | |
423 | ThumbMovtImmediatePatch (Word, (UINT16)(Address & 0xffff));\r | |
424 | ThumbMovtImmediatePatch (Top, (UINT16)(Address >> 16));\r | |
425 | }\r | |
426 | \r | |
427 | \r | |
40d841f6 LG |
428 | /**\r |
429 | Performs an ARM-based specific relocation fixup and is a no-op on other\r | |
430 | instruction sets.\r | |
431 | \r | |
432 | @param Reloc Pointer to the relocation record.\r | |
433 | @param Fixup Pointer to the address to fix up.\r | |
434 | @param FixupData Pointer to a buffer to log the fixups.\r | |
435 | @param Adjust The offset to adjust the fixup.\r | |
436 | \r | |
437 | @return Status code.\r | |
438 | \r | |
439 | **/\r | |
440 | RETURN_STATUS\r | |
441 | PeCoffLoaderRelocateArmImage (\r | |
442 | IN UINT16 **Reloc,\r | |
443 | IN OUT CHAR8 *Fixup,\r | |
444 | IN OUT CHAR8 **FixupData,\r | |
445 | IN UINT64 Adjust\r | |
446 | )\r | |
447 | {\r | |
448 | UINT16 *Fixup16;\r | |
da92f276 | 449 | UINT32 FixupVal;\r |
40d841f6 | 450 | \r |
da92f276 | 451 | Fixup16 = (UINT16 *) Fixup;\r |
40d841f6 LG |
452 | \r |
453 | switch ((**Reloc) >> 12) {\r | |
da92f276 LG |
454 | \r |
455 | case EFI_IMAGE_REL_BASED_ARM_MOV32T:\r | |
456 | FixupVal = ThumbMovwMovtImmediateAddress (Fixup16) + (UINT32)Adjust;\r | |
457 | ThumbMovwMovtImmediatePatch (Fixup16, FixupVal);\r | |
458 | \r | |
459 | \r | |
40d841f6 | 460 | if (*FixupData != NULL) {\r |
da92f276 LG |
461 | *FixupData = ALIGN_POINTER(*FixupData, sizeof(UINT64));\r |
462 | *(UINT64 *)(*FixupData) = *Fixup16;\r | |
463 | CopyMem (*FixupData, Fixup16, sizeof (UINT64));\r | |
40d841f6 LG |
464 | }\r |
465 | break;\r | |
466 | \r | |
da92f276 LG |
467 | case EFI_IMAGE_REL_BASED_ARM_MOV32A:\r |
468 | // break omitted - ARM instruction encoding not implemented\r | |
40d841f6 LG |
469 | default:\r |
470 | return RETURN_UNSUPPORTED;\r | |
471 | }\r | |
472 | \r | |
473 | return RETURN_SUCCESS;\r | |
474 | }\r |