]>
Commit | Line | Data |
---|---|---|
30fdf114 LG |
1 | /** @file\r |
2 | \r | |
40d841f6 | 3 | Copyright (c) 2004 - 2008, Intel Corporation. All rights reserved.<BR>\r |
4afd3d04 | 4 | Portions Copyright (c) 2011 - 2013, ARM Ltd. All rights reserved.<BR>\r |
40d841f6 | 5 | This program and the accompanying materials \r |
30fdf114 LG |
6 | are licensed and made available under the terms and conditions of the BSD License \r |
7 | which accompanies this distribution. The full text of the license may be found at \r | |
8 | http://opensource.org/licenses/bsd-license.php \r | |
9 | \r | |
10 | THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, \r | |
11 | WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. \r | |
12 | \r | |
13 | Module Name:\r | |
14 | \r | |
15 | PeCoffLoaderEx.c\r | |
16 | \r | |
17 | Abstract:\r | |
18 | \r | |
4afd3d04 | 19 | IA32, X64, IPF, ARM and AArch64 Specific relocation fixups\r |
30fdf114 LG |
20 | \r |
21 | Revision History\r | |
22 | \r | |
23 | --*/\r | |
24 | \r | |
25 | #include <Common/UefiBaseTypes.h>\r | |
26 | #include <IndustryStandard/PeImage.h>\r | |
27 | #include "PeCoffLib.h"\r | |
da92f276 | 28 | #include "CommonLib.h"\r |
4afd3d04 | 29 | #include "EfiUtilityMsgs.h"\r |
da92f276 | 30 | \r |
30fdf114 LG |
31 | \r |
//
// EXT_IMM64: extract a Size-bit field located at bit InstPos of the 32-bit
// instruction word *Address and OR it into Value at bit ValPos.  Used to
// reassemble the 64-bit immediate of an IA64 movl from its bundle slots.
//
#define EXT_IMM64(Value, Address, Size, InstPos, ValPos) \
  Value |= (((UINT64)((*(Address) >> InstPos) & (((UINT64)1 << Size) - 1))) << ValPos)

//
// INS_IMM64: replace the Size-bit field at bit InstPos of the 32-bit
// instruction word *Address with bits [ValPos+Size-1:ValPos] of Value.
// Inverse of EXT_IMM64; used to write the fixed-up immediate back.
//
#define INS_IMM64(Value, Address, Size, InstPos, ValPos) \
  *(UINT32*)Address = (*(UINT32*)Address & ~(((1 << Size) - 1) << InstPos)) | \
                      ((UINT32)((((UINT64)Value >> ValPos) & (((UINT64)1 << Size) - 1))) << InstPos)

//
// Field placement of the IA64 movl imm64 instruction within a 16-byte
// bundle.  For each field of the immediate:
//   *_INST_WORD_X     - index of the 32-bit word in the bundle holding it
//   *_SIZE_X          - width of the field in bits
//   *_INST_WORD_POS_X - bit offset of the field within that word
//   *_VAL_POS_X       - bit offset of the field within the 64-bit immediate
//
#define IMM64_IMM7B_INST_WORD_X         3
#define IMM64_IMM7B_SIZE_X              7
#define IMM64_IMM7B_INST_WORD_POS_X     4
#define IMM64_IMM7B_VAL_POS_X           0

#define IMM64_IMM9D_INST_WORD_X         3
#define IMM64_IMM9D_SIZE_X              9
#define IMM64_IMM9D_INST_WORD_POS_X     18
#define IMM64_IMM9D_VAL_POS_X           7

#define IMM64_IMM5C_INST_WORD_X         3
#define IMM64_IMM5C_SIZE_X              5
#define IMM64_IMM5C_INST_WORD_POS_X     13
#define IMM64_IMM5C_VAL_POS_X           16

#define IMM64_IC_INST_WORD_X            3
#define IMM64_IC_SIZE_X                 1
#define IMM64_IC_INST_WORD_POS_X        12
#define IMM64_IC_VAL_POS_X              21

#define IMM64_IMM41a_INST_WORD_X        1
#define IMM64_IMM41a_SIZE_X             10
#define IMM64_IMM41a_INST_WORD_POS_X    14
#define IMM64_IMM41a_VAL_POS_X          22

#define IMM64_IMM41b_INST_WORD_X        1
#define IMM64_IMM41b_SIZE_X             8
#define IMM64_IMM41b_INST_WORD_POS_X    24
#define IMM64_IMM41b_VAL_POS_X          32

#define IMM64_IMM41c_INST_WORD_X        2
#define IMM64_IMM41c_SIZE_X             23
#define IMM64_IMM41c_INST_WORD_POS_X    0
#define IMM64_IMM41c_VAL_POS_X          40

#define IMM64_SIGN_INST_WORD_X          3
#define IMM64_SIGN_SIZE_X               1
#define IMM64_SIGN_INST_WORD_POS_X      27
#define IMM64_SIGN_VAL_POS_X            63
79 | RETURN_STATUS\r | |
80 | PeCoffLoaderRelocateIa32Image (\r | |
81 | IN UINT16 *Reloc,\r | |
82 | IN OUT CHAR8 *Fixup,\r | |
83 | IN OUT CHAR8 **FixupData,\r | |
84 | IN UINT64 Adjust\r | |
85 | )\r | |
86 | /*++\r | |
87 | \r | |
88 | Routine Description:\r | |
89 | \r | |
90 | Performs an IA-32 specific relocation fixup\r | |
91 | \r | |
92 | Arguments:\r | |
93 | \r | |
94 | Reloc - Pointer to the relocation record\r | |
95 | \r | |
96 | Fixup - Pointer to the address to fix up\r | |
97 | \r | |
98 | FixupData - Pointer to a buffer to log the fixups\r | |
99 | \r | |
100 | Adjust - The offset to adjust the fixup\r | |
101 | \r | |
102 | Returns:\r | |
103 | \r | |
104 | EFI_UNSUPPORTED - Unsupported now\r | |
105 | \r | |
106 | --*/\r | |
107 | {\r | |
108 | return RETURN_UNSUPPORTED;\r | |
109 | }\r | |
110 | \r | |
RETURN_STATUS
PeCoffLoaderRelocateIpfImage (
  IN UINT16      *Reloc,
  IN OUT CHAR8   *Fixup,
  IN OUT CHAR8   **FixupData,
  IN UINT64      Adjust
  )
/*++

Routine Description:

  Performs an Itanium-based specific relocation fixup.  Handles plain
  64-bit direct relocations and the IA64 IMM64 type, which rewrites the
  64-bit immediate of a movl instruction scattered across a 16-byte bundle.

Arguments:

  Reloc      - Pointer to the relocation record (type in the high 4 bits)

  Fixup      - Pointer to the address to fix up

  FixupData  - Pointer to a buffer to log the fixups; advanced past each
               logged 8-byte entry when *FixupData is non-NULL

  Adjust     - The offset to adjust the fixup by

Returns:

  RETURN_SUCCESS on a handled relocation type, RETURN_UNSUPPORTED otherwise

--*/
{
  UINT64      *F64;
  UINT64      FixupVal;

  //
  // Relocation type lives in the top 4 bits of the 16-bit record.
  //
  switch ((*Reloc) >> 12) {

    case EFI_IMAGE_REL_BASED_DIR64:
      //
      // Straight 64-bit slide of the value at Fixup.
      //
      F64 = (UINT64 *) Fixup;
      *F64 = *F64 + (UINT64) Adjust;
      if (*FixupData != NULL) {
        *FixupData = ALIGN_POINTER(*FixupData, sizeof(UINT64));
        *(UINT64 *)(*FixupData) = *F64;
        *FixupData = *FixupData + sizeof(UINT64);
      }
      break;

    case EFI_IMAGE_REL_BASED_IA64_IMM64:

      //
      // Align it to bundle address before fixing up the
      // 64-bit immediate value of the movl instruction.
      //
      Fixup = (CHAR8 *)((UINTN) Fixup & (UINTN) ~(15));
      FixupVal = (UINT64)0;

      //
      // Extract the lower 32 bits of IMM64 from bundle
      // (imm7b + imm9d + imm5c + ic + imm41a = 7+9+5+1+10 = 32 bits).
      //
      EXT_IMM64(FixupVal,
                (UINT32 *)Fixup + IMM64_IMM7B_INST_WORD_X,
                IMM64_IMM7B_SIZE_X,
                IMM64_IMM7B_INST_WORD_POS_X,
                IMM64_IMM7B_VAL_POS_X
                );

      EXT_IMM64(FixupVal,
                (UINT32 *)Fixup + IMM64_IMM9D_INST_WORD_X,
                IMM64_IMM9D_SIZE_X,
                IMM64_IMM9D_INST_WORD_POS_X,
                IMM64_IMM9D_VAL_POS_X
                );

      EXT_IMM64(FixupVal,
                (UINT32 *)Fixup + IMM64_IMM5C_INST_WORD_X,
                IMM64_IMM5C_SIZE_X,
                IMM64_IMM5C_INST_WORD_POS_X,
                IMM64_IMM5C_VAL_POS_X
                );

      EXT_IMM64(FixupVal,
                (UINT32 *)Fixup + IMM64_IC_INST_WORD_X,
                IMM64_IC_SIZE_X,
                IMM64_IC_INST_WORD_POS_X,
                IMM64_IC_VAL_POS_X
                );

      EXT_IMM64(FixupVal,
                (UINT32 *)Fixup + IMM64_IMM41a_INST_WORD_X,
                IMM64_IMM41a_SIZE_X,
                IMM64_IMM41a_INST_WORD_POS_X,
                IMM64_IMM41a_VAL_POS_X
                );

      //
      // Update 64-bit address
      //
      FixupVal += Adjust;

      //
      // Insert IMM64 into bundle.  All fields are written back, including
      // the upper bits (imm41b, imm41c, sign) that were not extracted above.
      //
      INS_IMM64(FixupVal,
                ((UINT32 *)Fixup + IMM64_IMM7B_INST_WORD_X),
                IMM64_IMM7B_SIZE_X,
                IMM64_IMM7B_INST_WORD_POS_X,
                IMM64_IMM7B_VAL_POS_X
                );

      INS_IMM64(FixupVal,
                ((UINT32 *)Fixup + IMM64_IMM9D_INST_WORD_X),
                IMM64_IMM9D_SIZE_X,
                IMM64_IMM9D_INST_WORD_POS_X,
                IMM64_IMM9D_VAL_POS_X
                );

      INS_IMM64(FixupVal,
                ((UINT32 *)Fixup + IMM64_IMM5C_INST_WORD_X),
                IMM64_IMM5C_SIZE_X,
                IMM64_IMM5C_INST_WORD_POS_X,
                IMM64_IMM5C_VAL_POS_X
                );

      INS_IMM64(FixupVal,
                ((UINT32 *)Fixup + IMM64_IC_INST_WORD_X),
                IMM64_IC_SIZE_X,
                IMM64_IC_INST_WORD_POS_X,
                IMM64_IC_VAL_POS_X
                );

      INS_IMM64(FixupVal,
                ((UINT32 *)Fixup + IMM64_IMM41a_INST_WORD_X),
                IMM64_IMM41a_SIZE_X,
                IMM64_IMM41a_INST_WORD_POS_X,
                IMM64_IMM41a_VAL_POS_X
                );

      INS_IMM64(FixupVal,
                ((UINT32 *)Fixup + IMM64_IMM41b_INST_WORD_X),
                IMM64_IMM41b_SIZE_X,
                IMM64_IMM41b_INST_WORD_POS_X,
                IMM64_IMM41b_VAL_POS_X
                );

      INS_IMM64(FixupVal,
                ((UINT32 *)Fixup + IMM64_IMM41c_INST_WORD_X),
                IMM64_IMM41c_SIZE_X,
                IMM64_IMM41c_INST_WORD_POS_X,
                IMM64_IMM41c_VAL_POS_X
                );

      INS_IMM64(FixupVal,
                ((UINT32 *)Fixup + IMM64_SIGN_INST_WORD_X),
                IMM64_SIGN_SIZE_X,
                IMM64_SIGN_INST_WORD_POS_X,
                IMM64_SIGN_VAL_POS_X
                );

      //
      // Log the first 8 bytes of the patched bundle when a log buffer
      // was supplied, then advance the log pointer.
      //
      F64 = (UINT64 *) Fixup;
      if (*FixupData != NULL) {
        *FixupData = ALIGN_POINTER(*FixupData, sizeof(UINT64));
        *(UINT64 *)(*FixupData) = *F64;
        *FixupData = *FixupData + sizeof(UINT64);
      }
      break;

    default:
      return RETURN_UNSUPPORTED;
  }

  return RETURN_SUCCESS;
}
281 | \r | |
282 | RETURN_STATUS\r | |
283 | PeCoffLoaderRelocateX64Image (\r | |
284 | IN UINT16 *Reloc,\r | |
285 | IN OUT CHAR8 *Fixup, \r | |
286 | IN OUT CHAR8 **FixupData,\r | |
287 | IN UINT64 Adjust\r | |
288 | )\r | |
289 | /**\r | |
290 | Performs an x64 specific relocation fixup\r | |
291 | \r | |
292 | @param Reloc Pointer to the relocation record\r | |
293 | @param Fixup Pointer to the address to fix up\r | |
294 | @param FixupData Pointer to a buffer to log the fixups\r | |
295 | @param Adjust The offset to adjust the fixup\r | |
296 | \r | |
297 | @retval RETURN_SUCCESS Success to perform relocation\r | |
298 | @retval RETURN_UNSUPPORTED Unsupported.\r | |
299 | **/\r | |
300 | {\r | |
301 | UINT64 *F64;\r | |
302 | \r | |
303 | switch ((*Reloc) >> 12) {\r | |
304 | \r | |
305 | case EFI_IMAGE_REL_BASED_DIR64:\r | |
306 | F64 = (UINT64 *) Fixup;\r | |
307 | *F64 = *F64 + (UINT64) Adjust;\r | |
308 | if (*FixupData != NULL) {\r | |
309 | *FixupData = ALIGN_POINTER(*FixupData, sizeof(UINT64));\r | |
310 | *(UINT64 *)(*FixupData) = *F64;\r | |
311 | *FixupData = *FixupData + sizeof(UINT64);\r | |
312 | }\r | |
313 | break;\r | |
314 | \r | |
315 | default:\r | |
316 | return RETURN_UNSUPPORTED;\r | |
317 | }\r | |
318 | \r | |
319 | return RETURN_SUCCESS;\r | |
320 | }\r | |
321 | \r | |
40d841f6 LG |
322 | /**\r |
323 | Pass in a pointer to an ARM MOVT or MOVW immediate instruciton and \r | |
324 | return the immediate data encoded in the instruction\r | |
325 | \r | |
326 | @param Instruction Pointer to ARM MOVT or MOVW immediate instruction\r | |
327 | \r | |
328 | @return Immediate address encoded in the instruction\r | |
329 | \r | |
330 | **/\r | |
331 | UINT16\r | |
332 | ThumbMovtImmediateAddress (\r | |
333 | IN UINT16 *Instruction\r | |
334 | )\r | |
335 | {\r | |
336 | UINT32 Movt;\r | |
337 | UINT16 Address;\r | |
338 | \r | |
339 | // Thumb2 is two 16-bit instructions working together. Not a single 32-bit instruction\r | |
340 | // Example MOVT R0, #0 is 0x0000f2c0 or 0xf2c0 0x0000\r | |
341 | Movt = (*Instruction << 16) | (*(Instruction + 1)); \r | |
342 | \r | |
343 | // imm16 = imm4:i:imm3:imm8\r | |
344 | // imm4 -> Bit19:Bit16\r | |
345 | // i -> Bit26\r | |
346 | // imm3 -> Bit14:Bit12\r | |
347 | // imm8 -> Bit7:Bit0\r | |
348 | Address = (UINT16)(Movt & 0x000000ff); // imm8\r | |
349 | Address |= (UINT16)((Movt >> 4) & 0x0000f700); // imm4 imm3\r | |
350 | Address |= (((Movt & BIT26) != 0) ? BIT11 : 0); // i\r | |
351 | return Address;\r | |
352 | }\r | |
353 | \r | |
354 | \r | |
355 | /**\r | |
356 | Update an ARM MOVT or MOVW immediate instruction immediate data.\r | |
357 | \r | |
358 | @param Instruction Pointer to ARM MOVT or MOVW immediate instruction\r | |
359 | @param Address New addres to patch into the instruction\r | |
360 | **/\r | |
361 | VOID\r | |
362 | ThumbMovtImmediatePatch (\r | |
363 | IN OUT UINT16 *Instruction,\r | |
364 | IN UINT16 Address\r | |
365 | )\r | |
366 | {\r | |
367 | UINT16 Patch;\r | |
368 | \r | |
369 | // First 16-bit chunk of instruciton\r | |
370 | Patch = ((Address >> 12) & 0x000f); // imm4 \r | |
371 | Patch |= (((Address & BIT11) != 0) ? BIT10 : 0); // i\r | |
372 | *Instruction = (*Instruction & ~0x040f) | Patch;\r | |
373 | \r | |
374 | // Second 16-bit chunk of instruction\r | |
375 | Patch = Address & 0x000000ff; // imm8\r | |
376 | Patch |= ((Address << 4) & 0x00007000); // imm3\r | |
377 | Instruction++;\r | |
378 | *Instruction = (*Instruction & ~0x70ff) | Patch;\r | |
379 | }\r | |
380 | \r | |
da92f276 LG |
381 | /**\r |
382 | Pass in a pointer to an ARM MOVW/MOVT instruciton pair and \r | |
383 | return the immediate data encoded in the two` instruction\r | |
384 | \r | |
385 | @param Instructions Pointer to ARM MOVW/MOVT insturction pair\r | |
386 | \r | |
387 | @return Immediate address encoded in the instructions\r | |
388 | \r | |
389 | **/\r | |
390 | UINT32\r | |
391 | EFIAPI\r | |
392 | ThumbMovwMovtImmediateAddress (\r | |
393 | IN UINT16 *Instructions\r | |
394 | )\r | |
395 | {\r | |
396 | UINT16 *Word;\r | |
397 | UINT16 *Top;\r | |
398 | \r | |
399 | Word = Instructions; // MOVW\r | |
400 | Top = Word + 2; // MOVT\r | |
401 | \r | |
402 | return (ThumbMovtImmediateAddress (Top) << 16) + ThumbMovtImmediateAddress (Word);\r | |
403 | }\r | |
404 | \r | |
405 | \r | |
406 | /**\r | |
407 | Update an ARM MOVW/MOVT immediate instruction instruction pair.\r | |
408 | \r | |
409 | @param Instructions Pointer to ARM MOVW/MOVT instruction pair\r | |
410 | @param Address New addres to patch into the instructions\r | |
411 | **/\r | |
412 | VOID\r | |
413 | EFIAPI\r | |
414 | ThumbMovwMovtImmediatePatch (\r | |
415 | IN OUT UINT16 *Instructions,\r | |
416 | IN UINT32 Address\r | |
417 | )\r | |
418 | {\r | |
419 | UINT16 *Word;\r | |
420 | UINT16 *Top;\r | |
421 | \r | |
422 | Word = (UINT16 *)Instructions; // MOVW\r | |
423 | Top = Word + 2; // MOVT\r | |
424 | \r | |
425 | ThumbMovtImmediatePatch (Word, (UINT16)(Address & 0xffff));\r | |
426 | ThumbMovtImmediatePatch (Top, (UINT16)(Address >> 16));\r | |
427 | }\r | |
428 | \r | |
429 | \r | |
40d841f6 LG |
430 | /**\r |
431 | Performs an ARM-based specific relocation fixup and is a no-op on other\r | |
432 | instruction sets.\r | |
433 | \r | |
434 | @param Reloc Pointer to the relocation record.\r | |
435 | @param Fixup Pointer to the address to fix up.\r | |
436 | @param FixupData Pointer to a buffer to log the fixups.\r | |
437 | @param Adjust The offset to adjust the fixup.\r | |
438 | \r | |
439 | @return Status code.\r | |
440 | \r | |
441 | **/\r | |
442 | RETURN_STATUS\r | |
443 | PeCoffLoaderRelocateArmImage (\r | |
444 | IN UINT16 **Reloc,\r | |
445 | IN OUT CHAR8 *Fixup,\r | |
446 | IN OUT CHAR8 **FixupData,\r | |
447 | IN UINT64 Adjust\r | |
448 | )\r | |
449 | {\r | |
450 | UINT16 *Fixup16;\r | |
da92f276 | 451 | UINT32 FixupVal;\r |
40d841f6 | 452 | \r |
da92f276 | 453 | Fixup16 = (UINT16 *) Fixup;\r |
40d841f6 LG |
454 | \r |
455 | switch ((**Reloc) >> 12) {\r | |
da92f276 LG |
456 | \r |
457 | case EFI_IMAGE_REL_BASED_ARM_MOV32T:\r | |
458 | FixupVal = ThumbMovwMovtImmediateAddress (Fixup16) + (UINT32)Adjust;\r | |
459 | ThumbMovwMovtImmediatePatch (Fixup16, FixupVal);\r | |
460 | \r | |
461 | \r | |
40d841f6 | 462 | if (*FixupData != NULL) {\r |
da92f276 LG |
463 | *FixupData = ALIGN_POINTER(*FixupData, sizeof(UINT64));\r |
464 | *(UINT64 *)(*FixupData) = *Fixup16;\r | |
465 | CopyMem (*FixupData, Fixup16, sizeof (UINT64));\r | |
40d841f6 LG |
466 | }\r |
467 | break;\r | |
468 | \r | |
da92f276 LG |
469 | case EFI_IMAGE_REL_BASED_ARM_MOV32A:\r |
470 | // break omitted - ARM instruction encoding not implemented\r | |
40d841f6 LG |
471 | default:\r |
472 | return RETURN_UNSUPPORTED;\r | |
473 | }\r | |
474 | \r | |
475 | return RETURN_SUCCESS;\r | |
476 | }\r | |
4afd3d04 LG |
477 | \r |
478 | RETURN_STATUS\r | |
479 | PeCoffLoaderRelocateAArch64Image (\r | |
480 | IN UINT16 *Reloc,\r | |
481 | IN OUT CHAR8 *Fixup,\r | |
482 | IN OUT CHAR8 **FixupData,\r | |
483 | IN UINT64 Adjust\r | |
484 | )\r | |
485 | /**\r | |
486 | Performs an AArch64 specific relocation fixup\r | |
487 | \r | |
488 | @param Reloc Pointer to the relocation record\r | |
489 | @param Fixup Pointer to the address to fix up\r | |
490 | @param FixupData Pointer to a buffer to log the fixups\r | |
491 | @param Adjust The offset to adjust the fixup\r | |
492 | \r | |
493 | @retval RETURN_SUCCESS Success to perform relocation\r | |
494 | @retval RETURN_UNSUPPORTED Unsupported.\r | |
495 | **/\r | |
496 | {\r | |
497 | UINT64 *F64;\r | |
498 | \r | |
499 | switch ((*Reloc) >> 12) {\r | |
500 | \r | |
501 | case EFI_IMAGE_REL_BASED_DIR64:\r | |
502 | F64 = (UINT64 *) Fixup;\r | |
503 | *F64 = *F64 + (UINT64) Adjust;\r | |
504 | if (*FixupData != NULL) {\r | |
505 | *FixupData = ALIGN_POINTER(*FixupData, sizeof(UINT64));\r | |
506 | *(UINT64 *)(*FixupData) = *F64;\r | |
507 | *FixupData = *FixupData + sizeof(UINT64);\r | |
508 | }\r | |
509 | break;\r | |
510 | \r | |
511 | default:\r | |
512 | return RETURN_UNSUPPORTED;\r | |
513 | }\r | |
514 | \r | |
515 | return RETURN_SUCCESS;\r | |
516 | }\r |