/** @file
  IA32, X64 and IPF Specific relocation fixups

  Copyright (c) 2004 - 2014, Intel Corporation. All rights reserved.<BR>
  Portions Copyright (c) 2011 - 2013, ARM Ltd. All rights reserved.<BR>
  This program and the accompanying materials
  are licensed and made available under the terms and conditions of the BSD License
  which accompanies this distribution.  The full text of the license may be found at
  http://opensource.org/licenses/bsd-license.php

  THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
  WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.

--*/

16 | #include <Common/UefiBaseTypes.h>\r | |
17 | #include <IndustryStandard/PeImage.h>\r | |
18 | #include "PeCoffLib.h"\r | |
da92f276 | 19 | #include "CommonLib.h"\r |
4afd3d04 | 20 | #include "EfiUtilityMsgs.h"\r |
da92f276 | 21 | \r |
30fdf114 LG |
22 | \r |
//
// EXT_IMM64: read a Size-bit field from the 32-bit instruction word at
// Address, starting at bit InstPos, and OR it into Value at bit ValPos.
// Used to reassemble the 64-bit immediate of an IPF (Itanium) movl
// instruction from its instruction bundle.
//
#define EXT_IMM64(Value, Address, Size, InstPos, ValPos) \
  Value |= (((UINT64)((*(Address) >> InstPos) & (((UINT64)1 << Size) - 1))) << ValPos)

//
// INS_IMM64: write the Size-bit field of Value located at bit ValPos into
// the 32-bit instruction word at Address, starting at bit InstPos (the
// inverse of EXT_IMM64).
//
#define INS_IMM64(Value, Address, Size, InstPos, ValPos) \
  *(UINT32*)Address = (*(UINT32*)Address & ~(((1 << Size) - 1) << InstPos)) | \
          ((UINT32)((((UINT64)Value >> ValPos) & (((UINT64)1 << Size) - 1))) << InstPos)

//
// Field descriptors for the scattered pieces of an IPF movl IMM64 immediate.
// For each field:
//   *_INST_WORD_X     - which 32-bit word of the 16-byte bundle holds it
//   *_SIZE_X          - field width in bits
//   *_INST_WORD_POS_X - bit position of the field within that word
//   *_VAL_POS_X       - bit position of the field within the 64-bit value
//
#define IMM64_IMM7B_INST_WORD_X         3
#define IMM64_IMM7B_SIZE_X              7
#define IMM64_IMM7B_INST_WORD_POS_X     4
#define IMM64_IMM7B_VAL_POS_X           0

#define IMM64_IMM9D_INST_WORD_X         3
#define IMM64_IMM9D_SIZE_X              9
#define IMM64_IMM9D_INST_WORD_POS_X     18
#define IMM64_IMM9D_VAL_POS_X           7

#define IMM64_IMM5C_INST_WORD_X         3
#define IMM64_IMM5C_SIZE_X              5
#define IMM64_IMM5C_INST_WORD_POS_X     13
#define IMM64_IMM5C_VAL_POS_X           16

#define IMM64_IC_INST_WORD_X            3
#define IMM64_IC_SIZE_X                 1
#define IMM64_IC_INST_WORD_POS_X        12
#define IMM64_IC_VAL_POS_X              21

#define IMM64_IMM41a_INST_WORD_X        1
#define IMM64_IMM41a_SIZE_X             10
#define IMM64_IMM41a_INST_WORD_POS_X    14
#define IMM64_IMM41a_VAL_POS_X          22

#define IMM64_IMM41b_INST_WORD_X        1
#define IMM64_IMM41b_SIZE_X             8
#define IMM64_IMM41b_INST_WORD_POS_X    24
#define IMM64_IMM41b_VAL_POS_X          32

#define IMM64_IMM41c_INST_WORD_X        2
#define IMM64_IMM41c_SIZE_X             23
#define IMM64_IMM41c_INST_WORD_POS_X    0
#define IMM64_IMM41c_VAL_POS_X          40

#define IMM64_SIGN_INST_WORD_X          3
#define IMM64_SIGN_SIZE_X               1
#define IMM64_SIGN_INST_WORD_POS_X      27
#define IMM64_SIGN_VAL_POS_X            63
70 | RETURN_STATUS\r | |
71 | PeCoffLoaderRelocateIa32Image (\r | |
72 | IN UINT16 *Reloc,\r | |
73 | IN OUT CHAR8 *Fixup,\r | |
74 | IN OUT CHAR8 **FixupData,\r | |
75 | IN UINT64 Adjust\r | |
76 | )\r | |
77 | /*++\r | |
78 | \r | |
79 | Routine Description:\r | |
80 | \r | |
81 | Performs an IA-32 specific relocation fixup\r | |
82 | \r | |
83 | Arguments:\r | |
84 | \r | |
85 | Reloc - Pointer to the relocation record\r | |
86 | \r | |
87 | Fixup - Pointer to the address to fix up\r | |
88 | \r | |
89 | FixupData - Pointer to a buffer to log the fixups\r | |
90 | \r | |
91 | Adjust - The offset to adjust the fixup\r | |
92 | \r | |
93 | Returns:\r | |
94 | \r | |
95 | EFI_UNSUPPORTED - Unsupported now\r | |
96 | \r | |
97 | --*/\r | |
98 | {\r | |
99 | return RETURN_UNSUPPORTED;\r | |
100 | }\r | |
101 | \r | |
RETURN_STATUS
PeCoffLoaderRelocateIpfImage (
  IN UINT16      *Reloc,
  IN OUT CHAR8   *Fixup, 
  IN OUT CHAR8   **FixupData,
  IN UINT64      Adjust
  )
/*++

Routine Description:

  Performs an Itanium-based specific relocation fixup.  Handles 64-bit
  direct relocations and the IA64 IMM64 relocation, which patches the
  64-bit immediate of a movl instruction scattered across a 16-byte bundle.

Arguments:

  Reloc      - Pointer to the relocation record (type in the high 4 bits)

  Fixup      - Pointer to the address to fix up

  FixupData  - Pointer to a buffer to log the fixups (may point to NULL to
               skip logging); advanced past each logged entry

  Adjust     - The offset to adjust the fixup

Returns:

  RETURN_SUCCESS      - the fixup was applied
  RETURN_UNSUPPORTED  - unrecognized relocation type

--*/
{
  UINT64      *F64;
  UINT64      FixupVal;

  //
  // The relocation type lives in the top 4 bits of the 16-bit record.
  //
  switch ((*Reloc) >> 12) {

    case EFI_IMAGE_REL_BASED_DIR64:
      F64 = (UINT64 *) Fixup;
      *F64 = *F64 + (UINT64) Adjust;
      if (*FixupData != NULL) {
        *FixupData = ALIGN_POINTER(*FixupData, sizeof(UINT64));
        *(UINT64 *)(*FixupData) = *F64;
        *FixupData = *FixupData + sizeof(UINT64);
      }
      break;

    case EFI_IMAGE_REL_BASED_IA64_IMM64:

      //
      // Align it to bundle address before fixing up the
      // 64-bit immediate value of the movl instruction.
      //

      Fixup = (CHAR8 *)((UINTN) Fixup & (UINTN) ~(15));
      FixupVal = (UINT64)0;

      // 
      // Extract the lower 32 bits of IMM64 from bundle
      // (imm7b, imm9d, imm5c, ic, imm41a fields; the upper fields are
      // re-inserted below after the adjust is applied).
      //
      EXT_IMM64(FixupVal,
                (UINT32 *)Fixup + IMM64_IMM7B_INST_WORD_X,
                IMM64_IMM7B_SIZE_X,
                IMM64_IMM7B_INST_WORD_POS_X,
                IMM64_IMM7B_VAL_POS_X
                );

      EXT_IMM64(FixupVal,
                (UINT32 *)Fixup + IMM64_IMM9D_INST_WORD_X,
                IMM64_IMM9D_SIZE_X,
                IMM64_IMM9D_INST_WORD_POS_X,
                IMM64_IMM9D_VAL_POS_X
                );

      EXT_IMM64(FixupVal,
                (UINT32 *)Fixup + IMM64_IMM5C_INST_WORD_X,
                IMM64_IMM5C_SIZE_X,
                IMM64_IMM5C_INST_WORD_POS_X,
                IMM64_IMM5C_VAL_POS_X
                );

      EXT_IMM64(FixupVal,
                (UINT32 *)Fixup + IMM64_IC_INST_WORD_X,
                IMM64_IC_SIZE_X,
                IMM64_IC_INST_WORD_POS_X,
                IMM64_IC_VAL_POS_X
                );

      EXT_IMM64(FixupVal,
                (UINT32 *)Fixup + IMM64_IMM41a_INST_WORD_X,
                IMM64_IMM41a_SIZE_X,
                IMM64_IMM41a_INST_WORD_POS_X,
                IMM64_IMM41a_VAL_POS_X
                );

      // 
      // Update 64-bit address
      //
      FixupVal += Adjust;

      // 
      // Insert IMM64 into bundle (all fields, including the high ones that
      // were implicitly zero before the adjust).
      //
      INS_IMM64(FixupVal,
                ((UINT32 *)Fixup + IMM64_IMM7B_INST_WORD_X),
                IMM64_IMM7B_SIZE_X,
                IMM64_IMM7B_INST_WORD_POS_X,
                IMM64_IMM7B_VAL_POS_X
                );

      INS_IMM64(FixupVal,
                ((UINT32 *)Fixup + IMM64_IMM9D_INST_WORD_X),
                IMM64_IMM9D_SIZE_X,
                IMM64_IMM9D_INST_WORD_POS_X,
                IMM64_IMM9D_VAL_POS_X
                );

      INS_IMM64(FixupVal,
                ((UINT32 *)Fixup + IMM64_IMM5C_INST_WORD_X),
                IMM64_IMM5C_SIZE_X,
                IMM64_IMM5C_INST_WORD_POS_X,
                IMM64_IMM5C_VAL_POS_X
                );

      INS_IMM64(FixupVal,
                ((UINT32 *)Fixup + IMM64_IC_INST_WORD_X),
                IMM64_IC_SIZE_X,
                IMM64_IC_INST_WORD_POS_X,
                IMM64_IC_VAL_POS_X
                );

      INS_IMM64(FixupVal,
                ((UINT32 *)Fixup + IMM64_IMM41a_INST_WORD_X),
                IMM64_IMM41a_SIZE_X,
                IMM64_IMM41a_INST_WORD_POS_X,
                IMM64_IMM41a_VAL_POS_X
                );

      INS_IMM64(FixupVal,
                ((UINT32 *)Fixup + IMM64_IMM41b_INST_WORD_X),
                IMM64_IMM41b_SIZE_X,
                IMM64_IMM41b_INST_WORD_POS_X,
                IMM64_IMM41b_VAL_POS_X
                );

      INS_IMM64(FixupVal,
                ((UINT32 *)Fixup + IMM64_IMM41c_INST_WORD_X),
                IMM64_IMM41c_SIZE_X,
                IMM64_IMM41c_INST_WORD_POS_X,
                IMM64_IMM41c_VAL_POS_X
                );

      INS_IMM64(FixupVal,
                ((UINT32 *)Fixup + IMM64_SIGN_INST_WORD_X),
                IMM64_SIGN_SIZE_X,
                IMM64_SIGN_INST_WORD_POS_X,
                IMM64_SIGN_VAL_POS_X
                );

      //
      // Log the first 64 bits of the patched bundle, mirroring the DIR64 path.
      //
      F64 = (UINT64 *) Fixup;
      if (*FixupData != NULL) {
        *FixupData = ALIGN_POINTER(*FixupData, sizeof(UINT64));
        *(UINT64 *)(*FixupData) = *F64;
        *FixupData = *FixupData + sizeof(UINT64);
      }
      break;

    default:
      return RETURN_UNSUPPORTED;
  }

  return RETURN_SUCCESS;
}
272 | \r | |
273 | RETURN_STATUS\r | |
274 | PeCoffLoaderRelocateX64Image (\r | |
275 | IN UINT16 *Reloc,\r | |
276 | IN OUT CHAR8 *Fixup, \r | |
277 | IN OUT CHAR8 **FixupData,\r | |
278 | IN UINT64 Adjust\r | |
279 | )\r | |
280 | /**\r | |
281 | Performs an x64 specific relocation fixup\r | |
282 | \r | |
283 | @param Reloc Pointer to the relocation record\r | |
284 | @param Fixup Pointer to the address to fix up\r | |
285 | @param FixupData Pointer to a buffer to log the fixups\r | |
286 | @param Adjust The offset to adjust the fixup\r | |
287 | \r | |
288 | @retval RETURN_SUCCESS Success to perform relocation\r | |
289 | @retval RETURN_UNSUPPORTED Unsupported.\r | |
290 | **/\r | |
291 | {\r | |
292 | UINT64 *F64;\r | |
293 | \r | |
294 | switch ((*Reloc) >> 12) {\r | |
295 | \r | |
296 | case EFI_IMAGE_REL_BASED_DIR64:\r | |
297 | F64 = (UINT64 *) Fixup;\r | |
298 | *F64 = *F64 + (UINT64) Adjust;\r | |
299 | if (*FixupData != NULL) {\r | |
300 | *FixupData = ALIGN_POINTER(*FixupData, sizeof(UINT64));\r | |
301 | *(UINT64 *)(*FixupData) = *F64;\r | |
302 | *FixupData = *FixupData + sizeof(UINT64);\r | |
303 | }\r | |
304 | break;\r | |
305 | \r | |
306 | default:\r | |
307 | return RETURN_UNSUPPORTED;\r | |
308 | }\r | |
309 | \r | |
310 | return RETURN_SUCCESS;\r | |
311 | }\r | |
312 | \r | |
40d841f6 LG |
313 | /**\r |
314 | Pass in a pointer to an ARM MOVT or MOVW immediate instruciton and \r | |
315 | return the immediate data encoded in the instruction\r | |
316 | \r | |
317 | @param Instruction Pointer to ARM MOVT or MOVW immediate instruction\r | |
318 | \r | |
319 | @return Immediate address encoded in the instruction\r | |
320 | \r | |
321 | **/\r | |
322 | UINT16\r | |
323 | ThumbMovtImmediateAddress (\r | |
324 | IN UINT16 *Instruction\r | |
325 | )\r | |
326 | {\r | |
327 | UINT32 Movt;\r | |
328 | UINT16 Address;\r | |
329 | \r | |
330 | // Thumb2 is two 16-bit instructions working together. Not a single 32-bit instruction\r | |
331 | // Example MOVT R0, #0 is 0x0000f2c0 or 0xf2c0 0x0000\r | |
332 | Movt = (*Instruction << 16) | (*(Instruction + 1)); \r | |
333 | \r | |
334 | // imm16 = imm4:i:imm3:imm8\r | |
335 | // imm4 -> Bit19:Bit16\r | |
336 | // i -> Bit26\r | |
337 | // imm3 -> Bit14:Bit12\r | |
338 | // imm8 -> Bit7:Bit0\r | |
339 | Address = (UINT16)(Movt & 0x000000ff); // imm8\r | |
340 | Address |= (UINT16)((Movt >> 4) & 0x0000f700); // imm4 imm3\r | |
341 | Address |= (((Movt & BIT26) != 0) ? BIT11 : 0); // i\r | |
342 | return Address;\r | |
343 | }\r | |
344 | \r | |
345 | \r | |
346 | /**\r | |
347 | Update an ARM MOVT or MOVW immediate instruction immediate data.\r | |
348 | \r | |
349 | @param Instruction Pointer to ARM MOVT or MOVW immediate instruction\r | |
350 | @param Address New addres to patch into the instruction\r | |
351 | **/\r | |
352 | VOID\r | |
353 | ThumbMovtImmediatePatch (\r | |
354 | IN OUT UINT16 *Instruction,\r | |
355 | IN UINT16 Address\r | |
356 | )\r | |
357 | {\r | |
358 | UINT16 Patch;\r | |
359 | \r | |
360 | // First 16-bit chunk of instruciton\r | |
361 | Patch = ((Address >> 12) & 0x000f); // imm4 \r | |
362 | Patch |= (((Address & BIT11) != 0) ? BIT10 : 0); // i\r | |
363 | *Instruction = (*Instruction & ~0x040f) | Patch;\r | |
364 | \r | |
365 | // Second 16-bit chunk of instruction\r | |
366 | Patch = Address & 0x000000ff; // imm8\r | |
367 | Patch |= ((Address << 4) & 0x00007000); // imm3\r | |
368 | Instruction++;\r | |
369 | *Instruction = (*Instruction & ~0x70ff) | Patch;\r | |
370 | }\r | |
371 | \r | |
da92f276 LG |
372 | /**\r |
373 | Pass in a pointer to an ARM MOVW/MOVT instruciton pair and \r | |
374 | return the immediate data encoded in the two` instruction\r | |
375 | \r | |
376 | @param Instructions Pointer to ARM MOVW/MOVT insturction pair\r | |
377 | \r | |
378 | @return Immediate address encoded in the instructions\r | |
379 | \r | |
380 | **/\r | |
381 | UINT32\r | |
382 | EFIAPI\r | |
383 | ThumbMovwMovtImmediateAddress (\r | |
384 | IN UINT16 *Instructions\r | |
385 | )\r | |
386 | {\r | |
387 | UINT16 *Word;\r | |
388 | UINT16 *Top;\r | |
389 | \r | |
390 | Word = Instructions; // MOVW\r | |
391 | Top = Word + 2; // MOVT\r | |
392 | \r | |
393 | return (ThumbMovtImmediateAddress (Top) << 16) + ThumbMovtImmediateAddress (Word);\r | |
394 | }\r | |
395 | \r | |
396 | \r | |
397 | /**\r | |
398 | Update an ARM MOVW/MOVT immediate instruction instruction pair.\r | |
399 | \r | |
400 | @param Instructions Pointer to ARM MOVW/MOVT instruction pair\r | |
401 | @param Address New addres to patch into the instructions\r | |
402 | **/\r | |
403 | VOID\r | |
404 | EFIAPI\r | |
405 | ThumbMovwMovtImmediatePatch (\r | |
406 | IN OUT UINT16 *Instructions,\r | |
407 | IN UINT32 Address\r | |
408 | )\r | |
409 | {\r | |
410 | UINT16 *Word;\r | |
411 | UINT16 *Top;\r | |
412 | \r | |
413 | Word = (UINT16 *)Instructions; // MOVW\r | |
414 | Top = Word + 2; // MOVT\r | |
415 | \r | |
416 | ThumbMovtImmediatePatch (Word, (UINT16)(Address & 0xffff));\r | |
417 | ThumbMovtImmediatePatch (Top, (UINT16)(Address >> 16));\r | |
418 | }\r | |
419 | \r | |
420 | \r | |
40d841f6 LG |
421 | /**\r |
422 | Performs an ARM-based specific relocation fixup and is a no-op on other\r | |
423 | instruction sets.\r | |
424 | \r | |
425 | @param Reloc Pointer to the relocation record.\r | |
426 | @param Fixup Pointer to the address to fix up.\r | |
427 | @param FixupData Pointer to a buffer to log the fixups.\r | |
428 | @param Adjust The offset to adjust the fixup.\r | |
429 | \r | |
430 | @return Status code.\r | |
431 | \r | |
432 | **/\r | |
433 | RETURN_STATUS\r | |
434 | PeCoffLoaderRelocateArmImage (\r | |
435 | IN UINT16 **Reloc,\r | |
436 | IN OUT CHAR8 *Fixup,\r | |
437 | IN OUT CHAR8 **FixupData,\r | |
438 | IN UINT64 Adjust\r | |
439 | )\r | |
440 | {\r | |
441 | UINT16 *Fixup16;\r | |
da92f276 | 442 | UINT32 FixupVal;\r |
40d841f6 | 443 | \r |
da92f276 | 444 | Fixup16 = (UINT16 *) Fixup;\r |
40d841f6 LG |
445 | \r |
446 | switch ((**Reloc) >> 12) {\r | |
da92f276 LG |
447 | \r |
448 | case EFI_IMAGE_REL_BASED_ARM_MOV32T:\r | |
449 | FixupVal = ThumbMovwMovtImmediateAddress (Fixup16) + (UINT32)Adjust;\r | |
450 | ThumbMovwMovtImmediatePatch (Fixup16, FixupVal);\r | |
451 | \r | |
452 | \r | |
40d841f6 | 453 | if (*FixupData != NULL) {\r |
da92f276 LG |
454 | *FixupData = ALIGN_POINTER(*FixupData, sizeof(UINT64));\r |
455 | *(UINT64 *)(*FixupData) = *Fixup16;\r | |
456 | CopyMem (*FixupData, Fixup16, sizeof (UINT64));\r | |
40d841f6 LG |
457 | }\r |
458 | break;\r | |
459 | \r | |
da92f276 LG |
460 | case EFI_IMAGE_REL_BASED_ARM_MOV32A:\r |
461 | // break omitted - ARM instruction encoding not implemented\r | |
40d841f6 LG |
462 | default:\r |
463 | return RETURN_UNSUPPORTED;\r | |
464 | }\r | |
465 | \r | |
466 | return RETURN_SUCCESS;\r | |
467 | }\r | |
4afd3d04 LG |
468 | \r |
469 | RETURN_STATUS\r | |
470 | PeCoffLoaderRelocateAArch64Image (\r | |
471 | IN UINT16 *Reloc,\r | |
472 | IN OUT CHAR8 *Fixup,\r | |
473 | IN OUT CHAR8 **FixupData,\r | |
474 | IN UINT64 Adjust\r | |
475 | )\r | |
476 | /**\r | |
477 | Performs an AArch64 specific relocation fixup\r | |
478 | \r | |
479 | @param Reloc Pointer to the relocation record\r | |
480 | @param Fixup Pointer to the address to fix up\r | |
481 | @param FixupData Pointer to a buffer to log the fixups\r | |
482 | @param Adjust The offset to adjust the fixup\r | |
483 | \r | |
484 | @retval RETURN_SUCCESS Success to perform relocation\r | |
485 | @retval RETURN_UNSUPPORTED Unsupported.\r | |
486 | **/\r | |
487 | {\r | |
488 | UINT64 *F64;\r | |
489 | \r | |
490 | switch ((*Reloc) >> 12) {\r | |
491 | \r | |
492 | case EFI_IMAGE_REL_BASED_DIR64:\r | |
493 | F64 = (UINT64 *) Fixup;\r | |
494 | *F64 = *F64 + (UINT64) Adjust;\r | |
495 | if (*FixupData != NULL) {\r | |
496 | *FixupData = ALIGN_POINTER(*FixupData, sizeof(UINT64));\r | |
497 | *(UINT64 *)(*FixupData) = *F64;\r | |
498 | *FixupData = *FixupData + sizeof(UINT64);\r | |
499 | }\r | |
500 | break;\r | |
501 | \r | |
502 | default:\r | |
503 | return RETURN_UNSUPPORTED;\r | |
504 | }\r | |
505 | \r | |
506 | return RETURN_SUCCESS;\r | |
507 | }\r |