/** @file

Copyright (c) 2004 - 2008, Intel Corporation. All rights reserved.<BR>
Portions Copyright (c) 2011 - 2013, ARM Ltd. All rights reserved.<BR>
This program and the accompanying materials
are licensed and made available under the terms and conditions of the BSD License
which accompanies this distribution.  The full text of the license may be found at
http://opensource.org/licenses/bsd-license.php

THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.

Abstract:

  IA32, X64, IPF, ARM and AArch64 Specific relocation fixups

**/
#include <Common/UefiBaseTypes.h>
#include <IndustryStandard/PeImage.h>
#include "PeCoffLib.h"
#include "CommonLib.h"
#include "EfiUtilityMsgs.h"
//
// Extract one bit field of an IA-64 IMM64 immediate from an instruction
// bundle word and OR it into the accumulated 64-bit value.
//   Value   - 64-bit accumulator the extracted field is OR-ed into
//   Address - pointer to the 32-bit instruction word holding the field
//   Size    - width of the field in bits
//   InstPos - bit position of the field within the instruction word
//   ValPos  - bit position of the field within the 64-bit value
// All parameters are parenthesized in the expansion; each is evaluated once.
//
#define EXT_IMM64(Value, Address, Size, InstPos, ValPos)  \
    (Value) |= (((UINT64)((*(Address) >> (InstPos)) & (((UINT64)1 << (Size)) - 1))) << (ValPos))
//
// Insert one bit field of a 64-bit value into an IA-64 instruction bundle
// word, preserving all bits of the word outside the field.
//   Value   - 64-bit value supplying the field
//   Address - pointer to the 32-bit instruction word receiving the field
//   Size    - width of the field in bits
//   InstPos - bit position of the field within the instruction word
//   ValPos  - bit position of the field within the 64-bit value
// The mask constant is (UINT32)1 so the shift is unsigned even for the
// widest field (23 bits).  Address is evaluated twice; call sites pass a
// simple pointer expression.
//
#define INS_IMM64(Value, Address, Size, InstPos, ValPos)  \
    *(UINT32 *)(Address) = (*(UINT32 *)(Address) & ~((((UINT32)1 << (Size)) - 1) << (InstPos))) | \
          ((UINT32)((((UINT64)(Value) >> (ValPos)) & (((UINT64)1 << (Size)) - 1))) << (InstPos))
//
// Field definitions for the 64-bit immediate (IMM64) encoded in an IA-64
// "movl" instruction bundle.  Each field is described by four values:
//   *_INST_WORD_X     - which 32-bit word of the 16-byte bundle holds it
//   *_SIZE_X          - width of the field in bits
//   *_INST_WORD_POS_X - bit position of the field within that word
//   *_VAL_POS_X       - bit position of the field within the 64-bit value
// The eight field widths (7+9+5+1+10+8+23+1) sum to 64 bits.
//
#define IMM64_IMM7B_INST_WORD_X         3
#define IMM64_IMM7B_SIZE_X              7
#define IMM64_IMM7B_INST_WORD_POS_X     4
#define IMM64_IMM7B_VAL_POS_X           0

#define IMM64_IMM9D_INST_WORD_X         3
#define IMM64_IMM9D_SIZE_X              9
#define IMM64_IMM9D_INST_WORD_POS_X     18
#define IMM64_IMM9D_VAL_POS_X           7

#define IMM64_IMM5C_INST_WORD_X         3
#define IMM64_IMM5C_SIZE_X              5
#define IMM64_IMM5C_INST_WORD_POS_X     13
#define IMM64_IMM5C_VAL_POS_X           16

#define IMM64_IC_INST_WORD_X            3
#define IMM64_IC_SIZE_X                 1
#define IMM64_IC_INST_WORD_POS_X        12
#define IMM64_IC_VAL_POS_X              21

#define IMM64_IMM41a_INST_WORD_X        1
#define IMM64_IMM41a_SIZE_X             10
#define IMM64_IMM41a_INST_WORD_POS_X    14
#define IMM64_IMM41a_VAL_POS_X          22

#define IMM64_IMM41b_INST_WORD_X        1
#define IMM64_IMM41b_SIZE_X             8
#define IMM64_IMM41b_INST_WORD_POS_X    24
#define IMM64_IMM41b_VAL_POS_X          32

#define IMM64_IMM41c_INST_WORD_X        2
#define IMM64_IMM41c_SIZE_X             23
#define IMM64_IMM41c_INST_WORD_POS_X    0
#define IMM64_IMM41c_VAL_POS_X          40

#define IMM64_SIGN_INST_WORD_X          3
#define IMM64_SIGN_SIZE_X               1
#define IMM64_SIGN_INST_WORD_POS_X      27
#define IMM64_SIGN_VAL_POS_X            63
80 PeCoffLoaderRelocateIa32Image (
83 IN OUT CHAR8
**FixupData
,
90 Performs an IA-32 specific relocation fixup
94 Reloc - Pointer to the relocation record
96 Fixup - Pointer to the address to fix up
98 FixupData - Pointer to a buffer to log the fixups
100 Adjust - The offset to adjust the fixup
104 EFI_UNSUPPORTED - Unsupported now
108 return RETURN_UNSUPPORTED
;
112 PeCoffLoaderRelocateIpfImage (
115 IN OUT CHAR8
**FixupData
,
122 Performs an Itanium-based specific relocation fixup
126 Reloc - Pointer to the relocation record
128 Fixup - Pointer to the address to fix up
130 FixupData - Pointer to a buffer to log the fixups
132 Adjust - The offset to adjust the fixup
143 switch ((*Reloc
) >> 12) {
145 case EFI_IMAGE_REL_BASED_DIR64
:
146 F64
= (UINT64
*) Fixup
;
147 *F64
= *F64
+ (UINT64
) Adjust
;
148 if (*FixupData
!= NULL
) {
149 *FixupData
= ALIGN_POINTER(*FixupData
, sizeof(UINT64
));
150 *(UINT64
*)(*FixupData
) = *F64
;
151 *FixupData
= *FixupData
+ sizeof(UINT64
);
155 case EFI_IMAGE_REL_BASED_IA64_IMM64
:
158 // Align it to bundle address before fixing up the
159 // 64-bit immediate value of the movl instruction.
162 Fixup
= (CHAR8
*)((UINTN
) Fixup
& (UINTN
) ~(15));
163 FixupVal
= (UINT64
)0;
166 // Extract the lower 32 bits of IMM64 from bundle
169 (UINT32
*)Fixup
+ IMM64_IMM7B_INST_WORD_X
,
171 IMM64_IMM7B_INST_WORD_POS_X
,
172 IMM64_IMM7B_VAL_POS_X
176 (UINT32
*)Fixup
+ IMM64_IMM9D_INST_WORD_X
,
178 IMM64_IMM9D_INST_WORD_POS_X
,
179 IMM64_IMM9D_VAL_POS_X
183 (UINT32
*)Fixup
+ IMM64_IMM5C_INST_WORD_X
,
185 IMM64_IMM5C_INST_WORD_POS_X
,
186 IMM64_IMM5C_VAL_POS_X
190 (UINT32
*)Fixup
+ IMM64_IC_INST_WORD_X
,
192 IMM64_IC_INST_WORD_POS_X
,
197 (UINT32
*)Fixup
+ IMM64_IMM41a_INST_WORD_X
,
199 IMM64_IMM41a_INST_WORD_POS_X
,
200 IMM64_IMM41a_VAL_POS_X
204 // Update 64-bit address
209 // Insert IMM64 into bundle
212 ((UINT32
*)Fixup
+ IMM64_IMM7B_INST_WORD_X
),
214 IMM64_IMM7B_INST_WORD_POS_X
,
215 IMM64_IMM7B_VAL_POS_X
219 ((UINT32
*)Fixup
+ IMM64_IMM9D_INST_WORD_X
),
221 IMM64_IMM9D_INST_WORD_POS_X
,
222 IMM64_IMM9D_VAL_POS_X
226 ((UINT32
*)Fixup
+ IMM64_IMM5C_INST_WORD_X
),
228 IMM64_IMM5C_INST_WORD_POS_X
,
229 IMM64_IMM5C_VAL_POS_X
233 ((UINT32
*)Fixup
+ IMM64_IC_INST_WORD_X
),
235 IMM64_IC_INST_WORD_POS_X
,
240 ((UINT32
*)Fixup
+ IMM64_IMM41a_INST_WORD_X
),
242 IMM64_IMM41a_INST_WORD_POS_X
,
243 IMM64_IMM41a_VAL_POS_X
247 ((UINT32
*)Fixup
+ IMM64_IMM41b_INST_WORD_X
),
249 IMM64_IMM41b_INST_WORD_POS_X
,
250 IMM64_IMM41b_VAL_POS_X
254 ((UINT32
*)Fixup
+ IMM64_IMM41c_INST_WORD_X
),
256 IMM64_IMM41c_INST_WORD_POS_X
,
257 IMM64_IMM41c_VAL_POS_X
261 ((UINT32
*)Fixup
+ IMM64_SIGN_INST_WORD_X
),
263 IMM64_SIGN_INST_WORD_POS_X
,
267 F64
= (UINT64
*) Fixup
;
268 if (*FixupData
!= NULL
) {
269 *FixupData
= ALIGN_POINTER(*FixupData
, sizeof(UINT64
));
270 *(UINT64
*)(*FixupData
) = *F64
;
271 *FixupData
= *FixupData
+ sizeof(UINT64
);
276 return RETURN_UNSUPPORTED
;
279 return RETURN_SUCCESS
;
283 PeCoffLoaderRelocateX64Image (
286 IN OUT CHAR8
**FixupData
,
290 Performs an x64 specific relocation fixup
292 @param Reloc Pointer to the relocation record
293 @param Fixup Pointer to the address to fix up
294 @param FixupData Pointer to a buffer to log the fixups
295 @param Adjust The offset to adjust the fixup
297 @retval RETURN_SUCCESS Success to perform relocation
298 @retval RETURN_UNSUPPORTED Unsupported.
303 switch ((*Reloc
) >> 12) {
305 case EFI_IMAGE_REL_BASED_DIR64
:
306 F64
= (UINT64
*) Fixup
;
307 *F64
= *F64
+ (UINT64
) Adjust
;
308 if (*FixupData
!= NULL
) {
309 *FixupData
= ALIGN_POINTER(*FixupData
, sizeof(UINT64
));
310 *(UINT64
*)(*FixupData
) = *F64
;
311 *FixupData
= *FixupData
+ sizeof(UINT64
);
316 return RETURN_UNSUPPORTED
;
319 return RETURN_SUCCESS
;
323 Pass in a pointer to an ARM MOVT or MOVW immediate instruciton and
324 return the immediate data encoded in the instruction
326 @param Instruction Pointer to ARM MOVT or MOVW immediate instruction
328 @return Immediate address encoded in the instruction
332 ThumbMovtImmediateAddress (
333 IN UINT16
*Instruction
339 // Thumb2 is two 16-bit instructions working together. Not a single 32-bit instruction
340 // Example MOVT R0, #0 is 0x0000f2c0 or 0xf2c0 0x0000
341 Movt
= (*Instruction
<< 16) | (*(Instruction
+ 1));
343 // imm16 = imm4:i:imm3:imm8
344 // imm4 -> Bit19:Bit16
346 // imm3 -> Bit14:Bit12
348 Address
= (UINT16
)(Movt
& 0x000000ff); // imm8
349 Address
|= (UINT16
)((Movt
>> 4) & 0x0000f700); // imm4 imm3
350 Address
|= (((Movt
& BIT26
) != 0) ? BIT11
: 0); // i
356 Update an ARM MOVT or MOVW immediate instruction immediate data.
358 @param Instruction Pointer to ARM MOVT or MOVW immediate instruction
359 @param Address New addres to patch into the instruction
362 ThumbMovtImmediatePatch (
363 IN OUT UINT16
*Instruction
,
369 // First 16-bit chunk of instruciton
370 Patch
= ((Address
>> 12) & 0x000f); // imm4
371 Patch
|= (((Address
& BIT11
) != 0) ? BIT10
: 0); // i
372 *Instruction
= (*Instruction
& ~0x040f) | Patch
;
374 // Second 16-bit chunk of instruction
375 Patch
= Address
& 0x000000ff; // imm8
376 Patch
|= ((Address
<< 4) & 0x00007000); // imm3
378 *Instruction
= (*Instruction
& ~0x70ff) | Patch
;
382 Pass in a pointer to an ARM MOVW/MOVT instruciton pair and
383 return the immediate data encoded in the two` instruction
385 @param Instructions Pointer to ARM MOVW/MOVT insturction pair
387 @return Immediate address encoded in the instructions
392 ThumbMovwMovtImmediateAddress (
393 IN UINT16
*Instructions
399 Word
= Instructions
; // MOVW
400 Top
= Word
+ 2; // MOVT
402 return (ThumbMovtImmediateAddress (Top
) << 16) + ThumbMovtImmediateAddress (Word
);
407 Update an ARM MOVW/MOVT immediate instruction instruction pair.
409 @param Instructions Pointer to ARM MOVW/MOVT instruction pair
410 @param Address New addres to patch into the instructions
414 ThumbMovwMovtImmediatePatch (
415 IN OUT UINT16
*Instructions
,
422 Word
= (UINT16
*)Instructions
; // MOVW
423 Top
= Word
+ 2; // MOVT
425 ThumbMovtImmediatePatch (Word
, (UINT16
)(Address
& 0xffff));
426 ThumbMovtImmediatePatch (Top
, (UINT16
)(Address
>> 16));
431 Performs an ARM-based specific relocation fixup and is a no-op on other
434 @param Reloc Pointer to the relocation record.
435 @param Fixup Pointer to the address to fix up.
436 @param FixupData Pointer to a buffer to log the fixups.
437 @param Adjust The offset to adjust the fixup.
443 PeCoffLoaderRelocateArmImage (
446 IN OUT CHAR8
**FixupData
,
453 Fixup16
= (UINT16
*) Fixup
;
455 switch ((**Reloc
) >> 12) {
457 case EFI_IMAGE_REL_BASED_ARM_MOV32T
:
458 FixupVal
= ThumbMovwMovtImmediateAddress (Fixup16
) + (UINT32
)Adjust
;
459 ThumbMovwMovtImmediatePatch (Fixup16
, FixupVal
);
462 if (*FixupData
!= NULL
) {
463 *FixupData
= ALIGN_POINTER(*FixupData
, sizeof(UINT64
));
464 *(UINT64
*)(*FixupData
) = *Fixup16
;
465 CopyMem (*FixupData
, Fixup16
, sizeof (UINT64
));
469 case EFI_IMAGE_REL_BASED_ARM_MOV32A
:
470 // break omitted - ARM instruction encoding not implemented
472 return RETURN_UNSUPPORTED
;
475 return RETURN_SUCCESS
;
479 PeCoffLoaderRelocateAArch64Image (
482 IN OUT CHAR8
**FixupData
,
486 Performs an AArch64 specific relocation fixup
488 @param Reloc Pointer to the relocation record
489 @param Fixup Pointer to the address to fix up
490 @param FixupData Pointer to a buffer to log the fixups
491 @param Adjust The offset to adjust the fixup
493 @retval RETURN_SUCCESS Success to perform relocation
494 @retval RETURN_UNSUPPORTED Unsupported.
499 switch ((*Reloc
) >> 12) {
501 case EFI_IMAGE_REL_BASED_DIR64
:
502 F64
= (UINT64
*) Fixup
;
503 *F64
= *F64
+ (UINT64
) Adjust
;
504 if (*FixupData
!= NULL
) {
505 *FixupData
= ALIGN_POINTER(*FixupData
, sizeof(UINT64
));
506 *(UINT64
*)(*FixupData
) = *F64
;
507 *FixupData
= *FixupData
+ sizeof(UINT64
);
512 return RETURN_UNSUPPORTED
;
515 return RETURN_SUCCESS
;