/** @file\r
+IA32 and X64 Specific relocation fixups\r
\r
-Copyright (c) 2004 - 2008, Intel Corporation. All rights reserved.<BR>\r
-This program and the accompanying materials \r
-are licensed and made available under the terms and conditions of the BSD License \r
-which accompanies this distribution. The full text of the license may be found at \r
-http://opensource.org/licenses/bsd-license.php \r
- \r
-THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, \r
-WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. \r
+Copyright (c) 2004 - 2018, Intel Corporation. All rights reserved.<BR>\r
+Portions Copyright (c) 2011 - 2013, ARM Ltd. All rights reserved.<BR>\r
+This program and the accompanying materials\r
+are licensed and made available under the terms and conditions of the BSD License\r
+which accompanies this distribution. The full text of the license may be found at\r
+http://opensource.org/licenses/bsd-license.php\r
\r
-Module Name:\r
-\r
- PeCoffLoaderEx.c\r
-\r
-Abstract:\r
-\r
- IA32, X64 and IPF Specific relocation fixups\r
-\r
-Revision History\r
+THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
+WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
\r
--*/\r
\r
#include <IndustryStandard/PeImage.h>\r
#include "PeCoffLib.h"\r
#include "CommonLib.h"\r
+#include "EfiUtilityMsgs.h"\r
\r
\r
#define EXT_IMM64(Value, Address, Size, InstPos, ValPos) \
  Value |= (((UINT64)((*(Address) >> InstPos) & (((UINT64)1 << Size) - 1))) << ValPos)

#define INS_IMM64(Value, Address, Size, InstPos, ValPos) \
  *(UINT32*)Address = (*(UINT32*)Address & ~(((1 << Size) - 1) << InstPos)) | \
    ((UINT32)((((UINT64)Value >> ValPos) & (((UINT64)1 << Size) - 1))) << InstPos)
\r
-#define IMM64_IMM7B_INST_WORD_X 3 \r
-#define IMM64_IMM7B_SIZE_X 7 \r
-#define IMM64_IMM7B_INST_WORD_POS_X 4 \r
-#define IMM64_IMM7B_VAL_POS_X 0 \r
-\r
-#define IMM64_IMM9D_INST_WORD_X 3 \r
-#define IMM64_IMM9D_SIZE_X 9 \r
-#define IMM64_IMM9D_INST_WORD_POS_X 18 \r
-#define IMM64_IMM9D_VAL_POS_X 7 \r
-\r
-#define IMM64_IMM5C_INST_WORD_X 3 \r
-#define IMM64_IMM5C_SIZE_X 5 \r
-#define IMM64_IMM5C_INST_WORD_POS_X 13 \r
-#define IMM64_IMM5C_VAL_POS_X 16 \r
-\r
-#define IMM64_IC_INST_WORD_X 3 \r
-#define IMM64_IC_SIZE_X 1 \r
-#define IMM64_IC_INST_WORD_POS_X 12 \r
-#define IMM64_IC_VAL_POS_X 21 \r
-\r
-#define IMM64_IMM41a_INST_WORD_X 1 \r
-#define IMM64_IMM41a_SIZE_X 10 \r
-#define IMM64_IMM41a_INST_WORD_POS_X 14 \r
-#define IMM64_IMM41a_VAL_POS_X 22 \r
-\r
-#define IMM64_IMM41b_INST_WORD_X 1 \r
-#define IMM64_IMM41b_SIZE_X 8 \r
-#define IMM64_IMM41b_INST_WORD_POS_X 24 \r
-#define IMM64_IMM41b_VAL_POS_X 32 \r
-\r
-#define IMM64_IMM41c_INST_WORD_X 2 \r
-#define IMM64_IMM41c_SIZE_X 23 \r
-#define IMM64_IMM41c_INST_WORD_POS_X 0 \r
-#define IMM64_IMM41c_VAL_POS_X 40 \r
-\r
-#define IMM64_SIGN_INST_WORD_X 3 \r
-#define IMM64_SIGN_SIZE_X 1 \r
-#define IMM64_SIGN_INST_WORD_POS_X 27 \r
-#define IMM64_SIGN_VAL_POS_X 63 \r
+#define IMM64_IMM7B_INST_WORD_X 3\r
+#define IMM64_IMM7B_SIZE_X 7\r
+#define IMM64_IMM7B_INST_WORD_POS_X 4\r
+#define IMM64_IMM7B_VAL_POS_X 0\r
+\r
+#define IMM64_IMM9D_INST_WORD_X 3\r
+#define IMM64_IMM9D_SIZE_X 9\r
+#define IMM64_IMM9D_INST_WORD_POS_X 18\r
+#define IMM64_IMM9D_VAL_POS_X 7\r
+\r
+#define IMM64_IMM5C_INST_WORD_X 3\r
+#define IMM64_IMM5C_SIZE_X 5\r
+#define IMM64_IMM5C_INST_WORD_POS_X 13\r
+#define IMM64_IMM5C_VAL_POS_X 16\r
+\r
+#define IMM64_IC_INST_WORD_X 3\r
+#define IMM64_IC_SIZE_X 1\r
+#define IMM64_IC_INST_WORD_POS_X 12\r
+#define IMM64_IC_VAL_POS_X 21\r
+\r
+#define IMM64_IMM41a_INST_WORD_X 1\r
+#define IMM64_IMM41a_SIZE_X 10\r
+#define IMM64_IMM41a_INST_WORD_POS_X 14\r
+#define IMM64_IMM41a_VAL_POS_X 22\r
+\r
+#define IMM64_IMM41b_INST_WORD_X 1\r
+#define IMM64_IMM41b_SIZE_X 8\r
+#define IMM64_IMM41b_INST_WORD_POS_X 24\r
+#define IMM64_IMM41b_VAL_POS_X 32\r
+\r
+#define IMM64_IMM41c_INST_WORD_X 2\r
+#define IMM64_IMM41c_SIZE_X 23\r
+#define IMM64_IMM41c_INST_WORD_POS_X 0\r
+#define IMM64_IMM41c_VAL_POS_X 40\r
+\r
+#define IMM64_SIGN_INST_WORD_X 3\r
+#define IMM64_SIGN_SIZE_X 1\r
+#define IMM64_SIGN_INST_WORD_POS_X 27\r
+#define IMM64_SIGN_VAL_POS_X 63\r
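+
+//
+// Usage sketch (illustrative only; the slot value below is hypothetical):
+// EXT_IMM64 ORs Size bits taken from bit InstPos of the 32-bit word at
+// Address into Value at bit ValPos; INS_IMM64 is the inverse. The IMM64_*
+// constants above locate each field of the movl 64-bit immediate: the
+// 32-bit word within the 16-byte bundle (INST_WORD_X), the field width
+// (SIZE_X), its bit position in that word (INST_WORD_POS_X), and its bit
+// position in the immediate value (VAL_POS_X). For the 7-bit imm7b field:
+//
+//   UINT64 Val  = 0;
+//   UINT32 Word = 0x00000ab0;   // hypothetical instruction word
+//   EXT_IMM64 (Val, &Word, IMM64_IMM7B_SIZE_X,
+//     IMM64_IMM7B_INST_WORD_POS_X, IMM64_IMM7B_VAL_POS_X);  // Val == 0x2B
+//   INS_IMM64 (Val, &Word, IMM64_IMM7B_SIZE_X,
+//     IMM64_IMM7B_INST_WORD_POS_X, IMM64_IMM7B_VAL_POS_X);  // Word unchanged
+//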
\r
RETURN_STATUS\r
PeCoffLoaderRelocateIa32Image (\r
return RETURN_UNSUPPORTED;\r
}\r
\r
-RETURN_STATUS\r
-PeCoffLoaderRelocateIpfImage (\r
- IN UINT16 *Reloc,\r
- IN OUT CHAR8 *Fixup, \r
- IN OUT CHAR8 **FixupData,\r
- IN UINT64 Adjust\r
- )\r
-/*++\r
-\r
-Routine Description:\r
-\r
- Performs an Itanium-based specific relocation fixup\r
-\r
-Arguments:\r
-\r
- Reloc - Pointer to the relocation record\r
-\r
- Fixup - Pointer to the address to fix up\r
-\r
- FixupData - Pointer to a buffer to log the fixups\r
-\r
- Adjust - The offset to adjust the fixup\r
-\r
-Returns:\r
-\r
- Status code\r
\r
---*/\r
-{\r
- UINT64 *F64;\r
- UINT64 FixupVal;\r
-\r
- switch ((*Reloc) >> 12) {\r
-\r
- case EFI_IMAGE_REL_BASED_DIR64:\r
- F64 = (UINT64 *) Fixup;\r
- *F64 = *F64 + (UINT64) Adjust;\r
- if (*FixupData != NULL) {\r
- *FixupData = ALIGN_POINTER(*FixupData, sizeof(UINT64));\r
- *(UINT64 *)(*FixupData) = *F64;\r
- *FixupData = *FixupData + sizeof(UINT64);\r
- }\r
- break;\r
-\r
- case EFI_IMAGE_REL_BASED_IA64_IMM64:\r
-\r
- //\r
- // Align it to bundle address before fixing up the\r
- // 64-bit immediate value of the movl instruction.\r
- //\r
-\r
- Fixup = (CHAR8 *)((UINTN) Fixup & (UINTN) ~(15));\r
- FixupVal = (UINT64)0;\r
- \r
- // \r
- // Extract the lower 32 bits of IMM64 from bundle\r
- //\r
- EXT_IMM64(FixupVal,\r
- (UINT32 *)Fixup + IMM64_IMM7B_INST_WORD_X,\r
- IMM64_IMM7B_SIZE_X,\r
- IMM64_IMM7B_INST_WORD_POS_X,\r
- IMM64_IMM7B_VAL_POS_X\r
- );\r
-\r
- EXT_IMM64(FixupVal,\r
- (UINT32 *)Fixup + IMM64_IMM9D_INST_WORD_X,\r
- IMM64_IMM9D_SIZE_X,\r
- IMM64_IMM9D_INST_WORD_POS_X,\r
- IMM64_IMM9D_VAL_POS_X\r
- );\r
-\r
- EXT_IMM64(FixupVal,\r
- (UINT32 *)Fixup + IMM64_IMM5C_INST_WORD_X,\r
- IMM64_IMM5C_SIZE_X,\r
- IMM64_IMM5C_INST_WORD_POS_X,\r
- IMM64_IMM5C_VAL_POS_X\r
- );\r
-\r
- EXT_IMM64(FixupVal,\r
- (UINT32 *)Fixup + IMM64_IC_INST_WORD_X,\r
- IMM64_IC_SIZE_X,\r
- IMM64_IC_INST_WORD_POS_X,\r
- IMM64_IC_VAL_POS_X\r
- );\r
-\r
- EXT_IMM64(FixupVal,\r
- (UINT32 *)Fixup + IMM64_IMM41a_INST_WORD_X,\r
- IMM64_IMM41a_SIZE_X,\r
- IMM64_IMM41a_INST_WORD_POS_X,\r
- IMM64_IMM41a_VAL_POS_X\r
- );\r
- \r
- // \r
- // Update 64-bit address\r
- //\r
- FixupVal += Adjust;\r
-\r
- // \r
- // Insert IMM64 into bundle\r
- //\r
- INS_IMM64(FixupVal,\r
- ((UINT32 *)Fixup + IMM64_IMM7B_INST_WORD_X),\r
- IMM64_IMM7B_SIZE_X,\r
- IMM64_IMM7B_INST_WORD_POS_X,\r
- IMM64_IMM7B_VAL_POS_X\r
- );\r
-\r
- INS_IMM64(FixupVal,\r
- ((UINT32 *)Fixup + IMM64_IMM9D_INST_WORD_X),\r
- IMM64_IMM9D_SIZE_X,\r
- IMM64_IMM9D_INST_WORD_POS_X,\r
- IMM64_IMM9D_VAL_POS_X\r
- );\r
-\r
- INS_IMM64(FixupVal,\r
- ((UINT32 *)Fixup + IMM64_IMM5C_INST_WORD_X),\r
- IMM64_IMM5C_SIZE_X,\r
- IMM64_IMM5C_INST_WORD_POS_X,\r
- IMM64_IMM5C_VAL_POS_X\r
- );\r
-\r
- INS_IMM64(FixupVal,\r
- ((UINT32 *)Fixup + IMM64_IC_INST_WORD_X),\r
- IMM64_IC_SIZE_X,\r
- IMM64_IC_INST_WORD_POS_X,\r
- IMM64_IC_VAL_POS_X\r
- );\r
-\r
- INS_IMM64(FixupVal,\r
- ((UINT32 *)Fixup + IMM64_IMM41a_INST_WORD_X),\r
- IMM64_IMM41a_SIZE_X,\r
- IMM64_IMM41a_INST_WORD_POS_X,\r
- IMM64_IMM41a_VAL_POS_X\r
- );\r
-\r
- INS_IMM64(FixupVal,\r
- ((UINT32 *)Fixup + IMM64_IMM41b_INST_WORD_X),\r
- IMM64_IMM41b_SIZE_X,\r
- IMM64_IMM41b_INST_WORD_POS_X,\r
- IMM64_IMM41b_VAL_POS_X\r
- );\r
-\r
- INS_IMM64(FixupVal,\r
- ((UINT32 *)Fixup + IMM64_IMM41c_INST_WORD_X),\r
- IMM64_IMM41c_SIZE_X,\r
- IMM64_IMM41c_INST_WORD_POS_X,\r
- IMM64_IMM41c_VAL_POS_X\r
- );\r
-\r
- INS_IMM64(FixupVal,\r
- ((UINT32 *)Fixup + IMM64_SIGN_INST_WORD_X),\r
- IMM64_SIGN_SIZE_X,\r
- IMM64_SIGN_INST_WORD_POS_X,\r
- IMM64_SIGN_VAL_POS_X\r
- );\r
-\r
- F64 = (UINT64 *) Fixup;\r
- if (*FixupData != NULL) {\r
- *FixupData = ALIGN_POINTER(*FixupData, sizeof(UINT64));\r
- *(UINT64 *)(*FixupData) = *F64;\r
- *FixupData = *FixupData + sizeof(UINT64);\r
- }\r
- break;\r
-\r
- default:\r
- return RETURN_UNSUPPORTED;\r
- }\r
-\r
- return RETURN_SUCCESS;\r
-}\r
-\r
-RETURN_STATUS\r
-PeCoffLoaderRelocateX64Image (\r
- IN UINT16 *Reloc,\r
- IN OUT CHAR8 *Fixup, \r
- IN OUT CHAR8 **FixupData,\r
- IN UINT64 Adjust\r
- )\r
/**\r
- Performs an x64 specific relocation fixup\r
-\r
- @param Reloc Pointer to the relocation record\r
- @param Fixup Pointer to the address to fix up\r
- @param FixupData Pointer to a buffer to log the fixups\r
- @param Adjust The offset to adjust the fixup\r
- \r
- @retval RETURN_SUCCESS Success to perform relocation\r
- @retval RETURN_UNSUPPORTED Unsupported.\r
-**/\r
-{\r
- UINT64 *F64;\r
-\r
- switch ((*Reloc) >> 12) {\r
-\r
- case EFI_IMAGE_REL_BASED_DIR64:\r
- F64 = (UINT64 *) Fixup;\r
- *F64 = *F64 + (UINT64) Adjust;\r
- if (*FixupData != NULL) {\r
- *FixupData = ALIGN_POINTER(*FixupData, sizeof(UINT64));\r
- *(UINT64 *)(*FixupData) = *F64;\r
- *FixupData = *FixupData + sizeof(UINT64);\r
- }\r
- break;\r
-\r
- default:\r
- return RETURN_UNSUPPORTED;\r
- }\r
-\r
- return RETURN_SUCCESS;\r
-}\r
-\r
-/**\r
- Pass in a pointer to an ARM MOVT or MOVW immediate instruciton and \r
+ Pass in a pointer to an ARM MOVT or MOVW immediate instruction and\r
return the immediate data encoded in the instruction\r
\r
@param Instruction Pointer to ARM MOVT or MOVW immediate instruction\r
\r
-// Thumb2 is two 16-bit instructions working together. Not a single 32-bit instruction
+// A wide Thumb2 instruction is two 16-bit halfwords working together, not a single 32-bit instruction
// Example MOVT R0, #0 is 0x0000f2c0 or 0xf2c0 0x0000\r
- Movt = (*Instruction << 16) | (*(Instruction + 1)); \r
+ Movt = (*Instruction << 16) | (*(Instruction + 1));\r
\r
// imm16 = imm4:i:imm3:imm8\r
// imm4 -> Bit19:Bit16\r
-  Update an ARM MOVT or MOVW immediate instruction immediate data.
+  Update the immediate data in an ARM MOVT or MOVW immediate instruction.
\r
@param Instruction Pointer to ARM MOVT or MOVW immediate instruction\r
- @param Address New addres to patch into the instruction\r
+ @param Address New address to patch into the instruction\r
**/\r
VOID\r
ThumbMovtImmediatePatch (\r
{\r
UINT16 Patch;\r
\r
- // First 16-bit chunk of instruciton\r
- Patch = ((Address >> 12) & 0x000f); // imm4 \r
+ // First 16-bit chunk of instruction\r
+ Patch = ((Address >> 12) & 0x000f); // imm4\r
Patch |= (((Address & BIT11) != 0) ? BIT10 : 0); // i\r
*Instruction = (*Instruction & ~0x040f) | Patch;\r
\r
}\r
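+
+// Worked example (values chosen for illustration): MOVT r0, #0x1234.
+// imm16 0x1234 splits into imm4=0x1, i=0, imm3=0x2, imm8=0x34; the first
+// halfword carries imm4 and i, the second carries imm3 and imm8:
+//
+//   UINT16 Inst[2] = { 0xf2c0, 0x0000 };      // MOVT r0, #0
+//   ThumbMovtImmediatePatch (Inst, 0x1234);   // Inst: { 0xf2c1, 0x2034 }
+//   // ThumbMovtImmediateAddress (Inst) then returns 0x1234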
\r
/**\r
-  Pass in a pointer to an ARM MOVW/MOVT instruciton pair and 
-  return the immediate data encoded in the two` instruction
+  Pass in a pointer to an ARM MOVW/MOVT instruction pair and
+  return the immediate data encoded in the two instructions
\r
- @param Instructions Pointer to ARM MOVW/MOVT insturction pair\r
+ @param Instructions Pointer to ARM MOVW/MOVT instruction pair\r
\r
@return Immediate address encoded in the instructions\r
\r
{\r
UINT16 *Word;\r
UINT16 *Top;\r
- \r
+\r
Word = Instructions; // MOVW\r
Top = Word + 2; // MOVT\r
- \r
+\r
return (ThumbMovtImmediateAddress (Top) << 16) + ThumbMovtImmediateAddress (Word);\r
}\r
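+
+// Example (encodings worked out by hand; treat as a sketch): a MOVW/MOVT
+// pair loading r0 with 0x80001234. MOVW supplies the low 16 bits and MOVT
+// the high 16 bits:
+//
+//   UINT16 Pair[4] = { 0xf241, 0x2034,        // MOVW r0, #0x1234
+//                      0xf2c8, 0x0000 };      // MOVT r0, #0x8000
+//   // ThumbMovwMovtImmediateAddress (Pair) returns 0x80001234, and
+//   // ThumbMovwMovtImmediatePatch (Pair, 0x80001234) writes back the
+//   // same encoding.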
\r
-  Update an ARM MOVW/MOVT immediate instruction instruction pair.
+  Update the immediate data in an ARM MOVW/MOVT instruction pair.
\r
@param Instructions Pointer to ARM MOVW/MOVT instruction pair\r
- @param Address New addres to patch into the instructions\r
+ @param Address New address to patch into the instructions\r
**/\r
VOID\r
EFIAPI\r
{\r
UINT16 *Word;\r
UINT16 *Top;\r
- \r
+\r
Word = (UINT16 *)Instructions; // MOVW\r
Top = Word + 2; // MOVT\r
\r
Fixup16 = (UINT16 *) Fixup;\r
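+ //
+ // A base relocation record is 16 bits: the upper 4 bits select the
+ // relocation type and the lower 12 bits give the offset within the
+ // current 4 KB page.
+ //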
\r
switch ((**Reloc) >> 12) {\r
- \r
+\r
case EFI_IMAGE_REL_BASED_ARM_MOV32T:\r
FixupVal = ThumbMovwMovtImmediateAddress (Fixup16) + (UINT32)Adjust;\r
ThumbMovwMovtImmediatePatch (Fixup16, FixupVal);\r
- \r
- \r
+\r
+\r
if (*FixupData != NULL) {\r
*FixupData = ALIGN_POINTER(*FixupData, sizeof(UINT64));\r
- *(UINT64 *)(*FixupData) = *Fixup16;\r
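+ //
+ // Copy all four halfwords of the MOVW/MOVT pair into the fixup log;
+ // a direct 64-bit store of *Fixup16 would record only the first
+ // 16-bit halfword, zero-extended.
+ //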
+ CopyMem (*FixupData, Fixup16, sizeof (UINT64));
+ *FixupData = *FixupData + sizeof(UINT64);\r
}\r
break;\r
- \r
+\r
case EFI_IMAGE_REL_BASED_ARM_MOV32A:\r
// break omitted - ARM instruction encoding not implemented\r
default:\r