]> git.proxmox.com Git - mirror_edk2.git/blame - Tools/Source/TianoTools/PeCoffLoader/Ipf/PeCoffLoaderEx.c
1. Removed the unnecessary #include statements and include files
[mirror_edk2.git] / Tools / Source / TianoTools / PeCoffLoader / Ipf / PeCoffLoaderEx.c
CommitLineData
878ddf1f 1/*++\r
2\r
3Copyright (c) 2004, Intel Corporation \r
4All rights reserved. This program and the accompanying materials \r
5are licensed and made available under the terms and conditions of the BSD License \r
6which accompanies this distribution. The full text of the license may be found at \r
7http://opensource.org/licenses/bsd-license.php \r
8 \r
9THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, \r
10WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. \r
11\r
12Module Name:\r
13\r
14 PeCoffLoaderEx.c\r
15\r
16Abstract:\r
17\r
18 Fixes Intel Itanium(TM) specific relocation types\r
19\r
20\r
21Revision History\r
22\r
23--*/\r
24\r
25#define EFI_SPECIFICATION_VERSION 0x00000000\r
26#define EDK_RELEASE_VERSION 0x00020000\r
27#include <Base.h>\r
28#include <Library/PeCoffLib.h>\r
29#include <Library/BaseMemoryLib.h>\r
30\r
31\r
32\r
33\r
34\r
//
// EXT_IMM64: extract a bit field from a 32-bit instruction word and merge it
// into the 64-bit immediate being reassembled.
//   Value   - accumulator for the 64-bit immediate value
//   Address - pointer to the 32-bit instruction word to read
//   Size    - width of the field in bits
//   InstPos - bit position of the field inside the instruction word
//   ValPos  - bit position the field occupies inside the 64-bit value
// NOTE: function-like macro; arguments are evaluated more than once, so do
// not pass expressions with side effects.
//
#define EXT_IMM64(Value, Address, Size, InstPos, ValPos) \
  Value |= (((UINT64)((*(Address) >> InstPos) & (((UINT64)1 << Size) - 1))) << ValPos)

//
// INS_IMM64: insert a bit field of the 64-bit immediate into a 32-bit
// instruction word, preserving the word's other bits.
// The clear-mask is built in UINT32 arithmetic: with plain int, a field such
// as IMM41b (Size=8, InstPos=24) would left-shift bits into the sign bit of
// a signed int, which is undefined behavior in C.
//
#define INS_IMM64(Value, Address, Size, InstPos, ValPos) \
  *(UINT32*)Address = (*(UINT32*)Address & ~((((UINT32)1 << Size) - 1) << InstPos)) | \
                      ((UINT32)((((UINT64)Value >> ValPos) & (((UINT64)1 << Size) - 1))) << InstPos)
41\r
//
// Bit-field layout of the 64-bit immediate of the IA-64 movl instruction
// within a 16-byte instruction bundle, viewed as four 32-bit words.
// For each field of the immediate:
//   *_INST_WORD_X     - index of the 32-bit word of the bundle holding the field
//   *_SIZE_X          - width of the field in bits
//   *_INST_WORD_POS_X - bit offset of the field within that 32-bit word
//   *_VAL_POS_X       - bit offset of the field within the 64-bit value
// The first five fields (imm7b, imm9d, imm5c, ic, imm41a) together cover
// bits 0..31 of the value; imm41b, imm41c and the sign bit cover bits 32..63.
//
#define IMM64_IMM7B_INST_WORD_X         3
#define IMM64_IMM7B_SIZE_X              7
#define IMM64_IMM7B_INST_WORD_POS_X     4
#define IMM64_IMM7B_VAL_POS_X           0

#define IMM64_IMM9D_INST_WORD_X         3
#define IMM64_IMM9D_SIZE_X              9
#define IMM64_IMM9D_INST_WORD_POS_X     18
#define IMM64_IMM9D_VAL_POS_X           7

#define IMM64_IMM5C_INST_WORD_X         3
#define IMM64_IMM5C_SIZE_X              5
#define IMM64_IMM5C_INST_WORD_POS_X     13
#define IMM64_IMM5C_VAL_POS_X           16

#define IMM64_IC_INST_WORD_X            3
#define IMM64_IC_SIZE_X                 1
#define IMM64_IC_INST_WORD_POS_X        12
#define IMM64_IC_VAL_POS_X              21

#define IMM64_IMM41a_INST_WORD_X        1
#define IMM64_IMM41a_SIZE_X             10
#define IMM64_IMM41a_INST_WORD_POS_X    14
#define IMM64_IMM41a_VAL_POS_X          22

#define IMM64_IMM41b_INST_WORD_X        1
#define IMM64_IMM41b_SIZE_X             8
#define IMM64_IMM41b_INST_WORD_POS_X    24
#define IMM64_IMM41b_VAL_POS_X          32

#define IMM64_IMM41c_INST_WORD_X        2
#define IMM64_IMM41c_SIZE_X             23
#define IMM64_IMM41c_INST_WORD_POS_X    0
#define IMM64_IMM41c_VAL_POS_X          40

#define IMM64_SIGN_INST_WORD_X          3
#define IMM64_SIGN_SIZE_X               1
#define IMM64_SIGN_INST_WORD_POS_X      27
#define IMM64_SIGN_VAL_POS_X            63
81\r
RETURN_STATUS
PeCoffLoaderRelocateImageEx (
  IN UINT16      *Reloc,
  IN OUT CHAR8   *Fixup,
  IN OUT CHAR8   **FixupData,
  IN UINT64      Adjust
  )
/*++

Routine Description:

  Performs an Itanium-based specific relocation fixup.  Supports the
  DIR64 relocation (plain 64-bit slot) and the Itanium IMM64 relocation
  (64-bit immediate of a movl instruction scattered across an instruction
  bundle).  Any other relocation type is rejected.

Arguments:

  Reloc      - Pointer to the relocation record; the type is held in the
               top 4 bits of the 16-bit record

  Fixup      - Pointer to the address to fix up (already page-offset
               adjusted by the caller)

  FixupData  - Pointer to a buffer to log the fixups; ignored when the
               pointed-to pointer is NULL

  Adjust     - The offset to adjust the fixup by (new base - linked base)

Returns:

  RETURN_SUCCESS      - the fixup was applied
  RETURN_UNSUPPORTED  - unrecognized relocation type

--*/
{
  UINT64      *F64;
  UINT64      FixupVal;

  //
  // Relocation type is encoded in the upper 4 bits of the record.
  //
  switch ((*Reloc) >> 12) {

  case EFI_IMAGE_REL_BASED_DIR64:
    //
    // Plain 64-bit pointer: rebase in place and optionally log the
    // relocated value (8-byte aligned) into the fixup log.
    //
    F64 = (UINT64 *) Fixup;
    *F64 = *F64 + (UINT64) Adjust;
    if (*FixupData != NULL) {
      *FixupData = ALIGN_POINTER(*FixupData, sizeof(UINT64));
      *(UINT64 *)(*FixupData) = *F64;
      *FixupData = *FixupData + sizeof(UINT64);
    }
    break;

  case EFI_IMAGE_REL_BASED_IA64_IMM64:

    //
    // Align it to bundle address before fixing up the
    // 64-bit immediate value of the movl instruction.
    // (IA-64 bundles are 16 bytes, hence the mask of the low 4 bits.)
    //

    Fixup = (CHAR8 *)((UINTN) Fixup & (UINTN) ~(15));
    FixupVal = (UINT64)0;

    //
    // Extract the lower 32 bits of IMM64 from bundle
    // (imm7b + imm9d + imm5c + ic + imm41a = 7+9+5+1+10 = 32 bits).
    // NOTE(review): only bits 0..31 are read back from the bundle before
    // adding Adjust — this presumes the link-time immediate's upper 32
    // bits are zero; any pre-existing upper bits would be discarded.
    //
    EXT_IMM64(FixupVal,
              (UINT32 *)Fixup + IMM64_IMM7B_INST_WORD_X,
              IMM64_IMM7B_SIZE_X,
              IMM64_IMM7B_INST_WORD_POS_X,
              IMM64_IMM7B_VAL_POS_X
              );

    EXT_IMM64(FixupVal,
              (UINT32 *)Fixup + IMM64_IMM9D_INST_WORD_X,
              IMM64_IMM9D_SIZE_X,
              IMM64_IMM9D_INST_WORD_POS_X,
              IMM64_IMM9D_VAL_POS_X
              );

    EXT_IMM64(FixupVal,
              (UINT32 *)Fixup + IMM64_IMM5C_INST_WORD_X,
              IMM64_IMM5C_SIZE_X,
              IMM64_IMM5C_INST_WORD_POS_X,
              IMM64_IMM5C_VAL_POS_X
              );

    EXT_IMM64(FixupVal,
              (UINT32 *)Fixup + IMM64_IC_INST_WORD_X,
              IMM64_IC_SIZE_X,
              IMM64_IC_INST_WORD_POS_X,
              IMM64_IC_VAL_POS_X
              );

    EXT_IMM64(FixupVal,
              (UINT32 *)Fixup + IMM64_IMM41a_INST_WORD_X,
              IMM64_IMM41a_SIZE_X,
              IMM64_IMM41a_INST_WORD_POS_X,
              IMM64_IMM41a_VAL_POS_X
              );

    //
    // Update 64-bit address
    //
    FixupVal += Adjust;

    //
    // Insert IMM64 into bundle: all eight fields (including imm41b,
    // imm41c and the sign bit, which carry bits 32..63) are rewritten.
    //
    INS_IMM64(FixupVal,
              ((UINT32 *)Fixup + IMM64_IMM7B_INST_WORD_X),
              IMM64_IMM7B_SIZE_X,
              IMM64_IMM7B_INST_WORD_POS_X,
              IMM64_IMM7B_VAL_POS_X
              );

    INS_IMM64(FixupVal,
              ((UINT32 *)Fixup + IMM64_IMM9D_INST_WORD_X),
              IMM64_IMM9D_SIZE_X,
              IMM64_IMM9D_INST_WORD_POS_X,
              IMM64_IMM9D_VAL_POS_X
              );

    INS_IMM64(FixupVal,
              ((UINT32 *)Fixup + IMM64_IMM5C_INST_WORD_X),
              IMM64_IMM5C_SIZE_X,
              IMM64_IMM5C_INST_WORD_POS_X,
              IMM64_IMM5C_VAL_POS_X
              );

    INS_IMM64(FixupVal,
              ((UINT32 *)Fixup + IMM64_IC_INST_WORD_X),
              IMM64_IC_SIZE_X,
              IMM64_IC_INST_WORD_POS_X,
              IMM64_IC_VAL_POS_X
              );

    INS_IMM64(FixupVal,
              ((UINT32 *)Fixup + IMM64_IMM41a_INST_WORD_X),
              IMM64_IMM41a_SIZE_X,
              IMM64_IMM41a_INST_WORD_POS_X,
              IMM64_IMM41a_VAL_POS_X
              );

    INS_IMM64(FixupVal,
              ((UINT32 *)Fixup + IMM64_IMM41b_INST_WORD_X),
              IMM64_IMM41b_SIZE_X,
              IMM64_IMM41b_INST_WORD_POS_X,
              IMM64_IMM41b_VAL_POS_X
              );

    INS_IMM64(FixupVal,
              ((UINT32 *)Fixup + IMM64_IMM41c_INST_WORD_X),
              IMM64_IMM41c_SIZE_X,
              IMM64_IMM41c_INST_WORD_POS_X,
              IMM64_IMM41c_VAL_POS_X
              );

    INS_IMM64(FixupVal,
              ((UINT32 *)Fixup + IMM64_SIGN_INST_WORD_X),
              IMM64_SIGN_SIZE_X,
              IMM64_SIGN_INST_WORD_POS_X,
              IMM64_SIGN_VAL_POS_X
              );

    //
    // Log the first 8 bytes of the (bundle-aligned) patched bundle, not
    // FixupVal itself — this mirrors the DIR64 logging convention.
    //
    F64 = (UINT64 *) Fixup;
    if (*FixupData != NULL) {
      *FixupData = ALIGN_POINTER(*FixupData, sizeof(UINT64));
      *(UINT64 *)(*FixupData) = *F64;
      *FixupData = *FixupData + sizeof(UINT64);
    }
    break;

  default:
    return RETURN_UNSUPPORTED;
  }

  return RETURN_SUCCESS;
}