]>
Commit | Line | Data |
---|---|---|
e8449e1d FB |
1 | ## @file\r |
2 | # Create makefile for MS nmake and GNU make\r | |
3 | #\r | |
4 | # Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>\r | |
5 | # SPDX-License-Identifier: BSD-2-Clause-Patent\r | |
6 | #\r | |
7 | from __future__ import absolute_import\r | |
8 | from AutoGen.AutoGen import AutoGen\r | |
fc8b8dea | 9 | from Common.LongFilePathSupport import LongFilePath, CopyLongFilePath\r |
e8449e1d FB |
10 | from Common.BuildToolError import *\r |
11 | from Common.DataType import *\r | |
12 | from Common.Misc import *\r | |
13 | from Common.StringUtils import NormPath,GetSplitList\r | |
14 | from collections import defaultdict\r | |
15 | from Workspace.WorkspaceCommon import OrderedListDict\r | |
16 | import os.path as path\r | |
17 | import copy\r | |
18 | import hashlib\r | |
19 | from . import InfSectionParser\r | |
20 | from . import GenC\r | |
21 | from . import GenMake\r | |
22 | from . import GenDepex\r | |
23 | from io import BytesIO\r | |
24 | from GenPatchPcdTable.GenPatchPcdTable import parsePcdInfoFromMapFile\r | |
25 | from Workspace.MetaFileCommentParser import UsageList\r | |
26 | from .GenPcdDb import CreatePcdDatabaseCode\r | |
27 | from Common.caching import cached_class_function\r | |
28 | from AutoGen.ModuleAutoGenHelper import PlatformInfo,WorkSpaceInfo\r | |
0e7e7a26 | 29 | import json\r |
94459080 | 30 | import tempfile\r |
e8449e1d FB |
31 | \r |
## Mapping Makefile type
#  Maps a toolchain family name to the makefile flavor emitted for it
#  (MSFT -> nmake, GCC -> gmake); families absent here get no mapping.
gMakeTypeMap = {TAB_COMPILER_MSFT:"nmake", "GCC":"gmake"}
#
# Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT
# is the former use /I , the Latter used -I to specify include directories
#
gBuildOptIncludePatternMsft = re.compile(r"(?:.*?)/I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)
gBuildOptIncludePatternOther = re.compile(r"(?:.*?)-I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)

## default file name for AutoGen
gAutoGenCodeFileName = "AutoGen.c"
gAutoGenHeaderFileName = "AutoGen.h"
gAutoGenStringFileName = "%(module_name)sStrDefs.h"
gAutoGenStringFormFileName = "%(module_name)sStrDefs.hpk"
gAutoGenDepexFileName = "%(module_name)s.depex"
gAutoGenImageDefFileName = "%(module_name)sImgDefs.h"
gAutoGenIdfFileName = "%(module_name)sIdf.hpk"
gInfSpecVersion = "0x00010017"

#
# Match name = variable
#
# NOTE: raw string literal so that "\s"/"\w" are regex escapes, not string
# escapes (non-raw form raises DeprecationWarning/SyntaxWarning on newer Python)
gEfiVarStoreNamePattern = re.compile(r"\s*name\s*=\s*(\w+)")
#
# The format of guid in efivarstore statement likes following and must be correct:
# guid = {0xA04A27f4, 0xDF00, 0x4D42, {0xB5, 0x52, 0x39, 0x51, 0x13, 0x02, 0x11, 0x3D}}
#
gEfiVarStoreGuidPattern = re.compile(r"\s*guid\s*=\s*({.*?{.*?}\s*})")
60 | \r | |
#
# Template string used to generate the As-Built INF file.
# ${BEGIN}/${END} pairs delimit optional/repeated sections expanded by
# TemplateString; ${...} placeholders are substituted at generation time.
#
gAsBuiltInfHeaderString = TemplateString("""${header_comments}

# DO NOT EDIT
# FILE auto-generated

[Defines]
  INF_VERSION                = ${module_inf_version}
  BASE_NAME                  = ${module_name}
  FILE_GUID                  = ${module_guid}
  MODULE_TYPE                = ${module_module_type}${BEGIN}
  VERSION_STRING             = ${module_version_string}${END}${BEGIN}
  PCD_IS_DRIVER              = ${pcd_is_driver_string}${END}${BEGIN}
  UEFI_SPECIFICATION_VERSION = ${module_uefi_specification_version}${END}${BEGIN}
  PI_SPECIFICATION_VERSION   = ${module_pi_specification_version}${END}${BEGIN}
  ENTRY_POINT                = ${module_entry_point}${END}${BEGIN}
  UNLOAD_IMAGE               = ${module_unload_image}${END}${BEGIN}
  CONSTRUCTOR                = ${module_constructor}${END}${BEGIN}
  DESTRUCTOR                 = ${module_destructor}${END}${BEGIN}
  SHADOW                     = ${module_shadow}${END}${BEGIN}
  PCI_VENDOR_ID              = ${module_pci_vendor_id}${END}${BEGIN}
  PCI_DEVICE_ID              = ${module_pci_device_id}${END}${BEGIN}
  PCI_CLASS_CODE             = ${module_pci_class_code}${END}${BEGIN}
  PCI_REVISION               = ${module_pci_revision}${END}${BEGIN}
  BUILD_NUMBER               = ${module_build_number}${END}${BEGIN}
  SPEC                       = ${module_spec}${END}${BEGIN}
  UEFI_HII_RESOURCE_SECTION  = ${module_uefi_hii_resource_section}${END}${BEGIN}
  MODULE_UNI_FILE            = ${module_uni_file}${END}

[Packages.${module_arch}]${BEGIN}
  ${package_item}${END}

[Binaries.${module_arch}]${BEGIN}
  ${binary_item}${END}

[PatchPcd.${module_arch}]${BEGIN}
  ${patchablepcd_item}
${END}

[Protocols.${module_arch}]${BEGIN}
  ${protocol_item}
${END}

[Ppis.${module_arch}]${BEGIN}
  ${ppi_item}
${END}

[Guids.${module_arch}]${BEGIN}
  ${guid_item}
${END}

[PcdEx.${module_arch}]${BEGIN}
  ${pcd_item}
${END}

[LibraryClasses.${module_arch}]
## @LIB_INSTANCES${BEGIN}
#  ${libraryclasses_item}${END}

${depexsection_item}

${userextension_tianocore_item}

${tail_comments}

[BuildOptions.${module_arch}]
## @AsBuilt${BEGIN}
##   ${flags_item}${END}
""")
132 | #\r | |
133 | # extend lists contained in a dictionary with lists stored in another dictionary\r | |
134 | # if CopyToDict is not derived from DefaultDict(list) then this may raise exception\r | |
135 | #\r | |
136 | def ExtendCopyDictionaryLists(CopyToDict, CopyFromDict):\r | |
137 | for Key in CopyFromDict:\r | |
138 | CopyToDict[Key].extend(CopyFromDict[Key])\r | |
139 | \r | |
# Create a directory specified by a set of path elements and return the full path
def _MakeDir(PathList):
    """Join PathList into a single path, ensure that directory exists, return it."""
    FullPath = path.join(*PathList)
    CreateDirectory(FullPath)
    return FullPath
145 | \r | |
146 | #\r | |
147 | # Convert string to C format array\r | |
148 | #\r | |
149 | def _ConvertStringToByteArray(Value):\r | |
150 | Value = Value.strip()\r | |
151 | if not Value:\r | |
152 | return None\r | |
153 | if Value[0] == '{':\r | |
154 | if not Value.endswith('}'):\r | |
155 | return None\r | |
156 | Value = Value.replace(' ', '').replace('{', '').replace('}', '')\r | |
157 | ValFields = Value.split(',')\r | |
158 | try:\r | |
159 | for Index in range(len(ValFields)):\r | |
160 | ValFields[Index] = str(int(ValFields[Index], 0))\r | |
161 | except ValueError:\r | |
162 | return None\r | |
163 | Value = '{' + ','.join(ValFields) + '}'\r | |
164 | return Value\r | |
165 | \r | |
166 | Unicode = False\r | |
167 | if Value.startswith('L"'):\r | |
168 | if not Value.endswith('"'):\r | |
169 | return None\r | |
170 | Value = Value[1:]\r | |
171 | Unicode = True\r | |
172 | elif not Value.startswith('"') or not Value.endswith('"'):\r | |
173 | return None\r | |
174 | \r | |
175 | Value = eval(Value) # translate escape character\r | |
176 | NewValue = '{'\r | |
177 | for Index in range(0, len(Value)):\r | |
178 | if Unicode:\r | |
179 | NewValue = NewValue + str(ord(Value[Index]) % 0x10000) + ','\r | |
180 | else:\r | |
181 | NewValue = NewValue + str(ord(Value[Index]) % 0x100) + ','\r | |
182 | Value = NewValue + '0}'\r | |
183 | return Value\r | |
184 | \r | |
185 | ## ModuleAutoGen class\r | |
186 | #\r | |
187 | # This class encapsules the AutoGen behaviors for the build tools. In addition to\r | |
188 | # the generation of AutoGen.h and AutoGen.c, it will generate *.depex file according\r | |
189 | # to the [depex] section in module's inf file.\r | |
190 | #\r | |
191 | class ModuleAutoGen(AutoGen):\r | |
192 | # call super().__init__ then call the worker function with different parameter count\r | |
193 | def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):\r | |
194 | if not hasattr(self, "_Init"):\r | |
195 | self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args)\r | |
196 | self._Init = True\r | |
197 | \r | |
    ## Cache the timestamps of metafiles of every module in a class attribute
    #
    # NOTE(review): shared across ALL ModuleAutoGen instances; presumably maps
    # metafile -> timestamp and is populated outside this excerpt — confirm.
    TimeDict = {}
201 | \r | |
    def __new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
        # check if this module is employed by active platform
        # (args[0] is the platform meta-file and args[-1] the DataPipe,
        # matching _InitWorker's trailing PlatformFile/DataPipe parameters)
        if not PlatformInfo(Workspace, args[0], Target, Toolchain, Arch,args[-1]).ValidModule(MetaFile):
            EdkLogger.verbose("Module [%s] for [%s] is not employed by active platform\n" \
                              % (MetaFile, Arch))
            # Returning None from __new__ means the caller receives None and
            # __init__ is never run for this (Module, Arch) combination.
            return None
        return super(ModuleAutoGen, cls).__new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
209 | \r | |
    ## Initialize ModuleAutoGen
    #
    #   @param      Workspace       EdkIIWorkspaceBuild object
    #   @param      ModuleFile      The path of module file
    #   @param      Target          Build target (DEBUG, RELEASE)
    #   @param      Toolchain       Name of tool chain
    #   @param      Arch            The arch the module supports
    #   @param      PlatformFile    Platform meta-file
    #   @param      DataPipe        Cross-process data carrier; its "P_Info"
    #                               record supplies workspace/platform settings
    #
    # NOTE(review): PlatformFile is accepted but not referenced in this body;
    # it appears to be consumed by __new__ via args[0] — confirm.
    def _InitWorker(self, Workspace, ModuleFile, Target, Toolchain, Arch, PlatformFile,DataPipe):
        EdkLogger.debug(EdkLogger.DEBUG_9, "AutoGen module [%s] [%s]" % (ModuleFile, Arch))
        GlobalData.gProcessingFile = "%s [%s, %s, %s]" % (ModuleFile, Arch, Toolchain, Target)

        # Platform context: WorkspaceDir/PlatformInfo are filled in by
        # __init_platform_info__ from the DataPipe.
        self.Workspace = Workspace
        self.WorkspaceDir = ""
        self.PlatformInfo = None
        self.DataPipe = DataPipe
        self.__init_platform_info__()
        self.MetaFile = ModuleFile
        self.SourceDir = self.MetaFile.SubDir
        # Keep SourceDir workspace-relative
        self.SourceDir = mws.relpath(self.SourceDir, self.WorkspaceDir)

        self.ToolChain = Toolchain
        self.BuildTarget = Target
        self.Arch = Arch
        self.ToolChainFamily = self.PlatformInfo.ToolChainFamily
        self.BuildRuleFamily = self.PlatformInfo.BuildRuleFamily

        # Generation-state flags consulted by later phases
        self.IsCodeFileCreated = False
        self.IsAsBuiltInfCreated = False
        self.DepexGenerated = False

        self.BuildDatabase = self.Workspace.BuildDatabase
        self.BuildRuleOrder = None
        self.BuildTime      = 0

        # Usage comments harvested from the INF, keyed by GUID/Protocol/PPI name
        self._GuidComments = OrderedListDict()
        self._ProtocolComments = OrderedListDict()
        self._PpiComments = OrderedListDict()
        # Lazily-built build-target bookkeeping (filled by build-rule application)
        self._BuildTargets = None
        self._IntroBuildTargetList = None
        self._FinalBuildTargetList = None
        self._FileTypes = None

        self.AutoGenDepSet = set()
        self.ReferenceModules = []
        self.ConstPcd = {}

        self.Makefile = None
        self.FileDependCache = {}
259 | \r |
260 | def __init_platform_info__(self):\r | |
261 | pinfo = self.DataPipe.Get("P_Info")\r | |
e8449e1d FB |
262 | self.WorkspaceDir = pinfo.get("WorkspaceDir")\r |
263 | self.PlatformInfo = PlatformInfo(self.Workspace,pinfo.get("ActivePlatform"),pinfo.get("Target"),pinfo.get("ToolChain"),pinfo.get("Arch"),self.DataPipe)\r | |
264 | ## hash() operator of ModuleAutoGen\r | |
265 | #\r | |
266 | # The module file path and arch string will be used to represent\r | |
267 | # hash value of this object\r | |
268 | #\r | |
269 | # @retval int Hash value of the module file path and arch\r | |
270 | #\r | |
271 | @cached_class_function\r | |
272 | def __hash__(self):\r | |
78fb6b0e | 273 | return hash((self.MetaFile, self.Arch, self.ToolChain,self.BuildTarget))\r |
e8449e1d FB |
274 | def __repr__(self):\r |
275 | return "%s [%s]" % (self.MetaFile, self.Arch)\r | |
276 | \r | |
277 | # Get FixedAtBuild Pcds of this Module\r | |
278 | @cached_property\r | |
279 | def FixedAtBuildPcds(self):\r | |
280 | RetVal = []\r | |
281 | for Pcd in self.ModulePcdList:\r | |
282 | if Pcd.Type != TAB_PCDS_FIXED_AT_BUILD:\r | |
283 | continue\r | |
284 | if Pcd not in RetVal:\r | |
285 | RetVal.append(Pcd)\r | |
286 | return RetVal\r | |
287 | \r | |
288 | @cached_property\r | |
289 | def FixedVoidTypePcds(self):\r | |
290 | RetVal = {}\r | |
291 | for Pcd in self.FixedAtBuildPcds:\r | |
292 | if Pcd.DatumType == TAB_VOID:\r | |
293 | if '.'.join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName)) not in RetVal:\r | |
294 | RetVal['.'.join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))] = Pcd.DefaultValue\r | |
295 | return RetVal\r | |
296 | \r | |
297 | @property\r | |
298 | def UniqueBaseName(self):\r | |
299 | ModuleNames = self.DataPipe.Get("M_Name")\r | |
300 | if not ModuleNames:\r | |
301 | return self.Name\r | |
76e12fa3 | 302 | return ModuleNames.get((self.Name,self.MetaFile),self.Name)\r |
e8449e1d FB |
303 | \r |
304 | # Macros could be used in build_rule.txt (also Makefile)\r | |
305 | @cached_property\r | |
306 | def Macros(self):\r | |
307 | return OrderedDict((\r | |
308 | ("WORKSPACE" ,self.WorkspaceDir),\r | |
309 | ("MODULE_NAME" ,self.Name),\r | |
310 | ("MODULE_NAME_GUID" ,self.UniqueBaseName),\r | |
311 | ("MODULE_GUID" ,self.Guid),\r | |
312 | ("MODULE_VERSION" ,self.Version),\r | |
313 | ("MODULE_TYPE" ,self.ModuleType),\r | |
314 | ("MODULE_FILE" ,str(self.MetaFile)),\r | |
315 | ("MODULE_FILE_BASE_NAME" ,self.MetaFile.BaseName),\r | |
316 | ("MODULE_RELATIVE_DIR" ,self.SourceDir),\r | |
317 | ("MODULE_DIR" ,self.SourceDir),\r | |
318 | ("BASE_NAME" ,self.Name),\r | |
319 | ("ARCH" ,self.Arch),\r | |
320 | ("TOOLCHAIN" ,self.ToolChain),\r | |
321 | ("TOOLCHAIN_TAG" ,self.ToolChain),\r | |
322 | ("TOOL_CHAIN_TAG" ,self.ToolChain),\r | |
323 | ("TARGET" ,self.BuildTarget),\r | |
324 | ("BUILD_DIR" ,self.PlatformInfo.BuildDir),\r | |
325 | ("BIN_DIR" ,os.path.join(self.PlatformInfo.BuildDir, self.Arch)),\r | |
326 | ("LIB_DIR" ,os.path.join(self.PlatformInfo.BuildDir, self.Arch)),\r | |
327 | ("MODULE_BUILD_DIR" ,self.BuildDir),\r | |
328 | ("OUTPUT_DIR" ,self.OutputDir),\r | |
329 | ("DEBUG_DIR" ,self.DebugDir),\r | |
330 | ("DEST_DIR_OUTPUT" ,self.OutputDir),\r | |
331 | ("DEST_DIR_DEBUG" ,self.DebugDir),\r | |
332 | ("PLATFORM_NAME" ,self.PlatformInfo.Name),\r | |
333 | ("PLATFORM_GUID" ,self.PlatformInfo.Guid),\r | |
334 | ("PLATFORM_VERSION" ,self.PlatformInfo.Version),\r | |
335 | ("PLATFORM_RELATIVE_DIR" ,self.PlatformInfo.SourceDir),\r | |
336 | ("PLATFORM_DIR" ,mws.join(self.WorkspaceDir, self.PlatformInfo.SourceDir)),\r | |
337 | ("PLATFORM_OUTPUT_DIR" ,self.PlatformInfo.OutputDir),\r | |
338 | ("FFS_OUTPUT_DIR" ,self.FfsOutputDir)\r | |
339 | ))\r | |
340 | \r | |
    ## Return the module build data object
    @cached_property
    def Module(self):
        # BuildDatabase is keyed by (metafile, arch, target, toolchain)
        return self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]

    ## Return the module name
    @cached_property
    def Name(self):
        return self.Module.BaseName

    ## Return the module DxsFile if exist
    @cached_property
    def DxsFile(self):
        return self.Module.DxsFile

    ## Return the module meta-file GUID
    @cached_property
    def Guid(self):
        #
        # To build same module more than once, the module path with FILE_GUID overridden has
        # the file name FILE_GUIDmodule.inf, but the relative path (self.MetaFile.File) is the real path
        # in DSC. The overridden GUID can be retrieved from file name
        #
        if os.path.basename(self.MetaFile.File) != os.path.basename(self.MetaFile.Path):
            #
            # Length of GUID is 36
            #
            return os.path.basename(self.MetaFile.Path)[:36]
        return self.Module.Guid

    ## Return the module version
    @cached_property
    def Version(self):
        return self.Module.Version

    ## Return the module type
    @cached_property
    def ModuleType(self):
        return self.Module.ModuleType

    ## Return the component type (for Edk.x style of module)
    @cached_property
    def ComponentType(self):
        return self.Module.ComponentType

    ## Return the build type
    @cached_property
    def BuildType(self):
        return self.Module.BuildType

    ## Return the PCD_IS_DRIVER setting
    @cached_property
    def PcdIsDriver(self):
        return self.Module.PcdIsDriver

    ## Return the autogen version, i.e. module meta-file version
    @cached_property
    def AutoGenVersion(self):
        return self.Module.AutoGenVersion

    ## Check if the module is library or not
    @cached_property
    def IsLibrary(self):
        return bool(self.Module.LibraryClass)

    ## Check if the module is binary module or not
    @cached_property
    def IsBinaryModule(self):
        return self.Module.IsBinaryModule
410 | \r | |
411 | ## Return the directory to store intermediate files of the module\r | |
412 | @cached_property\r | |
413 | def BuildDir(self):\r | |
414 | return _MakeDir((\r | |
415 | self.PlatformInfo.BuildDir,\r | |
416 | self.Arch,\r | |
417 | self.SourceDir,\r | |
418 | self.MetaFile.BaseName\r | |
419 | ))\r | |
420 | \r | |
    ## Return the directory to store the intermediate object files of the module
    @cached_property
    def OutputDir(self):
        return _MakeDir((self.BuildDir, "OUTPUT"))

    ## Return the directory path to store ffs file
    @cached_property
    def FfsOutputDir(self):
        # Only meaningful when an FDF file is being processed; empty string otherwise
        if GlobalData.gFdfParser:
            return path.join(self.PlatformInfo.BuildDir, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
        return ''

    ## Return the directory to store auto-gened source files of the module
    @cached_property
    def DebugDir(self):
        return _MakeDir((self.BuildDir, "DEBUG"))
437 | \r | |
438 | ## Return the path of custom file\r | |
439 | @cached_property\r | |
440 | def CustomMakefile(self):\r | |
441 | RetVal = {}\r | |
442 | for Type in self.Module.CustomMakefile:\r | |
443 | MakeType = gMakeTypeMap[Type] if Type in gMakeTypeMap else 'nmake'\r | |
444 | File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])\r | |
445 | RetVal[MakeType] = File\r | |
446 | return RetVal\r | |
447 | \r | |
    ## Return the directory of the makefile
    #
    #   @retval     string  The directory string of module's makefile
    #
    @cached_property
    def MakeFileDir(self):
        # Makefile lives directly in the module build directory
        return self.BuildDir

    ## Return build command string
    #
    #   @retval     string  Build command string
    #
    @cached_property
    def BuildCommand(self):
        # Delegated to the platform: one build command for all modules
        return self.PlatformInfo.BuildCommand
463 | \r | |
bf1ea933 FZ |
464 | ## Get Module package and Platform package\r |
465 | #\r | |
466 | # @retval list The list of package object\r | |
467 | #\r | |
468 | @cached_property\r | |
469 | def PackageList(self):\r | |
470 | PkagList = []\r | |
471 | if self.Module.Packages:\r | |
472 | PkagList.extend(self.Module.Packages)\r | |
473 | Platform = self.BuildDatabase[self.PlatformInfo.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]\r | |
474 | for Package in Platform.Packages:\r | |
475 | if Package in PkagList:\r | |
476 | continue\r | |
477 | PkagList.append(Package)\r | |
478 | return PkagList\r | |
479 | \r | |
480 | ## Get object list of all packages the module and its dependent libraries belong to and the Platform depends on\r | |
e8449e1d FB |
481 | #\r |
482 | # @retval list The list of package object\r | |
483 | #\r | |
484 | @cached_property\r | |
485 | def DerivedPackageList(self):\r | |
486 | PackageList = []\r | |
bf1ea933 FZ |
487 | PackageList.extend(self.PackageList)\r |
488 | for M in self.DependentLibraryList:\r | |
e8449e1d FB |
489 | for Package in M.Packages:\r |
490 | if Package in PackageList:\r | |
491 | continue\r | |
492 | PackageList.append(Package)\r | |
493 | return PackageList\r | |
494 | \r | |
    ## Get the depex string
    #
    # Collects [Depex] sections from this module and its dependent libraries
    # and merges them into one INF-style section string for the As-Built INF.
    #
    # @return : a string contain all depex expression.
    def _GetDepexExpresionString(self):
        DepexStr = ''
        DepexList = []
        ## DPX_SOURCE IN Define section.
        if self.Module.DxsFile:
            # Depex comes from a DXS file instead; emit nothing here
            return DepexStr
        for M in [self.Module] + self.DependentLibraryList:
            Filename = M.MetaFile.Path
            InfObj = InfSectionParser.InfSectionParser(Filename)
            DepexExpressionList = InfObj.GetDepexExpresionList()
            for DepexExpression in DepexExpressionList:
                for key in DepexExpression:
                    Arch, ModuleType = key
                    # Drop comment lines from the raw section content
                    DepexExpr = [x for x in DepexExpression[key] if not str(x).startswith('#')]
                    # the type of build module is USER_DEFINED.
                    # All different DEPEX section tags would be copied into the As Built INF file
                    # and there would be separate DEPEX section tags
                    if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED or self.ModuleType.upper() == SUP_MODULE_HOST_APPLICATION:
                        if (Arch.upper() == self.Arch.upper()) and (ModuleType.upper() != TAB_ARCH_COMMON):
                            DepexList.append({(Arch, ModuleType): DepexExpr})
                    else:
                        if Arch.upper() == TAB_ARCH_COMMON or \
                           (Arch.upper() == self.Arch.upper() and \
                           ModuleType.upper() in [TAB_ARCH_COMMON, self.ModuleType.upper()]):
                            DepexList.append({(Arch, ModuleType): DepexExpr})

        #the type of build module is USER_DEFINED: keep each section separate, commented out.
        if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED or self.ModuleType.upper() == SUP_MODULE_HOST_APPLICATION:
            for Depex in DepexList:
                for key in Depex:
                    DepexStr += '[Depex.%s.%s]\n' % key
                    DepexStr += '\n'.join('# '+ val for val in Depex[key])
                    DepexStr += '\n\n'
            if not DepexStr:
                return '[Depex.%s]\n' % self.Arch
            return DepexStr

        #the type of build module not is USER_DEFINED: AND all expressions together.
        Count = 0
        for Depex in DepexList:
            Count += 1
            if DepexStr != '':
                DepexStr += ' AND '
            DepexStr += '('
            for D in Depex.values():
                DepexStr += ' '.join(val for val in D)
            # Strip a trailing 'END' keyword (only when it is exactly the last 3 chars)
            Index = DepexStr.find('END')
            if Index > -1 and Index == len(DepexStr) - 3:
                DepexStr = DepexStr[:-3]
            DepexStr = DepexStr.strip()
            DepexStr += ')'
        if Count == 1:
            # A single group needs no enclosing parentheses
            DepexStr = DepexStr.lstrip('(').rstrip(')').strip()
        if not DepexStr:
            return '[Depex.%s]\n' % self.Arch
        return '[Depex.%s]\n# ' % self.Arch + DepexStr
554 | \r | |
    ## Merge dependency expression
    #
    # @retval list The token list of the dependency expression after parsed
    #
    @cached_property
    def DepexList(self):
        # Libraries and modules with a .dxs/.depex source don't get a merged depex
        if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
            return {}

        DepexList = []
        #
        # Append depex from dependent libraries, if not "BEFORE", "AFTER" expression
        #
        # Gather FixedAtBuild VOID* PCDs from self and all libraries so PCD
        # names inside a depex can be replaced by their GUID byte values.
        FixedVoidTypePcds = {}
        for M in [self] + self.LibraryAutoGenList:
            FixedVoidTypePcds.update(M.FixedVoidTypePcds)
        for M in [self] + self.LibraryAutoGenList:
            Inherited = False
            for D in M.Module.Depex[self.Arch, self.ModuleType]:
                if DepexList != []:
                    DepexList.append('AND')
                DepexList.append('(')
                #replace D with value if D is FixedAtBuild PCD
                NewList = []
                for item in D:
                    # A '.' marks a TokenSpaceGuid.TokenName PCD reference
                    if '.' not in item:
                        NewList.append(item)
                    else:
                        try:
                            Value = FixedVoidTypePcds[item]
                            # A GUID value must be exactly 16 bytes
                            if len(Value.split(',')) != 16:
                                EdkLogger.error("build", FORMAT_INVALID,
                                                "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type and 16 bytes in the module.".format(item))
                            NewList.append(Value)
                        except:
                            # NOTE(review): bare except — also triggers on missing
                            # key (KeyError); EdkLogger.error raises, ending the build
                            EdkLogger.error("build", FORMAT_INVALID, "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type in the module.".format(item))

                DepexList.extend(NewList)
                if DepexList[-1] == 'END': # no need of a END at this time
                    DepexList.pop()
                DepexList.append(')')
                Inherited = True
            if Inherited:
                EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.Module.BaseName, DepexList))
            # BEFORE/AFTER expressions stand alone; stop merging once one appears
            if 'BEFORE' in DepexList or 'AFTER' in DepexList:
                break
        if len(DepexList) > 0:
            EdkLogger.verbose('')
        return {self.ModuleType:DepexList}
604 | \r | |
605 | ## Merge dependency expression\r | |
606 | #\r | |
607 | # @retval list The token list of the dependency expression after parsed\r | |
608 | #\r | |
609 | @cached_property\r | |
610 | def DepexExpressionDict(self):\r | |
611 | if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:\r | |
612 | return {}\r | |
613 | \r | |
614 | DepexExpressionString = ''\r | |
615 | #\r | |
616 | # Append depex from dependent libraries, if not "BEFORE", "AFTER" expresion\r | |
617 | #\r | |
618 | for M in [self.Module] + self.DependentLibraryList:\r | |
619 | Inherited = False\r | |
620 | for D in M.DepexExpression[self.Arch, self.ModuleType]:\r | |
621 | if DepexExpressionString != '':\r | |
622 | DepexExpressionString += ' AND '\r | |
623 | DepexExpressionString += '('\r | |
624 | DepexExpressionString += D\r | |
625 | DepexExpressionString = DepexExpressionString.rstrip('END').strip()\r | |
626 | DepexExpressionString += ')'\r | |
627 | Inherited = True\r | |
628 | if Inherited:\r | |
629 | EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.BaseName, DepexExpressionString))\r | |
630 | if 'BEFORE' in DepexExpressionString or 'AFTER' in DepexExpressionString:\r | |
631 | break\r | |
632 | if len(DepexExpressionString) > 0:\r | |
633 | EdkLogger.verbose('')\r | |
634 | \r | |
635 | return {self.ModuleType:DepexExpressionString}\r | |
636 | \r | |
    # Get the TianoCore user extension sections declared by this module and its
    # dependent libraries, filtered to the current arch.
    # @retval: a list of section-content lists (section header line first).
    #
    def _GetTianoCoreUserExtensionList(self):
        TianoCoreUserExtentionList = []
        for M in [self.Module] + self.DependentLibraryList:
            Filename = M.MetaFile.Path
            InfObj = InfSectionParser.InfSectionParser(Filename)
            TianoCoreUserExtenList = InfObj.GetUserExtensionTianoCore()
            for TianoCoreUserExtent in TianoCoreUserExtenList:
                for Section in TianoCoreUserExtent:
                    # Section tag: UserExtensions.TianoCore."..."[.<arch>];
                    # a 4th dotted item is the arch qualifier
                    ItemList = Section.split(TAB_SPLIT)
                    Arch = self.Arch
                    if len(ItemList) == 4:
                        Arch = ItemList[3]
                    if Arch.upper() == TAB_ARCH_COMMON or Arch.upper() == self.Arch.upper():
                        TianoCoreList = []
                        TianoCoreList.extend([TAB_SECTION_START + Section + TAB_SECTION_END])
                        TianoCoreList.extend(TianoCoreUserExtent[Section][:])
                        TianoCoreList.append('\n')
                        TianoCoreUserExtentionList.append(TianoCoreList)

        return TianoCoreUserExtentionList
660 | \r | |
    ## Return the list of specification version required for the module
    #
    #   @retval     list    The list of specification defined in module file
    #
    @cached_property
    def Specification(self):
        # Straight delegation to the module build data object
        return self.Module.Specification
668 | \r | |
669 | ## Tool option for the module build\r | |
670 | #\r | |
671 | # @param PlatformInfo The object of PlatformBuildInfo\r | |
672 | # @retval dict The dict containing valid options\r | |
673 | #\r | |
674 | @cached_property\r | |
675 | def BuildOption(self):\r | |
676 | RetVal, self.BuildRuleOrder = self.PlatformInfo.ApplyBuildOption(self.Module)\r | |
677 | if self.BuildRuleOrder:\r | |
678 | self.BuildRuleOrder = ['.%s' % Ext for Ext in self.BuildRuleOrder.split()]\r | |
679 | return RetVal\r | |
680 | \r | |
681 | ## Get include path list from tool option for the module build\r | |
682 | #\r | |
683 | # @retval list The include path list\r | |
684 | #\r | |
685 | @cached_property\r | |
686 | def BuildOptionIncPathList(self):\r | |
687 | #\r | |
688 | # Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT\r | |
689 | # is the former use /I , the Latter used -I to specify include directories\r | |
690 | #\r | |
691 | if self.PlatformInfo.ToolChainFamily in (TAB_COMPILER_MSFT):\r | |
692 | BuildOptIncludeRegEx = gBuildOptIncludePatternMsft\r | |
693 | elif self.PlatformInfo.ToolChainFamily in ('INTEL', 'GCC', 'RVCT'):\r | |
694 | BuildOptIncludeRegEx = gBuildOptIncludePatternOther\r | |
695 | else:\r | |
696 | #\r | |
697 | # New ToolChainFamily, don't known whether there is option to specify include directories\r | |
698 | #\r | |
699 | return []\r | |
700 | \r | |
701 | RetVal = []\r | |
702 | for Tool in ('CC', 'PP', 'VFRPP', 'ASLPP', 'ASLCC', 'APP', 'ASM'):\r | |
703 | try:\r | |
704 | FlagOption = self.BuildOption[Tool]['FLAGS']\r | |
705 | except KeyError:\r | |
706 | FlagOption = ''\r | |
707 | \r | |
708 | if self.ToolChainFamily != 'RVCT':\r | |
709 | IncPathList = [NormPath(Path, self.Macros) for Path in BuildOptIncludeRegEx.findall(FlagOption)]\r | |
710 | else:\r | |
711 | #\r | |
712 | # RVCT may specify a list of directory seperated by commas\r | |
713 | #\r | |
714 | IncPathList = []\r | |
715 | for Path in BuildOptIncludeRegEx.findall(FlagOption):\r | |
716 | PathList = GetSplitList(Path, TAB_COMMA_SPLIT)\r | |
717 | IncPathList.extend(NormPath(PathEntry, self.Macros) for PathEntry in PathList)\r | |
718 | \r | |
719 | #\r | |
720 | # EDK II modules must not reference header files outside of the packages they depend on or\r | |
721 | # within the module's directory tree. Report error if violation.\r | |
722 | #\r | |
723 | if GlobalData.gDisableIncludePathCheck == False:\r | |
724 | for Path in IncPathList:\r | |
725 | if (Path not in self.IncludePathList) and (CommonPath([Path, self.MetaFile.Dir]) != self.MetaFile.Dir):\r | |
726 | ErrMsg = "The include directory for the EDK II module in this line is invalid %s specified in %s FLAGS '%s'" % (Path, Tool, FlagOption)\r | |
727 | EdkLogger.error("build",\r | |
728 | PARAMETER_INVALID,\r | |
729 | ExtraData=ErrMsg,\r | |
730 | File=str(self.MetaFile))\r | |
731 | RetVal += IncPathList\r | |
732 | return RetVal\r | |
733 | \r | |
    ## Return a list of files which can be built from source
    #
    # What kind of files can be built is determined by build rules in
    # $(CONF_DIRECTORY)/build_rule.txt and toolchain family.
    #
    @cached_property
    def SourceFileList(self):
        RetVal = []
        # A source entry matches when its tag/family is empty, '*', or the current one
        ToolChainTagSet = {"", TAB_STAR, self.ToolChain}
        ToolChainFamilySet = {"", TAB_STAR, self.ToolChainFamily, self.BuildRuleFamily}
        for F in self.Module.Sources:
            # match tool chain
            if F.TagName not in ToolChainTagSet:
                EdkLogger.debug(EdkLogger.DEBUG_9, "The toolchain [%s] for processing file [%s] is found, "
                                "but [%s] is currently used" % (F.TagName, str(F), self.ToolChain))
                continue
            # match tool chain family or build rule family
            if F.ToolChainFamily not in ToolChainFamilySet:
                EdkLogger.debug(
                        EdkLogger.DEBUG_0,
                        "The file [%s] must be built by tools of [%s], " \
                        "but current toolchain family is [%s], buildrule family is [%s]" \
                            % (str(F), F.ToolChainFamily, self.ToolChainFamily, self.BuildRuleFamily))
                continue

            # add the file path into search path list for file including
            # (side effect: mutates self.IncludePathList, front-inserted so the
            # source's own directory wins)
            if F.Dir not in self.IncludePathList:
                self.IncludePathList.insert(0, F.Dir)
            RetVal.append(F)

        # Enforce BuildRuleOrder preference among same-stem sources
        self._MatchBuildRuleOrder(RetVal)

        for F in RetVal:
            self._ApplyBuildRule(F, TAB_UNKNOWN_FILE)
        return RetVal
769 | \r | |
770 | def _MatchBuildRuleOrder(self, FileList):\r | |
771 | Order_Dict = {}\r | |
772 | self.BuildOption\r | |
773 | for SingleFile in FileList:\r | |
774 | if self.BuildRuleOrder and SingleFile.Ext in self.BuildRuleOrder and SingleFile.Ext in self.BuildRules:\r | |
775 | key = SingleFile.Path.rsplit(SingleFile.Ext,1)[0]\r | |
776 | if key in Order_Dict:\r | |
777 | Order_Dict[key].append(SingleFile.Ext)\r | |
778 | else:\r | |
779 | Order_Dict[key] = [SingleFile.Ext]\r | |
780 | \r | |
781 | RemoveList = []\r | |
782 | for F in Order_Dict:\r | |
783 | if len(Order_Dict[F]) > 1:\r | |
784 | Order_Dict[F].sort(key=lambda i: self.BuildRuleOrder.index(i))\r | |
785 | for Ext in Order_Dict[F][1:]:\r | |
786 | RemoveList.append(F + Ext)\r | |
787 | \r | |
788 | for item in RemoveList:\r | |
789 | FileList.remove(item)\r | |
790 | \r | |
791 | return FileList\r | |
792 | \r | |
793 | ## Return the list of unicode files\r | |
794 | @cached_property\r | |
795 | def UnicodeFileList(self):\r | |
796 | return self.FileTypes.get(TAB_UNICODE_FILE,[])\r | |
797 | \r | |
798 | ## Return the list of vfr files\r | |
799 | @cached_property\r | |
800 | def VfrFileList(self):\r | |
801 | return self.FileTypes.get(TAB_VFR_FILE, [])\r | |
802 | \r | |
803 | ## Return the list of Image Definition files\r | |
804 | @cached_property\r | |
805 | def IdfFileList(self):\r | |
806 | return self.FileTypes.get(TAB_IMAGE_FILE,[])\r | |
807 | \r | |
808 | ## Return a list of files which can be built from binary\r | |
809 | #\r | |
810 | # "Build" binary files are just to copy them to build directory.\r | |
811 | #\r | |
812 | # @retval list The list of files which can be built later\r | |
813 | #\r | |
814 | @cached_property\r | |
815 | def BinaryFileList(self):\r | |
816 | RetVal = []\r | |
817 | for F in self.Module.Binaries:\r | |
818 | if F.Target not in [TAB_ARCH_COMMON, TAB_STAR] and F.Target != self.BuildTarget:\r | |
819 | continue\r | |
820 | RetVal.append(F)\r | |
821 | self._ApplyBuildRule(F, F.Type, BinaryFileList=RetVal)\r | |
822 | return RetVal\r | |
823 | \r | |
824 | @cached_property\r | |
825 | def BuildRules(self):\r | |
826 | RetVal = {}\r | |
827 | BuildRuleDatabase = self.PlatformInfo.BuildRule\r | |
828 | for Type in BuildRuleDatabase.FileTypeList:\r | |
829 | #first try getting build rule by BuildRuleFamily\r | |
830 | RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.BuildRuleFamily]\r | |
831 | if not RuleObject:\r | |
832 | # build type is always module type, but ...\r | |
833 | if self.ModuleType != self.BuildType:\r | |
834 | RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.BuildRuleFamily]\r | |
835 | #second try getting build rule by ToolChainFamily\r | |
836 | if not RuleObject:\r | |
837 | RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.ToolChainFamily]\r | |
838 | if not RuleObject:\r | |
839 | # build type is always module type, but ...\r | |
840 | if self.ModuleType != self.BuildType:\r | |
841 | RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.ToolChainFamily]\r | |
842 | if not RuleObject:\r | |
843 | continue\r | |
844 | RuleObject = RuleObject.Instantiate(self.Macros)\r | |
845 | RetVal[Type] = RuleObject\r | |
846 | for Ext in RuleObject.SourceFileExtList:\r | |
847 | RetVal[Ext] = RuleObject\r | |
848 | return RetVal\r | |
849 | \r | |
    ## Apply the build-rule chain to one source or binary file.
    #
    #  Starting from File, repeatedly applies the matching build rule to each
    #  produced output (worklist in SourceList) until no rule applies.
    #  Intermediate targets are recorded in _BuildTargets (and, for the
    #  original source, _IntroBuildTargetList); sources with no further rule
    #  become final targets collected into _FinalBuildTargetList.
    #
    #   @param  File            The file to apply build rules to
    #   @param  FileType        Initial file type; TAB_UNKNOWN_FILE lets the
    #                           file extension select the rule
    #   @param  BinaryFileList  Optional binary file list; defaults to
    #                           self.BinaryFileList when falsy
    #
    def _ApplyBuildRule(self, File, FileType, BinaryFileList=None):
        # Lazily initialize the target bookkeeping shared with Targets.
        if self._BuildTargets is None:
            self._IntroBuildTargetList = set()
            self._FinalBuildTargetList = set()
            self._BuildTargets = defaultdict(set)
            self._FileTypes = defaultdict(set)

        if not BinaryFileList:
            BinaryFileList = self.BinaryFileList

        # Ensure the output subdirectory for this file exists.
        SubDirectory = os.path.join(self.OutputDir, File.SubDir)
        if not os.path.exists(SubDirectory):
            CreateDirectory(SubDirectory)
        TargetList = set()
        FinalTargetName = set()
        RuleChain = set()
        SourceList = [File]
        Index = 0
        #
        # Make sure to get build rule order value
        #
        self.BuildOption

        while Index < len(SourceList):
            # Reset the FileType if not the first iteration.
            if Index > 0:
                FileType = TAB_UNKNOWN_FILE
            Source = SourceList[Index]
            Index = Index + 1

            # Outputs of earlier rules may live in new directories.
            if Source != File:
                CreateDirectory(Source.Dir)

            if File.IsBinary and File == Source and File in BinaryFileList:
                # Skip all files that are not binary libraries
                if not self.IsLibrary:
                    continue
                RuleObject = self.BuildRules[TAB_DEFAULT_BINARY_FILE]
            elif FileType in self.BuildRules:
                RuleObject = self.BuildRules[FileType]
            elif Source.Ext in self.BuildRules:
                RuleObject = self.BuildRules[Source.Ext]
            else:
                # No more rule to apply: Source is a final target.
                FinalTargetName.add(Source)
                continue

            FileType = RuleObject.SourceFileType
            self._FileTypes[FileType].add(Source)

            # stop at STATIC_LIBRARY for library
            if self.IsLibrary and FileType == TAB_STATIC_LIBRARY:
                FinalTargetName.add(Source)
                continue

            Target = RuleObject.Apply(Source, self.BuildRuleOrder)
            if not Target:
                # No Target: Source is a final target.
                FinalTargetName.add(Source)
                continue

            TargetList.add(Target)
            self._BuildTargets[FileType].add(Target)

            # Only targets made directly from the original source count as
            # "intro" targets.
            if not Source.IsBinary and Source == File:
                self._IntroBuildTargetList.add(Target)

            # to avoid cyclic rule
            if FileType in RuleChain:
                EdkLogger.error("build", ERROR_STATEMENT, "Cyclic dependency detected while generating rule for %s" % str(Source))

            RuleChain.add(FileType)
            # Outputs of this rule are queued for further rule application.
            SourceList.extend(Target.Outputs)

        # For each final target name, retrieve the corresponding TargetDescBlock instance.
        for FTargetName in FinalTargetName:
            for Target in TargetList:
                if FTargetName == Target.Target:
                    self._FinalBuildTargetList.add(Target)
929 | \r |
    ## Mapping of file type -> set of build targets for this module.
    #
    #  Evaluates SourceFileList and BinaryFileList purely for their side
    #  effect of applying build rules, which populate _BuildTargets.
    #
    @cached_property
    def Targets(self):
        # Lazily initialize the bookkeeping when no build rule ran yet.
        if self._BuildTargets is None:
            self._IntroBuildTargetList = set()
            self._FinalBuildTargetList = set()
            self._BuildTargets = defaultdict(set)
            self._FileTypes = defaultdict(set)

        #TRICK: call SourceFileList property to apply build rule for source files
        self.SourceFileList

        #TRICK: call _GetBinaryFileList to apply build rule for binary files
        self.BinaryFileList

        return self._BuildTargets
945 | \r | |
946 | @cached_property\r | |
947 | def IntroTargetList(self):\r | |
948 | self.Targets\r | |
949 | return self._IntroBuildTargetList\r | |
950 | \r | |
951 | @cached_property\r | |
952 | def CodaTargetList(self):\r | |
953 | self.Targets\r | |
954 | return self._FinalBuildTargetList\r | |
955 | \r | |
956 | @cached_property\r | |
957 | def FileTypes(self):\r | |
958 | self.Targets\r | |
959 | return self._FileTypes\r | |
960 | \r | |
    ## Get the list of package object the module depends on and the Platform depends on
    #
    #   @retval     list    The package object list
    #
    @cached_property
    def DependentPackageList(self):
        # The merged module + platform package dependencies are already
        # maintained in PackageList; simply expose that list here.
        return self.PackageList
e8449e1d FB |
968 | \r |
    ## Return the list of auto-generated code file
    #
    #   @retval     list        The list of auto-generated file
    #
    @cached_property
    def AutoGenFileList(self):
        # UNI/IDF content is generated inline unless this is a UEFI_HII build.
        AutoGenUniIdf = self.BuildType != 'UEFI_HII'
        UniStringBinBuffer = BytesIO()
        IdfGenBinBuffer = BytesIO()
        RetVal = {}
        AutoGenC = TemplateString()
        AutoGenH = TemplateString()
        StringH = TemplateString()
        StringIdf = TemplateString()
        # Fills the template buffers above with the generated code/data.
        GenC.CreateCode(self, AutoGenC, AutoGenH, StringH, AutoGenUniIdf, UniStringBinBuffer, StringIdf, AutoGenUniIdf, IdfGenBinBuffer)
        #
        # AutoGen.c is generated if there are library classes in inf, or there are object files
        #
        if str(AutoGenC) != "" and (len(self.Module.LibraryClasses) > 0
                                    or TAB_OBJECT_FILE in self.FileTypes):
            AutoFile = PathClass(gAutoGenCodeFileName, self.DebugDir)
            RetVal[AutoFile] = str(AutoGenC)
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if str(AutoGenH) != "":
            AutoFile = PathClass(gAutoGenHeaderFileName, self.DebugDir)
            RetVal[AutoFile] = str(AutoGenH)
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if str(StringH) != "":
            AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)
            RetVal[AutoFile] = str(StringH)
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        # Binary UNI string package, if any content was produced.
        if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != b"":
            AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir)
            RetVal[AutoFile] = UniStringBinBuffer.getvalue()
            AutoFile.IsBinary = True
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if UniStringBinBuffer is not None:
            UniStringBinBuffer.close()
        if str(StringIdf) != "":
            AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir)
            RetVal[AutoFile] = str(StringIdf)
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        # Binary image-definition data, if any content was produced.
        if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != b"":
            AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir)
            RetVal[AutoFile] = IdfGenBinBuffer.getvalue()
            AutoFile.IsBinary = True
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if IdfGenBinBuffer is not None:
            IdfGenBinBuffer.close()
        return RetVal
1019 | \r | |
1020 | ## Return the list of library modules explicitly or implicitly used by this module\r | |
1021 | @cached_property\r | |
1022 | def DependentLibraryList(self):\r | |
1023 | # only merge library classes and PCD for non-library module\r | |
1024 | if self.IsLibrary:\r | |
1025 | return []\r | |
1026 | return self.PlatformInfo.ApplyLibraryInstance(self.Module)\r | |
1027 | \r | |
1028 | ## Get the list of PCDs from current module\r | |
1029 | #\r | |
1030 | # @retval list The list of PCD\r | |
1031 | #\r | |
1032 | @cached_property\r | |
1033 | def ModulePcdList(self):\r | |
1034 | # apply PCD settings from platform\r | |
1035 | RetVal = self.PlatformInfo.ApplyPcdSetting(self.Module, self.Module.Pcds)\r | |
1036 | \r | |
1037 | return RetVal\r | |
1038 | @cached_property\r | |
1039 | def _PcdComments(self):\r | |
1040 | ReVal = OrderedListDict()\r | |
1041 | ExtendCopyDictionaryLists(ReVal, self.Module.PcdComments)\r | |
1042 | if not self.IsLibrary:\r | |
1043 | for Library in self.DependentLibraryList:\r | |
1044 | ExtendCopyDictionaryLists(ReVal, Library.PcdComments)\r | |
1045 | return ReVal\r | |
1046 | \r | |
1047 | ## Get the list of PCDs from dependent libraries\r | |
1048 | #\r | |
1049 | # @retval list The list of PCD\r | |
1050 | #\r | |
1051 | @cached_property\r | |
1052 | def LibraryPcdList(self):\r | |
1053 | if self.IsLibrary:\r | |
1054 | return []\r | |
1055 | RetVal = []\r | |
1056 | Pcds = set()\r | |
1057 | # get PCDs from dependent libraries\r | |
1058 | for Library in self.DependentLibraryList:\r | |
1059 | PcdsInLibrary = OrderedDict()\r | |
1060 | for Key in Library.Pcds:\r | |
1061 | # skip duplicated PCDs\r | |
1062 | if Key in self.Module.Pcds or Key in Pcds:\r | |
1063 | continue\r | |
1064 | Pcds.add(Key)\r | |
1065 | PcdsInLibrary[Key] = copy.copy(Library.Pcds[Key])\r | |
1066 | RetVal.extend(self.PlatformInfo.ApplyPcdSetting(self.Module, PcdsInLibrary, Library=Library))\r | |
1067 | return RetVal\r | |
1068 | \r | |
    ## Get the GUID value mapping
    #
    #   @retval     dict    The mapping between GUID cname and its value
    #
    @cached_property
    def GuidList(self):
        # NOTE(review): RetVal aliases self.Module.Guids, so the update()
        # calls below merge library GUIDs into the module's own Guids dict
        # in place (unlike ProtocolList/PpiList, which copy into a new
        # OrderedDict first). Confirm whether callers rely on this in-place
        # merge before changing it.
        RetVal = self.Module.Guids
        for Library in self.DependentLibraryList:
            RetVal.update(Library.Guids)
            ExtendCopyDictionaryLists(self._GuidComments, Library.GuidComments)
        ExtendCopyDictionaryLists(self._GuidComments, self.Module.GuidComments)
        return RetVal
1081 | \r | |
1082 | @cached_property\r | |
1083 | def GetGuidsUsedByPcd(self):\r | |
1084 | RetVal = OrderedDict(self.Module.GetGuidsUsedByPcd())\r | |
1085 | for Library in self.DependentLibraryList:\r | |
1086 | RetVal.update(Library.GetGuidsUsedByPcd())\r | |
1087 | return RetVal\r | |
1088 | ## Get the protocol value mapping\r | |
1089 | #\r | |
1090 | # @retval dict The mapping between protocol cname and its value\r | |
1091 | #\r | |
1092 | @cached_property\r | |
1093 | def ProtocolList(self):\r | |
1094 | RetVal = OrderedDict(self.Module.Protocols)\r | |
1095 | for Library in self.DependentLibraryList:\r | |
1096 | RetVal.update(Library.Protocols)\r | |
1097 | ExtendCopyDictionaryLists(self._ProtocolComments, Library.ProtocolComments)\r | |
1098 | ExtendCopyDictionaryLists(self._ProtocolComments, self.Module.ProtocolComments)\r | |
1099 | return RetVal\r | |
1100 | \r | |
1101 | ## Get the PPI value mapping\r | |
1102 | #\r | |
1103 | # @retval dict The mapping between PPI cname and its value\r | |
1104 | #\r | |
1105 | @cached_property\r | |
1106 | def PpiList(self):\r | |
1107 | RetVal = OrderedDict(self.Module.Ppis)\r | |
1108 | for Library in self.DependentLibraryList:\r | |
1109 | RetVal.update(Library.Ppis)\r | |
1110 | ExtendCopyDictionaryLists(self._PpiComments, Library.PpiComments)\r | |
1111 | ExtendCopyDictionaryLists(self._PpiComments, self.Module.PpiComments)\r | |
1112 | return RetVal\r | |
1113 | \r | |
    ## Get the list of include search path
    #
    #   @retval     list                    The list path
    #
    @cached_property
    def IncludePathList(self):
        RetVal = []
        # The module's own directory and its debug (autogen) directory come
        # first so local headers win over package headers.
        RetVal.append(self.MetaFile.Dir)
        RetVal.append(self.DebugDir)

        for Package in self.PackageList:
            PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)
            if PackageDir not in RetVal:
                RetVal.append(PackageDir)
            IncludesList = Package.Includes
            if Package._PrivateIncludes:
                # Private includes are visible only to modules located inside
                # the package's own directory tree.
                if not self.MetaFile.OriginalPath.Path.startswith(PackageDir):
                    IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))
            for Inc in IncludesList:
                if Inc not in RetVal:
                    RetVal.append(str(Inc))
        # Also honor /I and -I directories passed directly via build options.
        RetVal.extend(self.IncPathFromBuildOptions)
        return RetVal
1137 | \r | |
0c3e8e99 BF |
    ## Include directories passed directly in tool FLAGS via /I or -I.
    #
    #  Handles both the attached form (-Ifoo) and the detached form (-I foo);
    #  only paths that exist on disk are returned.
    #
    #   @retval list    Existing include paths found in build-option FLAGS
    #
    @cached_property
    def IncPathFromBuildOptions(self):
        IncPathList = []
        for tool in self.BuildOption:
            if 'FLAGS' in self.BuildOption[tool]:
                flags = self.BuildOption[tool]['FLAGS']
                # 'whitespace' means: the previous token was a bare /I or -I,
                # so the next non-empty token is expected to be the path.
                whitespace = False
                for flag in flags.split(" "):
                    flag = flag.strip()
                    if flag.startswith(("/I","-I")):
                        if len(flag)>2:
                            # Attached form: path follows the switch directly.
                            if os.path.exists(flag[2:]):
                                IncPathList.append(flag[2:])
                        else:
                            whitespace = True
                            continue
                    if whitespace and flag:
                        # Detached form: this token is the path.
                        # NOTE(review): indentation reconstructed from a blame
                        # dump; confirm 'whitespace = False' sits at this level
                        # (i.e. the flag is consumed whether or not it exists).
                        if os.path.exists(flag):
                            IncPathList.append(flag)
                        whitespace = False
        return IncPathList
1159 | \r | |
e8449e1d FB |
1160 | @cached_property\r |
1161 | def IncludePathLength(self):\r | |
1162 | return sum(len(inc)+1 for inc in self.IncludePathList)\r | |
1163 | \r | |
82407bd1 RC |
    ## Get the list of include paths from the packages
    #
    #   @IncludesList     list             The list path
    #
    @cached_property
    def PackageIncludePathList(self):
        IncludesList = []
        # NOTE(review): IncludesList is re-bound on every loop iteration, so
        # only the LAST package's includes are returned and earlier packages
        # are dropped. Confirm whether accumulation across packages was
        # intended here before relying on this property.
        for Package in self.PackageList:
            PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)
            IncludesList = Package.Includes
            if Package._PrivateIncludes:
                # Private includes are visible only to modules located inside
                # the package's own directory tree.
                if not self.MetaFile.Path.startswith(PackageDir):
                    IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))
        return IncludesList
1178 | \r | |
e8449e1d FB |
1179 | ## Get HII EX PCDs which maybe used by VFR\r |
1180 | #\r | |
1181 | # efivarstore used by VFR may relate with HII EX PCDs\r | |
1182 | # Get the variable name and GUID from efivarstore and HII EX PCD\r | |
1183 | # List the HII EX PCDs in As Built INF if both name and GUID match.\r | |
1184 | #\r | |
1185 | # @retval list HII EX PCDs\r | |
1186 | #\r | |
1187 | def _GetPcdsMaybeUsedByVfr(self):\r | |
1188 | if not self.SourceFileList:\r | |
1189 | return []\r | |
1190 | \r | |
1191 | NameGuids = set()\r | |
1192 | for SrcFile in self.SourceFileList:\r | |
1193 | if SrcFile.Ext.lower() != '.vfr':\r | |
1194 | continue\r | |
1195 | Vfri = os.path.join(self.OutputDir, SrcFile.BaseName + '.i')\r | |
1196 | if not os.path.exists(Vfri):\r | |
1197 | continue\r | |
1198 | VfriFile = open(Vfri, 'r')\r | |
1199 | Content = VfriFile.read()\r | |
1200 | VfriFile.close()\r | |
1201 | Pos = Content.find('efivarstore')\r | |
1202 | while Pos != -1:\r | |
1203 | #\r | |
1204 | # Make sure 'efivarstore' is the start of efivarstore statement\r | |
1205 | # In case of the value of 'name' (name = efivarstore) is equal to 'efivarstore'\r | |
1206 | #\r | |
1207 | Index = Pos - 1\r | |
1208 | while Index >= 0 and Content[Index] in ' \t\r\n':\r | |
1209 | Index -= 1\r | |
1210 | if Index >= 0 and Content[Index] != ';':\r | |
1211 | Pos = Content.find('efivarstore', Pos + len('efivarstore'))\r | |
1212 | continue\r | |
1213 | #\r | |
1214 | # 'efivarstore' must be followed by name and guid\r | |
1215 | #\r | |
1216 | Name = gEfiVarStoreNamePattern.search(Content, Pos)\r | |
1217 | if not Name:\r | |
1218 | break\r | |
1219 | Guid = gEfiVarStoreGuidPattern.search(Content, Pos)\r | |
1220 | if not Guid:\r | |
1221 | break\r | |
1222 | NameArray = _ConvertStringToByteArray('L"' + Name.group(1) + '"')\r | |
1223 | NameGuids.add((NameArray, GuidStructureStringToGuidString(Guid.group(1))))\r | |
1224 | Pos = Content.find('efivarstore', Name.end())\r | |
1225 | if not NameGuids:\r | |
1226 | return []\r | |
1227 | HiiExPcds = []\r | |
1228 | for Pcd in self.PlatformInfo.Pcds.values():\r | |
1229 | if Pcd.Type != TAB_PCDS_DYNAMIC_EX_HII:\r | |
1230 | continue\r | |
1231 | for SkuInfo in Pcd.SkuInfoList.values():\r | |
1232 | Value = GuidValue(SkuInfo.VariableGuid, self.PlatformInfo.PackageList, self.MetaFile.Path)\r | |
1233 | if not Value:\r | |
1234 | continue\r | |
1235 | Name = _ConvertStringToByteArray(SkuInfo.VariableName)\r | |
1236 | Guid = GuidStructureStringToGuidString(Value)\r | |
1237 | if (Name, Guid) in NameGuids and Pcd not in HiiExPcds:\r | |
1238 | HiiExPcds.append(Pcd)\r | |
1239 | break\r | |
1240 | \r | |
1241 | return HiiExPcds\r | |
1242 | \r | |
1243 | def _GenOffsetBin(self):\r | |
1244 | VfrUniBaseName = {}\r | |
1245 | for SourceFile in self.Module.Sources:\r | |
1246 | if SourceFile.Type.upper() == ".VFR" :\r | |
1247 | #\r | |
1248 | # search the .map file to find the offset of vfr binary in the PE32+/TE file.\r | |
1249 | #\r | |
1250 | VfrUniBaseName[SourceFile.BaseName] = (SourceFile.BaseName + "Bin")\r | |
1251 | elif SourceFile.Type.upper() == ".UNI" :\r | |
1252 | #\r | |
1253 | # search the .map file to find the offset of Uni strings binary in the PE32+/TE file.\r | |
1254 | #\r | |
1255 | VfrUniBaseName["UniOffsetName"] = (self.Name + "Strings")\r | |
1256 | \r | |
1257 | if not VfrUniBaseName:\r | |
1258 | return None\r | |
1259 | MapFileName = os.path.join(self.OutputDir, self.Name + ".map")\r | |
1260 | EfiFileName = os.path.join(self.OutputDir, self.Name + ".efi")\r | |
1261 | VfrUniOffsetList = GetVariableOffset(MapFileName, EfiFileName, list(VfrUniBaseName.values()))\r | |
1262 | if not VfrUniOffsetList:\r | |
1263 | return None\r | |
1264 | \r | |
1265 | OutputName = '%sOffset.bin' % self.Name\r | |
1266 | UniVfrOffsetFileName = os.path.join( self.OutputDir, OutputName)\r | |
1267 | \r | |
1268 | try:\r | |
1269 | fInputfile = open(UniVfrOffsetFileName, "wb+", 0)\r | |
1270 | except:\r | |
1271 | EdkLogger.error("build", FILE_OPEN_FAILURE, "File open failed for %s" % UniVfrOffsetFileName, None)\r | |
1272 | \r | |
1273 | # Use a instance of BytesIO to cache data\r | |
1274 | fStringIO = BytesIO()\r | |
1275 | \r | |
1276 | for Item in VfrUniOffsetList:\r | |
1277 | if (Item[0].find("Strings") != -1):\r | |
1278 | #\r | |
1279 | # UNI offset in image.\r | |
1280 | # GUID + Offset\r | |
1281 | # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }\r | |
1282 | #\r | |
1283 | UniGuid = b'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'\r | |
1284 | fStringIO.write(UniGuid)\r | |
1285 | UniValue = pack ('Q', int (Item[1], 16))\r | |
1286 | fStringIO.write (UniValue)\r | |
1287 | else:\r | |
1288 | #\r | |
1289 | # VFR binary offset in image.\r | |
1290 | # GUID + Offset\r | |
1291 | # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };\r | |
1292 | #\r | |
1293 | VfrGuid = b'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'\r | |
1294 | fStringIO.write(VfrGuid)\r | |
1295 | VfrValue = pack ('Q', int (Item[1], 16))\r | |
1296 | fStringIO.write (VfrValue)\r | |
1297 | #\r | |
1298 | # write data into file.\r | |
1299 | #\r | |
1300 | try :\r | |
1301 | fInputfile.write (fStringIO.getvalue())\r | |
1302 | except:\r | |
1303 | EdkLogger.error("build", FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the "\r | |
1304 | "file been locked or using by other applications." %UniVfrOffsetFileName, None)\r | |
1305 | \r | |
1306 | fStringIO.close ()\r | |
1307 | fInputfile.close ()\r | |
1308 | return OutputName\r | |
d01a9986 | 1309 | \r |
e8449e1d FB |
1310 | @cached_property\r |
1311 | def OutputFile(self):\r | |
1312 | retVal = set()\r | |
40db176d | 1313 | \r |
91f6c533 | 1314 | for Root, Dirs, Files in os.walk(self.BuildDir):\r |
e8449e1d | 1315 | for File in Files:\r |
40db176d | 1316 | # lib file is already added through above CodaTargetList, skip it here\r |
91f6c533 SS |
1317 | if not (File.lower().endswith('.obj') or File.lower().endswith('.debug')):\r |
1318 | NewFile = path.join(Root, File)\r | |
40db176d | 1319 | retVal.add(NewFile)\r |
e8449e1d | 1320 | \r |
40db176d | 1321 | for Root, Dirs, Files in os.walk(self.FfsOutputDir):\r |
d01a9986 | 1322 | for File in Files:\r |
91f6c533 | 1323 | NewFile = path.join(Root, File)\r |
40db176d | 1324 | retVal.add(NewFile)\r |
d01a9986 | 1325 | \r |
e8449e1d FB |
1326 | return retVal\r |
1327 | \r | |
1328 | ## Create AsBuilt INF file the module\r | |
1329 | #\r | |
1330 | def CreateAsBuiltInf(self):\r | |
1331 | \r | |
1332 | if self.IsAsBuiltInfCreated:\r | |
1333 | return\r | |
1334 | \r | |
1335 | # Skip INF file generation for libraries\r | |
1336 | if self.IsLibrary:\r | |
1337 | return\r | |
1338 | \r | |
1339 | # Skip the following code for modules with no source files\r | |
1340 | if not self.SourceFileList:\r | |
1341 | return\r | |
1342 | \r | |
1343 | # Skip the following code for modules without any binary files\r | |
1344 | if self.BinaryFileList:\r | |
1345 | return\r | |
1346 | \r | |
1347 | ### TODO: How to handles mixed source and binary modules\r | |
1348 | \r | |
1349 | # Find all DynamicEx and PatchableInModule PCDs used by this module and dependent libraries\r | |
1350 | # Also find all packages that the DynamicEx PCDs depend on\r | |
1351 | Pcds = []\r | |
1352 | PatchablePcds = []\r | |
1353 | Packages = []\r | |
1354 | PcdCheckList = []\r | |
1355 | PcdTokenSpaceList = []\r | |
1356 | for Pcd in self.ModulePcdList + self.LibraryPcdList:\r | |
1357 | if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:\r | |
1358 | PatchablePcds.append(Pcd)\r | |
1359 | PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_PATCHABLE_IN_MODULE))\r | |
1360 | elif Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:\r | |
1361 | if Pcd not in Pcds:\r | |
1362 | Pcds.append(Pcd)\r | |
1363 | PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC_EX))\r | |
1364 | PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC))\r | |
1365 | PcdTokenSpaceList.append(Pcd.TokenSpaceGuidCName)\r | |
1366 | GuidList = OrderedDict(self.GuidList)\r | |
1367 | for TokenSpace in self.GetGuidsUsedByPcd:\r | |
1368 | # If token space is not referred by patch PCD or Ex PCD, remove the GUID from GUID list\r | |
1369 | # The GUIDs in GUIDs section should really be the GUIDs in source INF or referred by Ex an patch PCDs\r | |
1370 | if TokenSpace not in PcdTokenSpaceList and TokenSpace in GuidList:\r | |
1371 | GuidList.pop(TokenSpace)\r | |
1372 | CheckList = (GuidList, self.PpiList, self.ProtocolList, PcdCheckList)\r | |
1373 | for Package in self.DerivedPackageList:\r | |
1374 | if Package in Packages:\r | |
1375 | continue\r | |
1376 | BeChecked = (Package.Guids, Package.Ppis, Package.Protocols, Package.Pcds)\r | |
1377 | Found = False\r | |
1378 | for Index in range(len(BeChecked)):\r | |
1379 | for Item in CheckList[Index]:\r | |
1380 | if Item in BeChecked[Index]:\r | |
1381 | Packages.append(Package)\r | |
1382 | Found = True\r | |
1383 | break\r | |
1384 | if Found:\r | |
1385 | break\r | |
1386 | \r | |
1387 | VfrPcds = self._GetPcdsMaybeUsedByVfr()\r | |
1388 | for Pkg in self.PlatformInfo.PackageList:\r | |
1389 | if Pkg in Packages:\r | |
1390 | continue\r | |
1391 | for VfrPcd in VfrPcds:\r | |
1392 | if ((VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC_EX) in Pkg.Pcds or\r | |
1393 | (VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC) in Pkg.Pcds):\r | |
1394 | Packages.append(Pkg)\r | |
1395 | break\r | |
1396 | \r | |
1397 | ModuleType = SUP_MODULE_DXE_DRIVER if self.ModuleType == SUP_MODULE_UEFI_DRIVER and self.DepexGenerated else self.ModuleType\r | |
1398 | DriverType = self.PcdIsDriver if self.PcdIsDriver else ''\r | |
1399 | Guid = self.Guid\r | |
1400 | MDefs = self.Module.Defines\r | |
1401 | \r | |
1402 | AsBuiltInfDict = {\r | |
1403 | 'module_name' : self.Name,\r | |
1404 | 'module_guid' : Guid,\r | |
1405 | 'module_module_type' : ModuleType,\r | |
1406 | 'module_version_string' : [MDefs['VERSION_STRING']] if 'VERSION_STRING' in MDefs else [],\r | |
1407 | 'pcd_is_driver_string' : [],\r | |
1408 | 'module_uefi_specification_version' : [],\r | |
1409 | 'module_pi_specification_version' : [],\r | |
1410 | 'module_entry_point' : self.Module.ModuleEntryPointList,\r | |
1411 | 'module_unload_image' : self.Module.ModuleUnloadImageList,\r | |
1412 | 'module_constructor' : self.Module.ConstructorList,\r | |
1413 | 'module_destructor' : self.Module.DestructorList,\r | |
1414 | 'module_shadow' : [MDefs['SHADOW']] if 'SHADOW' in MDefs else [],\r | |
1415 | 'module_pci_vendor_id' : [MDefs['PCI_VENDOR_ID']] if 'PCI_VENDOR_ID' in MDefs else [],\r | |
1416 | 'module_pci_device_id' : [MDefs['PCI_DEVICE_ID']] if 'PCI_DEVICE_ID' in MDefs else [],\r | |
1417 | 'module_pci_class_code' : [MDefs['PCI_CLASS_CODE']] if 'PCI_CLASS_CODE' in MDefs else [],\r | |
1418 | 'module_pci_revision' : [MDefs['PCI_REVISION']] if 'PCI_REVISION' in MDefs else [],\r | |
1419 | 'module_build_number' : [MDefs['BUILD_NUMBER']] if 'BUILD_NUMBER' in MDefs else [],\r | |
1420 | 'module_spec' : [MDefs['SPEC']] if 'SPEC' in MDefs else [],\r | |
1421 | 'module_uefi_hii_resource_section' : [MDefs['UEFI_HII_RESOURCE_SECTION']] if 'UEFI_HII_RESOURCE_SECTION' in MDefs else [],\r | |
1422 | 'module_uni_file' : [MDefs['MODULE_UNI_FILE']] if 'MODULE_UNI_FILE' in MDefs else [],\r | |
1423 | 'module_arch' : self.Arch,\r | |
1424 | 'package_item' : [Package.MetaFile.File.replace('\\', '/') for Package in Packages],\r | |
1425 | 'binary_item' : [],\r | |
1426 | 'patchablepcd_item' : [],\r | |
1427 | 'pcd_item' : [],\r | |
1428 | 'protocol_item' : [],\r | |
1429 | 'ppi_item' : [],\r | |
1430 | 'guid_item' : [],\r | |
1431 | 'flags_item' : [],\r | |
1432 | 'libraryclasses_item' : []\r | |
1433 | }\r | |
1434 | \r | |
1435 | if 'MODULE_UNI_FILE' in MDefs:\r | |
1436 | UNIFile = os.path.join(self.MetaFile.Dir, MDefs['MODULE_UNI_FILE'])\r | |
1437 | if os.path.isfile(UNIFile):\r | |
1438 | shutil.copy2(UNIFile, self.OutputDir)\r | |
1439 | \r | |
1440 | if self.AutoGenVersion > int(gInfSpecVersion, 0):\r | |
1441 | AsBuiltInfDict['module_inf_version'] = '0x%08x' % self.AutoGenVersion\r | |
1442 | else:\r | |
1443 | AsBuiltInfDict['module_inf_version'] = gInfSpecVersion\r | |
1444 | \r | |
1445 | if DriverType:\r | |
1446 | AsBuiltInfDict['pcd_is_driver_string'].append(DriverType)\r | |
1447 | \r | |
1448 | if 'UEFI_SPECIFICATION_VERSION' in self.Specification:\r | |
1449 | AsBuiltInfDict['module_uefi_specification_version'].append(self.Specification['UEFI_SPECIFICATION_VERSION'])\r | |
1450 | if 'PI_SPECIFICATION_VERSION' in self.Specification:\r | |
1451 | AsBuiltInfDict['module_pi_specification_version'].append(self.Specification['PI_SPECIFICATION_VERSION'])\r | |
1452 | \r | |
1453 | OutputDir = self.OutputDir.replace('\\', '/').strip('/')\r | |
1454 | DebugDir = self.DebugDir.replace('\\', '/').strip('/')\r | |
1455 | for Item in self.CodaTargetList:\r | |
1456 | File = Item.Target.Path.replace('\\', '/').strip('/').replace(DebugDir, '').replace(OutputDir, '').strip('/')\r | |
1457 | if os.path.isabs(File):\r | |
1458 | File = File.replace('\\', '/').strip('/').replace(OutputDir, '').strip('/')\r | |
1459 | if Item.Target.Ext.lower() == '.aml':\r | |
1460 | AsBuiltInfDict['binary_item'].append('ASL|' + File)\r | |
1461 | elif Item.Target.Ext.lower() == '.acpi':\r | |
1462 | AsBuiltInfDict['binary_item'].append('ACPI|' + File)\r | |
1463 | elif Item.Target.Ext.lower() == '.efi':\r | |
1464 | AsBuiltInfDict['binary_item'].append('PE32|' + self.Name + '.efi')\r | |
1465 | else:\r | |
1466 | AsBuiltInfDict['binary_item'].append('BIN|' + File)\r | |
1467 | if not self.DepexGenerated:\r | |
1468 | DepexFile = os.path.join(self.OutputDir, self.Name + '.depex')\r | |
1469 | if os.path.exists(DepexFile):\r | |
1470 | self.DepexGenerated = True\r | |
1471 | if self.DepexGenerated:\r | |
1472 | if self.ModuleType in [SUP_MODULE_PEIM]:\r | |
1473 | AsBuiltInfDict['binary_item'].append('PEI_DEPEX|' + self.Name + '.depex')\r | |
1474 | elif self.ModuleType in [SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER, SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER]:\r | |
1475 | AsBuiltInfDict['binary_item'].append('DXE_DEPEX|' + self.Name + '.depex')\r | |
1476 | elif self.ModuleType in [SUP_MODULE_DXE_SMM_DRIVER]:\r | |
1477 | AsBuiltInfDict['binary_item'].append('SMM_DEPEX|' + self.Name + '.depex')\r | |
1478 | \r | |
1479 | Bin = self._GenOffsetBin()\r | |
1480 | if Bin:\r | |
1481 | AsBuiltInfDict['binary_item'].append('BIN|%s' % Bin)\r | |
1482 | \r | |
1483 | for Root, Dirs, Files in os.walk(OutputDir):\r | |
1484 | for File in Files:\r | |
1485 | if File.lower().endswith('.pdb'):\r | |
1486 | AsBuiltInfDict['binary_item'].append('DISPOSABLE|' + File)\r | |
1487 | HeaderComments = self.Module.HeaderComments\r | |
1488 | StartPos = 0\r | |
1489 | for Index in range(len(HeaderComments)):\r | |
1490 | if HeaderComments[Index].find('@BinaryHeader') != -1:\r | |
1491 | HeaderComments[Index] = HeaderComments[Index].replace('@BinaryHeader', '@file')\r | |
1492 | StartPos = Index\r | |
1493 | break\r | |
1494 | AsBuiltInfDict['header_comments'] = '\n'.join(HeaderComments[StartPos:]).replace(':#', '://')\r | |
1495 | AsBuiltInfDict['tail_comments'] = '\n'.join(self.Module.TailComments)\r | |
1496 | \r | |
1497 | GenList = [\r | |
1498 | (self.ProtocolList, self._ProtocolComments, 'protocol_item'),\r | |
1499 | (self.PpiList, self._PpiComments, 'ppi_item'),\r | |
1500 | (GuidList, self._GuidComments, 'guid_item')\r | |
1501 | ]\r | |
1502 | for Item in GenList:\r | |
1503 | for CName in Item[0]:\r | |
1504 | Comments = '\n '.join(Item[1][CName]) if CName in Item[1] else ''\r | |
1505 | Entry = Comments + '\n ' + CName if Comments else CName\r | |
1506 | AsBuiltInfDict[Item[2]].append(Entry)\r | |
1507 | PatchList = parsePcdInfoFromMapFile(\r | |
1508 | os.path.join(self.OutputDir, self.Name + '.map'),\r | |
1509 | os.path.join(self.OutputDir, self.Name + '.efi')\r | |
1510 | )\r | |
1511 | if PatchList:\r | |
1512 | for Pcd in PatchablePcds:\r | |
1513 | TokenCName = Pcd.TokenCName\r | |
1514 | for PcdItem in GlobalData.MixedPcd:\r | |
1515 | if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:\r | |
1516 | TokenCName = PcdItem[0]\r | |
1517 | break\r | |
1518 | for PatchPcd in PatchList:\r | |
1519 | if TokenCName == PatchPcd[0]:\r | |
1520 | break\r | |
1521 | else:\r | |
1522 | continue\r | |
1523 | PcdValue = ''\r | |
1524 | if Pcd.DatumType == 'BOOLEAN':\r | |
1525 | BoolValue = Pcd.DefaultValue.upper()\r | |
1526 | if BoolValue == 'TRUE':\r | |
1527 | Pcd.DefaultValue = '1'\r | |
1528 | elif BoolValue == 'FALSE':\r | |
1529 | Pcd.DefaultValue = '0'\r | |
1530 | \r | |
1531 | if Pcd.DatumType in TAB_PCD_NUMERIC_TYPES:\r | |
1532 | HexFormat = '0x%02x'\r | |
1533 | if Pcd.DatumType == TAB_UINT16:\r | |
1534 | HexFormat = '0x%04x'\r | |
1535 | elif Pcd.DatumType == TAB_UINT32:\r | |
1536 | HexFormat = '0x%08x'\r | |
1537 | elif Pcd.DatumType == TAB_UINT64:\r | |
1538 | HexFormat = '0x%016x'\r | |
1539 | PcdValue = HexFormat % int(Pcd.DefaultValue, 0)\r | |
1540 | else:\r | |
1541 | if Pcd.MaxDatumSize is None or Pcd.MaxDatumSize == '':\r | |
1542 | EdkLogger.error("build", AUTOGEN_ERROR,\r | |
1543 | "Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, TokenCName)\r | |
1544 | )\r | |
1545 | ArraySize = int(Pcd.MaxDatumSize, 0)\r | |
1546 | PcdValue = Pcd.DefaultValue\r | |
1547 | if PcdValue[0] != '{':\r | |
1548 | Unicode = False\r | |
1549 | if PcdValue[0] == 'L':\r | |
1550 | Unicode = True\r | |
1551 | PcdValue = PcdValue.lstrip('L')\r | |
1552 | PcdValue = eval(PcdValue)\r | |
1553 | NewValue = '{'\r | |
1554 | for Index in range(0, len(PcdValue)):\r | |
1555 | if Unicode:\r | |
1556 | CharVal = ord(PcdValue[Index])\r | |
1557 | NewValue = NewValue + '0x%02x' % (CharVal & 0x00FF) + ', ' \\r | |
1558 | + '0x%02x' % (CharVal >> 8) + ', '\r | |
1559 | else:\r | |
1560 | NewValue = NewValue + '0x%02x' % (ord(PcdValue[Index]) % 0x100) + ', '\r | |
1561 | Padding = '0x00, '\r | |
1562 | if Unicode:\r | |
1563 | Padding = Padding * 2\r | |
1564 | ArraySize = ArraySize // 2\r | |
1565 | if ArraySize < (len(PcdValue) + 1):\r | |
1566 | if Pcd.MaxSizeUserSet:\r | |
1567 | EdkLogger.error("build", AUTOGEN_ERROR,\r | |
1568 | "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)\r | |
1569 | )\r | |
1570 | else:\r | |
1571 | ArraySize = len(PcdValue) + 1\r | |
1572 | if ArraySize > len(PcdValue) + 1:\r | |
1573 | NewValue = NewValue + Padding * (ArraySize - len(PcdValue) - 1)\r | |
1574 | PcdValue = NewValue + Padding.strip().rstrip(',') + '}'\r | |
1575 | elif len(PcdValue.split(',')) <= ArraySize:\r | |
1576 | PcdValue = PcdValue.rstrip('}') + ', 0x00' * (ArraySize - len(PcdValue.split(',')))\r | |
1577 | PcdValue += '}'\r | |
1578 | else:\r | |
1579 | if Pcd.MaxSizeUserSet:\r | |
1580 | EdkLogger.error("build", AUTOGEN_ERROR,\r | |
1581 | "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)\r | |
1582 | )\r | |
1583 | else:\r | |
1584 | ArraySize = len(PcdValue) + 1\r | |
1585 | PcdItem = '%s.%s|%s|0x%X' % \\r | |
1586 | (Pcd.TokenSpaceGuidCName, TokenCName, PcdValue, PatchPcd[1])\r | |
1587 | PcdComments = ''\r | |
1588 | if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:\r | |
1589 | PcdComments = '\n '.join(self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName])\r | |
1590 | if PcdComments:\r | |
1591 | PcdItem = PcdComments + '\n ' + PcdItem\r | |
1592 | AsBuiltInfDict['patchablepcd_item'].append(PcdItem)\r | |
1593 | \r | |
1594 | for Pcd in Pcds + VfrPcds:\r | |
1595 | PcdCommentList = []\r | |
1596 | HiiInfo = ''\r | |
1597 | TokenCName = Pcd.TokenCName\r | |
1598 | for PcdItem in GlobalData.MixedPcd:\r | |
1599 | if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:\r | |
1600 | TokenCName = PcdItem[0]\r | |
1601 | break\r | |
1602 | if Pcd.Type == TAB_PCDS_DYNAMIC_EX_HII:\r | |
1603 | for SkuName in Pcd.SkuInfoList:\r | |
1604 | SkuInfo = Pcd.SkuInfoList[SkuName]\r | |
1605 | HiiInfo = '## %s|%s|%s' % (SkuInfo.VariableName, SkuInfo.VariableGuid, SkuInfo.VariableOffset)\r | |
1606 | break\r | |
1607 | if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:\r | |
1608 | PcdCommentList = self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName][:]\r | |
1609 | if HiiInfo:\r | |
1610 | UsageIndex = -1\r | |
1611 | UsageStr = ''\r | |
1612 | for Index, Comment in enumerate(PcdCommentList):\r | |
1613 | for Usage in UsageList:\r | |
1614 | if Comment.find(Usage) != -1:\r | |
1615 | UsageStr = Usage\r | |
1616 | UsageIndex = Index\r | |
1617 | break\r | |
1618 | if UsageIndex != -1:\r | |
1619 | PcdCommentList[UsageIndex] = '## %s %s %s' % (UsageStr, HiiInfo, PcdCommentList[UsageIndex].replace(UsageStr, ''))\r | |
1620 | else:\r | |
1621 | PcdCommentList.append('## UNDEFINED ' + HiiInfo)\r | |
1622 | PcdComments = '\n '.join(PcdCommentList)\r | |
1623 | PcdEntry = Pcd.TokenSpaceGuidCName + '.' + TokenCName\r | |
1624 | if PcdComments:\r | |
1625 | PcdEntry = PcdComments + '\n ' + PcdEntry\r | |
1626 | AsBuiltInfDict['pcd_item'].append(PcdEntry)\r | |
1627 | for Item in self.BuildOption:\r | |
1628 | if 'FLAGS' in self.BuildOption[Item]:\r | |
1629 | AsBuiltInfDict['flags_item'].append('%s:%s_%s_%s_%s_FLAGS = %s' % (self.ToolChainFamily, self.BuildTarget, self.ToolChain, self.Arch, Item, self.BuildOption[Item]['FLAGS'].strip()))\r | |
1630 | \r | |
1631 | # Generated LibraryClasses section in comments.\r | |
1632 | for Library in self.LibraryAutoGenList:\r | |
1633 | AsBuiltInfDict['libraryclasses_item'].append(Library.MetaFile.File.replace('\\', '/'))\r | |
1634 | \r | |
1635 | # Generated UserExtensions TianoCore section.\r | |
1636 | # All tianocore user extensions are copied.\r | |
1637 | UserExtStr = ''\r | |
1638 | for TianoCore in self._GetTianoCoreUserExtensionList():\r | |
1639 | UserExtStr += '\n'.join(TianoCore)\r | |
1640 | ExtensionFile = os.path.join(self.MetaFile.Dir, TianoCore[1])\r | |
1641 | if os.path.isfile(ExtensionFile):\r | |
1642 | shutil.copy2(ExtensionFile, self.OutputDir)\r | |
1643 | AsBuiltInfDict['userextension_tianocore_item'] = UserExtStr\r | |
1644 | \r | |
1645 | # Generated depex expression section in comments.\r | |
1646 | DepexExpression = self._GetDepexExpresionString()\r | |
1647 | AsBuiltInfDict['depexsection_item'] = DepexExpression if DepexExpression else ''\r | |
1648 | \r | |
1649 | AsBuiltInf = TemplateString()\r | |
1650 | AsBuiltInf.Append(gAsBuiltInfHeaderString.Replace(AsBuiltInfDict))\r | |
1651 | \r | |
1652 | SaveFileOnChange(os.path.join(self.OutputDir, self.Name + '.inf'), str(AsBuiltInf), False)\r | |
1653 | \r | |
1654 | self.IsAsBuiltInfCreated = True\r | |
1655 | \r | |
3bfbc915 | 1656 | def CacheCopyFile(self, DestDir, SourceDir, File):\r |
fc8b8dea SS |
1657 | if os.path.isdir(File):\r |
1658 | return\r | |
1659 | \r | |
3bfbc915 SS |
1660 | sub_dir = os.path.relpath(File, SourceDir)\r |
1661 | destination_file = os.path.join(DestDir, sub_dir)\r | |
0e7e7a26 SS |
1662 | destination_dir = os.path.dirname(destination_file)\r |
1663 | CreateDirectory(destination_dir)\r | |
1664 | try:\r | |
1665 | CopyFileOnChange(File, destination_dir)\r | |
1666 | except:\r | |
1667 | EdkLogger.quiet("[cache warning]: fail to copy file:%s to folder:%s" % (File, destination_dir))\r | |
1668 | return\r | |
1669 | \r | |
    ## Publish this module's build outputs into the binary cache (gBinCacheDest).
    #
    #   Locates the newest *.MakeHashFileList.<md5> and *.PreMakeHashFileList.<md5>
    #   files in BuildDir, derives the two hash strings from their file-name
    #   suffixes, and copies the module's outputs into hash-named cache
    #   directories.  Returns early (with a quiet error) if either hash file
    #   list is missing.
    def CopyModuleToCache(self):
        # Find the MakeHashStr and PreMakeHashStr from the latest MakeHashFileList
        # and PreMakeHashFileList files.  The hash string is the final dotted
        # component of the file name and is expected to be a 32-char md5 hexdigest.
        MakeHashStr = None
        PreMakeHashStr = None
        MakeTimeStamp = 0
        PreMakeTimeStamp = 0
        Files = [f for f in os.listdir(LongFilePath(self.BuildDir)) if path.isfile(LongFilePath(path.join(self.BuildDir, f)))]
        for File in Files:
            if ".MakeHashFileList." in File:
                # find the latest file through its modification time stamp
                # (index 8 of os.stat() is st_mtime)
                FileTimeStamp = os.stat(LongFilePath(path.join(self.BuildDir, File)))[8]
                if FileTimeStamp > MakeTimeStamp:
                    MakeTimeStamp = FileTimeStamp
                    MakeHashStr = File.split('.')[-1]
                    if len(MakeHashStr) != 32:
                        EdkLogger.quiet("[cache error]: wrong MakeHashFileList file:%s" % (File))
            if ".PreMakeHashFileList." in File:
                FileTimeStamp = os.stat(LongFilePath(path.join(self.BuildDir, File)))[8]
                if FileTimeStamp > PreMakeTimeStamp:
                    PreMakeTimeStamp = FileTimeStamp
                    PreMakeHashStr = File.split('.')[-1]
                    if len(PreMakeHashStr) != 32:
                        EdkLogger.quiet("[cache error]: wrong PreMakeHashFileList file:%s" % (File))

        # Both hash strings are required to name the cache directories below.
        if not MakeHashStr:
            EdkLogger.quiet("[cache error]: No MakeHashFileList file for module:%s[%s]" % (self.MetaFile.Path, self.Arch))
            return
        if not PreMakeHashStr:
            EdkLogger.quiet("[cache error]: No PreMakeHashFileList file for module:%s[%s]" % (self.MetaFile.Path, self.Arch))
            return

        # Create Cache destination dirs: one tree for ordinary build outputs,
        # one for FV/Ffs outputs; both keyed by the MakeHash string.
        FileDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
        FfsDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
        CacheFileDir = path.join(FileDir, MakeHashStr)
        CacheFfsDir = path.join(FfsDir, MakeHashStr)
        CreateDirectory (CacheFileDir)
        CreateDirectory (CacheFfsDir)

        # Create the ModuleHashPair file to support multiple cached versions
        # side by side; newest (PreMakefileHash, MakeHash) pair is inserted at
        # the front so lookups hit the most recent build first.
        ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")
        ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
        if os.path.exists(ModuleHashPair):
            with open(ModuleHashPair, 'r') as f:
                ModuleHashPairList = json.load(f)
        # json.load yields lists, so normalize to tuples before membership test
        if not (PreMakeHashStr, MakeHashStr) in set(map(tuple, ModuleHashPairList)):
            ModuleHashPairList.insert(0, (PreMakeHashStr, MakeHashStr))
            with open(ModuleHashPair, 'w') as f:
                json.dump(ModuleHashPairList, f, indent=2)

        # Copy files to the cache destination dirs.
        if not self.OutputFile:
            Ma = self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
            self.OutputFile = Ma.Binaries
        for File in self.OutputFile:
            # Ffs outputs go to the Ffs cache tree; hash bookkeeping files go
            # to the un-hashed FileDir so they are shared between versions;
            # everything else goes into the hash-named directory.
            if File.startswith(os.path.abspath(self.FfsOutputDir)+os.sep):
                self.CacheCopyFile(CacheFfsDir, self.FfsOutputDir, File)
            else:
                if self.Name + ".autogen.hash." in File or \
                    self.Name + ".autogen.hashchain." in File or \
                    self.Name + ".hash." in File or \
                    self.Name + ".hashchain." in File or \
                    self.Name + ".PreMakeHashFileList." in File or \
                    self.Name + ".MakeHashFileList." in File:
                    self.CacheCopyFile(FileDir, self.BuildDir, File)
                else:
                    self.CacheCopyFile(CacheFileDir, self.BuildDir, File)
e8449e1d FB |
1738 | ## Create makefile for the module and its dependent libraries\r |
1739 | #\r | |
1740 | # @param CreateLibraryMakeFile Flag indicating if or not the makefiles of\r | |
1741 | # dependent libraries will be created\r | |
1742 | #\r | |
1743 | @cached_class_function\r | |
1744 | def CreateMakeFile(self, CreateLibraryMakeFile=True, GenFfsList = []):\r | |
0e7e7a26 | 1745 | \r |
e8449e1d FB |
1746 | # nest this function inside it's only caller.\r |
1747 | def CreateTimeStamp():\r | |
1748 | FileSet = {self.MetaFile.Path}\r | |
1749 | \r | |
1750 | for SourceFile in self.Module.Sources:\r | |
1751 | FileSet.add (SourceFile.Path)\r | |
1752 | \r | |
1753 | for Lib in self.DependentLibraryList:\r | |
1754 | FileSet.add (Lib.MetaFile.Path)\r | |
1755 | \r | |
1756 | for f in self.AutoGenDepSet:\r | |
1757 | FileSet.add (f.Path)\r | |
1758 | \r | |
1759 | if os.path.exists (self.TimeStampPath):\r | |
1760 | os.remove (self.TimeStampPath)\r | |
df43ea6c FB |
1761 | \r |
1762 | SaveFileOnChange(self.TimeStampPath, "\n".join(FileSet), False)\r | |
e8449e1d FB |
1763 | \r |
1764 | # Ignore generating makefile when it is a binary module\r | |
1765 | if self.IsBinaryModule:\r | |
1766 | return\r | |
1767 | \r | |
1768 | self.GenFfsList = GenFfsList\r | |
1769 | \r | |
1770 | if not self.IsLibrary and CreateLibraryMakeFile:\r | |
1771 | for LibraryAutoGen in self.LibraryAutoGenList:\r | |
1772 | LibraryAutoGen.CreateMakeFile()\r | |
673d09a2 | 1773 | \r |
0e7e7a26 SS |
1774 | # CanSkip uses timestamps to determine build skipping\r |
1775 | if self.CanSkip():\r | |
e8449e1d FB |
1776 | return\r |
1777 | \r | |
1778 | if len(self.CustomMakefile) == 0:\r | |
1779 | Makefile = GenMake.ModuleMakefile(self)\r | |
1780 | else:\r | |
1781 | Makefile = GenMake.CustomMakefile(self)\r | |
1782 | if Makefile.Generate():\r | |
1783 | EdkLogger.debug(EdkLogger.DEBUG_9, "Generated makefile for module %s [%s]" %\r | |
1784 | (self.Name, self.Arch))\r | |
1785 | else:\r | |
1786 | EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of makefile for module %s [%s]" %\r | |
1787 | (self.Name, self.Arch))\r | |
1788 | \r | |
1789 | CreateTimeStamp()\r | |
1790 | \r | |
0e7e7a26 SS |
1791 | MakefileType = Makefile._FileType\r |
1792 | MakefileName = Makefile._FILE_NAME_[MakefileType]\r | |
1793 | MakefilePath = os.path.join(self.MakeFileDir, MakefileName)\r | |
fc8b8dea SS |
1794 | FilePath = path.join(self.BuildDir, self.Name + ".makefile")\r |
1795 | SaveFileOnChange(FilePath, MakefilePath, False)\r | |
0e7e7a26 | 1796 | \r |
e8449e1d FB |
1797 | def CopyBinaryFiles(self):\r |
1798 | for File in self.Module.Binaries:\r | |
1799 | SrcPath = File.Path\r | |
1800 | DstPath = os.path.join(self.OutputDir, os.path.basename(SrcPath))\r | |
1801 | CopyLongFilePath(SrcPath, DstPath)\r | |
    ## Create autogen code for the module and its dependent libraries
    #
    #   @param      CreateLibraryCodeFile   Flag indicating if or not the code of
    #                                       dependent libraries will be created
    #
    #   @retval     list    Names of the autogen/depex files actually generated,
    #                       or None when generation is skipped entirely
    #
    def CreateCodeFile(self, CreateLibraryCodeFile=True):

        # Idempotent: only run once per instance.
        if self.IsCodeFileCreated:
            return

        # Need to generate PcdDatabase even PcdDriver is binarymodule
        if self.IsBinaryModule and self.PcdIsDriver != '':
            CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
            return
        if self.IsBinaryModule:
            # Binary library modules only need their binaries copied over.
            if self.IsLibrary:
                self.CopyBinaryFiles()
            return

        if not self.IsLibrary and CreateLibraryCodeFile:
            for LibraryAutoGen in self.LibraryAutoGenList:
                LibraryAutoGen.CreateCodeFile()

        # CanSkip uses timestamps to determine build skipping
        if self.CanSkip():
            return
        # Deliberate bare attribute access: LibraryAutoGenList is a
        # cached_property (defined below), so touching it here forces the
        # library AutoGen objects to be created and cached even on the
        # skip-free path.
        self.LibraryAutoGenList
        AutoGenList = []
        IgoredAutoGenList = []

        # Generate each autogen file; GenC.Generate returns whether the file
        # content actually changed on disk.
        for File in self.AutoGenFileList:
            if GenC.Generate(File.Path, self.AutoGenFileList[File], File.IsBinary):
                AutoGenList.append(str(File))
            else:
                IgoredAutoGenList.append(str(File))


        for ModuleType in self.DepexList:
            # Ignore empty [depex] section or [depex] section for SUP_MODULE_USER_DEFINED module
            if len(self.DepexList[ModuleType]) == 0 or ModuleType == SUP_MODULE_USER_DEFINED or ModuleType == SUP_MODULE_HOST_APPLICATION:
                continue

            Dpx = GenDepex.DependencyExpression(self.DepexList[ModuleType], ModuleType, True)
            DpxFile = gAutoGenDepexFileName % {"module_name" : self.Name}

            # A non-empty postfix notation means a real depex binary will exist.
            if len(Dpx.PostfixNotation) != 0:
                self.DepexGenerated = True

            if Dpx.Generate(path.join(self.OutputDir, DpxFile)):
                AutoGenList.append(str(DpxFile))
            else:
                IgoredAutoGenList.append(str(DpxFile))

        # Log what was generated vs. skipped (unchanged on disk).
        if IgoredAutoGenList == []:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] files for module %s [%s]" %
                            (" ".join(AutoGenList), self.Name, self.Arch))
        elif AutoGenList == []:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of [%s] files for module %s [%s]" %
                            (" ".join(IgoredAutoGenList), self.Name, self.Arch))
        else:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] (skipped %s) files for module %s [%s]" %
                            (" ".join(AutoGenList), " ".join(IgoredAutoGenList), self.Name, self.Arch))

        self.IsCodeFileCreated = True

        return AutoGenList
1868 | \r | |
1869 | ## Summarize the ModuleAutoGen objects of all libraries used by this module\r | |
1870 | @cached_property\r | |
1871 | def LibraryAutoGenList(self):\r | |
1872 | RetVal = []\r | |
1873 | for Library in self.DependentLibraryList:\r | |
1874 | La = ModuleAutoGen(\r | |
1875 | self.Workspace,\r | |
1876 | Library.MetaFile,\r | |
1877 | self.BuildTarget,\r | |
1878 | self.ToolChain,\r | |
1879 | self.Arch,\r | |
1880 | self.PlatformInfo.MetaFile,\r | |
1881 | self.DataPipe\r | |
1882 | )\r | |
1883 | La.IsLibrary = True\r | |
1884 | if La not in RetVal:\r | |
1885 | RetVal.append(La)\r | |
1886 | for Lib in La.CodaTargetList:\r | |
1887 | self._ApplyBuildRule(Lib.Target, TAB_UNKNOWN_FILE)\r | |
1888 | return RetVal\r | |
1889 | \r | |
fc8b8dea SS |
1890 | def GenCMakeHash(self):\r |
1891 | # GenCMakeHash can only be called in --binary-destination\r | |
1892 | # Never called in multiprocessing and always directly save result in main process,\r | |
1893 | # so no need remote dict to share the gCMakeHashFile result with main process\r | |
e8449e1d | 1894 | \r |
fc8b8dea SS |
1895 | DependencyFileSet = set()\r |
1896 | # Add AutoGen files\r | |
1897 | if self.AutoGenFileList:\r | |
1898 | for File in set(self.AutoGenFileList):\r | |
1899 | DependencyFileSet.add(File)\r | |
1900 | \r | |
1901 | # Add Makefile\r | |
1902 | abspath = path.join(self.BuildDir, self.Name + ".makefile")\r | |
1903 | try:\r | |
1904 | with open(LongFilePath(abspath),"r") as fd:\r | |
1905 | lines = fd.readlines()\r | |
1906 | except Exception as e:\r | |
1907 | EdkLogger.error("build",FILE_NOT_FOUND, "%s doesn't exist" % abspath, ExtraData=str(e), RaiseError=False)\r | |
1908 | if lines:\r | |
1909 | DependencyFileSet.update(lines)\r | |
e8449e1d | 1910 | \r |
fc8b8dea | 1911 | # Caculate all above dependency files hash\r |
e8449e1d | 1912 | # Initialze hash object\r |
fc8b8dea | 1913 | FileList = []\r |
e8449e1d | 1914 | m = hashlib.md5()\r |
fc8b8dea SS |
1915 | for File in sorted(DependencyFileSet, key=lambda x: str(x)):\r |
1916 | if not path.exists(LongFilePath(str(File))):\r | |
1917 | EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))\r | |
1918 | continue\r | |
1919 | with open(LongFilePath(str(File)), 'rb') as f:\r | |
e8449e1d | 1920 | Content = f.read()\r |
fc8b8dea SS |
1921 | m.update(Content)\r |
1922 | FileList.append((str(File), hashlib.md5(Content).hexdigest()))\r | |
e8449e1d | 1923 | \r |
fc8b8dea SS |
1924 | HashChainFile = path.join(self.BuildDir, self.Name + ".autogen.hashchain." + m.hexdigest())\r |
1925 | GlobalData.gCMakeHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile\r | |
1926 | try:\r | |
1927 | with open(LongFilePath(HashChainFile), 'w') as f:\r | |
1928 | json.dump(FileList, f, indent=2)\r | |
1929 | except:\r | |
1930 | EdkLogger.quiet("[cache warning]: fail to save hashchain file:%s" % HashChainFile)\r | |
1931 | return False\r | |
0e7e7a26 | 1932 | \r |
fc8b8dea SS |
    ## Hash the module's meta file, sources and included headers, and persist
    #  the result as <Name>.hashchain.<md5> in BuildDir.
    #
    #   @retval     False on failure to save the hashchain file; otherwise None
    #
    def GenModuleHash(self):
        # GenModuleHash only called after autogen phase
        # Never called in multiprocessing and always directly save result in main process,
        # so no need remote dict to share the gModuleHashFile result with main process
        #
        # GenPreMakefileHashList consume no dict.
        # GenPreMakefileHashList produce local gModuleHashFile dict.

        DependencyFileSet = set()
        # Add Module Meta file
        DependencyFileSet.add(self.MetaFile.Path)

        # Add Module's source files
        if self.SourceFileList:
            for File in set(self.SourceFileList):
                DependencyFileSet.add(File.Path)

        # Add module's included header files.
        # Directly use the deps.txt file in the module BuildDir; only the
        # ".h" entries are kept.
        abspath = path.join(self.BuildDir, "deps.txt")
        rt = None
        try:
            with open(LongFilePath(abspath),"r") as fd:
                lines = fd.readlines()
                if lines:
                    rt = set([item.lstrip().strip("\n") for item in lines if item.strip("\n").endswith(".h")])
        except Exception as e:
            # RaiseError=False: log the missing deps.txt and continue with
            # whatever dependencies were collected so far.
            EdkLogger.error("build",FILE_NOT_FOUND, "%s doesn't exist" % abspath, ExtraData=str(e), RaiseError=False)

        if rt:
            DependencyFileSet.update(rt)


        # Calculate the hash of all the dependency files above.
        # Sort for a deterministic digest; missing files are warned and skipped.
        FileList = []
        m = hashlib.md5()
        BuildDirStr = path.abspath(self.BuildDir).lower()
        for File in sorted(DependencyFileSet, key=lambda x: str(x)):
            # Skip the AutoGen files in BuildDir which already been
            # included in .autogen.hash. file
            if BuildDirStr in path.abspath(File).lower():
                continue
            if not path.exists(LongFilePath(File)):
                EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))
                continue
            with open(LongFilePath(File), 'rb') as f:
                Content = f.read()
            m.update(Content)
            # Keep a per-file digest so cache validation can pinpoint changes.
            FileList.append((File, hashlib.md5(Content).hexdigest()))

        HashChainFile = path.join(self.BuildDir, self.Name + ".hashchain." + m.hexdigest())
        GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile
        try:
            with open(LongFilePath(HashChainFile), 'w') as f:
                json.dump(FileList, f, indent=2)
        except:
            EdkLogger.quiet("[cache warning]: fail to save hashchain file:%s" % HashChainFile)
            return False
0e7e7a26 | 1992 | \r |
fc8b8dea SS |
1993 | def GenPreMakefileHashList(self):\r |
1994 | # GenPreMakefileHashList consume below dicts:\r | |
1995 | # gPlatformHashFile\r | |
1996 | # gPackageHashFile\r | |
1997 | # gModuleHashFile\r | |
1998 | # GenPreMakefileHashList produce no dict.\r | |
1999 | # gModuleHashFile items might be produced in multiprocessing, so\r | |
2000 | # need check gModuleHashFile remote dict\r | |
94459080 | 2001 | \r |
0e7e7a26 SS |
2002 | # skip binary module\r |
2003 | if self.IsBinaryModule:\r | |
2004 | return\r | |
2005 | \r | |
fc8b8dea | 2006 | FileList = []\r |
0e7e7a26 | 2007 | m = hashlib.md5()\r |
0e7e7a26 | 2008 | # Add Platform level hash\r |
fc8b8dea SS |
2009 | HashFile = GlobalData.gPlatformHashFile\r |
2010 | if path.exists(LongFilePath(HashFile)):\r | |
2011 | FileList.append(HashFile)\r | |
2012 | m.update(HashFile.encode('utf-8'))\r | |
0e7e7a26 | 2013 | else:\r |
fc8b8dea | 2014 | EdkLogger.quiet("[cache warning]: No Platform HashFile: %s" % HashFile)\r |
0e7e7a26 SS |
2015 | \r |
2016 | # Add Package level hash\r | |
2017 | if self.DependentPackageList:\r | |
2018 | for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):\r | |
fc8b8dea SS |
2019 | if not (Pkg.PackageName, Pkg.Arch) in GlobalData.gPackageHashFile:\r |
2020 | EdkLogger.quiet("[cache warning]:No Package %s for module %s[%s]" % (Pkg.PackageName, self.MetaFile.Path, self.Arch))\r | |
2021 | continue\r | |
2022 | HashFile = GlobalData.gPackageHashFile[(Pkg.PackageName, Pkg.Arch)]\r | |
2023 | if path.exists(LongFilePath(HashFile)):\r | |
2024 | FileList.append(HashFile)\r | |
2025 | m.update(HashFile.encode('utf-8'))\r | |
0e7e7a26 | 2026 | else:\r |
fc8b8dea | 2027 | EdkLogger.quiet("[cache warning]:No Package HashFile: %s" % HashFile)\r |
0e7e7a26 SS |
2028 | \r |
2029 | # Add Module self\r | |
fc8b8dea SS |
2030 | # GenPreMakefileHashList needed in both --binary-destination\r |
2031 | # and --hash. And --hash might save ModuleHashFile in remote dict\r | |
2032 | # during multiprocessing.\r | |
2033 | if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleHashFile:\r | |
2034 | HashFile = GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)]\r | |
0e7e7a26 | 2035 | else:\r |
fc8b8dea SS |
2036 | EdkLogger.quiet("[cache error]:No ModuleHashFile for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r |
2037 | if path.exists(LongFilePath(HashFile)):\r | |
2038 | FileList.append(HashFile)\r | |
2039 | m.update(HashFile.encode('utf-8'))\r | |
0e7e7a26 | 2040 | else:\r |
fc8b8dea | 2041 | EdkLogger.quiet("[cache warning]:No Module HashFile: %s" % HashFile)\r |
0e7e7a26 | 2042 | \r |
fc8b8dea SS |
2043 | # Add Library hash\r |
2044 | if self.LibraryAutoGenList:\r | |
2045 | for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.MetaFile.Path):\r | |
0e7e7a26 | 2046 | \r |
fc8b8dea SS |
2047 | if (Lib.MetaFile.Path, Lib.Arch) in GlobalData.gModuleHashFile:\r |
2048 | HashFile = GlobalData.gModuleHashFile[(Lib.MetaFile.Path, Lib.Arch)]\r | |
2049 | else:\r | |
2050 | EdkLogger.quiet("[cache error]:No ModuleHashFile for lib: %s[%s]" % (Lib.MetaFile.Path, Lib.Arch))\r | |
2051 | if path.exists(LongFilePath(HashFile)):\r | |
2052 | FileList.append(HashFile)\r | |
2053 | m.update(HashFile.encode('utf-8'))\r | |
2054 | else:\r | |
2055 | EdkLogger.quiet("[cache warning]:No Lib HashFile: %s" % HashFile)\r | |
0e7e7a26 | 2056 | \r |
fc8b8dea SS |
2057 | # Save PreMakeHashFileList\r |
2058 | FilePath = path.join(self.BuildDir, self.Name + ".PreMakeHashFileList." + m.hexdigest())\r | |
2059 | try:\r | |
2060 | with open(LongFilePath(FilePath), 'w') as f:\r | |
2061 | json.dump(FileList, f, indent=0)\r | |
2062 | except:\r | |
2063 | EdkLogger.quiet("[cache warning]: fail to save PreMake HashFileList: %s" % FilePath)\r | |
0e7e7a26 | 2064 | \r |
fc8b8dea SS |
## Generate the <Name>.MakeHashFileList.<md5> file for --binary-destination.
#
#  Collects the hash-chain file paths for this module (the AutoGen hash file,
#  the module's own hash file, and every linked library's hash file), folds
#  the path strings into one md5 digest, and stores the list as JSON in
#  <BuildDir>/<Name>.MakeHashFileList.<digest>.
#
#  Only needed for --binary-destination, which works entirely on the local
#  dicts, so the remote (cross-process) dicts are not consulted.
#
def GenMakefileHashList(self):
    # Binary modules have special build rules and no makefile hash to record.
    if self.IsBinaryModule:
        return

    FileList = []
    m = hashlib.md5()

    # Add AutoGen hash
    HashFile = GlobalData.gCMakeHashFile[(self.MetaFile.Path, self.Arch)]
    if path.exists(LongFilePath(HashFile)):
        FileList.append(HashFile)
        m.update(HashFile.encode('utf-8'))
    else:
        EdkLogger.quiet("[cache warning]:No AutoGen HashFile: %s" % HashFile)

    # Add Module self
    if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleHashFile:
        HashFile = GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)]
    else:
        # Bug fix: the stale HashFile left over from the AutoGen step used to
        # be reused here, silently recording the wrong file in the list.
        HashFile = None
        EdkLogger.quiet("[cache error]:No ModuleHashFile for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
    if HashFile and path.exists(LongFilePath(HashFile)):
        FileList.append(HashFile)
        m.update(HashFile.encode('utf-8'))
    else:
        EdkLogger.quiet("[cache warning]:No Module HashFile: %s" % HashFile)

    # Add Library hash (sorted for a deterministic digest)
    for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.MetaFile.Path):
        if (Lib.MetaFile.Path, Lib.Arch) in GlobalData.gModuleHashFile:
            HashFile = GlobalData.gModuleHashFile[(Lib.MetaFile.Path, Lib.Arch)]
        else:
            # Bug fix: same stale-variable reuse as above, per library.
            HashFile = None
            EdkLogger.quiet("[cache error]:No ModuleHashFile for lib: %s[%s]" % (Lib.MetaFile.Path, Lib.Arch))
        if HashFile and path.exists(LongFilePath(HashFile)):
            FileList.append(HashFile)
            m.update(HashFile.encode('utf-8'))
        else:
            EdkLogger.quiet("[cache warning]:No Lib HashFile: %s" % HashFile)

    # Save MakeHashFileList; failure to save only degrades caching, so warn.
    FilePath = path.join(self.BuildDir, self.Name + ".MakeHashFileList." + m.hexdigest())
    try:
        with open(LongFilePath(FilePath), 'w') as f:
            json.dump(FileList, f, indent=0)
    except:
        EdkLogger.quiet("[cache warning]: fail to save Make HashFileList: %s" % FilePath)
2114 | \r | |
## Verify a hash-chain file against the current source tree.
#
#  A hash-chain file name has the form '<x>.hashchain.<32HexChars>' where <x>
#  is the module name and the hex string is the md5 hexdigest of all
#  hash-chain file content.  The file itself holds a JSON list of
#  (SourceFile, md5) pairs; the chain is valid only when every listed source
#  file still exists and still hashes to the recorded value.
#
#  @param  HashChainFile   Path of the hash-chain file to validate
#  @retval True   Every recorded source file matches its recorded hash
#  @retval False  Malformed name, unreadable file, missing source, or mismatch
#
def CheckHashChainFile(self, HashChainFile):
    HashStr = HashChainFile.split('.')[-1]
    if len(HashStr) != 32:
        # Bug fix: this message used the undefined name 'File', which raised
        # NameError instead of reporting the malformed file name.
        EdkLogger.quiet("[cache error]: wrong format HashChainFile:%s" % (HashChainFile))
        return False

    try:
        with open(LongFilePath(HashChainFile), 'r') as f:
            HashChainList = json.load(f)
    except:
        EdkLogger.quiet("[cache error]: fail to load HashChainFile: %s" % HashChainFile)
        return False

    # Compare every recorded (file, hash) pair against the live tree, caching
    # freshly computed digests in GlobalData.gFileHashDict for later reuse.
    for SrcFile, SrcHash in HashChainList:
        if SrcFile in GlobalData.gFileHashDict:
            DestHash = GlobalData.gFileHashDict[SrcFile]
        else:
            try:
                with open(LongFilePath(SrcFile), 'rb') as f:
                    Content = f.read()
                DestHash = hashlib.md5(Content).hexdigest()
                GlobalData.gFileHashDict[SrcFile] = DestHash
            except IOError:
                # cache miss if SrcFile is removed in new version code;
                # 0 marks it permanently missing for this run
                GlobalData.gFileHashDict[SrcFile] = 0
                EdkLogger.quiet("[cache insight]: first cache miss file in %s is %s" % (HashChainFile, SrcFile))
                return False
        if SrcHash != DestHash:
            EdkLogger.quiet("[cache insight]: first cache miss file in %s is %s" % (HashChainFile, SrcFile))
            return False

    return True
0e7e7a26 SS |
2152 | \r |
## Decide whether we can skip the left autogen and make process
#  (make-cache variant, used with --binary-source).  On a cache hit the cached
#  build outputs are copied back into this module's build/Ffs directories.
#
#  @retval True   Make cache hit; build results restored, make can be skipped
#  @retval False  Cache disabled, module not cacheable, or cache miss
#
def CanSkipbyMakeCache(self):
    # For --binary-source only
    # CanSkipbyMakeCache consume below dicts:
    # gModuleMakeCacheStatus
    # gHashChainStatus
    # GenPreMakefileHashList produce gModuleMakeCacheStatus, gModuleHashFile dict.
    # all these dicts might be produced in multiprocessing, so
    # need check these remote dict

    if not GlobalData.gBinCacheSource:
        return False

    # Already decided (possibly by another process) -- reuse the verdict.
    if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleMakeCacheStatus:
        return GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)]

    # If Module is binary, which has special build rule, do not skip by cache.
    if self.IsBinaryModule:
        print("[cache miss]: MakeCache: Skip BinaryModule:", self.MetaFile.Path, self.Arch)
        GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
        return False

    # see .inc as binary file, do not skip by hash
    for f_ext in self.SourceFileList:
        if '.inc' in str(f_ext):
            print("[cache miss]: MakeCache: Skip '.inc' File:", self.MetaFile.Path, self.Arch)
            GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
            return False

    # Layout of the cache source tree mirrors the build output tree.
    ModuleCacheDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
    FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)

    ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
    ModuleHashPair = path.join(ModuleCacheDir, self.Name + ".ModuleHashPair")
    try:
        with open(LongFilePath(ModuleHashPair), 'r') as f:
            ModuleHashPairList = json.load(f)
    except:
        # ModuleHashPair might not exist for new added module
        GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
        EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)
        print("[cache miss]: MakeCache:", self.MetaFile.Path, self.Arch)
        return False

    # Check the MakeHash in ModuleHashPairList one by one; the first pair
    # whose whole hash chain still validates wins.
    for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):
        SourceHashDir = path.join(ModuleCacheDir, MakeHash)
        SourceFfsHashDir = path.join(FfsDir, MakeHash)
        PreMakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".PreMakeHashFileList." + PreMakefileHash)
        MakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".MakeHashFileList." + MakeHash)

        try:
            with open(LongFilePath(MakeHashFileList_FilePah), 'r') as f:
                MakeHashFileList = json.load(f)
        except:
            EdkLogger.quiet("[cache error]: fail to load MakeHashFileList file: %s" % MakeHashFileList_FilePah)
            continue

        HashMiss = False
        for HashChainFile in MakeHashFileList:
            HashChainStatus = None
            # gHashChainStatus memoizes per-chain verdicts across modules.
            if HashChainFile in GlobalData.gHashChainStatus:
                HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]
            if HashChainStatus == False:
                HashMiss = True
                break
            elif HashChainStatus == True:
                continue
            # Convert to path start with cache source dir
            RelativePath = os.path.relpath(HashChainFile, self.WorkspaceDir)
            NewFilePath = os.path.join(GlobalData.gBinCacheSource, RelativePath)
            if self.CheckHashChainFile(NewFilePath):
                GlobalData.gHashChainStatus[HashChainFile] = True
                # Save the module self HashFile for GenPreMakefileHashList later usage
                if self.Name + ".hashchain." in HashChainFile:
                    GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile
            else:
                GlobalData.gHashChainStatus[HashChainFile] = False
                HashMiss = True
                break

        if HashMiss:
            continue

        # Make cache hit, restore the module build result from the cache tree
        # into BuildDir (and FfsOutputDir when Ffs outputs were cached).
        for root, dir, files in os.walk(SourceHashDir):
            for f in files:
                File = path.join(root, f)
                self.CacheCopyFile(self.BuildDir, SourceHashDir, File)
        if os.path.exists(SourceFfsHashDir):
            for root, dir, files in os.walk(SourceFfsHashDir):
                for f in files:
                    File = path.join(root, f)
                    self.CacheCopyFile(self.FfsOutputDir, SourceFfsHashDir, File)

        # PCD database modules need their generated code recreated locally.
        if self.Name == "PcdPeim" or self.Name == "PcdDxe":
            CreatePcdDatabaseCode(self, TemplateString(), TemplateString())

        print("[cache hit]: MakeCache:", self.MetaFile.Path, self.Arch)
        GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True
        return True

    print("[cache miss]: MakeCache:", self.MetaFile.Path, self.Arch)
    GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
    return False
0e7e7a26 | 2258 | \r |
fc8b8dea SS |
## Decide whether we can skip the left autogen and make process
#  (pre-make-cache variant).  With --hash only (no --binary-source) it checks
#  the latest local PreMakeHashFileList; with --binary-source it additionally
#  restores cached build outputs on a hit.
#
#  @retval True   PreMake cache hit (outputs restored when --binary-source)
#  @retval False  Cache disabled, module not cacheable, or cache miss
#
def CanSkipbyPreMakeCache(self):
    # CanSkipbyPreMakeCache consume below dicts:
    # gModulePreMakeCacheStatus
    # gHashChainStatus
    # gModuleHashFile
    # GenPreMakefileHashList produce gModulePreMakeCacheStatus dict.
    # all these dicts might be produced in multiprocessing, so
    # need check these remote dicts

    if not GlobalData.gUseHashCache or GlobalData.gBinCacheDest:
        return False

    # Already decided (possibly by another process) -- reuse the verdict.
    if (self.MetaFile.Path, self.Arch) in GlobalData.gModulePreMakeCacheStatus:
        return GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)]

    # If Module is binary, which has special build rule, do not skip by cache.
    if self.IsBinaryModule:
        print("[cache miss]: PreMakeCache: Skip BinaryModule:", self.MetaFile.Path, self.Arch)
        GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
        return False

    # see .inc as binary file, do not skip by hash
    for f_ext in self.SourceFileList:
        if '.inc' in str(f_ext):
            print("[cache miss]: PreMakeCache: Skip '.inc' File:", self.MetaFile.Path, self.Arch)
            GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
            return False

    # For --hash only in the incremental build
    if not GlobalData.gBinCacheSource:
        Files = [path.join(self.BuildDir, f) for f in os.listdir(self.BuildDir) if path.isfile(path.join(self.BuildDir, f))]
        PreMakeHashFileList_FilePah = None
        MakeTimeStamp = 0
        # Find latest PreMakeHashFileList file in self.BuildDir folder
        # (os.stat(...)[8] is st_mtime).
        for File in Files:
            if ".PreMakeHashFileList." in File:
                # NOTE(review): File already starts with self.BuildDir, so this
                # joins BuildDir twice; only harmless if BuildDir is absolute
                # (path.join drops the first part then) -- confirm.
                FileTimeStamp = os.stat(path.join(self.BuildDir, File))[8]
                if FileTimeStamp > MakeTimeStamp:
                    MakeTimeStamp = FileTimeStamp
                    PreMakeHashFileList_FilePah = File
        if not PreMakeHashFileList_FilePah:
            GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
            return False

        try:
            with open(LongFilePath(PreMakeHashFileList_FilePah), 'r') as f:
                PreMakeHashFileList = json.load(f)
        except:
            EdkLogger.quiet("[cache error]: fail to load PreMakeHashFileList file: %s" % PreMakeHashFileList_FilePah)
            print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)
            GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
            return False

        HashMiss = False
        for HashChainFile in PreMakeHashFileList:
            HashChainStatus = None
            # gHashChainStatus memoizes per-chain verdicts across modules.
            if HashChainFile in GlobalData.gHashChainStatus:
                HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]
            if HashChainStatus == False:
                HashMiss = True
                break
            elif HashChainStatus == True:
                continue
            if self.CheckHashChainFile(HashChainFile):
                GlobalData.gHashChainStatus[HashChainFile] = True
                # Save the module self HashFile for GenPreMakefileHashList later usage
                if self.Name + ".hashchain." in HashChainFile:
                    GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile
            else:
                GlobalData.gHashChainStatus[HashChainFile] = False
                HashMiss = True
                break

        if HashMiss:
            print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)
            GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
            return False
        else:
            print("[cache hit]: PreMakeCache:", self.MetaFile.Path, self.Arch)
            GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True
            return True

    # --binary-source path: cache source tree mirrors the build output tree.
    ModuleCacheDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
    FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)

    ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
    ModuleHashPair = path.join(ModuleCacheDir, self.Name + ".ModuleHashPair")
    try:
        with open(LongFilePath(ModuleHashPair), 'r') as f:
            ModuleHashPairList = json.load(f)
    except:
        # ModuleHashPair might not exist for new added module
        GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
        EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)
        print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)
        return False

    # Check the PreMakeHash in ModuleHashPairList one by one; the first pair
    # whose whole hash chain still validates wins.
    for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):
        SourceHashDir = path.join(ModuleCacheDir, MakeHash)
        SourceFfsHashDir = path.join(FfsDir, MakeHash)
        PreMakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".PreMakeHashFileList." + PreMakefileHash)
        MakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".MakeHashFileList." + MakeHash)

        try:
            with open(LongFilePath(PreMakeHashFileList_FilePah), 'r') as f:
                PreMakeHashFileList = json.load(f)
        except:
            EdkLogger.quiet("[cache error]: fail to load PreMakeHashFileList file: %s" % PreMakeHashFileList_FilePah)
            continue

        HashMiss = False
        for HashChainFile in PreMakeHashFileList:
            HashChainStatus = None
            if HashChainFile in GlobalData.gHashChainStatus:
                HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]
            if HashChainStatus == False:
                HashMiss = True
                break
            elif HashChainStatus == True:
                continue
            # Convert to path start with cache source dir
            RelativePath = os.path.relpath(HashChainFile, self.WorkspaceDir)
            NewFilePath = os.path.join(GlobalData.gBinCacheSource, RelativePath)
            if self.CheckHashChainFile(NewFilePath):
                GlobalData.gHashChainStatus[HashChainFile] = True
            else:
                GlobalData.gHashChainStatus[HashChainFile] = False
                HashMiss = True
                break

        if HashMiss:
            continue

        # PreMakefile cache hit, restore the module build result from the
        # cache tree into BuildDir (and FfsOutputDir when present).
        for root, dir, files in os.walk(SourceHashDir):
            for f in files:
                File = path.join(root, f)
                self.CacheCopyFile(self.BuildDir, SourceHashDir, File)
        if os.path.exists(SourceFfsHashDir):
            for root, dir, files in os.walk(SourceFfsHashDir):
                for f in files:
                    File = path.join(root, f)
                    self.CacheCopyFile(self.FfsOutputDir, SourceFfsHashDir, File)

        # PCD database modules need their generated code recreated locally.
        if self.Name == "PcdPeim" or self.Name == "PcdDxe":
            CreatePcdDatabaseCode(self, TemplateString(), TemplateString())

        print("[cache hit]: PreMakeCache:", self.MetaFile.Path, self.Arch)
        GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True
        return True

    print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)
    GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
    return False
2414 | return False\r | |
e8449e1d | 2415 | \r |
fc8b8dea SS |
## Decide whether we can skip the Module build
#  The module build can be skipped only when cache consumption is enabled
#  (--binary-source) and this module is already recorded in the hit set.
#
#  @param  gHitSet   Collection of modules already confirmed as cache hits
#  @retval True      Cache enabled and this module is in the hit set
#  @retval False     Otherwise
#
def CanSkipbyCache(self, gHitSet):
    # Hashing feature is off -- nothing can be skipped.
    if not GlobalData.gBinCacheSource:
        return False
    return self in gHitSet
e8449e1d FB |
2426 | \r |
## Decide whether we can skip the ModuleAutoGen process
#  Skippable only when no cache feature is active and neither the workspace
#  sources nor any file recorded in AutoGenTimeStamp is newer than the last
#  AutoGen run for this module.
#
#  @retval True   Nothing changed since the timestamp file was written
#  @retval False  Cache features active, timestamp missing, or a newer source
#
def CanSkip(self):
    # Don't skip if cache feature enabled
    if GlobalData.gUseHashCache or GlobalData.gBinCacheDest or GlobalData.gBinCacheSource:
        return False
    if self.MakeFileDir in GlobalData.gSikpAutoGenCache:
        return True
    if not os.path.exists(self.TimeStampPath):
        return False

    # Timestamp (st_mtime) of the last AutoGen run for this module.
    module_stamp = os.stat(self.TimeStampPath)[8]

    # Whole-workspace source timestamp beats the module timestamp -> rebuild.
    if self.Workspace._SrcTimeStamp > module_stamp:
        return False

    # Per-file mtimes are memoized class-wide so each source is stat'ed once.
    time_cache = ModuleAutoGen.TimeDict
    with open(self.TimeStampPath, 'r') as stamp_file:
        for line in stamp_file:
            src = line.rstrip('\n')
            if not os.path.exists(src):
                return False
            if src not in time_cache:
                time_cache[src] = os.stat(src)[8]
            if time_cache[src] > module_stamp:
                return False

    GlobalData.gSikpAutoGenCache.add(self.MakeFileDir)
    return True
2456 | \r | |
@cached_property
def TimeStampPath(self):
    """Full path of the AutoGenTimeStamp file in this module's makefile directory."""
    return path.join(self.MakeFileDir, 'AutoGenTimeStamp')