2 # Create makefile for MS nmake and GNU make
4 # Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
5 # SPDX-License-Identifier: BSD-2-Clause-Patent
7 from __future__
import absolute_import
8 import multiprocessing
as mp
10 from Common
.Misc
import PathClass
11 from AutoGen
.ModuleAutoGen
import ModuleAutoGen
12 from AutoGen
.ModuleAutoGenHelper
import WorkSpaceInfo
,AutoGenInfo
13 import Common
.GlobalData
as GlobalData
14 import Common
.EdkLogger
as EdkLogger
16 from Common
.MultipleWorkspace
import MultipleWorkspace
as mws
17 from AutoGen
.AutoGen
import AutoGen
18 from Workspace
.WorkspaceDatabase
import BuildDB
20 from queue
import Empty
22 from Queue
import Empty
25 from AutoGen
.DataPipe
import MemoryDataPipe
# LogAgent: a daemon-style thread that drains log records from a
# multiprocessing log queue (log_q) and re-emits each record through one of
# three per-severity logging.Logger objects (debug / info / error), optionally
# mirroring all three streams into a single log file.
# NOTE(review): this text is a garbled extraction — each logical source line is
# split across physical lines, and several original lines are missing from this
# view (e.g. the `self.log_q = log_q` assignment at original line 39, the
# `def run(self):` header and its `while`/`break` control flow around lines
# 81-87). Comments below describe only what the visible lines establish.
36 class LogAgent(threading
.Thread
):
# __init__(log_q, log_level, log_file=None): store configuration and build the
# three logger/handler pairs. log_file is optional; when absent (falsy), the
# file-handler section guarded by the visible `os.path.exists` check is the
# only file-related code — presumably skipped, but the guarding `if` for the
# whole section is not visible here (TODO confirm against upstream source).
37 def __init__(self
,log_q
,log_level
,log_file
=None):
38 super(LogAgent
,self
).__init
__()
# Record the requested level and optional output file.
# NOTE(review): the assignment of log_q to self.log_q (original line 39) is
# missing from this extraction, though self.log_q is read later.
40 self
.log_level
= log_level
41 self
.log_file
= log_file
43 # For DEBUG level (All DEBUG_0~9 are applicable)
# Debug stream: timestamped format, written to stdout.
44 self
._DebugLogger
_agent
= logging
.getLogger("tool_debug_agent")
45 _DebugFormatter
= logging
.Formatter("[%(asctime)s.%(msecs)d]: %(message)s", datefmt
="%H:%M:%S")
46 self
._DebugLogger
_agent
.setLevel(self
.log_level
)
47 _DebugChannel
= logging
.StreamHandler(sys
.stdout
)
48 _DebugChannel
.setFormatter(_DebugFormatter
)
49 self
._DebugLogger
_agent
.addHandler(_DebugChannel
)
51 # For VERBOSE, INFO, WARN level
# Info stream: bare message format, written to stdout.
52 self
._InfoLogger
_agent
= logging
.getLogger("tool_info_agent")
53 _InfoFormatter
= logging
.Formatter("%(message)s")
54 self
._InfoLogger
_agent
.setLevel(self
.log_level
)
55 _InfoChannel
= logging
.StreamHandler(sys
.stdout
)
56 _InfoChannel
.setFormatter(_InfoFormatter
)
57 self
._InfoLogger
_agent
.addHandler(_InfoChannel
)
# Error stream: bare message format, written to stderr (not stdout).
60 self
._ErrorLogger
_agent
= logging
.getLogger("tool_error_agent")
61 _ErrorFormatter
= logging
.Formatter("%(message)s")
62 self
._ErrorLogger
_agent
.setLevel(self
.log_level
)
63 _ErrorCh
= logging
.StreamHandler(sys
.stderr
)
64 _ErrorCh
.setFormatter(_ErrorFormatter
)
65 self
._ErrorLogger
_agent
.addHandler(_ErrorCh
)
# File mirroring: remove any stale log file, then attach a FileHandler for
# the same file to each of the three loggers so the file receives the union
# of all streams. Each handler keeps its stream's formatter.
68 if os
.path
.exists(self
.log_file
):
69 os
.remove(self
.log_file
)
70 _Ch
= logging
.FileHandler(self
.log_file
)
71 _Ch
.setFormatter(_DebugFormatter
)
72 self
._DebugLogger
_agent
.addHandler(_Ch
)
74 _Ch
= logging
.FileHandler(self
.log_file
)
75 _Ch
.setFormatter(_InfoFormatter
)
76 self
._InfoLogger
_agent
.addHandler(_Ch
)
78 _Ch
= logging
.FileHandler(self
.log_file
)
79 _Ch
.setFormatter(_ErrorFormatter
)
80 self
._ErrorLogger
_agent
.addHandler(_Ch
)
# Dispatch loop body (the `def run(self):` header and surrounding `while`
# are missing from this extraction — original lines ~81-84): pull one record
# from the queue; a None record is the shutdown sentinel (the `break` at
# original line 87 is also not visible here).
85 log_message
= self
.log_q
.get()
86 if log_message
is None:
# Route the record to the matching logger by the record's `.name`,
# replaying it at its original level number.
88 if log_message
.name
== "tool_error":
89 self
._ErrorLogger
_agent
.log(log_message
.levelno
,log_message
.getMessage())
90 elif log_message
.name
== "tool_info":
91 self
._InfoLogger
_agent
.log(log_message
.levelno
,log_message
.getMessage())
92 elif log_message
.name
== "tool_debug":
93 self
._DebugLogger
_agent
.log(log_message
.levelno
,log_message
.getMessage())
# Fallback (the `else:` line is not visible in this extraction): unknown
# record names are replayed through the info logger.
95 self
._InfoLogger
_agent
.log(log_message
.levelno
,log_message
.getMessage())
# AutoGenManager: a thread in the parent process that supervises the pool of
# AutoGenWorkerInProcess workers. It consumes worker status messages from
# feedback_q ("Done", "QueueEmpty", or a failed task name), terminates the
# pool on failure, and copies per-module cache status back into GlobalData.
# NOTE(review): garbled extraction — the `def run(self):` header, the
# counters (`fin_num`, `cache_num`), loop headers, and several branch bodies
# are missing from this view; comments below cover only visible lines.
99 class AutoGenManager(threading
.Thread
):
# Store the worker list, the shared feedback queue, and the shared error
# event used to signal all workers to stop.
100 def __init__(self
,autogen_workers
, feedback_q
,error_event
):
101 super(AutoGenManager
,self
).__init
__()
102 self
.autogen_workers
= autogen_workers
103 self
.feedback_q
= feedback_q
105 self
.error_event
= error_event
# Supervision loop body (run() header and enclosing loop not visible):
# block on the next worker status message.
110 badnews
= self
.feedback_q
.get()
# "Done": one worker finished its whole queue (branch body not visible —
# presumably increments the finished-worker counter; TODO confirm).
113 if badnews
== "Done":
# "QueueEmpty": a worker observed the module queue exhausted — log it and
# shut the pool down.
115 elif badnews
== "QueueEmpty":
116 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Worker %s: %s" % (os
.getpid(), badnews
))
117 self
.TerminateWorkers()
# Any other message is a failed task name: log it and terminate the pool.
119 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Worker %s: %s" % (os
.getpid(), badnews
))
121 self
.TerminateWorkers()
# Exit condition: all workers reported done (fin_num is assigned on a line
# not visible in this extraction).
122 if fin_num
== len(self
.autogen_workers
):
124 for w
in self
.autogen_workers
:
# clearQueue: drain the shared queues so worker processes can exit cleanly,
# after copying cache results into the parent's GlobalData.
130 def clearQueue(self
):
131 taskq
= self
.autogen_workers
[0].module_queue
132 logq
= self
.autogen_workers
[0].log_q
134 clearQ(self
.feedback_q
)
136 # Copy the cache queue items to parent thread before clear
137 cacheq
= self
.autogen_workers
[0].cache_q
# "CacheDone" is the per-worker sentinel; every other item is a cache
# status tuple recorded in GlobalData.gModuleAllCacheStatus.
142 if item
== "CacheDone":
145 GlobalData
.gModuleAllCacheStatus
.add(item
)
146 if cache_num
== len(self
.autogen_workers
):
# Reached when draining cache_q raises (the `except:` line is not visible
# in this extraction).
149 print ("cache_q error")
# TerminateWorkers: signal every worker to stop via the shared error event,
# and push a None sentinel so a worker blocked on feedback_q wakes up.
151 def TerminateWorkers(self
):
152 self
.error_event
.set()
154 self
.feedback_q
.put(None)
# AutoGenWorkerInProcess: a multiprocessing.Process worker that rebuilds the
# platform build context from a serialized data pipe file, then repeatedly
# pulls module descriptions from module_queue and runs ModuleAutoGen for each
# (hash-cache checks, CreateCodeFile/CreateMakeFile/CreateAsBuiltInf),
# reporting status through feedback_q and cache results through cache_q.
# NOTE(review): garbled extraction — the `def run(self):` header, try/except
# scaffolding, the `keep_doing` loop and the `arch` binding used at original
# line 262 are among the lines missing from this view; comments below cover
# only what the visible lines establish.
155 class AutoGenWorkerInProcess(mp
.Process
):
# Store the queues, lock, shared error event, and the path of the pickled
# MemoryDataPipe file; the pipe itself is loaded lazily in run() because
# this object is constructed in the parent process.
156 def __init__(self
,module_queue
,data_pipe_file_path
,feedback_q
,file_lock
,cache_q
,log_q
,error_event
):
157 mp
.Process
.__init
__(self
)
158 self
.module_queue
= module_queue
159 self
.data_pipe_file_path
=data_pipe_file_path
160 self
.data_pipe
= None
161 self
.feedback_q
= feedback_q
162 self
.PlatformMetaFileSet
= {}
163 self
.file_lock
= file_lock
164 self
.cache_q
= cache_q
166 self
.error_event
= error_event
# GetPlatformMetaFile: memoize the platform meta file object per
# (filepath, root) key. The guarding `if` for the cache-hit return (original
# line 168) is not visible in this extraction.
167 def GetPlatformMetaFile(self
,filepath
,root
):
169 return self
.PlatformMetaFileSet
[(filepath
,root
)]
171 self
.PlatformMetaFileSet
[(filepath
,root
)] = filepath
172 return self
.PlatformMetaFileSet
[(filepath
,root
)]
# run() body (header and outer try not visible): load the serialized data
# pipe from disk; on failure report through feedback_q.
178 self
.data_pipe
= MemoryDataPipe()
179 self
.data_pipe
.load(self
.data_pipe_file_path
)
181 self
.feedback_q
.put(taskname
+ ":" + "load data pipe %s failed." % self
.data_pipe_file_path
)
# Route this process's EdkLogger output back to the parent via log_q, and
# adopt the parent's log level (defaulting to INFO when unset).
182 EdkLogger
.LogClientInitialize(self
.log_q
)
183 loglevel
= self
.data_pipe
.Get("LogLevel")
185 loglevel
= EdkLogger
.INFO
186 EdkLogger
.SetLevel(loglevel
)
# Rebuild the workspace context (target/toolchain/arch/platform/workspace)
# from the data pipe's "P_Info" dictionary.
187 target
= self
.data_pipe
.Get("P_Info").get("Target")
188 toolchain
= self
.data_pipe
.Get("P_Info").get("ToolChain")
189 archlist
= self
.data_pipe
.Get("P_Info").get("ArchList")
191 active_p
= self
.data_pipe
.Get("P_Info").get("ActivePlatform")
192 workspacedir
= self
.data_pipe
.Get("P_Info").get("WorkspaceDir")
193 PackagesPath
= os
.getenv("PACKAGES_PATH")
194 mws
.setWs(workspacedir
, PackagesPath
)
195 self
.Wa
= WorkSpaceInfo(
196 workspacedir
,active_p
,target
,toolchain
,archlist
198 self
.Wa
._SrcTimeStamp
= self
.data_pipe
.Get("Workspace_timestamp")
# Restore the parent process's global build state into this process's
# GlobalData (defines, environment snapshot, FDF parser, cache settings).
199 GlobalData
.gGlobalDefines
= self
.data_pipe
.Get("G_defines")
200 GlobalData
.gCommandLineDefines
= self
.data_pipe
.Get("CL_defines")
# NOTE(review): writing os.environ._data relies on a CPython
# implementation detail of os.environ — fragile across Python versions.
201 os
.environ
._data
= self
.data_pipe
.Get("Env_Var")
202 GlobalData
.gWorkspace
= workspacedir
203 GlobalData
.gDisableIncludePathCheck
= False
204 GlobalData
.gFdfParser
= self
.data_pipe
.Get("FdfParser")
205 GlobalData
.gDatabasePath
= self
.data_pipe
.Get("DatabasePath")
# Hash/binary cache configuration and fresh per-process status tables.
207 GlobalData
.gUseHashCache
= self
.data_pipe
.Get("UseHashCache")
208 GlobalData
.gBinCacheSource
= self
.data_pipe
.Get("BinCacheSource")
209 GlobalData
.gBinCacheDest
= self
.data_pipe
.Get("BinCacheDest")
210 GlobalData
.gPlatformHashFile
= self
.data_pipe
.Get("PlatformHashFile")
211 GlobalData
.gModulePreMakeCacheStatus
= dict()
212 GlobalData
.gModuleMakeCacheStatus
= dict()
213 GlobalData
.gHashChainStatus
= dict()
214 GlobalData
.gCMakeHashFile
= dict()
215 GlobalData
.gModuleHashFile
= dict()
216 GlobalData
.gFileHashDict
= dict()
217 GlobalData
.gEnableGenfdsMultiThread
= self
.data_pipe
.Get("EnableGenfdsMultiThread")
218 GlobalData
.file_lock
= self
.file_lock
219 CommandTarget
= self
.data_pipe
.Get("CommandTarget")
# Reassemble command-line PCD overrides as "Token.Space[.Sku]=Value"
# strings from the 4-tuples shipped through the data pipe.
220 pcd_from_build_option
= []
221 for pcd_tuple
in self
.data_pipe
.Get("BuildOptPcd"):
222 pcd_id
= ".".join((pcd_tuple
[0],pcd_tuple
[1]))
223 if pcd_tuple
[2].strip():
224 pcd_id
= ".".join((pcd_id
,pcd_tuple
[2]))
225 pcd_from_build_option
.append("=".join((pcd_id
,pcd_tuple
[3])))
226 GlobalData
.BuildOptionPcd
= pcd_from_build_option
228 FfsCmd
= self
.data_pipe
.Get("FfsCommand")
231 GlobalData
.FfsCmd
= FfsCmd
232 PlatformMetaFile
= self
.GetPlatformMetaFile(self
.data_pipe
.Get("P_Info").get("ActivePlatform"),
233 self
.data_pipe
.Get("P_Info").get("WorkspaceDir"))
# Per-module loop (the enclosing `while` is not visible here): stop as soon
# as the manager sets the shared error event.
235 if self
.error_event
.is_set():
# Non-blocking fetch of the next module descriptor tuple; the surrounding
# try and the Empty-exception handler continue the loop on a "fake empty"
# race (handler's `except Empty:` line not visible in this extraction).
239 module_file
,module_root
,module_path
,module_basename
,module_originalpath
,module_arch
,IsLib
= self
.module_queue
.get_nowait()
241 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Worker %s: %s" % (os
.getpid(), "Fake Empty."))
# A None module_file is the end-of-queue sentinel: tell the manager and
# stop pulling work.
244 if module_file
is None:
245 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Worker %s: %s" % (os
.getpid(), "Worker get the last item in the queue."))
246 self
.feedback_q
.put("QueueEmpty")
# Rebuild the module's PathClass meta file from the shipped components.
250 modulefullpath
= os
.path
.join(module_root
,module_file
)
251 taskname
= " : ".join((modulefullpath
,module_arch
))
252 module_metafile
= PathClass(module_file
,module_root
)
254 module_metafile
.Path
= module_path
256 module_metafile
.BaseName
= module_basename
257 if module_originalpath
:
258 module_metafile
.OriginalPath
= PathClass(module_originalpath
,module_root
)
260 target
= self
.data_pipe
.Get("P_Info").get("Target")
261 toolchain
= self
.data_pipe
.Get("P_Info").get("ToolChain")
# `arch` is bound on a line not visible in this extraction (presumably
# from module_arch — TODO confirm against upstream source).
262 Ma
= ModuleAutoGen(self
.Wa
,module_metafile
,target
,toolchain
,arch
,PlatformMetaFile
,self
.data_pipe
)
264 # SourceFileList calling sequence impact the makefile string sequence.
265 # Create cached SourceFileList here to unify its calling sequence for both
266 # CanSkipbyPreMakeCache and CreateCodeFile/CreateMakeFile.
267 RetVal
= Ma
.SourceFileList
# Pre-make cache probe: only when hash caching is on, we are consuming (not
# populating) the cache, and the build target is a full build.
268 if GlobalData
.gUseHashCache
and not GlobalData
.gBinCacheDest
and CommandTarget
in [None, "", "all"]:
270 CacheResult
= Ma
.CanSkipbyPreMakeCache()
# On a cache hit: report the task done and record a PreMakeCache hit;
# otherwise record a miss. (The `if CacheResult:`/`else:` lines are not
# visible in this extraction.)
273 self
.feedback_q
.put(taskname
)
276 self
.cache_q
.put((Ma
.MetaFile
.Path
, Ma
.Arch
, "PreMakeCache", True))
279 self
.cache_q
.put((Ma
.MetaFile
.Path
, Ma
.Arch
, "PreMakeCache", False))
# Cache miss path: generate the module's code files, makefile (with any FFS
# commands for this module/arch), and As-Built INF.
281 Ma
.CreateCodeFile(False)
282 Ma
.CreateMakeFile(False,GenFfsList
=FfsCmd
.get((Ma
.MetaFile
.Path
, Ma
.Arch
),[]))
283 Ma
.CreateAsBuiltInf()
# Post-makefile cache probe when restoring from a binary cache source.
284 if GlobalData
.gBinCacheSource
and CommandTarget
in [None, "", "all"]:
286 CacheResult
= Ma
.CanSkipbyMakeCache()
289 self
.feedback_q
.put(taskname
)
292 self
.cache_q
.put((Ma
.MetaFile
.Path
, Ma
.Arch
, "MakeCache", True))
295 self
.cache_q
.put((Ma
.MetaFile
.Path
, Ma
.Arch
, "MakeCache", False))
# Any per-module failure: log it at DEBUG_9 and report the failed task name
# so the manager can terminate the pool.
297 except Exception as e
:
298 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Worker %s: %s" % (os
.getpid(), str(e
)))
299 self
.feedback_q
.put(taskname
)
# Normal exit: signal completion on both queues.
301 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Worker %s: %s" % (os
.getpid(), "Done"))
302 self
.feedback_q
.put("Done")
303 self
.cache_q
.put("CacheDone")
# printStatus: diagnostic dump of the per-process AutoGen caches and the
# workspace database, grouped by file type (dec/dsc/inf).
# NOTE(review): "Processs" in the printed strings is a typo in runtime output;
# left unchanged here because changing it alters program output.
305 def printStatus(self
):
306 print("Processs ID: %d Run %d modules in AutoGen " % (os
.getpid(),len(AutoGen
.Cache())))
307 print("Processs ID: %d Run %d modules in AutoGenInfo " % (os
.getpid(),len(AutoGenInfo
.GetCache())))
# Bucket every cached build object by its file extension; the dict literal
# initializing `groupobj` (original line 308) is not visible here.
309 for buildobj
in BuildDB
.BuildObject
.GetCache().values():
310 if str(buildobj
).lower().endswith("dec"):
312 groupobj
['dec'].append(str(buildobj
))
314 groupobj
['dec'] = [str(buildobj
)]
315 if str(buildobj
).lower().endswith("dsc"):
317 groupobj
['dsc'].append(str(buildobj
))
319 groupobj
['dsc'] = [str(buildobj
)]
321 if str(buildobj
).lower().endswith("inf"):
323 groupobj
['inf'].append(str(buildobj
))
325 groupobj
['inf'] = [str(buildobj
)]
327 print("Processs ID: %d Run %d pkg in WDB " % (os
.getpid(),len(groupobj
.get("dec",[]))))
328 print("Processs ID: %d Run %d pla in WDB " % (os
.getpid(),len(groupobj
.get("dsc",[]))))
329 print("Processs ID: %d Run %d inf in WDB " % (os
.getpid(),len(groupobj
.get("inf",[]))))