2 # Create makefile for MS nmake and GNU make
4 # Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
5 # SPDX-License-Identifier: BSD-2-Clause-Patent
7 from __future__
import absolute_import
8 import multiprocessing
as mp
10 from Common
.Misc
import PathClass
11 from AutoGen
.ModuleAutoGen
import ModuleAutoGen
12 from AutoGen
.ModuleAutoGenHelper
import WorkSpaceInfo
,AutoGenInfo
13 import Common
.GlobalData
as GlobalData
14 import Common
.EdkLogger
as EdkLogger
16 from Common
.MultipleWorkspace
import MultipleWorkspace
as mws
17 from AutoGen
.AutoGen
import AutoGen
18 from Workspace
.WorkspaceDatabase
import BuildDB
20 from queue
import Empty
22 from Queue
import Empty
25 from AutoGen
.DataPipe
import MemoryDataPipe
# LogAgent: a threading.Thread that drains log records from a queue (log_q)
# and replays each one through three dedicated loggers — "tool_debug_agent",
# "tool_info_agent", "tool_error_agent" — writing to stdout/stderr and, when
# log_file is given, mirroring all three to that file.
# NOTE(review): this chunk is a lossy extraction — statements are split across
# physical lines, indentation is gone, and several original lines are missing
# (method headers, loop/branch frames). Code text below is kept byte-for-byte;
# only comments were added.
36 class LogAgent(threading
.Thread
):
# Constructor.
#   log_q     : queue of logging.LogRecord-like objects (drained below).
#   log_level : level applied to every agent logger.
#   log_file  : optional path; an existing file is removed and recreated.
# NOTE(review): the assignment of self.log_q is missing from this extraction;
# later code reads self.log_q, so presumably it is stored here — confirm.
37 def __init__(self
,log_q
,log_level
,log_file
=None):
38 super(LogAgent
,self
).__init
__()
40 self
.log_level
= log_level
41 self
.log_file
= log_file
# Debug logger: timestamped format, configured level, stdout channel.
43 # For DEBUG level (All DEBUG_0~9 are applicable)
44 self
._DebugLogger
_agent
= logging
.getLogger("tool_debug_agent")
45 _DebugFormatter
= logging
.Formatter("[%(asctime)s.%(msecs)d]: %(message)s", datefmt
="%H:%M:%S")
46 self
._DebugLogger
_agent
.setLevel(self
.log_level
)
47 _DebugChannel
= logging
.StreamHandler(sys
.stdout
)
48 _DebugChannel
.setFormatter(_DebugFormatter
)
49 self
._DebugLogger
_agent
.addHandler(_DebugChannel
)
# Info logger: bare message format, stdout channel.
51 # For VERBOSE, INFO, WARN level
52 self
._InfoLogger
_agent
= logging
.getLogger("tool_info_agent")
53 _InfoFormatter
= logging
.Formatter("%(message)s")
54 self
._InfoLogger
_agent
.setLevel(self
.log_level
)
55 _InfoChannel
= logging
.StreamHandler(sys
.stdout
)
56 _InfoChannel
.setFormatter(_InfoFormatter
)
57 self
._InfoLogger
_agent
.addHandler(_InfoChannel
)
# Error logger: bare message format, routed to stderr instead of stdout.
60 self
._ErrorLogger
_agent
= logging
.getLogger("tool_error_agent")
61 _ErrorFormatter
= logging
.Formatter("%(message)s")
62 self
._ErrorLogger
_agent
.setLevel(self
.log_level
)
63 _ErrorCh
= logging
.StreamHandler(sys
.stderr
)
64 _ErrorCh
.setFormatter(_ErrorFormatter
)
65 self
._ErrorLogger
_agent
.addHandler(_ErrorCh
)
# File mirroring: delete any stale log file, then attach a FileHandler for
# each of the three loggers, reusing each logger's formatter.
# NOTE(review): the guard checking that self.log_file is set is missing from
# this extraction — os.path.exists(None) would raise; confirm upstream.
68 if os
.path
.exists(self
.log_file
):
69 os
.remove(self
.log_file
)
70 _Ch
= logging
.FileHandler(self
.log_file
)
71 _Ch
.setFormatter(_DebugFormatter
)
72 self
._DebugLogger
_agent
.addHandler(_Ch
)
74 _Ch
= logging
.FileHandler(self
.log_file
)
75 _Ch
.setFormatter(_InfoFormatter
)
76 self
._InfoLogger
_agent
.addHandler(_Ch
)
78 _Ch
= logging
.FileHandler(self
.log_file
)
79 _Ch
.setFormatter(_ErrorFormatter
)
80 self
._ErrorLogger
_agent
.addHandler(_Ch
)
# --- queue-draining loop body; the enclosing 'def run'/'while' lines are
# missing from this extraction. Blocks on log_q.get(); a None record is the
# shutdown sentinel (its action line is also missing here).
85 log_message
= self
.log_q
.get()
86 if log_message
is None:
# Dispatch each record to the agent logger matching its originating
# logger name, preserving the record's own level.
88 if log_message
.name
== "tool_error":
89 self
._ErrorLogger
_agent
.log(log_message
.levelno
,log_message
.getMessage())
90 elif log_message
.name
== "tool_info":
91 self
._InfoLogger
_agent
.log(log_message
.levelno
,log_message
.getMessage())
92 elif log_message
.name
== "tool_debug":
93 self
._DebugLogger
_agent
.log(log_message
.levelno
,log_message
.getMessage())
# Fallback for any other logger name: route through the info logger
# (the enclosing 'else' line is missing from this extraction).
95 self
._InfoLogger
_agent
.log(log_message
.levelno
,log_message
.getMessage())
# AutoGenManager: a threading.Thread that supervises a pool of AutoGen worker
# processes via a shared feedback queue, terminating the pool on error or
# queue exhaustion and collecting per-module cache status afterwards.
# NOTE(review): lossy extraction — statements are split across physical lines
# and several original lines are missing (run() header, loop frames, counters
# such as fin_num/cache_num initialization). Code text kept byte-for-byte;
# only comments were added.
99 class AutoGenManager(threading
.Thread
):
# Constructor.
#   autogen_workers : list of worker process objects (each exposes
#                     module_queue, log_q, cache_q — see clearQueue below).
#   feedback_q      : queue of status strings from workers
#                     ("Done", "QueueEmpty", or error text).
#   error_event     : event set in TerminateWorkers to signal workers to stop.
100 def __init__(self
,autogen_workers
, feedback_q
,error_event
):
101 super(AutoGenManager
,self
).__init
__()
102 self
.autogen_workers
= autogen_workers
103 self
.feedback_q
= feedback_q
105 self
.error_event
= error_event
# --- supervision loop body (enclosing 'def run'/'while' lines missing).
# Blocks on the feedback queue and reacts to each worker message.
110 badnews
= self
.feedback_q
.get()
# "Done": a worker finished normally (its counting code is missing here).
113 if badnews
== "Done":
# "QueueEmpty": the module queue is exhausted — stop all workers.
115 elif badnews
== "QueueEmpty":
116 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Worker %s: %s" % (os
.getpid(), badnews
))
117 self
.TerminateWorkers()
# Any other message is treated as an error report — log and terminate.
119 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Worker %s: %s" % (os
.getpid(), badnews
))
121 self
.TerminateWorkers()
# Exit the loop once every worker has reported in (fin_num is accumulated
# on lines missing from this extraction — confirm upstream).
122 if fin_num
== len(self
.autogen_workers
):
124 for w
in self
.autogen_workers
:
# Drain all shared queues so the worker processes can shut down cleanly
# (a blocked queue feeder thread would otherwise keep a process alive).
130 def clearQueue(self
):
131 taskq
= self
.autogen_workers
[0].module_queue
132 logq
= self
.autogen_workers
[0].log_q
134 clearQ(self
.feedback_q
)
136 # Copy the cache queue items to parent thread before clear
137 cacheq
= self
.autogen_workers
[0].cache_q
# "CacheDone" is the per-worker sentinel on the cache queue; every other
# item is a module cache-status tuple recorded into GlobalData.
142 if item
== "CacheDone":
145 GlobalData
.gModuleAllCacheStatus
.add(item
)
146 if cache_num
== len(self
.autogen_workers
):
# Reached only if draining the cache queue failed (frame lines missing).
149 print ("cache_q error")
# Signal all workers to stop: set the shared error event and push a None
# sentinel onto the feedback queue.
151 def TerminateWorkers(self
):
152 self
.error_event
.set()
154 self
.feedback_q
.put(None)
# AutoGenWorkerInProcess: a multiprocessing.Process that pulls module build
# tasks from a shared queue, rebuilds the platform context from a serialized
# data pipe, runs ModuleAutoGen for each module (code/makefile/as-built-inf
# generation plus build-cache checks), and reports progress on feedback_q.
# NOTE(review): lossy extraction — statements are split across physical lines
# and several original lines are missing (run() header, try/except frames,
# loop frames, 'arch' assignment, groupobj initialization). Code text kept
# byte-for-byte; only comments were added.
155 class AutoGenWorkerInProcess(mp
.Process
):
# Constructor — stores the IPC objects shared with the parent process.
#   module_queue        : queue of per-module task tuples (see run body).
#   data_pipe_file_path : path of the serialized MemoryDataPipe to load.
#   feedback_q          : status queue back to AutoGenManager.
#   file_lock           : shared lock, published into GlobalData.file_lock.
#   cache_q             : queue for per-module cache-status tuples.
#   log_q               : queue consumed by LogAgent (stored on a missing
#                         line — later code reads self.log_q; confirm).
#   error_event         : event polled to abort early.
156 def __init__(self
,module_queue
,data_pipe_file_path
,feedback_q
,file_lock
,cache_q
,log_q
,error_event
):
157 mp
.Process
.__init
__(self
)
158 self
.module_queue
= module_queue
159 self
.data_pipe_file_path
=data_pipe_file_path
160 self
.data_pipe
= None
161 self
.feedback_q
= feedback_q
162 self
.PlatformMetaFileSet
= {}
163 self
.file_lock
= file_lock
164 self
.cache_q
= cache_q
166 self
.error_event
= error_event
# Memoized lookup of the platform meta file keyed by (filepath, root);
# on a miss the filepath itself is stored and returned (the try/except
# frame around the first return is missing from this extraction).
167 def GetPlatformMetaFile(self
,filepath
,root
):
169 return self
.PlatformMetaFileSet
[(filepath
,root
)]
171 self
.PlatformMetaFileSet
[(filepath
,root
)] = filepath
172 return self
.PlatformMetaFileSet
[(filepath
,root
)]
# --- run() body (the 'def run' header and its try frame are missing). ---
# Step 1: restore the build context serialized by the parent process.
178 self
.data_pipe
= MemoryDataPipe()
179 self
.data_pipe
.load(self
.data_pipe_file_path
)
# Failure path: report the load error to the manager via feedback_q
# (the enclosing except frame is missing from this extraction).
181 self
.feedback_q
.put(taskname
+ ":" + "load data pipe %s failed." % self
.data_pipe_file_path
)
# Step 2: route this process's logging through the shared log queue.
182 EdkLogger
.LogClientInitialize(self
.log_q
)
183 loglevel
= self
.data_pipe
.Get("LogLevel")
# Default to INFO when the pipe carries no level (guard line missing).
185 loglevel
= EdkLogger
.INFO
186 EdkLogger
.SetLevel(loglevel
)
# Step 3: rebuild platform info and workspace state from the data pipe.
187 target
= self
.data_pipe
.Get("P_Info").get("Target")
188 toolchain
= self
.data_pipe
.Get("P_Info").get("ToolChain")
189 archlist
= self
.data_pipe
.Get("P_Info").get("ArchList")
191 active_p
= self
.data_pipe
.Get("P_Info").get("ActivePlatform")
192 workspacedir
= self
.data_pipe
.Get("P_Info").get("WorkspaceDir")
193 PackagesPath
= os
.getenv("PACKAGES_PATH")
194 mws
.setWs(workspacedir
, PackagesPath
)
195 self
.Wa
= WorkSpaceInfo(
196 workspacedir
,active_p
,target
,toolchain
,archlist
198 self
.Wa
._SrcTimeStamp
= self
.data_pipe
.Get("Workspace_timestamp")
# Step 4: publish shared state into this process's GlobalData module,
# since globals are not inherited across process boundaries.
199 GlobalData
.gGlobalDefines
= self
.data_pipe
.Get("G_defines")
200 GlobalData
.gCommandLineDefines
= self
.data_pipe
.Get("CL_defines")
201 GlobalData
.gCommandMaxLength
= self
.data_pipe
.Get('gCommandMaxLength')
# HACK: writes os.environ's private backing dict to replay the parent's
# environment wholesale (CPython implementation detail).
202 os
.environ
._data
= self
.data_pipe
.Get("Env_Var")
203 GlobalData
.gWorkspace
= workspacedir
204 GlobalData
.gDisableIncludePathCheck
= False
205 GlobalData
.gFdfParser
= self
.data_pipe
.Get("FdfParser")
206 GlobalData
.gDatabasePath
= self
.data_pipe
.Get("DatabasePath")
# Build-cache configuration and per-module cache bookkeeping dicts.
208 GlobalData
.gUseHashCache
= self
.data_pipe
.Get("UseHashCache")
209 GlobalData
.gBinCacheSource
= self
.data_pipe
.Get("BinCacheSource")
210 GlobalData
.gBinCacheDest
= self
.data_pipe
.Get("BinCacheDest")
211 GlobalData
.gPlatformHashFile
= self
.data_pipe
.Get("PlatformHashFile")
212 GlobalData
.gModulePreMakeCacheStatus
= dict()
213 GlobalData
.gModuleMakeCacheStatus
= dict()
214 GlobalData
.gHashChainStatus
= dict()
215 GlobalData
.gCMakeHashFile
= dict()
216 GlobalData
.gModuleHashFile
= dict()
217 GlobalData
.gFileHashDict
= dict()
218 GlobalData
.gEnableGenfdsMultiThread
= self
.data_pipe
.Get("EnableGenfdsMultiThread")
219 GlobalData
.file_lock
= self
.file_lock
220 CommandTarget
= self
.data_pipe
.Get("CommandTarget")
# Step 5: reconstruct --pcd command-line overrides in
# "TokenSpace.PcdName[.Field]=Value" form from the pipe's tuples.
221 pcd_from_build_option
= []
222 for pcd_tuple
in self
.data_pipe
.Get("BuildOptPcd"):
223 pcd_id
= ".".join((pcd_tuple
[0],pcd_tuple
[1]))
224 if pcd_tuple
[2].strip():
225 pcd_id
= ".".join((pcd_id
,pcd_tuple
[2]))
226 pcd_from_build_option
.append("=".join((pcd_id
,pcd_tuple
[3])))
227 GlobalData
.BuildOptionPcd
= pcd_from_build_option
229 FfsCmd
= self
.data_pipe
.Get("FfsCommand")
232 GlobalData
.FfsCmd
= FfsCmd
233 PlatformMetaFile
= self
.GetPlatformMetaFile(self
.data_pipe
.Get("P_Info").get("ActivePlatform"),
234 self
.data_pipe
.Get("P_Info").get("WorkspaceDir"))
# Step 6: per-module work loop (the 'while True' frame is missing).
# Abort promptly when the manager has signalled an error.
236 if self
.error_event
.is_set():
# Non-blocking fetch of the next task tuple; an Empty exception here is
# treated as a transient "fake empty" (except frame missing).
240 module_file
,module_root
,module_path
,module_basename
,module_originalpath
,module_arch
,IsLib
= self
.module_queue
.get_nowait()
242 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Worker %s: %s" % (os
.getpid(), "Fake Empty."))
# A None module_file is the end-of-queue sentinel: report and stop.
245 if module_file
is None:
246 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Worker %s: %s" % (os
.getpid(), "Worker get the last item in the queue."))
247 self
.feedback_q
.put("QueueEmpty")
# Build the module's PathClass meta file, restoring cached path fields
# to avoid recomputing them in this process.
251 modulefullpath
= os
.path
.join(module_root
,module_file
)
252 taskname
= " : ".join((modulefullpath
,module_arch
))
253 module_metafile
= PathClass(module_file
,module_root
)
255 module_metafile
.Path
= module_path
257 module_metafile
.BaseName
= module_basename
258 if module_originalpath
:
259 module_metafile
.OriginalPath
= PathClass(module_originalpath
,module_root
)
261 target
= self
.data_pipe
.Get("P_Info").get("Target")
262 toolchain
= self
.data_pipe
.Get("P_Info").get("ToolChain")
# NOTE(review): 'arch' is referenced here but its assignment (presumably
# from module_arch) is on a line missing from this extraction — confirm.
263 Ma
= ModuleAutoGen(self
.Wa
,module_metafile
,target
,toolchain
,arch
,PlatformMetaFile
,self
.data_pipe
)
265 # SourceFileList calling sequence impacts the makefile string sequence.
266 # Create cached SourceFileList here to unify its calling sequence for both
267 # CanSkipbyPreMakeCache and CreateCodeFile/CreateMakeFile.
268 RetVal
= Ma
.SourceFileList
# Pre-make cache check: only when hash caching is on, we are not
# populating a cache destination, and the target is a full build.
269 if GlobalData
.gUseHashCache
and not GlobalData
.gBinCacheDest
and CommandTarget
in [None, "", "all"]:
271 CacheResult
= Ma
.CanSkipbyPreMakeCache()
# Cache hit: report the task done and record the hit (branch frames
# around these puts are missing from this extraction).
274 self
.feedback_q
.put(taskname
)
277 self
.cache_q
.put((Ma
.MetaFile
.Path
, Ma
.Arch
, "PreMakeCache", True))
280 self
.cache_q
.put((Ma
.MetaFile
.Path
, Ma
.Arch
, "PreMakeCache", False))
# No cache skip: generate code, makefile (with this module's FFS
# commands), and the as-built INF.
282 Ma
.CreateCodeFile(False)
283 Ma
.CreateMakeFile(False,GenFfsList
=FfsCmd
.get((Ma
.MetaFile
.Path
, Ma
.Arch
),[]))
284 Ma
.CreateAsBuiltInf()
# Make-cache check: only when consuming a binary cache on a full build.
285 if GlobalData
.gBinCacheSource
and CommandTarget
in [None, "", "all"]:
287 CacheResult
= Ma
.CanSkipbyMakeCache()
290 self
.feedback_q
.put(taskname
)
293 self
.cache_q
.put((Ma
.MetaFile
.Path
, Ma
.Arch
, "MakeCache", True))
296 self
.cache_q
.put((Ma
.MetaFile
.Path
, Ma
.Arch
, "MakeCache", False))
# Error path for the whole run body: log the exception and report the
# failing task so the manager can terminate the pool.
298 except Exception as e
:
299 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Worker %s: %s" % (os
.getpid(), str(e
)))
300 self
.feedback_q
.put(taskname
)
# Normal shutdown: signal completion on both queues.
302 EdkLogger
.debug(EdkLogger
.DEBUG_9
, "Worker %s: %s" % (os
.getpid(), "Done"))
303 self
.feedback_q
.put("Done")
304 self
.cache_q
.put("CacheDone")
# Debug helper: print cache sizes for this process. Groups the workspace
# database objects by file extension (dec/dsc/inf); groupobj's creation
# and the if/else frames are missing from this extraction.
306 def printStatus(self
):
307 print("Processs ID: %d Run %d modules in AutoGen " % (os
.getpid(),len(AutoGen
.Cache())))
308 print("Processs ID: %d Run %d modules in AutoGenInfo " % (os
.getpid(),len(AutoGenInfo
.GetCache())))
310 for buildobj
in BuildDB
.BuildObject
.GetCache().values():
311 if str(buildobj
).lower().endswith("dec"):
313 groupobj
['dec'].append(str(buildobj
))
315 groupobj
['dec'] = [str(buildobj
)]
316 if str(buildobj
).lower().endswith("dsc"):
318 groupobj
['dsc'].append(str(buildobj
))
320 groupobj
['dsc'] = [str(buildobj
)]
322 if str(buildobj
).lower().endswith("inf"):
324 groupobj
['inf'].append(str(buildobj
))
326 groupobj
['inf'] = [str(buildobj
)]
328 print("Processs ID: %d Run %d pkg in WDB " % (os
.getpid(),len(groupobj
.get("dec",[]))))
329 print("Processs ID: %d Run %d pla in WDB " % (os
.getpid(),len(groupobj
.get("dsc",[]))))
330 print("Processs ID: %d Run %d inf in WDB " % (os
.getpid(),len(groupobj
.get("inf",[]))))