]> git.proxmox.com Git - mirror_edk2.git/blob - BaseTools/Source/Python/AutoGen/AutoGenWorker.py
BaseTools:Fix build tools print traceback info issue
[mirror_edk2.git] / BaseTools / Source / Python / AutoGen / AutoGenWorker.py
1 ## @file
2 # Create makefile for MS nmake and GNU make
3 #
4 # Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
5 # SPDX-License-Identifier: BSD-2-Clause-Patent
6 #
7 from __future__ import absolute_import
8 import multiprocessing as mp
9 import threading
10 from Common.Misc import PathClass
11 from AutoGen.ModuleAutoGen import ModuleAutoGen
12 from AutoGen.ModuleAutoGenHelper import WorkSpaceInfo,AutoGenInfo
13 import Common.GlobalData as GlobalData
14 import Common.EdkLogger as EdkLogger
15 import os
16 from Common.MultipleWorkspace import MultipleWorkspace as mws
17 from AutoGen.AutoGen import AutoGen
18 from Workspace.WorkspaceDatabase import BuildDB
19 try:
20 from queue import Empty
21 except:
22 from Queue import Empty
23 import traceback
24 import sys
25 from AutoGen.DataPipe import MemoryDataPipe
26 import logging
27
def clearQ(q):
    """Drain every pending item from queue *q* without blocking.

    Items are discarded; returns as soon as the queue reports Empty.
    """
    while True:
        try:
            q.get_nowait()
        except Empty:
            return
34
class LogAgent(threading.Thread):
    """Background thread that drains a shared log queue and replays each
    record on a local logger (and, optionally, a log file)."""

    def __init__(self, log_q, log_level, log_file=None):
        super(LogAgent, self).__init__()
        self.log_q = log_q          # queue of logging.LogRecord objects from workers
        self.log_level = log_level  # level applied to every agent logger
        self.log_file = log_file    # optional path mirrored to disk

    def InitLogger(self):
        """Create the debug/info/error agent loggers with their handlers."""
        # For DEBUG level (All DEBUG_0~9 are applicable)
        _DebugFormatter = logging.Formatter("[%(asctime)s.%(msecs)d]: %(message)s", datefmt="%H:%M:%S")
        self._DebugLogger_agent = self._BuildLogger("tool_debug_agent", _DebugFormatter, sys.stdout)

        # For VERBOSE, INFO, WARN level
        _InfoFormatter = logging.Formatter("%(message)s")
        self._InfoLogger_agent = self._BuildLogger("tool_info_agent", _InfoFormatter, sys.stdout)

        # For ERROR level
        _ErrorFormatter = logging.Formatter("%(message)s")
        self._ErrorLogger_agent = self._BuildLogger("tool_error_agent", _ErrorFormatter, sys.stderr)

        if self.log_file:
            # Start the log file from scratch for this build.
            if os.path.exists(self.log_file):
                os.remove(self.log_file)
            for agent_logger, formatter in ((self._DebugLogger_agent, _DebugFormatter),
                                            (self._InfoLogger_agent, _InfoFormatter),
                                            (self._ErrorLogger_agent, _ErrorFormatter)):
                file_channel = logging.FileHandler(self.log_file)
                file_channel.setFormatter(formatter)
                agent_logger.addHandler(file_channel)

    def _BuildLogger(self, name, formatter, stream):
        # Helper: one named logger at self.log_level with a single stream handler.
        agent_logger = logging.getLogger(name)
        agent_logger.setLevel(self.log_level)
        channel = logging.StreamHandler(stream)
        channel.setFormatter(formatter)
        agent_logger.addHandler(channel)
        return agent_logger

    def run(self):
        """Consume records until a None sentinel arrives (see kill())."""
        self.InitLogger()
        while True:
            record = self.log_q.get()
            if record is None:
                break
            if record.name == "tool_error":
                target = self._ErrorLogger_agent
            elif record.name == "tool_debug":
                target = self._DebugLogger_agent
            else:
                # "tool_info" and any unknown producer both go to the info logger.
                target = self._InfoLogger_agent
            target.log(record.levelno, record.getMessage())

    def kill(self):
        # Sentinel that makes run() exit its loop.
        self.log_q.put(None)
class AutoGenManager(threading.Thread):
    """Collector thread for the AutoGen worker processes.

    Drains the shared feedback queue: each worker posts "Done" when it
    finishes, posts an error description string on failure, and kill()
    posts a None sentinel to stop this thread early.
    """
    def __init__(self, autogen_workers, feedback_q, error_event):
        super(AutoGenManager, self).__init__()
        self.autogen_workers = autogen_workers  # list of AutoGenWorkerInProcess
        self.feedback_q = feedback_q            # queue written by the workers
        self.Status = True                      # False once any worker reports an error
        self.error_event = error_event          # event that tells workers to stop

    def run(self):
        try:
            fin_num = 0
            while True:
                badnews = self.feedback_q.get()
                if badnews is None:
                    # kill() sentinel: stop without waiting for the workers.
                    break
                if badnews == "Done":
                    fin_num += 1
                else:
                    # Any other message is an error report from a worker; keep
                    # looping so every worker can still check in with "Done".
                    self.Status = False
                    self.TerminateWorkers()
                if fin_num == len(self.autogen_workers):
                    # Every worker checked in; flush queues and reap them.
                    self.clearQueue()
                    for w in self.autogen_workers:
                        w.join()
                    break
        except Exception:
            # Never let bookkeeping failures take down the main build thread.
            return

    def clearQueue(self):
        taskq = self.autogen_workers[0].module_queue
        logq = self.autogen_workers[0].log_q
        clearQ(taskq)
        clearQ(self.feedback_q)
        clearQ(logq)
        # Copy the cache queue items to parent thread before clear
        cacheq = self.autogen_workers[0].cache_q
        try:
            cache_num = 0
            while True:
                item = cacheq.get()
                if item == "CacheDone":
                    cache_num += 1
                else:
                    GlobalData.gModuleAllCacheStatus.add(item)
                if cache_num == len(self.autogen_workers):
                    break
        except Exception:
            # Narrowed from a bare except so SystemExit/KeyboardInterrupt
            # still propagate; a queue failure here is only best-effort.
            print("cache_q error")

    def TerminateWorkers(self):
        # Signal every worker process to stop picking up new modules.
        self.error_event.set()

    def kill(self):
        # Wake run() with a sentinel so it exits promptly.
        self.feedback_q.put(None)
class AutoGenWorkerInProcess(mp.Process):
    """Worker process that runs ModuleAutoGen for modules pulled from a
    shared task queue.

    Progress and errors are reported through feedback_q (error text per
    failed task, then a final "Done"), and build-cache results through
    cache_q (tuples plus a final "CacheDone").
    """
    def __init__(self, module_queue, data_pipe_file_path, feedback_q, file_lock, cache_q, log_q, error_event):
        mp.Process.__init__(self)
        self.module_queue = module_queue                # tasks: (file, root, path, basename, originalpath, arch, IsLib)
        self.data_pipe_file_path = data_pipe_file_path  # pickled MemoryDataPipe from the main process
        self.data_pipe = None
        self.feedback_q = feedback_q                    # error reports and the final "Done"
        self.PlatformMetaFileSet = {}                   # cache: (filepath, root) -> platform meta file
        self.file_lock = file_lock
        self.cache_q = cache_q                          # (path, arch, kind, hit) tuples plus "CacheDone"
        self.log_q = log_q
        self.error_event = error_event                  # set by the manager to abort remaining work

    def GetPlatformMetaFile(self, filepath, root):
        """Return the cached platform meta file for (filepath, root),
        creating the entry (as filepath itself) on first use."""
        try:
            return self.PlatformMetaFileSet[(filepath, root)]
        except KeyError:
            # Narrowed from a bare except: only a missing key is expected here.
            self.PlatformMetaFileSet[(filepath, root)] = filepath
            return self.PlatformMetaFileSet[(filepath, root)]

    def run(self):
        """Process entry point: rebuild global build state from the data
        pipe, then AutoGen each queued module until the queue drains or
        the error event is set.

        NOTE(review): the broad except clauses are deliberate — this file
        exists to report failures via feedback_q instead of printing
        worker tracebacks; do not narrow them without checking callers.
        """
        try:
            taskname = "Init"
            with self.file_lock:
                try:
                    self.data_pipe = MemoryDataPipe()
                    self.data_pipe.load(self.data_pipe_file_path)
                except:
                    # Report the load failure to the parent; execution then
                    # continues and fails fast on the first data_pipe access.
                    self.feedback_q.put(taskname + ":" + "load data pipe %s failed." % self.data_pipe_file_path)
            EdkLogger.LogClientInitialize(self.log_q)
            loglevel = self.data_pipe.Get("LogLevel")
            if not loglevel:
                loglevel = EdkLogger.INFO
            EdkLogger.SetLevel(loglevel)
            target = self.data_pipe.Get("P_Info").get("Target")
            toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
            archlist = self.data_pipe.Get("P_Info").get("ArchList")

            active_p = self.data_pipe.Get("P_Info").get("ActivePlatform")
            workspacedir = self.data_pipe.Get("P_Info").get("WorkspaceDir")
            PackagesPath = os.getenv("PACKAGES_PATH")
            mws.setWs(workspacedir, PackagesPath)
            self.Wa = WorkSpaceInfo(
                workspacedir, active_p, target, toolchain, archlist
                )
            self.Wa._SrcTimeStamp = self.data_pipe.Get("Workspace_timestamp")
            # Mirror the parent process's global defines and environment so
            # ModuleAutoGen behaves identically in this worker.
            GlobalData.gGlobalDefines = self.data_pipe.Get("G_defines")
            GlobalData.gCommandLineDefines = self.data_pipe.Get("CL_defines")
            os.environ._data = self.data_pipe.Get("Env_Var")
            GlobalData.gWorkspace = workspacedir
            GlobalData.gDisableIncludePathCheck = False
            GlobalData.gFdfParser = self.data_pipe.Get("FdfParser")
            GlobalData.gDatabasePath = self.data_pipe.Get("DatabasePath")

            # Build-cache related globals, fresh per worker process.
            GlobalData.gUseHashCache = self.data_pipe.Get("UseHashCache")
            GlobalData.gBinCacheSource = self.data_pipe.Get("BinCacheSource")
            GlobalData.gBinCacheDest = self.data_pipe.Get("BinCacheDest")
            GlobalData.gPlatformHashFile = self.data_pipe.Get("PlatformHashFile")
            GlobalData.gModulePreMakeCacheStatus = dict()
            GlobalData.gModuleMakeCacheStatus = dict()
            GlobalData.gHashChainStatus = dict()
            GlobalData.gCMakeHashFile = dict()
            GlobalData.gModuleHashFile = dict()
            GlobalData.gFileHashDict = dict()
            GlobalData.gEnableGenfdsMultiThread = self.data_pipe.Get("EnableGenfdsMultiThread")
            GlobalData.file_lock = self.file_lock
            CommandTarget = self.data_pipe.Get("CommandTarget")
            # Rebuild the --pcd command-line overrides as "name=value" strings.
            pcd_from_build_option = []
            for pcd_tuple in self.data_pipe.Get("BuildOptPcd"):
                pcd_id = ".".join((pcd_tuple[0], pcd_tuple[1]))
                if pcd_tuple[2].strip():
                    pcd_id = ".".join((pcd_id, pcd_tuple[2]))
                pcd_from_build_option.append("=".join((pcd_id, pcd_tuple[3])))
            GlobalData.BuildOptionPcd = pcd_from_build_option
            module_count = 0
            FfsCmd = self.data_pipe.Get("FfsCommand")
            if FfsCmd is None:
                FfsCmd = {}
            GlobalData.FfsCmd = FfsCmd
            PlatformMetaFile = self.GetPlatformMetaFile(self.data_pipe.Get("P_Info").get("ActivePlatform"),
                                                        self.data_pipe.Get("P_Info").get("WorkspaceDir"))
            while True:
                if self.module_queue.empty():
                    break
                if self.error_event.is_set():
                    break
                module_count += 1
                module_file, module_root, module_path, module_basename, module_originalpath, module_arch, IsLib = self.module_queue.get_nowait()
                modulefullpath = os.path.join(module_root, module_file)
                # taskname identifies this module in any error report below.
                taskname = " : ".join((modulefullpath, module_arch))
                module_metafile = PathClass(module_file, module_root)
                if module_path:
                    module_metafile.Path = module_path
                if module_basename:
                    module_metafile.BaseName = module_basename
                if module_originalpath:
                    module_metafile.OriginalPath = PathClass(module_originalpath, module_root)
                arch = module_arch
                target = self.data_pipe.Get("P_Info").get("Target")
                toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
                Ma = ModuleAutoGen(self.Wa, module_metafile, target, toolchain, arch, PlatformMetaFile, self.data_pipe)
                Ma.IsLibrary = IsLib
                # SourceFileList calling sequence impact the makefile string sequence.
                # Create cached SourceFileList here to unify its calling sequence for both
                # CanSkipbyPreMakeCache and CreateCodeFile/CreateMakeFile.
                RetVal = Ma.SourceFileList
                if GlobalData.gUseHashCache and not GlobalData.gBinCacheDest and CommandTarget in [None, "", "all"]:
                    try:
                        CacheResult = Ma.CanSkipbyPreMakeCache()
                    except:
                        # Cache probing is best-effort: report and fall through
                        # to a normal build rather than abort the worker.
                        CacheResult = False
                        self.feedback_q.put(taskname)

                    if CacheResult:
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "PreMakeCache", True))
                        continue
                    else:
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "PreMakeCache", False))

                Ma.CreateCodeFile(False)
                Ma.CreateMakeFile(False, GenFfsList=FfsCmd.get((Ma.MetaFile.Path, Ma.Arch), []))

                if GlobalData.gBinCacheSource and CommandTarget in [None, "", "all"]:
                    try:
                        CacheResult = Ma.CanSkipbyMakeCache()
                    except:
                        # Same best-effort policy as the PreMake cache probe.
                        CacheResult = False
                        self.feedback_q.put(taskname)

                    if CacheResult:
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "MakeCache", True))
                        continue
                    else:
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "MakeCache", False))

        except Empty:
            # Benign race: the queue drained between empty() and get_nowait().
            pass
        except:
            # Report the failing task instead of printing a traceback.
            self.feedback_q.put(taskname)
        finally:
            # Always let the manager account for this worker, even on failure.
            self.feedback_q.put("Done")
            self.cache_q.put("CacheDone")

    def printStatus(self):
        """Debug helper: print per-process AutoGen and workspace-database
        cache statistics."""
        print("Processs ID: %d Run %d modules in AutoGen " % (os.getpid(), len(AutoGen.Cache())))
        print("Processs ID: %d Run %d modules in AutoGenInfo " % (os.getpid(), len(AutoGenInfo.GetCache())))
        groupobj = {}
        for buildobj in BuildDB.BuildObject.GetCache().values():
            # Group cached build objects by meta-file type; setdefault replaces
            # the original try/except-KeyError append idiom.
            objname = str(buildobj)
            lowered = objname.lower()
            if lowered.endswith("dec"):
                groupobj.setdefault('dec', []).append(objname)
            if lowered.endswith("dsc"):
                groupobj.setdefault('dsc', []).append(objname)
            if lowered.endswith("inf"):
                groupobj.setdefault('inf', []).append(objname)

        print("Processs ID: %d Run %d pkg in WDB " % (os.getpid(), len(groupobj.get("dec", []))))
        print("Processs ID: %d Run %d pla in WDB " % (os.getpid(), len(groupobj.get("dsc", []))))
        print("Processs ID: %d Run %d inf in WDB " % (os.getpid(), len(groupobj.get("inf", []))))