## @file
# Create makefile for MS nmake and GNU make
#
# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
from __future__ import absolute_import
import multiprocessing as mp
import threading
from Common.Misc import PathClass
from AutoGen.ModuleAutoGen import ModuleAutoGen
from AutoGen.ModuleAutoGenHelper import WorkSpaceInfo,AutoGenInfo
import Common.GlobalData as GlobalData
import Common.EdkLogger as EdkLogger
import os
from Common.MultipleWorkspace import MultipleWorkspace as mws
from AutoGen.AutoGen import AutoGen
from Workspace.WorkspaceDatabase import BuildDB
try:
    from queue import Empty
except:
    from Queue import Empty
import traceback
import sys
from AutoGen.DataPipe import MemoryDataPipe
import logging

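# Drain a queue without blocking; AutoGenManager uses this to discard any
# items still pending when the workers have finished or been terminated.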
def clearQ(q):
    try:
        while True:
            q.get_nowait()
    except Empty:
        pass

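# LogAgent is a thread that drains logging records from log_q (filled by the
# worker processes through EdkLogger.LogClientInitialize) and re-emits them on
# stdout/stderr loggers, optionally mirroring everything to a log file.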
class LogAgent(threading.Thread):
    def __init__(self,log_q,log_level,log_file=None):
        super(LogAgent,self).__init__()
        self.log_q = log_q
        self.log_level = log_level
        self.log_file = log_file
    def InitLogger(self):
        # For DEBUG level (All DEBUG_0~9 are applicable)
        self._DebugLogger_agent = logging.getLogger("tool_debug_agent")
        _DebugFormatter = logging.Formatter("[%(asctime)s.%(msecs)d]: %(message)s", datefmt="%H:%M:%S")
        self._DebugLogger_agent.setLevel(self.log_level)
        _DebugChannel = logging.StreamHandler(sys.stdout)
        _DebugChannel.setFormatter(_DebugFormatter)
        self._DebugLogger_agent.addHandler(_DebugChannel)

        # For VERBOSE, INFO, WARN level
        self._InfoLogger_agent = logging.getLogger("tool_info_agent")
        _InfoFormatter = logging.Formatter("%(message)s")
        self._InfoLogger_agent.setLevel(self.log_level)
        _InfoChannel = logging.StreamHandler(sys.stdout)
        _InfoChannel.setFormatter(_InfoFormatter)
        self._InfoLogger_agent.addHandler(_InfoChannel)

        # For ERROR level
        self._ErrorLogger_agent = logging.getLogger("tool_error_agent")
        _ErrorFormatter = logging.Formatter("%(message)s")
        self._ErrorLogger_agent.setLevel(self.log_level)
        _ErrorCh = logging.StreamHandler(sys.stderr)
        _ErrorCh.setFormatter(_ErrorFormatter)
        self._ErrorLogger_agent.addHandler(_ErrorCh)

        if self.log_file:
            if os.path.exists(self.log_file):
                os.remove(self.log_file)
            _Ch = logging.FileHandler(self.log_file)
            _Ch.setFormatter(_DebugFormatter)
            self._DebugLogger_agent.addHandler(_Ch)

            _Ch = logging.FileHandler(self.log_file)
            _Ch.setFormatter(_InfoFormatter)
            self._InfoLogger_agent.addHandler(_Ch)

            _Ch = logging.FileHandler(self.log_file)
            _Ch.setFormatter(_ErrorFormatter)
            self._ErrorLogger_agent.addHandler(_Ch)

    def run(self):
        self.InitLogger()
        while True:
            log_message = self.log_q.get()
            if log_message is None:
                break
            if log_message.name == "tool_error":
                self._ErrorLogger_agent.log(log_message.levelno,log_message.getMessage())
            elif log_message.name == "tool_info":
                self._InfoLogger_agent.log(log_message.levelno,log_message.getMessage())
            elif log_message.name == "tool_debug":
                self._DebugLogger_agent.log(log_message.levelno,log_message.getMessage())
            else:
                self._InfoLogger_agent.log(log_message.levelno,log_message.getMessage())

    def kill(self):
        self.log_q.put(None)
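# AutoGenManager is the collector thread on the parent side: it counts "Done"
# messages from feedback_q, copies cache results back from the workers'
# cache_q, and sets error_event to stop all workers as soon as one reports a
# failure.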
class AutoGenManager(threading.Thread):
    def __init__(self,autogen_workers, feedback_q,error_event):
        super(AutoGenManager,self).__init__()
        self.autogen_workers = autogen_workers
        self.feedback_q = feedback_q
        self.Status = True
        self.error_event = error_event
    def run(self):
        try:
            fin_num = 0
            while True:
                badnews = self.feedback_q.get()
                if badnews is None:
                    break
                if badnews == "Done":
                    fin_num += 1
                else:
                    self.Status = False
                    self.TerminateWorkers()
                if fin_num == len(self.autogen_workers):
                    self.clearQueue()
                    for w in self.autogen_workers:
                        w.join()
                    break
        except Exception:
            return

    def clearQueue(self):
        taskq = self.autogen_workers[0].module_queue
        logq = self.autogen_workers[0].log_q
        clearQ(taskq)
        clearQ(self.feedback_q)
        clearQ(logq)
        # Copy the cache queue items to the parent thread before clearing
        cacheq = self.autogen_workers[0].cache_q
        try:
            cache_num = 0
            while True:
                item = cacheq.get()
                if item == "CacheDone":
                    cache_num += 1
                else:
                    GlobalData.gModuleAllCacheStatus.add(item)
                if cache_num == len(self.autogen_workers):
                    break
        except:
            print("cache_q error")

    def TerminateWorkers(self):
        self.error_event.set()
    def kill(self):
        self.feedback_q.put(None)
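# AutoGenWorkerInProcess is one of several child processes doing module-level
# AutoGen. Rough wiring, inferred from the constructor signature (the caller is
# not part of this file): the parent dumps a MemoryDataPipe to
# data_pipe_file_path, fills module_queue with per-module tuples, then starts
# the workers, which report progress on feedback_q, cache results on cache_q,
# log records on log_q, and stop early once error_event is set.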
class AutoGenWorkerInProcess(mp.Process):
    def __init__(self,module_queue,data_pipe_file_path,feedback_q,file_lock,cache_q,log_q,error_event):
        mp.Process.__init__(self)
        self.module_queue = module_queue
        self.data_pipe_file_path = data_pipe_file_path
        self.data_pipe = None
        self.feedback_q = feedback_q
        self.PlatformMetaFileSet = {}
        self.file_lock = file_lock
        self.cache_q = cache_q
        self.log_q = log_q
        self.error_event = error_event
    def GetPlatformMetaFile(self,filepath,root):
        try:
            return self.PlatformMetaFileSet[(filepath,root)]
        except:
            self.PlatformMetaFileSet[(filepath,root)] = filepath
            return self.PlatformMetaFileSet[(filepath,root)]
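    # run() re-creates the build context inside the child process: it loads the
    # MemoryDataPipe the parent serialized, re-initializes logging and
    # GlobalData from it, then pulls modules off module_queue and runs AutoGen
    # (CreateCodeFile/CreateMakeFile) for each of them.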
    def run(self):
        try:
            taskname = "Init"
            with self.file_lock:
                try:
                    self.data_pipe = MemoryDataPipe()
                    self.data_pipe.load(self.data_pipe_file_path)
                except:
                    self.feedback_q.put(taskname + ":" + "load data pipe %s failed." % self.data_pipe_file_path)
            EdkLogger.LogClientInitialize(self.log_q)
            loglevel = self.data_pipe.Get("LogLevel")
            if not loglevel:
                loglevel = EdkLogger.INFO
            EdkLogger.SetLevel(loglevel)
            target = self.data_pipe.Get("P_Info").get("Target")
            toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
            archlist = self.data_pipe.Get("P_Info").get("ArchList")

            active_p = self.data_pipe.Get("P_Info").get("ActivePlatform")
            workspacedir = self.data_pipe.Get("P_Info").get("WorkspaceDir")
            PackagesPath = os.getenv("PACKAGES_PATH")
            mws.setWs(workspacedir, PackagesPath)
            self.Wa = WorkSpaceInfo(
                workspacedir,active_p,target,toolchain,archlist
                )
            self.Wa._SrcTimeStamp = self.data_pipe.Get("Workspace_timestamp")
            GlobalData.gGlobalDefines = self.data_pipe.Get("G_defines")
            GlobalData.gCommandLineDefines = self.data_pipe.Get("CL_defines")
            os.environ._data = self.data_pipe.Get("Env_Var")
            GlobalData.gWorkspace = workspacedir
            GlobalData.gDisableIncludePathCheck = False
            GlobalData.gFdfParser = self.data_pipe.Get("FdfParser")
            GlobalData.gDatabasePath = self.data_pipe.Get("DatabasePath")

            GlobalData.gUseHashCache = self.data_pipe.Get("UseHashCache")
            GlobalData.gBinCacheSource = self.data_pipe.Get("BinCacheSource")
            GlobalData.gBinCacheDest = self.data_pipe.Get("BinCacheDest")
            GlobalData.gPlatformHashFile = self.data_pipe.Get("PlatformHashFile")
            GlobalData.gModulePreMakeCacheStatus = dict()
            GlobalData.gModuleMakeCacheStatus = dict()
            GlobalData.gHashChainStatus = dict()
            GlobalData.gCMakeHashFile = dict()
            GlobalData.gModuleHashFile = dict()
            GlobalData.gFileHashDict = dict()
            GlobalData.gEnableGenfdsMultiThread = self.data_pipe.Get("EnableGenfdsMultiThread")
            GlobalData.file_lock = self.file_lock
            CommandTarget = self.data_pipe.Get("CommandTarget")
            pcd_from_build_option = []
            for pcd_tuple in self.data_pipe.Get("BuildOptPcd"):
                pcd_id = ".".join((pcd_tuple[0],pcd_tuple[1]))
                if pcd_tuple[2].strip():
                    pcd_id = ".".join((pcd_id,pcd_tuple[2]))
                pcd_from_build_option.append("=".join((pcd_id,pcd_tuple[3])))
            GlobalData.BuildOptionPcd = pcd_from_build_option
            module_count = 0
            FfsCmd = self.data_pipe.Get("FfsCommand")
            if FfsCmd is None:
                FfsCmd = {}
            GlobalData.FfsCmd = FfsCmd
            PlatformMetaFile = self.GetPlatformMetaFile(self.data_pipe.Get("P_Info").get("ActivePlatform"),
                                                        self.data_pipe.Get("P_Info").get("WorkspaceDir"))
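            # Main worker loop: take one module description at a time from the
            # shared queue and stop as soon as the queue is empty or another
            # worker has signalled a failure through error_event.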
            while True:
                if self.module_queue.empty():
                    break
                if self.error_event.is_set():
                    break
                module_count += 1
                module_file,module_root,module_path,module_basename,module_originalpath,module_arch,IsLib = self.module_queue.get_nowait()
                modulefullpath = os.path.join(module_root,module_file)
                taskname = " : ".join((modulefullpath,module_arch))
                module_metafile = PathClass(module_file,module_root)
                if module_path:
                    module_metafile.Path = module_path
                if module_basename:
                    module_metafile.BaseName = module_basename
                if module_originalpath:
                    module_metafile.OriginalPath = PathClass(module_originalpath,module_root)
                arch = module_arch
                target = self.data_pipe.Get("P_Info").get("Target")
                toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
                Ma = ModuleAutoGen(self.Wa,module_metafile,target,toolchain,arch,PlatformMetaFile,self.data_pipe)
                Ma.IsLibrary = IsLib
                # The SourceFileList calling sequence impacts the makefile string sequence.
                # Create the cached SourceFileList here to unify its calling sequence for both
                # CanSkipbyPreMakeCache and CreateCodeFile/CreateMakeFile.
                RetVal = Ma.SourceFileList
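                # With hash caching enabled on a full build (no explicit target
                # or "all") and no cache destination configured, first check
                # whether this module can be satisfied from the pre-make cache;
                # on a hit, report it on cache_q and skip AutoGen for it.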
                if GlobalData.gUseHashCache and not GlobalData.gBinCacheDest and CommandTarget in [None, "", "all"]:
                    try:
                        CacheResult = Ma.CanSkipbyPreMakeCache()
                    except:
                        CacheResult = False
                    self.feedback_q.put(taskname)

                    if CacheResult:
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "PreMakeCache", True))
                        continue
                    else:
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "PreMakeCache", False))

                Ma.CreateCodeFile(False)
                Ma.CreateMakeFile(False,GenFfsList=FfsCmd.get((Ma.MetaFile.Path, Ma.Arch),[]))

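                # When a binary cache source is configured for a full build, the
                # make step can likewise be skipped on a make-cache hit; either
                # way the outcome is reported on cache_q for the manager thread.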
                if GlobalData.gBinCacheSource and CommandTarget in [None, "", "all"]:
                    try:
                        CacheResult = Ma.CanSkipbyMakeCache()
                    except:
                        CacheResult = False
                    self.feedback_q.put(taskname)

                    if CacheResult:
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "MakeCache", True))
                        continue
                    else:
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "MakeCache", False))

        except Empty:
            pass
        except:
            self.feedback_q.put(taskname)
        finally:
            self.feedback_q.put("Done")
            self.cache_q.put("CacheDone")

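    # Debug helper: print how many modules and metafiles this worker process
    # has accumulated in the AutoGen/AutoGenInfo caches and in the workspace
    # database, grouped by DEC/DSC/INF file type.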
    def printStatus(self):
        print("Process ID: %d Run %d modules in AutoGen " % (os.getpid(),len(AutoGen.Cache())))
        print("Process ID: %d Run %d modules in AutoGenInfo " % (os.getpid(),len(AutoGenInfo.GetCache())))
        groupobj = {}
        for buildobj in BuildDB.BuildObject.GetCache().values():
            if str(buildobj).lower().endswith("dec"):
                try:
                    groupobj['dec'].append(str(buildobj))
                except:
                    groupobj['dec'] = [str(buildobj)]
            if str(buildobj).lower().endswith("dsc"):
                try:
                    groupobj['dsc'].append(str(buildobj))
                except:
                    groupobj['dsc'] = [str(buildobj)]

            if str(buildobj).lower().endswith("inf"):
                try:
                    groupobj['inf'].append(str(buildobj))
                except:
                    groupobj['inf'] = [str(buildobj)]

        print("Process ID: %d Run %d pkg in WDB " % (os.getpid(),len(groupobj.get("dec",[]))))
        print("Process ID: %d Run %d pla in WDB " % (os.getpid(),len(groupobj.get("dsc",[]))))
        print("Process ID: %d Run %d inf in WDB " % (os.getpid(),len(groupobj.get("inf",[]))))