Source: git.proxmox.com Git mirror (mirror_edk2.git) — BaseTools/Source/Python/AutoGen/AutoGenWorker.py
Commit subject: "BaseTools: Improve the method of checking queue empty"
1 ## @file
2 # Create makefile for MS nmake and GNU make
3 #
4 # Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
5 # SPDX-License-Identifier: BSD-2-Clause-Patent
6 #
7 from __future__ import absolute_import
8 import multiprocessing as mp
9 import threading
10 from Common.Misc import PathClass
11 from AutoGen.ModuleAutoGen import ModuleAutoGen
12 from AutoGen.ModuleAutoGenHelper import WorkSpaceInfo,AutoGenInfo
13 import Common.GlobalData as GlobalData
14 import Common.EdkLogger as EdkLogger
15 import os
16 from Common.MultipleWorkspace import MultipleWorkspace as mws
17 from AutoGen.AutoGen import AutoGen
18 from Workspace.WorkspaceDatabase import BuildDB
19 try:
20 from queue import Empty
21 except:
22 from Queue import Empty
23 import traceback
24 import sys
25 from AutoGen.DataPipe import MemoryDataPipe
26 import logging
27 import time
28
def clearQ(q):
    # Drain every pending item from queue *q* without blocking; returns
    # as soon as a non-blocking get raises queue.Empty.
    while True:
        try:
            q.get_nowait()
        except Empty:
            return
35
class LogAgent(threading.Thread):
    # Background thread that drains logging.LogRecord objects from a queue
    # and replays them on per-severity logger channels, optionally mirroring
    # every channel into a log file.
    def __init__(self, log_q, log_level, log_file=None):
        super(LogAgent, self).__init__()
        self.log_q = log_q          # queue of LogRecord objects (None is the stop sentinel)
        self.log_level = log_level  # level applied to every channel
        self.log_file = log_file    # optional file path mirrored by file handlers

    def _MakeChannel(self, name, formatter, stream):
        # Build one logger channel: fixed name, configured level, a single
        # stream handler with the given formatter.
        channel = logging.getLogger(name)
        channel.setLevel(self.log_level)
        handler = logging.StreamHandler(stream)
        handler.setFormatter(formatter)
        channel.addHandler(handler)
        return channel

    def InitLogger(self):
        # For DEBUG level (All DEBUG_0~9 are applicable)
        _DebugFormatter = logging.Formatter("[%(asctime)s.%(msecs)d]: %(message)s", datefmt="%H:%M:%S")
        self._DebugLogger_agent = self._MakeChannel("tool_debug_agent", _DebugFormatter, sys.stdout)

        # For VERBOSE, INFO, WARN level
        _InfoFormatter = logging.Formatter("%(message)s")
        self._InfoLogger_agent = self._MakeChannel("tool_info_agent", _InfoFormatter, sys.stdout)

        # For ERROR level (goes to stderr rather than stdout)
        _ErrorFormatter = logging.Formatter("%(message)s")
        self._ErrorLogger_agent = self._MakeChannel("tool_error_agent", _ErrorFormatter, sys.stderr)

        if self.log_file:
            # Start the file from scratch, then mirror each channel into it
            # with that channel's own formatter.
            if os.path.exists(self.log_file):
                os.remove(self.log_file)
            for channel, formatter in ((self._DebugLogger_agent, _DebugFormatter),
                                       (self._InfoLogger_agent, _InfoFormatter),
                                       (self._ErrorLogger_agent, _ErrorFormatter)):
                file_handler = logging.FileHandler(self.log_file)
                file_handler.setFormatter(formatter)
                channel.addHandler(file_handler)

    def run(self):
        # Consume records until the None sentinel arrives, dispatching each
        # record to a channel based on the producer logger's name.
        self.InitLogger()
        while True:
            record = self.log_q.get()
            if record is None:
                break
            if record.name == "tool_error":
                channel = self._ErrorLogger_agent
            elif record.name == "tool_debug":
                channel = self._DebugLogger_agent
            else:
                # "tool_info" and any unrecognized producer share the info channel.
                channel = self._InfoLogger_agent
            channel.log(record.levelno, record.getMessage())

    def kill(self):
        # Post the sentinel that makes run() exit its loop.
        self.log_q.put(None)
class AutoGenManager(threading.Thread):
    # Thread in the main process that collects status reports from the
    # AutoGen worker processes on feedback_q, flips Status to False on any
    # worker error, and shuts the workers down once all have finished.
    def __init__(self,autogen_workers, feedback_q,error_event):
        super(AutoGenManager,self).__init__()
        self.autogen_workers = autogen_workers  # list of AutoGenWorkerInProcess to manage
        self.feedback_q = feedback_q            # queue the workers report status strings on
        self.Status = True                      # True until a worker reports an error
        self.error_event = error_event          # event used to ask workers to stop
    def run(self):
        # Collect feedback until every worker reported "Done" (or a None
        # sentinel from kill() arrives), then drain queues and join workers.
        try:
            fin_num = 0
            while True:
                badnews = self.feedback_q.get()
                if badnews is None:
                    # Sentinel posted by kill(): stop collecting immediately.
                    break
                if badnews == "Done":
                    fin_num += 1
                elif badnews == "QueueEmpty":
                    # A worker hit the end-of-queue marker: ask all to stop.
                    EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), badnews))
                    self.TerminateWorkers()
                else:
                    # Any other message is an error report from a worker.
                    EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), badnews))
                    self.Status = False
                    self.TerminateWorkers()
                if fin_num == len(self.autogen_workers):
                    self.clearQueue()
                    for w in self.autogen_workers:
                        w.join()
                    break
        except Exception:
            # The manager must never take the whole build down; any
            # unexpected failure just ends the collection loop.
            return

    def clearQueue(self):
        # Drain the shared queues after all workers finished, then copy the
        # cache results the workers produced back into this process.
        taskq = self.autogen_workers[0].module_queue
        logq = self.autogen_workers[0].log_q
        clearQ(taskq)
        clearQ(self.feedback_q)
        clearQ(logq)
        # Copy the cache queue items to the parent thread before clear
        cacheq = self.autogen_workers[0].cache_q
        try:
            cache_num = 0
            while True:
                item = cacheq.get()
                if item == "CacheDone":
                    # Each worker posts exactly one "CacheDone" marker on exit.
                    cache_num += 1
                else:
                    GlobalData.gModuleAllCacheStatus.add(item)
                if cache_num == len(self.autogen_workers):
                    break
        except Exception:
            # Narrowed from a bare except so KeyboardInterrupt/SystemExit
            # are no longer swallowed while draining the cache queue.
            print("cache_q error")

    def TerminateWorkers(self):
        # Signal every worker process to stop picking up new modules.
        self.error_event.set()
    def kill(self):
        # Post the sentinel that makes run() exit its collection loop.
        self.feedback_q.put(None)
class AutoGenWorkerInProcess(mp.Process):
    ## Worker process that restores build state from a shared data pipe, then
    #  pulls module descriptions from a queue and runs AutoGen for each one
    #  (cache checks plus code/makefile generation), reporting back to the
    #  parent through feedback_q and cache_q.
    def __init__(self,module_queue,data_pipe_file_path,feedback_q,file_lock,cache_q,log_q,error_event):
        mp.Process.__init__(self)
        self.module_queue = module_queue               # shared queue of module description tuples
        self.data_pipe_file_path =data_pipe_file_path  # path of the serialized MemoryDataPipe
        self.data_pipe = None                          # MemoryDataPipe, loaded in run()
        self.feedback_q = feedback_q                   # status/error reports back to AutoGenManager
        self.PlatformMetaFileSet = {}                  # (filepath, root) -> platform meta file cache
        self.file_lock = file_lock                     # serializes access to the data pipe file
        self.cache_q = cache_q                         # per-module cache hit/miss results
        self.log_q = log_q                             # queue feeding the LogAgent in the parent
        self.error_event = error_event                 # set by the manager to request shutdown
    def GetPlatformMetaFile(self,filepath,root):
        # Return the cached platform meta file object for (filepath, root),
        # creating the entry on first use (EAFP: lookup miss raises).
        try:
            return self.PlatformMetaFileSet[(filepath,root)]
        except:
            self.PlatformMetaFileSet[(filepath,root)] = filepath
            return self.PlatformMetaFileSet[(filepath,root)]
    def run(self):
        ## Main worker loop. Restores global build state from the data pipe,
        #  then processes modules until the queue is exhausted or an error is
        #  signalled. Always posts "Done"/"CacheDone" markers on exit so the
        #  manager can account for this worker.
        try:
            taskname = "Init"
            # Load the shared data pipe under the file lock; a failure is
            # reported to the manager but does not raise here.
            with self.file_lock:
                try:
                    self.data_pipe = MemoryDataPipe()
                    self.data_pipe.load(self.data_pipe_file_path)
                except:
                    self.feedback_q.put(taskname + ":" + "load data pipe %s failed." % self.data_pipe_file_path)
            # Route this process's logging through log_q to the parent's LogAgent.
            EdkLogger.LogClientInitialize(self.log_q)
            loglevel = self.data_pipe.Get("LogLevel")
            if not loglevel:
                loglevel = EdkLogger.INFO
            EdkLogger.SetLevel(loglevel)
            # Platform-wide build parameters from the data pipe.
            target = self.data_pipe.Get("P_Info").get("Target")
            toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
            archlist = self.data_pipe.Get("P_Info").get("ArchList")

            active_p = self.data_pipe.Get("P_Info").get("ActivePlatform")
            workspacedir = self.data_pipe.Get("P_Info").get("WorkspaceDir")
            PackagesPath = os.getenv("PACKAGES_PATH")
            mws.setWs(workspacedir, PackagesPath)
            self.Wa = WorkSpaceInfo(
                workspacedir,active_p,target,toolchain,archlist
                )
            self.Wa._SrcTimeStamp = self.data_pipe.Get("Workspace_timestamp")
            # Replicate the parent process's global build state into this
            # process's GlobalData module.
            GlobalData.gGlobalDefines = self.data_pipe.Get("G_defines")
            GlobalData.gCommandLineDefines = self.data_pipe.Get("CL_defines")
            # NOTE: writes os.environ's internal dict directly to replace the
            # whole environment with the parent's snapshot (CPython-specific).
            os.environ._data = self.data_pipe.Get("Env_Var")
            GlobalData.gWorkspace = workspacedir
            GlobalData.gDisableIncludePathCheck = False
            GlobalData.gFdfParser = self.data_pipe.Get("FdfParser")
            GlobalData.gDatabasePath = self.data_pipe.Get("DatabasePath")

            # Build-cache configuration and per-process cache status tables.
            GlobalData.gUseHashCache = self.data_pipe.Get("UseHashCache")
            GlobalData.gBinCacheSource = self.data_pipe.Get("BinCacheSource")
            GlobalData.gBinCacheDest = self.data_pipe.Get("BinCacheDest")
            GlobalData.gPlatformHashFile = self.data_pipe.Get("PlatformHashFile")
            GlobalData.gModulePreMakeCacheStatus = dict()
            GlobalData.gModuleMakeCacheStatus = dict()
            GlobalData.gHashChainStatus = dict()
            GlobalData.gCMakeHashFile = dict()
            GlobalData.gModuleHashFile = dict()
            GlobalData.gFileHashDict = dict()
            GlobalData.gEnableGenfdsMultiThread = self.data_pipe.Get("EnableGenfdsMultiThread")
            GlobalData.file_lock = self.file_lock
            CommandTarget = self.data_pipe.Get("CommandTarget")
            # Rebuild the "--pcd" command-line overrides as
            # "TokenSpace.PcdName[.FieldName]=Value" strings.
            pcd_from_build_option = []
            for pcd_tuple in self.data_pipe.Get("BuildOptPcd"):
                pcd_id = ".".join((pcd_tuple[0],pcd_tuple[1]))
                if pcd_tuple[2].strip():
                    pcd_id = ".".join((pcd_id,pcd_tuple[2]))
                pcd_from_build_option.append("=".join((pcd_id,pcd_tuple[3])))
            GlobalData.BuildOptionPcd = pcd_from_build_option
            module_count = 0
            FfsCmd = self.data_pipe.Get("FfsCommand")
            if FfsCmd is None:
                FfsCmd = {}
            GlobalData.FfsCmd = FfsCmd
            PlatformMetaFile = self.GetPlatformMetaFile(self.data_pipe.Get("P_Info").get("ActivePlatform"),
                                             self.data_pipe.Get("P_Info").get("WorkspaceDir"))
            # Module processing loop: runs until the manager sets error_event
            # or the end-of-queue marker (module_file is None) keeps coming back.
            while True:
                if self.error_event.is_set():
                    break
                module_count += 1
                try:
                    module_file,module_root,module_path,module_basename,module_originalpath,module_arch,IsLib = self.module_queue.get_nowait()
                except Empty:
                    # Queue looked empty but the end marker has not been seen
                    # yet ("Fake Empty"); back off briefly and retry.
                    EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), "Fake Empty."))
                    time.sleep(0.01)
                    continue
                if module_file is None:
                    # End-of-queue marker: tell the manager, then keep idling
                    # until error_event is set (the marker is not re-queued).
                    EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), "Worker get the last item in the queue."))
                    self.feedback_q.put("QueueEmpty")
                    time.sleep(0.01)
                    continue

                modulefullpath = os.path.join(module_root,module_file)
                taskname = " : ".join((modulefullpath,module_arch))
                module_metafile = PathClass(module_file,module_root)
                if module_path:
                    module_metafile.Path = module_path
                if module_basename:
                    module_metafile.BaseName = module_basename
                if module_originalpath:
                    module_metafile.OriginalPath = PathClass(module_originalpath,module_root)
                arch = module_arch
                target = self.data_pipe.Get("P_Info").get("Target")
                toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
                Ma = ModuleAutoGen(self.Wa,module_metafile,target,toolchain,arch,PlatformMetaFile,self.data_pipe)
                Ma.IsLibrary = IsLib
                # SourceFileList calling sequence impact the makefile string sequence.
                # Create cached SourceFileList here to unify its calling sequence for both
                # CanSkipbyPreMakeCache and CreateCodeFile/CreateMakeFile.
                RetVal = Ma.SourceFileList
                # Pre-make cache check: a hit skips generation for this module.
                if GlobalData.gUseHashCache and not GlobalData.gBinCacheDest and CommandTarget in [None, "", "all"]:
                    try:
                        CacheResult = Ma.CanSkipbyPreMakeCache()
                    except:
                        # A cache-probe failure is treated as a miss and
                        # reported to the manager as an error for this task.
                        CacheResult = False
                        self.feedback_q.put(taskname)

                    if CacheResult:
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "PreMakeCache", True))
                        continue
                    else:
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "PreMakeCache", False))

                # Generate the module's AutoGen code and makefile.
                Ma.CreateCodeFile(False)
                Ma.CreateMakeFile(False,GenFfsList=FfsCmd.get((Ma.MetaFile.Path, Ma.Arch),[]))

                # Post-make cache check against the binary cache source.
                if GlobalData.gBinCacheSource and CommandTarget in [None, "", "all"]:
                    try:
                        CacheResult = Ma.CanSkipbyMakeCache()
                    except:
                        CacheResult = False
                        self.feedback_q.put(taskname)

                    if CacheResult:
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "MakeCache", True))
                        continue
                    else:
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "MakeCache", False))

        except Exception as e:
            # Report the failing task to the manager; the exception text only
            # goes to the debug log.
            EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), str(e)))
            self.feedback_q.put(taskname)
        finally:
            # Always post the completion markers the manager counts on.
            EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), "Done"))
            self.feedback_q.put("Done")
            self.cache_q.put("CacheDone")

    def printStatus(self):
        # Debug helper: print how many objects this worker process has
        # accumulated in the AutoGen caches and the workspace database,
        # grouped by meta-file type (dec/dsc/inf).
        print("Processs ID: %d Run %d modules in AutoGen " % (os.getpid(),len(AutoGen.Cache())))
        print("Processs ID: %d Run %d modules in AutoGenInfo " % (os.getpid(),len(AutoGenInfo.GetCache())))
        groupobj = {}
        for buildobj in BuildDB.BuildObject.GetCache().values():
            if str(buildobj).lower().endswith("dec"):
                try:
                    groupobj['dec'].append(str(buildobj))
                except:
                    groupobj['dec'] = [str(buildobj)]
            if str(buildobj).lower().endswith("dsc"):
                try:
                    groupobj['dsc'].append(str(buildobj))
                except:
                    groupobj['dsc'] = [str(buildobj)]

            if str(buildobj).lower().endswith("inf"):
                try:
                    groupobj['inf'].append(str(buildobj))
                except:
                    groupobj['inf'] = [str(buildobj)]

        print("Processs ID: %d Run %d pkg in WDB " % (os.getpid(),len(groupobj.get("dec",[]))))
        print("Processs ID: %d Run %d pla in WDB " % (os.getpid(),len(groupobj.get("dsc",[]))))
        print("Processs ID: %d Run %d inf in WDB " % (os.getpid(),len(groupobj.get("inf",[]))))