]> git.proxmox.com Git - mirror_edk2.git/blame_incremental - BaseTools/Source/Python/AutoGen/AutoGenWorker.py
UefiCpuPkg: Extend measurement of microcode patches to TPM
[mirror_edk2.git] / BaseTools / Source / Python / AutoGen / AutoGenWorker.py
... / ...
CommitLineData
1## @file\r
# AutoGen worker processes and log agent for multi-process AutoGen
3#\r
4# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>\r
5# SPDX-License-Identifier: BSD-2-Clause-Patent\r
6#\r
7from __future__ import absolute_import\r
8import multiprocessing as mp\r
9import threading\r
10from Common.Misc import PathClass\r
11from AutoGen.ModuleAutoGen import ModuleAutoGen\r
12from AutoGen.ModuleAutoGenHelper import WorkSpaceInfo,AutoGenInfo\r
13import Common.GlobalData as GlobalData\r
14import Common.EdkLogger as EdkLogger\r
15import os\r
16from Common.MultipleWorkspace import MultipleWorkspace as mws\r
17from AutoGen.AutoGen import AutoGen\r
18from Workspace.WorkspaceDatabase import BuildDB\r
try:
    from queue import Empty
except ImportError:
    # Python 2 fallback: the module was named Queue. Catch only
    # ImportError so unrelated failures are not silently masked.
    from Queue import Empty
23import traceback\r
24import sys\r
25from AutoGen.DataPipe import MemoryDataPipe\r
26import logging\r
27import time\r
28\r
def clearQ(q):
    """Drain every pending item from queue *q*, discarding the items.

    Returns once a non-blocking get raises Empty (queue exhausted).
    """
    while True:
        try:
            q.get_nowait()
        except Empty:
            return
35\r
class LogAgent(threading.Thread):
    """Background thread that drains a queue of logging.LogRecord objects
    and re-emits each record through one of three category loggers
    (debug / info / error), optionally duplicating all output to a file.

    Worker processes push records into log_q; the agent serializes them
    onto stdout/stderr (and the log file) from a single thread.
    """
    def __init__(self, log_q, log_level, log_file=None):
        """
        @param log_q      queue of logging.LogRecord items; None is the
                          shutdown sentinel (see kill()).
        @param log_level  logging level applied to every agent logger.
        @param log_file   optional path; when set, all three loggers also
                          write to this file (recreated on startup).
        """
        super(LogAgent, self).__init__()
        self.log_q = log_q
        self.log_level = log_level
        self.log_file = log_file

    def _MakeLogger(self, name, formatter, stream):
        """Fetch the named logger, set its level and attach a stream
        handler using *formatter*. Factored out of InitLogger to avoid
        repeating the same four-line setup for each category."""
        logger = logging.getLogger(name)
        logger.setLevel(self.log_level)
        channel = logging.StreamHandler(stream)
        channel.setFormatter(formatter)
        logger.addHandler(channel)
        return logger

    def InitLogger(self):
        """Create the three category loggers and, if requested, mirror
        each of them into self.log_file."""
        # For DEBUG level (all DEBUG_0~9 are applicable)
        _DebugFormatter = logging.Formatter("[%(asctime)s.%(msecs)d]: %(message)s", datefmt="%H:%M:%S")
        self._DebugLogger_agent = self._MakeLogger("tool_debug_agent", _DebugFormatter, sys.stdout)

        # For VERBOSE, INFO, WARN level
        _InfoFormatter = logging.Formatter("%(message)s")
        self._InfoLogger_agent = self._MakeLogger("tool_info_agent", _InfoFormatter, sys.stdout)

        # For ERROR level (goes to stderr, not stdout)
        _ErrorFormatter = logging.Formatter("%(message)s")
        self._ErrorLogger_agent = self._MakeLogger("tool_error_agent", _ErrorFormatter, sys.stderr)

        if self.log_file:
            # Start each session with a fresh log file.
            if os.path.exists(self.log_file):
                os.remove(self.log_file)
            for logger, formatter in ((self._DebugLogger_agent, _DebugFormatter),
                                      (self._InfoLogger_agent, _InfoFormatter),
                                      (self._ErrorLogger_agent, _ErrorFormatter)):
                file_channel = logging.FileHandler(self.log_file)
                file_channel.setFormatter(formatter)
                logger.addHandler(file_channel)

    def run(self):
        """Consume records from log_q until the None sentinel arrives,
        dispatching each record to the logger matching its origin name.

        Records named "tool_info" and records with any unrecognized name
        both go to the info logger (the original code had two identical
        branches for these cases)."""
        self.InitLogger()
        while True:
            log_message = self.log_q.get()
            if log_message is None:
                break
            if log_message.name == "tool_error":
                self._ErrorLogger_agent.log(log_message.levelno, log_message.getMessage())
            elif log_message.name == "tool_debug":
                self._DebugLogger_agent.log(log_message.levelno, log_message.getMessage())
            else:
                self._InfoLogger_agent.log(log_message.levelno, log_message.getMessage())

    def kill(self):
        """Ask run() to exit by queueing the shutdown sentinel."""
        self.log_q.put(None)
class AutoGenManager(threading.Thread):
    """Parent-side thread that watches the workers' feedback queue.

    It counts "Done" notifications, relays termination on "QueueEmpty",
    flags failure (Status = False) on any other message, and shuts the
    whole pool down once every worker has reported completion.
    """
    def __init__(self, autogen_workers, feedback_q, error_event):
        super(AutoGenManager, self).__init__()
        self.autogen_workers = autogen_workers
        self.feedback_q = feedback_q
        self.Status = True          # False once any worker reports an error
        self.error_event = error_event

    def run(self):
        try:
            finished = 0
            while True:
                news = self.feedback_q.get()
                if news is None:
                    # kill() sentinel: stop monitoring immediately.
                    break
                if news == "Done":
                    finished += 1
                elif news == "QueueEmpty":
                    EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), news))
                    self.TerminateWorkers()
                else:
                    # Anything else is an error report from a worker.
                    EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), news))
                    self.Status = False
                    self.TerminateWorkers()
                if finished == len(self.autogen_workers):
                    self.clearQueue()
                    for worker in self.autogen_workers:
                        worker.join()
                    break
        except Exception:
            # Best-effort monitor: never propagate out of the thread.
            return

    def clearQueue(self):
        """Drain the shared queues and collect cache results into the
        parent process before the workers are joined."""
        first_worker = self.autogen_workers[0]
        clearQ(first_worker.module_queue)
        clearQ(self.feedback_q)
        clearQ(first_worker.log_q)
        # Copy the cache queue items to the parent thread before clearing.
        cacheq = first_worker.cache_q
        try:
            done_count = 0
            total = len(self.autogen_workers)
            # Each worker ends its stream with a "CacheDone" marker.
            while done_count < total:
                item = cacheq.get()
                if item == "CacheDone":
                    done_count += 1
                else:
                    GlobalData.gModuleAllCacheStatus.add(item)
        except:
            print("cache_q error")

    def TerminateWorkers(self):
        """Signal every worker process to stop via the shared event."""
        self.error_event.set()

    def kill(self):
        """Ask run() to exit by queueing the None sentinel."""
        self.feedback_q.put(None)
class AutoGenWorkerInProcess(mp.Process):
    # Worker process that pulls module task tuples off a shared queue and
    # runs the per-module AutoGen steps (CreateCodeFile / CreateMakeFile /
    # CreateAsBuiltInf), reporting progress and errors back through
    # feedback_q and cache hit/miss results through cache_q.
    def __init__(self,module_queue,data_pipe_file_path,feedback_q,file_lock,cache_q,log_q,error_event):
        """
        @param module_queue         queue of task tuples: (module_file, module_root,
                                    module_path, module_basename, module_originalpath,
                                    module_arch, IsLib); a tuple whose first field is
                                    None marks the end of the stream.
        @param data_pipe_file_path  path of the serialized MemoryDataPipe to load.
        @param feedback_q           queue for status strings read by AutoGenManager
                                    ("Done", "QueueEmpty", or an error description).
        @param file_lock            inter-process lock guarding data-pipe loading.
        @param cache_q              queue of cache-result tuples; stream ends with
                                    the "CacheDone" marker.
        @param log_q                queue consumed by the parent's LogAgent.
        @param error_event          event set by the parent to request shutdown.
        """
        mp.Process.__init__(self)
        self.module_queue = module_queue
        self.data_pipe_file_path =data_pipe_file_path
        self.data_pipe = None  # populated in run() after loading the pipe file
        self.feedback_q = feedback_q
        self.PlatformMetaFileSet = {}  # (filepath, root) -> platform meta file cache
        self.file_lock = file_lock
        self.cache_q = cache_q
        self.log_q = log_q
        self.error_event = error_event
    def GetPlatformMetaFile(self,filepath,root):
        """Return the cached platform meta file for (filepath, root),
        inserting *filepath* itself on first use (EAFP memoization)."""
        try:
            return self.PlatformMetaFileSet[(filepath,root)]
        except:
            self.PlatformMetaFileSet[(filepath,root)] = filepath
            return self.PlatformMetaFileSet[(filepath,root)]
    def run(self):
        """Process entry point: rebuild the parent's build context from the
        data pipe, then loop taking module tasks until the queue signals
        the end or error_event is set.

        Any uncaught exception is reported via feedback_q with the name of
        the task that was being processed; "Done" and "CacheDone" are always
        emitted on exit so the parent's bookkeeping completes.
        """
        try:
            taskname = "Init"
            # The lock serializes data-pipe loading across worker processes.
            with self.file_lock:
                try:
                    self.data_pipe = MemoryDataPipe()
                    self.data_pipe.load(self.data_pipe_file_path)
                except:
                    self.feedback_q.put(taskname + ":" + "load data pipe %s failed." % self.data_pipe_file_path)
            # Route this process's logging through the parent's LogAgent.
            EdkLogger.LogClientInitialize(self.log_q)
            loglevel = self.data_pipe.Get("LogLevel")
            if not loglevel:
                loglevel = EdkLogger.INFO
            EdkLogger.SetLevel(loglevel)
            # Re-create the platform/workspace context inherited from the parent.
            target = self.data_pipe.Get("P_Info").get("Target")
            toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
            archlist = self.data_pipe.Get("P_Info").get("ArchList")

            active_p = self.data_pipe.Get("P_Info").get("ActivePlatform")
            workspacedir = self.data_pipe.Get("P_Info").get("WorkspaceDir")
            PackagesPath = os.getenv("PACKAGES_PATH")
            mws.setWs(workspacedir, PackagesPath)
            self.Wa = WorkSpaceInfo(
                workspacedir,active_p,target,toolchain,archlist
                )
            self.Wa._SrcTimeStamp = self.data_pipe.Get("Workspace_timestamp")
            # Mirror the parent's global build state into this process.
            GlobalData.gGlobalDefines = self.data_pipe.Get("G_defines")
            GlobalData.gCommandLineDefines = self.data_pipe.Get("CL_defines")
            # NOTE(review): writes os.environ's private backing dict directly to
            # clone the parent's environment wholesale — relies on CPython's
            # os.environ implementation detail.
            os.environ._data = self.data_pipe.Get("Env_Var")
            GlobalData.gWorkspace = workspacedir
            GlobalData.gDisableIncludePathCheck = False
            GlobalData.gFdfParser = self.data_pipe.Get("FdfParser")
            GlobalData.gDatabasePath = self.data_pipe.Get("DatabasePath")

            # Build-cache configuration and per-process cache bookkeeping.
            GlobalData.gUseHashCache = self.data_pipe.Get("UseHashCache")
            GlobalData.gBinCacheSource = self.data_pipe.Get("BinCacheSource")
            GlobalData.gBinCacheDest = self.data_pipe.Get("BinCacheDest")
            GlobalData.gPlatformHashFile = self.data_pipe.Get("PlatformHashFile")
            GlobalData.gModulePreMakeCacheStatus = dict()
            GlobalData.gModuleMakeCacheStatus = dict()
            GlobalData.gHashChainStatus = dict()
            GlobalData.gCMakeHashFile = dict()
            GlobalData.gModuleHashFile = dict()
            GlobalData.gFileHashDict = dict()
            GlobalData.gEnableGenfdsMultiThread = self.data_pipe.Get("EnableGenfdsMultiThread")
            GlobalData.file_lock = self.file_lock
            CommandTarget = self.data_pipe.Get("CommandTarget")
            # Rebuild the "pcd.token[.field]=value" strings from the pipe's tuples.
            pcd_from_build_option = []
            for pcd_tuple in self.data_pipe.Get("BuildOptPcd"):
                pcd_id = ".".join((pcd_tuple[0],pcd_tuple[1]))
                if pcd_tuple[2].strip():
                    pcd_id = ".".join((pcd_id,pcd_tuple[2]))
                pcd_from_build_option.append("=".join((pcd_id,pcd_tuple[3])))
            GlobalData.BuildOptionPcd = pcd_from_build_option
            module_count = 0
            FfsCmd = self.data_pipe.Get("FfsCommand")
            if FfsCmd is None:
                FfsCmd = {}
            GlobalData.FfsCmd = FfsCmd
            PlatformMetaFile = self.GetPlatformMetaFile(self.data_pipe.Get("P_Info").get("ActivePlatform"),
                                             self.data_pipe.Get("P_Info").get("WorkspaceDir"))
            # Main task loop: one iteration per module (or per empty-poll retry).
            while True:
                if self.error_event.is_set():
                    # Parent requested shutdown (some worker failed).
                    break
                module_count += 1
                try:
                    module_file,module_root,module_path,module_basename,module_originalpath,module_arch,IsLib = self.module_queue.get_nowait()
                except Empty:
                    # mp queues can report Empty transiently; poll again shortly.
                    EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), "Fake Empty."))
                    time.sleep(0.01)
                    continue
                if module_file is None:
                    # End-of-stream marker: tell the parent and keep polling
                    # (the parent decides when to actually terminate us).
                    EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), "Worker get the last item in the queue."))
                    self.feedback_q.put("QueueEmpty")
                    time.sleep(0.01)
                    continue

                modulefullpath = os.path.join(module_root,module_file)
                taskname = " : ".join((modulefullpath,module_arch))
                module_metafile = PathClass(module_file,module_root)
                # Optional overrides carried in the task tuple.
                if module_path:
                    module_metafile.Path = module_path
                if module_basename:
                    module_metafile.BaseName = module_basename
                if module_originalpath:
                    module_metafile.OriginalPath = PathClass(module_originalpath,module_root)
                arch = module_arch
                target = self.data_pipe.Get("P_Info").get("Target")
                toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
                Ma = ModuleAutoGen(self.Wa,module_metafile,target,toolchain,arch,PlatformMetaFile,self.data_pipe)
                Ma.IsLibrary = IsLib
                # SourceFileList calling sequence impact the makefile string sequence.
                # Create cached SourceFileList here to unify its calling sequence for both
                # CanSkipbyPreMakeCache and CreateCodeFile/CreateMakeFile.
                RetVal = Ma.SourceFileList
                # Pre-make cache check: may let us skip generation entirely.
                if GlobalData.gUseHashCache and not GlobalData.gBinCacheDest and CommandTarget in [None, "", "all"]:
                    try:
                        CacheResult = Ma.CanSkipbyPreMakeCache()
                    except:
                        # Treat any cache probe failure as a miss and report the task.
                        CacheResult = False
                        self.feedback_q.put(taskname)

                    if CacheResult:
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "PreMakeCache", True))
                        continue
                    else:
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "PreMakeCache", False))

                # The actual AutoGen work for this module.
                Ma.CreateCodeFile(False)
                Ma.CreateMakeFile(False,GenFfsList=FfsCmd.get((Ma.MetaFile.Path, Ma.Arch),[]))
                Ma.CreateAsBuiltInf()
                # Make cache check: records whether the build step can be skipped.
                if GlobalData.gBinCacheSource and CommandTarget in [None, "", "all"]:
                    try:
                        CacheResult = Ma.CanSkipbyMakeCache()
                    except:
                        CacheResult = False
                        self.feedback_q.put(taskname)

                    if CacheResult:
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "MakeCache", True))
                        continue
                    else:
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "MakeCache", False))

        except Exception as e:
            # Report the failing task to the parent; the manager will mark
            # the build failed and terminate the pool.
            EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), str(e)))
            self.feedback_q.put(taskname)
        finally:
            # Always emit the completion markers so the parent's counters close.
            EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), "Done"))
            self.feedback_q.put("Done")
            self.cache_q.put("CacheDone")

    def printStatus(self):
        """Debug aid: print how many objects this process accumulated in the
        AutoGen caches and the workspace database, grouped by file type."""
        print("Processs ID: %d Run %d modules in AutoGen " % (os.getpid(),len(AutoGen.Cache())))
        print("Processs ID: %d Run %d modules in AutoGenInfo " % (os.getpid(),len(AutoGenInfo.GetCache())))
        groupobj = {}
        for buildobj in BuildDB.BuildObject.GetCache().values():
            # Bucket cached build objects by extension (dec/dsc/inf),
            # creating each bucket lazily on first KeyError.
            if str(buildobj).lower().endswith("dec"):
                try:
                    groupobj['dec'].append(str(buildobj))
                except:
                    groupobj['dec'] = [str(buildobj)]
            if str(buildobj).lower().endswith("dsc"):
                try:
                    groupobj['dsc'].append(str(buildobj))
                except:
                    groupobj['dsc'] = [str(buildobj)]

            if str(buildobj).lower().endswith("inf"):
                try:
                    groupobj['inf'].append(str(buildobj))
                except:
                    groupobj['inf'] = [str(buildobj)]

        print("Processs ID: %d Run %d pkg in WDB " % (os.getpid(),len(groupobj.get("dec",[]))))
        print("Processs ID: %d Run %d pla in WDB " % (os.getpid(),len(groupobj.get("dsc",[]))))
        print("Processs ID: %d Run %d inf in WDB " % (os.getpid(),len(groupobj.get("inf",[]))))