]> git.proxmox.com Git - mirror_edk2.git/blame_incremental - BaseTools/Source/Python/AutoGen/AutoGenWorker.py
BaseTools: Fix the bug of --cmd-len build option
[mirror_edk2.git] / BaseTools / Source / Python / AutoGen / AutoGenWorker.py
... / ...
CommitLineData
1## @file\r
2# Create makefile for MS nmake and GNU make\r
3#\r
4# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>\r
5# SPDX-License-Identifier: BSD-2-Clause-Patent\r
6#\r
7from __future__ import absolute_import\r
8import multiprocessing as mp\r
9import threading\r
10from Common.Misc import PathClass\r
11from AutoGen.ModuleAutoGen import ModuleAutoGen\r
12from AutoGen.ModuleAutoGenHelper import WorkSpaceInfo,AutoGenInfo\r
13import Common.GlobalData as GlobalData\r
14import Common.EdkLogger as EdkLogger\r
15import os\r
16from Common.MultipleWorkspace import MultipleWorkspace as mws\r
17from AutoGen.AutoGen import AutoGen\r
18from Workspace.WorkspaceDatabase import BuildDB\r
19try:\r
20 from queue import Empty\r
21except:\r
22 from Queue import Empty\r
23import traceback\r
24import sys\r
25from AutoGen.DataPipe import MemoryDataPipe\r
26import logging\r
27import time\r
28\r
def clearQ(q):
    """Drain every pending item from *q* without blocking.

    Repeatedly pops with get_nowait() until the queue reports Empty,
    discarding each item. Used during shutdown so queue buffers do not
    keep worker processes alive.
    """
    while True:
        try:
            q.get_nowait()
        except Empty:
            return
class LogAgent(threading.Thread):
    """Thread that replays log records received over a queue.

    Worker processes push ``logging.LogRecord`` objects onto ``log_q``;
    this agent re-emits each record on one of three local loggers
    (debug -> stdout with timestamps, info -> stdout, error -> stderr),
    optionally duplicating all output into ``log_file``.
    """
    def __init__(self, log_q, log_level, log_file=None):
        super(LogAgent, self).__init__()
        self.log_q = log_q
        self.log_level = log_level
        self.log_file = log_file

    def _build_logger(self, name, formatter, stream):
        # Create a named logger at self.log_level with one stream handler.
        logger = logging.getLogger(name)
        logger.setLevel(self.log_level)
        channel = logging.StreamHandler(stream)
        channel.setFormatter(formatter)
        logger.addHandler(channel)
        return logger

    def InitLogger(self):
        """Set up the three replay loggers (and file handlers if requested)."""
        # DEBUG records carry a wall-clock timestamp; the other levels are plain.
        debug_fmt = logging.Formatter("[%(asctime)s.%(msecs)d]: %(message)s", datefmt="%H:%M:%S")
        plain_fmt = logging.Formatter("%(message)s")

        # Covers all DEBUG_0~9 levels.
        self._DebugLogger_agent = self._build_logger("tool_debug_agent", debug_fmt, sys.stdout)
        # Covers VERBOSE, INFO, WARN.
        self._InfoLogger_agent = self._build_logger("tool_info_agent", plain_fmt, sys.stdout)
        # ERROR goes to stderr.
        self._ErrorLogger_agent = self._build_logger("tool_error_agent", plain_fmt, sys.stderr)

        if self.log_file:
            # Start the log file fresh for this build.
            if os.path.exists(self.log_file):
                os.remove(self.log_file)
            for logger, fmt in ((self._DebugLogger_agent, debug_fmt),
                                (self._InfoLogger_agent, plain_fmt),
                                (self._ErrorLogger_agent, plain_fmt)):
                file_ch = logging.FileHandler(self.log_file)
                file_ch.setFormatter(fmt)
                logger.addHandler(file_ch)

    def run(self):
        """Consume records from log_q until a None sentinel arrives."""
        self.InitLogger()
        while True:
            record = self.log_q.get()
            if record is None:
                # kill() pushed the shutdown sentinel.
                break
            # Route by the originating logger's name; unknown names
            # fall back to the info logger.
            if record.name == "tool_error":
                target = self._ErrorLogger_agent
            elif record.name == "tool_debug":
                target = self._DebugLogger_agent
            else:
                target = self._InfoLogger_agent
            target.log(record.levelno, record.getMessage())

    def kill(self):
        # Ask run() to exit by queueing the sentinel.
        self.log_q.put(None)
class AutoGenManager(threading.Thread):
    """Parent-side coordinator for the AutoGen worker processes.

    Listens on ``feedback_q`` for worker status strings: "Done" (a worker
    finished), "QueueEmpty" (the task queue drained), or any other string
    (a failure report). Once every worker has reported "Done", drains the
    shared queues and joins the workers. ``Status`` ends up False if any
    worker reported a failure.
    """
    def __init__(self, autogen_workers, feedback_q, error_event):
        super(AutoGenManager, self).__init__()
        self.autogen_workers = autogen_workers
        self.feedback_q = feedback_q
        self.Status = True
        self.error_event = error_event

    def run(self):
        try:
            finished = 0
            while True:
                message = self.feedback_q.get()
                if message is None:
                    # kill() pushed the shutdown sentinel.
                    break
                if message == "Done":
                    finished += 1
                elif message == "QueueEmpty":
                    # All tasks consumed; tell the workers to stop.
                    EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), message))
                    self.TerminateWorkers()
                else:
                    # Anything else is a worker error report.
                    EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), message))
                    self.Status = False
                    self.TerminateWorkers()
                if finished == len(self.autogen_workers):
                    self.clearQueue()
                    for worker in self.autogen_workers:
                        worker.join()
                    break
        except Exception:
            return

    def clearQueue(self):
        """Drain the shared queues so the worker processes can exit."""
        first = self.autogen_workers[0]
        clearQ(first.module_queue)
        clearQ(self.feedback_q)
        clearQ(first.log_q)
        # Copy cache-status items into the parent before discarding the queue;
        # each worker terminates its stream with a "CacheDone" marker.
        cacheq = first.cache_q
        try:
            done_count = 0
            while True:
                item = cacheq.get()
                if item == "CacheDone":
                    done_count += 1
                else:
                    GlobalData.gModuleAllCacheStatus.add(item)
                if done_count == len(self.autogen_workers):
                    break
        except:
            print("cache_q error")

    def TerminateWorkers(self):
        # Workers poll this event and break out of their task loop.
        self.error_event.set()

    def kill(self):
        # Ask run() to exit by queueing the sentinel.
        self.feedback_q.put(None)
class AutoGenWorkerInProcess(mp.Process):
    """Worker process that runs AutoGen for modules pulled off a shared queue.

    Channels shared with the parent process:
      module_queue - task tuples (file, root, path, basename, originalpath, arch, IsLib)
      feedback_q   - status strings read by AutoGenManager ("Done", "QueueEmpty",
                     or a task name on failure)
      cache_q      - per-module cache hit/miss tuples, terminated by "CacheDone"
      log_q        - log records consumed by LogAgent in the parent
      error_event  - set by the parent to request shutdown
    The platform/build configuration is rehydrated from the pickled data
    pipe file at data_pipe_file_path.
    """
    def __init__(self,module_queue,data_pipe_file_path,feedback_q,file_lock,cache_q,log_q,error_event):
        mp.Process.__init__(self)
        self.module_queue = module_queue
        self.data_pipe_file_path =data_pipe_file_path
        self.data_pipe = None
        self.feedback_q = feedback_q
        # Memoized platform meta files keyed by (filepath, root).
        self.PlatformMetaFileSet = {}
        self.file_lock = file_lock
        self.cache_q = cache_q
        self.log_q = log_q
        self.error_event = error_event
    def GetPlatformMetaFile(self,filepath,root):
        """Return the cached meta file for (filepath, root), storing
        *filepath* itself on first lookup (EAFP memoization)."""
        try:
            return self.PlatformMetaFileSet[(filepath,root)]
        except:
            self.PlatformMetaFileSet[(filepath,root)] = filepath
            return self.PlatformMetaFileSet[(filepath,root)]
    def run(self):
        """Restore global build state from the data pipe, then process
        modules from module_queue until it drains or error_event is set.

        Always emits "Done" on feedback_q and "CacheDone" on cache_q when
        leaving, so the parent's bookkeeping stays consistent even on error.
        """
        try:
            taskname = "Init"
            # The lock serializes data-pipe loading across workers
            # (presumably to avoid concurrent reads of the same file — confirm).
            with self.file_lock:
                try:
                    self.data_pipe = MemoryDataPipe()
                    self.data_pipe.load(self.data_pipe_file_path)
                except:
                    self.feedback_q.put(taskname + ":" + "load data pipe %s failed." % self.data_pipe_file_path)
            # Route this process's logging through the parent's LogAgent.
            EdkLogger.LogClientInitialize(self.log_q)
            loglevel = self.data_pipe.Get("LogLevel")
            if not loglevel:
                loglevel = EdkLogger.INFO
            EdkLogger.SetLevel(loglevel)
            target = self.data_pipe.Get("P_Info").get("Target")
            toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
            archlist = self.data_pipe.Get("P_Info").get("ArchList")

            active_p = self.data_pipe.Get("P_Info").get("ActivePlatform")
            workspacedir = self.data_pipe.Get("P_Info").get("WorkspaceDir")
            PackagesPath = os.getenv("PACKAGES_PATH")
            mws.setWs(workspacedir, PackagesPath)
            self.Wa = WorkSpaceInfo(
                workspacedir,active_p,target,toolchain,archlist
                )
            self.Wa._SrcTimeStamp = self.data_pipe.Get("Workspace_timestamp")
            # Rebuild the GlobalData module state that the main build
            # process computed; workers see only what came through the pipe.
            GlobalData.gGlobalDefines = self.data_pipe.Get("G_defines")
            GlobalData.gCommandLineDefines = self.data_pipe.Get("CL_defines")
            GlobalData.gCommandMaxLength = self.data_pipe.Get('gCommandMaxLength')
            # NOTE(review): pokes the private backing dict of os.environ to
            # replace the whole environment wholesale — relies on CPython
            # internals.
            os.environ._data = self.data_pipe.Get("Env_Var")
            GlobalData.gWorkspace = workspacedir
            GlobalData.gDisableIncludePathCheck = False
            GlobalData.gFdfParser = self.data_pipe.Get("FdfParser")
            GlobalData.gDatabasePath = self.data_pipe.Get("DatabasePath")

            # Binary-cache configuration and the per-module cache bookkeeping
            # dictionaries used by CanSkipbyPreMakeCache/CanSkipbyMakeCache.
            GlobalData.gUseHashCache = self.data_pipe.Get("UseHashCache")
            GlobalData.gBinCacheSource = self.data_pipe.Get("BinCacheSource")
            GlobalData.gBinCacheDest = self.data_pipe.Get("BinCacheDest")
            GlobalData.gPlatformHashFile = self.data_pipe.Get("PlatformHashFile")
            GlobalData.gModulePreMakeCacheStatus = dict()
            GlobalData.gModuleMakeCacheStatus = dict()
            GlobalData.gHashChainStatus = dict()
            GlobalData.gCMakeHashFile = dict()
            GlobalData.gModuleHashFile = dict()
            GlobalData.gFileHashDict = dict()
            GlobalData.gEnableGenfdsMultiThread = self.data_pipe.Get("EnableGenfdsMultiThread")
            GlobalData.file_lock = self.file_lock
            CommandTarget = self.data_pipe.Get("CommandTarget")
            # Reassemble --pcd build options into "Tok.Space.Name[.Field]=Value"
            # strings from the (space, name, field, value) tuples in the pipe.
            pcd_from_build_option = []
            for pcd_tuple in self.data_pipe.Get("BuildOptPcd"):
                pcd_id = ".".join((pcd_tuple[0],pcd_tuple[1]))
                if pcd_tuple[2].strip():
                    pcd_id = ".".join((pcd_id,pcd_tuple[2]))
                pcd_from_build_option.append("=".join((pcd_id,pcd_tuple[3])))
            GlobalData.BuildOptionPcd = pcd_from_build_option
            module_count = 0
            FfsCmd = self.data_pipe.Get("FfsCommand")
            if FfsCmd is None:
                FfsCmd = {}
            GlobalData.FfsCmd = FfsCmd
            PlatformMetaFile = self.GetPlatformMetaFile(self.data_pipe.Get("P_Info").get("ActivePlatform"),
                                             self.data_pipe.Get("P_Info").get("WorkspaceDir"))
            # Main task loop: one iteration per module description.
            while True:
                if self.error_event.is_set():
                    # Parent requested shutdown (error elsewhere or queue drained).
                    break
                module_count += 1
                try:
                    module_file,module_root,module_path,module_basename,module_originalpath,module_arch,IsLib = self.module_queue.get_nowait()
                except Empty:
                    # Queue momentarily empty — spin with a short sleep rather
                    # than blocking, so error_event is still polled.
                    EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), "Fake Empty."))
                    time.sleep(0.01)
                    continue
                if module_file is None:
                    # Sentinel tuple marking the end of the task stream; report
                    # it and keep looping until the parent sets error_event.
                    EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), "Worker get the last item in the queue."))
                    self.feedback_q.put("QueueEmpty")
                    time.sleep(0.01)
                    continue

                modulefullpath = os.path.join(module_root,module_file)
                taskname = " : ".join((modulefullpath,module_arch))
                module_metafile = PathClass(module_file,module_root)
                if module_path:
                    module_metafile.Path = module_path
                if module_basename:
                    module_metafile.BaseName = module_basename
                if module_originalpath:
                    module_metafile.OriginalPath = PathClass(module_originalpath,module_root)
                arch = module_arch
                target = self.data_pipe.Get("P_Info").get("Target")
                toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
                Ma = ModuleAutoGen(self.Wa,module_metafile,target,toolchain,arch,PlatformMetaFile,self.data_pipe)
                Ma.IsLibrary = IsLib
                # SourceFileList calling sequence impact the makefile string sequence.
                # Create cached SourceFileList here to unify its calling sequence for both
                # CanSkipbyPreMakeCache and CreateCodeFile/CreateMakeFile.
                RetVal = Ma.SourceFileList
                # Pre-make cache check: on a hit, skip generation entirely.
                if GlobalData.gUseHashCache and not GlobalData.gBinCacheDest and CommandTarget in [None, "", "all"]:
                    try:
                        CacheResult = Ma.CanSkipbyPreMakeCache()
                    except:
                        # Cache probe failure is non-fatal: report the task and
                        # fall through to a normal build.
                        CacheResult = False
                        self.feedback_q.put(taskname)

                    if CacheResult:
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "PreMakeCache", True))
                        continue
                    else:
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "PreMakeCache", False))

                # Normal AutoGen: generate code, makefile, and As-Built INF.
                Ma.CreateCodeFile(False)
                Ma.CreateMakeFile(False,GenFfsList=FfsCmd.get((Ma.MetaFile.Path, Ma.Arch),[]))
                Ma.CreateAsBuiltInf()
                # Post-autogen make-cache check (only meaningful when restoring
                # from a binary cache source).
                if GlobalData.gBinCacheSource and CommandTarget in [None, "", "all"]:
                    try:
                        CacheResult = Ma.CanSkipbyMakeCache()
                    except:
                        CacheResult = False
                        self.feedback_q.put(taskname)

                    if CacheResult:
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "MakeCache", True))
                        continue
                    else:
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "MakeCache", False))

        except Exception as e:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), str(e)))
            self.feedback_q.put(taskname)
        finally:
            # Always signal completion so AutoGenManager's counters balance.
            EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), "Done"))
            self.feedback_q.put("Done")
            self.cache_q.put("CacheDone")

    def printStatus(self):
        """Debug helper: print the sizes of this process's AutoGen and
        workspace-database caches, grouped by meta-file type."""
        print("Processs ID: %d Run %d modules in AutoGen " % (os.getpid(),len(AutoGen.Cache())))
        print("Processs ID: %d Run %d modules in AutoGenInfo " % (os.getpid(),len(AutoGenInfo.GetCache())))
        # Bucket cached build objects by meta-file suffix (dec/dsc/inf),
        # using EAFP to create each bucket on first use.
        groupobj = {}
        for buildobj in BuildDB.BuildObject.GetCache().values():
            if str(buildobj).lower().endswith("dec"):
                try:
                    groupobj['dec'].append(str(buildobj))
                except:
                    groupobj['dec'] = [str(buildobj)]
            if str(buildobj).lower().endswith("dsc"):
                try:
                    groupobj['dsc'].append(str(buildobj))
                except:
                    groupobj['dsc'] = [str(buildobj)]

            if str(buildobj).lower().endswith("inf"):
                try:
                    groupobj['inf'].append(str(buildobj))
                except:
                    groupobj['inf'] = [str(buildobj)]

        print("Processs ID: %d Run %d pkg in WDB " % (os.getpid(),len(groupobj.get("dec",[]))))
        print("Processs ID: %d Run %d pla in WDB " % (os.getpid(),len(groupobj.get("dsc",[]))))
        print("Processs ID: %d Run %d inf in WDB " % (os.getpid(),len(groupobj.get("inf",[]))))