## @file
# Worker process implementation for parallel module AutoGen (code and makefile generation)
#
# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
from __future__ import absolute_import
import multiprocessing as mp
import threading
from Common.Misc import PathClass
from AutoGen.ModuleAutoGen import ModuleAutoGen
from AutoGen.ModuleAutoGenHelper import WorkSpaceInfo,AutoGenInfo
import Common.GlobalData as GlobalData
import Common.EdkLogger as EdkLogger
import os
from Common.MultipleWorkspace import MultipleWorkspace as mws
from AutoGen.AutoGen import AutoGen
from Workspace.WorkspaceDatabase import BuildDB
try:
    from queue import Empty
except:
    from Queue import Empty
import traceback
import sys
from AutoGen.DataPipe import MemoryDataPipe
import logging
import time

def clearQ(q):
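    """Drain every pending item from the given queue without blocking."""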
    try:
        while True:
            q.get_nowait()
    except Empty:
        pass

class LogAgent(threading.Thread):
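    """Thread that takes log records off log_q and re-emits them through per-level agent loggers (stdout, stderr, and an optional log file)."""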
    def __init__(self,log_q,log_level,log_file=None):
        super(LogAgent,self).__init__()
        self.log_q = log_q
        self.log_level = log_level
        self.log_file = log_file
    def InitLogger(self):
        # For DEBUG level (All DEBUG_0~9 are applicable)
        self._DebugLogger_agent = logging.getLogger("tool_debug_agent")
        _DebugFormatter = logging.Formatter("[%(asctime)s.%(msecs)d]: %(message)s", datefmt="%H:%M:%S")
        self._DebugLogger_agent.setLevel(self.log_level)
        _DebugChannel = logging.StreamHandler(sys.stdout)
        _DebugChannel.setFormatter(_DebugFormatter)
        self._DebugLogger_agent.addHandler(_DebugChannel)

        # For VERBOSE, INFO, WARN level
        self._InfoLogger_agent = logging.getLogger("tool_info_agent")
        _InfoFormatter = logging.Formatter("%(message)s")
        self._InfoLogger_agent.setLevel(self.log_level)
        _InfoChannel = logging.StreamHandler(sys.stdout)
        _InfoChannel.setFormatter(_InfoFormatter)
        self._InfoLogger_agent.addHandler(_InfoChannel)

        # For ERROR level
        self._ErrorLogger_agent = logging.getLogger("tool_error_agent")
        _ErrorFormatter = logging.Formatter("%(message)s")
        self._ErrorLogger_agent.setLevel(self.log_level)
        _ErrorCh = logging.StreamHandler(sys.stderr)
        _ErrorCh.setFormatter(_ErrorFormatter)
        self._ErrorLogger_agent.addHandler(_ErrorCh)

        if self.log_file:
            if os.path.exists(self.log_file):
                os.remove(self.log_file)
            _Ch = logging.FileHandler(self.log_file)
            _Ch.setFormatter(_DebugFormatter)
            self._DebugLogger_agent.addHandler(_Ch)

            _Ch = logging.FileHandler(self.log_file)
            _Ch.setFormatter(_InfoFormatter)
            self._InfoLogger_agent.addHandler(_Ch)

            _Ch = logging.FileHandler(self.log_file)
            _Ch.setFormatter(_ErrorFormatter)
            self._ErrorLogger_agent.addHandler(_Ch)

    def run(self):
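        """Initialize the agent loggers, then forward each queued record to the logger matching its name until a None sentinel arrives."""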
        self.InitLogger()
        while True:
            log_message = self.log_q.get()
            if log_message is None:
                break
            if log_message.name == "tool_error":
                self._ErrorLogger_agent.log(log_message.levelno,log_message.getMessage())
            elif log_message.name == "tool_info":
                self._InfoLogger_agent.log(log_message.levelno,log_message.getMessage())
            elif log_message.name == "tool_debug":
                self._DebugLogger_agent.log(log_message.levelno,log_message.getMessage())
            else:
                self._InfoLogger_agent.log(log_message.levelno,log_message.getMessage())

    def kill(self):
        self.log_q.put(None)
class AutoGenManager(threading.Thread):
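    """Manager thread that collects feedback from the AutoGen worker processes, shuts them down on error, and drains the shared queues once every worker has finished."""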
    def __init__(self,autogen_workers, feedback_q,error_event):
        super(AutoGenManager,self).__init__()
        self.autogen_workers = autogen_workers
        self.feedback_q = feedback_q
        self.Status = True
        self.error_event = error_event
    def run(self):
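        """Consume worker feedback: count "Done" messages, signal workers to stop on "QueueEmpty", and flag failure on any other message; exit once every worker has reported "Done"."""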
        try:
            fin_num = 0
            while True:
                badnews = self.feedback_q.get()
                if badnews is None:
                    break
                if badnews == "Done":
                    fin_num += 1
                elif badnews == "QueueEmpty":
                    EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), badnews))
                    self.TerminateWorkers()
                else:
                    EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), badnews))
                    self.Status = False
                    self.TerminateWorkers()
                if fin_num == len(self.autogen_workers):
                    self.clearQueue()
                    for w in self.autogen_workers:
                        w.join()
                    break
        except Exception:
            return

    def clearQueue(self):
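        """Drain the task, feedback, and log queues, then collect each worker's cache status items into GlobalData before exit."""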
        taskq = self.autogen_workers[0].module_queue
        logq = self.autogen_workers[0].log_q
        clearQ(taskq)
        clearQ(self.feedback_q)
        clearQ(logq)
        # Copy the cache queue items to the parent thread before clearing it.
        cacheq = self.autogen_workers[0].cache_q
        try:
            cache_num = 0
            while True:
                item = cacheq.get()
                if item == "CacheDone":
                    cache_num += 1
                else:
                    GlobalData.gModuleAllCacheStatus.add(item)
                if cache_num == len(self.autogen_workers):
                    break
        except:
            print("cache_q error")

    def TerminateWorkers(self):
        self.error_event.set()
    def kill(self):
        self.feedback_q.put(None)
class AutoGenWorkerInProcess(mp.Process):
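    """Worker process that restores build settings from the shared data pipe, then runs ModuleAutoGen (code, makefile, as-built INF generation plus cache checks) for each module taken from module_queue."""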
    def __init__(self,module_queue,data_pipe_file_path,feedback_q,file_lock,cache_q,log_q,error_event):
        mp.Process.__init__(self)
        self.module_queue = module_queue
        self.data_pipe_file_path = data_pipe_file_path
        self.data_pipe = None
        self.feedback_q = feedback_q
        self.PlatformMetaFileSet = {}
        self.file_lock = file_lock
        self.cache_q = cache_q
        self.log_q = log_q
        self.error_event = error_event
    def GetPlatformMetaFile(self,filepath,root):
        try:
            return self.PlatformMetaFileSet[(filepath,root)]
        except:
            self.PlatformMetaFileSet[(filepath,root)] = filepath
            return self.PlatformMetaFileSet[(filepath,root)]
    def run(self):
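        """Load the data pipe, rebuild per-process global state, then pull modules from the queue and run AutoGen for each until the queue is exhausted or error_event is set."""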
        try:
            taskname = "Init"
            with self.file_lock:
                try:
                    self.data_pipe = MemoryDataPipe()
                    self.data_pipe.load(self.data_pipe_file_path)
                except:
                    self.feedback_q.put(taskname + ":" + "load data pipe %s failed." % self.data_pipe_file_path)
            EdkLogger.LogClientInitialize(self.log_q)
            loglevel = self.data_pipe.Get("LogLevel")
            if not loglevel:
                loglevel = EdkLogger.INFO
            EdkLogger.SetLevel(loglevel)
            target = self.data_pipe.Get("P_Info").get("Target")
            toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
            archlist = self.data_pipe.Get("P_Info").get("ArchList")

            active_p = self.data_pipe.Get("P_Info").get("ActivePlatform")
            workspacedir = self.data_pipe.Get("P_Info").get("WorkspaceDir")
            PackagesPath = os.getenv("PACKAGES_PATH")
            mws.setWs(workspacedir, PackagesPath)
            self.Wa = WorkSpaceInfo(
                workspacedir,active_p,target,toolchain,archlist
                )
            self.Wa._SrcTimeStamp = self.data_pipe.Get("Workspace_timestamp")
            GlobalData.gGlobalDefines = self.data_pipe.Get("G_defines")
            GlobalData.gCommandLineDefines = self.data_pipe.Get("CL_defines")
            GlobalData.gCommandMaxLength = self.data_pipe.Get('gCommandMaxLength')
            os.environ._data = self.data_pipe.Get("Env_Var")
            GlobalData.gWorkspace = workspacedir
            GlobalData.gDisableIncludePathCheck = False
            GlobalData.gFdfParser = self.data_pipe.Get("FdfParser")
            GlobalData.gDatabasePath = self.data_pipe.Get("DatabasePath")

            GlobalData.gUseHashCache = self.data_pipe.Get("UseHashCache")
            GlobalData.gBinCacheSource = self.data_pipe.Get("BinCacheSource")
            GlobalData.gBinCacheDest = self.data_pipe.Get("BinCacheDest")
            GlobalData.gPlatformHashFile = self.data_pipe.Get("PlatformHashFile")
            GlobalData.gModulePreMakeCacheStatus = dict()
            GlobalData.gModuleMakeCacheStatus = dict()
            GlobalData.gHashChainStatus = dict()
            GlobalData.gCMakeHashFile = dict()
            GlobalData.gModuleHashFile = dict()
            GlobalData.gFileHashDict = dict()
            GlobalData.gEnableGenfdsMultiThread = self.data_pipe.Get("EnableGenfdsMultiThread")
            GlobalData.gPlatformFinalPcds = self.data_pipe.Get("gPlatformFinalPcds")
            GlobalData.file_lock = self.file_lock
            CommandTarget = self.data_pipe.Get("CommandTarget")
            pcd_from_build_option = []
            for pcd_tuple in self.data_pipe.Get("BuildOptPcd"):
                pcd_id = ".".join((pcd_tuple[0],pcd_tuple[1]))
                if pcd_tuple[2].strip():
                    pcd_id = ".".join((pcd_id,pcd_tuple[2]))
                pcd_from_build_option.append("=".join((pcd_id,pcd_tuple[3])))
            GlobalData.BuildOptionPcd = pcd_from_build_option
            module_count = 0
            FfsCmd = self.data_pipe.Get("FfsCommand")
            if FfsCmd is None:
                FfsCmd = {}
            GlobalData.FfsCmd = FfsCmd
            PlatformMetaFile = self.GetPlatformMetaFile(self.data_pipe.Get("P_Info").get("ActivePlatform"),
                                                        self.data_pipe.Get("P_Info").get("WorkspaceDir"))
            while True:
                if self.error_event.is_set():
                    break
                module_count += 1
                try:
                    module_file,module_root,module_path,module_basename,module_originalpath,module_arch,IsLib = self.module_queue.get_nowait()
                except Empty:
                    EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), "Fake Empty."))
                    time.sleep(0.01)
                    continue
                if module_file is None:
                    EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), "Worker got the last item in the queue."))
                    self.feedback_q.put("QueueEmpty")
                    time.sleep(0.01)
                    continue

                modulefullpath = os.path.join(module_root,module_file)
                taskname = " : ".join((modulefullpath,module_arch))
                module_metafile = PathClass(module_file,module_root)
                if module_path:
                    module_metafile.Path = module_path
                if module_basename:
                    module_metafile.BaseName = module_basename
                if module_originalpath:
                    module_metafile.OriginalPath = PathClass(module_originalpath,module_root)
                arch = module_arch
                target = self.data_pipe.Get("P_Info").get("Target")
                toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
                Ma = ModuleAutoGen(self.Wa,module_metafile,target,toolchain,arch,PlatformMetaFile,self.data_pipe)
                Ma.IsLibrary = IsLib
                # The SourceFileList calling sequence impacts the makefile string sequence.
                # Create the cached SourceFileList here to unify its calling sequence for both
                # CanSkipbyPreMakeCache and CreateCodeFile/CreateMakeFile.
                RetVal = Ma.SourceFileList
                if GlobalData.gUseHashCache and not GlobalData.gBinCacheDest and CommandTarget in [None, "", "all"]:
                    try:
                        CacheResult = Ma.CanSkipbyPreMakeCache()
                    except:
                        CacheResult = False
                        self.feedback_q.put(taskname)

                    if CacheResult:
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "PreMakeCache", True))
                        continue
                    else:
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "PreMakeCache", False))

                Ma.CreateCodeFile(False)
                Ma.CreateMakeFile(False,GenFfsList=FfsCmd.get((Ma.MetaFile.Path, Ma.Arch),[]))
                Ma.CreateAsBuiltInf()
                if GlobalData.gBinCacheSource and CommandTarget in [None, "", "all"]:
                    try:
                        CacheResult = Ma.CanSkipbyMakeCache()
                    except:
                        CacheResult = False
                        self.feedback_q.put(taskname)

                    if CacheResult:
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "MakeCache", True))
                        continue
                    else:
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "MakeCache", False))

        except Exception as e:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), str(e)))
            self.feedback_q.put(taskname)
        finally:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), "Done"))
            self.feedback_q.put("Done")
            self.cache_q.put("CacheDone")

    def printStatus(self):
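        """Print how many AutoGen, AutoGenInfo, and workspace-database objects (DEC/DSC/INF) this worker process currently holds in its caches."""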
        print("Process ID: %d Run %d modules in AutoGen " % (os.getpid(),len(AutoGen.Cache())))
        print("Process ID: %d Run %d modules in AutoGenInfo " % (os.getpid(),len(AutoGenInfo.GetCache())))
        groupobj = {}
        for buildobj in BuildDB.BuildObject.GetCache().values():
            if str(buildobj).lower().endswith("dec"):
                try:
                    groupobj['dec'].append(str(buildobj))
                except:
                    groupobj['dec'] = [str(buildobj)]
            if str(buildobj).lower().endswith("dsc"):
                try:
                    groupobj['dsc'].append(str(buildobj))
                except:
                    groupobj['dsc'] = [str(buildobj)]

            if str(buildobj).lower().endswith("inf"):
                try:
                    groupobj['inf'].append(str(buildobj))
                except:
                    groupobj['inf'] = [str(buildobj)]

        print("Process ID: %d Run %d pkg in WDB " % (os.getpid(),len(groupobj.get("dec",[]))))
        print("Process ID: %d Run %d pla in WDB " % (os.getpid(),len(groupobj.get("dsc",[]))))
        print("Process ID: %d Run %d inf in WDB " % (os.getpid(),len(groupobj.get("inf",[]))))