]> git.proxmox.com Git - mirror_edk2.git/blob - BaseTools/Source/Python/AutoGen/AutoGenWorker.py
BaseTools: Fix the bug of --cmd-len build option
[mirror_edk2.git] / BaseTools / Source / Python / AutoGen / AutoGenWorker.py
1 ## @file
2 # Create makefile for MS nmake and GNU make
3 #
4 # Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
5 # SPDX-License-Identifier: BSD-2-Clause-Patent
6 #
7 from __future__ import absolute_import
8 import multiprocessing as mp
9 import threading
10 from Common.Misc import PathClass
11 from AutoGen.ModuleAutoGen import ModuleAutoGen
12 from AutoGen.ModuleAutoGenHelper import WorkSpaceInfo,AutoGenInfo
13 import Common.GlobalData as GlobalData
14 import Common.EdkLogger as EdkLogger
15 import os
16 from Common.MultipleWorkspace import MultipleWorkspace as mws
17 from AutoGen.AutoGen import AutoGen
18 from Workspace.WorkspaceDatabase import BuildDB
19 try:
20 from queue import Empty
21 except:
22 from Queue import Empty
23 import traceback
24 import sys
25 from AutoGen.DataPipe import MemoryDataPipe
26 import logging
27 import time
28
def clearQ(q):
    # Drain every pending item from the queue without blocking; the
    # queue reports Empty once nothing is left, at which point we stop.
    while True:
        try:
            q.get_nowait()
        except Empty:
            return
35
class LogAgent(threading.Thread):
    """Background thread that replays log records received over a queue
    onto stdout/stderr (and optionally a log file) via three dedicated
    loggers: debug, info and error."""

    def __init__(self, log_q, log_level, log_file=None):
        super(LogAgent, self).__init__()
        self.log_q = log_q          # queue of logging.LogRecord objects (None = shutdown sentinel)
        self.log_level = log_level  # level applied to all three agent loggers
        self.log_file = log_file    # optional path; recreated on startup when set

    def InitLogger(self):
        # For DEBUG level (All DEBUG_0~9 are applicable)
        debug_fmt = logging.Formatter("[%(asctime)s.%(msecs)d]: %(message)s", datefmt="%H:%M:%S")
        self._DebugLogger_agent = logging.getLogger("tool_debug_agent")
        self._DebugLogger_agent.setLevel(self.log_level)
        debug_stream = logging.StreamHandler(sys.stdout)
        debug_stream.setFormatter(debug_fmt)
        self._DebugLogger_agent.addHandler(debug_stream)

        # For VERBOSE, INFO, WARN level
        info_fmt = logging.Formatter("%(message)s")
        self._InfoLogger_agent = logging.getLogger("tool_info_agent")
        self._InfoLogger_agent.setLevel(self.log_level)
        info_stream = logging.StreamHandler(sys.stdout)
        info_stream.setFormatter(info_fmt)
        self._InfoLogger_agent.addHandler(info_stream)

        # For ERROR level
        error_fmt = logging.Formatter("%(message)s")
        self._ErrorLogger_agent = logging.getLogger("tool_error_agent")
        self._ErrorLogger_agent.setLevel(self.log_level)
        error_stream = logging.StreamHandler(sys.stderr)
        error_stream.setFormatter(error_fmt)
        self._ErrorLogger_agent.addHandler(error_stream)

        if self.log_file:
            # Start the log file fresh for this session, then attach one
            # file handler per logger, each reusing that logger's format.
            if os.path.exists(self.log_file):
                os.remove(self.log_file)
            for agent, fmt in ((self._DebugLogger_agent, debug_fmt),
                               (self._InfoLogger_agent, info_fmt),
                               (self._ErrorLogger_agent, error_fmt)):
                file_ch = logging.FileHandler(self.log_file)
                file_ch.setFormatter(fmt)
                agent.addHandler(file_ch)

    def run(self):
        self.InitLogger()
        while True:
            record = self.log_q.get()
            if record is None:
                # Sentinel queued by kill(): shut the agent down.
                break
            # Route by the originating logger's name; "tool_info" and any
            # unrecognized name both go to the info logger.
            if record.name == "tool_error":
                agent = self._ErrorLogger_agent
            elif record.name == "tool_debug":
                agent = self._DebugLogger_agent
            else:
                agent = self._InfoLogger_agent
            agent.log(record.levelno, record.getMessage())

    def kill(self):
        # Wake run() and make it exit.
        self.log_q.put(None)
class AutoGenManager(threading.Thread):
    """Collector thread: listens on feedback_q for reports from the
    AutoGen worker processes and shuts the pool down once every worker
    has reported completion, or as soon as one reports an error."""

    def __init__(self, autogen_workers, feedback_q, error_event):
        super(AutoGenManager, self).__init__()
        self.autogen_workers = autogen_workers  # list of AutoGenWorkerInProcess
        self.feedback_q = feedback_q            # status messages from workers
        self.Status = True                      # flips to False on any worker error
        self.error_event = error_event          # shared event telling workers to stop

    def run(self):
        try:
            done_count = 0
            while True:
                message = self.feedback_q.get()
                if message is None:
                    # Sentinel queued by kill().
                    break
                if message == "Done":
                    done_count += 1
                elif message == "QueueEmpty":
                    # A worker drained the module queue; tell the rest to stop.
                    EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), message))
                    self.TerminateWorkers()
                else:
                    # Anything else is an error report from a worker.
                    EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), message))
                    self.Status = False
                    self.TerminateWorkers()
                if done_count == len(self.autogen_workers):
                    # Every worker finished: flush queues and reap the processes.
                    self.clearQueue()
                    for worker in self.autogen_workers:
                        worker.join()
                    break
        except Exception:
            return

    def clearQueue(self):
        task_queue = self.autogen_workers[0].module_queue
        log_queue = self.autogen_workers[0].log_q
        clearQ(task_queue)
        clearQ(self.feedback_q)
        clearQ(log_queue)
        # Copy the cache queue items to the parent thread before clearing;
        # each worker ends its stream with a "CacheDone" marker.
        cache_queue = self.autogen_workers[0].cache_q
        try:
            done_workers = 0
            while True:
                item = cache_queue.get()
                if item == "CacheDone":
                    done_workers += 1
                else:
                    GlobalData.gModuleAllCacheStatus.add(item)
                if done_workers == len(self.autogen_workers):
                    break
        except:
            print ("cache_q error")

    def TerminateWorkers(self):
        # Signal every worker process to stop picking up new modules.
        self.error_event.set()

    def kill(self):
        # Queue the sentinel that makes run() return.
        self.feedback_q.put(None)
class AutoGenWorkerInProcess(mp.Process):
    """Worker process that consumes module build tasks from module_queue,
    runs AutoGen (source/makefile generation plus build-cache checks) for
    each module, and reports results back through feedback_q and cache_q.
    """
    def __init__(self,module_queue,data_pipe_file_path,feedback_q,file_lock,cache_q,log_q,error_event):
        mp.Process.__init__(self)
        self.module_queue = module_queue            # tasks: one tuple per module to process
        self.data_pipe_file_path =data_pipe_file_path  # on-disk pickle of platform-wide build data
        self.data_pipe = None                       # loaded MemoryDataPipe, set in run()
        self.feedback_q = feedback_q                # status/error reports to AutoGenManager
        self.PlatformMetaFileSet = {}               # cache keyed by (filepath, root)
        self.file_lock = file_lock                  # serializes data-pipe file access across workers
        self.cache_q = cache_q                      # (path, arch, cache-kind, hit?) results
        self.log_q = log_q                          # log records forwarded to the parent's LogAgent
        self.error_event = error_event              # set by the parent to request early shutdown
    def GetPlatformMetaFile(self,filepath,root):
        """Return the cached platform meta-file entry for (filepath, root),
        creating it on first lookup (the path itself is stored as the entry)."""
        try:
            return self.PlatformMetaFileSet[(filepath,root)]
        except:
            # First lookup for this key: populate the cache, then return it.
            self.PlatformMetaFileSet[(filepath,root)] = filepath
            return self.PlatformMetaFileSet[(filepath,root)]
    def run(self):
        """Process entry point: rebuild build-global state from the data
        pipe, then loop pulling modules off module_queue until the queue's
        end sentinel is seen or error_event is set."""
        try:
            taskname = "Init"
            # The lock serializes reads of the shared data-pipe file among workers.
            with self.file_lock:
                try:
                    self.data_pipe = MemoryDataPipe()
                    self.data_pipe.load(self.data_pipe_file_path)
                except:
                    self.feedback_q.put(taskname + ":" + "load data pipe %s failed." % self.data_pipe_file_path)
            # Route this process's logging through log_q to the parent.
            EdkLogger.LogClientInitialize(self.log_q)
            loglevel = self.data_pipe.Get("LogLevel")
            if not loglevel:
                loglevel = EdkLogger.INFO
            EdkLogger.SetLevel(loglevel)
            target = self.data_pipe.Get("P_Info").get("Target")
            toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
            archlist = self.data_pipe.Get("P_Info").get("ArchList")

            active_p = self.data_pipe.Get("P_Info").get("ActivePlatform")
            workspacedir = self.data_pipe.Get("P_Info").get("WorkspaceDir")
            PackagesPath = os.getenv("PACKAGES_PATH")
            mws.setWs(workspacedir, PackagesPath)
            self.Wa = WorkSpaceInfo(
                workspacedir,active_p,target,toolchain,archlist
                )
            self.Wa._SrcTimeStamp = self.data_pipe.Get("Workspace_timestamp")
            # Mirror the parent's build-wide globals into this process:
            GlobalData.gGlobalDefines = self.data_pipe.Get("G_defines")
            GlobalData.gCommandLineDefines = self.data_pipe.Get("CL_defines")
            # Max command-line length for generated makefile commands
            # (set by the --cmd-len build option in the parent).
            GlobalData.gCommandMaxLength = self.data_pipe.Get('gCommandMaxLength')
            # NOTE(review): assigning os.environ._data replaces the child's
            # environment wholesale with the parent's snapshot; this relies on
            # a CPython implementation detail of os.environ — confirm.
            os.environ._data = self.data_pipe.Get("Env_Var")
            GlobalData.gWorkspace = workspacedir
            GlobalData.gDisableIncludePathCheck = False
            GlobalData.gFdfParser = self.data_pipe.Get("FdfParser")
            GlobalData.gDatabasePath = self.data_pipe.Get("DatabasePath")

            # Build-cache configuration and per-process cache bookkeeping.
            GlobalData.gUseHashCache = self.data_pipe.Get("UseHashCache")
            GlobalData.gBinCacheSource = self.data_pipe.Get("BinCacheSource")
            GlobalData.gBinCacheDest = self.data_pipe.Get("BinCacheDest")
            GlobalData.gPlatformHashFile = self.data_pipe.Get("PlatformHashFile")
            GlobalData.gModulePreMakeCacheStatus = dict()
            GlobalData.gModuleMakeCacheStatus = dict()
            GlobalData.gHashChainStatus = dict()
            GlobalData.gCMakeHashFile = dict()
            GlobalData.gModuleHashFile = dict()
            GlobalData.gFileHashDict = dict()
            GlobalData.gEnableGenfdsMultiThread = self.data_pipe.Get("EnableGenfdsMultiThread")
            GlobalData.file_lock = self.file_lock
            CommandTarget = self.data_pipe.Get("CommandTarget")
            # Re-assemble the PCD overrides from build options into the
            # "Token.Space[.Field]=Value" string form the AutoGen code expects.
            pcd_from_build_option = []
            for pcd_tuple in self.data_pipe.Get("BuildOptPcd"):
                pcd_id = ".".join((pcd_tuple[0],pcd_tuple[1]))
                if pcd_tuple[2].strip():
                    pcd_id = ".".join((pcd_id,pcd_tuple[2]))
                pcd_from_build_option.append("=".join((pcd_id,pcd_tuple[3])))
            GlobalData.BuildOptionPcd = pcd_from_build_option
            module_count = 0
            FfsCmd = self.data_pipe.Get("FfsCommand")
            if FfsCmd is None:
                FfsCmd = {}
            GlobalData.FfsCmd = FfsCmd
            PlatformMetaFile = self.GetPlatformMetaFile(self.data_pipe.Get("P_Info").get("ActivePlatform"),
                                             self.data_pipe.Get("P_Info").get("WorkspaceDir"))
            # Main work loop: one iteration per module task.
            while True:
                if self.error_event.is_set():
                    # Parent requested shutdown (another worker errored or finished).
                    break
                module_count += 1
                try:
                    module_file,module_root,module_path,module_basename,module_originalpath,module_arch,IsLib = self.module_queue.get_nowait()
                except Empty:
                    # Queue momentarily empty; spin with a short sleep rather than block,
                    # so error_event is still checked promptly.
                    EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), "Fake Empty."))
                    time.sleep(0.01)
                    continue
                if module_file is None:
                    # End-of-queue sentinel: report and keep looping until terminated,
                    # so other workers can also observe the sentinel.
                    EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), "Worker get the last item in the queue."))
                    self.feedback_q.put("QueueEmpty")
                    time.sleep(0.01)
                    continue

                modulefullpath = os.path.join(module_root,module_file)
                taskname = " : ".join((modulefullpath,module_arch))
                module_metafile = PathClass(module_file,module_root)
                if module_path:
                    module_metafile.Path = module_path
                if module_basename:
                    module_metafile.BaseName = module_basename
                if module_originalpath:
                    module_metafile.OriginalPath = PathClass(module_originalpath,module_root)
                arch = module_arch
                target = self.data_pipe.Get("P_Info").get("Target")
                toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
                Ma = ModuleAutoGen(self.Wa,module_metafile,target,toolchain,arch,PlatformMetaFile,self.data_pipe)
                Ma.IsLibrary = IsLib
                # SourceFileList calling sequence impact the makefile string sequence.
                # Create cached SourceFileList here to unify its calling sequence for both
                # CanSkipbyPreMakeCache and CreateCodeFile/CreateMakeFile.
                RetVal = Ma.SourceFileList
                if GlobalData.gUseHashCache and not GlobalData.gBinCacheDest and CommandTarget in [None, "", "all"]:
                    try:
                        CacheResult = Ma.CanSkipbyPreMakeCache()
                    except:
                        # Treat any cache-probe failure as a miss and report the task.
                        CacheResult = False
                        self.feedback_q.put(taskname)

                    if CacheResult:
                        # Pre-make cache hit: skip generation entirely for this module.
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "PreMakeCache", True))
                        continue
                    else:
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "PreMakeCache", False))

                Ma.CreateCodeFile(False)
                Ma.CreateMakeFile(False,GenFfsList=FfsCmd.get((Ma.MetaFile.Path, Ma.Arch),[]))
                Ma.CreateAsBuiltInf()
                if GlobalData.gBinCacheSource and CommandTarget in [None, "", "all"]:
                    try:
                        CacheResult = Ma.CanSkipbyMakeCache()
                    except:
                        # Same best-effort policy as the pre-make cache probe.
                        CacheResult = False
                        self.feedback_q.put(taskname)

                    if CacheResult:
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "MakeCache", True))
                        continue
                    else:
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "MakeCache", False))

        except Exception as e:
            # Report the failing task (or "Init") so the manager can flag the build.
            EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), str(e)))
            self.feedback_q.put(taskname)
        finally:
            # Always announce completion so AutoGenManager/clearQueue can unblock.
            EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), "Done"))
            self.feedback_q.put("Done")
            self.cache_q.put("CacheDone")

    def printStatus(self):
        """Debug helper: print how many objects this process accumulated in
        the AutoGen caches and the workspace database, grouped by file type."""
        print("Processs ID: %d Run %d modules in AutoGen " % (os.getpid(),len(AutoGen.Cache())))
        print("Processs ID: %d Run %d modules in AutoGenInfo " % (os.getpid(),len(AutoGenInfo.GetCache())))
        groupobj = {}
        for buildobj in BuildDB.BuildObject.GetCache().values():
            if str(buildobj).lower().endswith("dec"):
                try:
                    groupobj['dec'].append(str(buildobj))
                except:
                    groupobj['dec'] = [str(buildobj)]
            if str(buildobj).lower().endswith("dsc"):
                try:
                    groupobj['dsc'].append(str(buildobj))
                except:
                    groupobj['dsc'] = [str(buildobj)]

            if str(buildobj).lower().endswith("inf"):
                try:
                    groupobj['inf'].append(str(buildobj))
                except:
                    groupobj['inf'] = [str(buildobj)]

        print("Processs ID: %d Run %d pkg in WDB " % (os.getpid(),len(groupobj.get("dec",[]))))
        print("Processs ID: %d Run %d pla in WDB " % (os.getpid(),len(groupobj.get("dsc",[]))))
        print("Processs ID: %d Run %d inf in WDB " % (os.getpid(),len(groupobj.get("inf",[]))))