]> git.proxmox.com Git - mirror_edk2.git/blob - BaseTools/Source/Python/AutoGen/AutoGenWorker.py
BaseTools: Fixed the bug of multi-thread genffs for override inf
[mirror_edk2.git] / BaseTools / Source / Python / AutoGen / AutoGenWorker.py
1 ## @file
2 # Create makefile for MS nmake and GNU make
3 #
4 # Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
5 # SPDX-License-Identifier: BSD-2-Clause-Patent
6 #
7 from __future__ import absolute_import
8 import multiprocessing as mp
9 import threading
10 from Common.Misc import PathClass
11 from AutoGen.ModuleAutoGen import ModuleAutoGen
12 from AutoGen.ModuleAutoGenHelper import WorkSpaceInfo,AutoGenInfo
13 import Common.GlobalData as GlobalData
14 import Common.EdkLogger as EdkLogger
15 import os
16 from Common.MultipleWorkspace import MultipleWorkspace as mws
17 from AutoGen.AutoGen import AutoGen
18 from Workspace.WorkspaceDatabase import BuildDB
19 try:
20 from queue import Empty
21 except:
22 from Queue import Empty
23 import traceback
24 import sys
25 from AutoGen.DataPipe import MemoryDataPipe
26 import logging
27
def clearQ(q):
    """Drain and discard every pending item from queue *q*.

    Returns once the queue raises ``Empty``; safe to call on an
    already-empty queue.
    """
    while True:
        try:
            q.get_nowait()
        except Empty:
            return
class LogAgent(threading.Thread):
    """Thread that drains LogRecords from a shared queue and re-emits them.

    Worker processes push ``logging.LogRecord`` objects onto ``log_q``;
    this agent routes each record to one of three local loggers (debug /
    info / error) based on the record's logger name.  A ``None`` sentinel
    on the queue stops the thread (see kill()).
    """
    def __init__(self, log_q, log_level, log_file=None):
        super(LogAgent, self).__init__()
        self.log_q = log_q          # queue of LogRecord objects (None = stop)
        self.log_level = log_level  # level applied to every agent logger
        self.log_file = log_file    # optional file that mirrors all output

    def _make_logger(self, name, formatter, stream):
        """Build one agent logger: a stream handler plus, if a log file is
        configured, a file handler sharing the same formatter."""
        logger = logging.getLogger(name)
        logger.setLevel(self.log_level)
        channel = logging.StreamHandler(stream)
        channel.setFormatter(formatter)
        logger.addHandler(channel)
        if self.log_file:
            file_channel = logging.FileHandler(self.log_file)
            file_channel.setFormatter(formatter)
            logger.addHandler(file_channel)
        return logger

    def InitLogger(self):
        """Create the three agent-side loggers (debug / info / error)."""
        # Start the optional log file fresh before any handler opens it.
        if self.log_file and os.path.exists(self.log_file):
            os.remove(self.log_file)

        # DEBUG output gets a timestamped format (all DEBUG_0~9 levels).
        debug_fmt = logging.Formatter(
            "[%(asctime)s.%(msecs)d]: %(message)s", datefmt="%H:%M:%S")
        self._DebugLogger_agent = self._make_logger(
            "tool_debug_agent", debug_fmt, sys.stdout)

        # VERBOSE / INFO / WARN: message only, to stdout.
        self._InfoLogger_agent = self._make_logger(
            "tool_info_agent", logging.Formatter("%(message)s"), sys.stdout)

        # ERROR: message only, to stderr.
        self._ErrorLogger_agent = self._make_logger(
            "tool_error_agent", logging.Formatter("%(message)s"), sys.stderr)

    def run(self):
        self.InitLogger()
        # Route records by the name of the logger that produced them;
        # anything unrecognized falls through to the info logger.
        route = {
            "tool_error": self._ErrorLogger_agent,
            "tool_info":  self._InfoLogger_agent,
            "tool_debug": self._DebugLogger_agent,
        }
        while True:
            record = self.log_q.get()
            if record is None:
                break  # sentinel from kill(): shut the agent down
            sink = route.get(record.name, self._InfoLogger_agent)
            sink.log(record.levelno, record.getMessage())

    def kill(self):
        """Ask the agent thread to stop by queueing the None sentinel."""
        self.log_q.put(None)
class AutoGenManager(threading.Thread):
    """Supervises a pool of AutoGen worker processes via a feedback queue.

    Each worker puts "Done" on ``feedback_q`` when it finishes, or an
    error description string when it fails.  A ``None`` sentinel (see
    kill()) aborts the supervision loop immediately.  ``Status`` stays
    True unless any worker reported an error.
    """
    def __init__(self, autogen_workers, feedback_q, error_event):
        super(AutoGenManager, self).__init__()
        self.autogen_workers = autogen_workers  # worker process objects
        self.feedback_q = feedback_q            # status strings from workers
        self.Status = True                      # False once any error arrives
        self.error_event = error_event          # signals workers to stop

    def run(self):
        try:
            finished = 0
            while True:
                message = self.feedback_q.get()
                if message is None:
                    break  # external shutdown request via kill()
                if message == "Done":
                    finished += 1
                else:
                    # Any other payload is an error report from a worker:
                    # record the failure and tell every worker to stop.
                    self.Status = False
                    self.TerminateWorkers()
                # Workers always send "Done" (even after an error), so once
                # every worker has reported we can drain queues and join.
                if finished == len(self.autogen_workers):
                    self.clearQueue()
                    for worker in self.autogen_workers:
                        worker.join()
                    break
        except Exception:
            return

    def clearQueue(self):
        """Drain the shared task, feedback and log queues."""
        first_worker = self.autogen_workers[0]
        for pending in (first_worker.module_queue, self.feedback_q,
                        first_worker.log_q):
            clearQ(pending)

    def TerminateWorkers(self):
        """Signal every worker process to stop pulling new modules."""
        self.error_event.set()

    def kill(self):
        """Ask the manager thread to stop by queueing the None sentinel."""
        self.feedback_q.put(None)
class AutoGenWorkerInProcess(mp.Process):
    """Worker process that runs the AutoGen phase for queued modules.

    The process loads build state from a pickled MemoryDataPipe file,
    seeds its own GlobalData from it, then pulls module descriptions off
    ``module_queue`` and runs ModuleAutoGen code/makefile generation for
    each.  Progress and errors are reported back on ``feedback_q``
    (an error string per failure, then always "Done" on exit).
    """
    def __init__(self, module_queue, data_pipe_file_path, feedback_q, file_lock,
                 cache_lock, share_data, log_q, error_event):
        mp.Process.__init__(self)
        self.module_queue = module_queue            # (file, root, path, basename, originalpath, arch, IsLib) tuples
        self.data_pipe_file_path = data_pipe_file_path
        self.data_pipe = None                       # loaded lazily in run()
        self.feedback_q = feedback_q                # error strings / "Done" back to the manager
        self.PlatformMetaFileSet = {}               # (filepath, root) -> platform meta file cache
        self.file_lock = file_lock
        self.cache_lock = cache_lock
        self.share_data = share_data                # cross-process build-cache dict
        self.log_q = log_q                          # LogRecords consumed by LogAgent
        self.error_event = error_event              # set by the manager on any failure

    def GetPlatformMetaFile(self, filepath, root):
        """Return the platform meta file cached for (filepath, root),
        inserting *filepath* itself on first use.

        Uses dict.setdefault instead of the previous bare ``except:``,
        which also swallowed SystemExit/KeyboardInterrupt.
        """
        return self.PlatformMetaFileSet.setdefault((filepath, root), filepath)

    def run(self):
        try:
            taskname = "Init"
            # The data pipe file is shared between workers; serialize access.
            with self.file_lock:
                if not os.path.exists(self.data_pipe_file_path):
                    self.feedback_q.put(taskname + ":" + "load data pipe %s failed." % self.data_pipe_file_path)
                self.data_pipe = MemoryDataPipe()
                self.data_pipe.load(self.data_pipe_file_path)
            # Route this process's logging through the shared queue.
            EdkLogger.LogClientInitialize(self.log_q)
            loglevel = self.data_pipe.Get("LogLevel")
            if not loglevel:
                loglevel = EdkLogger.INFO
            EdkLogger.SetLevel(loglevel)
            target = self.data_pipe.Get("P_Info").get("Target")
            toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
            archlist = self.data_pipe.Get("P_Info").get("ArchList")

            active_p = self.data_pipe.Get("P_Info").get("ActivePlatform")
            workspacedir = self.data_pipe.Get("P_Info").get("WorkspaceDir")
            PackagesPath = os.getenv("PACKAGES_PATH")
            mws.setWs(workspacedir, PackagesPath)
            self.Wa = WorkSpaceInfo(
                workspacedir, active_p, target, toolchain, archlist
                )
            self.Wa._SrcTimeStamp = self.data_pipe.Get("Workspace_timestamp")
            # Re-seed the per-process GlobalData from the parent's snapshot.
            GlobalData.gGlobalDefines = self.data_pipe.Get("G_defines")
            GlobalData.gCommandLineDefines = self.data_pipe.Get("CL_defines")
            # NOTE(review): pokes os.environ's private backing dict to clone
            # the parent environment wholesale — CPython-specific; confirm
            # before touching.
            os.environ._data = self.data_pipe.Get("Env_Var")
            GlobalData.gWorkspace = workspacedir
            GlobalData.gDisableIncludePathCheck = False
            GlobalData.gFdfParser = self.data_pipe.Get("FdfParser")
            GlobalData.gDatabasePath = self.data_pipe.Get("DatabasePath")
            GlobalData.gBinCacheSource = self.data_pipe.Get("BinCacheSource")
            GlobalData.gBinCacheDest = self.data_pipe.Get("BinCacheDest")
            GlobalData.gCacheIR = self.share_data
            GlobalData.gEnableGenfdsMultiThread = self.data_pipe.Get("EnableGenfdsMultiThread")
            GlobalData.file_lock = self.file_lock
            GlobalData.cache_lock = self.cache_lock
            CommandTarget = self.data_pipe.Get("CommandTarget")
            # Rebuild the "Namespace.PcdName[.Field]=Value" strings from the
            # (namespace, name, field, value) tuples in the pipe.
            pcd_from_build_option = []
            for pcd_tuple in self.data_pipe.Get("BuildOptPcd"):
                pcd_id = ".".join((pcd_tuple[0], pcd_tuple[1]))
                if pcd_tuple[2].strip():
                    pcd_id = ".".join((pcd_id, pcd_tuple[2]))
                pcd_from_build_option.append("=".join((pcd_id, pcd_tuple[3])))
            GlobalData.BuildOptionPcd = pcd_from_build_option
            module_count = 0
            FfsCmd = self.data_pipe.Get("FfsCommand")
            if FfsCmd is None:
                FfsCmd = {}
            GlobalData.FfsCmd = FfsCmd
            PlatformMetaFile = self.GetPlatformMetaFile(self.data_pipe.Get("P_Info").get("ActivePlatform"),
                                                        self.data_pipe.Get("P_Info").get("WorkspaceDir"))
            libConstPcd = self.data_pipe.Get("LibConstPcd")
            Refes = self.data_pipe.Get("REFS")
            GlobalData.libConstPcd = libConstPcd
            GlobalData.Refes = Refes
            # Main work loop: one queued tuple per module/arch pair.
            while True:
                if self.module_queue.empty():
                    break
                if self.error_event.is_set():
                    break  # another worker failed; stop taking new work
                module_count += 1
                module_file, module_root, module_path, module_basename, module_originalpath, module_arch, IsLib = self.module_queue.get_nowait()
                modulefullpath = os.path.join(module_root, module_file)
                taskname = " : ".join((modulefullpath, module_arch))
                module_metafile = PathClass(module_file, module_root)
                if module_path:
                    module_metafile.Path = module_path
                if module_basename:
                    module_metafile.BaseName = module_basename
                if module_originalpath:
                    # Override case: remember where the INF really came from.
                    module_metafile.OriginalPath = PathClass(module_originalpath, module_root)
                arch = module_arch
                target = self.data_pipe.Get("P_Info").get("Target")
                toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
                Ma = ModuleAutoGen(self.Wa, module_metafile, target, toolchain, arch, PlatformMetaFile, self.data_pipe)
                Ma.IsLibrary = IsLib
                if IsLib:
                    # Attach the library's constant PCDs and referencing
                    # modules captured by the parent, if any.
                    if (Ma.MetaFile.File, Ma.MetaFile.Root, Ma.Arch, Ma.MetaFile.Path) in libConstPcd:
                        Ma.ConstPcd = libConstPcd[(Ma.MetaFile.File, Ma.MetaFile.Root, Ma.Arch, Ma.MetaFile.Path)]
                    if (Ma.MetaFile.File, Ma.MetaFile.Root, Ma.Arch, Ma.MetaFile.Path) in Refes:
                        Ma.ReferenceModules = Refes[(Ma.MetaFile.File, Ma.MetaFile.Root, Ma.Arch, Ma.MetaFile.Path)]
                # Binary-cache fast path: skip generation when the
                # pre-makefile hashes match the cache.
                if GlobalData.gBinCacheSource and CommandTarget in [None, "", "all"]:
                    Ma.GenModuleFilesHash(GlobalData.gCacheIR)
                    Ma.GenPreMakefileHash(GlobalData.gCacheIR)
                    if Ma.CanSkipbyPreMakefileCache(GlobalData.gCacheIR):
                        continue

                Ma.CreateCodeFile(False)
                Ma.CreateMakeFile(False, GenFfsList=FfsCmd.get((Ma.MetaFile.Path, Ma.Arch), []))

                # Second cache check after makefile generation.
                if GlobalData.gBinCacheSource and CommandTarget in [None, "", "all"]:
                    Ma.GenMakeHeaderFilesHash(GlobalData.gCacheIR)
                    Ma.GenMakeHash(GlobalData.gCacheIR)
                    if Ma.CanSkipbyMakeCache(GlobalData.gCacheIR):
                        continue
                    else:
                        Ma.PrintFirstMakeCacheMissFile(GlobalData.gCacheIR)
        except Empty:
            pass
        except BaseException:
            # Explicit spelling of the former bare "except:": report which
            # task failed, but never die without sending "Done" below.
            traceback.print_exc(file=sys.stdout)
            self.feedback_q.put(taskname)
        finally:
            # The manager counts "Done" messages to know when to join us.
            self.feedback_q.put("Done")

    def printStatus(self):
        """Print per-process AutoGen / workspace-database cache statistics.

        Debug aid only.  (Also fixes the "Processs" typo in the output and
        replaces the bare-except grouping with dict.setdefault.)
        """
        print("Process ID: %d Run %d modules in AutoGen " % (os.getpid(), len(AutoGen.Cache())))
        print("Process ID: %d Run %d modules in AutoGenInfo " % (os.getpid(), len(AutoGenInfo.GetCache())))
        # Group cached build objects by meta-file extension.
        groupobj = {}
        for buildobj in BuildDB.BuildObject.GetCache().values():
            objpath = str(buildobj)
            for ext in ("dec", "dsc", "inf"):
                if objpath.lower().endswith(ext):
                    groupobj.setdefault(ext, []).append(objpath)
        print("Process ID: %d Run %d pkg in WDB " % (os.getpid(), len(groupobj.get("dec", []))))
        print("Process ID: %d Run %d pla in WDB " % (os.getpid(), len(groupobj.get("dsc", []))))
        print("Process ID: %d Run %d inf in WDB " % (os.getpid(), len(groupobj.get("inf", []))))