## @file
# Create makefile for MS nmake and GNU make
#
# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
from __future__ import absolute_import
import multiprocessing as mp
import threading
from Common.Misc import PathClass
from AutoGen.ModuleAutoGen import ModuleAutoGen
from AutoGen.ModuleAutoGenHelper import WorkSpaceInfo,AutoGenInfo
import Common.GlobalData as GlobalData
import Common.EdkLogger as EdkLogger
import os
from Common.MultipleWorkspace import MultipleWorkspace as mws
from AutoGen.AutoGen import AutoGen
from Workspace.WorkspaceDatabase import BuildDB
try:
    from queue import Empty
except ImportError:
    from Queue import Empty
import traceback
import sys
from AutoGen.DataPipe import MemoryDataPipe
import logging

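## Drain all pending items from a queue without blocking
#  (used by AutoGenManager.clearQueue once the workers are done).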
def clearQ(q):
    try:
        while True:
            q.get_nowait()
    except Empty:
        pass

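## Thread that receives logging.LogRecord objects from the worker processes
#  over a shared queue and re-emits them through local console/file handlers.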
class LogAgent(threading.Thread):
    def __init__(self,log_q,log_level,log_file=None):
        super(LogAgent,self).__init__()
        self.log_q = log_q
        self.log_level = log_level
        self.log_file = log_file
    def InitLogger(self):
        # For DEBUG level (All DEBUG_0~9 are applicable)
        self._DebugLogger_agent = logging.getLogger("tool_debug_agent")
        _DebugFormatter = logging.Formatter("[%(asctime)s.%(msecs)d]: %(message)s", datefmt="%H:%M:%S")
        self._DebugLogger_agent.setLevel(self.log_level)
        _DebugChannel = logging.StreamHandler(sys.stdout)
        _DebugChannel.setFormatter(_DebugFormatter)
        self._DebugLogger_agent.addHandler(_DebugChannel)

        # For VERBOSE, INFO, WARN level
        self._InfoLogger_agent = logging.getLogger("tool_info_agent")
        _InfoFormatter = logging.Formatter("%(message)s")
        self._InfoLogger_agent.setLevel(self.log_level)
        _InfoChannel = logging.StreamHandler(sys.stdout)
        _InfoChannel.setFormatter(_InfoFormatter)
        self._InfoLogger_agent.addHandler(_InfoChannel)

        # For ERROR level
        self._ErrorLogger_agent = logging.getLogger("tool_error_agent")
        _ErrorFormatter = logging.Formatter("%(message)s")
        self._ErrorLogger_agent.setLevel(self.log_level)
        _ErrorCh = logging.StreamHandler(sys.stderr)
        _ErrorCh.setFormatter(_ErrorFormatter)
        self._ErrorLogger_agent.addHandler(_ErrorCh)

        if self.log_file:
            if os.path.exists(self.log_file):
                os.remove(self.log_file)
            _Ch = logging.FileHandler(self.log_file)
            _Ch.setFormatter(_DebugFormatter)
            self._DebugLogger_agent.addHandler(_Ch)

            _Ch = logging.FileHandler(self.log_file)
            _Ch.setFormatter(_InfoFormatter)
            self._InfoLogger_agent.addHandler(_Ch)

            _Ch = logging.FileHandler(self.log_file)
            _Ch.setFormatter(_ErrorFormatter)
            self._ErrorLogger_agent.addHandler(_Ch)

    def run(self):
        self.InitLogger()
        while True:
            log_message = self.log_q.get()
            if log_message is None:
                break
            if log_message.name == "tool_error":
                self._ErrorLogger_agent.log(log_message.levelno,log_message.getMessage())
            elif log_message.name == "tool_info":
                self._InfoLogger_agent.log(log_message.levelno,log_message.getMessage())
            elif log_message.name == "tool_debug":
                self._DebugLogger_agent.log(log_message.levelno,log_message.getMessage())
            else:
                self._InfoLogger_agent.log(log_message.levelno,log_message.getMessage())

    def kill(self):
        self.log_q.put(None)
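## Thread that collects per-module results from the workers' feedback queue,
#  counts "Done" notifications and signals the error event on any failure.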
class AutoGenManager(threading.Thread):
    def __init__(self,autogen_workers, feedback_q,error_event):
        super(AutoGenManager,self).__init__()
        self.autogen_workers = autogen_workers
        self.feedback_q = feedback_q
        self.Status = True
        self.error_event = error_event
    def run(self):
        try:
            fin_num = 0
            while True:
                badnews = self.feedback_q.get()
                if badnews is None:
                    break
                if badnews == "Done":
                    fin_num += 1
                else:
                    self.Status = False
                    self.TerminateWorkers()
                if fin_num == len(self.autogen_workers):
                    self.clearQueue()
                    for w in self.autogen_workers:
                        w.join()
                    break
        except Exception:
            return

    def clearQueue(self):
        taskq = self.autogen_workers[0].module_queue
        logq = self.autogen_workers[0].log_q
        clearQ(taskq)
        clearQ(self.feedback_q)
        clearQ(logq)
    def TerminateWorkers(self):
        self.error_event.set()
    def kill(self):
        self.feedback_q.put(None)
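## Worker process that restores the build context from the data pipe file and
#  runs ModuleAutoGen (CreateCodeFile/CreateMakeFile) for each queued module.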
class AutoGenWorkerInProcess(mp.Process):
    def __init__(self,module_queue,data_pipe_file_path,feedback_q,file_lock,cache_lock,share_data,log_q,error_event):
        mp.Process.__init__(self)
        self.module_queue = module_queue
        self.data_pipe_file_path = data_pipe_file_path
        self.data_pipe = None
        self.feedback_q = feedback_q
        self.PlatformMetaFileSet = {}
        self.file_lock = file_lock
        self.cache_lock = cache_lock
        self.share_data = share_data
        self.log_q = log_q
        self.error_event = error_event
    def GetPlatformMetaFile(self,filepath,root):
        try:
            return self.PlatformMetaFileSet[(filepath,root)]
        except KeyError:
            self.PlatformMetaFileSet[(filepath,root)] = filepath
            return self.PlatformMetaFileSet[(filepath,root)]
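    ## Main loop: load the platform data pipe, rebuild global state, then pull
    #  modules from module_queue until it is empty or error_event is set.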
    def run(self):
        try:
            taskname = "Init"
            with self.file_lock:
                try:
                    self.data_pipe = MemoryDataPipe()
                    self.data_pipe.load(self.data_pipe_file_path)
                except:
                    self.feedback_q.put(taskname + ":" + "load data pipe %s failed." % self.data_pipe_file_path)
            EdkLogger.LogClientInitialize(self.log_q)
            loglevel = self.data_pipe.Get("LogLevel")
            if not loglevel:
                loglevel = EdkLogger.INFO
            EdkLogger.SetLevel(loglevel)
            target = self.data_pipe.Get("P_Info").get("Target")
            toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
            archlist = self.data_pipe.Get("P_Info").get("ArchList")

            active_p = self.data_pipe.Get("P_Info").get("ActivePlatform")
            workspacedir = self.data_pipe.Get("P_Info").get("WorkspaceDir")
            PackagesPath = os.getenv("PACKAGES_PATH")
            mws.setWs(workspacedir, PackagesPath)
            self.Wa = WorkSpaceInfo(
                workspacedir,active_p,target,toolchain,archlist
                )
            self.Wa._SrcTimeStamp = self.data_pipe.Get("Workspace_timestamp")
            GlobalData.gGlobalDefines = self.data_pipe.Get("G_defines")
            GlobalData.gCommandLineDefines = self.data_pipe.Get("CL_defines")
            os.environ._data = self.data_pipe.Get("Env_Var")
            GlobalData.gWorkspace = workspacedir
            GlobalData.gDisableIncludePathCheck = False
            GlobalData.gFdfParser = self.data_pipe.Get("FdfParser")
            GlobalData.gDatabasePath = self.data_pipe.Get("DatabasePath")
            GlobalData.gBinCacheSource = self.data_pipe.Get("BinCacheSource")
            GlobalData.gBinCacheDest = self.data_pipe.Get("BinCacheDest")
            GlobalData.gCacheIR = self.share_data
            GlobalData.gEnableGenfdsMultiThread = self.data_pipe.Get("EnableGenfdsMultiThread")
            GlobalData.file_lock = self.file_lock
            GlobalData.cache_lock = self.cache_lock
            CommandTarget = self.data_pipe.Get("CommandTarget")
            pcd_from_build_option = []
            for pcd_tuple in self.data_pipe.Get("BuildOptPcd"):
                pcd_id = ".".join((pcd_tuple[0],pcd_tuple[1]))
                if pcd_tuple[2].strip():
                    pcd_id = ".".join((pcd_id,pcd_tuple[2]))
                pcd_from_build_option.append("=".join((pcd_id,pcd_tuple[3])))
            GlobalData.BuildOptionPcd = pcd_from_build_option
            module_count = 0
            FfsCmd = self.data_pipe.Get("FfsCommand")
            if FfsCmd is None:
                FfsCmd = {}
            GlobalData.FfsCmd = FfsCmd
            PlatformMetaFile = self.GetPlatformMetaFile(self.data_pipe.Get("P_Info").get("ActivePlatform"),
                                                        self.data_pipe.Get("P_Info").get("WorkspaceDir"))
            libConstPcd = self.data_pipe.Get("LibConstPcd")
            Refes = self.data_pipe.Get("REFS")
            GlobalData.libConstPcd = libConstPcd
            GlobalData.Refes = Refes
            while True:
                if self.module_queue.empty():
                    break
                if self.error_event.is_set():
                    break
                module_count += 1
                module_file,module_root,module_path,module_basename,module_originalpath,module_arch,IsLib = self.module_queue.get_nowait()
                modulefullpath = os.path.join(module_root,module_file)
                taskname = " : ".join((modulefullpath,module_arch))
                module_metafile = PathClass(module_file,module_root)
                if module_path:
                    module_metafile.Path = module_path
                if module_basename:
                    module_metafile.BaseName = module_basename
                if module_originalpath:
                    module_metafile.OriginalPath = PathClass(module_originalpath,module_root)
                arch = module_arch
                target = self.data_pipe.Get("P_Info").get("Target")
                toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
                Ma = ModuleAutoGen(self.Wa,module_metafile,target,toolchain,arch,PlatformMetaFile,self.data_pipe)
                Ma.IsLibrary = IsLib
                if IsLib:
                    if (Ma.MetaFile.File,Ma.MetaFile.Root,Ma.Arch,Ma.MetaFile.Path) in libConstPcd:
                        Ma.ConstPcd = libConstPcd[(Ma.MetaFile.File,Ma.MetaFile.Root,Ma.Arch,Ma.MetaFile.Path)]
                    if (Ma.MetaFile.File,Ma.MetaFile.Root,Ma.Arch,Ma.MetaFile.Path) in Refes:
                        Ma.ReferenceModules = Refes[(Ma.MetaFile.File,Ma.MetaFile.Root,Ma.Arch,Ma.MetaFile.Path)]
                if GlobalData.gBinCacheSource and CommandTarget in [None, "", "all"]:
                    Ma.GenModuleFilesHash(GlobalData.gCacheIR)
                    Ma.GenPreMakefileHash(GlobalData.gCacheIR)
                    if Ma.CanSkipbyPreMakefileCache(GlobalData.gCacheIR):
                        continue

                Ma.CreateCodeFile(False)
                Ma.CreateMakeFile(False,GenFfsList=FfsCmd.get((Ma.MetaFile.Path, Ma.Arch),[]))

                if GlobalData.gBinCacheSource and CommandTarget in [None, "", "all"]:
                    Ma.GenMakeHeaderFilesHash(GlobalData.gCacheIR)
                    Ma.GenMakeHash(GlobalData.gCacheIR)
                    if Ma.CanSkipbyMakeCache(GlobalData.gCacheIR):
                        continue
                    else:
                        Ma.PrintFirstMakeCacheMissFile(GlobalData.gCacheIR)
        except Empty:
            pass
        except:
            traceback.print_exc(file=sys.stdout)
            self.feedback_q.put(taskname)
        finally:
            self.feedback_q.put("Done")
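    ## Print cache statistics for this worker process (debug aid).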
    def printStatus(self):
        print("Process ID: %d Run %d modules in AutoGen " % (os.getpid(),len(AutoGen.Cache())))
        print("Process ID: %d Run %d modules in AutoGenInfo " % (os.getpid(),len(AutoGenInfo.GetCache())))
        groupobj = {}
        for buildobj in BuildDB.BuildObject.GetCache().values():
            if str(buildobj).lower().endswith("dec"):
                try:
                    groupobj['dec'].append(str(buildobj))
                except KeyError:
                    groupobj['dec'] = [str(buildobj)]
            if str(buildobj).lower().endswith("dsc"):
                try:
                    groupobj['dsc'].append(str(buildobj))
                except KeyError:
                    groupobj['dsc'] = [str(buildobj)]

            if str(buildobj).lower().endswith("inf"):
                try:
                    groupobj['inf'].append(str(buildobj))
                except KeyError:
                    groupobj['inf'] = [str(buildobj)]

        print("Process ID: %d Run %d pkg in WDB " % (os.getpid(),len(groupobj.get("dec",[]))))
        print("Process ID: %d Run %d pla in WDB " % (os.getpid(),len(groupobj.get("dsc",[]))))
        print("Process ID: %d Run %d inf in WDB " % (os.getpid(),len(groupobj.get("inf",[]))))