## @file
# AutoGen worker process and supporting threads for parallel module AutoGen
#
# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
from __future__ import absolute_import
import multiprocessing as mp
import threading
from Common.Misc import PathClass
from AutoGen.ModuleAutoGen import ModuleAutoGen
from AutoGen.ModuleAutoGenHelper import WorkSpaceInfo,AutoGenInfo
import Common.GlobalData as GlobalData
import Common.EdkLogger as EdkLogger
import os
from Common.MultipleWorkspace import MultipleWorkspace as mws
from AutoGen.AutoGen import AutoGen
from Workspace.WorkspaceDatabase import BuildDB
try:
    from queue import Empty
except:
    from Queue import Empty
import traceback
import sys
from AutoGen.DataPipe import MemoryDataPipe
import logging

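# Drain a queue without blocking, so shutdown is not stalled by unread items
# left in the shared multiprocessing queues.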
def clearQ(q):
    try:
        while True:
            q.get_nowait()
    except Empty:
        pass

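# LogAgent is a thread that consumes logging records from log_q and re-emits
# them through local debug/info/error loggers (plus an optional log file), so
# output from all worker processes is funneled through a single writer.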
class LogAgent(threading.Thread):
    def __init__(self,log_q,log_level,log_file=None):
        super(LogAgent,self).__init__()
        self.log_q = log_q
        self.log_level = log_level
        self.log_file = log_file
    def InitLogger(self):
        # For DEBUG level (All DEBUG_0~9 are applicable)
        self._DebugLogger_agent = logging.getLogger("tool_debug_agent")
        _DebugFormatter = logging.Formatter("[%(asctime)s.%(msecs)d]: %(message)s", datefmt="%H:%M:%S")
        self._DebugLogger_agent.setLevel(self.log_level)
        _DebugChannel = logging.StreamHandler(sys.stdout)
        _DebugChannel.setFormatter(_DebugFormatter)
        self._DebugLogger_agent.addHandler(_DebugChannel)

        # For VERBOSE, INFO, WARN level
        self._InfoLogger_agent = logging.getLogger("tool_info_agent")
        _InfoFormatter = logging.Formatter("%(message)s")
        self._InfoLogger_agent.setLevel(self.log_level)
        _InfoChannel = logging.StreamHandler(sys.stdout)
        _InfoChannel.setFormatter(_InfoFormatter)
        self._InfoLogger_agent.addHandler(_InfoChannel)

        # For ERROR level
        self._ErrorLogger_agent = logging.getLogger("tool_error_agent")
        _ErrorFormatter = logging.Formatter("%(message)s")
        self._ErrorLogger_agent.setLevel(self.log_level)
        _ErrorCh = logging.StreamHandler(sys.stderr)
        _ErrorCh.setFormatter(_ErrorFormatter)
        self._ErrorLogger_agent.addHandler(_ErrorCh)

        if self.log_file:
            if os.path.exists(self.log_file):
                os.remove(self.log_file)
            _Ch = logging.FileHandler(self.log_file)
            _Ch.setFormatter(_DebugFormatter)
            self._DebugLogger_agent.addHandler(_Ch)

            _Ch = logging.FileHandler(self.log_file)
            _Ch.setFormatter(_InfoFormatter)
            self._InfoLogger_agent.addHandler(_Ch)

            _Ch = logging.FileHandler(self.log_file)
            _Ch.setFormatter(_ErrorFormatter)
            self._ErrorLogger_agent.addHandler(_Ch)

    def run(self):
        self.InitLogger()
        while True:
            log_message = self.log_q.get()
            if log_message is None:
                break
            if log_message.name == "tool_error":
                self._ErrorLogger_agent.log(log_message.levelno,log_message.getMessage())
            elif log_message.name == "tool_info":
                self._InfoLogger_agent.log(log_message.levelno,log_message.getMessage())
            elif log_message.name == "tool_debug":
                self._DebugLogger_agent.log(log_message.levelno,log_message.getMessage())
            else:
                self._InfoLogger_agent.log(log_message.levelno,log_message.getMessage())

    def kill(self):
        self.log_q.put(None)
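
# AutoGenManager watches the feedback queue: each worker reports "Done" when
# its queue is drained, or an error string on failure, in which case every
# worker is told to stop via the shared error event.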
class AutoGenManager(threading.Thread):
    def __init__(self,autogen_workers, feedback_q,error_event):
        super(AutoGenManager,self).__init__()
        self.autogen_workers = autogen_workers
        self.feedback_q = feedback_q
        self.Status = True
        self.error_event = error_event
    def run(self):
        try:
            fin_num = 0
            while True:
                badnews = self.feedback_q.get()
                if badnews is None:
                    break
                if badnews == "Done":
                    fin_num += 1
                else:
                    self.Status = False
                    self.TerminateWorkers()
                if fin_num == len(self.autogen_workers):
                    self.clearQueue()
                    for w in self.autogen_workers:
                        w.join()
                    break
        except Exception:
            return

    def clearQueue(self):
        taskq = self.autogen_workers[0].module_queue
        logq = self.autogen_workers[0].log_q
        clearQ(taskq)
        clearQ(self.feedback_q)
        clearQ(logq)
    def TerminateWorkers(self):
        self.error_event.set()
    def kill(self):
        self.feedback_q.put(None)
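
# AutoGenWorkerInProcess is one worker in the AutoGen process pool: it restores
# build state from the shared data pipe, then runs the AutoGen steps (and, when
# the binary cache is enabled, the cache-hit checks) for every module it pulls
# from the module queue.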
class AutoGenWorkerInProcess(mp.Process):
    def __init__(self,module_queue,data_pipe_file_path,feedback_q,file_lock, share_data,log_q,error_event):
        mp.Process.__init__(self)
        self.module_queue = module_queue
        self.data_pipe_file_path = data_pipe_file_path
        self.data_pipe = None
        self.feedback_q = feedback_q
        self.PlatformMetaFileSet = {}
        self.file_lock = file_lock
        self.share_data = share_data
        self.log_q = log_q
        self.error_event = error_event
    def GetPlatformMetaFile(self,filepath,root):
        try:
            return self.PlatformMetaFileSet[(filepath,root)]
        except:
            self.PlatformMetaFileSet[(filepath,root)] = filepath
            return self.PlatformMetaFileSet[(filepath,root)]
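    # Worker entry point: load the data pipe, rebuild the GlobalData state that
    # the AutoGen code expects, then process modules until the queue is empty
    # or an error is signalled.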
    def run(self):
        try:
            taskname = "Init"
            with self.file_lock:
                if not os.path.exists(self.data_pipe_file_path):
                    self.feedback_q.put(taskname + ":" + "load data pipe %s failed." % self.data_pipe_file_path)
                self.data_pipe = MemoryDataPipe()
                self.data_pipe.load(self.data_pipe_file_path)
            EdkLogger.LogClientInitialize(self.log_q)
            loglevel = self.data_pipe.Get("LogLevel")
            if not loglevel:
                loglevel = EdkLogger.INFO
            EdkLogger.SetLevel(loglevel)
            target = self.data_pipe.Get("P_Info").get("Target")
            toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
            archlist = self.data_pipe.Get("P_Info").get("ArchList")

            active_p = self.data_pipe.Get("P_Info").get("ActivePlatform")
            workspacedir = self.data_pipe.Get("P_Info").get("WorkspaceDir")
            PackagesPath = os.getenv("PACKAGES_PATH")
            mws.setWs(workspacedir, PackagesPath)
            self.Wa = WorkSpaceInfo(
                workspacedir,active_p,target,toolchain,archlist
                )
            self.Wa._SrcTimeStamp = self.data_pipe.Get("Workspace_timestamp")
            GlobalData.gGlobalDefines = self.data_pipe.Get("G_defines")
            GlobalData.gCommandLineDefines = self.data_pipe.Get("CL_defines")
            os.environ._data = self.data_pipe.Get("Env_Var")
            GlobalData.gWorkspace = workspacedir
            GlobalData.gDisableIncludePathCheck = False
            GlobalData.gFdfParser = self.data_pipe.Get("FdfParser")
            GlobalData.gDatabasePath = self.data_pipe.Get("DatabasePath")
            GlobalData.gBinCacheSource = self.data_pipe.Get("BinCacheSource")
            GlobalData.gBinCacheDest = self.data_pipe.Get("BinCacheDest")
            GlobalData.gCacheIR = self.data_pipe.Get("CacheIR")
            GlobalData.gEnableGenfdsMultiThread = self.data_pipe.Get("EnableGenfdsMultiThread")
            GlobalData.file_lock = self.file_lock
            CommandTarget = self.data_pipe.Get("CommandTarget")
            pcd_from_build_option = []
            for pcd_tuple in self.data_pipe.Get("BuildOptPcd"):
                pcd_id = ".".join((pcd_tuple[0],pcd_tuple[1]))
                if pcd_tuple[2].strip():
                    pcd_id = ".".join((pcd_id,pcd_tuple[2]))
                pcd_from_build_option.append("=".join((pcd_id,pcd_tuple[3])))
            GlobalData.BuildOptionPcd = pcd_from_build_option
            module_count = 0
            FfsCmd = self.data_pipe.Get("FfsCommand")
            if FfsCmd is None:
                FfsCmd = {}
            GlobalData.FfsCmd = FfsCmd
            PlatformMetaFile = self.GetPlatformMetaFile(self.data_pipe.Get("P_Info").get("ActivePlatform"),
                                                        self.data_pipe.Get("P_Info").get("WorkspaceDir"))
            libConstPcd = self.data_pipe.Get("LibConstPcd")
            Refes = self.data_pipe.Get("REFS")
            GlobalData.libConstPcd = libConstPcd
            GlobalData.Refes = Refes
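            # Consume modules until the shared queue is empty or another worker
            # has reported an error.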
            while True:
                if self.module_queue.empty():
                    break
                if self.error_event.is_set():
                    break
                module_count += 1
                module_file,module_root,module_path,module_basename,module_originalpath,module_arch,IsLib = self.module_queue.get_nowait()
                modulefullpath = os.path.join(module_root,module_file)
                taskname = " : ".join((modulefullpath,module_arch))
                module_metafile = PathClass(module_file,module_root)
                if module_path:
                    module_metafile.Path = module_path
                if module_basename:
                    module_metafile.BaseName = module_basename
                if module_originalpath:
                    module_metafile.OriginalPath = PathClass(module_originalpath,module_root)
                arch = module_arch
                target = self.data_pipe.Get("P_Info").get("Target")
                toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
                Ma = ModuleAutoGen(self.Wa,module_metafile,target,toolchain,arch,PlatformMetaFile,self.data_pipe)
                Ma.IsLibrary = IsLib
                if IsLib:
                    if (Ma.MetaFile.File,Ma.MetaFile.Root,Ma.Arch,Ma.MetaFile.Path) in libConstPcd:
                        Ma.ConstPcd = libConstPcd[(Ma.MetaFile.File,Ma.MetaFile.Root,Ma.Arch,Ma.MetaFile.Path)]
                    if (Ma.MetaFile.File,Ma.MetaFile.Root,Ma.Arch,Ma.MetaFile.Path) in Refes:
                        Ma.ReferenceModules = Refes[(Ma.MetaFile.File,Ma.MetaFile.Root,Ma.Arch,Ma.MetaFile.Path)]
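                # First-stage cache lookup: hash the module sources before any
                # AutoGen output exists; on a pre-makefile cache hit the module
                # is skipped entirely.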
                if GlobalData.gBinCacheSource and CommandTarget in [None, "", "all"]:
                    Ma.GenModuleFilesHash(GlobalData.gCacheIR)
                    Ma.GenPreMakefileHash(GlobalData.gCacheIR)
                    if Ma.CanSkipbyPreMakefileCache(GlobalData.gCacheIR):
                        continue

                Ma.CreateCodeFile(False)
                Ma.CreateMakeFile(False,GenFfsList=FfsCmd.get((Ma.MetaFile.File, Ma.Arch),[]))

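                # Second-stage cache lookup: with the makefile generated, hash
                # the make-time inputs as well and skip the module on a
                # make-cache hit.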
                if GlobalData.gBinCacheSource and CommandTarget in [None, "", "all"]:
                    Ma.GenMakeHeaderFilesHash(GlobalData.gCacheIR)
                    Ma.GenMakeHash(GlobalData.gCacheIR)
                    if Ma.CanSkipbyMakeCache(GlobalData.gCacheIR):
                        continue
        except Empty:
            pass
        except:
            traceback.print_exc(file=sys.stdout)
            self.feedback_q.put(taskname)
        finally:
            self.feedback_q.put("Done")
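    # Debug helper: report how many AutoGen objects and workspace database
    # files (DEC/DSC/INF) this worker process has cached.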
    def printStatus(self):
        print("Process ID: %d Run %d modules in AutoGen " % (os.getpid(),len(AutoGen.Cache())))
        print("Process ID: %d Run %d modules in AutoGenInfo " % (os.getpid(),len(AutoGenInfo.GetCache())))
        groupobj = {}
        for buildobj in BuildDB.BuildObject.GetCache().values():
            if str(buildobj).lower().endswith("dec"):
                try:
                    groupobj['dec'].append(str(buildobj))
                except:
                    groupobj['dec'] = [str(buildobj)]
            if str(buildobj).lower().endswith("dsc"):
                try:
                    groupobj['dsc'].append(str(buildobj))
                except:
                    groupobj['dsc'] = [str(buildobj)]

            if str(buildobj).lower().endswith("inf"):
                try:
                    groupobj['inf'].append(str(buildobj))
                except:
                    groupobj['inf'] = [str(buildobj)]

        print("Process ID: %d Run %d pkg in WDB " % (os.getpid(),len(groupobj.get("dec",[]))))
        print("Process ID: %d Run %d pla in WDB " % (os.getpid(),len(groupobj.get("dsc",[]))))
        print("Process ID: %d Run %d inf in WDB " % (os.getpid(),len(groupobj.get("inf",[]))))