# Scraped from: git.proxmox.com mirror_edk2 — BaseTools/Source/Python/AutoGen/AutoGenWorker.py
## @file
# Create makefile for MS nmake and GNU make
#
# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
from __future__ import absolute_import

import logging
import multiprocessing as mp
import os
import sys
import threading
import traceback
try:
    from queue import Empty
except ImportError:
    # Python 2 fallback kept from the original file.
    from Queue import Empty

import Common.EdkLogger as EdkLogger
import Common.GlobalData as GlobalData
from AutoGen.AutoGen import AutoGen
from AutoGen.DataPipe import MemoryDataPipe
from AutoGen.ModuleAutoGen import ModuleAutoGen
from AutoGen.ModuleAutoGenHelper import WorkSpaceInfo, AutoGenInfo
from Common.Misc import PathClass
from Common.MultipleWorkspace import MultipleWorkspace as mws
from Workspace.WorkspaceDatabase import BuildDB
class LogAgent(threading.Thread):
    """Background thread that drains ``logging.LogRecord`` objects from a
    queue and replays each one on a category-specific local logger
    (debug -> stdout, info -> stdout, error -> stderr), optionally mirroring
    everything into a log file.

    NOTE(review): SOURCE is a line-mangled scrape with gaps; the method
    headers and loop-control lines missing from the scrape were
    reconstructed to match the visible fragments — confirm against the
    pristine upstream file.
    """

    def __init__(self, log_q, log_level, log_file=None):
        """Record the record queue, threshold level and optional file path.

        log_q:     queue the worker processes push LogRecords into.
        log_level: level applied to every agent-side logger.
        log_file:  optional path; when set, all output is also written there.
        """
        super(LogAgent, self).__init__()
        self.log_q = log_q
        self.log_level = log_level
        self.log_file = log_file

    def InitLogger(self):
        """Build the three agent-side loggers and attach their handlers."""
        # For DEBUG level (All DEBUG_0~9 are applicable)
        self._DebugLogger_agent = logging.getLogger("tool_debug_agent")
        _DebugFormatter = logging.Formatter("[%(asctime)s.%(msecs)d]: %(message)s", datefmt="%H:%M:%S")
        self._DebugLogger_agent.setLevel(self.log_level)
        _DebugChannel = logging.StreamHandler(sys.stdout)
        _DebugChannel.setFormatter(_DebugFormatter)
        self._DebugLogger_agent.addHandler(_DebugChannel)

        # For VERBOSE, INFO, WARN level
        self._InfoLogger_agent = logging.getLogger("tool_info_agent")
        _InfoFormatter = logging.Formatter("%(message)s")
        self._InfoLogger_agent.setLevel(self.log_level)
        _InfoChannel = logging.StreamHandler(sys.stdout)
        _InfoChannel.setFormatter(_InfoFormatter)
        self._InfoLogger_agent.addHandler(_InfoChannel)

        # For ERROR level — goes to stderr, unlike the two loggers above.
        self._ErrorLogger_agent = logging.getLogger("tool_error_agent")
        _ErrorFormatter = logging.Formatter("%(message)s")
        self._ErrorLogger_agent.setLevel(self.log_level)
        _ErrorCh = logging.StreamHandler(sys.stderr)
        _ErrorCh.setFormatter(_ErrorFormatter)
        self._ErrorLogger_agent.addHandler(_ErrorCh)

        if self.log_file:
            # Start the mirror file fresh on every session.
            if os.path.exists(self.log_file):
                os.remove(self.log_file)
            _Ch = logging.FileHandler(self.log_file)
            _Ch.setFormatter(_DebugFormatter)
            self._DebugLogger_agent.addHandler(_Ch)

            _Ch = logging.FileHandler(self.log_file)
            _Ch.setFormatter(_InfoFormatter)
            self._InfoLogger_agent.addHandler(_Ch)

            _Ch = logging.FileHandler(self.log_file)
            _Ch.setFormatter(_ErrorFormatter)
            self._ErrorLogger_agent.addHandler(_Ch)

    def run(self):
        """Replay queued records until the ``None`` sentinel arrives."""
        self.InitLogger()
        while True:
            log_message = self.log_q.get()
            if log_message is None:
                # Sentinel pushed by kill(): shut the agent down.
                break
            # Dispatch on the producer-side logger name; anything
            # unrecognized falls through to the info logger.
            if log_message.name == "tool_error":
                self._ErrorLogger_agent.log(log_message.levelno, log_message.getMessage())
            elif log_message.name == "tool_info":
                self._InfoLogger_agent.log(log_message.levelno, log_message.getMessage())
            elif log_message.name == "tool_debug":
                self._DebugLogger_agent.log(log_message.levelno, log_message.getMessage())
            else:
                self._InfoLogger_agent.log(log_message.levelno, log_message.getMessage())

    def kill(self):
        """Ask run() to exit by enqueueing the ``None`` sentinel."""
        self.log_q.put(None)
class AutoGenManager(threading.Thread):
    """Thread that supervises the AutoGen worker processes via their shared
    feedback queue: counts "Done" acknowledgements, tears the pool down on
    the first failure report, and drains the queues when everyone finished.

    NOTE(review): reconstructed from a line-mangled scrape; lines absent
    from the scrape (loop control, the ``Status`` flag, ``clearQ`` calls)
    follow the visible fragments — confirm against the pristine file.
    """

    def __init__(self, autogen_workers, feedback_q, error_event):
        """Keep references to the worker pool and its coordination objects."""
        super(AutoGenManager, self).__init__()
        self.autogen_workers = autogen_workers
        self.feedback_q = feedback_q
        # Overall result: flipped to False when any worker reports bad news.
        self.Status = True
        self.error_event = error_event

    def run(self):
        """Consume feedback until all workers are done or one fails."""
        try:
            fin_num = 0
            while True:
                badnews = self.feedback_q.get()
                if badnews is None:
                    # Sentinel from kill(): stop supervising.
                    break
                if badnews == "Done":
                    fin_num += 1
                else:
                    # Any other message is a failure report from a worker.
                    self.Status = False
                    self.TerminateWorkers()
                if fin_num == len(self.autogen_workers):
                    self.clearQueue()
                    for w in self.autogen_workers:
                        w.join()
                    break
        except Exception:
            return

    def clearQueue(self):
        """Drain the task, feedback and log queues so joins cannot block."""
        taskq = self.autogen_workers[0].module_queue
        logq = self.autogen_workers[0].log_q
        clearQ(taskq)
        clearQ(self.feedback_q)
        clearQ(logq)

    def TerminateWorkers(self):
        """Signal every worker process to stop via the shared error event."""
        self.error_event.set()

    def kill(self):
        """Ask run() to exit by enqueueing the ``None`` sentinel."""
        self.feedback_q.put(None)
class AutoGenWorkerInProcess(mp.Process):
    """Worker process that pulls module descriptions off a shared queue and
    runs ModuleAutoGen (code file + makefile generation, plus optional
    binary-cache hashing) for each one, reporting per-task failures and a
    final "Done" on the feedback queue.

    NOTE(review): reconstructed from a line-mangled scrape with gaps; the
    missing control-flow lines (method headers, ``try``/``while``/``break``/
    ``continue``) were filled in to match the visible fragments — confirm
    against the pristine upstream file.
    """

    def __init__(self, module_queue, data_pipe_file_path, feedback_q, file_lock,
                 cache_lock, share_data, log_q, error_event):
        """Stash the IPC objects; the data pipe itself is loaded lazily in run()."""
        mp.Process.__init__(self)
        self.module_queue = module_queue
        self.data_pipe_file_path = data_pipe_file_path
        self.data_pipe = None
        self.feedback_q = feedback_q
        self.PlatformMetaFileSet = {}  # (filepath, root) -> platform meta file, memoized
        self.file_lock = file_lock
        self.cache_lock = cache_lock
        self.share_data = share_data
        self.log_q = log_q
        self.error_event = error_event

    def GetPlatformMetaFile(self, filepath, root):
        """Return the memoized platform meta file for (filepath, root),
        seeding the cache with *filepath* itself on first use."""
        try:
            return self.PlatformMetaFileSet[(filepath, root)]
        except KeyError:
            self.PlatformMetaFileSet[(filepath, root)] = filepath
            return self.PlatformMetaFileSet[(filepath, root)]

    def run(self):
        """Process-entry point: restore global build state from the data
        pipe, then generate code/makefiles for every queued module."""
        try:
            taskname = "Init"
            # The data pipe file is shared between processes; serialize access.
            with self.file_lock:
                if not os.path.exists(self.data_pipe_file_path):
                    self.feedback_q.put(taskname + ":" + "load data pipe %s failed." % self.data_pipe_file_path)
                self.data_pipe = MemoryDataPipe()
                self.data_pipe.load(self.data_pipe_file_path)
            EdkLogger.LogClientInitialize(self.log_q)
            loglevel = self.data_pipe.Get("LogLevel")
            if not loglevel:
                loglevel = EdkLogger.INFO
            EdkLogger.SetLevel(loglevel)
            target = self.data_pipe.Get("P_Info").get("Target")
            toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
            archlist = self.data_pipe.Get("P_Info").get("ArchList")

            active_p = self.data_pipe.Get("P_Info").get("ActivePlatform")
            workspacedir = self.data_pipe.Get("P_Info").get("WorkspaceDir")
            PackagesPath = os.getenv("PACKAGES_PATH")
            mws.setWs(workspacedir, PackagesPath)
            self.Wa = WorkSpaceInfo(
                workspacedir, active_p, target, toolchain, archlist
                )
            self.Wa._SrcTimeStamp = self.data_pipe.Get("Workspace_timestamp")
            # Rehydrate the GlobalData module state the main process captured.
            GlobalData.gGlobalDefines = self.data_pipe.Get("G_defines")
            GlobalData.gCommandLineDefines = self.data_pipe.Get("CL_defines")
            os.environ._data = self.data_pipe.Get("Env_Var")
            GlobalData.gWorkspace = workspacedir
            GlobalData.gDisableIncludePathCheck = False
            GlobalData.gFdfParser = self.data_pipe.Get("FdfParser")
            GlobalData.gDatabasePath = self.data_pipe.Get("DatabasePath")
            GlobalData.gBinCacheSource = self.data_pipe.Get("BinCacheSource")
            GlobalData.gBinCacheDest = self.data_pipe.Get("BinCacheDest")
            GlobalData.gCacheIR = self.share_data
            GlobalData.gEnableGenfdsMultiThread = self.data_pipe.Get("EnableGenfdsMultiThread")
            GlobalData.file_lock = self.file_lock
            GlobalData.cache_lock = self.cache_lock
            CommandTarget = self.data_pipe.Get("CommandTarget")
            # Rebuild the "--pcd" command-line overrides as strings:
            # TokenSpace.PcdName[.FieldName]=Value
            pcd_from_build_option = []
            for pcd_tuple in self.data_pipe.Get("BuildOptPcd"):
                pcd_id = ".".join((pcd_tuple[0], pcd_tuple[1]))
                if pcd_tuple[2].strip():
                    pcd_id = ".".join((pcd_id, pcd_tuple[2]))
                pcd_from_build_option.append("=".join((pcd_id, pcd_tuple[3])))
            GlobalData.BuildOptionPcd = pcd_from_build_option

            FfsCmd = self.data_pipe.Get("FfsCommand")
            if FfsCmd is None:
                FfsCmd = {}
            GlobalData.FfsCmd = FfsCmd
            PlatformMetaFile = self.GetPlatformMetaFile(self.data_pipe.Get("P_Info").get("ActivePlatform"),
                                                        self.data_pipe.Get("P_Info").get("WorkspaceDir"))
            libConstPcd = self.data_pipe.Get("LibConstPcd")
            Refes = self.data_pipe.Get("REFS")
            GlobalData.libConstPcd = libConstPcd
            GlobalData.Refes = Refes
            while True:
                if self.module_queue.empty():
                    break
                if self.error_event.is_set():
                    # Another worker failed; stop picking up new modules.
                    break
                module_file, module_root, module_path, module_basename, module_originalpath, module_arch, IsLib = self.module_queue.get_nowait()
                modulefullpath = os.path.join(module_root, module_file)
                taskname = " : ".join((modulefullpath, module_arch))
                module_metafile = PathClass(module_file, module_root)
                if module_path:
                    module_metafile.Path = module_path
                if module_basename:
                    module_metafile.BaseName = module_basename
                if module_originalpath:
                    module_metafile.OriginalPath = PathClass(module_originalpath, module_root)
                arch = module_arch
                target = self.data_pipe.Get("P_Info").get("Target")
                toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
                Ma = ModuleAutoGen(self.Wa, module_metafile, target, toolchain, arch, PlatformMetaFile, self.data_pipe)
                Ma.IsLibrary = IsLib
                # Restore per-library constant PCDs and reverse references
                # captured by the main process.
                if (Ma.MetaFile.File, Ma.MetaFile.Root, Ma.Arch, Ma.MetaFile.Path) in libConstPcd:
                    Ma.ConstPcd = libConstPcd[(Ma.MetaFile.File, Ma.MetaFile.Root, Ma.Arch, Ma.MetaFile.Path)]
                if (Ma.MetaFile.File, Ma.MetaFile.Root, Ma.Arch, Ma.MetaFile.Path) in Refes:
                    Ma.ReferenceModules = Refes[(Ma.MetaFile.File, Ma.MetaFile.Root, Ma.Arch, Ma.MetaFile.Path)]
                # Binary-cache fast path: skip generation when the
                # pre-makefile hashes say the cached result is reusable.
                if GlobalData.gBinCacheSource and CommandTarget in [None, "", "all"]:
                    Ma.GenModuleFilesHash(GlobalData.gCacheIR)
                    Ma.GenPreMakefileHash(GlobalData.gCacheIR)
                    if Ma.CanSkipbyPreMakefileCache(GlobalData.gCacheIR):
                        continue

                Ma.CreateCodeFile(False)
                Ma.CreateMakeFile(False, GenFfsList=FfsCmd.get((Ma.MetaFile.Path, Ma.Arch), []))

                if GlobalData.gBinCacheSource and CommandTarget in [None, "", "all"]:
                    Ma.GenMakeHeaderFilesHash(GlobalData.gCacheIR)
                    Ma.GenMakeHash(GlobalData.gCacheIR)
                    if Ma.CanSkipbyMakeCache(GlobalData.gCacheIR):
                        continue
                    else:
                        Ma.PrintFirstMakeCacheMissFile(GlobalData.gCacheIR)
        except Empty:
            # Queue raced empty between the empty() check and get_nowait().
            pass
        except:
            traceback.print_exc(file=sys.stdout)
            self.feedback_q.put(taskname)
        self.feedback_q.put("Done")

    def printStatus(self):
        """Debug helper: dump cache population counts for this process."""
        print("Processs ID: %d Run %d modules in AutoGen " % (os.getpid(), len(AutoGen.Cache())))
        print("Processs ID: %d Run %d modules in AutoGenInfo " % (os.getpid(), len(AutoGenInfo.GetCache())))
        groupobj = {}
        for buildobj in BuildDB.BuildObject.GetCache().values():
            if str(buildobj).lower().endswith("dec"):
                try:
                    groupobj['dec'].append(str(buildobj))
                except KeyError:
                    groupobj['dec'] = [str(buildobj)]
            if str(buildobj).lower().endswith("dsc"):
                try:
                    groupobj['dsc'].append(str(buildobj))
                except KeyError:
                    groupobj['dsc'] = [str(buildobj)]
            if str(buildobj).lower().endswith("inf"):
                try:
                    groupobj['inf'].append(str(buildobj))
                except KeyError:
                    groupobj['inf'] = [str(buildobj)]
        print("Processs ID: %d Run %d pkg in WDB " % (os.getpid(), len(groupobj.get("dec", []))))
        print("Processs ID: %d Run %d pla in WDB " % (os.getpid(), len(groupobj.get("dsc", []))))
        print("Processs ID: %d Run %d inf in WDB " % (os.getpid(), len(groupobj.get("inf", []))))