# NOTE(review): gitweb scrape residue removed. The content below is the blob
# view of BaseTools/Source/Python/AutoGen/AutoGenWorker.py (commit 94ea61a4).
## @file
# Create makefile for MS nmake and GNU make
#
# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
from __future__ import absolute_import

import multiprocessing as mp
import threading
import logging
import os
import sys
import traceback

# Python 3 queue, with a Python 2 fallback (file still targets both).
try:
    from queue import Empty
except ImportError:
    from Queue import Empty

from AutoGen.AutoGen import AutoGen
from AutoGen.DataPipe import MemoryDataPipe
from AutoGen.ModuleAutoGen import ModuleAutoGen
from AutoGen.ModuleAutoGenHelper import WorkSpaceInfo, AutoGenInfo
import Common.EdkLogger as EdkLogger
import Common.GlobalData as GlobalData
from Common.Misc import PathClass
from Common.MultipleWorkspace import MultipleWorkspace as mws
from Workspace.WorkspaceDatabase import BuildDB
35 class LogAgent(threading
.Thread
):
36 def __init__(self
,log_q
,log_level
,log_file
=None):
37 super(LogAgent
,self
).__init
__()
39 self
.log_level
= log_level
40 self
.log_file
= log_file
42 # For DEBUG level (All DEBUG_0~9 are applicable)
43 self
._DebugLogger
_agent
= logging
.getLogger("tool_debug_agent")
44 _DebugFormatter
= logging
.Formatter("[%(asctime)s.%(msecs)d]: %(message)s", datefmt
="%H:%M:%S")
45 self
._DebugLogger
_agent
.setLevel(self
.log_level
)
46 _DebugChannel
= logging
.StreamHandler(sys
.stdout
)
47 _DebugChannel
.setFormatter(_DebugFormatter
)
48 self
._DebugLogger
_agent
.addHandler(_DebugChannel
)
50 # For VERBOSE, INFO, WARN level
51 self
._InfoLogger
_agent
= logging
.getLogger("tool_info_agent")
52 _InfoFormatter
= logging
.Formatter("%(message)s")
53 self
._InfoLogger
_agent
.setLevel(self
.log_level
)
54 _InfoChannel
= logging
.StreamHandler(sys
.stdout
)
55 _InfoChannel
.setFormatter(_InfoFormatter
)
56 self
._InfoLogger
_agent
.addHandler(_InfoChannel
)
59 self
._ErrorLogger
_agent
= logging
.getLogger("tool_error_agent")
60 _ErrorFormatter
= logging
.Formatter("%(message)s")
61 self
._ErrorLogger
_agent
.setLevel(self
.log_level
)
62 _ErrorCh
= logging
.StreamHandler(sys
.stderr
)
63 _ErrorCh
.setFormatter(_ErrorFormatter
)
64 self
._ErrorLogger
_agent
.addHandler(_ErrorCh
)
67 if os
.path
.exists(self
.log_file
):
68 os
.remove(self
.log_file
)
69 _Ch
= logging
.FileHandler(self
.log_file
)
70 _Ch
.setFormatter(_DebugFormatter
)
71 self
._DebugLogger
_agent
.addHandler(_Ch
)
73 _Ch
= logging
.FileHandler(self
.log_file
)
74 _Ch
.setFormatter(_InfoFormatter
)
75 self
._InfoLogger
_agent
.addHandler(_Ch
)
77 _Ch
= logging
.FileHandler(self
.log_file
)
78 _Ch
.setFormatter(_ErrorFormatter
)
79 self
._ErrorLogger
_agent
.addHandler(_Ch
)
84 log_message
= self
.log_q
.get()
85 if log_message
is None:
87 if log_message
.name
== "tool_error":
88 self
._ErrorLogger
_agent
.log(log_message
.levelno
,log_message
.getMessage())
89 elif log_message
.name
== "tool_info":
90 self
._InfoLogger
_agent
.log(log_message
.levelno
,log_message
.getMessage())
91 elif log_message
.name
== "tool_debug":
92 self
._DebugLogger
_agent
.log(log_message
.levelno
,log_message
.getMessage())
94 self
._InfoLogger
_agent
.log(log_message
.levelno
,log_message
.getMessage())
98 class AutoGenManager(threading
.Thread
):
99 def __init__(self
,autogen_workers
, feedback_q
,error_event
):
100 super(AutoGenManager
,self
).__init
__()
101 self
.autogen_workers
= autogen_workers
102 self
.feedback_q
= feedback_q
104 self
.error_event
= error_event
109 badnews
= self
.feedback_q
.get()
112 if badnews
== "Done":
116 self
.TerminateWorkers()
117 if fin_num
== len(self
.autogen_workers
):
119 for w
in self
.autogen_workers
:
125 def clearQueue(self
):
126 taskq
= self
.autogen_workers
[0].module_queue
127 logq
= self
.autogen_workers
[0].log_q
129 clearQ(self
.feedback_q
)
131 def TerminateWorkers(self
):
132 self
.error_event
.set()
134 self
.feedback_q
.put(None)
135 class AutoGenWorkerInProcess(mp
.Process
):
136 def __init__(self
,module_queue
,data_pipe_file_path
,feedback_q
,file_lock
,cache_lock
,share_data
,log_q
,error_event
):
137 mp
.Process
.__init
__(self
)
138 self
.module_queue
= module_queue
139 self
.data_pipe_file_path
=data_pipe_file_path
140 self
.data_pipe
= None
141 self
.feedback_q
= feedback_q
142 self
.PlatformMetaFileSet
= {}
143 self
.file_lock
= file_lock
144 self
.cache_lock
= cache_lock
145 self
.share_data
= share_data
147 self
.error_event
= error_event
148 def GetPlatformMetaFile(self
,filepath
,root
):
150 return self
.PlatformMetaFileSet
[(filepath
,root
)]
152 self
.PlatformMetaFileSet
[(filepath
,root
)] = filepath
153 return self
.PlatformMetaFileSet
[(filepath
,root
)]
159 self
.data_pipe
= MemoryDataPipe()
160 self
.data_pipe
.load(self
.data_pipe_file_path
)
162 self
.feedback_q
.put(taskname
+ ":" + "load data pipe %s failed." % self
.data_pipe_file_path
)
163 EdkLogger
.LogClientInitialize(self
.log_q
)
164 loglevel
= self
.data_pipe
.Get("LogLevel")
166 loglevel
= EdkLogger
.INFO
167 EdkLogger
.SetLevel(loglevel
)
168 target
= self
.data_pipe
.Get("P_Info").get("Target")
169 toolchain
= self
.data_pipe
.Get("P_Info").get("ToolChain")
170 archlist
= self
.data_pipe
.Get("P_Info").get("ArchList")
172 active_p
= self
.data_pipe
.Get("P_Info").get("ActivePlatform")
173 workspacedir
= self
.data_pipe
.Get("P_Info").get("WorkspaceDir")
174 PackagesPath
= os
.getenv("PACKAGES_PATH")
175 mws
.setWs(workspacedir
, PackagesPath
)
176 self
.Wa
= WorkSpaceInfo(
177 workspacedir
,active_p
,target
,toolchain
,archlist
179 self
.Wa
._SrcTimeStamp
= self
.data_pipe
.Get("Workspace_timestamp")
180 GlobalData
.gGlobalDefines
= self
.data_pipe
.Get("G_defines")
181 GlobalData
.gCommandLineDefines
= self
.data_pipe
.Get("CL_defines")
182 os
.environ
._data
= self
.data_pipe
.Get("Env_Var")
183 GlobalData
.gWorkspace
= workspacedir
184 GlobalData
.gDisableIncludePathCheck
= False
185 GlobalData
.gFdfParser
= self
.data_pipe
.Get("FdfParser")
186 GlobalData
.gDatabasePath
= self
.data_pipe
.Get("DatabasePath")
187 GlobalData
.gBinCacheSource
= self
.data_pipe
.Get("BinCacheSource")
188 GlobalData
.gBinCacheDest
= self
.data_pipe
.Get("BinCacheDest")
189 GlobalData
.gCacheIR
= self
.share_data
190 GlobalData
.gEnableGenfdsMultiThread
= self
.data_pipe
.Get("EnableGenfdsMultiThread")
191 GlobalData
.file_lock
= self
.file_lock
192 GlobalData
.cache_lock
= self
.cache_lock
193 CommandTarget
= self
.data_pipe
.Get("CommandTarget")
194 pcd_from_build_option
= []
195 for pcd_tuple
in self
.data_pipe
.Get("BuildOptPcd"):
196 pcd_id
= ".".join((pcd_tuple
[0],pcd_tuple
[1]))
197 if pcd_tuple
[2].strip():
198 pcd_id
= ".".join((pcd_id
,pcd_tuple
[2]))
199 pcd_from_build_option
.append("=".join((pcd_id
,pcd_tuple
[3])))
200 GlobalData
.BuildOptionPcd
= pcd_from_build_option
202 FfsCmd
= self
.data_pipe
.Get("FfsCommand")
205 GlobalData
.FfsCmd
= FfsCmd
206 PlatformMetaFile
= self
.GetPlatformMetaFile(self
.data_pipe
.Get("P_Info").get("ActivePlatform"),
207 self
.data_pipe
.Get("P_Info").get("WorkspaceDir"))
208 libConstPcd
= self
.data_pipe
.Get("LibConstPcd")
209 Refes
= self
.data_pipe
.Get("REFS")
210 GlobalData
.libConstPcd
= libConstPcd
211 GlobalData
.Refes
= Refes
213 if self
.module_queue
.empty():
215 if self
.error_event
.is_set():
218 module_file
,module_root
,module_path
,module_basename
,module_originalpath
,module_arch
,IsLib
= self
.module_queue
.get_nowait()
219 modulefullpath
= os
.path
.join(module_root
,module_file
)
220 taskname
= " : ".join((modulefullpath
,module_arch
))
221 module_metafile
= PathClass(module_file
,module_root
)
223 module_metafile
.Path
= module_path
225 module_metafile
.BaseName
= module_basename
226 if module_originalpath
:
227 module_metafile
.OriginalPath
= PathClass(module_originalpath
,module_root
)
229 target
= self
.data_pipe
.Get("P_Info").get("Target")
230 toolchain
= self
.data_pipe
.Get("P_Info").get("ToolChain")
231 Ma
= ModuleAutoGen(self
.Wa
,module_metafile
,target
,toolchain
,arch
,PlatformMetaFile
,self
.data_pipe
)
234 if (Ma
.MetaFile
.File
,Ma
.MetaFile
.Root
,Ma
.Arch
,Ma
.MetaFile
.Path
) in libConstPcd
:
235 Ma
.ConstPcd
= libConstPcd
[(Ma
.MetaFile
.File
,Ma
.MetaFile
.Root
,Ma
.Arch
,Ma
.MetaFile
.Path
)]
236 if (Ma
.MetaFile
.File
,Ma
.MetaFile
.Root
,Ma
.Arch
,Ma
.MetaFile
.Path
) in Refes
:
237 Ma
.ReferenceModules
= Refes
[(Ma
.MetaFile
.File
,Ma
.MetaFile
.Root
,Ma
.Arch
,Ma
.MetaFile
.Path
)]
238 if GlobalData
.gBinCacheSource
and CommandTarget
in [None, "", "all"]:
239 Ma
.GenModuleFilesHash(GlobalData
.gCacheIR
)
240 Ma
.GenPreMakefileHash(GlobalData
.gCacheIR
)
241 if Ma
.CanSkipbyPreMakefileCache(GlobalData
.gCacheIR
):
244 Ma
.CreateCodeFile(False)
245 Ma
.CreateMakeFile(False,GenFfsList
=FfsCmd
.get((Ma
.MetaFile
.Path
, Ma
.Arch
),[]))
247 if GlobalData
.gBinCacheSource
and CommandTarget
in [None, "", "all"]:
248 Ma
.GenMakeHeaderFilesHash(GlobalData
.gCacheIR
)
249 Ma
.GenMakeHash(GlobalData
.gCacheIR
)
250 if Ma
.CanSkipbyMakeCache(GlobalData
.gCacheIR
):
253 Ma
.PrintFirstMakeCacheMissFile(GlobalData
.gCacheIR
)
257 traceback
.print_exc(file=sys
.stdout
)
258 self
.feedback_q
.put(taskname
)
260 self
.feedback_q
.put("Done")
261 def printStatus(self
):
262 print("Processs ID: %d Run %d modules in AutoGen " % (os
.getpid(),len(AutoGen
.Cache())))
263 print("Processs ID: %d Run %d modules in AutoGenInfo " % (os
.getpid(),len(AutoGenInfo
.GetCache())))
265 for buildobj
in BuildDB
.BuildObject
.GetCache().values():
266 if str(buildobj
).lower().endswith("dec"):
268 groupobj
['dec'].append(str(buildobj
))
270 groupobj
['dec'] = [str(buildobj
)]
271 if str(buildobj
).lower().endswith("dsc"):
273 groupobj
['dsc'].append(str(buildobj
))
275 groupobj
['dsc'] = [str(buildobj
)]
277 if str(buildobj
).lower().endswith("inf"):
279 groupobj
['inf'].append(str(buildobj
))
281 groupobj
['inf'] = [str(buildobj
)]
283 print("Processs ID: %d Run %d pkg in WDB " % (os
.getpid(),len(groupobj
.get("dec",[]))))
284 print("Processs ID: %d Run %d pla in WDB " % (os
.getpid(),len(groupobj
.get("dsc",[]))))
285 print("Processs ID: %d Run %d inf in WDB " % (os
.getpid(),len(groupobj
.get("inf",[]))))