]>
git.proxmox.com Git - mirror_edk2.git/blob - BaseTools/Source/Python/AutoGen/AutoGenWorker.py
# Create makefile for MS nmake and GNU make
#
# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
7 from __future__
import absolute_import
8 import multiprocessing
as mp
10 from Common
.Misc
import PathClass
11 from AutoGen
.ModuleAutoGen
import ModuleAutoGen
12 from AutoGen
.ModuleAutoGenHelper
import WorkSpaceInfo
,AutoGenInfo
13 import Common
.GlobalData
as GlobalData
14 import Common
.EdkLogger
as EdkLogger
16 from Common
.MultipleWorkspace
import MultipleWorkspace
as mws
17 from AutoGen
.AutoGen
import AutoGen
18 from Workspace
.WorkspaceDatabase
import BuildDB
20 from queue
import Empty
22 from Queue
import Empty
25 from AutoGen
.DataPipe
import MemoryDataPipe
class LogAgent(threading.Thread):
    """Background thread that drains a log-record queue filled by worker
    processes and dispatches each record to the matching local logger.

    Fix: the visible code accepted ``log_q`` but never stored it, while the
    run loop reads ``self.log_q.get()`` — restore the assignment.
    """
    def __init__(self, log_q, log_level, log_file=None):
        """Create the agent.

        log_q     -- queue of logging.LogRecord objects produced by workers
                     (consumed by this thread's run loop)
        log_level -- logging level applied to every agent logger
        log_file  -- optional path; when given, records are mirrored to a file
        """
        super(LogAgent, self).__init__()
        self.log_q = log_q          # was dropped by the broken code; run() reads it
        self.log_level = log_level
        self.log_file = log_file
42 # For DEBUG level (All DEBUG_0~9 are applicable)
43 self
._DebugLogger
_agent
= logging
.getLogger("tool_debug_agent")
44 _DebugFormatter
= logging
.Formatter("[%(asctime)s.%(msecs)d]: %(message)s", datefmt
="%H:%M:%S")
45 self
._DebugLogger
_agent
.setLevel(self
.log_level
)
46 _DebugChannel
= logging
.StreamHandler(sys
.stdout
)
47 _DebugChannel
.setFormatter(_DebugFormatter
)
48 self
._DebugLogger
_agent
.addHandler(_DebugChannel
)
50 # For VERBOSE, INFO, WARN level
51 self
._InfoLogger
_agent
= logging
.getLogger("tool_info_agent")
52 _InfoFormatter
= logging
.Formatter("%(message)s")
53 self
._InfoLogger
_agent
.setLevel(self
.log_level
)
54 _InfoChannel
= logging
.StreamHandler(sys
.stdout
)
55 _InfoChannel
.setFormatter(_InfoFormatter
)
56 self
._InfoLogger
_agent
.addHandler(_InfoChannel
)
59 self
._ErrorLogger
_agent
= logging
.getLogger("tool_error_agent")
60 _ErrorFormatter
= logging
.Formatter("%(message)s")
61 self
._ErrorLogger
_agent
.setLevel(self
.log_level
)
62 _ErrorCh
= logging
.StreamHandler(sys
.stderr
)
63 _ErrorCh
.setFormatter(_ErrorFormatter
)
64 self
._ErrorLogger
_agent
.addHandler(_ErrorCh
)
67 if os
.path
.exists(self
.log_file
):
68 os
.remove(self
.log_file
)
69 _Ch
= logging
.FileHandler(self
.log_file
)
70 _Ch
.setFormatter(_DebugFormatter
)
71 self
._DebugLogger
_agent
.addHandler(_Ch
)
73 _Ch
= logging
.FileHandler(self
.log_file
)
74 _Ch
.setFormatter(_InfoFormatter
)
75 self
._InfoLogger
_agent
.addHandler(_Ch
)
77 _Ch
= logging
.FileHandler(self
.log_file
)
78 _Ch
.setFormatter(_ErrorFormatter
)
79 self
._ErrorLogger
_agent
.addHandler(_Ch
)
84 log_message
= self
.log_q
.get()
85 if log_message
is None:
87 if log_message
.name
== "tool_error":
88 self
._ErrorLogger
_agent
.log(log_message
.levelno
,log_message
.getMessage())
89 elif log_message
.name
== "tool_info":
90 self
._InfoLogger
_agent
.log(log_message
.levelno
,log_message
.getMessage())
91 elif log_message
.name
== "tool_debug":
92 self
._DebugLogger
_agent
.log(log_message
.levelno
,log_message
.getMessage())
94 self
._InfoLogger
_agent
.log(log_message
.levelno
,log_message
.getMessage())
class AutoGenManager(threading.Thread):
    # Thread on the build-orchestrator side: it owns the set of AutoGen
    # worker processes and listens on the shared feedback queue for their
    # per-task results (see the run loop reading self.feedback_q below).
    def __init__(self, autogen_workers, feedback_q, error_event):
        # autogen_workers: AutoGenWorkerInProcess instances this manager tracks
        # feedback_q:      queue the workers put task names / "Done" markers on
        # error_event:     shared event; set to tell all workers to terminate
        super(AutoGenManager, self).__init__()
        self.autogen_workers = autogen_workers
        self.feedback_q = feedback_q
        self.error_event = error_event
109 badnews
= self
.feedback_q
.get()
112 if badnews
== "Done":
116 self
.TerminateWorkers()
117 if fin_num
== len(self
.autogen_workers
):
119 for w
in self
.autogen_workers
:
def clearQueue(self):
    """Drain the shared queues so no worker blocks on a full queue during
    shutdown.

    Fix: the visible code fetched the workers' task and log queues into
    ``taskq``/``logq`` but then only drained ``self.feedback_q``, leaving the
    two locals unused — drain all three with the same ``clearQ`` helper the
    code already uses.
    """
    # All workers share one module queue and one log queue, so reading them
    # from the first worker is sufficient.
    taskq = self.autogen_workers[0].module_queue
    logq = self.autogen_workers[0].log_q
    clearQ(taskq)
    clearQ(logq)
    clearQ(self.feedback_q)
def TerminateWorkers(self):
    # Ask every worker process to stop: workers check this shared event
    # (error_event.is_set()) between modules and bail out when it is set.
    self.error_event.set()
134 self
.feedback_q
.put(None)
class AutoGenWorkerInProcess(mp.Process):
    """Worker process that pulls module descriptions off a shared queue and
    runs ModuleAutoGen for each one, reporting results on a feedback queue.

    Fix: the visible ``__init__`` accepted ``log_q`` but never stored it,
    while the run path calls ``EdkLogger.LogClientInitialize(self.log_q)`` —
    restore the assignment.
    """
    def __init__(self, module_queue, data_pipe_file_path, feedback_q,
                 file_lock, share_data, log_q, error_event):
        """Create the worker.

        module_queue        -- shared queue of module tuples to process
        data_pipe_file_path -- path of the pickled MemoryDataPipe to load
        feedback_q          -- queue for reporting task results / "Done"
        file_lock           -- inter-process lock guarding the data-pipe file
        share_data          -- manager-shared data structure
        log_q               -- queue this worker sends log records through
        error_event         -- shared event signalling the worker to stop
        """
        mp.Process.__init__(self)
        self.module_queue = module_queue
        self.data_pipe_file_path = data_pipe_file_path
        self.data_pipe = None               # loaded lazily in run()
        self.feedback_q = feedback_q
        self.PlatformMetaFileSet = {}       # (filepath, root) -> platform meta file cache
        self.file_lock = file_lock
        self.share_data = share_data
        self.log_q = log_q                  # was dropped; EdkLogger.LogClientInitialize reads it
        self.error_event = error_event
def GetPlatformMetaFile(self, filepath, root):
    """Return the cached platform meta-file object for (filepath, root),
    creating the cache entry on first use.

    Fix: as extracted, an unconditional ``return`` preceded the cache-populate
    code, making it unreachable; the two returns around the dict assignment
    clearly intend memoization into ``self.PlatformMetaFileSet``, so implement
    it with an explicit membership check.
    """
    key = (filepath, root)
    if key not in self.PlatformMetaFileSet:
        # First request for this platform/workspace pair: seed the cache.
        self.PlatformMetaFileSet[key] = filepath
    return self.PlatformMetaFileSet[key]
157 if not os
.path
.exists(self
.data_pipe_file_path
):
158 self
.feedback_q
.put(taskname
+ ":" + "load data pipe %s failed." % self
.data_pipe_file_path
)
159 self
.data_pipe
= MemoryDataPipe()
160 self
.data_pipe
.load(self
.data_pipe_file_path
)
161 EdkLogger
.LogClientInitialize(self
.log_q
)
162 loglevel
= self
.data_pipe
.Get("LogLevel")
164 loglevel
= EdkLogger
.INFO
165 EdkLogger
.SetLevel(loglevel
)
166 target
= self
.data_pipe
.Get("P_Info").get("Target")
167 toolchain
= self
.data_pipe
.Get("P_Info").get("ToolChain")
168 archlist
= self
.data_pipe
.Get("P_Info").get("ArchList")
170 active_p
= self
.data_pipe
.Get("P_Info").get("ActivePlatform")
171 workspacedir
= self
.data_pipe
.Get("P_Info").get("WorkspaceDir")
172 PackagesPath
= os
.getenv("PACKAGES_PATH")
173 mws
.setWs(workspacedir
, PackagesPath
)
174 self
.Wa
= WorkSpaceInfo(
175 workspacedir
,active_p
,target
,toolchain
,archlist
177 self
.Wa
._SrcTimeStamp
= self
.data_pipe
.Get("Workspace_timestamp")
178 GlobalData
.gGlobalDefines
= self
.data_pipe
.Get("G_defines")
179 GlobalData
.gCommandLineDefines
= self
.data_pipe
.Get("CL_defines")
180 os
.environ
._data
= self
.data_pipe
.Get("Env_Var")
181 GlobalData
.gWorkspace
= workspacedir
182 GlobalData
.gDisableIncludePathCheck
= False
183 GlobalData
.gFdfParser
= self
.data_pipe
.Get("FdfParser")
184 GlobalData
.gDatabasePath
= self
.data_pipe
.Get("DatabasePath")
185 GlobalData
.gBinCacheSource
= self
.data_pipe
.Get("BinCacheSource")
186 GlobalData
.gBinCacheDest
= self
.data_pipe
.Get("BinCacheDest")
187 GlobalData
.gCacheIR
= self
.data_pipe
.Get("CacheIR")
188 GlobalData
.gEnableGenfdsMultiThread
= self
.data_pipe
.Get("EnableGenfdsMultiThread")
189 GlobalData
.file_lock
= self
.file_lock
190 CommandTarget
= self
.data_pipe
.Get("CommandTarget")
191 pcd_from_build_option
= []
192 for pcd_tuple
in self
.data_pipe
.Get("BuildOptPcd"):
193 pcd_id
= ".".join((pcd_tuple
[0],pcd_tuple
[1]))
194 if pcd_tuple
[2].strip():
195 pcd_id
= ".".join((pcd_id
,pcd_tuple
[2]))
196 pcd_from_build_option
.append("=".join((pcd_id
,pcd_tuple
[3])))
197 GlobalData
.BuildOptionPcd
= pcd_from_build_option
199 FfsCmd
= self
.data_pipe
.Get("FfsCommand")
202 GlobalData
.FfsCmd
= FfsCmd
203 PlatformMetaFile
= self
.GetPlatformMetaFile(self
.data_pipe
.Get("P_Info").get("ActivePlatform"),
204 self
.data_pipe
.Get("P_Info").get("WorkspaceDir"))
205 libConstPcd
= self
.data_pipe
.Get("LibConstPcd")
206 Refes
= self
.data_pipe
.Get("REFS")
207 GlobalData
.libConstPcd
= libConstPcd
208 GlobalData
.Refes
= Refes
210 if self
.module_queue
.empty():
212 if self
.error_event
.is_set():
215 module_file
,module_root
,module_path
,module_basename
,module_originalpath
,module_arch
,IsLib
= self
.module_queue
.get_nowait()
216 modulefullpath
= os
.path
.join(module_root
,module_file
)
217 taskname
= " : ".join((modulefullpath
,module_arch
))
218 module_metafile
= PathClass(module_file
,module_root
)
220 module_metafile
.Path
= module_path
222 module_metafile
.BaseName
= module_basename
223 if module_originalpath
:
224 module_metafile
.OriginalPath
= PathClass(module_originalpath
,module_root
)
226 target
= self
.data_pipe
.Get("P_Info").get("Target")
227 toolchain
= self
.data_pipe
.Get("P_Info").get("ToolChain")
228 Ma
= ModuleAutoGen(self
.Wa
,module_metafile
,target
,toolchain
,arch
,PlatformMetaFile
,self
.data_pipe
)
231 if (Ma
.MetaFile
.File
,Ma
.MetaFile
.Root
,Ma
.Arch
,Ma
.MetaFile
.Path
) in libConstPcd
:
232 Ma
.ConstPcd
= libConstPcd
[(Ma
.MetaFile
.File
,Ma
.MetaFile
.Root
,Ma
.Arch
,Ma
.MetaFile
.Path
)]
233 if (Ma
.MetaFile
.File
,Ma
.MetaFile
.Root
,Ma
.Arch
,Ma
.MetaFile
.Path
) in Refes
:
234 Ma
.ReferenceModules
= Refes
[(Ma
.MetaFile
.File
,Ma
.MetaFile
.Root
,Ma
.Arch
,Ma
.MetaFile
.Path
)]
235 if GlobalData
.gBinCacheSource
and CommandTarget
in [None, "", "all"]:
236 Ma
.GenModuleFilesHash(GlobalData
.gCacheIR
)
237 Ma
.GenPreMakefileHash(GlobalData
.gCacheIR
)
238 if Ma
.CanSkipbyPreMakefileCache(GlobalData
.gCacheIR
):
241 Ma
.CreateCodeFile(False)
242 Ma
.CreateMakeFile(False,GenFfsList
=FfsCmd
.get((Ma
.MetaFile
.File
, Ma
.Arch
),[]))
244 if GlobalData
.gBinCacheSource
and CommandTarget
in [None, "", "all"]:
245 Ma
.GenMakeHeaderFilesHash(GlobalData
.gCacheIR
)
246 Ma
.GenMakeHash(GlobalData
.gCacheIR
)
247 if Ma
.CanSkipbyMakeCache(GlobalData
.gCacheIR
):
252 traceback
.print_exc(file=sys
.stdout
)
253 self
.feedback_q
.put(taskname
)
255 self
.feedback_q
.put("Done")
def printStatus(self):
    """Print per-process cache statistics for debugging: AutoGen and
    AutoGenInfo cache sizes, plus workspace-database build objects grouped
    by meta-file extension (dec/dsc/inf).

    Fix: the visible code indexed and read ``groupobj`` without ever
    initializing it; the paired append/assign branches imply a dict-of-lists
    grouping, restored here with ``setdefault``.
    """
    print("Processs ID: %d Run %d modules in AutoGen " % (os.getpid(), len(AutoGen.Cache())))
    print("Processs ID: %d Run %d modules in AutoGenInfo " % (os.getpid(), len(AutoGenInfo.GetCache())))
    groupobj = {}
    for buildobj in BuildDB.BuildObject.GetCache().values():
        name = str(buildobj)
        lower = name.lower()
        # A path could in principle match more than one suffix check in the
        # original; keep the independent ifs rather than elif to match it.
        if lower.endswith("dec"):
            groupobj.setdefault('dec', []).append(name)
        if lower.endswith("dsc"):
            groupobj.setdefault('dsc', []).append(name)
        if lower.endswith("inf"):
            groupobj.setdefault('inf', []).append(name)
    print("Processs ID: %d Run %d pkg in WDB " % (os.getpid(), len(groupobj.get("dec", []))))
    print("Processs ID: %d Run %d pla in WDB " % (os.getpid(), len(groupobj.get("dsc", []))))
    print("Processs ID: %d Run %d inf in WDB " % (os.getpid(), len(groupobj.get("inf", []))))