BaseTools: Enable Multiple Process AutoGen
BZ: https://bugzilla.tianocore.org/show_bug.cgi?id=1875

Assign the module AutoGen tasks to multiple sub-processes.

Cc: Liming Gao <liming.gao@intel.com>
Signed-off-by: Bob Feng <bob.c.feng@intel.com>
Acked-by: Laszlo Ersek <lersek@redhat.com>
Tested-by: Laszlo Ersek <lersek@redhat.com>
Acked-by: Liming Gao <liming.gao@intel.com>
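For orientation, here is a minimal sketch (not part of this patch) of how the pieces introduced below are meant to cooperate: the build driver queues one work item per module, spawns several AutoGenWorkerInProcess processes that consume the queue, and runs an AutoGenManager thread that collects their feedback. The driver-side call site lives in build.py and is not shown in this diff, so the helper name start_autogen_workers, the worker count, and the data-pipe path are illustrative assumptions; only the constructor signatures are taken from the new AutoGenWorker.py below.

# Hypothetical driver-side wiring; class signatures match the new AutoGenWorker.py.
import multiprocessing as mp
from AutoGen.AutoGenWorker import AutoGenWorkerInProcess, AutoGenManager

def start_autogen_workers(module_list, data_pipe_path, worker_num=4):
    module_queue = mp.Queue()      # one 7-field tuple per module/arch to process
    feedback_q   = mp.Queue()      # workers report "Done" or the name of a failed task
    file_lock    = mp.Lock()       # serializes loading of the pickled data pipe
    error_event  = mp.Event()      # set by the manager to stop all workers on error
    for item in module_list:
        # (File, Root, Path, BaseName, OriginalPath, Arch, IsLib), as unpacked in run()
        module_queue.put(item)
    workers = [AutoGenWorkerInProcess(module_queue, data_pipe_path,
                                      feedback_q, file_lock, error_event)
               for _ in range(worker_num)]
    manager = AutoGenManager(workers, feedback_q, error_event)
    manager.start()
    for w in workers:
        w.start()
    return workers, manager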
BaseTools/Source/Python/AutoGen/AutoGenWorker.py (new file, 191 lines)
@@ -0,0 +1,191 @@
## @file
# Create makefile for MS nmake and GNU make
#
# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
from __future__ import absolute_import
import multiprocessing as mp
import threading
from Common.Misc import PathClass
from AutoGen.ModuleAutoGen import ModuleAutoGen
from AutoGen.ModuleAutoGenHelper import WorkSpaceInfo,AutoGenInfo
import Common.GlobalData as GlobalData
import Common.EdkLogger as EdkLogger
import os
from Common.MultipleWorkspace import MultipleWorkspace as mws
from AutoGen.AutoGen import AutoGen
from Workspace.WorkspaceDatabase import BuildDB
from queue import Empty
import traceback
import sys
from AutoGen.DataPipe import MemoryDataPipe
def clearQ(q):
    try:
        while True:
            q.get_nowait()
    except Empty:
        pass
class AutoGenManager(threading.Thread):
    def __init__(self,autogen_workers, feedback_q,error_event):
        super(AutoGenManager,self).__init__()
        self.autogen_workers = autogen_workers
        self.feedback_q = feedback_q
        self.terminate = False
        self.Status = True
        self.error_event = error_event
    def run(self):
        try:
            fin_num = 0
            while True:
                badnews = self.feedback_q.get()
                if badnews is None:
                    break
                if badnews == "Done":
                    fin_num += 1
                else:
                    self.Status = False
                    self.TerminateWorkers()
                if fin_num == len(self.autogen_workers):
                    self.clearQueue()
                    for w in self.autogen_workers:
                        w.join()
                    break
        except Exception:
            return

    def clearQueue(self):
        taskq = self.autogen_workers[0].module_queue
        clearQ(taskq)
        clearQ(self.feedback_q)

    def TerminateWorkers(self):
        self.error_event.set()
    def kill(self):
        self.feedback_q.put(None)
class AutoGenWorkerInProcess(mp.Process):
    def __init__(self,module_queue,data_pipe_file_path,feedback_q,file_lock,error_event):
        mp.Process.__init__(self)
        self.module_queue = module_queue
        self.data_pipe_file_path =data_pipe_file_path
        self.data_pipe = None
        self.feedback_q = feedback_q
        self.PlatformMetaFileSet = {}
        self.file_lock = file_lock
        self.error_event = error_event
    def GetPlatformMetaFile(self,filepath,root):
        try:
            return self.PlatformMetaFileSet[(filepath,root)]
        except:
            self.PlatformMetaFileSet[(filepath,root)] = filepath
            return self.PlatformMetaFileSet[(filepath,root)]
    def run(self):
        try:
            taskname = "Init"
            with self.file_lock:
                if not os.path.exists(self.data_pipe_file_path):
                    self.feedback_q.put(taskname + ":" + "load data pipe %s failed." % self.data_pipe_file_path)
                self.data_pipe = MemoryDataPipe()
                self.data_pipe.load(self.data_pipe_file_path)
            EdkLogger.Initialize()
            loglevel = self.data_pipe.Get("LogLevel")
            if not loglevel:
                loglevel = EdkLogger.INFO
            EdkLogger.SetLevel(loglevel)
            logfile = self.data_pipe.Get("LogFile")
            if logfile:
                EdkLogger.SetLogFile(logfile)
            target = self.data_pipe.Get("P_Info").get("Target")
            toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
            archlist = self.data_pipe.Get("P_Info").get("ArchList")

            active_p = self.data_pipe.Get("P_Info").get("ActivePlatform")
            workspacedir = self.data_pipe.Get("P_Info").get("WorkspaceDir")
            PackagesPath = os.getenv("PACKAGES_PATH")
            mws.setWs(workspacedir, PackagesPath)
            self.Wa = WorkSpaceInfo(
                workspacedir,active_p,target,toolchain,archlist
                )
            self.Wa._SrcTimeStamp = self.data_pipe.Get("Workspace_timestamp")
            GlobalData.gGlobalDefines = self.data_pipe.Get("G_defines")
            GlobalData.gCommandLineDefines = self.data_pipe.Get("CL_defines")
            os.environ._data = self.data_pipe.Get("Env_Var")
            GlobalData.gWorkspace = workspacedir
            GlobalData.gDisableIncludePathCheck = False
            GlobalData.gFdfParser = self.data_pipe.Get("FdfParser")
            GlobalData.gDatabasePath = self.data_pipe.Get("DatabasePath")
            pcd_from_build_option = []
            for pcd_tuple in self.data_pipe.Get("BuildOptPcd"):
                pcd_id = ".".join((pcd_tuple[0],pcd_tuple[1]))
                if pcd_tuple[2].strip():
                    pcd_id = ".".join((pcd_id,pcd_tuple[2]))
                pcd_from_build_option.append("=".join((pcd_id,pcd_tuple[3])))
            GlobalData.BuildOptionPcd = pcd_from_build_option
            module_count = 0
            FfsCmd = self.data_pipe.Get("FfsCommand")
            if FfsCmd is None:
                FfsCmd = {}
            PlatformMetaFile = self.GetPlatformMetaFile(self.data_pipe.Get("P_Info").get("ActivePlatform"),
                                                        self.data_pipe.Get("P_Info").get("WorkspaceDir"))
            libConstPcd = self.data_pipe.Get("LibConstPcd")
            Refes = self.data_pipe.Get("REFS")
            while True:
                if self.module_queue.empty():
                    break
                if self.error_event.is_set():
                    break
                module_count += 1
                module_file,module_root,module_path,module_basename,module_originalpath,module_arch,IsLib = self.module_queue.get_nowait()
                modulefullpath = os.path.join(module_root,module_file)
                taskname = " : ".join((modulefullpath,module_arch))
                module_metafile = PathClass(module_file,module_root)
                if module_path:
                    module_metafile.Path = module_path
                if module_basename:
                    module_metafile.BaseName = module_basename
                if module_originalpath:
                    module_metafile.OriginalPath = PathClass(module_originalpath,module_root)
                arch = module_arch
                target = self.data_pipe.Get("P_Info").get("Target")
                toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
                Ma = ModuleAutoGen(self.Wa,module_metafile,target,toolchain,arch,PlatformMetaFile,self.data_pipe)
                Ma.IsLibrary = IsLib
                if IsLib:
                    if (Ma.MetaFile.File,Ma.MetaFile.Root,Ma.Arch,Ma.MetaFile.Path) in libConstPcd:
                        Ma.ConstPcd = libConstPcd[(Ma.MetaFile.File,Ma.MetaFile.Root,Ma.Arch,Ma.MetaFile.Path)]
                    if (Ma.MetaFile.File,Ma.MetaFile.Root,Ma.Arch,Ma.MetaFile.Path) in Refes:
                        Ma.ReferenceModules = Refes[(Ma.MetaFile.File,Ma.MetaFile.Root,Ma.Arch,Ma.MetaFile.Path)]
                Ma.CreateCodeFile(False)
                Ma.CreateMakeFile(False,GenFfsList=FfsCmd.get((Ma.MetaFile.File, Ma.Arch),[]))
        except Empty:
            pass
        except:
            traceback.print_exc(file=sys.stdout)
            self.feedback_q.put(taskname)
        finally:
            self.feedback_q.put("Done")
    def printStatus(self):
        print("Processs ID: %d Run %d modules in AutoGen " % (os.getpid(),len(AutoGen.Cache())))
        print("Processs ID: %d Run %d modules in AutoGenInfo " % (os.getpid(),len(AutoGenInfo.GetCache())))
        groupobj = {}
        for buildobj in BuildDB.BuildObject.GetCache().values():
            if str(buildobj).lower().endswith("dec"):
                try:
                    groupobj['dec'].append(str(buildobj))
                except:
                    groupobj['dec'] = [str(buildobj)]
            if str(buildobj).lower().endswith("dsc"):
                try:
                    groupobj['dsc'].append(str(buildobj))
                except:
                    groupobj['dsc'] = [str(buildobj)]

            if str(buildobj).lower().endswith("inf"):
                try:
                    groupobj['inf'].append(str(buildobj))
                except:
                    groupobj['inf'] = [str(buildobj)]

        print("Processs ID: %d Run %d pkg in WDB " % (os.getpid(),len(groupobj.get("dec",[]))))
        print("Processs ID: %d Run %d pla in WDB " % (os.getpid(),len(groupobj.get("dsc",[]))))
        print("Processs ID: %d Run %d inf in WDB " % (os.getpid(),len(groupobj.get("inf",[]))))
BaseTools/Source/Python/AutoGen/DataPipe.py
@@ -11,6 +11,7 @@ import Common.GlobalData as GlobalData
import os
import pickle
from pickle import HIGHEST_PROTOCOL
from Common import EdkLogger

class PCD_DATA():
    def __init__(self,TokenCName,TokenSpaceGuidCName,Type,DatumType,SkuInfoList,DefaultValue,
@@ -34,6 +35,7 @@ class DataPipe(object):
    def __init__(self, BuildDir=None):
        self.data_container = {}
        self.BuildDir = BuildDir
        self.dump_file = ""

class MemoryDataPipe(DataPipe):

@@ -41,6 +43,7 @@ class MemoryDataPipe(DataPipe):
        return self.data_container.get(key)

    def dump(self,file_path):
        self.dump_file = file_path
        with open(file_path,'wb') as fd:
            pickle.dump(self.data_container,fd,pickle.HIGHEST_PROTOCOL)

@@ -71,7 +74,7 @@ class MemoryDataPipe(DataPipe):
        for m in PlatformInfo.Platform.Modules:
            m_pcds = PlatformInfo.Platform.Modules[m].Pcds
            if m_pcds:
                ModulePcds[(m.File,m.Root)] = [PCD_DATA(
                ModulePcds[(m.File,m.Root,m.Arch)] = [PCD_DATA(
                    pcd.TokenCName,pcd.TokenSpaceGuidCName,pcd.Type,
                    pcd.DatumType,pcd.SkuInfoList,pcd.DefaultValue,
                    pcd.MaxDatumSize,pcd.UserDefinedDefaultStoresFlag,pcd.validateranges,
@@ -83,11 +86,18 @@ class MemoryDataPipe(DataPipe):

        #Module's Library Instance
        ModuleLibs = {}
        libModules = {}
        for m in PlatformInfo.Platform.Modules:
            module_obj = BuildDB.BuildObject[m,PlatformInfo.Arch,PlatformInfo.BuildTarget,PlatformInfo.ToolChain]
            Libs = GetModuleLibInstances(module_obj, PlatformInfo.Platform, BuildDB.BuildObject, PlatformInfo.Arch,PlatformInfo.BuildTarget,PlatformInfo.ToolChain)
            ModuleLibs[(m.File,m.Root,module_obj.Arch)] = [(l.MetaFile.File,l.MetaFile.Root,l.Arch) for l in Libs]
            for lib in Libs:
                try:
                    libModules[(lib.MetaFile.File,lib.MetaFile.Root,lib.Arch,lib.MetaFile.Path)].append((m.File,m.Root,module_obj.Arch,m.Path))
                except:
                    libModules[(lib.MetaFile.File,lib.MetaFile.Root,lib.Arch,lib.MetaFile.Path)] = [(m.File,m.Root,module_obj.Arch,m.Path)]
            ModuleLibs[(m.File,m.Root,module_obj.Arch,m.Path)] = [(l.MetaFile.File,l.MetaFile.Root,l.Arch,l.MetaFile.Path) for l in Libs]
        self.DataContainer = {"DEPS":ModuleLibs}
        self.DataContainer = {"REFS":libModules}

        #Platform BuildOptions

@@ -143,5 +153,8 @@ class MemoryDataPipe(DataPipe):

        self.DataContainer = {"GuidDict": PlatformInfo.Platform._GuidDict}

        self.DataContainer = {"DatabasePath":GlobalData.gDatabasePath}
        self.DataContainer = {"FdfParser": True if GlobalData.gFdfParser else False}

        self.DataContainer = {"LogLevel": EdkLogger.GetLevel()}
        self.DataContainer = {"LogFile": GlobalData.gOptions.LogFile if GlobalData.gOptions.LogFile is not None else ""}
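The data pipe is the hand-off mechanism between the main build process and the AutoGen workers: the platform side fills a MemoryDataPipe and dumps it to a pickle file, and each worker loads that file and reads values back with Get(). A rough sketch of that round trip follows, assuming a hypothetical build directory and file name; MemoryDataPipe.load() is the counterpart used in AutoGenWorker.py above and is not part of these hunks, and the repeated DataContainer assignments are assumed to merge one key each into the underlying container, as the repeated assignments in FillData suggest.

# Producer side (main process): fill the pipe and pickle it once.
from AutoGen.DataPipe import MemoryDataPipe
from Common import EdkLogger

pipe = MemoryDataPipe("Build/MyPlatform")                 # hypothetical BuildDir
pipe.DataContainer = {"LogLevel": EdkLogger.GetLevel()}   # one key per assignment
pipe.DataContainer = {"LogFile": ""}
pipe.dump("Build/MyPlatform/GlobalVar.bin")               # hypothetical file name

# Consumer side (worker process): load the pickle and read values back.
worker_pipe = MemoryDataPipe()
worker_pipe.load("Build/MyPlatform/GlobalVar.bin")
loglevel = worker_pipe.Get("LogLevel")                    # returns data_container.get(key)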
BaseTools/Source/Python/AutoGen/GenC.py
@@ -1472,8 +1472,8 @@ def CreateModuleEntryPointCode(Info, AutoGenC, AutoGenH):

    if Info.ModuleType in [SUP_MODULE_PEI_CORE, SUP_MODULE_DXE_CORE, SUP_MODULE_SMM_CORE, SUP_MODULE_MM_CORE_STANDALONE]:
        if Info.SourceFileList:
          if NumEntryPoints != 1:
              EdkLogger.error(
            if NumEntryPoints != 1:
                EdkLogger.error(
                    "build",
                    AUTOGEN_ERROR,
                    '%s must have exactly one entry point' % Info.ModuleType,
BaseTools/Source/Python/AutoGen/ModuleAutoGen.py
@@ -1686,6 +1686,7 @@ class ModuleAutoGen(AutoGen):
        if not self.IsLibrary and CreateLibraryMakeFile:
            for LibraryAutoGen in self.LibraryAutoGenList:
                LibraryAutoGen.CreateMakeFile()

        # Don't enable if hash feature enabled, CanSkip uses timestamps to determine build skipping
        if not GlobalData.gUseHashCache and self.CanSkip():
            return
@@ -1729,7 +1730,6 @@
        if not self.IsLibrary and CreateLibraryCodeFile:
            for LibraryAutoGen in self.LibraryAutoGenList:
                LibraryAutoGen.CreateCodeFile()

        # Don't enable if hash feature enabled, CanSkip uses timestamps to determine build skipping
        if not GlobalData.gUseHashCache and self.CanSkip():
            return
BaseTools/Source/Python/AutoGen/PlatformAutoGen.py
@@ -133,6 +133,12 @@ class PlatformAutoGen(AutoGen):
        self.DataPipe.FillData(self)

        return True
    def FillData_LibConstPcd(self):
        libConstPcd = {}
        for LibAuto in self.LibraryAutoGenList:
            if LibAuto.ConstPcd:
                libConstPcd[(LibAuto.MetaFile.File,LibAuto.MetaFile.Root,LibAuto.Arch,LibAuto.MetaFile.Path)] = LibAuto.ConstPcd
        self.DataPipe.DataContainer = {"LibConstPcd":libConstPcd}
    ## hash() operator of PlatformAutoGen
    #
    # The platform file path and arch string will be used to represent
@@ -162,7 +168,7 @@
            return

        for Ma in self.ModuleAutoGenList:
            Ma.CreateCodeFile(True)
            Ma.CreateCodeFile(CreateModuleCodeFile)

    ## Generate Fds Command
    @cached_property
@@ -179,9 +185,9 @@
        for Ma in self._MaList:
            key = (Ma.MetaFile.File, self.Arch)
            if key in FfsCommand:
                Ma.CreateMakeFile(True, FfsCommand[key])
                Ma.CreateMakeFile(CreateModuleMakeFile, FfsCommand[key])
            else:
                Ma.CreateMakeFile(True)
                Ma.CreateMakeFile(CreateModuleMakeFile)

        # no need to create makefile for the platform more than once
        if self.IsMakeFileCreated:
@@ -1086,10 +1092,10 @@
                Libs = GetModuleLibInstances(module_obj, self.Platform, self.BuildDatabase, self.Arch,self.BuildTarget,self.ToolChain)
            else:
                Libs = []
            ModuleLibs.update( set([(l.MetaFile.File,l.MetaFile.Root,l.Arch,True) for l in Libs]))
            ModuleLibs.update( set([(l.MetaFile.File,l.MetaFile.Root,l.MetaFile.Path,l.MetaFile.BaseName,l.MetaFile.OriginalPath,l.Arch,True) for l in Libs]))
            if WithoutPcd and module_obj.PcdIsDriver:
                continue
            ModuleLibs.add((m.File,m.Root,module_obj.Arch,False))
            ModuleLibs.add((m.File,m.Root,m.Path,m.BaseName,m.OriginalPath,module_obj.Arch,bool(module_obj.LibraryClass)))

        return ModuleLibs

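The widened tuples above line up with what AutoGenWorkerInProcess.run() unpacks from the module queue. A small illustration of one work item follows, with hypothetical values and module_queue as in the sketch near the top of this page; real items are built from PathClass metafiles, so Path, BaseName and OriginalPath normally carry resolved values rather than None.

# One queue entry: the 7 fields mirror the tuple built here and unpacked in
# AutoGenWorkerInProcess.run() as
# module_file, module_root, module_path, module_basename, module_originalpath, module_arch, IsLib.
work_item = (
    "MdeModulePkg/Core/Dxe/DxeMain.inf",   # File (hypothetical example module)
    "/path/to/edk2",                       # Root (hypothetical workspace root)
    None,                                  # Path: worker falls back to File + Root if empty
    None,                                  # BaseName: optional override
    None,                                  # OriginalPath: optional override
    "X64",                                 # Arch
    False,                                 # IsLib: True for library instances
)
module_queue.put(work_item)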
BaseTools/Source/Python/AutoGen/WorkspaceAutoGen.py
@@ -113,6 +113,8 @@ class WorkspaceAutoGen(AutoGen):
        self.ProcessMixedPcd()
        self.VerifyPcdsFromFDF()
        self.CollectAllPcds()
        for Pa in self.AutoGenObjectList:
            Pa.FillData_LibConstPcd()
        self.GeneratePkgLevelHash()
        #
        # Check PCDs token value conflict in each DEC file.
@@ -881,7 +883,7 @@
        if not CreateDepsMakeFile:
            return
        for Pa in self.AutoGenObjectList:
            Pa.CreateMakeFile(True)
            Pa.CreateMakeFile(CreateDepsMakeFile)

    ## Create autogen code for platform and modules
    #
@@ -895,7 +897,7 @@
        if not CreateDepsCodeFile:
            return
        for Pa in self.AutoGenObjectList:
            Pa.CreateCodeFile(True)
            Pa.CreateCodeFile(CreateDepsCodeFile)

    ## Create AsBuilt INF file the platform
    #