--- /dev/null
+RunToolFromSource
\ No newline at end of file
$(DEBUG_DIR)(+)$(MODULE_NAME).efi\r
\r
<Command.MSFT, Command.INTEL, Command.RVCT, Command.ARMGCC> \r
- GenFw -e $(MODULE_TYPE) -o ${dst} ${src}\r
+ "$(GENFW)" -e $(MODULE_TYPE) -o ${dst} ${src} $(GENFW_FLAGS)\r
$(CP) ${dst} $(OUTPUT_DIR)\r
$(CP) ${dst} $(BIN_DIR)\r
-$(CP) $(DEBUG_DIR)(+)*.map $(OUTPUT_DIR)\r
$(OBJCOPY) --only-keep-debug ${src} $(BIN_DIR)(+)$(MODULE_NAME).debug\r
$(OBJCOPY) --strip-unneeded ${src}\r
$(OBJCOPY) --add-gnu-debuglink=$(BIN_DIR)(+)$(MODULE_NAME).debug ${src}\r
- GenFw -e $(MODULE_TYPE) -o ${dst} ${src}\r
+ "$(GENFW)" -e $(MODULE_TYPE) -o ${dst} ${src} $(GENFW_FLAGS)\r
$(CP) ${dst} $(OUTPUT_DIR)\r
$(CP) ${dst} $(BIN_DIR)\r
-$(CP) $(DEBUG_DIR)(+)*.map $(OUTPUT_DIR)\r
"$(MTOC)" -subsystem $(MODULE_TYPE) $(MTOC_FLAGS) ${src} $(DEBUG_DIR)(+)$(MODULE_NAME).pecoff\r
# create symbol file for GDB debug\r
-$(DSYMUTIL) ${src}\r
- GenFw -e $(MODULE_TYPE) -o ${dst} $(DEBUG_DIR)(+)$(MODULE_NAME).pecoff \r
+ "$(GENFW)" -e $(MODULE_TYPE) -o ${dst} $(DEBUG_DIR)(+)$(MODULE_NAME).pecoff $(GENFW_FLAGS)\r
$(CP) ${dst} $(OUTPUT_DIR)\r
$(CP) ${dst} $(BIN_DIR)\r
-$(CP) $(DEBUG_DIR)(+)*.map $(OUTPUT_DIR)\r
<Command.MSFT, Command.INTEL>\r
"$(ASLCC)" /Fo$(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj $(CC_FLAGS) $(ASLCC_FLAGS) $(INC) ${src}\r
"$(ASLDLINK)" /OUT:$(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(ASLDLINK_FLAGS) $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj\r
- GenFw -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll\r
+ "$(GENFW)" -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(GENFW_FLAGS)\r
\r
<Command.GCC>\r
"$(ASLCC)" -o $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj $(CC_FLAGS) $(ASLCC_FLAGS) $(INC) ${src}\r
"$(ASLDLINK)" -o $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(ASLDLINK_FLAGS) $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj\r
- GenFw -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll\r
+ "$(GENFW)" -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(GENFW_FLAGS)\r
\r
[Acpi-Table-Code-File]\r
<InputFile>\r
<Command.MSFT, Command.INTEL>\r
"$(ASLCC)" /Fo$(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj $(CC_FLAGS) $(ASLCC_FLAGS) $(INC) ${src}\r
"$(ASLDLINK)" /OUT:$(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(ASLDLINK_FLAGS) $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj\r
- GenFw -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll\r
+ "$(GENFW)" -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(GENFW_FLAGS)\r
\r
<Command.GCC>\r
"$(ASLCC)" -o $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj $(CC_FLAGS) $(ASLCC_FLAGS) $(INC) ${src}\r
"$(ASLDLINK)" -o $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(ASLDLINK_FLAGS) $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj\r
- GenFw -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll\r
+ "$(GENFW)" -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(GENFW_FLAGS)\r
\r
<Command.XCODE> \r
"$(ASLCC)" -o $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj $(ASLCC_FLAGS) $(INC) ${src}\r
"$(ASLDLINK)" -o $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(ASLDLINK_FLAGS) $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj\r
"$(MTOC)" -subsystem $(MODULE_TYPE) $(MTOC_FLAGS) $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.efi\r
- GenFw -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.efi\r
+ "$(GENFW)" -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.efi $(GENFW_FLAGS)\r
\r
\r
[Masm16-Code-File]\r
$(OUTPUT_DIR)(+)${s_base}.mcb\r
\r
<Command>\r
- GenFw -o ${dst} -m ${src}\r
+ "$(GENFW)" -o ${dst} -m ${src} $(GENFW_FLAGS)\r
\r
[Microcode-Binary-File]\r
<InputFile>\r
$(OUTPUT_DIR)(+)$(MODULE_NAME).bin\r
\r
<Command>\r
- GenFw -o ${dst} -j $(MICROCODE_BINARY_FILES)\r
+ "$(GENFW)" -o ${dst} -j $(MICROCODE_BINARY_FILES) $(GENFW_FLAGS)\r
-$(CP) ${dst} $(BIN_DIR)\r
\r
[EFI-Image-File]\r
$(OUTPUT_DIR)(+)$(MODULE_NAME)hii.lib\r
\r
<Command.MSFT, Command.INTEL>\r
- GenFw -o $(OUTPUT_DIR)(+)$(MODULE_NAME)hii.rc -g $(MODULE_GUID) --hiipackage $(HII_BINARY_PACKAGES)\r
+ "$(GENFW)" -o $(OUTPUT_DIR)(+)$(MODULE_NAME)hii.rc -g $(MODULE_GUID) --hiipackage $(HII_BINARY_PACKAGES) $(GENFW_FLAGS)\r
"$(RC)" /Fo${dst} $(OUTPUT_DIR)(+)$(MODULE_NAME)hii.rc\r
\r
<Command.GCC>\r
- GenFw -o $(OUTPUT_DIR)(+)$(MODULE_NAME)hii.rc -g $(MODULE_GUID) --hiibinpackage $(HII_BINARY_PACKAGES)\r
+ "$(GENFW)" -o $(OUTPUT_DIR)(+)$(MODULE_NAME)hii.rc -g $(MODULE_GUID) --hiibinpackage $(HII_BINARY_PACKAGES) $(GENFW_FLAGS)\r
"$(RC)" $(RC_FLAGS) $(OUTPUT_DIR)(+)$(MODULE_NAME)hii.rc ${dst}\r
# cores or CPUs. Less than 2 means disable multithread build.\r
MAX_CONCURRENT_THREAD_NUMBER = 1\r
\r
-# MULTIPLE_THREAD BOOLEAN Optional If "Enable", multi-thread is enable for bulding.\r
-# If "Disable", multi-thread is disable for building.\r
-MULTIPLE_THREAD = Disable\r
-\r
# Build rules definition\r
#\r
#\r
\r
DEFINE ICC11_BINX64 = C:\Program Files\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\ia32_intel64\r
DEFINE ICC11_ASMX64 = C:\Program Files\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\ia32_intel64\r
-DEFINE ICC11_BINX64x86 = C:\Program Files (x86)\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\ia32_intel64\r
-DEFINE ICC11_ASMX64x86 = C:\Program Files (x86)\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\ia32_intel64\r
+DEFINE ICC11_BINX64x86 = C:\Program Files (x86)\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\intel64\r
+DEFINE ICC11_ASMX64x86 = C:\Program Files (x86)\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\intel64\r
\r
DEFINE ICC11_BIN64 = C:\Program Files\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\ia32_ia64\r
DEFINE ICC11_BIN64x86 = C:\Program Files (x86)\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\ia32_ia64\r
*_XCODE32_ARM_PP_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) -E -x assembler-with-cpp -include $(DEST_DIR_DEBUG)/AutoGen.h\r
*_XCODE32_ARM_VFRPP_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) -x c -E -P -DVFRCOMPILE --include $(DEST_DIR_DEBUG)/$(MODULE_NAME)StrDefs.h\r
\r
- DEBUG_XCODE32_ARM_CC_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) -mthumb-interwork -g -Oz -mabi=aapcs -mapcs -fno-short-enums -save-temps -combine -fshort-wchar -fno-strict-aliasing -Wall -Werror -Wno-missing-braces -fomit-frame-pointer -c -include AutoGen.h -mdynamic-no-pic -fno-stack-protector\r
-RELEASE_XCODE32_ARM_CC_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) -mthumb-interwork -Oz -mabi=aapcs -mapcs -fno-short-enums -save-temps -combine -fshort-wchar -fno-strict-aliasing -Wall -Werror -Wno-missing-braces -fomit-frame-pointer -c -include AutoGen.h -mdynamic-no-pic -fno-stack-protector\r
+ DEBUG_XCODE32_ARM_CC_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) -mthumb-interwork -g -Oz -mabi=aapcs -mapcs -fno-short-enums -save-temps -combine -fshort-wchar -fno-strict-aliasing -Wall -Werror -Wno-missing-braces -fomit-frame-pointer -c -include AutoGen.h -fno-stack-protector\r
+RELEASE_XCODE32_ARM_CC_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) -mthumb-interwork -Oz -mabi=aapcs -mapcs -fno-short-enums -save-temps -combine -fshort-wchar -fno-strict-aliasing -Wall -Werror -Wno-missing-braces -fomit-frame-pointer -c -include AutoGen.h -fno-stack-protector\r
\r
\r
####################################################################################\r
*_*_*_OPTROM_PATH = EfiRom\r
*_*_*_OPTROM_FLAGS = -e\r
\r
+##################\r
+# GenFw tool definitions\r
+##################\r
+*_*_*_GENFW_PATH = GenFw\r
+*_*_*_GENFW_FLAGS = \r
+\r
##################\r
# Asl Compiler definitions\r
##################\r
*_*_*_TIANO_PATH = TianoCompress\r
*_*_*_TIANO_GUID = A31280AD-481E-41B6-95E8-127F4C984779\r
\r
+##################\r
+# BPDG tool definitions\r
+##################\r
+*_*_*_VPDTOOL_PATH = BPDG\r
+*_*_*_VPDTOOL_GUID = 8C3D856A-9BE6-468E-850A-24F7A8D38E08\r
## @file\r
#\r
-# The makefile can be invoked with\r
-# ARCH = x86_64 or x64 for EM64T build\r
-# ARCH = ia32 or IA32 for IA32 build\r
-# ARCH = ia64 or IA64 for IA64 build\r
-#\r
+# The makefile can be invoked with
+# ARCH = x86_64 or x64 for EM64T build
+# ARCH = ia32 or IA32 for IA32 build
+# ARCH = ia64 or IA64 for IA64 build
+#
# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>\r
# This program and the accompanying materials\r
# are licensed and made available under the terms and conditions of the BSD License\r
# http://opensource.org/licenses/bsd-license.php\r
#\r
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
-# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
-\r
-ARCH ?= IA32\r
-\r
-CYGWIN:=$(findstring CYGWIN, $(shell uname -s))\r
-LINUX:=$(findstring Linux, $(shell uname -s))\r
-DARWIN:=$(findstring Darwin, $(shell uname -s))\r
-\r
-CC = gcc\r
-CXX = g++\r
-AS = gcc\r
-AR = ar\r
-LD = ld\r
-LINKER ?= $(CC)\r
-ifeq ($(ARCH), IA32)\r
-ARCH_INCLUDE = -I $(MAKEROOT)/Include/Ia32/\r
-endif\r
-\r
-ifeq ($(ARCH), X64)\r
-ARCH_INCLUDE = -I $(MAKEROOT)/Include/X64/\r
-endif\r
-\r
-INCLUDE = $(TOOL_INCLUDE) -I $(MAKEROOT) -I $(MAKEROOT)/Include/Common -I $(MAKEROOT)/Include/ -I $(MAKEROOT)/Include/IndustryStandard -I $(MAKEROOT)/Common/ -I .. -I . $(ARCH_INCLUDE) \r
-CPPFLAGS = $(INCLUDE)\r
-CFLAGS = -MD -fshort-wchar -fno-strict-aliasing -fno-merge-constants -nostdlib -Wall -Werror -c -g\r
-LFLAGS =\r
-\r
-#\r
-# Snow Leopard is a 32-bit and 64-bit environment. uname -m returns -i386, but gcc defaults \r
-# to x86_64. So make sure tools match uname -m\r
-#\r
-uname_s = $(shell uname -s)\r
-uname_m = $(shell uname -m)\r
-ifeq ($(uname_s),Darwin)\r
-ifeq ($(uname_m),i386)\r
- CFLAGS += -arch i386\r
- CPPFLAGS += -arch i386\r
- LFLAGS += -arch i386\r
-endif\r
-endif\r
- \r
-.PHONY: all\r
-.PHONY: install\r
-.PHONY: clean\r
-\r
-all:\r
-\r
-$(MAKEROOT)/libs:\r
- mkdir $(MAKEROOT)/libs \r
-\r
-$(MAKEROOT)/bin:\r
- mkdir $(MAKEROOT)/bin\r
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+
+ARCH ?= IA32
+
+CYGWIN:=$(findstring CYGWIN, $(shell uname -s))
+LINUX:=$(findstring Linux, $(shell uname -s))
+DARWIN:=$(findstring Darwin, $(shell uname -s))
+
+CC = gcc
+CXX = g++
+AS = gcc
+AR = ar
+LD = ld
+LINKER ?= $(CC)
+ifeq ($(ARCH), IA32)
+ARCH_INCLUDE = -I $(MAKEROOT)/Include/Ia32/
+endif
+
+ifeq ($(ARCH), X64)
+ARCH_INCLUDE = -I $(MAKEROOT)/Include/X64/
+endif
+
+INCLUDE = $(TOOL_INCLUDE) -I $(MAKEROOT) -I $(MAKEROOT)/Include/Common -I $(MAKEROOT)/Include/ -I $(MAKEROOT)/Include/IndustryStandard -I $(MAKEROOT)/Common/ -I .. -I . $(ARCH_INCLUDE)
+CPPFLAGS = $(INCLUDE)
+CFLAGS = -MD -fshort-wchar -fno-strict-aliasing -fno-merge-constants -nostdlib -Wall -Werror -c -g
+LFLAGS =
+
+#
+# Snow Leopard is a 32-bit and 64-bit environment. uname -m returns -i386, but gcc defaults
+# to x86_64. So make sure tools match uname -m
+#
+uname_s = $(shell uname -s)
+ifeq ($(uname_s),Darwin)
+ CFLAGS += -arch i386
+ CPPFLAGS += -arch i386
+ LFLAGS += -arch i386
+endif
+
+.PHONY: all
+.PHONY: install
+.PHONY: clean
+
+all:
+
+$(MAKEROOT)/libs:
+ mkdir $(MAKEROOT)/libs
+
+$(MAKEROOT)/bin:
+ mkdir $(MAKEROOT)/bin
from GenFds.FdfParser import *\r
from CommonDataClass.CommonClass import SkuInfoClass\r
from Workspace.BuildClassObject import *\r
+import Common.VpdInfoFile as VpdInfoFile\r
\r
## Regular expression for splitting Dependency Expression string into tokens
gDepexTokenPattern = re.compile("(\(|\)|\w+| \S+\.inf)")\r
self._BuildCommand = self.AutoGenObjectList[0].BuildCommand\r
return self._BuildCommand\r
\r
- ## Create makefile for the platform and mdoules in it\r
+ ## Create makefile for the platform and modules in it\r
#\r
# @param CreateDepsMakeFile Flag indicating if the makefile for\r
# modules will be created as well\r
UnicodePcdArray = []\r
HiiPcdArray = []\r
OtherPcdArray = []\r
- for Pcd in self._DynamicPcdList:\r
- # just pick the a value to determine whether is unicode string type\r
- Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]]\r
- PcdValue = Sku.DefaultValue\r
- if Pcd.DatumType == 'VOID*' and PcdValue.startswith("L"):\r
- # if found PCD which datum value is unicode string the insert to left size of UnicodeIndex\r
- UnicodePcdArray.append(Pcd)\r
- elif len(Sku.VariableName) > 0:\r
- # if found HII type PCD then insert to right of UnicodeIndex\r
- HiiPcdArray.append(Pcd)\r
- else:\r
- OtherPcdArray.append(Pcd)\r
- del self._DynamicPcdList[:]\r
+ VpdFile = VpdInfoFile.VpdInfoFile()\r
+ NeedProcessVpdMapFile = False \r
+ \r
+ if (self.Workspace.ArchList[-1] == self.Arch): \r
+ for Pcd in self._DynamicPcdList:\r
+\r
+ # just pick the a value to determine whether is unicode string type\r
+ Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]]\r
+ Sku.VpdOffset = Sku.VpdOffset.strip()\r
+ \r
+ PcdValue = Sku.DefaultValue\r
+ if Pcd.DatumType == 'VOID*' and PcdValue.startswith("L"):\r
+ # if found PCD which datum value is unicode string the insert to left size of UnicodeIndex\r
+ UnicodePcdArray.append(Pcd)\r
+ elif len(Sku.VariableName) > 0:\r
+ # if found HII type PCD then insert to right of UnicodeIndex\r
+ HiiPcdArray.append(Pcd)\r
+ else:\r
+ OtherPcdArray.append(Pcd)\r
+ \r
+ if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:\r
+ if not (self.Platform.VpdToolGuid == None or self.Platform.VpdToolGuid == ''):\r
+ #\r
+ # Fix the optional data of VPD PCD.\r
+ #\r
+ if (Pcd.DatumType.strip() != "VOID*"):\r
+ if Sku.DefaultValue == '':\r
+ Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]].DefaultValue = Pcd.MaxDatumSize\r
+ Pcd.MaxDatumSize = None\r
+ else:\r
+ EdkLogger.error("build", AUTOGEN_ERROR, "PCD setting error",\r
+ File=self.MetaFile,\r
+ ExtraData="\n\tPCD: %s.%s format incorrect in DSC: %s\n\t\t\n"\r
+ % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, self.Platform.MetaFile.Path)) \r
+ \r
+ VpdFile.Add(Pcd, Sku.VpdOffset)\r
+ # if the offset of a VPD is *, then it need to be fixed up by third party tool.\r
+ if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":\r
+ NeedProcessVpdMapFile = True\r
+ \r
+ #\r
+ # Fix the PCDs define in VPD PCD section that never referenced by module.\r
+ # An example is PCD for signature usage.\r
+ # \r
+ for DscPcd in self.Platform.Pcds:\r
+ DscPcdEntry = self.Platform.Pcds[DscPcd]\r
+ if DscPcdEntry.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:\r
+ if not (self.Platform.VpdToolGuid == None or self.Platform.VpdToolGuid == ''):\r
+ FoundFlag = False\r
+ for VpdPcd in VpdFile._VpdArray.keys():\r
+ # This PCD has been referenced by module\r
+ if (VpdPcd.TokenSpaceGuidCName == DscPcdEntry.TokenSpaceGuidCName) and \\r
+ (VpdPcd.TokenCName == DscPcdEntry.TokenCName):\r
+ FoundFlag = True\r
+ \r
+ # Not found, it should be signature\r
+ if not FoundFlag :\r
+ # just pick the a value to determine whether is unicode string type\r
+ Sku = DscPcdEntry.SkuInfoList[DscPcdEntry.SkuInfoList.keys()[0]]\r
+ Sku.VpdOffset = Sku.VpdOffset.strip() \r
+ \r
+ # Need to iterate DEC pcd information to get the value & datumtype\r
+ for eachDec in self.PackageList:\r
+ for DecPcd in eachDec.Pcds:\r
+ DecPcdEntry = eachDec.Pcds[DecPcd]\r
+ if (DecPcdEntry.TokenSpaceGuidCName == DscPcdEntry.TokenSpaceGuidCName) and \\r
+ (DecPcdEntry.TokenCName == DscPcdEntry.TokenCName):\r
+ DscPcdEntry.DatumType = DecPcdEntry.DatumType\r
+ DscPcdEntry.DefaultValue = DecPcdEntry.DefaultValue\r
+ Sku.DefaultValue = DecPcdEntry.DefaultValue \r
+ \r
+ VpdFile.Add(DscPcdEntry, Sku.VpdOffset)\r
+ # if the offset of a VPD is *, then it need to be fixed up by third party tool.\r
+ if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":\r
+ NeedProcessVpdMapFile = True \r
+ \r
+ \r
+ if (self.Platform.FlashDefinition == None or self.Platform.FlashDefinition == '') and \\r
+ VpdFile.GetCount() != 0:\r
+ EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, \r
+ "Fail to get FLASH_DEFINITION definition in DSC file %s which is required when DSC contains VPD PCD." % str(self.Platform.MetaFile))\r
+ \r
+ if VpdFile.GetCount() != 0:\r
+ WorkspaceDb = self.BuildDatabase.WorkspaceDb\r
+ DscTimeStamp = WorkspaceDb.GetTimeStamp(WorkspaceDb.GetFileId(str(self.Platform.MetaFile)))\r
+ FvPath = os.path.join(self.BuildDir, "FV")\r
+ if not os.path.exists(FvPath):\r
+ try:\r
+ os.makedirs(FvPath)\r
+ except:\r
+ EdkLogger.error("build", FILE_WRITE_FAILURE, "Fail to create FV folder under %s" % self.BuildDir)\r
+ \r
+ VpdFileName = self.Platform.VpdFileName \r
+ if VpdFileName == None or VpdFileName == "" :\r
+ VpdFilePath = os.path.join(FvPath, "%s.txt" % self.Platform.VpdToolGuid)\r
+ else :\r
+ VpdFilePath = os.path.join(FvPath, "%s.txt" % VpdFileName) \r
+ \r
+ if not os.path.exists(VpdFilePath) or os.path.getmtime(VpdFilePath) < DscTimeStamp:\r
+ VpdFile.Write(VpdFilePath)\r
+ \r
+ # retrieve BPDG tool's path from tool_def.txt according to VPD_TOOL_GUID defined in DSC file.\r
+ BPDGToolName = None\r
+ for ToolDef in self.ToolDefinition.values():\r
+ if ToolDef.has_key("GUID") and ToolDef["GUID"] == self.Platform.VpdToolGuid:\r
+ if not ToolDef.has_key("PATH"):\r
+ EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "PATH attribute was not provided for BPDG guid tool %s in tools_def.txt" % self.Platform.VpdToolGuid)\r
+ BPDGToolName = ToolDef["PATH"]\r
+ break\r
+ # Call third party GUID BPDG tool.\r
+ if BPDGToolName != None:\r
+ VpdInfoFile.CallExtenalBPDGTool(BPDGToolName, VpdFilePath, VpdFileName)\r
+ else:\r
+ EdkLogger.error("Build", FILE_NOT_FOUND, "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")\r
+ \r
+ # Process VPD map file generated by third party BPDG tool\r
+ if NeedProcessVpdMapFile:\r
+ if VpdFileName == None or VpdFileName == "" :\r
+ VpdMapFilePath = os.path.join(self.BuildDir, "FV", "%s.map" % self.Platform.VpdToolGuid)\r
+ else :\r
+ VpdMapFilePath = os.path.join(self.BuildDir, "FV", "%s.map" % VpdFileName)\r
+ if os.path.exists(VpdMapFilePath):\r
+ VpdFile.Read(VpdMapFilePath)\r
+ \r
+ # Fixup "*" offset\r
+ for Pcd in self._DynamicPcdList:\r
+ # just pick the a value to determine whether is unicode string type\r
+ Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]] \r
+ if Sku.VpdOffset == "*":\r
+ Sku.VpdOffset = VpdFile.GetOffset(Pcd)[0]\r
+ else:\r
+ EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)\r
+ \r
+ # Delete the DynamicPcdList At the last time enter into this function \r
+ del self._DynamicPcdList[:] \r
self._DynamicPcdList.extend(UnicodePcdArray)\r
self._DynamicPcdList.extend(HiiPcdArray)\r
self._DynamicPcdList.extend(OtherPcdArray)\r
\r
## Get list of non-dynamic PCDs\r
def _GetNonDynamicPcdList(self):\r
+ if self._NonDynamicPcdList == None:\r
+ self.CollectPlatformDynamicPcds()\r
return self._NonDynamicPcdList\r
\r
## Get list of dynamic PCDs\r
def _GetDynamicPcdList(self):\r
+ if self._DynamicPcdList == None:\r
+ self.CollectPlatformDynamicPcds()\r
return self._DynamicPcdList\r
\r
## Generate Token Number for all PCD\r
if FromPcd != None:\r
if ToPcd.Pending and FromPcd.Type not in [None, '']:\r
ToPcd.Type = FromPcd.Type\r
+ elif (ToPcd.Type not in [None, '']) and (FromPcd.Type not in [None, ''])\\r
+ and (ToPcd.Type != FromPcd.Type) and (ToPcd.Type in FromPcd.Type):\r
+ if ToPcd.Type.strip() == "DynamicEx":\r
+ ToPcd.Type = FromPcd.Type \r
elif ToPcd.Type not in [None, ''] and FromPcd.Type not in [None, ''] \\r
and ToPcd.Type != FromPcd.Type:\r
EdkLogger.error("build", OPTION_CONFLICT, "Mismatched PCD type",\r
ArraySize = ArraySize / 2;
if ArraySize < (len(Value) + 1):
- ArraySize = len(Value) + 1
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName),
+ ExtraData="[%s]" % str(Info))
Value = NewValue + '0 }'
Array = '[%d]' % ArraySize
#
VariableHeadValueList = []
Pcd.InitString = 'UNINIT'
- if Pcd.Type in ["DynamicVpd", "DynamicExVpd"]:
- Pcd.TokenTypeList = ['PCD_TYPE_VPD']
- elif Pcd.DatumType == 'VOID*':
- Pcd.TokenTypeList = ['PCD_TYPE_STRING']
+ if Pcd.DatumType == 'VOID*':
+ if Pcd.Type not in ["DynamicVpd", "DynamicExVpd"]:
+ Pcd.TokenTypeList = ['PCD_TYPE_STRING']
+ else:
+ Pcd.TokenTypeList = []
elif Pcd.DatumType == 'BOOLEAN':
Pcd.TokenTypeList = ['PCD_DATUM_TYPE_UINT8']
else:
Dict['SIZE_TABLE_MAXIMUM_LENGTH'].append(Pcd.MaxDatumSize)
if Pcd.MaxDatumSize != '':
MaxDatumSize = int(Pcd.MaxDatumSize, 0)
- if MaxDatumSize > Size:
- Size = MaxDatumSize
+ if MaxDatumSize < Size:
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName),
+ ExtraData="[%s]" % str(Platform))
+ Size = MaxDatumSize
Dict['STRING_TABLE_LENGTH'].append(Size)
StringTableIndex += 1
StringTableSize += (Size)
TAB_DEC_DEFINES_PACKAGE_NAME = 'PACKAGE_NAME'\r
TAB_DEC_DEFINES_PACKAGE_GUID = 'PACKAGE_GUID'\r
TAB_DEC_DEFINES_PACKAGE_VERSION = 'PACKAGE_VERSION'\r
+TAB_DEC_DEFINES_PKG_UNI_FILE = 'PKG_UNI_FILE'\r
\r
#\r
# Dsc Definitions\r
TAB_DSC_DEFINES_BS_BASE_ADDRESS = 'BsBaseAddress'\r
TAB_DSC_DEFINES_RT_BASE_ADDRESS = 'RtBaseAddress'\r
TAB_DSC_DEFINES_DEFINE = 'DEFINE'\r
+TAB_DSC_DEFINES_VPD_TOOL_GUID = 'VPD_TOOL_GUID'\r
+TAB_DSC_DEFINES_VPD_FILENAME = 'VPD_FILENAME'\r
TAB_FIX_LOAD_TOP_MEMORY_ADDRESS = 'FIX_LOAD_TOP_MEMORY_ADDRESS'\r
\r
#\r
while Template:
MatchObj = gPlaceholderPattern.search(Template, SearchFrom)
if not MatchObj:
- if MatchEnd < len(Template):
+ if MatchEnd <= len(Template):
TemplateSection = TemplateString.Section(Template[SectionStart:], PlaceHolderList)
TemplateSectionList.append(TemplateSection)
break
\r
return Line\r
\r
## CleanString2
#
# Split a line into its statement part and its comment part.
# The statement is everything before the first comment character; the comment
# text is returned with surrounding whitespace and any leading/trailing
# comment characters removed.
#
# @param Line:             The string to be cleaned
# @param CommentCharacter: Comment char, used to ignore comment content,
#                          default is DataType.TAB_COMMENT_SPLIT
# @param AllowCppStyleComment: If True, R8-style comment markers are treated
#                          as CommentCharacter as well
#
# @retval (Line, Comment)  The cleaned statement text and the cleaned
#                          comment text (empty string when no comment)
#
def CleanString2(Line, CommentCharacter = DataType.TAB_COMMENT_SPLIT, AllowCppStyleComment=False):
    # Drop surrounding whitespace up front
    Line = Line.strip()
    # Map R8-style comment markers onto the standard comment character
    if AllowCppStyleComment:
        Line = Line.replace(DataType.TAB_COMMENT_R8_SPLIT, CommentCharacter)
    # Statement text is whatever precedes the first comment marker
    Parts = Line.split(CommentCharacter, 1)
    Line = Parts[0].strip()
    if len(Parts) == 1:
        # No comment present on this line
        return Line, ''
    Comment = Parts[1].strip()
    # Trim any additional comment markers off both ends of the comment text
    Head = 0
    Tail = len(Comment)
    while Head < Tail and Comment.startswith(CommentCharacter, Head, Tail):
        Head += 1
    while Tail >= 0 and Comment.endswith(CommentCharacter, Head, Tail):
        Tail -= 1
    return Line, Comment[Head:Tail].strip()
+\r
## GetMultipleValuesOfKeyFromLines\r
#\r
# Parse multiple strings to clean comment and spaces\r
--- /dev/null
+## @file\r
+# \r
+# This package manages the VPD PCD information file which will be generated
+# by build tool's autogen.\r
+# The VPD PCD information file will be input for third-party BPDG tool which\r
+# is pointed by *_*_*_VPD_TOOL_GUID in conf/tools_def.txt \r
+#\r
+#\r
+# Copyright (c) 2010, Intel Corporation. All rights reserved.<BR>\r
+# This program and the accompanying materials\r
+# are licensed and made available under the terms and conditions of the BSD License\r
+# which accompanies this distribution. The full text of the license may be found at\r
+# http://opensource.org/licenses/bsd-license.php\r
+#\r
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
+#\r
+import os\r
+import re\r
+import Common.EdkLogger as EdkLogger\r
+import Common.BuildToolError as BuildToolError\r
+import subprocess\r
+\r
+FILE_COMMENT_TEMPLATE = \\r
+"""\r
+## @file\r
+#\r
+# THIS IS AUTO-GENERATED FILE BY BUILD TOOLS AND PLEASE DO NOT MAKE MODIFICATION.\r
+#\r
+# This file lists all VPD informations for a platform collected by build.exe.\r
+# \r
+# Copyright (c) 2010, Intel Corporation. All rights reserved.<BR>\r
+# This program and the accompanying materials\r
+# are licensed and made available under the terms and conditions of the BSD License\r
+# which accompanies this distribution. The full text of the license may be found at\r
+# http://opensource.org/licenses/bsd-license.php\r
+#\r
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
+#\r
+\r
+"""\r
+\r
+## The class manage VpdInfoFile.\r
+#\r
+# This file contains an ordered (based on position in the DSC file) list of the PCDs specified in the platform description file (DSC). The Value field that will be assigned to the PCD comes from the DSC file, INF file (if not defined in the DSC file) or the DEC file (if not defined in the INF file). This file is used as an input to the BPDG tool.\r
+# Format for this file (using EBNF notation) is:\r
+# <File> :: = [<CommentBlock>]\r
+# [<PcdEntry>]*\r
+# <CommentBlock> ::= ["#" <String> <EOL>]*\r
+# <PcdEntry> ::= <PcdName> "|" <Offset> "|" <Size> "|" <Value> <EOL>\r
+# <PcdName> ::= <TokenSpaceCName> "." <PcdCName>\r
+# <TokenSpaceCName> ::= C Variable Name of the Token Space GUID\r
+# <PcdCName> ::= C Variable Name of the PCD\r
+# <Offset> ::= {"*"} {<HexNumber>}\r
+# <HexNumber> ::= "0x" (a-fA-F0-9){1,8}\r
+# <Size> ::= <HexNumber>\r
+# <Value> ::= {<HexNumber>} {<NonNegativeInt>} {<QString>} {<Array>}\r
+# <NonNegativeInt> ::= (0-9)+\r
+# <QString> ::= ["L"] <DblQuote> <String> <DblQuote>\r
+# <DblQuote> ::= 0x22\r
+# <Array> ::= {<CArray>} {<NList>}\r
+# <CArray> ::= "{" <HexNumber> ["," <HexNumber>]* "}"\r
+# <NList> ::= <HexNumber> ["," <HexNumber>]*\r
+#\r
class VpdInfoFile:
    
    ## Mapping from a fixed-width datum type to its size in bytes (as string);
    #  used to supply a default MaxDatumSize when the platform omitted one.
    _MAX_SIZE_TYPE = {"BOOLEAN":"1", "UINT8":"1", "UINT16":"2", "UINT32":"4", "UINT64":"8"}
    # NOTE(review): _rVpdPcdLine appears unused within this class -- confirm
    # against external callers before removing.
    _rVpdPcdLine = None
    ## Constructor
    def __init__(self):
        ## Dictionary for VPD in following format
        #
        #  Key   : PcdClassObject instance.
        #          @see BuildClassObject.PcdClassObject
        #  Value : offset in different SKU such as [sku1_offset, sku2_offset]
        self._VpdArray = {}
    
    ## Add a VPD PCD collected from platform's autogen when building.
    #
    # @param Vpd    The VPD PCD collected for a platform.
    #               @see BuildClassObject.PcdClassObject
    # @param Offset Offset for this PCD in a specific SKU: a non-negative
    #               integer, or the string "*" meaning the offset must be
    #               fixed up later by the third-party BPDG tool.
    def Add(self, Vpd, Offset):
        if (Vpd == None):
            EdkLogger.error("VpdInfoFile", BuildToolError.ATTRIBUTE_UNKNOWN_ERROR, "Invalid VPD PCD entry.")
        
        if not (Offset >= 0 or Offset == "*"):
            EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID, "Invalid offset parameter: %s." % Offset)
        
        if Vpd.DatumType == "VOID*":
            # A variable-size PCD must carry an explicit, positive max size.
            # NOTE(review): MaxDatumSize may be a string at this point
            # (Py2-style mixed-type comparison) -- confirm intended check.
            if Vpd.MaxDatumSize <= 0:
                EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID, 
                                "Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName))
        elif Vpd.DatumType in ["BOOLEAN", "UINT8", "UINT16", "UINT32", "UINT64"]: 
            # Fixed-width types get a default max size derived from the type.
            if Vpd.MaxDatumSize == None or Vpd.MaxDatumSize == "":
                Vpd.MaxDatumSize = VpdInfoFile._MAX_SIZE_TYPE[Vpd.DatumType]
        else:
            EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID, 
                            "Invalid DatumType %s for VPD PCD %s.%s" % (Vpd.DatumType, Vpd.TokenSpaceGuidCName, Vpd.TokenCName))
        
        if Vpd not in self._VpdArray:
            #
            # No entry yet for this PCD: this is its first SKU's offset.
            #
            self._VpdArray[Vpd] = [Offset]
        else:
            #
            # Entry exists: append this additional SKU's offset to the list.
            #
            self._VpdArray[Vpd].append(Offset)
        
    
    ## Generate VPD PCD information into a text file
    # 
    # Writes one "<TokenSpace>.<PcdName>|<Offset>|<Size>|<Value>" line per
    # (PCD, SKU offset) pair collected by Add(), preceded by a file header.
    #
    # @param FilePath The given file path which would hold VPD information
    def Write(self, FilePath):
        # Fixed: the original guard `not (FilePath != None or len(FilePath) != 0)`
        # could never reject a non-empty path and raised TypeError on None.
        if FilePath == None or len(FilePath) == 0:
            EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID, 
                            "Invalid parameter FilePath: %s." % FilePath) 
        try:
            fd = open(FilePath, "w")
        except:
            EdkLogger.error("VpdInfoFile", 
                            BuildToolError.FILE_OPEN_FAILURE, 
                            "Fail to open file %s for written." % FilePath)
        
        try:
            # write file header
            fd.write(FILE_COMMENT_TEMPLATE)

            # write each of PCD in VPD type
            for Pcd in self._VpdArray.keys():
                for Offset in self._VpdArray[Pcd]:
                    # Fall back to the PCD's own default when the SKU carries
                    # no value of its own.
                    PcdValue = str(Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]].DefaultValue).strip()
                    if PcdValue == "" :
                        PcdValue = Pcd.DefaultValue
                        
                    fd.write("%s.%s|%s|%s|%s \n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, str(Offset).strip(), str(Pcd.MaxDatumSize).strip(),PcdValue))
        except:
            EdkLogger.error("VpdInfoFile",
                            BuildToolError.FILE_WRITE_FAILURE,
                            "Fail to write file %s" % FilePath) 
        finally:
            # Fixed: always release the file handle, even when writing fails.
            fd.close()

    ## Read an existing VPD PCD info file.
    #
    # This routine will read VPD PCD information from existing file and construct
    # internal PcdClassObject array.
    # This routine could be used by third-party tool to parse VPD info file content.
    #
    # @param FilePath The full path string for existing VPD PCD info file.
    def Read(self, FilePath):
        try:
            fd = open(FilePath, "r")
        except:
            EdkLogger.error("VpdInfoFile", 
                            BuildToolError.FILE_OPEN_FAILURE, 
                            "Fail to open file %s for reading." % FilePath)
        Lines = fd.readlines()
        # Fixed: close the handle as soon as the content is read.
        fd.close()
        for Line in Lines:
            Line = Line.strip()
            if len(Line) == 0 or Line.startswith("#"):
                continue
            
            #
            # the line must follow output format defined in BPDG spec.
            #
            try:
                PcdName, Offset, Size, Value = Line.split("#")[0].split("|")
                TokenSpaceName, PcdTokenName = PcdName.split(".")
            except:
                EdkLogger.error("BPDG", BuildToolError.PARSER_ERROR, "Fail to parse VPD information file %s" % FilePath)
            
            Found = False
            for VpdObject in self._VpdArray.keys():
                if VpdObject.TokenSpaceGuidCName == TokenSpaceName and VpdObject.TokenCName == PcdTokenName.strip():
                    # A "*" offset coming back from BPDG means the tool failed
                    # to assign a real offset for this PCD.
                    if self._VpdArray[VpdObject][0] == "*":
                        if Offset == "*":
                            EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "The offset of %s has not been fixed up by third-party BPDG tool." % PcdName)
                        
                    self._VpdArray[VpdObject][0] = Offset
                    Found = True
                    break
            if not Found:
                EdkLogger.error("BPDG", BuildToolError.PARSER_ERROR, "Can not find PCD defined in VPD guid file.")
    
    ## Get count of VPD PCD collected from platform's autogen when building.
    #
    # @return The total number of (PCD, SKU offset) entries recorded.
    def GetCount(self):
        Count = 0
        for OffsetList in self._VpdArray.values():
            Count += len(OffsetList)
        
        return Count
    
    ## Get an offset value for a given VPD PCD
    #
    # Because BPDG only support one Sku, callers typically use element [0]
    # of the returned list.
    #
    # @param vpd A given VPD PCD
    #
    # @return The list of offsets recorded for this PCD, or None when the PCD
    #         is unknown or has no offsets.
    def GetOffset(self, vpd):
        # `in` replaces dict.has_key() (removed in Python 3; same semantics).
        if vpd not in self._VpdArray:
            return None
        
        if len(self._VpdArray[vpd]) == 0:
            return None
        
        return self._VpdArray[vpd]
+ \r
+## Call external BPDG tool to process VPD file\r
+# \r
+# @param ToolPath The string path name for BPDG tool\r
+# @param VpdFileName The string path name for VPD information guid.txt\r
+# \r
+def CallExtenalBPDGTool(ToolPath, VpdFilePath, VpdFileName):\r
+ assert ToolPath != None, "Invalid parameter ToolPath"\r
+ assert VpdFilePath != None and os.path.exists(VpdFilePath), "Invalid parameter VpdFileName"\r
+ \r
+ OutputDir = os.path.dirname(VpdFilePath)\r
+ if (VpdFileName == None or VpdFileName == "") :\r
+ FileName = os.path.basename(VpdFilePath)\r
+ BaseName, ext = os.path.splitext(FileName)\r
+ OutputMapFileName = os.path.join(OutputDir, "%s.map" % BaseName)\r
+ OutputBinFileName = os.path.join(OutputDir, "%s.bin" % BaseName)\r
+ else :\r
+ OutputMapFileName = os.path.join(OutputDir, "%s.map" % VpdFileName)\r
+ OutputBinFileName = os.path.join(OutputDir, "%s.bin" % VpdFileName)\r
+ \r
+ try:\r
+ PopenObject = subprocess.Popen([ToolPath,\r
+ '-o', OutputBinFileName, \r
+ '-m', OutputMapFileName,\r
+ '-s',\r
+ '-f',\r
+ '-v',\r
+ VpdFilePath],\r
+ stdout=subprocess.PIPE, \r
+ stderr= subprocess.PIPE)\r
+ except Exception, X:\r
+ EdkLogger.error("BPDG", BuildToolError.COMMAND_FAILURE, ExtraData="%s" % (str(X)))\r
+ (out, error) = PopenObject.communicate()\r
+ print out\r
+ while PopenObject.returncode == None :\r
+ PopenObject.wait()\r
+ \r
+ if PopenObject.returncode != 0:\r
+ if PopenObject.returncode != 0:\r
+ EdkLogger.debug(EdkLogger.DEBUG_1, "Fail to call BPDG tool", str(error))\r
+ EdkLogger.error("BPDG", BuildToolError.COMMAND_FAILURE, "Fail to execute BPDG tool with exit code: %d, the error message is: \n %s" % \\r
+ (PopenObject.returncode, str(error)))\r
+ \r
+ return PopenObject.returncode\r
MODEL_FILE_FDF = 1014\r
MODEL_FILE_INC = 1015\r
MODEL_FILE_CIF = 1016\r
+MODEL_FILE_OTHERS = 1099\r
\r
MODEL_IDENTIFIER_FILE_HEADER = 2001\r
MODEL_IDENTIFIER_FUNCTION_HEADER = 2002\r
MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF = 50013\r
MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF = 5014\r
MODEL_META_DATA_COMPONENT_SOURCE_OVERRIDE_PATH = 5015\r
+MODEL_META_DATA_COMMENT = 5016\r
+MODEL_META_DATA_GLOBAL_DEFINE = 5017\r
\r
MODEL_EXTERNAL_DEPENDENCY = 10000\r
\r
('MODEL_FILE_DSC', MODEL_FILE_DSC),\r
('MODEL_FILE_FDF', MODEL_FILE_FDF),\r
('MODEL_FILE_INC', MODEL_FILE_INC),\r
+ ('MODEL_FILE_CIF', MODEL_FILE_CIF),\r
+ ('MODEL_FILE_OTHERS', MODEL_FILE_OTHERS),\r
('MODEL_IDENTIFIER_FILE_HEADER', MODEL_IDENTIFIER_FILE_HEADER),\r
('MODEL_IDENTIFIER_FUNCTION_HEADER', MODEL_IDENTIFIER_FUNCTION_HEADER),\r
('MODEL_IDENTIFIER_COMMENT', MODEL_IDENTIFIER_COMMENT),\r
("MODEL_META_DATA_COMPONENT", MODEL_META_DATA_COMPONENT),\r
('MODEL_META_DATA_USER_EXTENSION', MODEL_META_DATA_USER_EXTENSION),\r
('MODEL_META_DATA_PACKAGE', MODEL_META_DATA_PACKAGE),\r
- ('MODEL_META_DATA_NMAKE', MODEL_META_DATA_NMAKE)\r
+ ('MODEL_META_DATA_NMAKE', MODEL_META_DATA_NMAKE),\r
+ ('MODEL_META_DATA_COMMENT', MODEL_META_DATA_COMMENT)\r
]\r
\r
## FunctionClass\r
#\r
# This class defines a structure of a function\r
-# \r
+#\r
# @param ID: ID of a Function\r
# @param Header: Header of a Function\r
-# @param Modifier: Modifier of a Function \r
+# @param Modifier: Modifier of a Function\r
# @param Name: Name of a Function\r
# @param ReturnStatement: ReturnStatement of a Function
# @param StartLine: StartLine of a Function\r
#\r
# @var ID: ID of a Function\r
# @var Header: Header of a Function\r
-# @var Modifier: Modifier of a Function \r
+# @var Modifier: Modifier of a Function\r
# @var Name: Name of a Function\r
# @var ReturnStatement: ReturnStatement of a Function
# @var StartLine: StartLine of a Function\r
FunNameStartLine = -1, FunNameStartColumn = -1):\r
self.ID = ID\r
self.Header = Header\r
- self.Modifier = Modifier \r
+ self.Modifier = Modifier\r
self.Name = Name\r
self.ReturnStatement = ReturnStatement\r
self.StartLine = StartLine\r
self.BelongsToFile = BelongsToFile\r
self.FunNameStartLine = FunNameStartLine\r
self.FunNameStartColumn = FunNameStartColumn\r
- \r
+\r
self.IdentifierList = IdentifierList\r
self.PcdList = PcdList\r
\r
## IdentifierClass\r
#\r
# This class defines a structure of a variable\r
-# \r
+#\r
# @param ID: ID of a Identifier\r
# @param Modifier: Modifier of a Identifier\r
# @param Type: Type of a Identifier\r
## PcdClass\r
#\r
# This class defines a structure of a Pcd\r
-# \r
+#\r
# @param ID: ID of a Pcd\r
# @param CName: CName of a Pcd\r
# @param TokenSpaceGuidCName: TokenSpaceGuidCName of a Pcd\r
## FileClass\r
#\r
# This class defines a structure of a file\r
-# \r
+#\r
# @param ID: ID of a File\r
# @param Name: Name of a File\r
# @param ExtName: ExtName of a File\r
class FileClass(object):\r
def __init__(self, ID = -1, Name = '', ExtName = '', Path = '', FullPath = '', Model = MODEL_UNKNOWN, TimeStamp = '', \\r
FunctionList = [], IdentifierList = [], PcdList = []):\r
- self.ID = ID \r
+ self.ID = ID\r
self.Name = Name\r
- self.ExtName = ExtName \r
+ self.ExtName = ExtName\r
self.Path = Path\r
self.FullPath = FullPath\r
self.Model = Model\r
self.TimeStamp = TimeStamp\r
- \r
+\r
self.FunctionList = FunctionList\r
self.IdentifierList = IdentifierList\r
self.PcdList = PcdList\r
\r
# Check all required checkpoints\r
def Check(self):\r
+ self.GeneralCheck()\r
self.MetaDataFileCheck()\r
self.DoxygenCheck()\r
self.IncludeFileCheck()\r
self.FunctionLayoutCheck()\r
self.NamingConventionCheck()\r
\r
+ # General Checking\r
+ def GeneralCheck(self):\r
+ self.GeneralCheckNonAcsii()\r
+\r
+    # Check whether file has non-ASCII char\r
+ def GeneralCheckNonAcsii(self):\r
+ if EccGlobalData.gConfig.GeneralCheckNonAcsii == '1' or EccGlobalData.gConfig.GeneralCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':\r
+ EdkLogger.quiet("Checking Non-ACSII char in file ...")\r
+ SqlCommand = """select ID, FullPath, ExtName from File"""\r
+ RecordSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)\r
+ for Record in RecordSet:\r
+ if Record[2].upper() not in EccGlobalData.gConfig.BinaryExtList:\r
+ op = open(Record[1]).readlines()\r
+ IndexOfLine = 0\r
+ for Line in op:\r
+ IndexOfLine += 1\r
+ IndexOfChar = 0\r
+ for Char in Line:\r
+ IndexOfChar += 1\r
+ if ord(Char) > 126:\r
+ OtherMsg = "File %s has Non-ASCII char at line %s column %s" %(Record[1], IndexOfLine, IndexOfChar)\r
+ EccGlobalData.gDb.TblReport.Insert(ERROR_GENERAL_CHECK_NON_ACSII, OtherMsg = OtherMsg, BelongsToTable = 'File', BelongsToItem = Record[0])\r
+\r
# C Function Layout Checking\r
def FunctionLayoutCheck(self):\r
self.FunctionLayoutCheckReturnType()\r
if EccGlobalData.gConfig.CFunctionLayoutCheckReturnType == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':\r
EdkLogger.quiet("Checking function layout return type ...")\r
\r
- for Dirpath, Dirnames, Filenames in self.WalkTree():\r
- for F in Filenames:\r
- if os.path.splitext(F)[1] in ('.c', '.h'):\r
- FullName = os.path.join(Dirpath, F)\r
- c.CheckFuncLayoutReturnType(FullName)\r
+# for Dirpath, Dirnames, Filenames in self.WalkTree():\r
+# for F in Filenames:\r
+# if os.path.splitext(F)[1] in ('.c', '.h'):\r
+# FullName = os.path.join(Dirpath, F)\r
+# c.CheckFuncLayoutReturnType(FullName)\r
+ for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:\r
+ c.CheckFuncLayoutReturnType(FullName)\r
\r
# Check whether any optional functional modifiers exist and next to the return type\r
def FunctionLayoutCheckModifier(self):\r
if EccGlobalData.gConfig.CFunctionLayoutCheckOptionalFunctionalModifier == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':\r
EdkLogger.quiet("Checking function layout modifier ...")\r
\r
- for Dirpath, Dirnames, Filenames in self.WalkTree():\r
- for F in Filenames:\r
- if os.path.splitext(F)[1] in ('.c', '.h'):\r
- FullName = os.path.join(Dirpath, F)\r
- c.CheckFuncLayoutModifier(FullName)\r
+# for Dirpath, Dirnames, Filenames in self.WalkTree():\r
+# for F in Filenames:\r
+# if os.path.splitext(F)[1] in ('.c', '.h'):\r
+# FullName = os.path.join(Dirpath, F)\r
+# c.CheckFuncLayoutModifier(FullName)\r
+ for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:\r
+ c.CheckFuncLayoutModifier(FullName)\r
\r
# Check whether the next line contains the function name, left justified, followed by the beginning of the parameter list\r
# Check whether the closing parenthesis is on its own line and also indented two spaces\r
if EccGlobalData.gConfig.CFunctionLayoutCheckFunctionName == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':\r
EdkLogger.quiet("Checking function layout function name ...")\r
\r
- for Dirpath, Dirnames, Filenames in self.WalkTree():\r
- for F in Filenames:\r
- if os.path.splitext(F)[1] in ('.c', '.h'):\r
- FullName = os.path.join(Dirpath, F)\r
- c.CheckFuncLayoutName(FullName)\r
+# for Dirpath, Dirnames, Filenames in self.WalkTree():\r
+# for F in Filenames:\r
+# if os.path.splitext(F)[1] in ('.c', '.h'):\r
+# FullName = os.path.join(Dirpath, F)\r
+# c.CheckFuncLayoutName(FullName)\r
+ for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:\r
+ c.CheckFuncLayoutName(FullName)\r
+\r
# Check whether the function prototypes in include files have the same form as function definitions\r
def FunctionLayoutCheckPrototype(self):\r
if EccGlobalData.gConfig.CFunctionLayoutCheckFunctionPrototype == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':\r
EdkLogger.quiet("Checking function layout function prototype ...")\r
\r
- for Dirpath, Dirnames, Filenames in self.WalkTree():\r
- for F in Filenames:\r
- if os.path.splitext(F)[1] in ('.c'):\r
- FullName = os.path.join(Dirpath, F)\r
- EdkLogger.quiet("[PROTOTYPE]" + FullName)\r
- c.CheckFuncLayoutPrototype(FullName)\r
+# for Dirpath, Dirnames, Filenames in self.WalkTree():\r
+# for F in Filenames:\r
+# if os.path.splitext(F)[1] in ('.c'):\r
+# FullName = os.path.join(Dirpath, F)\r
+# EdkLogger.quiet("[PROTOTYPE]" + FullName)\r
+# c.CheckFuncLayoutPrototype(FullName)\r
+ for FullName in EccGlobalData.gCFileList:\r
+ EdkLogger.quiet("[PROTOTYPE]" + FullName)\r
+ c.CheckFuncLayoutPrototype(FullName)\r
\r
# Check whether the body of a function is contained by open and close braces that must be in the first column\r
def FunctionLayoutCheckBody(self):\r
if EccGlobalData.gConfig.CFunctionLayoutCheckFunctionBody == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':\r
EdkLogger.quiet("Checking function layout function body ...")\r
\r
- for Dirpath, Dirnames, Filenames in self.WalkTree():\r
- for F in Filenames:\r
- if os.path.splitext(F)[1] in ('.c'):\r
- FullName = os.path.join(Dirpath, F)\r
- c.CheckFuncLayoutBody(FullName)\r
+# for Dirpath, Dirnames, Filenames in self.WalkTree():\r
+# for F in Filenames:\r
+# if os.path.splitext(F)[1] in ('.c'):\r
+# FullName = os.path.join(Dirpath, F)\r
+# c.CheckFuncLayoutBody(FullName)\r
+ for FullName in EccGlobalData.gCFileList:\r
+ c.CheckFuncLayoutBody(FullName)\r
\r
# Check whether the data declarations is the first code in a module.\r
# self.CFunctionLayoutCheckDataDeclaration = 1\r
if EccGlobalData.gConfig.CFunctionLayoutCheckNoInitOfVariable == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':\r
EdkLogger.quiet("Checking function layout local variables ...")\r
\r
- for Dirpath, Dirnames, Filenames in self.WalkTree():\r
- for F in Filenames:\r
- if os.path.splitext(F)[1] in ('.c'):\r
- FullName = os.path.join(Dirpath, F)\r
- c.CheckFuncLayoutLocalVariable(FullName)\r
+# for Dirpath, Dirnames, Filenames in self.WalkTree():\r
+# for F in Filenames:\r
+# if os.path.splitext(F)[1] in ('.c'):\r
+# FullName = os.path.join(Dirpath, F)\r
+# c.CheckFuncLayoutLocalVariable(FullName)\r
+\r
+ for FullName in EccGlobalData.gCFileList:\r
+ c.CheckFuncLayoutLocalVariable(FullName)\r
\r
# Check whether no use of STATIC for functions\r
# self.CFunctionLayoutCheckNoStatic = 1\r
if EccGlobalData.gConfig.DeclarationDataTypeCheckNoUseCType == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':\r
EdkLogger.quiet("Checking Declaration No use C type ...")\r
\r
- for Dirpath, Dirnames, Filenames in self.WalkTree():\r
- for F in Filenames:\r
- if os.path.splitext(F)[1] in ('.h', '.c'):\r
- FullName = os.path.join(Dirpath, F)\r
- c.CheckDeclNoUseCType(FullName)\r
+# for Dirpath, Dirnames, Filenames in self.WalkTree():\r
+# for F in Filenames:\r
+# if os.path.splitext(F)[1] in ('.h', '.c'):\r
+# FullName = os.path.join(Dirpath, F)\r
+# c.CheckDeclNoUseCType(FullName)\r
+ for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:\r
+ c.CheckDeclNoUseCType(FullName)\r
\r
# Check whether the modifiers IN, OUT, OPTIONAL, and UNALIGNED are used only to qualify arguments to a function and should not appear in a data type declaration\r
def DeclCheckInOutModifier(self):\r
if EccGlobalData.gConfig.DeclarationDataTypeCheckInOutModifier == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':\r
EdkLogger.quiet("Checking Declaration argument modifier ...")\r
\r
- for Dirpath, Dirnames, Filenames in self.WalkTree():\r
- for F in Filenames:\r
- if os.path.splitext(F)[1] in ('.h', '.c'):\r
- FullName = os.path.join(Dirpath, F)\r
- c.CheckDeclArgModifier(FullName)\r
+# for Dirpath, Dirnames, Filenames in self.WalkTree():\r
+# for F in Filenames:\r
+# if os.path.splitext(F)[1] in ('.h', '.c'):\r
+# FullName = os.path.join(Dirpath, F)\r
+# c.CheckDeclArgModifier(FullName)\r
+ for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:\r
+ c.CheckDeclArgModifier(FullName)\r
\r
# Check whether the EFIAPI modifier should be used at the entry of drivers, events, and member functions of protocols\r
def DeclCheckEFIAPIModifier(self):\r
if EccGlobalData.gConfig.DeclarationDataTypeCheckEnumeratedType == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':\r
EdkLogger.quiet("Checking Declaration enum typedef ...")\r
\r
- for Dirpath, Dirnames, Filenames in self.WalkTree():\r
- for F in Filenames:\r
- if os.path.splitext(F)[1] in ('.h', '.c'):\r
- FullName = os.path.join(Dirpath, F)\r
- EdkLogger.quiet("[ENUM]" + FullName)\r
- c.CheckDeclEnumTypedef(FullName)\r
+# for Dirpath, Dirnames, Filenames in self.WalkTree():\r
+# for F in Filenames:\r
+# if os.path.splitext(F)[1] in ('.h', '.c'):\r
+# FullName = os.path.join(Dirpath, F)\r
+# EdkLogger.quiet("[ENUM]" + FullName)\r
+# c.CheckDeclEnumTypedef(FullName)\r
+ for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:\r
+ EdkLogger.quiet("[ENUM]" + FullName)\r
+ c.CheckDeclEnumTypedef(FullName)\r
\r
# Check whether Structure Type has a 'typedef' and the name is capital\r
def DeclCheckStructureDeclaration(self):\r
if EccGlobalData.gConfig.DeclarationDataTypeCheckStructureDeclaration == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':\r
EdkLogger.quiet("Checking Declaration struct typedef ...")\r
\r
- for Dirpath, Dirnames, Filenames in self.WalkTree():\r
- for F in Filenames:\r
- if os.path.splitext(F)[1] in ('.h', '.c'):\r
- FullName = os.path.join(Dirpath, F)\r
- EdkLogger.quiet("[STRUCT]" + FullName)\r
- c.CheckDeclStructTypedef(FullName)\r
+# for Dirpath, Dirnames, Filenames in self.WalkTree():\r
+# for F in Filenames:\r
+# if os.path.splitext(F)[1] in ('.h', '.c'):\r
+# FullName = os.path.join(Dirpath, F)\r
+# EdkLogger.quiet("[STRUCT]" + FullName)\r
+# c.CheckDeclStructTypedef(FullName)\r
+ for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:\r
+ EdkLogger.quiet("[STRUCT]" + FullName)\r
+ c.CheckDeclStructTypedef(FullName)\r
\r
# Check whether having same Structure\r
def DeclCheckSameStructure(self):\r
if EccGlobalData.gConfig.DeclarationDataTypeCheckUnionType == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':\r
EdkLogger.quiet("Checking Declaration union typedef ...")\r
\r
- for Dirpath, Dirnames, Filenames in self.WalkTree():\r
- for F in Filenames:\r
- if os.path.splitext(F)[1] in ('.h', '.c'):\r
- FullName = os.path.join(Dirpath, F)\r
- EdkLogger.quiet("[UNION]" + FullName)\r
- c.CheckDeclUnionTypedef(FullName)\r
+# for Dirpath, Dirnames, Filenames in self.WalkTree():\r
+# for F in Filenames:\r
+# if os.path.splitext(F)[1] in ('.h', '.c'):\r
+# FullName = os.path.join(Dirpath, F)\r
+# EdkLogger.quiet("[UNION]" + FullName)\r
+# c.CheckDeclUnionTypedef(FullName)\r
+ for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:\r
+ EdkLogger.quiet("[UNION]" + FullName)\r
+ c.CheckDeclUnionTypedef(FullName)\r
\r
# Predicate Expression Checking\r
def PredicateExpressionCheck(self):\r
if EccGlobalData.gConfig.PredicateExpressionCheckBooleanValue == '1' or EccGlobalData.gConfig.PredicateExpressionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':\r
EdkLogger.quiet("Checking predicate expression Boolean value ...")\r
\r
- for Dirpath, Dirnames, Filenames in self.WalkTree():\r
- for F in Filenames:\r
- if os.path.splitext(F)[1] in ('.c'):\r
- FullName = os.path.join(Dirpath, F)\r
- EdkLogger.quiet("[BOOLEAN]" + FullName)\r
- c.CheckBooleanValueComparison(FullName)\r
+# for Dirpath, Dirnames, Filenames in self.WalkTree():\r
+# for F in Filenames:\r
+# if os.path.splitext(F)[1] in ('.c'):\r
+# FullName = os.path.join(Dirpath, F)\r
+# EdkLogger.quiet("[BOOLEAN]" + FullName)\r
+# c.CheckBooleanValueComparison(FullName)\r
+ for FullName in EccGlobalData.gCFileList:\r
+ EdkLogger.quiet("[BOOLEAN]" + FullName)\r
+ c.CheckBooleanValueComparison(FullName)\r
\r
# Check whether Non-Boolean comparisons use a compare operator (==, !=, >, < >=, <=).\r
def PredicateExpressionCheckNonBooleanOperator(self):\r
if EccGlobalData.gConfig.PredicateExpressionCheckNonBooleanOperator == '1' or EccGlobalData.gConfig.PredicateExpressionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':\r
EdkLogger.quiet("Checking predicate expression Non-Boolean variable...")\r
\r
- for Dirpath, Dirnames, Filenames in self.WalkTree():\r
- for F in Filenames:\r
- if os.path.splitext(F)[1] in ('.c'):\r
- FullName = os.path.join(Dirpath, F)\r
- EdkLogger.quiet("[NON-BOOLEAN]" + FullName)\r
- c.CheckNonBooleanValueComparison(FullName)\r
+# for Dirpath, Dirnames, Filenames in self.WalkTree():\r
+# for F in Filenames:\r
+# if os.path.splitext(F)[1] in ('.c'):\r
+# FullName = os.path.join(Dirpath, F)\r
+# EdkLogger.quiet("[NON-BOOLEAN]" + FullName)\r
+# c.CheckNonBooleanValueComparison(FullName)\r
+ for FullName in EccGlobalData.gCFileList:\r
+ EdkLogger.quiet("[NON-BOOLEAN]" + FullName)\r
+ c.CheckNonBooleanValueComparison(FullName)\r
+\r
# Check whether a comparison of any pointer to zero must be done via the NULL type\r
def PredicateExpressionCheckComparisonNullType(self):\r
if EccGlobalData.gConfig.PredicateExpressionCheckComparisonNullType == '1' or EccGlobalData.gConfig.PredicateExpressionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':\r
EdkLogger.quiet("Checking predicate expression NULL pointer ...")\r
\r
- for Dirpath, Dirnames, Filenames in self.WalkTree():\r
- for F in Filenames:\r
- if os.path.splitext(F)[1] in ('.c'):\r
- FullName = os.path.join(Dirpath, F)\r
- EdkLogger.quiet("[POINTER]" + FullName)\r
- c.CheckPointerNullComparison(FullName)\r
+# for Dirpath, Dirnames, Filenames in self.WalkTree():\r
+# for F in Filenames:\r
+# if os.path.splitext(F)[1] in ('.c'):\r
+# FullName = os.path.join(Dirpath, F)\r
+# EdkLogger.quiet("[POINTER]" + FullName)\r
+# c.CheckPointerNullComparison(FullName)\r
+ for FullName in EccGlobalData.gCFileList:\r
+ EdkLogger.quiet("[POINTER]" + FullName)\r
+ c.CheckPointerNullComparison(FullName)\r
+\r
# Include file checking\r
def IncludeFileCheck(self):\r
self.IncludeFileCheckIfndef()\r
if EccGlobalData.gConfig.IncludeFileCheckIfndefStatement == '1' or EccGlobalData.gConfig.IncludeFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':\r
EdkLogger.quiet("Checking header file ifndef ...")\r
\r
- for Dirpath, Dirnames, Filenames in self.WalkTree():\r
- for F in Filenames:\r
- if os.path.splitext(F)[1] in ('.h'):\r
- FullName = os.path.join(Dirpath, F)\r
- MsgList = c.CheckHeaderFileIfndef(FullName)\r
+# for Dirpath, Dirnames, Filenames in self.WalkTree():\r
+# for F in Filenames:\r
+# if os.path.splitext(F)[1] in ('.h'):\r
+# FullName = os.path.join(Dirpath, F)\r
+# MsgList = c.CheckHeaderFileIfndef(FullName)\r
+ for FullName in EccGlobalData.gHFileList:\r
+ MsgList = c.CheckHeaderFileIfndef(FullName)\r
\r
# Check whether include files NOT contain code or define data variables\r
def IncludeFileCheckData(self):\r
if EccGlobalData.gConfig.IncludeFileCheckData == '1' or EccGlobalData.gConfig.IncludeFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':\r
EdkLogger.quiet("Checking header file data ...")\r
\r
- for Dirpath, Dirnames, Filenames in self.WalkTree():\r
- for F in Filenames:\r
- if os.path.splitext(F)[1] in ('.h'):\r
- FullName = os.path.join(Dirpath, F)\r
- MsgList = c.CheckHeaderFileData(FullName)\r
+# for Dirpath, Dirnames, Filenames in self.WalkTree():\r
+# for F in Filenames:\r
+# if os.path.splitext(F)[1] in ('.h'):\r
+# FullName = os.path.join(Dirpath, F)\r
+# MsgList = c.CheckHeaderFileData(FullName)\r
+ for FullName in EccGlobalData.gHFileList:\r
+ MsgList = c.CheckHeaderFileData(FullName)\r
\r
# Doxygen document checking\r
def DoxygenCheck(self):\r
MsgList = c.CheckFileHeaderDoxygenComments(FullName)\r
elif Ext in ('.inf', '.dec', '.dsc', '.fdf'):\r
FullName = os.path.join(Dirpath, F)\r
- if not open(FullName).read().startswith('## @file'):\r
+ op = open(FullName).readlines()\r
+ if not op[0].startswith('## @file') and op[6].startswith('## @file') and op[7].startswith('## @file'):\r
SqlStatement = """ select ID from File where FullPath like '%s'""" % FullName\r
ResultSet = EccGlobalData.gDb.TblFile.Exec(SqlStatement)\r
for Result in ResultSet:\r
Msg = 'INF/DEC/DSC/FDF file header comment should begin with ""## @file""'\r
EccGlobalData.gDb.TblReport.Insert(ERROR_DOXYGEN_CHECK_FILE_HEADER, Msg, "File", Result[0])\r
- \r
+\r
\r
# Check whether the function headers are followed Doxygen special documentation blocks in section 2.3.5\r
def DoxygenCheckFunctionHeader(self):\r
if EccGlobalData.gConfig.DoxygenCheckFunctionHeader == '1' or EccGlobalData.gConfig.DoxygenCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':\r
EdkLogger.quiet("Checking Doxygen function header ...")\r
\r
- for Dirpath, Dirnames, Filenames in self.WalkTree():\r
- for F in Filenames:\r
- if os.path.splitext(F)[1] in ('.h', '.c'):\r
- FullName = os.path.join(Dirpath, F)\r
- MsgList = c.CheckFuncHeaderDoxygenComments(FullName)\r
+# for Dirpath, Dirnames, Filenames in self.WalkTree():\r
+# for F in Filenames:\r
+# if os.path.splitext(F)[1] in ('.h', '.c'):\r
+# FullName = os.path.join(Dirpath, F)\r
+# MsgList = c.CheckFuncHeaderDoxygenComments(FullName)\r
+ for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:\r
+ MsgList = c.CheckFuncHeaderDoxygenComments(FullName)\r
+\r
\r
# Check whether the first line of text in a comment block is a brief description of the element being documented.\r
# The brief description must end with a period.\r
if EccGlobalData.gConfig.DoxygenCheckCommentFormat == '1' or EccGlobalData.gConfig.DoxygenCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':\r
EdkLogger.quiet("Checking Doxygen comment ///< ...")\r
\r
- for Dirpath, Dirnames, Filenames in self.WalkTree():\r
- for F in Filenames:\r
- if os.path.splitext(F)[1] in ('.h', '.c'):\r
- FullName = os.path.join(Dirpath, F)\r
- MsgList = c.CheckDoxygenTripleForwardSlash(FullName)\r
+# for Dirpath, Dirnames, Filenames in self.WalkTree():\r
+# for F in Filenames:\r
+# if os.path.splitext(F)[1] in ('.h', '.c'):\r
+# FullName = os.path.join(Dirpath, F)\r
+# MsgList = c.CheckDoxygenTripleForwardSlash(FullName)\r
+ for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:\r
+ MsgList = c.CheckDoxygenTripleForwardSlash(FullName)\r
\r
# Check whether only Doxygen commands allowed to mark the code are @bug and @todo.\r
def DoxygenCheckCommand(self):\r
if EccGlobalData.gConfig.DoxygenCheckCommand == '1' or EccGlobalData.gConfig.DoxygenCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':\r
EdkLogger.quiet("Checking Doxygen command ...")\r
\r
- for Dirpath, Dirnames, Filenames in self.WalkTree():\r
- for F in Filenames:\r
- if os.path.splitext(F)[1] in ('.h', '.c'):\r
- FullName = os.path.join(Dirpath, F)\r
- MsgList = c.CheckDoxygenCommand(FullName)\r
+# for Dirpath, Dirnames, Filenames in self.WalkTree():\r
+# for F in Filenames:\r
+# if os.path.splitext(F)[1] in ('.h', '.c'):\r
+# FullName = os.path.join(Dirpath, F)\r
+# MsgList = c.CheckDoxygenCommand(FullName)\r
+ for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:\r
+ MsgList = c.CheckDoxygenCommand(FullName)\r
\r
# Meta-Data File Processing Checking\r
def MetaDataFileCheck(self):\r
SqlCommand2 = """select Name from File where ID = %s""" %Record[5]\r
DscFileName = os.path.splitext(EccGlobalData.gDb.TblDsc.Exec(SqlCommand1)[0][0])[0]\r
FdfFileName = os.path.splitext(EccGlobalData.gDb.TblDsc.Exec(SqlCommand2)[0][0])[0]\r
- print DscFileName, 111, FdfFileName\r
if DscFileName != FdfFileName:\r
continue\r
if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE, Record[1]):\r
SqlCommand = """\r
select ID from File where FullPath in\r
(select B.Path || '\\' || A.Value1 from INF as A, File as B where A.Model = %s and A.BelongsToFile = %s\r
- and B.ID = %s)\r
- """ %(MODEL_EFI_SOURCE_FILE, BelongsToFile, BelongsToFile)\r
+ and B.ID = %s and (B.Model = %s or B.Model = %s))\r
+ """ %(MODEL_EFI_SOURCE_FILE, BelongsToFile, BelongsToFile, MODEL_FILE_C, MODEL_FILE_H)\r
TableSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)\r
for Tbl in TableSet:\r
TblName = 'Identifier' + str(Tbl[0])\r
if Path.startswith('\\') or Path.startswith('/'):\r
Path = Path[1:]\r
return Path\r
- \r
+\r
# Check whether two module INFs under one workspace has the same FILE_GUID value\r
def MetaDataFileCheckModuleFileGuidDuplication(self):\r
if EccGlobalData.gConfig.MetaDataFileCheckModuleFileGuidDuplication == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':\r
if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_MODULE_FILE_GUID_DUPLICATION, InfPath1):\r
Msg = "The FILE_GUID of INF file [%s] is duplicated with that of %s" % (InfPath1, InfPath2)\r
EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_MODULE_FILE_GUID_DUPLICATION, OtherMsg = Msg, BelongsToTable = Table.Table, BelongsToItem = Record[0])\r
- \r
+\r
\r
# Check whether these is duplicate Guid/Ppi/Protocol name\r
def CheckGuidProtocolPpi(self, ErrorID, Model, Table):\r
class Configuration(object):\r
def __init__(self, Filename):\r
self.Filename = Filename\r
- \r
+\r
self.Version = 0.1\r
\r
## Identify to if check all items\r
# SpaceCheckAll\r
#\r
self.AutoCorrect = 0\r
- \r
+\r
# List customized Modifer here, split with ','\r
# Defaultly use the definition in class DataType\r
self.ModifierList = MODIFIER_LIST\r
- \r
+\r
## General Checking\r
self.GeneralCheckAll = 0\r
- \r
+\r
# Check whether NO Tab is used, replaced with spaces\r
self.GeneralCheckNoTab = 1\r
# The width of Tab\r
self.GeneralCheckCarriageReturn = 1\r
# Check whether the file exists\r
self.GeneralCheckFileExistence = 1\r
- \r
+        # Check whether file has non-ASCII char\r
+ self.GeneralCheckNonAcsii = 1\r
+\r
## Space Checking\r
self.SpaceCheckAll = 1\r
- \r
+\r
## Predicate Expression Checking\r
self.PredicateExpressionCheckAll = 0\r
- \r
+\r
# Check whether Boolean values, variable type BOOLEAN not use explicit comparisons to TRUE or FALSE\r
self.PredicateExpressionCheckBooleanValue = 1\r
- # Check whether Non-Boolean comparisons use a compare operator (==, !=, >, < >=, <=). \r
+ # Check whether Non-Boolean comparisons use a compare operator (==, !=, >, < >=, <=).\r
self.PredicateExpressionCheckNonBooleanOperator = 1\r
# Check whether a comparison of any pointer to zero must be done via the NULL type\r
self.PredicateExpressionCheckComparisonNullType = 1\r
- \r
+\r
## Headers Checking\r
self.HeaderCheckAll = 0\r
- \r
+\r
# Check whether File header exists\r
self.HeaderCheckFile = 1\r
# Check whether Function header exists\r
self.HeaderCheckFunction = 1\r
- \r
+\r
## C Function Layout Checking\r
self.CFunctionLayoutCheckAll = 0\r
- \r
+\r
# Check whether return type exists and in the first line\r
self.CFunctionLayoutCheckReturnType = 1\r
# Check whether any optional functional modifiers exist and next to the return type\r
self.CFunctionLayoutCheckNoInitOfVariable = 1\r
# Check whether no use of STATIC for functions\r
self.CFunctionLayoutCheckNoStatic = 1\r
- \r
+\r
## Include Files Checking\r
self.IncludeFileCheckAll = 0\r
- \r
+\r
#Check whether having include files with same name\r
self.IncludeFileCheckSameName = 1\r
# Check whether all include file contents is guarded by a #ifndef statement.\r
# Check whether include files contain only public or only private data\r
# Check whether include files NOT contain code or define data variables\r
self.IncludeFileCheckData = 1\r
- \r
+\r
## Declarations and Data Types Checking\r
self.DeclarationDataTypeCheckAll = 0\r
- \r
+\r
# Check whether no use of int, unsigned, char, void, static, long in any .c, .h or .asl files.\r
self.DeclarationDataTypeCheckNoUseCType = 1\r
# Check whether the modifiers IN, OUT, OPTIONAL, and UNALIGNED are used only to qualify arguments to a function and should not appear in a data type declaration\r
self.DeclarationDataTypeCheckSameStructure = 1\r
# Check whether Union Type has a 'typedef' and the name is capital\r
self.DeclarationDataTypeCheckUnionType = 1\r
- \r
+\r
## Naming Conventions Checking\r
self.NamingConventionCheckAll = 0\r
- \r
+\r
# Check whether only capital letters are used for #define declarations\r
self.NamingConventionCheckDefineStatement = 1\r
# Check whether only capital letters are used for typedef declarations\r
self.NamingConventionCheckFunctionName = 1\r
# Check whether NO use short variable name with single character\r
self.NamingConventionCheckSingleCharacterVariable = 1\r
- \r
+\r
## Doxygen Checking\r
self.DoxygenCheckAll = 0\r
- \r
+\r
# Check whether the file headers are followed Doxygen special documentation blocks in section 2.3.5\r
self.DoxygenCheckFileHeader = 1\r
# Check whether the function headers are followed Doxygen special documentation blocks in section 2.3.5\r
self.DoxygenCheckFunctionHeader = 1\r
- # Check whether the first line of text in a comment block is a brief description of the element being documented. \r
+ # Check whether the first line of text in a comment block is a brief description of the element being documented.\r
# The brief description must end with a period.\r
self.DoxygenCheckCommentDescription = 1\r
# Check whether comment lines with '///< ... text ...' format, if it is used, it should be after the code section.\r
self.DoxygenCheckCommentFormat = 1\r
# Check whether only Doxygen commands allowed to mark the code are @bug and @todo.\r
self.DoxygenCheckCommand = 1\r
- \r
+\r
## Meta-Data File Processing Checking\r
self.MetaDataFileCheckAll = 0\r
- \r
+\r
# Check whether each file defined in meta-data exists\r
self.MetaDataFileCheckPathName = 1\r
# Generate a list for all files defined in meta-data files\r
self.MetaDataFileCheckGenerateFileList = 1\r
# The path of log file\r
self.MetaDataFileCheckPathOfGenerateFileList = 'File.log'\r
- # Check whether all Library Instances defined for a given module (or dependent library instance) match the module's type. \r
- # Each Library Instance must specify the Supported Module Types in its INF file, \r
+ # Check whether all Library Instances defined for a given module (or dependent library instance) match the module's type.\r
+ # Each Library Instance must specify the Supported Module Types in its INF file,\r
# and any module specifying the library instance must be one of the supported types.\r
self.MetaDataFileCheckLibraryInstance = 1\r
# Check whether a Library Instance has been defined for all dependent library classes\r
# The directory listed here will not be parsed, split with ','\r
self.SkipDirList = []\r
\r
+ # A list for binary file ext name\r
+ self.BinaryExtList = []\r
+\r
self.ParseConfig()\r
- \r
+\r
def ParseConfig(self):\r
Filepath = os.path.normpath(self.Filename)\r
if not os.path.isfile(Filepath):\r
ErrorMsg = "Can't find configuration file '%s'" % Filepath\r
EdkLogger.error("Ecc", EdkLogger.ECC_ERROR, ErrorMsg, File = Filepath)\r
- \r
+\r
LineNo = 0\r
for Line in open(Filepath, 'r'):\r
LineNo = LineNo + 1\r
continue\r
if List[0] == 'SkipDirList':\r
List[1] = GetSplitValueList(List[1], TAB_COMMA_SPLIT)\r
+ if List[0] == 'BinaryExtList':\r
+ List[1] = GetSplitValueList(List[1], TAB_COMMA_SPLIT)\r
self.__dict__[List[0]] = List[1]\r
- \r
+\r
def ShowMe(self):\r
print self.Filename\r
for Key in self.__dict__.keys():\r
# This class defined the ECC databse\r
# During the phase of initialization, the database will create all tables and\r
# insert all records of table DataModel\r
-# \r
+#\r
# @param object: Inherited from object class\r
# @param DbPath: A string for the path of the ECC database\r
#\r
self.TblDec = None\r
self.TblDsc = None\r
self.TblFdf = None\r
- \r
+\r
## Initialize ECC database\r
#\r
# 1. Delete all old existing tables\r
# to avoid non-ascii charater conversion error\r
self.Conn.text_factory = str\r
self.Cur = self.Conn.cursor()\r
- \r
+\r
self.TblDataModel = TableDataModel(self.Cur)\r
self.TblFile = TableFile(self.Cur)\r
self.TblFunction = TableFunction(self.Cur)\r
self.TblDec = TableDec(self.Cur)\r
self.TblDsc = TableDsc(self.Cur)\r
self.TblFdf = TableFdf(self.Cur)\r
- \r
+\r
#\r
# Create new tables\r
#\r
self.TblDec.Create()\r
self.TblDsc.Create()\r
self.TblFdf.Create()\r
- \r
+\r
#\r
# Init each table's ID\r
#\r
self.TblDec.InitID()\r
self.TblDsc.InitID()\r
self.TblFdf.InitID()\r
- \r
+\r
#\r
# Initialize table DataModel\r
#\r
if NewDatabase:\r
self.TblDataModel.InitTable()\r
- \r
+\r
EdkLogger.verbose("Initialize ECC database ... DONE!")\r
\r
## Query a table\r
#\r
def QueryTable(self, Table):\r
Table.Query()\r
- \r
+\r
## Close entire database\r
#\r
# Commit all first\r
def Close(self):\r
#\r
# Commit to file\r
- # \r
+ #\r
self.Conn.commit()\r
- \r
+\r
#\r
# Close connection and cursor\r
#\r
self.Cur.close()\r
self.Conn.close()\r
- \r
+\r
## Insert one file information\r
#\r
# Insert one file's information to the database\r
# Insert a record for file\r
#\r
FileID = self.TblFile.Insert(File.Name, File.ExtName, File.Path, File.FullPath, Model = File.Model, TimeStamp = File.TimeStamp)\r
- IdTable = TableIdentifier(self.Cur)\r
- IdTable.Table = "Identifier%s" % FileID\r
- IdTable.Create()\r
\r
- #\r
- # Insert function of file\r
- #\r
- for Function in File.FunctionList:\r
- FunctionID = self.TblFunction.Insert(Function.Header, Function.Modifier, Function.Name, Function.ReturnStatement, \\r
- Function.StartLine, Function.StartColumn, Function.EndLine, Function.EndColumn, \\r
- Function.BodyStartLine, Function.BodyStartColumn, FileID, \\r
- Function.FunNameStartLine, Function.FunNameStartColumn)\r
+ if File.Model == DataClass.MODEL_FILE_C or File.Model == DataClass.MODEL_FILE_H:\r
+ IdTable = TableIdentifier(self.Cur)\r
+ IdTable.Table = "Identifier%s" % FileID\r
+ IdTable.Create()\r
+ #\r
+ # Insert function of file\r
+ #\r
+ for Function in File.FunctionList:\r
+ FunctionID = self.TblFunction.Insert(Function.Header, Function.Modifier, Function.Name, Function.ReturnStatement, \\r
+ Function.StartLine, Function.StartColumn, Function.EndLine, Function.EndColumn, \\r
+ Function.BodyStartLine, Function.BodyStartColumn, FileID, \\r
+ Function.FunNameStartLine, Function.FunNameStartColumn)\r
+ #\r
+ # Insert Identifier of function\r
+ #\r
+ for Identifier in Function.IdentifierList:\r
+ IdentifierID = IdTable.Insert(Identifier.Modifier, Identifier.Type, Identifier.Name, Identifier.Value, Identifier.Model, \\r
+ FileID, FunctionID, Identifier.StartLine, Identifier.StartColumn, Identifier.EndLine, Identifier.EndColumn)\r
+ #\r
+ # Insert Pcd of function\r
+ #\r
+ for Pcd in Function.PcdList:\r
+ PcdID = self.TblPcd.Insert(Pcd.CName, Pcd.TokenSpaceGuidCName, Pcd.Token, Pcd.DatumType, Pcd.Model, \\r
+ FileID, FunctionID, Pcd.StartLine, Pcd.StartColumn, Pcd.EndLine, Pcd.EndColumn)\r
#\r
- # Insert Identifier of function\r
+ # Insert Identifier of file\r
#\r
- for Identifier in Function.IdentifierList:\r
+ for Identifier in File.IdentifierList:\r
IdentifierID = IdTable.Insert(Identifier.Modifier, Identifier.Type, Identifier.Name, Identifier.Value, Identifier.Model, \\r
- FileID, FunctionID, Identifier.StartLine, Identifier.StartColumn, Identifier.EndLine, Identifier.EndColumn)\r
+ FileID, -1, Identifier.StartLine, Identifier.StartColumn, Identifier.EndLine, Identifier.EndColumn)\r
#\r
- # Insert Pcd of function\r
+ # Insert Pcd of file\r
#\r
- for Pcd in Function.PcdList:\r
+ for Pcd in File.PcdList:\r
PcdID = self.TblPcd.Insert(Pcd.CName, Pcd.TokenSpaceGuidCName, Pcd.Token, Pcd.DatumType, Pcd.Model, \\r
- FileID, FunctionID, Pcd.StartLine, Pcd.StartColumn, Pcd.EndLine, Pcd.EndColumn)\r
- #\r
- # Insert Identifier of file\r
- #\r
- for Identifier in File.IdentifierList:\r
- IdentifierID = IdTable.Insert(Identifier.Modifier, Identifier.Type, Identifier.Name, Identifier.Value, Identifier.Model, \\r
- FileID, -1, Identifier.StartLine, Identifier.StartColumn, Identifier.EndLine, Identifier.EndColumn)\r
- #\r
- # Insert Pcd of file\r
- #\r
- for Pcd in File.PcdList:\r
- PcdID = self.TblPcd.Insert(Pcd.CName, Pcd.TokenSpaceGuidCName, Pcd.Token, Pcd.DatumType, Pcd.Model, \\r
- FileID, -1, Pcd.StartLine, Pcd.StartColumn, Pcd.EndLine, Pcd.EndColumn)\r
- \r
+ FileID, -1, Pcd.StartLine, Pcd.StartColumn, Pcd.EndLine, Pcd.EndColumn)\r
+\r
EdkLogger.verbose("Insert information from file %s ... DONE!" % File.FullPath)\r
\r
## UpdateIdentifierBelongsToFunction\r
#\r
def UpdateIdentifierBelongsToFunction_disabled(self):\r
EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers started ...")\r
- \r
+\r
SqlCommand = """select ID, BelongsToFile, StartLine, EndLine, Model from Identifier"""\r
EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)\r
self.Cur.execute(SqlCommand)\r
# Check whether an identifier belongs to a function\r
#\r
EdkLogger.debug(4, "For common identifiers ... ")\r
- SqlCommand = """select ID from Function \r
+ SqlCommand = """select ID from Function\r
where StartLine < %s and EndLine > %s\r
and BelongsToFile = %s""" % (StartLine, EndLine, BelongsToFile)\r
EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)\r
SqlCommand = """Update Identifier set BelongsToFunction = %s where ID = %s""" % (ID[0], IdentifierID)\r
EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)\r
self.Cur.execute(SqlCommand)\r
- \r
+\r
#\r
# Check whether the identifier is a function header\r
#\r
- EdkLogger.debug(4, "For function headers ... ") \r
+ EdkLogger.debug(4, "For function headers ... ")\r
if Model == DataClass.MODEL_IDENTIFIER_COMMENT:\r
- SqlCommand = """select ID from Function \r
+ SqlCommand = """select ID from Function\r
where StartLine = %s + 1\r
and BelongsToFile = %s""" % (EndLine, BelongsToFile)\r
EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)\r
SqlCommand = """Update Identifier set BelongsToFunction = %s, Model = %s where ID = %s""" % (ID[0], DataClass.MODEL_IDENTIFIER_FUNCTION_HEADER, IdentifierID)\r
EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)\r
self.Cur.execute(SqlCommand)\r
- \r
+\r
EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers ... DONE")\r
\r
\r
#\r
def UpdateIdentifierBelongsToFunction(self):\r
EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers started ...")\r
- \r
+\r
SqlCommand = """select ID, BelongsToFile, StartLine, EndLine from Function"""\r
Records = self.TblFunction.Exec(SqlCommand)\r
Data1 = []\r
# self.Cur.executemany(SqlCommand, Data2)\r
#\r
# EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers ... DONE")\r
- \r
+\r
\r
##\r
#\r
#EdkLogger.SetLevel(EdkLogger.VERBOSE)\r
EdkLogger.SetLevel(EdkLogger.DEBUG_0)\r
EdkLogger.verbose("Start at " + time.strftime('%H:%M:%S', time.localtime()))\r
- \r
+\r
Db = Database(DATABASE_PATH)\r
Db.InitDatabase()\r
Db.QueryTable(Db.TblDataModel)\r
- \r
+\r
identifier1 = DataClass.IdentifierClass(-1, '', '', "i''1", 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 32, 43, 54, 43)\r
identifier2 = DataClass.IdentifierClass(-1, '', '', 'i1', 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 15, 43, 20, 43)\r
identifier3 = DataClass.IdentifierClass(-1, '', '', 'i1', 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 55, 43, 58, 43)\r
file = DataClass.FileClass(-1, 'F1', 'c', 'C:\\', 'C:\\F1.exe', DataClass.MODEL_FILE_C, '2007-12-28', [fun1], [identifier1, identifier2, identifier3, identifier4], [])\r
Db.InsertOneFile(file)\r
Db.UpdateIdentifierBelongsToFunction()\r
- \r
+\r
Db.QueryTable(Db.TblFile)\r
Db.QueryTable(Db.TblFunction)\r
Db.QueryTable(Db.TblPcd)\r
Db.QueryTable(Db.TblIdentifier)\r
- \r
+\r
Db.Close()\r
EdkLogger.verbose("End at " + time.strftime('%H:%M:%S', time.localtime()))\r
- \r
+\r
self.BuildMetaDataFileDatabase()\r
\r
EccGlobalData.gIdentifierTableList = GetTableList((MODEL_FILE_C, MODEL_FILE_H), 'Identifier', EccGlobalData.gDb)\r
+ EccGlobalData.gCFileList = GetFileList(MODEL_FILE_C, EccGlobalData.gDb)\r
+ EccGlobalData.gHFileList = GetFileList(MODEL_FILE_H, EccGlobalData.gDb)\r
\r
## BuildMetaDataFileDatabase\r
#\r
\r
if Options.Workspace:\r
os.environ["WORKSPACE"] = Options.Workspace\r
- \r
+\r
# Check workspace envirnoment\r
if "WORKSPACE" not in os.environ:\r
EdkLogger.error("ECC", BuildToolError.ATTRIBUTE_NOT_AVAILABLE, "Environment variable not found",\r
gConfig = None\r
gDb = None\r
gIdentifierTableList = []\r
+gCFileList = []\r
+gHFileList = []\r
gException = None
\ No newline at end of file
ERROR_GENERAL_CHECK_NO_PROGMA = 1005\r
ERROR_GENERAL_CHECK_CARRIAGE_RETURN = 1006\r
ERROR_GENERAL_CHECK_FILE_EXISTENCE = 1007\r
+ERROR_GENERAL_CHECK_NON_ACSII = 1008\r
\r
ERROR_SPACE_CHECK_ALL = 2000\r
\r
ERROR_GENERAL_CHECK_NO_PROGMA : """There should be no use of "#progma" in source file except "#pragma pack(#)\"""",\r
ERROR_GENERAL_CHECK_CARRIAGE_RETURN : "There should be a carriage return at the end of the file",\r
ERROR_GENERAL_CHECK_FILE_EXISTENCE : "File not found",\r
+ ERROR_GENERAL_CHECK_NON_ACSII : "File has invalid Non-ACSII char",\r
\r
ERROR_SPACE_CHECK_ALL : "",\r
\r
Filepath = os.path.normpath(Filepath)\r
SqlCommand = """\r
select Value1, FullPath from Inf, File where Inf.Model = %s and Inf.BelongsToFile in(\r
- select distinct B.BelongsToFile from File as A left join Inf as B \r
+ select distinct B.BelongsToFile from File as A left join Inf as B\r
where A.ID = B.BelongsToFile and B.Model = %s and (A.Path || '%s' || B.Value1) = '%s')\r
and Inf.BelongsToFile = File.ID""" \\r
% (MODEL_META_DATA_PACKAGE, MODEL_EFI_SOURCE_FILE, '\\', Filepath)\r
InfFullPath = os.path.normpath(os.path.join(WorkSpace, Record[1]))\r
(DecPath, DecName) = os.path.split(DecFullPath)\r
(InfPath, InfName) = os.path.split(InfFullPath)\r
- SqlCommand = """select Value1 from Dec where BelongsToFile = \r
+ SqlCommand = """select Value1 from Dec where BelongsToFile =\r
(select ID from File where FullPath = '%s') and Model = %s""" \\r
% (DecFullPath, MODEL_EFI_INCLUDE)\r
NewRecordSet = Db.TblDec.Exec(SqlCommand)\r
IncludePath = os.path.normpath(os.path.join(DecPath, NewRecord[0]))\r
if IncludePath not in IncludeList:\r
IncludeList.append(IncludePath)\r
- \r
+\r
return IncludeList\r
\r
+## Get the file list\r
+#\r
+# Search table file and find all specific type files\r
+#\r
+def GetFileList(FileModel, Db):\r
+ FileList = []\r
+ SqlCommand = """select FullPath from File where Model = %s""" % str(FileModel)\r
+ RecordSet = Db.TblFile.Exec(SqlCommand)\r
+ for Record in RecordSet:\r
+ FileList.append(Record[0])\r
+\r
+ return FileList\r
+\r
## Get the table list\r
#\r
# Search table file and find all small tables\r
for Record in RecordSet:\r
TableName = Table + str(Record[0])\r
TableList.append(TableName)\r
- \r
+\r
return TableList\r
\r
dirnames.append(Dirname)\r
\r
for f in filenames:\r
+ collector = None\r
FullName = os.path.normpath(os.path.join(dirpath, f))\r
+ model = DataClass.MODEL_FILE_OTHERS\r
if os.path.splitext(f)[1] in ('.h', '.c'):\r
EdkLogger.info("Parsing " + FullName)\r
model = f.endswith('c') and DataClass.MODEL_FILE_C or DataClass.MODEL_FILE_H\r
collector.CleanFileProfileBuffer()\r
collector.ParseFileWithClearedPPDirective()\r
# collector.PrintFragments()\r
- BaseName = os.path.basename(f)\r
- DirName = os.path.dirname(FullName)\r
- Ext = os.path.splitext(f)[1].lstrip('.')\r
- ModifiedTime = os.path.getmtime(FullName)\r
- FileObj = DataClass.FileClass(-1, BaseName, Ext, DirName, FullName, model, ModifiedTime, GetFunctionList(), GetIdentifierList(), [])\r
- FileObjList.append(FileObj)\r
+ BaseName = os.path.basename(f)\r
+ DirName = os.path.dirname(FullName)\r
+ Ext = os.path.splitext(f)[1].lstrip('.')\r
+ ModifiedTime = os.path.getmtime(FullName)\r
+ FileObj = DataClass.FileClass(-1, BaseName, Ext, DirName, FullName, model, ModifiedTime, GetFunctionList(), GetIdentifierList(), [])\r
+ FileObjList.append(FileObj)\r
+ if collector:\r
collector.CleanFileProfileBuffer()\r
\r
if len(ParseErrorFileList) > 0:\r
\r
Db = GetDB()\r
for file in FileObjList:\r
- Db.InsertOneFile(file)\r
+ if file.ExtName.upper() not in ['INF', 'DEC', 'DSC', 'FDF']:\r
+ Db.InsertOneFile(file)\r
\r
Db.UpdateIdentifierBelongsToFunction()\r
\r
from File\r
where FullPath like '%s'\r
""" % FullFileName\r
-\r
ResultSet = Db.TblFile.Exec(SqlStatement)\r
\r
FileID = -1\r
return FileID\r
\r
def GetIncludeFileList(FullFileName):\r
+ if os.path.splitext(FullFileName)[1].upper() not in ('.H'):\r
+ return []\r
IFList = IncludeFileListDict.get(FullFileName)\r
if IFList != None:\r
return IFList\r
FileTable = 'Identifier' + str(FileID)\r
SqlStatement = """ select Value, ID\r
from %s\r
- where Model = %d and StartLine = 1 and StartColumn = 0\r
+ where Model = %d and (StartLine = 1 or StartLine = 7 or StartLine = 8) and StartColumn = 0\r
""" % (FileTable, DataClass.MODEL_IDENTIFIER_COMMENT)\r
ResultSet = Db.TblFile.Exec(SqlStatement)\r
if len(ResultSet) == 0:\r
PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'No Comment appear at the very beginning of file.', 'File', FileID)\r
return ErrorMsgList\r
\r
+ IsFoundError1 = True\r
+ IsFoundError2 = True\r
+ IsFoundError3 = True\r
for Result in ResultSet:\r
- CommentStr = Result[0]\r
- if not CommentStr.startswith('/** @file'):\r
- PrintErrorMsg(ERROR_DOXYGEN_CHECK_FILE_HEADER, 'File header comment should begin with ""/** @file""', FileTable, Result[1])\r
- if not CommentStr.endswith('**/'):\r
- PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'File header comment should end with **/', FileTable, Result[1])\r
- if CommentStr.find('.') == -1:\r
- PrintErrorMsg(ERROR_DOXYGEN_CHECK_COMMENT_DESCRIPTION, 'Comment description should end with period \'.\'', FileTable, Result[1])\r
+ CommentStr = Result[0].strip()\r
+ ID = Result[1]\r
+ if CommentStr.startswith('/** @file'):\r
+ IsFoundError1 = False\r
+ if CommentStr.endswith('**/'):\r
+ IsFoundError2 = False\r
+ if CommentStr.find('.') != -1:\r
+ IsFoundError3 = False\r
+\r
+ if IsFoundError1:\r
+ PrintErrorMsg(ERROR_DOXYGEN_CHECK_FILE_HEADER, 'File header comment should begin with ""/** @file""', FileTable, ID)\r
+ if IsFoundError2:\r
+ PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'File header comment should end with ""**/""', FileTable, ID)\r
+ if IsFoundError3:\r
+ PrintErrorMsg(ERROR_DOXYGEN_CHECK_COMMENT_DESCRIPTION, 'Comment description should end with period "".""', FileTable, ID)\r
\r
def CheckFuncHeaderDoxygenComments(FullFileName):\r
ErrorMsgList = []\r
# Identify to if check all items\r
# 1 - Check all items and ignore all other detailed items\r
# 0 - Not check all items, the tool will go through all other detailed items to decide to check or not\r
-# \r
+#\r
CheckAll = 0\r
\r
#\r
GeneralCheckCarriageReturn = 1\r
# Check whether the file exists\r
GeneralCheckFileExistence = 1\r
+# Check whether file has non ACSII char\r
+GeneralCheckNonAcsii = 1\r
\r
#\r
# Space Checking\r
\r
# Check whether Boolean values, variable type BOOLEAN not use explicit comparisons to TRUE or FALSE\r
PredicateExpressionCheckBooleanValue = 1\r
-# Check whether Non-Boolean comparisons use a compare operator (==, !=, >, < >=, <=). \r
+# Check whether Non-Boolean comparisons use a compare operator (==, !=, >, < >=, <=).\r
PredicateExpressionCheckNonBooleanOperator = 1\r
# Check whether a comparison of any pointer to zero must be done via the NULL type\r
PredicateExpressionCheckComparisonNullType = 1\r
DoxygenCheckFileHeader = 1\r
# Check whether the function headers are followed Doxygen special documentation blocks in section 2.3.5\r
DoxygenCheckFunctionHeader = 1\r
-# Check whether the first line of text in a comment block is a brief description of the element being documented. \r
+# Check whether the first line of text in a comment block is a brief description of the element being documented.\r
# The brief description must end with a period.\r
DoxygenCheckCommentDescription = 1\r
# Check whether comment lines with '///< ... text ...' format, if it is used, it should be after the code section.\r
MetaDataFileCheckGenerateFileList = 1\r
# The path of log file\r
MetaDataFileCheckPathOfGenerateFileList = File.log\r
-# Check whether all Library Instances defined for a given module (or dependent library instance) match the module's type. \r
-# Each Library Instance must specify the Supported Module Types in its INF file, \r
+# Check whether all Library Instances defined for a given module (or dependent library instance) match the module's type.\r
+# Each Library Instance must specify the Supported Module Types in its INF file,\r
# and any module specifying the library instance must be one of the supported types.\r
MetaDataFileCheckLibraryInstance = 1\r
# Check whether a Library Instance has been defined for all dependent library classes\r
# GotoStatementCheckAll = 0\r
# SpellingCheckAll = 0\r
#\r
+\r
+# A list for binary file ext name\r
+BinaryExtList = EXE, EFI, FV, ROM, DLL, COM, BMP, GIF, PYD, CMP, BIN, JPG, UNI, RAW, COM2, LIB, DEPEX, SYS, DB\r
## @file\r
-# Windows makefile for Python tools build.\r
+# Linux makefile for Python tools build.\r
#\r
# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>\r
# This program and the accompanying materials\r
raise Warning("Value %s is not a number", self.FileName, Line)\r
\r
for Profile in AllMacroList:\r
- if Profile.FileName == FileLineTuple[0] and Profile.MacroName == Name and Profile.DefinedAtLine <= FileLineTuple[1]:\r
+ if Profile.MacroName == Name and Profile.DefinedAtLine <= FileLineTuple[1]:\r
if Op == None:\r
if Value == 'Bool' and Profile.MacroValue == None or Profile.MacroValue.upper() == 'FALSE':\r
return False\r
#\r
\r
PathClassObj = PathClass(self.InfFileName, GenFdsGlobalVariable.WorkSpaceDir)\r
- ErrorCode, ErrorInfo = PathClassObj.Validate()\r
+ ErrorCode, ErrorInfo = PathClassObj.Validate(".inf")\r
if ErrorCode != 0:\r
EdkLogger.error("GenFds", ErrorCode, ExtraData=ErrorInfo)\r
\r
if len(PlatformArchList) == 0:\r
self.InDsc = False\r
PathClassObj = PathClass(self.InfFileName, GenFdsGlobalVariable.WorkSpaceDir)\r
- ErrorCode, ErrorInfo = PathClassObj.Validate()\r
+ ErrorCode, ErrorInfo = PathClassObj.Validate(".inf")\r
if ErrorCode != 0:\r
EdkLogger.error("GenFds", ErrorCode, ExtraData=ErrorInfo)\r
if len(ArchList) == 1:\r
"""call Workspace build create database"""
os.environ["WORKSPACE"] = Workspace
+ FdfParser.InputMacroDict["WORKSPACE"] = Workspace
BuildWorkSpace = WorkspaceDatabase(':memory:', FdfParser.InputMacroDict)
BuildWorkSpace.InitDatabase()
BIN_DIR=$(EDK_TOOLS_PATH)\Bin\Win32\r
\r
\r
-APPLICATIONS=$(BIN_DIR)\build.exe $(BIN_DIR)\GenFds.exe $(BIN_DIR)\Trim.exe $(BIN_DIR)\MigrationMsa2Inf.exe $(BIN_DIR)\Fpd2Dsc.exe $(BIN_DIR)\TargetTool.exe $(BIN_DIR)\spd2dec.exe $(BIN_DIR)\GenDepex.exe $(BIN_DIR)\GenPatchPcdTable.exe $(BIN_DIR)\PatchPcdValue.exe\r
+APPLICATIONS=$(BIN_DIR)\build.exe $(BIN_DIR)\GenFds.exe $(BIN_DIR)\Trim.exe $(BIN_DIR)\MigrationMsa2Inf.exe $(BIN_DIR)\Fpd2Dsc.exe $(BIN_DIR)\TargetTool.exe $(BIN_DIR)\spd2dec.exe $(BIN_DIR)\GenDepex.exe $(BIN_DIR)\GenPatchPcdTable.exe $(BIN_DIR)\PatchPcdValue.exe $(BIN_DIR)\BPDG.exe\r
\r
COMMON_PYTHON=$(BASE_TOOLS_PATH)\Source\Python\Common\BuildToolError.py \\r
$(BASE_TOOLS_PATH)\Source\Python\Common\Database.py \\r
$(BASE_TOOLS_PATH)\Source\Python\Common\String.py \\r
$(BASE_TOOLS_PATH)\Source\Python\Common\TargetTxtClassObject.py \\r
$(BASE_TOOLS_PATH)\Source\Python\Common\ToolDefClassObject.py \\r
+ $(BASE_TOOLS_PATH)\Source\Python\Common\VpdInfoFile.py \\r
$(BASE_TOOLS_PATH)\Source\Python\Common\XmlParser.py \\r
$(BASE_TOOLS_PATH)\Source\Python\Common\XmlRoutines.py \\r
$(BASE_TOOLS_PATH)\Source\Python\Common\__init__.py \\r
$(BASE_TOOLS_PATH)\Source\Python\Autogen\GenMake.py \\r
$(BASE_TOOLS_PATH)\Source\Python\Autogen\StrGather.py \\r
$(BASE_TOOLS_PATH)\Source\Python\Autogen\UniClassObject.py \\r
- $(BASE_TOOLS_PATH)\Source\Python\Autogen\__init__.py \r
+ $(BASE_TOOLS_PATH)\Source\Python\Autogen\__init__.py\r
\r
\r
all: SetPythonPath $(APPLICATIONS)\r
$(BIN_DIR)\PatchPcdValue.exe: $(BASE_TOOLS_PATH)\Source\Python\PatchPcdValue\PatchPcdValue.py $(COMMON_PYTHON)\r
@pushd . & @cd PatchPcdValue & @$(FREEZE) --include-modules=$(MODULES) --install-dir=$(BIN_DIR) PatchPcdValue.py & @popd\r
\r
+$(BIN_DIR)\BPDG.exe: $(BASE_TOOLS_PATH)\Source\Python\BPDG\BPDG.py $(COMMON_PYTHON)\r
+ @pushd . & @cd BPDG & @$(FREEZE) --include-modules=$(MODULES) --install-dir=$(BIN_DIR) BPDG.py & @popd\r
+ \r
clean:\r
cleanall: \r
@del /f /q $(BIN_DIR)\*.pyd $(BIN_DIR)\*.dll\r
## @file\r
# This file is used to define each component of the build database\r
#\r
-# Copyright (c) 2007 - 2008, Intel Corporation. All rights reserved.<BR>\r
+# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>\r
# This program and the accompanying materials\r
# are licensed and made available under the terms and conditions of the BSD License\r
# which accompanies this distribution. The full text of the license may be found at\r
# @param MaxDatumSize: Input value for MaxDatumSize of Pcd, default is None\r
# @param SkuInfoList: Input value for SkuInfoList of Pcd, default is {}\r
# @param IsOverrided: Input value for IsOverrided of Pcd, default is False\r
+# @param GuidValue: Input value for TokenSpaceGuidValue of Pcd, default is None\r
#\r
# @var TokenCName: To store value for TokenCName\r
# @var TokenSpaceGuidCName: To store value for TokenSpaceGuidCName\r
# @var Phase: To store value for Phase, default is "DXE"\r
#\r
class PcdClassObject(object):\r
- def __init__(self, Name = None, Guid = None, Type = None, DatumType = None, Value = None, Token = None, MaxDatumSize = None, SkuInfoList = {}, GuidValue = None):\r
+ def __init__(self, Name = None, Guid = None, Type = None, DatumType = None, Value = None, Token = None, MaxDatumSize = None, SkuInfoList = {}, IsOverrided = False, GuidValue = None):\r
self.TokenCName = Name\r
self.TokenSpaceGuidCName = Guid\r
self.TokenSpaceGuidValue = GuidValue\r
self.SkuInfoList = SkuInfoList\r
self.Phase = "DXE"\r
self.Pending = False\r
-\r
+ self.IsOverrided = IsOverrided\r
+ \r
## Convert the class to a string\r
#\r
# Convert each member of the class to string\r
'MaxDatumSize=' + str(self.MaxDatumSize) + ', '\r
for Item in self.SkuInfoList.values():\r
Rtn = Rtn + 'SkuId=' + Item.SkuId + ', ' + 'SkuIdName=' + Item.SkuIdName\r
- Rtn = Rtn + str(self.IsOverrided)\r
+ Rtn = Rtn + ', IsOverrided=' + str(self.IsOverrided)\r
\r
return Rtn\r
\r
self.MetaFile = FilePath
self._FileDir = os.path.dirname(self.MetaFile)
self._Macros = copy.copy(Macros)
+ self._Macros["WORKSPACE"] = os.environ["WORKSPACE"]
# for recursive parsing
self._Owner = Owner
## [FixedPcd], [FeaturePcd], [PatchPcd], [Pcd] and [PcdEx] sections parser
def _PcdParser(self):
TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 1)
- self._ValueList[0:1] = GetSplitValueList(TokenList[0], TAB_SPLIT)
+ ValueList = GetSplitValueList(TokenList[0], TAB_SPLIT)
+ if len(ValueList) != 2:
+ EdkLogger.error('Parser', FORMAT_INVALID, "Illegal token space GUID and PCD name format",
+ ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<PcdCName>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ self._ValueList[0:1] = ValueList
if len(TokenList) > 1:
self._ValueList[2] = TokenList[1]
if self._ValueList[0] == '' or self._ValueList[1] == '':
# sections which allow "!include" directive
_IncludeAllowedSection = [
+ TAB_COMMON_DEFINES.upper(),
TAB_LIBRARIES.upper(),
TAB_LIBRARY_CLASSES.upper(),
TAB_SKUIDS.upper(),
continue
# file private macros
elif Line.upper().startswith('DEFINE '):
- self._MacroParser()
+ (Name, Value) = self._MacroParser()
+ # Make the defined macro in DSC [Defines] section also
+ # available for FDF file.
+ if self._SectionName == TAB_COMMON_DEFINES.upper():
+ self._LastItem = self._Store(
+ MODEL_META_DATA_GLOBAL_DEFINE,
+ Name,
+ Value,
+ '',
+ 'COMMON',
+ 'COMMON',
+ self._Owner,
+ self._From,
+ self._LineIndex+1,
+ -1,
+ self._LineIndex+1,
+ -1,
+ self._Enabled
+ )
continue
elif Line.upper().startswith('EDK_GLOBAL '):
(Name, Value) = self._MacroParser()
if TokenList[0] in ['FLASH_DEFINITION', 'OUTPUT_DIRECTORY']:
TokenList[1] = NormPath(TokenList[1], self._Macros)
self._ValueList[0:len(TokenList)] = TokenList
+ # Treat elements in the [defines] section as global macros for FDF file.
+ self._LastItem = self._Store(
+ MODEL_META_DATA_GLOBAL_DEFINE,
+ TokenList[0],
+ TokenList[1],
+ '',
+ 'COMMON',
+ 'COMMON',
+ self._Owner,
+ self._From,
+ self._LineIndex+1,
+ -1,
+ self._LineIndex+1,
+ -1,
+ self._Enabled
+ )
## <subsection_header> parser
def _SubsectionHeaderParser(self):
EdkLogger.error("Parser", FORMAT_INVALID, File=self.MetaFile, Line=self._LineIndex+1,
ExtraData="'!include' is not allowed under section [%s]" % self._SectionName)
# the included file must be relative to the parsing file
- IncludedFile = os.path.join(self._FileDir, self._ValueList[1])
+ IncludedFile = os.path.join(self._FileDir, NormPath(self._ValueList[1], self._Macros))
Parser = DscParser(IncludedFile, self._FileType, self._Table, self._Macros, From=self._LastItem)
# set the parser status with current status
Parser._SectionName = self._SectionName
self._SectionType = Parser._SectionType
self._Scope = Parser._Scope
self._Enabled = Parser._Enabled
+ self._Macros.update(Parser._Macros)
else:
if DirectiveName in ["!IF", "!IFDEF", "!IFNDEF"]:
# evaluate the expression
#
def __init__(self, FilePath, FileType, Table, Macro=None):
MetaFileParser.__init__(self, FilePath, FileType, Table, Macro, -1)
+ self._Comments = []
## Parser starter
def Start(self):
EdkLogger.error("Parser", FILE_READ_FAILURE, ExtraData=self.MetaFile)
for Index in range(0, len(self._Content)):
- Line = CleanString(self._Content[Index])
+ Line, Comment = CleanString2(self._Content[Index])
+ self._CurrentLine = Line
+ self._LineIndex = Index
+
+ # save comment for later use
+ if Comment:
+ self._Comments.append((Comment, self._LineIndex+1))
# skip empty line
if Line == '':
continue
- self._CurrentLine = Line
- self._LineIndex = Index
# section header
if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END:
self._SectionHeaderParser()
+ self._Comments = []
continue
elif Line.startswith('DEFINE '):
self._MacroParser()
continue
elif len(self._SectionType) == 0:
+ self._Comments = []
continue
# section content
self._ValueList = ['','','']
self._SectionParser[self._SectionType[0]](self)
if self._ValueList == None:
+ self._Comments = []
continue
#
-1,
0
)
+ for Comment, LineNo in self._Comments:
+ self._Store(
+ MODEL_META_DATA_COMMENT,
+ Comment,
+ self._ValueList[0],
+ self._ValueList[1],
+ Arch,
+ ModuleType,
+ self._LastItem,
+ LineNo,
+ -1,
+ LineNo,
+ -1,
+ 0
+ )
+ self._Comments = []
self._Done()
## Section header parser
import os\r
import os.path\r
import pickle\r
+import uuid\r
\r
import Common.EdkLogger as EdkLogger\r
import Common.GlobalData as GlobalData\r
RecordList = self._RawData[MODEL_META_DATA_DEFINE, self._Arch]\r
for Record in RecordList:\r
GlobalData.gEdkGlobal[Record[0]] = Record[1]\r
+ \r
+ RecordList = self._RawData[MODEL_META_DATA_GLOBAL_DEFINE, self._Arch]\r
+ for Record in RecordList:\r
+ GlobalData.gGlobalDefines[Record[0]] = Record[1]\r
\r
## XXX[key] = value\r
def __setitem__(self, key, value):\r
self._Pcds = None\r
self._BuildOptions = None\r
self._LoadFixAddress = None\r
+ self._VpdToolGuid = None\r
+ self._VpdFileName = None\r
\r
## Get architecture\r
def _GetArch(self):\r
self._SkuName = Record[1]\r
elif Name == TAB_FIX_LOAD_TOP_MEMORY_ADDRESS:\r
self._LoadFixAddress = Record[1]\r
+ elif Name == TAB_DSC_DEFINES_VPD_TOOL_GUID:\r
+ #\r
+ # try to convert GUID to a real UUID value to see whether the GUID is format \r
+ # for VPD_TOOL_GUID is correct.\r
+ #\r
+ try:\r
+ uuid.UUID(Record[1])\r
+ except:\r
+ EdkLogger.error("build", FORMAT_INVALID, "Invalid GUID format for VPD_TOOL_GUID", File=self.MetaFile)\r
+ self._VpdToolGuid = Record[1] \r
+ elif Name == TAB_DSC_DEFINES_VPD_FILENAME:\r
+ self._VpdFileName = Record[1] \r
# set _Header to non-None in order to avoid database re-querying\r
self._Header = 'DUMMY'\r
\r
def _SetSkuName(self, Value):\r
if Value in self.SkuIds:\r
self._SkuName = Value\r
+ # Needs to re-retrieve the PCD information\r
+ self._Pcds = None\r
\r
def _GetFdfFile(self):\r
if self._FlashDefinition == None:\r
self._LoadFixAddress = ''\r
return self._LoadFixAddress\r
\r
+ ## Retrieve the GUID string for VPD tool\r
+ def _GetVpdToolGuid(self):\r
+ if self._VpdToolGuid == None:\r
+ if self._Header == None:\r
+ self._GetHeaderInfo()\r
+ if self._VpdToolGuid == None:\r
+ self._VpdToolGuid = ''\r
+ return self._VpdToolGuid\r
+ \r
+ ## Retrieve the VPD file Name, this is optional in DSC file\r
+ def _GetVpdFileName(self):\r
+ if self._VpdFileName == None:\r
+ if self._Header == None:\r
+ self._GetHeaderInfo()\r
+ if self._VpdFileName == None:\r
+ self._VpdFileName = ''\r
+ return self._VpdFileName \r
+ \r
## Retrieve [SkuIds] section information\r
def _GetSkuIds(self):\r
if self._SkuIds == None:\r
'',\r
MaxDatumSize,\r
{},\r
+ False,\r
None\r
)\r
Module.Pcds[PcdCName, TokenSpaceGuid] = Pcd\r
'',\r
MaxDatumSize,\r
{},\r
+ False,\r
None\r
)\r
return Pcds\r
'',\r
MaxDatumSize,\r
{self.SkuName : SkuInfo},\r
+ False,\r
None\r
)\r
return Pcds\r
'',\r
'',\r
{self.SkuName : SkuInfo},\r
+ False,\r
None\r
)\r
return Pcds\r
PcdDict[Arch, SkuName, PcdCName, TokenSpaceGuid] = Setting\r
# Remove redundant PCD candidates, per the ARCH and SKU\r
for PcdCName, TokenSpaceGuid in PcdSet:\r
- ValueList = ['', '']\r
+ ValueList = ['', '', '']\r
Setting = PcdDict[self._Arch, self.SkuName, PcdCName, TokenSpaceGuid]\r
if Setting == None:\r
continue\r
TokenList = Setting.split(TAB_VALUE_SPLIT)\r
ValueList[0:len(TokenList)] = TokenList\r
- VpdOffset, MaxDatumSize = ValueList\r
-\r
- SkuInfo = SkuInfoClass(self.SkuName, self.SkuIds[self.SkuName], '', '', '', '', VpdOffset)\r
+ #\r
+ # For the VOID* type, it can have optional data of MaxDatumSize and InitialValue\r
+ # For the Integer & Boolean type, the optional data can only be InitialValue.\r
+ # At this point, we put all the data into the PcdClssObject for we don't know the PCD's datumtype\r
+ # until the DEC parser has been called.\r
+ # \r
+ VpdOffset, MaxDatumSize, InitialValue = ValueList\r
+\r
+ SkuInfo = SkuInfoClass(self.SkuName, self.SkuIds[self.SkuName], '', '', '', '', VpdOffset, InitialValue)\r
Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject(\r
PcdCName,\r
TokenSpaceGuid,\r
'',\r
MaxDatumSize,\r
{self.SkuName : SkuInfo},\r
+ False,\r
None\r
)\r
return Pcds\r
#\r
def AddPcd(self, Name, Guid, Value):\r
if (Name, Guid) not in self.Pcds:\r
- self.Pcds[Name, Guid] = PcdClassObject(Name, Guid, '', '', '', '', '', {}, None)\r
+ self.Pcds[Name, Guid] = PcdClassObject(Name, Guid, '', '', '', '', '', {}, False, None)\r
self.Pcds[Name, Guid].DefaultValue = Value\r
\r
Arch = property(_GetArch, _SetArch)\r
BsBaseAddress = property(_GetBsBaseAddress)\r
RtBaseAddress = property(_GetRtBaseAddress)\r
LoadFixAddress = property(_GetLoadFixAddress)\r
-\r
+ VpdToolGuid = property(_GetVpdToolGuid)\r
+ VpdFileName = property(_GetVpdFileName) \r
SkuIds = property(_GetSkuIds)\r
Modules = property(_GetModules)\r
LibraryInstances = property(_GetLibraryInstances)\r
Pcds = property(_GetPcds)\r
BuildOptions = property(_GetBuildOptions)\r
\r
-## Platform build information from DSC file\r
+## Platform build information from DEC file\r
#\r
# This class is used to retrieve information stored in database and convert them\r
# into PackageBuildClassObject form for easier use for AutoGen.\r
TAB_DEC_DEFINES_PACKAGE_NAME : "_PackageName",\r
TAB_DEC_DEFINES_PACKAGE_GUID : "_Guid",\r
TAB_DEC_DEFINES_PACKAGE_VERSION : "_Version",\r
+ TAB_DEC_DEFINES_PKG_UNI_FILE : "_PkgUniFile",\r
}\r
\r
\r
self._PackageName = None\r
self._Guid = None\r
self._Version = None\r
+ self._PkgUniFile = None\r
self._Protocols = None\r
self._Ppis = None\r
self._Guids = None\r
TokenNumber,\r
'',\r
{},\r
+ False,\r
None\r
)\r
return Pcds\r
'',\r
'',\r
{},\r
+ False,\r
self.Guids[TokenSpaceGuid]\r
)\r
\r
# "FixedAtBuild", "PatchableInModule", "FeatureFlag", "Dynamic", "DynamicEx"\r
#\r
PcdType = self._PCD_TYPE_STRING_[Type]\r
- if Type in [MODEL_PCD_DYNAMIC, MODEL_PCD_DYNAMIC_EX]:\r
+ if Type == MODEL_PCD_DYNAMIC:\r
Pcd.Pending = True\r
for T in ["FixedAtBuild", "PatchableInModule", "FeatureFlag", "Dynamic", "DynamicEx"]:\r
if (PcdCName, TokenSpaceGuid, T) in Package.Pcds:\r
\r
## Database\r
#\r
-# This class defined the build databse for all modules, packages and platform.\r
+# This class defined the build database for all modules, packages and platform.\r
# It will call corresponding parser for the given file if it cannot find it in\r
# the database.\r
#\r
import traceback\r
import sys\r
import time\r
+import struct\r
from datetime import datetime\r
from StringIO import StringIO\r
from Common import EdkLogger\r
'SMM_DRIVER' : '0xA (SMM)', # Extension of module type to support PI 1.1 SMM drivers\r
}\r
\r
+## The look up table of the supported opcode in the dependency expression binaries\r
+gOpCodeList = ["BEFORE", "AFTER", "PUSH", "AND", "OR", "NOT", "TRUE", "FALSE", "END", "SOR"]\r
+\r
##\r
# Writes a string to the file object.\r
#\r
IncludeFiles[FullFileName.lower().replace("\\", "/")] = FullFileName\r
break\r
\r
+##\r
+# Parse binary dependency expression section\r
+#\r
+# This utility class parses the dependency expression section and translate the readable\r
+# GUID name and value.\r
+#\r
+class DepexParser(object):\r
+ ##\r
+ # Constructor function for class DepexParser\r
+ #\r
+ # This constructor function collect GUID values so that the readable\r
+ # GUID name can be translated.\r
+ #\r
+ # @param self The object pointer\r
+ # @param Wa Workspace context information\r
+ #\r
+ def __init__(self, Wa):\r
+ self._GuidDb = {}\r
+ for Package in Wa.BuildDatabase.WorkspaceDb.PackageList:\r
+ for Protocol in Package.Protocols:\r
+ GuidValue = GuidStructureStringToGuidString(Package.Protocols[Protocol])\r
+ self._GuidDb[GuidValue.upper()] = Protocol\r
+ for Ppi in Package.Ppis:\r
+ GuidValue = GuidStructureStringToGuidString(Package.Ppis[Ppi])\r
+ self._GuidDb[GuidValue.upper()] = Ppi\r
+ for Guid in Package.Guids:\r
+ GuidValue = GuidStructureStringToGuidString(Package.Guids[Guid])\r
+ self._GuidDb[GuidValue.upper()] = Guid\r
+ \r
+ ##\r
+ # Parse the binary dependency expression files.\r
+ # \r
+ # This function parses the binary dependency expression file and translate it\r
+ # to the instruction list.\r
+ #\r
+ # @param self The object pointer\r
+ # @param DepexFileName The file name of binary dependency expression file.\r
+ #\r
+ def ParseDepexFile(self, DepexFileName):\r
+ DepexFile = open(DepexFileName, "rb")\r
+ DepexStatement = []\r
+ OpCode = DepexFile.read(1)\r
+ while OpCode:\r
+ Statement = gOpCodeList[struct.unpack("B", OpCode)[0]]\r
+ if Statement in ["BEFORE", "AFTER", "PUSH"]:\r
+ GuidValue = "%08X-%04X-%04X-%02X%02X-%02X%02X%02X%02X%02X%02X" % \\r
+ struct.unpack("LHHBBBBBBBB", DepexFile.read(16))\r
+ GuidString = self._GuidDb.get(GuidValue, GuidValue)\r
+ Statement = "%s %s" % (Statement, GuidString)\r
+ DepexStatement.append(Statement)\r
+ OpCode = DepexFile.read(1) \r
+ \r
+ return DepexStatement\r
+ \r
##\r
# Reports library information\r
#\r
#\r
def __init__(self, M):\r
self.Depex = ""\r
+ self._DepexFileName = os.path.join(M.BuildDir, "OUTPUT", M.Module.BaseName + ".depex") \r
ModuleType = M.ModuleType\r
if not ModuleType:\r
ModuleType = gComponentType2ModuleType.get(M.ComponentType, "")\r
#\r
# This function generates report for the module dependency expression.\r
#\r
- # @param self The object pointer\r
- # @param File The file object for report\r
+ # @param self The object pointer\r
+ # @param File The file object for report\r
+ # @param GlobalDepexParser The platform global Dependency expression parser object\r
#\r
- def GenerateReport(self, File):\r
+ def GenerateReport(self, File, GlobalDepexParser):\r
if not self.Depex:\r
return\r
- \r
+\r
FileWrite(File, gSubSectionStart)\r
+ if os.path.isfile(self._DepexFileName):\r
+ try:\r
+ DepexStatements = GlobalDepexParser.ParseDepexFile(self._DepexFileName)\r
+ FileWrite(File, "Final Dependency Expression (DEPEX) Instructions")\r
+ for DepexStatement in DepexStatements:\r
+ FileWrite(File, " %s" % DepexStatement)\r
+ FileWrite(File, gSubSectionSep)\r
+ except:\r
+ EdkLogger.warn(None, "Dependency expression file is corrupted", self._DepexFileName)\r
+ \r
FileWrite(File, "Dependency Expression (DEPEX) from %s" % self.Source)\r
\r
if self.Source == "INF":\r
# This function generates report for separate module expression\r
# in a platform build.\r
#\r
- # @param self The object pointer\r
- # @param File The file object for report\r
- # @param GlobalPcdReport The platform global PCD class object\r
- # @param ReportType The kind of report items in the final report file\r
+ # @param self The object pointer\r
+ # @param File The file object for report\r
+ # @param GlobalPcdReport The platform global PCD report object\r
+ # @param GlobalPredictionReport The platform global Prediction report object\r
+ # @param GlobalDepexParser The platform global Dependency expression parser object\r
+ # @param ReportType The kind of report items in the final report file\r
#\r
- def GenerateReport(self, File, GlobalPcdReport, GlobalPredictionReport, ReportType):\r
+ def GenerateReport(self, File, GlobalPcdReport, GlobalPredictionReport, GlobalDepexParser, ReportType):\r
FileWrite(File, gSectionStart)\r
\r
FwReportFileName = os.path.join(self._BuildDir, "DEBUG", self.ModuleName + ".txt")\r
self.LibraryReport.GenerateReport(File)\r
\r
if "DEPEX" in ReportType:\r
- self.DepexReport.GenerateReport(File)\r
+ self.DepexReport.GenerateReport(File, GlobalDepexParser)\r
\r
if "BUILD_FLAGS" in ReportType:\r
self.BuildFlagsReport.GenerateReport(File)\r
if "FIXED_ADDRESS" in ReportType or "EXECUTION_ORDER" in ReportType:\r
self.PredictionReport = PredictionReport(Wa)\r
\r
+ self.DepexParser = None\r
+ if "DEPEX" in ReportType:\r
+ self.DepexParser = DepexParser(Wa)\r
+ \r
self.ModuleReportList = []\r
if MaList != None:\r
self._IsModuleBuild = True\r
FdReportListItem.GenerateReport(File)\r
\r
for ModuleReportItem in self.ModuleReportList:\r
- ModuleReportItem.GenerateReport(File, self.PcdReport, self.PredictionReport, ReportType)\r
+ ModuleReportItem.GenerateReport(File, self.PcdReport, self.PredictionReport, self.DepexParser, ReportType)\r
\r
if not self._IsModuleBuild:\r
if "EXECUTION_ORDER" in ReportType:\r
import time\r
import platform\r
import traceback\r
+import encodings.ascii \r
\r
from struct import *\r
from threading import *\r
self.LoadFixAddress = 0\r
self.UniFlag = UniFlag\r
\r
- # print dot charater during doing some time-consuming work\r
+ # print dot character during doing some time-consuming work\r
self.Progress = Utils.Progressor()\r
\r
# parse target.txt, tools_def.txt, and platform file\r
if len (SmmModuleList) > 0:\r
MapBuffer.write('SMM_CODE_PAGE_NUMBER = 0x%x\n' % (SmmSize/0x1000))\r
\r
- PeiBaseAddr = TopMemoryAddress - RtSize - BtSize\r
+ PeiBaseAddr = TopMemoryAddress - RtSize - BtSize \r
BtBaseAddr = TopMemoryAddress - RtSize\r
- RtBaseAddr = TopMemoryAddress - ReservedRuntimeMemorySize\r
+ RtBaseAddr = TopMemoryAddress - ReservedRuntimeMemorySize \r
\r
self._RebaseModule (MapBuffer, PeiBaseAddr, PeiModuleList, TopMemoryAddress == 0)\r
self._RebaseModule (MapBuffer, BtBaseAddr, BtModuleList, TopMemoryAddress == 0)\r