X-Git-Url: https://git.proxmox.com/?p=mirror_edk2.git;a=blobdiff_plain;f=BaseTools%2FSource%2FPython%2FWorkspace%2FMetaFileParser.py;h=a2ded0c845ae49bfd6e880ffc42c549592df29f7;hp=451d8547d22b3ffcc132de96b930f4114ebaf6a4;hb=46e4b3940e2f1862aa605900616a543a43f17222;hpb=47fea6afd74af76c7e2a2b03d319b7ac035ac26a

diff --git a/BaseTools/Source/Python/Workspace/MetaFileParser.py b/BaseTools/Source/Python/Workspace/MetaFileParser.py
index 451d8547d2..a2ded0c845 100644
--- a/BaseTools/Source/Python/Workspace/MetaFileParser.py
+++ b/BaseTools/Source/Python/Workspace/MetaFileParser.py
@@ -1,8 +1,8 @@
 ## @file
 # This file is used to parse meta files
 #
-# Copyright (c) 2008 - 2015, Intel Corporation. All rights reserved.
-# Copyright (c) 2015, Hewlett Packard Enterprise Development, L.P.
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.
+# (C) Copyright 2015-2018 Hewlett Packard Enterprise Development LP
# This program and the accompanying materials # are licensed and made available under the terms and conditions of the BSD License # which accompanies this distribution. The full text of the license may be found at @@ -19,21 +19,26 @@ import Common.LongFilePathOs as os import re import time import copy +import md5 import Common.EdkLogger as EdkLogger import Common.GlobalData as GlobalData from CommonDataClass.DataClass import * from Common.DataType import * -from Common.String import * -from Common.Misc import GuidStructureStringToGuidString, CheckPcdDatum, PathClass, AnalyzePcdData, AnalyzeDscPcd +from Common.StringUtils import * +from Common.Misc import GuidStructureStringToGuidString, CheckPcdDatum, PathClass, AnalyzePcdData, AnalyzeDscPcd, AnalyzePcdExpression, ParseFieldValue from Common.Expression import * from CommonDataClass.Exceptions import * from Common.LongFilePathSupport import OpenLongFilePath as open - +from collections import defaultdict from MetaFileTable import MetaFileStorage from MetaFileCommentParser import CheckInfComment +## RegEx for finding file versions +hexVersionPattern = re.compile(r'0[xX][\da-f-A-F]{5,8}') +decVersionPattern = re.compile(r'\d+\.\d+') + ## A decorator used to parse macro definition def ParseMacro(Parser): def MacroParser(self): @@ -144,20 +149,21 @@ class MetaFileParser(object): # # @param FilePath The path of platform description file # @param FileType The raw data of DSC file + # @param Arch Default Arch value for filtering sections # @param Table Database used to retrieve module/package information - # @param Macros Macros used for replacement in file # @param Owner Owner ID (for sub-section parsing) # @param From ID from which the data comes (for !INCLUDE directive) # - def __init__(self, FilePath, FileType, Table, Owner= -1, From= -1): + def __init__(self, FilePath, FileType, Arch, Table, Owner= -1, From= -1): self._Table = Table self._RawTable = Table + self._Arch = Arch self._FileType = FileType self.MetaFile = FilePath self._FileDir = self.MetaFile.Dir self._Defines = {} self._FileLocalMacros = {} - self._SectionsMacroDict = {} + self._SectionsMacroDict = defaultdict(dict) # for recursive parsing self._Owner = [Owner] @@ -180,6 +186,7 @@ class MetaFileParser(object): self._PostProcessed = False # Different version of meta-file has different way to parse. 
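Editor's note: the two precompiled patterns added in the import hunk above (hexVersionPattern, decVersionPattern) classify INF_VERSION strings; further down in this patch they replace the inline re.match() calls in the [Defines] parser. The following is a minimal standalone sketch of that version handling, not part of the patch, assuming the same packing of dotted versions into a hex word that the INF_VERSION branch later in this diff performs.

# Illustrative sketch, not part of the patch: mirrors the INF_VERSION handling
# shown later in this diff.  Hex versions are taken verbatim; dotted versions
# are packed as two 4-digit octal fields and re-read as a hex word.
import re

hexVersionPattern = re.compile(r'0[xX][\da-f-A-F]{5,8}')
decVersionPattern = re.compile(r'\d+\.\d+')

def ParseInfVersion(Value):
    if hexVersionPattern.match(Value):
        return int(Value, 0)
    if decVersionPattern.match(Value):
        Major, Minor = Value.split('.')
        return int('0x' + '%04o' % int(Major, 0) + '%04o' % int(Minor, 0), 0)
    return 0

print(hex(ParseInfVersion('0x00010005')))   # 0x10005
print(hex(ParseInfVersion('1.10')))         # 0x10012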
self._Version = 0 + self._GuidDict = {} # for Parser PCD value {GUID(gTokeSpaceGuidName)} ## Store the parsed data in table def _Store(self, *Args): @@ -211,6 +218,15 @@ class MetaFileParser(object): def _SetFinished(self, Value): self._Finished = Value + ## Remove records that do not match given Filter Arch + def _FilterRecordList(self, RecordList, FilterArch): + NewRecordList = [] + for Record in RecordList: + Arch = Record[3] + if Arch == TAB_ARCH_COMMON or Arch == FilterArch: + NewRecordList.append(Record) + return NewRecordList + ## Use [] style to query data in table, just for readability # # DataInfo = [data_type, scope1(arch), scope2(platform/moduletype)] @@ -229,14 +245,14 @@ class MetaFileParser(object): self.Start() # No specific ARCH or Platform given, use raw data - if self._RawTable and (len(DataInfo) == 1 or DataInfo[1] == None): - return self._RawTable.Query(*DataInfo) + if self._RawTable and (len(DataInfo) == 1 or DataInfo[1] is None): + return self._FilterRecordList(self._RawTable.Query(*DataInfo), self._Arch) # Do post-process if necessary if not self._PostProcessed: self._PostProcess() - return self._Table.Query(*DataInfo) + return self._FilterRecordList(self._Table.Query(*DataInfo), DataInfo[1]) ## Data parser for the common format in different type of file # @@ -285,7 +301,7 @@ class MetaFileParser(object): for Item in GetSplitValueList(self._CurrentLine[1:-1], TAB_COMMA_SPLIT): if Item == '': continue - ItemList = GetSplitValueList(Item, TAB_SPLIT,2) + ItemList = GetSplitValueList(Item, TAB_SPLIT,3) # different section should not mix in one section if self._SectionName != '' and self._SectionName != ItemList[0].upper(): EdkLogger.error('Parser', FORMAT_INVALID, "Different section names in the same section", @@ -294,7 +310,7 @@ class MetaFileParser(object): if self._SectionName in self.DataType: self._SectionType = self.DataType[self._SectionName] # Check if the section name is valid - if self._SectionName not in SECTIONS_HAVE_ITEM_AFTER_ARCH and len(ItemList) > 3: + if self._SectionName not in SECTIONS_HAVE_ITEM_AFTER_ARCH_SET and len(ItemList) > 3: EdkLogger.error("Parser", FORMAT_UNKNOWN_ERROR, "%s is not a valid section name" % Item, self.MetaFile, self._LineIndex + 1, self._CurrentLine) elif self._Version >= 0x00010005: @@ -307,21 +323,25 @@ class MetaFileParser(object): if len(ItemList) > 1: S1 = ItemList[1].upper() else: - S1 = 'COMMON' + S1 = TAB_ARCH_COMMON ArchList.add(S1) # S2 may be Platform or ModuleType if len(ItemList) > 2: - if self._SectionName.upper() in SECTIONS_HAVE_ITEM_PCD: + if self._SectionName.upper() in SECTIONS_HAVE_ITEM_PCD_SET: S2 = ItemList[2] else: S2 = ItemList[2].upper() else: - S2 = 'COMMON' - self._Scope.append([S1, S2]) + S2 = TAB_COMMON + if len(ItemList) > 3: + S3 = ItemList[3] + else: + S3 = TAB_COMMON + self._Scope.append([S1, S2, S3]) # 'COMMON' must not be used with specific ARCHs at the same section - if 'COMMON' in ArchList and len(ArchList) > 1: + if TAB_ARCH_COMMON in ArchList and len(ArchList) > 1: EdkLogger.error('Parser', FORMAT_INVALID, "'common' ARCH must not be used with specific ARCHs", File=self.MetaFile, Line=self._LineIndex + 1, ExtraData=self._CurrentLine) # If the section information is needed later, it should be stored in database @@ -341,11 +361,18 @@ class MetaFileParser(object): self._ValueList = [ReplaceMacro(Value, self._Macros) for Value in self._ValueList] Name, Value = self._ValueList[1], self._ValueList[2] + MacroUsed = GlobalData.gMacroRefPattern.findall(Value) + if len(MacroUsed) != 0: + for Macro 
in MacroUsed: + if Macro in GlobalData.gGlobalDefines: + EdkLogger.error("Parser", FORMAT_INVALID, "Global macro %s is not permitted." % (Macro), ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1) + else: + EdkLogger.error("Parser", FORMAT_INVALID, "%s not defined" % (Macro), ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1) # Sometimes, we need to make differences between EDK and EDK2 modules if Name == 'INF_VERSION': - if re.match(r'0[xX][\da-f-A-F]{5,8}', Value): + if hexVersionPattern.match(Value): self._Version = int(Value, 0) - elif re.match(r'\d+\.\d+', Value): + elif decVersionPattern.match(Value): ValueList = Value.split('.') Major = '%04o' % int(ValueList[0], 0) Minor = '%04o' % int(ValueList[1], 0) @@ -392,19 +419,18 @@ class MetaFileParser(object): ## Construct section Macro dict def _ConstructSectionMacroDict(self, Name, Value): - ScopeKey = [(Scope[0], Scope[1]) for Scope in self._Scope] + ScopeKey = [(Scope[0], Scope[1],Scope[2]) for Scope in self._Scope] ScopeKey = tuple(ScopeKey) - SectionDictKey = self._SectionType, ScopeKey # # DecParser SectionType is a list, will contain more than one item only in Pcd Section # As Pcd section macro usage is not alllowed, so here it is safe # if type(self) == DecParser: SectionDictKey = self._SectionType[0], ScopeKey - if SectionDictKey not in self._SectionsMacroDict: - self._SectionsMacroDict[SectionDictKey] = {} - SectionLocalMacros = self._SectionsMacroDict[SectionDictKey] - SectionLocalMacros[Name] = Value + else: + SectionDictKey = self._SectionType, ScopeKey + + self._SectionsMacroDict[SectionDictKey][Name] = Value ## Get section Macros that are applicable to current line, which may come from other sections ## that share the same name while scope is wider @@ -424,20 +450,20 @@ class MetaFileParser(object): continue for ActiveScope in self._Scope: - Scope0, Scope1 = ActiveScope[0], ActiveScope[1] - if(Scope0, Scope1) not in Scope: + Scope0, Scope1 ,Scope2= ActiveScope[0], ActiveScope[1],ActiveScope[2] + if(Scope0, Scope1,Scope2) not in Scope: break else: SpeSpeMacroDict.update(self._SectionsMacroDict[(SectionType, Scope)]) for ActiveScope in self._Scope: - Scope0, Scope1 = ActiveScope[0], ActiveScope[1] - if(Scope0, Scope1) not in Scope and (Scope0, "COMMON") not in Scope and ("COMMON", Scope1) not in Scope: + Scope0, Scope1,Scope2 = ActiveScope[0], ActiveScope[1],ActiveScope[2] + if(Scope0, Scope1,Scope2) not in Scope and (Scope0, TAB_COMMON, TAB_COMMON) not in Scope and (TAB_COMMON, Scope1, TAB_COMMON) not in Scope: break else: ComSpeMacroDict.update(self._SectionsMacroDict[(SectionType, Scope)]) - if ("COMMON", "COMMON") in Scope: + if (TAB_COMMON, TAB_COMMON, TAB_COMMON) in Scope: ComComMacroDict.update(self._SectionsMacroDict[(SectionType, Scope)]) Macros.update(ComComMacroDict) @@ -490,14 +516,14 @@ class InfParser(MetaFileParser): # # @param FilePath The path of module description file # @param FileType The raw data of DSC file + # @param Arch Default Arch value for filtering sections # @param Table Database used to retrieve module/package information - # @param Macros Macros used for replacement in file # - def __init__(self, FilePath, FileType, Table): + def __init__(self, FilePath, FileType, Arch, Table): # prevent re-initialization if hasattr(self, "_Table"): return - MetaFileParser.__init__(self, FilePath, FileType, Table) + MetaFileParser.__init__(self, FilePath, FileType, Arch, Table) self.PcdsDict = {} ## Parser starter @@ -545,8 +571,8 @@ class 
InfParser(MetaFileParser): if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END: if not GetHeaderComment: for Cmt, LNo in Comments: - self._Store(MODEL_META_DATA_HEADER_COMMENT, Cmt, '', '', 'COMMON', - 'COMMON', self._Owner[-1], LNo, -1, LNo, -1, 0) + self._Store(MODEL_META_DATA_HEADER_COMMENT, Cmt, '', '', TAB_COMMON, + TAB_COMMON, self._Owner[-1], LNo, -1, LNo, -1, 0) GetHeaderComment = True else: TailComments.extend(SectionComments + Comments) @@ -597,7 +623,7 @@ class InfParser(MetaFileParser): self._ValueList = ['', '', ''] # parse current line, result will be put in self._ValueList self._SectionParser[self._SectionType](self) - if self._ValueList == None or self._ItemType == MODEL_META_DATA_DEFINE: + if self._ValueList is None or self._ItemType == MODEL_META_DATA_DEFINE: self._ItemType = -1 Comments = [] continue @@ -609,7 +635,7 @@ class InfParser(MetaFileParser): # Model, Value1, Value2, Value3, Arch, Platform, BelongsToItem=-1, # LineBegin=-1, ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, Enabled=-1 # - for Arch, Platform in self._Scope: + for Arch, Platform,_ in self._Scope: LastItem = self._Store(self._SectionType, self._ValueList[0], self._ValueList[1], @@ -635,8 +661,8 @@ class InfParser(MetaFileParser): # If there are tail comments in INF file, save to database whatever the comments are for Comment in TailComments: - self._Store(MODEL_META_DATA_TAIL_COMMENT, Comment[0], '', '', 'COMMON', - 'COMMON', self._Owner[-1], -1, -1, -1, -1, 0) + self._Store(MODEL_META_DATA_TAIL_COMMENT, Comment[0], '', '', TAB_COMMON, + TAB_COMMON, self._Owner[-1], -1, -1, -1, -1, 0) self._Done() ## Data parser for the format in which there's path @@ -786,6 +812,7 @@ class DscParser(MetaFileParser): # DSC file supported data types (one type per section) DataType = { TAB_SKUIDS.upper() : MODEL_EFI_SKU_ID, + TAB_DEFAULT_STORES.upper() : MODEL_EFI_DEFAULT_STORES, TAB_LIBRARIES.upper() : MODEL_EFI_LIBRARY_INSTANCE, TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS, TAB_BUILD_OPTIONS.upper() : MODEL_META_DATA_BUILD_OPTION, @@ -831,7 +858,9 @@ class DscParser(MetaFileParser): "ISO_LANGUAGES", "TIME_STAMP_FILE", "VPD_TOOL_GUID", - "FIX_LOAD_TOP_MEMORY_ADDRESS" + "FIX_LOAD_TOP_MEMORY_ADDRESS", + "PREBUILD", + "POSTBUILD" ] SubSectionDefineKeywords = [ @@ -840,22 +869,24 @@ class DscParser(MetaFileParser): SymbolPattern = ValueExpression.SymbolPattern + IncludedFiles = set() + ## Constructor of DscParser # # Initialize object of DscParser # # @param FilePath The path of platform description file # @param FileType The raw data of DSC file + # @param Arch Default Arch value for filtering sections # @param Table Database used to retrieve module/package information - # @param Macros Macros used for replacement in file # @param Owner Owner ID (for sub-section parsing) # @param From ID from which the data comes (for !INCLUDE directive) # - def __init__(self, FilePath, FileType, Table, Owner= -1, From= -1): + def __init__(self, FilePath, FileType, Arch, Table, Owner= -1, From= -1): # prevent re-initialization if hasattr(self, "_Table"): return - MetaFileParser.__init__(self, FilePath, FileType, Table, Owner, From) + MetaFileParser.__init__(self, FilePath, FileType, Arch, Table, Owner, From) self._Version = 0x00010005 # Only EDK2 dsc file is supported # to store conditional directive evaluation result self._DirectiveStack = [] @@ -904,7 +935,7 @@ class DscParser(MetaFileParser): self._SubsectionType = MODEL_UNKNOWN self._SubsectionName = '' self._Owner[-1] = -1 - OwnerId = {} + OwnerId.clear() continue # 
subsection header elif Line[0] == TAB_OPTION_START and Line[-1] == TAB_OPTION_END: @@ -913,6 +944,8 @@ class DscParser(MetaFileParser): elif Line[0] == '!': self._DirectiveParser() continue + if Line[0] == TAB_OPTION_START and not self._InSubsection: + EdkLogger.error("Parser", FILE_READ_FAILURE, "Missing the '{' before %s in Line %s" % (Line, Index+1),ExtraData=self.MetaFile) if self._InSubsection: SectionType = self._SubsectionType @@ -922,13 +955,13 @@ class DscParser(MetaFileParser): self._ValueList = ['', '', ''] self._SectionParser[SectionType](self) - if self._ValueList == None: + if self._ValueList is None: continue # # Model, Value1, Value2, Value3, Arch, ModuleType, BelongsToItem=-1, BelongsToFile=-1, # LineBegin=-1, ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, Enabled=-1 # - for Arch, ModuleType in self._Scope: + for Arch, ModuleType, DefaultStore in self._Scope: Owner = self._Owner[-1] if self._SubsectionType != MODEL_UNKNOWN: Owner = OwnerId[Arch] @@ -939,6 +972,7 @@ class DscParser(MetaFileParser): self._ValueList[2], Arch, ModuleType, + DefaultStore, Owner, self._From, self._LineIndex + 1, @@ -991,7 +1025,7 @@ class DscParser(MetaFileParser): ExtraData=self._CurrentLine) ItemType = self.DataType[DirectiveName] - Scope = [['COMMON', 'COMMON']] + Scope = [[TAB_COMMON, TAB_COMMON, TAB_COMMON]] if ItemType == MODEL_META_DATA_INCLUDE: Scope = self._Scope if ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF: @@ -1021,7 +1055,7 @@ class DscParser(MetaFileParser): # Model, Value1, Value2, Value3, Arch, ModuleType, BelongsToItem=-1, BelongsToFile=-1, # LineBegin=-1, ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, Enabled=-1 # - for Arch, ModuleType in Scope: + for Arch, ModuleType, DefaultStore in Scope: self._LastItem = self._Store( ItemType, self._ValueList[0], @@ -1029,6 +1063,7 @@ class DscParser(MetaFileParser): self._ValueList[2], Arch, ModuleType, + DefaultStore, self._Owner[-1], self._From, self._LineIndex + 1, @@ -1064,9 +1099,16 @@ class DscParser(MetaFileParser): @ParseMacro def _SkuIdParser(self): + TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT) + if len(TokenList) not in (2,3): + EdkLogger.error('Parser', FORMAT_INVALID, "Correct format is '|[|]'", + ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1) + self._ValueList[0:len(TokenList)] = TokenList + @ParseMacro + def _DefaultStoresParser(self): TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT) if len(TokenList) != 2: - EdkLogger.error('Parser', FORMAT_INVALID, "Correct format is '|'", + EdkLogger.error('Parser', FORMAT_INVALID, "Correct format is '|'", ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1) self._ValueList[0:len(TokenList)] = TokenList @@ -1093,6 +1135,13 @@ class DscParser(MetaFileParser): def _PcdParser(self): TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 1) self._ValueList[0:1] = GetSplitValueList(TokenList[0], TAB_SPLIT) + PcdNameTockens = GetSplitValueList(TokenList[0], TAB_SPLIT) + if len(PcdNameTockens) == 2: + self._ValueList[0], self._ValueList[1] = PcdNameTockens[0], PcdNameTockens[1] + elif len(PcdNameTockens) == 3: + self._ValueList[0], self._ValueList[1] = ".".join((PcdNameTockens[0], PcdNameTockens[1])), PcdNameTockens[2] + elif len(PcdNameTockens) > 3: + self._ValueList[0], self._ValueList[1] = ".".join((PcdNameTockens[0], PcdNameTockens[1])), ".".join(PcdNameTockens[2:]) if len(TokenList) == 2: self._ValueList[2] = TokenList[1] if self._ValueList[0] == '' or self._ValueList[1] == '': @@ -1101,9 
+1150,9 @@ class DscParser(MetaFileParser): File=self.MetaFile, Line=self._LineIndex + 1) if self._ValueList[2] == '': # - # The PCD values are optional for FIXEDATBUILD and PATCHABLEINMODULE + # The PCD values are optional for FIXEDATBUILD, PATCHABLEINMODULE, Dynamic/DynamicEx default # - if self._SectionType in (MODEL_PCD_FIXED_AT_BUILD, MODEL_PCD_PATCHABLE_IN_MODULE): + if self._SectionType in (MODEL_PCD_FIXED_AT_BUILD, MODEL_PCD_PATCHABLE_IN_MODULE, MODEL_PCD_DYNAMIC_DEFAULT, MODEL_PCD_DYNAMIC_EX_DEFAULT): return EdkLogger.error('Parser', FORMAT_INVALID, "No PCD value given", ExtraData=self._CurrentLine + " (.|)", @@ -1111,11 +1160,18 @@ class DscParser(MetaFileParser): # Validate the datum type of Dynamic Defaul PCD and DynamicEx Default PCD ValueList = GetSplitValueList(self._ValueList[2]) - if len(ValueList) > 1 and ValueList[1] != TAB_VOID \ + if len(ValueList) > 1 and ValueList[1] in [TAB_UINT8 , TAB_UINT16, TAB_UINT32 , TAB_UINT64] \ and self._ItemType in [MODEL_PCD_DYNAMIC_DEFAULT, MODEL_PCD_DYNAMIC_EX_DEFAULT]: EdkLogger.error('Parser', FORMAT_INVALID, "The datum type '%s' of PCD is wrong" % ValueList[1], ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1) + # Validate the VariableName of DynamicHii and DynamicExHii for PCD Entry must not be an empty string + if self._ItemType in [MODEL_PCD_DYNAMIC_HII, MODEL_PCD_DYNAMIC_EX_HII]: + DscPcdValueList = GetSplitValueList(TokenList[1], TAB_VALUE_SPLIT, 1) + if len(DscPcdValueList[0].replace('L','').replace('"','').strip()) == 0: + EdkLogger.error('Parser', FORMAT_INVALID, "The VariableName field in the HII format PCD entry must not be an empty string", + ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1) + # if value are 'True', 'true', 'TRUE' or 'False', 'false', 'FALSE', replace with integer 1 or 0. 
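Editor's note: the PcdNameTockens splitting added to DscParser._PcdParser above allows a [Pcds*] entry to address an individual field of a structure PCD (TokenSpaceGuid.PcdName.Field...). Below is a hedged sketch, not part of the patch, of how the name portion is divided between ValueList[0] and ValueList[1], assuming TAB_SPLIT is the usual '.' separator from Common.DataType.

# Illustrative sketch, not part of the patch: reproduces the PcdNameTockens
# handling above.  Two tokens give a plain PCD; three or more give a structure
# PCD field reference whose field path is re-joined with '.'.
def SplitPcdName(CName):
    Tokens = CName.split('.')
    if len(Tokens) == 2:
        return Tokens[0], Tokens[1]                      # Guid, Pcd
    if len(Tokens) == 3:
        return '.'.join(Tokens[:2]), Tokens[2]           # Guid.Pcd, Field
    return '.'.join(Tokens[:2]), '.'.join(Tokens[2:])    # Guid.Pcd, Field.Sub...

print(SplitPcdName('gTokenSpaceGuid.PcdFoo'))
print(SplitPcdName('gTokenSpaceGuid.PcdFoo.Field1'))
print(SplitPcdName('gTokenSpaceGuid.PcdFoo.Field1.SubField'))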
DscPcdValueList = GetSplitValueList(TokenList[1], TAB_VALUE_SPLIT, 1) if DscPcdValueList[0] in ['True', 'true', 'TRUE']: @@ -1190,6 +1246,13 @@ class DscParser(MetaFileParser): # PCD cannot be referenced in macro definition if self._ItemType not in [MODEL_META_DATA_DEFINE, MODEL_META_DATA_GLOBAL_DEFINE]: Macros.update(self._Symbols) + if GlobalData.BuildOptionPcd: + for Item in GlobalData.BuildOptionPcd: + if type(Item) is tuple: + continue + PcdName, TmpValue = Item.split("=") + TmpValue = BuildOptionValue(TmpValue, self._GuidDict) + Macros[PcdName.strip()] = TmpValue return Macros def _PostProcess(self): @@ -1207,6 +1270,7 @@ class DscParser(MetaFileParser): MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF : self.__ProcessDirective, MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF : self.__ProcessDirective, MODEL_EFI_SKU_ID : self.__ProcessSkuId, + MODEL_EFI_DEFAULT_STORES : self.__ProcessDefaultStores, MODEL_EFI_LIBRARY_INSTANCE : self.__ProcessLibraryInstance, MODEL_EFI_LIBRARY_CLASS : self.__ProcessLibraryClass, MODEL_PCD_FIXED_AT_BUILD : self.__ProcessPcd, @@ -1231,7 +1295,7 @@ class DscParser(MetaFileParser): self._DirectiveEvalStack = [] self._FileWithError = self.MetaFile self._FileLocalMacros = {} - self._SectionsMacroDict = {} + self._SectionsMacroDict.clear() GlobalData.gPlatformDefines = {} # Get all macro and PCD which has straitforward value @@ -1240,7 +1304,7 @@ class DscParser(MetaFileParser): self._ContentIndex = 0 self._InSubsection = False while self._ContentIndex < len(self._Content) : - Id, self._ItemType, V1, V2, V3, S1, S2, Owner, self._From, \ + Id, self._ItemType, V1, V2, V3, S1, S2, S3,Owner, self._From, \ LineStart, ColStart, LineEnd, ColEnd, Enabled = self._Content[self._ContentIndex] if self._From < 0: @@ -1248,7 +1312,7 @@ class DscParser(MetaFileParser): self._ContentIndex += 1 - self._Scope = [[S1, S2]] + self._Scope = [[S1, S2, S3]] # # For !include directive, handle it specially, # merge arch and module type in case of duplicate items @@ -1257,9 +1321,9 @@ class DscParser(MetaFileParser): if self._ContentIndex >= len(self._Content): break Record = self._Content[self._ContentIndex] - if LineStart == Record[9] and LineEnd == Record[11]: - if [Record[5], Record[6]] not in self._Scope: - self._Scope.append([Record[5], Record[6]]) + if LineStart == Record[10] and LineEnd == Record[12]: + if [Record[5], Record[6],Record[7]] not in self._Scope: + self._Scope.append([Record[5], Record[6],Record[7]]) self._ContentIndex += 1 else: break @@ -1300,7 +1364,7 @@ class DscParser(MetaFileParser): File=self._FileWithError, ExtraData=' '.join(self._ValueList), Line=self._LineIndex + 1) - if self._ValueList == None: + if self._ValueList is None: continue NewOwner = self._IdMapping.get(Owner, -1) @@ -1312,6 +1376,7 @@ class DscParser(MetaFileParser): self._ValueList[2], S1, S2, + S3, NewOwner, self._From, self._LineIndex + 1, @@ -1347,7 +1412,7 @@ class DscParser(MetaFileParser): MODEL_PCD_DYNAMIC_VPD, MODEL_PCD_DYNAMIC_EX_DEFAULT, MODEL_PCD_DYNAMIC_EX_HII, MODEL_PCD_DYNAMIC_EX_VPD): Records = self._RawTable.Query(PcdType, BelongsToItem= -1.0) - for TokenSpaceGuid, PcdName, Value, Dummy2, Dummy3, ID, Line in Records: + for TokenSpaceGuid, PcdName, Value, Dummy2, Dummy3, Dummy4,ID, Line in Records: Name = TokenSpaceGuid + '.' 
+ PcdName if Name not in GlobalData.gPlatformOtherPcds: PcdLine = Line @@ -1442,12 +1507,12 @@ class DscParser(MetaFileParser): # Allow using system environment variables in path after !include # __IncludeMacros['WORKSPACE'] = GlobalData.gGlobalDefines['WORKSPACE'] - if "ECP_SOURCE" in GlobalData.gGlobalDefines.keys(): + if "ECP_SOURCE" in GlobalData.gGlobalDefines: __IncludeMacros['ECP_SOURCE'] = GlobalData.gGlobalDefines['ECP_SOURCE'] # # During GenFds phase call DSC parser, will go into this branch. # - elif "ECP_SOURCE" in GlobalData.gCommandLineDefines.keys(): + elif "ECP_SOURCE" in GlobalData.gCommandLineDefines: __IncludeMacros['ECP_SOURCE'] = GlobalData.gCommandLineDefines['ECP_SOURCE'] __IncludeMacros['EFI_SOURCE'] = GlobalData.gGlobalDefines['EFI_SOURCE'] @@ -1476,19 +1541,39 @@ class DscParser(MetaFileParser): self._FileWithError = IncludedFile1 IncludedFileTable = MetaFileStorage(self._Table.Cur, IncludedFile1, MODEL_FILE_DSC, False) - Owner = self._Content[self._ContentIndex - 1][0] - Parser = DscParser(IncludedFile1, self._FileType, IncludedFileTable, - Owner=Owner, From=Owner) + FromItem = self._Content[self._ContentIndex - 1][0] + if self._InSubsection: + Owner = self._Content[self._ContentIndex - 1][8] + else: + Owner = self._Content[self._ContentIndex - 1][0] + Parser = DscParser(IncludedFile1, self._FileType, self._Arch, IncludedFileTable, + Owner=Owner, From=FromItem) + + self.IncludedFiles.add (IncludedFile1) + + # todo: rework the nested include checking logic + # Current nested include checking rely on dsc file order inside build.db. + # It is not reliable and will lead to build fail in some case. + # + # When project A and B include a common dsc file C. + # Build project A. It give dsc file A = ID 1 in build.db, and C ID = 2. + # Build project B. It give dsc file B ID = 3, and C ID still = 2. + # Then, we build project B fail, unless we clean build.db. + # In oldder BaseTools, the project B ID will still be 1, + # that's why it work before. 
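Editor's note: the _Macros change in the hunk at new line 1246 above also folds PCD overrides given on the build command line (GlobalData.BuildOptionPcd) into the macro table before expressions are evaluated. The sketch below is not part of the patch; NormalizeValue() is a stand-in for the BuildOptionValue() helper, whose actual behaviour is defined elsewhere in BaseTools.

# Illustrative sketch, not part of the patch: shows how command-line PCD
# overrides of the form 'TokenSpaceGuid.PcdName=Value' end up as extra macros.
def NormalizeValue(Value):
    return Value          # placeholder for the real BuildOptionValue() helper

def FoldBuildOptionPcds(BuildOptionPcd, Macros):
    for Item in BuildOptionPcd:
        if isinstance(Item, tuple):          # already-parsed entries are skipped
            continue
        PcdName, TmpValue = Item.split('=')
        Macros[PcdName.strip()] = NormalizeValue(TmpValue)
    return Macros

print(FoldBuildOptionPcds(['gTokenSpaceGuid.PcdFoo=0x10'], {}))
# {'gTokenSpaceGuid.PcdFoo': '0x10'}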
# Does not allow lower level included file to include upper level included file - if Parser._From != Owner and int(Owner) > int (Parser._From): - EdkLogger.error('parser', FILE_ALREADY_EXIST, File=self._FileWithError, - Line=self._LineIndex + 1, ExtraData="{0} is already included at a higher level.".format(IncludedFile1)) + #if Parser._From != Owner and int(Owner) > int (Parser._From): + # EdkLogger.error('parser', FILE_ALREADY_EXIST, File=self._FileWithError, + # Line=self._LineIndex + 1, ExtraData="{0} is already included at a higher level.".format(IncludedFile1)) # set the parser status with current status Parser._SectionName = self._SectionName - Parser._SectionType = self._SectionType + if self._InSubsection: + Parser._SectionType = self._SubsectionType + else: + Parser._SectionType = self._SectionType Parser._Scope = self._Scope Parser._Enabled = self._Enabled # Parse the included file @@ -1496,7 +1581,11 @@ class DscParser(MetaFileParser): # update current status with sub-parser's status self._SectionName = Parser._SectionName - self._SectionType = Parser._SectionType + if not self._InSubsection: + self._SectionType = Parser._SectionType + self._SubsectionType = Parser._SubsectionType + self._InSubsection = Parser._InSubsection + self._Scope = Parser._Scope self._Enabled = Parser._Enabled @@ -1511,6 +1600,9 @@ class DscParser(MetaFileParser): def __ProcessSkuId(self): self._ValueList = [ReplaceMacro(Value, self._Macros, RaiseError=True) for Value in self._ValueList] + def __ProcessDefaultStores(self): + self._ValueList = [ReplaceMacro(Value, self._Macros, RaiseError=True) + for Value in self._ValueList] def __ProcessLibraryInstance(self): self._ValueList = [ReplaceMacro(Value, self._Macros) for Value in self._ValueList] @@ -1528,11 +1620,13 @@ class DscParser(MetaFileParser): EdkLogger.error('build', FORMAT_INVALID, "Pcd format incorrect.", File=self._FileWithError, Line=self._LineIndex + 1, ExtraData="%s.%s|%s" % (self._ValueList[0], self._ValueList[1], self._ValueList[2])) PcdValue = ValList[Index] - if PcdValue: + if PcdValue and "." 
not in self._ValueList[0]: try: ValList[Index] = ValueExpression(PcdValue, self._Macros)(True) except WrnExpression, Value: ValList[Index] = Value.result + except: + pass if ValList[Index] == 'True': ValList[Index] = '1' @@ -1542,7 +1636,10 @@ class DscParser(MetaFileParser): if (not self._DirectiveEvalStack) or (False not in self._DirectiveEvalStack): GlobalData.gPlatformPcds[TAB_SPLIT.join(self._ValueList[0:2])] = PcdValue self._Symbols[TAB_SPLIT.join(self._ValueList[0:2])] = PcdValue - self._ValueList[2] = '|'.join(ValList) + try: + self._ValueList[2] = '|'.join(ValList) + except Exception: + print ValList def __ProcessComponent(self): self._ValueList[0] = ReplaceMacro(self._ValueList[0], self._Macros) @@ -1557,6 +1654,7 @@ class DscParser(MetaFileParser): _SectionParser = { MODEL_META_DATA_HEADER : _DefineParser, MODEL_EFI_SKU_ID : _SkuIdParser, + MODEL_EFI_DEFAULT_STORES : _DefaultStoresParser, MODEL_EFI_LIBRARY_INSTANCE : _LibraryInstanceParser, MODEL_EFI_LIBRARY_CLASS : _LibraryClassParser, MODEL_PCD_FIXED_AT_BUILD : _PcdParser, @@ -1610,17 +1708,22 @@ class DecParser(MetaFileParser): # # @param FilePath The path of platform description file # @param FileType The raw data of DSC file + # @param Arch Default Arch value for filtering sections # @param Table Database used to retrieve module/package information - # @param Macros Macros used for replacement in file # - def __init__(self, FilePath, FileType, Table): + def __init__(self, FilePath, FileType, Arch, Table): # prevent re-initialization if hasattr(self, "_Table"): return - MetaFileParser.__init__(self, FilePath, FileType, Table, -1) + MetaFileParser.__init__(self, FilePath, FileType, Arch, Table, -1) self._Comments = [] self._Version = 0x00010005 # Only EDK2 dec file is supported self._AllPCDs = [] # Only for check duplicate PCD + self._AllPcdDict = {} + + self._CurrentStructurePcdName = "" + self._include_flag = False + self._package_flag = False ## Parser starter def Start(self): @@ -1630,6 +1733,7 @@ class DecParser(MetaFileParser): except: EdkLogger.error("Parser", FILE_READ_FAILURE, ExtraData=self.MetaFile) + self._DefinesCount = 0 for Index in range(0, len(Content)): Line, Comment = CleanString2(Content[Index]) self._CurrentLine = Line @@ -1645,8 +1749,15 @@ class DecParser(MetaFileParser): # section header if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END: self._SectionHeaderParser() + if self._SectionName == TAB_DEC_DEFINES.upper(): + self._DefinesCount += 1 self._Comments = [] continue + if self._SectionType == MODEL_UNKNOWN: + EdkLogger.error("Parser", FORMAT_INVALID, + "" + "Not able to determine \"%s\" in which section."%self._CurrentLine, + self.MetaFile, self._LineIndex + 1) elif len(self._SectionType) == 0: self._Comments = [] continue @@ -1654,7 +1765,7 @@ class DecParser(MetaFileParser): # section content self._ValueList = ['', '', ''] self._SectionParser[self._SectionType[0]](self) - if self._ValueList == None or self._ItemType == MODEL_META_DATA_DEFINE: + if self._ValueList is None or self._ItemType == MODEL_META_DATA_DEFINE: self._ItemType = -1 self._Comments = [] continue @@ -1694,6 +1805,10 @@ class DecParser(MetaFileParser): 0 ) self._Comments = [] + if self._DefinesCount > 1: + EdkLogger.error('Parser', FORMAT_INVALID, 'Multiple [Defines] section is exist.', self.MetaFile ) + if self._DefinesCount == 0: + EdkLogger.error('Parser', FORMAT_INVALID, 'No [Defines] section exist.',self.MetaFile) self._Done() @@ -1708,7 +1823,8 @@ class DecParser(MetaFileParser): self._SectionName = '' 
self._SectionType = [] ArchList = set() - Line = self._CurrentLine.replace("%s%s" % (TAB_COMMA_SPLIT, TAB_SPACE_SPLIT), TAB_COMMA_SPLIT) + PrivateList = set() + Line = re.sub(',[\s]*', TAB_COMMA_SPLIT, self._CurrentLine) for Item in Line[1:-1].split(TAB_COMMA_SPLIT): if Item == '': EdkLogger.error("Parser", FORMAT_UNKNOWN_ERROR, @@ -1718,6 +1834,9 @@ class DecParser(MetaFileParser): # different types of PCD are permissible in one section self._SectionName = ItemList[0].upper() + if self._SectionName == TAB_DEC_DEFINES.upper() and (len(ItemList) > 1 or len(Line.split(TAB_COMMA_SPLIT)) > 1): + EdkLogger.error("Parser", FORMAT_INVALID, "Defines section format is invalid", + self.MetaFile, self._LineIndex + 1, self._CurrentLine) if self._SectionName in self.DataType: if self.DataType[self._SectionName] not in self._SectionType: self._SectionType.append(self.DataType[self._SectionName]) @@ -1738,21 +1857,32 @@ class DecParser(MetaFileParser): if len(ItemList) > 1: S1 = ItemList[1].upper() else: - S1 = 'COMMON' + S1 = TAB_ARCH_COMMON ArchList.add(S1) # S2 may be Platform or ModuleType if len(ItemList) > 2: S2 = ItemList[2].upper() + # only Includes, GUIDs, PPIs, Protocols section have Private tag + if self._SectionName in [TAB_INCLUDES.upper(), TAB_GUIDS.upper(), TAB_PROTOCOLS.upper(), TAB_PPIS.upper()]: + if S2 != 'PRIVATE': + EdkLogger.error("Parser", FORMAT_INVALID, 'Please use keyword "Private" as section tag modifier.', + File=self.MetaFile, Line=self._LineIndex + 1, ExtraData=self._CurrentLine) else: - S2 = 'COMMON' + S2 = TAB_COMMON + PrivateList.add(S2) if [S1, S2, self.DataType[self._SectionName]] not in self._Scope: self._Scope.append([S1, S2, self.DataType[self._SectionName]]) # 'COMMON' must not be used with specific ARCHs at the same section - if 'COMMON' in ArchList and len(ArchList) > 1: + if TAB_ARCH_COMMON in ArchList and len(ArchList) > 1: EdkLogger.error('Parser', FORMAT_INVALID, "'common' ARCH must not be used with specific ARCHs", File=self.MetaFile, Line=self._LineIndex + 1, ExtraData=self._CurrentLine) + # It is not permissible to mix section tags without the Private attribute with section tags with the Private attribute + if TAB_COMMON in PrivateList and len(PrivateList) > 1: + EdkLogger.error('Parser', FORMAT_INVALID, "Can't mix section tags without the Private attribute with section tags with the Private attribute", + File=self.MetaFile, Line=self._LineIndex + 1, ExtraData=self._CurrentLine) + ## [guids], [ppis] and [protocols] section parser @ParseMacro def _GuidParser(self): @@ -1776,6 +1906,8 @@ class DecParser(MetaFileParser): File=self.MetaFile, Line=self._LineIndex + 1) self._ValueList[0] = TokenList[0] self._ValueList[1] = TokenList[1] + if self._ValueList[0] not in self._GuidDict: + self._GuidDict[self._ValueList[0]] = self._ValueList[1] ## PCD sections parser # @@ -1787,91 +1919,143 @@ class DecParser(MetaFileParser): # @ParseMacro def _PcdParser(self): - TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 1) - self._ValueList[0:1] = GetSplitValueList(TokenList[0], TAB_SPLIT) - ValueRe = re.compile(r'^[a-zA-Z_][a-zA-Z0-9_]*') - # check PCD information - if self._ValueList[0] == '' or self._ValueList[1] == '': - EdkLogger.error('Parser', FORMAT_INVALID, "No token space GUID or PCD name specified", - ExtraData=self._CurrentLine + \ - " (.|||)", - File=self.MetaFile, Line=self._LineIndex + 1) - # check format of token space GUID CName - if not ValueRe.match(self._ValueList[0]): - EdkLogger.error('Parser', FORMAT_INVALID, "The format of the token 
space GUID CName is invalid. The correct format is '(a-zA-Z_)[a-zA-Z0-9_]*'", - ExtraData=self._CurrentLine + \ - " (.|||)", - File=self.MetaFile, Line=self._LineIndex + 1) - # check format of PCD CName - if not ValueRe.match(self._ValueList[1]): - EdkLogger.error('Parser', FORMAT_INVALID, "The format of the PCD CName is invalid. The correct format is '(a-zA-Z_)[a-zA-Z0-9_]*'", - ExtraData=self._CurrentLine + \ - " (.|||)", - File=self.MetaFile, Line=self._LineIndex + 1) - # check PCD datum information - if len(TokenList) < 2 or TokenList[1] == '': - EdkLogger.error('Parser', FORMAT_INVALID, "No PCD Datum information given", - ExtraData=self._CurrentLine + \ - " (.|||)", - File=self.MetaFile, Line=self._LineIndex + 1) - - - ValueRe = re.compile(r'^\s*L?\".*\|.*\"') - PtrValue = ValueRe.findall(TokenList[1]) - - # Has VOID* type string, may contain "|" character in the string. - if len(PtrValue) != 0: - ptrValueList = re.sub(ValueRe, '', TokenList[1]) - ValueList = GetSplitValueList(ptrValueList) - ValueList[0] = PtrValue[0] - else: - ValueList = GetSplitValueList(TokenList[1]) - - - # check if there's enough datum information given - if len(ValueList) != 3: - EdkLogger.error('Parser', FORMAT_INVALID, "Invalid PCD Datum information given", - ExtraData=self._CurrentLine + \ - " (.|||)", - File=self.MetaFile, Line=self._LineIndex + 1) - # check default value - if ValueList[0] == '': - EdkLogger.error('Parser', FORMAT_INVALID, "Missing DefaultValue in PCD Datum information", - ExtraData=self._CurrentLine + \ - " (.|||)", - File=self.MetaFile, Line=self._LineIndex + 1) - # check datum type - if ValueList[1] == '': - EdkLogger.error('Parser', FORMAT_INVALID, "Missing DatumType in PCD Datum information", - ExtraData=self._CurrentLine + \ - " (.|||)", - File=self.MetaFile, Line=self._LineIndex + 1) - # check token of the PCD - if ValueList[2] == '': - EdkLogger.error('Parser', FORMAT_INVALID, "Missing Token in PCD Datum information", - ExtraData=self._CurrentLine + \ - " (.|||)", - File=self.MetaFile, Line=self._LineIndex + 1) - # check format of default value against the datum type - IsValid, Cause = CheckPcdDatum(ValueList[1], ValueList[0]) - if not IsValid: - EdkLogger.error('Parser', FORMAT_INVALID, Cause, ExtraData=self._CurrentLine, - File=self.MetaFile, Line=self._LineIndex + 1) - - if ValueList[0] in ['True', 'true', 'TRUE']: - ValueList[0] = '1' - elif ValueList[0] in ['False', 'false', 'FALSE']: - ValueList[0] = '0' - - # check for duplicate PCD definition - if (self._Scope[0], self._ValueList[0], self._ValueList[1]) in self._AllPCDs: - EdkLogger.error('Parser', FORMAT_INVALID, - "The same PCD name and GUID have been already defined", - ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1) - else: - self._AllPCDs.append((self._Scope[0], self._ValueList[0], self._ValueList[1])) + if self._CurrentStructurePcdName: + self._ValueList[0] = self._CurrentStructurePcdName + + if "|" not in self._CurrentLine: + if "" == self._CurrentLine: + self._include_flag = True + self._package_flag = False + self._ValueList = None + return + if "" == self._CurrentLine: + self._package_flag = True + self._ValueList = None + self._include_flag = False + return + + if self._include_flag: + self._ValueList[1] = "_" + md5.new(self._CurrentLine).hexdigest() + self._ValueList[2] = self._CurrentLine + if self._package_flag and "}" != self._CurrentLine: + self._ValueList[1] = "_" + md5.new(self._CurrentLine).hexdigest() + self._ValueList[2] = self._CurrentLine + if self._CurrentLine == "}": + 
self._package_flag = False + self._include_flag = False + self._ValueList = None + return + else: + PcdTockens = self._CurrentLine.split(TAB_VALUE_SPLIT) + PcdNames = PcdTockens[0].split(TAB_SPLIT) + if len(PcdNames) == 2: + self._CurrentStructurePcdName = "" + else: + if self._CurrentStructurePcdName != TAB_SPLIT.join(PcdNames[:2]): + EdkLogger.error('Parser', FORMAT_INVALID, "Pcd Name does not match: %s and %s " % (self._CurrentStructurePcdName , TAB_SPLIT.join(PcdNames[:2])), + File=self.MetaFile, Line=self._LineIndex + 1) + self._ValueList[1] = TAB_SPLIT.join(PcdNames[2:]) + self._ValueList[2] = PcdTockens[1] + if not self._CurrentStructurePcdName: + TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 1) + self._ValueList[0:1] = GetSplitValueList(TokenList[0], TAB_SPLIT) + ValueRe = re.compile(r'^[a-zA-Z_][a-zA-Z0-9_]*') + # check PCD information + if self._ValueList[0] == '' or self._ValueList[1] == '': + EdkLogger.error('Parser', FORMAT_INVALID, "No token space GUID or PCD name specified", + ExtraData=self._CurrentLine + \ + " (.|||)", + File=self.MetaFile, Line=self._LineIndex + 1) + # check format of token space GUID CName + if not ValueRe.match(self._ValueList[0]): + EdkLogger.error('Parser', FORMAT_INVALID, "The format of the token space GUID CName is invalid. The correct format is '(a-zA-Z_)[a-zA-Z0-9_]*'", + ExtraData=self._CurrentLine + \ + " (.|||)", + File=self.MetaFile, Line=self._LineIndex + 1) + # check format of PCD CName + if not ValueRe.match(self._ValueList[1]): + EdkLogger.error('Parser', FORMAT_INVALID, "The format of the PCD CName is invalid. The correct format is '(a-zA-Z_)[a-zA-Z0-9_]*'", + ExtraData=self._CurrentLine + \ + " (.|||)", + File=self.MetaFile, Line=self._LineIndex + 1) + # check PCD datum information + if len(TokenList) < 2 or TokenList[1] == '': + EdkLogger.error('Parser', FORMAT_INVALID, "No PCD Datum information given", + ExtraData=self._CurrentLine + \ + " (.|||)", + File=self.MetaFile, Line=self._LineIndex + 1) + + + ValueRe = re.compile(r'^\s*L?\".*\|.*\"') + PtrValue = ValueRe.findall(TokenList[1]) + + # Has VOID* type string, may contain "|" character in the string. 
+ if len(PtrValue) != 0: + ptrValueList = re.sub(ValueRe, '', TokenList[1]) + ValueList = AnalyzePcdExpression(ptrValueList) + ValueList[0] = PtrValue[0] + else: + ValueList = AnalyzePcdExpression(TokenList[1]) + + + # check if there's enough datum information given + if len(ValueList) != 3: + EdkLogger.error('Parser', FORMAT_INVALID, "Invalid PCD Datum information given", + ExtraData=self._CurrentLine + \ + " (.|||)", + File=self.MetaFile, Line=self._LineIndex + 1) + # check default value + if ValueList[0] == '': + EdkLogger.error('Parser', FORMAT_INVALID, "Missing DefaultValue in PCD Datum information", + ExtraData=self._CurrentLine + \ + " (.|||)", + File=self.MetaFile, Line=self._LineIndex + 1) + # check datum type + if ValueList[1] == '': + EdkLogger.error('Parser', FORMAT_INVALID, "Missing DatumType in PCD Datum information", + ExtraData=self._CurrentLine + \ + " (.|||)", + File=self.MetaFile, Line=self._LineIndex + 1) + # check token of the PCD + if ValueList[2] == '': + EdkLogger.error('Parser', FORMAT_INVALID, "Missing Token in PCD Datum information", + ExtraData=self._CurrentLine + \ + " (.|||)", + File=self.MetaFile, Line=self._LineIndex + 1) + + PcdValue = ValueList[0] + if PcdValue: + try: + self._GuidDict.update(self._AllPcdDict) + ValueList[0] = ValueExpressionEx(ValueList[0], ValueList[1], self._GuidDict)(True) + except BadExpression, Value: + EdkLogger.error('Parser', FORMAT_INVALID, Value, ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1) + # check format of default value against the datum type + IsValid, Cause = CheckPcdDatum(ValueList[1], ValueList[0]) + if not IsValid: + EdkLogger.error('Parser', FORMAT_INVALID, Cause, ExtraData=self._CurrentLine, + File=self.MetaFile, Line=self._LineIndex + 1) + + if Cause == "StructurePcd": + self._CurrentStructurePcdName = TAB_SPLIT.join(self._ValueList[0:2]) + self._ValueList[0] = self._CurrentStructurePcdName + self._ValueList[1] = ValueList[1].strip() + + if ValueList[0] in ['True', 'true', 'TRUE']: + ValueList[0] = '1' + elif ValueList[0] in ['False', 'false', 'FALSE']: + ValueList[0] = '0' + + # check for duplicate PCD definition + if (self._Scope[0], self._ValueList[0], self._ValueList[1]) in self._AllPCDs: + EdkLogger.error('Parser', FORMAT_INVALID, + "The same PCD name and GUID have been already defined", + ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1) + else: + self._AllPCDs.append((self._Scope[0], self._ValueList[0], self._ValueList[1])) + self._AllPcdDict[TAB_SPLIT.join(self._ValueList[0:2])] = ValueList[0] - self._ValueList[2] = ValueList[0].strip() + '|' + ValueList[1].strip() + '|' + ValueList[2].strip() + self._ValueList[2] = ValueList[0].strip() + '|' + ValueList[1].strip() + '|' + ValueList[2].strip() _SectionParser = { MODEL_META_DATA_HEADER : MetaFileParser._DefineParser,
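Editor's note: because a VOID* default in a DEC PCD declaration may itself contain the '|' field separator, the parser above first pulls the quoted string out with ValueRe and only then splits the remaining fields (now via AnalyzePcdExpression from Common.Misc). The following simplified sketch, not part of the patch, uses a plain '|' split in place of AnalyzePcdExpression to show the idea.

# Illustrative sketch, not part of the patch: pull a quoted VOID* default out
# of the datum portion before splitting on '|', as DecParser._PcdParser does.
import re

PtrValueRe = re.compile(r'^\s*L?\".*\|.*\"')

def SplitDecPcdFields(Datum):
    Quoted = PtrValueRe.findall(Datum)
    if Quoted:
        Fields = PtrValueRe.sub('', Datum).split('|')
        Fields[0] = Quoted[0]                 # restore the quoted default value
    else:
        Fields = Datum.split('|')
    return [Field.strip() for Field in Fields]

print(SplitDecPcdFields('L"A|B"|VOID*|0x00000001'))
# ['L"A|B"', 'VOID*', '0x00000001']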