X-Git-Url: https://git.proxmox.com/?p=mirror_edk2.git;a=blobdiff_plain;f=BaseTools%2FSource%2FPython%2FWorkspace%2FMetaFileParser.py;h=fbfc182c8bfff2f142da3a7c7aec80b157fcfdf1;hp=303844785043405bc4489806eef628a926a370e8;hb=1100bc5aa05097306cdecc4d0118cc312da79d45;hpb=a09395932d997d41f59ae3ee2f7f77f91f5caa02

diff --git a/BaseTools/Source/Python/Workspace/MetaFileParser.py b/BaseTools/Source/Python/Workspace/MetaFileParser.py
index 3038447850..fbfc182c8b 100644
--- a/BaseTools/Source/Python/Workspace/MetaFileParser.py
+++ b/BaseTools/Source/Python/Workspace/MetaFileParser.py
@@ -1,8 +1,8 @@
 ## @file
 # This file is used to parse meta files
 #
-# Copyright (c) 2008 - 2017, Intel Corporation. All rights reserved.
-# (C) Copyright 2015-2016 Hewlett Packard Enterprise Development LP
+# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.
+# (C) Copyright 2015-2018 Hewlett Packard Enterprise Development LP
# This program and the accompanying materials # are licensed and made available under the terms and conditions of the BSD License # which accompanies this distribution. The full text of the license may be found at @@ -15,6 +15,8 @@ ## # Import Modules # +from __future__ import print_function +from __future__ import absolute_import import Common.LongFilePathOs as os import re import time @@ -26,14 +28,18 @@ import Common.GlobalData as GlobalData from CommonDataClass.DataClass import * from Common.DataType import * -from Common.String import * -from Common.Misc import GuidStructureStringToGuidString, CheckPcdDatum, PathClass, AnalyzePcdData, AnalyzeDscPcd, AnalyzePcdExpression +from Common.StringUtils import * +from Common.Misc import GuidStructureStringToGuidString, CheckPcdDatum, PathClass, AnalyzePcdData, AnalyzeDscPcd, AnalyzePcdExpression, ParseFieldValue from Common.Expression import * from CommonDataClass.Exceptions import * from Common.LongFilePathSupport import OpenLongFilePath as open +from collections import defaultdict +from .MetaFileTable import MetaFileStorage +from .MetaFileCommentParser import CheckInfComment -from MetaFileTable import MetaFileStorage -from MetaFileCommentParser import CheckInfComment +## RegEx for finding file versions +hexVersionPattern = re.compile(r'0[xX][\da-f-A-F]{5,8}') +decVersionPattern = re.compile(r'\d+\.\d+') ## A decorator used to parse macro definition def ParseMacro(Parser): @@ -73,10 +79,10 @@ def ParseMacro(Parser): # # First judge whether this DEFINE is in conditional directive statements or not. # - if type(self) == DscParser and self._InDirective > -1: + if isinstance(self, DscParser) and self._InDirective > -1: pass else: - if type(self) == DecParser: + if isinstance(self, DecParser): if MODEL_META_DATA_HEADER in self._SectionType: self._FileLocalMacros[Name] = Value else: @@ -87,7 +93,7 @@ def ParseMacro(Parser): self._ConstructSectionMacroDict(Name, Value) # EDK_GLOBAL defined macros - elif type(self) != DscParser: + elif not isinstance(self, DscParser): EdkLogger.error('Parser', FORMAT_INVALID, "EDK_GLOBAL can only be used in .dsc file", ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1) elif self._SectionType != MODEL_META_DATA_HEADER: @@ -159,7 +165,7 @@ class MetaFileParser(object): self._FileDir = self.MetaFile.Dir self._Defines = {} self._FileLocalMacros = {} - self._SectionsMacroDict = {} + self._SectionsMacroDict = defaultdict(dict) # for recursive parsing self._Owner = [Owner] @@ -182,6 +188,7 @@ class MetaFileParser(object): self._PostProcessed = False # Different version of meta-file has different way to parse. 
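(Editorial note, not part of the patch: the hunks above replace `type(x) == SomeParser` comparisons with `isinstance()`, which keeps working if the parser classes are ever subclassed, and turn `_SectionsMacroDict` into a `collections.defaultdict(dict)` so each section's macro bucket is created on first use. A minimal standalone Python sketch of both idioms, using hypothetical class and key names:)

from collections import defaultdict

class MetaParser(object):            # hypothetical stand-ins for the real parser classes
    pass

class DscLikeParser(MetaParser):
    pass

p = DscLikeParser()
print(type(p) == MetaParser)         # False: type() equality ignores inheritance
print(isinstance(p, MetaParser))     # True: isinstance() accepts subclasses

# defaultdict(dict) creates the inner per-section dict on first access,
# so no "if key not in d: d[key] = {}" guard is needed before assignment.
section_macros = defaultdict(dict)
section_macros[('LibraryClasses', ('IA32', 'COMMON', 'COMMON'))]['MY_MACRO'] = '1'
print(section_macros)
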
self._Version = 0 + self._GuidDict = {} # for Parser PCD value {GUID(gTokeSpaceGuidName)} ## Store the parsed data in table def _Store(self, *Args): @@ -218,7 +225,7 @@ class MetaFileParser(object): NewRecordList = [] for Record in RecordList: Arch = Record[3] - if Arch == 'COMMON' or Arch == FilterArch: + if Arch == TAB_ARCH_COMMON or Arch == FilterArch: NewRecordList.append(Record) return NewRecordList @@ -227,7 +234,7 @@ class MetaFileParser(object): # DataInfo = [data_type, scope1(arch), scope2(platform/moduletype)] # def __getitem__(self, DataInfo): - if type(DataInfo) != type(()): + if not isinstance(DataInfo, type(())): DataInfo = (DataInfo,) # Parse the file first, if necessary @@ -240,7 +247,7 @@ class MetaFileParser(object): self.Start() # No specific ARCH or Platform given, use raw data - if self._RawTable and (len(DataInfo) == 1 or DataInfo[1] == None): + if self._RawTable and (len(DataInfo) == 1 or DataInfo[1] is None): return self._FilterRecordList(self._RawTable.Query(*DataInfo), self._Arch) # Do post-process if necessary @@ -269,7 +276,7 @@ class MetaFileParser(object): TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT) self._ValueList[0:len(TokenList)] = TokenList # Don't do macro replacement for dsc file at this point - if type(self) != DscParser: + if not isinstance(self, DscParser): Macros = self._Macros self._ValueList = [ReplaceMacro(Value, Macros) for Value in self._ValueList] @@ -296,7 +303,7 @@ class MetaFileParser(object): for Item in GetSplitValueList(self._CurrentLine[1:-1], TAB_COMMA_SPLIT): if Item == '': continue - ItemList = GetSplitValueList(Item, TAB_SPLIT,3) + ItemList = GetSplitValueList(Item, TAB_SPLIT, 3) # different section should not mix in one section if self._SectionName != '' and self._SectionName != ItemList[0].upper(): EdkLogger.error('Parser', FORMAT_INVALID, "Different section names in the same section", @@ -305,7 +312,7 @@ class MetaFileParser(object): if self._SectionName in self.DataType: self._SectionType = self.DataType[self._SectionName] # Check if the section name is valid - if self._SectionName not in SECTIONS_HAVE_ITEM_AFTER_ARCH and len(ItemList) > 3: + if self._SectionName not in SECTIONS_HAVE_ITEM_AFTER_ARCH_SET and len(ItemList) > 3: EdkLogger.error("Parser", FORMAT_UNKNOWN_ERROR, "%s is not a valid section name" % Item, self.MetaFile, self._LineIndex + 1, self._CurrentLine) elif self._Version >= 0x00010005: @@ -318,25 +325,25 @@ class MetaFileParser(object): if len(ItemList) > 1: S1 = ItemList[1].upper() else: - S1 = 'COMMON' + S1 = TAB_ARCH_COMMON ArchList.add(S1) # S2 may be Platform or ModuleType if len(ItemList) > 2: - if self._SectionName.upper() in SECTIONS_HAVE_ITEM_PCD: + if self._SectionName.upper() in SECTIONS_HAVE_ITEM_PCD_SET: S2 = ItemList[2] else: S2 = ItemList[2].upper() else: - S2 = 'COMMON' + S2 = TAB_COMMON if len(ItemList) > 3: S3 = ItemList[3] else: - S3 = "COMMON" + S3 = TAB_COMMON self._Scope.append([S1, S2, S3]) # 'COMMON' must not be used with specific ARCHs at the same section - if 'COMMON' in ArchList and len(ArchList) > 1: + if TAB_ARCH_COMMON in ArchList and len(ArchList) > 1: EdkLogger.error('Parser', FORMAT_INVALID, "'common' ARCH must not be used with specific ARCHs", File=self.MetaFile, Line=self._LineIndex + 1, ExtraData=self._CurrentLine) # If the section information is needed later, it should be stored in database @@ -363,11 +370,11 @@ class MetaFileParser(object): EdkLogger.error("Parser", FORMAT_INVALID, "Global macro %s is not permitted." 
% (Macro), ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1) else: EdkLogger.error("Parser", FORMAT_INVALID, "%s not defined" % (Macro), ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1) - # Sometimes, we need to make differences between EDK and EDK2 modules + # Sometimes, we need to make differences between EDK and EDK2 modules if Name == 'INF_VERSION': - if re.match(r'0[xX][\da-f-A-F]{5,8}', Value): - self._Version = int(Value, 0) - elif re.match(r'\d+\.\d+', Value): + if hexVersionPattern.match(Value): + self._Version = int(Value, 0) + elif decVersionPattern.match(Value): ValueList = Value.split('.') Major = '%04o' % int(ValueList[0], 0) Minor = '%04o' % int(ValueList[1], 0) @@ -376,7 +383,7 @@ class MetaFileParser(object): EdkLogger.error('Parser', FORMAT_INVALID, "Invalid version number", ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1) - if type(self) == InfParser and self._Version < 0x00010005: + if isinstance(self, InfParser) and self._Version < 0x00010005: # EDK module allows using defines as macros self._FileLocalMacros[Name] = Value self._Defines[Name] = Value @@ -392,7 +399,7 @@ class MetaFileParser(object): self._ValueList[1] = TokenList2[1] # keys else: self._ValueList[1] = TokenList[0] - if len(TokenList) == 2 and type(self) != DscParser: # value + if len(TokenList) == 2 and not isinstance(self, DscParser): # value self._ValueList[2] = ReplaceMacro(TokenList[1], self._Macros) if self._ValueList[1].count('_') != 4: @@ -412,23 +419,22 @@ class MetaFileParser(object): Macros.update(self._GetApplicableSectionMacro()) return Macros - ## Construct section Macro dict + ## Construct section Macro dict def _ConstructSectionMacroDict(self, Name, Value): - ScopeKey = [(Scope[0], Scope[1],Scope[2]) for Scope in self._Scope] + ScopeKey = [(Scope[0], Scope[1], Scope[2]) for Scope in self._Scope] ScopeKey = tuple(ScopeKey) - SectionDictKey = self._SectionType, ScopeKey # # DecParser SectionType is a list, will contain more than one item only in Pcd Section # As Pcd section macro usage is not alllowed, so here it is safe # - if type(self) == DecParser: + if isinstance(self, DecParser): SectionDictKey = self._SectionType[0], ScopeKey - if SectionDictKey not in self._SectionsMacroDict: - self._SectionsMacroDict[SectionDictKey] = {} - SectionLocalMacros = self._SectionsMacroDict[SectionDictKey] - SectionLocalMacros[Name] = Value + else: + SectionDictKey = self._SectionType, ScopeKey + + self._SectionsMacroDict[SectionDictKey][Name] = Value - ## Get section Macros that are applicable to current line, which may come from other sections + ## Get section Macros that are applicable to current line, which may come from other sections ## that share the same name while scope is wider def _GetApplicableSectionMacro(self): Macros = {} @@ -438,7 +444,7 @@ class MetaFileParser(object): SpeSpeMacroDict = {} ActiveSectionType = self._SectionType - if type(self) == DecParser: + if isinstance(self, DecParser): ActiveSectionType = self._SectionType[0] for (SectionType, Scope) in self._SectionsMacroDict: @@ -446,20 +452,20 @@ class MetaFileParser(object): continue for ActiveScope in self._Scope: - Scope0, Scope1 ,Scope2= ActiveScope[0], ActiveScope[1],ActiveScope[2] - if(Scope0, Scope1,Scope2) not in Scope: + Scope0, Scope1, Scope2= ActiveScope[0], ActiveScope[1], ActiveScope[2] + if(Scope0, Scope1, Scope2) not in Scope: break else: SpeSpeMacroDict.update(self._SectionsMacroDict[(SectionType, Scope)]) for ActiveScope in self._Scope: - 
Scope0, Scope1,Scope2 = ActiveScope[0], ActiveScope[1],ActiveScope[2] - if(Scope0, Scope1,Scope2) not in Scope and (Scope0, "COMMON","COMMON") not in Scope and ("COMMON", Scope1,"COMMON") not in Scope: + Scope0, Scope1, Scope2 = ActiveScope[0], ActiveScope[1], ActiveScope[2] + if(Scope0, Scope1, Scope2) not in Scope and (Scope0, TAB_COMMON, TAB_COMMON) not in Scope and (TAB_COMMON, Scope1, TAB_COMMON) not in Scope: break else: ComSpeMacroDict.update(self._SectionsMacroDict[(SectionType, Scope)]) - if ("COMMON", "COMMON","COMMON") in Scope: + if (TAB_COMMON, TAB_COMMON, TAB_COMMON) in Scope: ComComMacroDict.update(self._SectionsMacroDict[(SectionType, Scope)]) Macros.update(ComComMacroDict) @@ -567,8 +573,8 @@ class InfParser(MetaFileParser): if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END: if not GetHeaderComment: for Cmt, LNo in Comments: - self._Store(MODEL_META_DATA_HEADER_COMMENT, Cmt, '', '', 'COMMON', - 'COMMON', self._Owner[-1], LNo, -1, LNo, -1, 0) + self._Store(MODEL_META_DATA_HEADER_COMMENT, Cmt, '', '', TAB_COMMON, + TAB_COMMON, self._Owner[-1], LNo, -1, LNo, -1, 0) GetHeaderComment = True else: TailComments.extend(SectionComments + Comments) @@ -619,7 +625,7 @@ class InfParser(MetaFileParser): self._ValueList = ['', '', ''] # parse current line, result will be put in self._ValueList self._SectionParser[self._SectionType](self) - if self._ValueList == None or self._ItemType == MODEL_META_DATA_DEFINE: + if self._ValueList is None or self._ItemType == MODEL_META_DATA_DEFINE: self._ItemType = -1 Comments = [] continue @@ -631,7 +637,7 @@ class InfParser(MetaFileParser): # Model, Value1, Value2, Value3, Arch, Platform, BelongsToItem=-1, # LineBegin=-1, ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, Enabled=-1 # - for Arch, Platform,_ in self._Scope: + for Arch, Platform, _ in self._Scope: LastItem = self._Store(self._SectionType, self._ValueList[0], self._ValueList[1], @@ -657,8 +663,8 @@ class InfParser(MetaFileParser): # If there are tail comments in INF file, save to database whatever the comments are for Comment in TailComments: - self._Store(MODEL_META_DATA_TAIL_COMMENT, Comment[0], '', '', 'COMMON', - 'COMMON', self._Owner[-1], -1, -1, -1, -1, 0) + self._Store(MODEL_META_DATA_TAIL_COMMENT, Comment[0], '', '', TAB_COMMON, + TAB_COMMON, self._Owner[-1], -1, -1, -1, -1, 0) self._Done() ## Data parser for the format in which there's path @@ -834,6 +840,7 @@ class DscParser(MetaFileParser): TAB_ELSE.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE, TAB_END_IF.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF, TAB_USER_EXTENSIONS.upper() : MODEL_META_DATA_USER_EXTENSION, + TAB_ERROR.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_ERROR, } # Valid names in define section @@ -880,7 +887,7 @@ class DscParser(MetaFileParser): # def __init__(self, FilePath, FileType, Arch, Table, Owner= -1, From= -1): # prevent re-initialization - if hasattr(self, "_Table"): + if hasattr(self, "_Table") and self._Table is Table: return MetaFileParser.__init__(self, FilePath, FileType, Arch, Table, Owner, From) self._Version = 0x00010005 # Only EDK2 dsc file is supported @@ -931,17 +938,24 @@ class DscParser(MetaFileParser): self._SubsectionType = MODEL_UNKNOWN self._SubsectionName = '' self._Owner[-1] = -1 - OwnerId = {} + OwnerId.clear() continue # subsection header elif Line[0] == TAB_OPTION_START and Line[-1] == TAB_OPTION_END: self._SubsectionType = MODEL_META_DATA_SUBSECTION_HEADER # directive line elif Line[0] == '!': - self._DirectiveParser() + TokenList = 
GetSplitValueList(Line, ' ', 1) + if TokenList[0] == TAB_INCLUDE: + for Arch, ModuleType, DefaultStore in self._Scope: + if self._SubsectionType != MODEL_UNKNOWN and Arch in OwnerId: + self._Owner[-1] = OwnerId[Arch] + self._DirectiveParser() + else: + self._DirectiveParser() continue if Line[0] == TAB_OPTION_START and not self._InSubsection: - EdkLogger.error("Parser", FILE_READ_FAILURE, "Missing the '{' before %s in Line %s" % (Line, Index+1),ExtraData=self.MetaFile) + EdkLogger.error("Parser", FILE_READ_FAILURE, "Missing the '{' before %s in Line %s" % (Line, Index+1), ExtraData=self.MetaFile) if self._InSubsection: SectionType = self._SubsectionType @@ -951,7 +965,7 @@ class DscParser(MetaFileParser): self._ValueList = ['', '', ''] self._SectionParser[SectionType](self) - if self._ValueList == None: + if self._ValueList is None: continue # # Model, Value1, Value2, Value3, Arch, ModuleType, BelongsToItem=-1, BelongsToFile=-1, @@ -959,7 +973,7 @@ class DscParser(MetaFileParser): # for Arch, ModuleType, DefaultStore in self._Scope: Owner = self._Owner[-1] - if self._SubsectionType != MODEL_UNKNOWN: + if self._SubsectionType != MODEL_UNKNOWN and Arch in OwnerId: Owner = OwnerId[Arch] self._LastItem = self._Store( self._ItemType, @@ -1021,9 +1035,11 @@ class DscParser(MetaFileParser): ExtraData=self._CurrentLine) ItemType = self.DataType[DirectiveName] - Scope = [['COMMON', 'COMMON','COMMON']] + Scope = [[TAB_COMMON, TAB_COMMON, TAB_COMMON]] if ItemType == MODEL_META_DATA_INCLUDE: Scope = self._Scope + elif ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ERROR: + Scope = self._Scope if ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF: # Remove all directives between !if and !endif, including themselves while self._DirectiveStack: @@ -1037,7 +1053,7 @@ class DscParser(MetaFileParser): EdkLogger.error("Parser", FORMAT_INVALID, "Redundant '!endif'", File=self.MetaFile, Line=self._LineIndex + 1, ExtraData=self._CurrentLine) - elif ItemType != MODEL_META_DATA_INCLUDE: + elif ItemType not in {MODEL_META_DATA_INCLUDE, MODEL_META_DATA_CONDITIONAL_STATEMENT_ERROR}: # Break if there's a !else is followed by a !elseif if ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF and \ self._DirectiveStack and \ @@ -1096,15 +1112,15 @@ class DscParser(MetaFileParser): @ParseMacro def _SkuIdParser(self): TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT) - if len(TokenList) not in (2,3): - EdkLogger.error('Parser', FORMAT_INVALID, "Correct format is '|[|]'", + if len(TokenList) not in (2, 3): + EdkLogger.error('Parser', FORMAT_INVALID, "Correct format is '|[|]'", ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1) self._ValueList[0:len(TokenList)] = TokenList @ParseMacro def _DefaultStoresParser(self): TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT) if len(TokenList) != 2: - EdkLogger.error('Parser', FORMAT_INVALID, "Correct format is '|'", + EdkLogger.error('Parser', FORMAT_INVALID, "Correct format is '|'", ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1) self._ValueList[0:len(TokenList)] = TokenList @@ -1156,7 +1172,7 @@ class DscParser(MetaFileParser): # Validate the datum type of Dynamic Defaul PCD and DynamicEx Default PCD ValueList = GetSplitValueList(self._ValueList[2]) - if len(ValueList) > 1 and ValueList[1] in [TAB_UINT8 , TAB_UINT16, TAB_UINT32 , TAB_UINT64] \ + if len(ValueList) > 1 and ValueList[1] in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64] \ and self._ItemType in [MODEL_PCD_DYNAMIC_DEFAULT, 
MODEL_PCD_DYNAMIC_EX_DEFAULT]: EdkLogger.error('Parser', FORMAT_INVALID, "The datum type '%s' of PCD is wrong" % ValueList[1], ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1) @@ -1164,7 +1180,7 @@ class DscParser(MetaFileParser): # Validate the VariableName of DynamicHii and DynamicExHii for PCD Entry must not be an empty string if self._ItemType in [MODEL_PCD_DYNAMIC_HII, MODEL_PCD_DYNAMIC_EX_HII]: DscPcdValueList = GetSplitValueList(TokenList[1], TAB_VALUE_SPLIT, 1) - if len(DscPcdValueList[0].replace('L','').replace('"','').strip()) == 0: + if len(DscPcdValueList[0].replace('L', '').replace('"', '').strip()) == 0: EdkLogger.error('Parser', FORMAT_INVALID, "The VariableName field in the HII format PCD entry must not be an empty string", ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1) @@ -1182,6 +1198,7 @@ class DscParser(MetaFileParser): if self._CurrentLine[-1] == '{': self._ValueList[0] = self._CurrentLine[0:-1].strip() self._InSubsection = True + self._SubsectionType = MODEL_UNKNOWN else: self._ValueList[0] = self._CurrentLine @@ -1242,6 +1259,13 @@ class DscParser(MetaFileParser): # PCD cannot be referenced in macro definition if self._ItemType not in [MODEL_META_DATA_DEFINE, MODEL_META_DATA_GLOBAL_DEFINE]: Macros.update(self._Symbols) + if GlobalData.BuildOptionPcd: + for Item in GlobalData.BuildOptionPcd: + if isinstance(Item, tuple): + continue + PcdName, TmpValue = Item.split("=") + TmpValue = BuildOptionValue(TmpValue, self._GuidDict) + Macros[PcdName.strip()] = TmpValue return Macros def _PostProcess(self): @@ -1276,6 +1300,7 @@ class DscParser(MetaFileParser): MODEL_META_DATA_BUILD_OPTION : self.__ProcessBuildOption, MODEL_UNKNOWN : self._Skip, MODEL_META_DATA_USER_EXTENSION : self._SkipUserExtension, + MODEL_META_DATA_CONDITIONAL_STATEMENT_ERROR : self._ProcessError, } self._Table = MetaFileStorage(self._RawTable.Cur, self.MetaFile, MODEL_FILE_DSC, True) @@ -1284,7 +1309,7 @@ class DscParser(MetaFileParser): self._DirectiveEvalStack = [] self._FileWithError = self.MetaFile self._FileLocalMacros = {} - self._SectionsMacroDict = {} + self._SectionsMacroDict.clear() GlobalData.gPlatformDefines = {} # Get all macro and PCD which has straitforward value @@ -1293,7 +1318,7 @@ class DscParser(MetaFileParser): self._ContentIndex = 0 self._InSubsection = False while self._ContentIndex < len(self._Content) : - Id, self._ItemType, V1, V2, V3, S1, S2, S3,Owner, self._From, \ + Id, self._ItemType, V1, V2, V3, S1, S2, S3, Owner, self._From, \ LineStart, ColStart, LineEnd, ColEnd, Enabled = self._Content[self._ContentIndex] if self._From < 0: @@ -1311,8 +1336,8 @@ class DscParser(MetaFileParser): break Record = self._Content[self._ContentIndex] if LineStart == Record[10] and LineEnd == Record[12]: - if [Record[5], Record[6],Record[7]] not in self._Scope: - self._Scope.append([Record[5], Record[6],Record[7]]) + if [Record[5], Record[6], Record[7]] not in self._Scope: + self._Scope.append([Record[5], Record[6], Record[7]]) self._ContentIndex += 1 else: break @@ -1326,8 +1351,8 @@ class DscParser(MetaFileParser): self._InSubsection = False try: Processer[self._ItemType]() - except EvaluationException, Excpt: - # + except EvaluationException as Excpt: + # # Only catch expression evaluation error here. 
We need to report # the precise number of line on which the error occurred # @@ -1348,12 +1373,12 @@ class DscParser(MetaFileParser): EdkLogger.error('Parser', FORMAT_INVALID, "Invalid expression: %s" % str(Excpt), File=self._FileWithError, ExtraData=' '.join(self._ValueList), Line=self._LineIndex + 1) - except MacroException, Excpt: + except MacroException as Excpt: EdkLogger.error('Parser', FORMAT_INVALID, str(Excpt), File=self._FileWithError, ExtraData=' '.join(self._ValueList), Line=self._LineIndex + 1) - if self._ValueList == None: + if self._ValueList is None: continue NewOwner = self._IdMapping.get(Owner, -1) @@ -1379,6 +1404,10 @@ class DscParser(MetaFileParser): GlobalData.gPlatformDefines.update(self._FileLocalMacros) self._PostProcessed = True self._Content = None + def _ProcessError(self): + if not self._Enabled: + return + EdkLogger.error('Parser', ERROR_STATEMENT, self._ValueList[1], File=self.MetaFile, Line=self._LineIndex + 1) def __ProcessSectionHeader(self): self._SectionName = self._ValueList[0] @@ -1401,7 +1430,7 @@ class DscParser(MetaFileParser): MODEL_PCD_DYNAMIC_VPD, MODEL_PCD_DYNAMIC_EX_DEFAULT, MODEL_PCD_DYNAMIC_EX_HII, MODEL_PCD_DYNAMIC_EX_VPD): Records = self._RawTable.Query(PcdType, BelongsToItem= -1.0) - for TokenSpaceGuid, PcdName, Value, Dummy2, Dummy3, Dummy4,ID, Line in Records: + for TokenSpaceGuid, PcdName, Value, Dummy2, Dummy3, Dummy4, ID, Line in Records: Name = TokenSpaceGuid + '.' + PcdName if Name not in GlobalData.gPlatformOtherPcds: PcdLine = Line @@ -1446,11 +1475,11 @@ class DscParser(MetaFileParser): Macros.update(GlobalData.gGlobalDefines) try: Result = ValueExpression(self._ValueList[1], Macros)() - except SymbolNotFound, Exc: + except SymbolNotFound as Exc: EdkLogger.debug(EdkLogger.DEBUG_5, str(Exc), self._ValueList[1]) Result = False - except WrnExpression, Excpt: - # + except WrnExpression as Excpt: + # # Catch expression evaluation warning here. We need to report # the precise number of line and return the evaluation result # @@ -1496,18 +1525,18 @@ class DscParser(MetaFileParser): # Allow using system environment variables in path after !include # __IncludeMacros['WORKSPACE'] = GlobalData.gGlobalDefines['WORKSPACE'] - if "ECP_SOURCE" in GlobalData.gGlobalDefines.keys(): + if "ECP_SOURCE" in GlobalData.gGlobalDefines: __IncludeMacros['ECP_SOURCE'] = GlobalData.gGlobalDefines['ECP_SOURCE'] # # During GenFds phase call DSC parser, will go into this branch. # - elif "ECP_SOURCE" in GlobalData.gCommandLineDefines.keys(): + elif "ECP_SOURCE" in GlobalData.gCommandLineDefines: __IncludeMacros['ECP_SOURCE'] = GlobalData.gCommandLineDefines['ECP_SOURCE'] __IncludeMacros['EFI_SOURCE'] = GlobalData.gGlobalDefines['EFI_SOURCE'] __IncludeMacros['EDK_SOURCE'] = GlobalData.gGlobalDefines['EDK_SOURCE'] # - # Allow using MACROs comes from [Defines] section to keep compatible. + # Allow using MACROs comes from [Defines] section to keep compatible. 
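(Editorial note, not part of the patch: the hunks above migrate exception handlers from the Python 2-only `except ExcType, e:` spelling to `except ExcType as e:`, which Python 2.6+ and Python 3 both accept, and compare against None by identity rather than equality. A minimal standalone sketch of both idioms with a hypothetical helper:)

from __future__ import print_function

def parse_int(text):
    try:
        return int(text, 0)
    except ValueError as exc:        # "as" form: valid on Python 2.6+ and Python 3
        print("invalid integer: %s" % exc)
        return None

value = parse_int("0x10")
if value is None:                    # identity check instead of "== None"
    print("no value parsed")
else:
    print(value)
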
# __IncludeMacros.update(self._Macros) @@ -1529,32 +1558,27 @@ class DscParser(MetaFileParser): self._FileWithError = IncludedFile1 - IncludedFileTable = MetaFileStorage(self._Table.Cur, IncludedFile1, MODEL_FILE_DSC, False) - Owner = self._Content[self._ContentIndex - 1][0] + FromItem = self._Content[self._ContentIndex - 1][0] + if self._InSubsection: + Owner = self._Content[self._ContentIndex - 1][8] + else: + Owner = self._Content[self._ContentIndex - 1][0] + IncludedFileTable = MetaFileStorage(self._Table.Cur, IncludedFile1, MODEL_FILE_DSC, False, FromItem=FromItem) Parser = DscParser(IncludedFile1, self._FileType, self._Arch, IncludedFileTable, - Owner=Owner, From=Owner) + Owner=Owner, From=FromItem) self.IncludedFiles.add (IncludedFile1) - # Does not allow lower level included file to include upper level included file - if Parser._From != Owner and int(Owner) > int (Parser._From): - EdkLogger.error('parser', FILE_ALREADY_EXIST, File=self._FileWithError, - Line=self._LineIndex + 1, ExtraData="{0} is already included at a higher level.".format(IncludedFile1)) - - # set the parser status with current status Parser._SectionName = self._SectionName + Parser._SubsectionType = self._SubsectionType + Parser._InSubsection = self._InSubsection Parser._SectionType = self._SectionType Parser._Scope = self._Scope Parser._Enabled = self._Enabled # Parse the included file Parser.Start() - # update current status with sub-parser's status - self._SectionName = Parser._SectionName - self._SectionType = Parser._SectionType - self._Scope = Parser._Scope - self._Enabled = Parser._Enabled # Insert all records in the table for the included file into dsc file table Records = IncludedFileTable.GetAll() @@ -1590,8 +1614,10 @@ class DscParser(MetaFileParser): if PcdValue and "." 
not in self._ValueList[0]: try: ValList[Index] = ValueExpression(PcdValue, self._Macros)(True) - except WrnExpression, Value: + except WrnExpression as Value: ValList[Index] = Value.result + except: + pass if ValList[Index] == 'True': ValList[Index] = '1' @@ -1604,7 +1630,7 @@ class DscParser(MetaFileParser): try: self._ValueList[2] = '|'.join(ValList) except Exception: - print ValList + print(ValList) def __ProcessComponent(self): self._ValueList[0] = ReplaceMacro(self._ValueList[0], self._Macros) @@ -1730,7 +1756,7 @@ class DecParser(MetaFileParser): # section content self._ValueList = ['', '', ''] self._SectionParser[self._SectionType[0]](self) - if self._ValueList == None or self._ItemType == MODEL_META_DATA_DEFINE: + if self._ValueList is None or self._ItemType == MODEL_META_DATA_DEFINE: self._ItemType = -1 self._Comments = [] continue @@ -1773,7 +1799,7 @@ class DecParser(MetaFileParser): if self._DefinesCount > 1: EdkLogger.error('Parser', FORMAT_INVALID, 'Multiple [Defines] section is exist.', self.MetaFile ) if self._DefinesCount == 0: - EdkLogger.error('Parser', FORMAT_INVALID, 'No [Defines] section exist.',self.MetaFile) + EdkLogger.error('Parser', FORMAT_INVALID, 'No [Defines] section exist.', self.MetaFile) self._Done() @@ -1822,7 +1848,7 @@ class DecParser(MetaFileParser): if len(ItemList) > 1: S1 = ItemList[1].upper() else: - S1 = 'COMMON' + S1 = TAB_ARCH_COMMON ArchList.add(S1) # S2 may be Platform or ModuleType if len(ItemList) > 2: @@ -1833,18 +1859,18 @@ class DecParser(MetaFileParser): EdkLogger.error("Parser", FORMAT_INVALID, 'Please use keyword "Private" as section tag modifier.', File=self.MetaFile, Line=self._LineIndex + 1, ExtraData=self._CurrentLine) else: - S2 = 'COMMON' + S2 = TAB_COMMON PrivateList.add(S2) if [S1, S2, self.DataType[self._SectionName]] not in self._Scope: self._Scope.append([S1, S2, self.DataType[self._SectionName]]) # 'COMMON' must not be used with specific ARCHs at the same section - if 'COMMON' in ArchList and len(ArchList) > 1: + if TAB_ARCH_COMMON in ArchList and len(ArchList) > 1: EdkLogger.error('Parser', FORMAT_INVALID, "'common' ARCH must not be used with specific ARCHs", File=self.MetaFile, Line=self._LineIndex + 1, ExtraData=self._CurrentLine) # It is not permissible to mix section tags without the Private attribute with section tags with the Private attribute - if 'COMMON' in PrivateList and len(PrivateList) > 1: + if TAB_COMMON in PrivateList and len(PrivateList) > 1: EdkLogger.error('Parser', FORMAT_INVALID, "Can't mix section tags without the Private attribute with section tags with the Private attribute", File=self.MetaFile, Line=self._LineIndex + 1, ExtraData=self._CurrentLine) @@ -1871,6 +1897,8 @@ class DecParser(MetaFileParser): File=self.MetaFile, Line=self._LineIndex + 1) self._ValueList[0] = TokenList[0] self._ValueList[1] = TokenList[1] + if self._ValueList[0] not in self._GuidDict: + self._GuidDict[self._ValueList[0]] = self._ValueList[1] ## PCD sections parser # @@ -1888,22 +1916,24 @@ class DecParser(MetaFileParser): if "|" not in self._CurrentLine: if "" == self._CurrentLine: self._include_flag = True + self._package_flag = False self._ValueList = None return if "" == self._CurrentLine: self._package_flag = True self._ValueList = None + self._include_flag = False return if self._include_flag: self._ValueList[1] = "_" + md5.new(self._CurrentLine).hexdigest() self._ValueList[2] = self._CurrentLine - self._include_flag = False if self._package_flag and "}" != self._CurrentLine: self._ValueList[1] = "_" + 
md5.new(self._CurrentLine).hexdigest() self._ValueList[2] = self._CurrentLine if self._CurrentLine == "}": self._package_flag = False + self._include_flag = False self._ValueList = None return else: @@ -1912,6 +1942,9 @@ class DecParser(MetaFileParser): if len(PcdNames) == 2: self._CurrentStructurePcdName = "" else: + if self._CurrentStructurePcdName != TAB_SPLIT.join(PcdNames[:2]): + EdkLogger.error('Parser', FORMAT_INVALID, "Pcd Name does not match: %s and %s " % (self._CurrentStructurePcdName, TAB_SPLIT.join(PcdNames[:2])), + File=self.MetaFile, Line=self._LineIndex + 1) self._ValueList[1] = TAB_SPLIT.join(PcdNames[2:]) self._ValueList[2] = PcdTockens[1] if not self._CurrentStructurePcdName: @@ -1984,15 +2017,10 @@ class DecParser(MetaFileParser): PcdValue = ValueList[0] if PcdValue: try: - ValueList[0] = ValueExpression(PcdValue, self._AllPcdDict)(True) - except WrnExpression, Value: - ValueList[0] = Value.result - - if ValueList[0] == 'True': - ValueList[0] = '1' - if ValueList[0] == 'False': - ValueList[0] = '0' - + self._GuidDict.update(self._AllPcdDict) + ValueList[0] = ValueExpressionEx(ValueList[0], ValueList[1], self._GuidDict)(True) + except BadExpression as Value: + EdkLogger.error('Parser', FORMAT_INVALID, Value, ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1) # check format of default value against the datum type IsValid, Cause = CheckPcdDatum(ValueList[1], ValueList[0]) if not IsValid:
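(Editorial note, not part of the patch: the diff also drops redundant `.keys()` calls from dictionary membership tests and uses a set literal when an item type is checked against several model constants. A minimal standalone sketch of both idioms, with made-up names and values:)

MODEL_META_DATA_INCLUDE = 1          # made-up constant values for illustration
MODEL_META_DATA_ERROR = 2

global_defines = {"ECP_SOURCE": "EdkCompatibilityPkg"}

# Membership tests work directly on the dict; calling .keys() first is redundant.
if "ECP_SOURCE" in global_defines:
    print(global_defines["ECP_SOURCE"])

# A set literal expresses "is one of these constants" concisely.
item_type = MODEL_META_DATA_ERROR
if item_type in {MODEL_META_DATA_INCLUDE, MODEL_META_DATA_ERROR}:
    print("directive gets per-arch scope handling")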