Sync BaseTool trunk (version r2460) into EDKII BaseTools. The change mainly includes:
authorlgao4 <lgao4@6f19259b-4bc3-4df7-8a09-765794883524>
Wed, 7 Dec 2011 06:19:28 +0000 (06:19 +0000)
committerlgao4 <lgao4@6f19259b-4bc3-4df7-8a09-765794883524>
Wed, 7 Dec 2011 06:19:28 +0000 (06:19 +0000)
  1. Support use expression as DSC file PCD value.
  2. Update the FDF parser to fix a bug in retrieving the complete macro value.
  3. Fix bug to replace SET statement macro and evaluate SET statement PCD value in FDF file.
  4. Fix a bug where a MACRO defined in a conditional block cannot be processed correctly

Signed-off-by: lgao4
Reviewed-by: gikidy
git-svn-id: https://edk2.svn.sourceforge.net/svnroot/edk2/trunk/edk2@12827 6f19259b-4bc3-4df7-8a09-765794883524

68 files changed:
BaseTools/Bin/Win32/BPDG.exe
BaseTools/Bin/Win32/BootSectImage.exe
BaseTools/Bin/Win32/EfiLdrImage.exe
BaseTools/Bin/Win32/EfiRom.exe
BaseTools/Bin/Win32/Fpd2Dsc.exe
BaseTools/Bin/Win32/GenBootSector.exe
BaseTools/Bin/Win32/GenCrc32.exe
BaseTools/Bin/Win32/GenDepex.exe
BaseTools/Bin/Win32/GenFds.exe
BaseTools/Bin/Win32/GenFfs.exe
BaseTools/Bin/Win32/GenFv.exe
BaseTools/Bin/Win32/GenFw.exe
BaseTools/Bin/Win32/GenPage.exe
BaseTools/Bin/Win32/GenPatchPcdTable.exe
BaseTools/Bin/Win32/GenSec.exe
BaseTools/Bin/Win32/GenVtf.exe
BaseTools/Bin/Win32/LzmaCompress.exe
BaseTools/Bin/Win32/MigrationMsa2Inf.exe
BaseTools/Bin/Win32/PatchPcdValue.exe
BaseTools/Bin/Win32/Spd2Dec.exe
BaseTools/Bin/Win32/Split.exe
BaseTools/Bin/Win32/TargetTool.exe
BaseTools/Bin/Win32/TianoCompress.exe
BaseTools/Bin/Win32/Trim.exe
BaseTools/Bin/Win32/UPT.exe
BaseTools/Bin/Win32/VfrCompile.exe
BaseTools/Bin/Win32/VolInfo.exe
BaseTools/Bin/Win32/build.exe
BaseTools/Source/C/EfiRom/EfiRom.c
BaseTools/Source/C/GenFv/GenFvInternalLib.c
BaseTools/Source/C/Include/Common/BuildVersion.h
BaseTools/Source/Python/AutoGen/AutoGen.py
BaseTools/Source/Python/AutoGen/GenC.py
BaseTools/Source/Python/Common/BuildVersion.py
BaseTools/Source/Python/Common/Expression.py
BaseTools/Source/Python/Common/GlobalData.py
BaseTools/Source/Python/Common/Misc.py
BaseTools/Source/Python/Ecc/Check.py
BaseTools/Source/Python/Ecc/Configuration.py
BaseTools/Source/Python/Ecc/Database.py
BaseTools/Source/Python/Ecc/Ecc.py
BaseTools/Source/Python/Ecc/MetaDataParser.py
BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaDataTable.py [new file with mode: 0644]
BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileParser.py [new file with mode: 0644]
BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileTable.py [new file with mode: 0644]
BaseTools/Source/Python/Ecc/MetaFileWorkspace/__init__.py [new file with mode: 0644]
BaseTools/Source/Python/Ecc/c.py
BaseTools/Source/Python/Ecc/config.ini
BaseTools/Source/Python/Eot/Parser.py
BaseTools/Source/Python/GenFds/FdfParser.py
BaseTools/Source/Python/GenFds/GenFds.py
BaseTools/Source/Python/Table/TableFdf.py
BaseTools/Source/Python/Table/TableFile.py
BaseTools/Source/Python/Trim/Trim.py
BaseTools/Source/Python/UPT/BuildVersion.py
BaseTools/Source/Python/UPT/Core/DependencyRules.py
BaseTools/Source/Python/UPT/Core/DistributionPackageClass.py
BaseTools/Source/Python/UPT/Core/IpiDb.py
BaseTools/Source/Python/UPT/InstallPkg.py
BaseTools/Source/Python/UPT/Library/Misc.py
BaseTools/Source/Python/UPT/Logger/StringTable.py
BaseTools/Source/Python/UPT/Logger/ToolError.py
BaseTools/Source/Python/UPT/Xml/PackageSurfaceAreaXml.py
BaseTools/Source/Python/UPT/Xml/XmlParser.py
BaseTools/Source/Python/Workspace/MetaFileParser.py
BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
BaseTools/Source/Python/build/build.py
BaseTools/UserManuals/EfiRom_Utility_Man_Page.rtf

index d75855948c5d1435a6ac2cf5936830308f53f2c2..d67e1b67db3db1c90f07d918be7df4d3f3ce6aef 100644 (file)
Binary files a/BaseTools/Bin/Win32/BPDG.exe and b/BaseTools/Bin/Win32/BPDG.exe differ
index a278b171b48d3f0c357a9aec4a13cf297b91ce12..7af47afbf9697a568463fb8e3f767bf0d913a710 100755 (executable)
Binary files a/BaseTools/Bin/Win32/BootSectImage.exe and b/BaseTools/Bin/Win32/BootSectImage.exe differ
index ccd168488d0ded04a0b91d2f820b5dad39241d84..69de8394f0ceab748163b639d2c9c23086a5e85f 100755 (executable)
Binary files a/BaseTools/Bin/Win32/EfiLdrImage.exe and b/BaseTools/Bin/Win32/EfiLdrImage.exe differ
index d7f3e71c27e685c2c59381b7b319ef7ef4d2f484..4b3de928143be93ccd93234b978322b5bfe9e825 100755 (executable)
Binary files a/BaseTools/Bin/Win32/EfiRom.exe and b/BaseTools/Bin/Win32/EfiRom.exe differ
index 938a3820a038d79a9207c8da1f7f584746a674fd..2c48bc05f4d14fc029e09565a827adcbdf52edad 100755 (executable)
Binary files a/BaseTools/Bin/Win32/Fpd2Dsc.exe and b/BaseTools/Bin/Win32/Fpd2Dsc.exe differ
index d5149510f9f9235d38551cbbe02949d2a4ebd717..51603b91e348342b10cd60723c3f5cf9f08baae7 100755 (executable)
Binary files a/BaseTools/Bin/Win32/GenBootSector.exe and b/BaseTools/Bin/Win32/GenBootSector.exe differ
index 4003ba854118eaca40013e045bd893307d286780..8f84be2365c772f4c6b842239cbf8bb6020a3102 100755 (executable)
Binary files a/BaseTools/Bin/Win32/GenCrc32.exe and b/BaseTools/Bin/Win32/GenCrc32.exe differ
index 8afed630101e90ac2531275cedec7d39acc076e4..d2013b73eb5328fca6bda006f506df76558b6fb7 100755 (executable)
Binary files a/BaseTools/Bin/Win32/GenDepex.exe and b/BaseTools/Bin/Win32/GenDepex.exe differ
index 470ed8aa985e8e5a3f85fc6ff76fe9cd8f4435fa..92bebfa6b6ca9f86d4b21653c73599302fdd804b 100755 (executable)
Binary files a/BaseTools/Bin/Win32/GenFds.exe and b/BaseTools/Bin/Win32/GenFds.exe differ
index f0ab6d84f9422c3e1a67257a62ee956485aa9820..37a069e265b332182b133e0317479103ee4c4894 100755 (executable)
Binary files a/BaseTools/Bin/Win32/GenFfs.exe and b/BaseTools/Bin/Win32/GenFfs.exe differ
index 2b58bf51c03a887f0400977dab64a9799b7e3102..03b80f3bfe4b4ea6726ee5c3c71d5ee6ffdc63a6 100755 (executable)
Binary files a/BaseTools/Bin/Win32/GenFv.exe and b/BaseTools/Bin/Win32/GenFv.exe differ
index 6e509a87f49a35a85f8027fef288864a8683be5e..6df993458241c091b0f48cc7abf0dba2f289ff83 100755 (executable)
Binary files a/BaseTools/Bin/Win32/GenFw.exe and b/BaseTools/Bin/Win32/GenFw.exe differ
index a2a70f345441a492d1cfc3d995dc35de11b6573a..3f70426fc82e33e5c301cb1bf6e5b788c235af3b 100755 (executable)
Binary files a/BaseTools/Bin/Win32/GenPage.exe and b/BaseTools/Bin/Win32/GenPage.exe differ
index 7f3cb9c5970c95b4540aebfe4e2348184f3fb573..cd9ec75569b699ec1bba690ddbdca398dc1aa504 100755 (executable)
Binary files a/BaseTools/Bin/Win32/GenPatchPcdTable.exe and b/BaseTools/Bin/Win32/GenPatchPcdTable.exe differ
index 37b12443bb96d5a1af6322b33d617efb200cf203..f8f11e82c1a86edc99616b0ccb01b7ea9eca8cb7 100755 (executable)
Binary files a/BaseTools/Bin/Win32/GenSec.exe and b/BaseTools/Bin/Win32/GenSec.exe differ
index e93adbad7580be9894819c09cf43761cef59317f..6e60e8c36507420aed9acf9bab44b4a1e98013f9 100755 (executable)
Binary files a/BaseTools/Bin/Win32/GenVtf.exe and b/BaseTools/Bin/Win32/GenVtf.exe differ
index 62b472cb5a441baacf48b2bc05ac816d15db7ef0..8d97bcb2d30d41de7f9d323ab76c6d18f557b540 100755 (executable)
Binary files a/BaseTools/Bin/Win32/LzmaCompress.exe and b/BaseTools/Bin/Win32/LzmaCompress.exe differ
index e133108645a302024cd70e4d4360a3cd202fc472..6a96e513fc7ac73d270cd59bc7f87772eba087d2 100755 (executable)
Binary files a/BaseTools/Bin/Win32/MigrationMsa2Inf.exe and b/BaseTools/Bin/Win32/MigrationMsa2Inf.exe differ
index 339740a31f73892c711b4adec1e86e69b877ecdd..ac5121474465ae06f5f62adfbf51c198780587d7 100755 (executable)
Binary files a/BaseTools/Bin/Win32/PatchPcdValue.exe and b/BaseTools/Bin/Win32/PatchPcdValue.exe differ
index ab96d5a90a417e4598694e2420075172c0199fd2..e003096aa5bd864a61c734aa3e9a64cc6c8e4deb 100755 (executable)
Binary files a/BaseTools/Bin/Win32/Spd2Dec.exe and b/BaseTools/Bin/Win32/Spd2Dec.exe differ
index 9c1b0f676446e94924e56a0844f2facd1ed20e50..5285c71db9c309b5bac293da521649537c3e175b 100755 (executable)
Binary files a/BaseTools/Bin/Win32/Split.exe and b/BaseTools/Bin/Win32/Split.exe differ
index 59f83aa3f1c8a44b35e5dabd5daee6b28bce1af8..5eecbeb9a5f286cc9bce660e98874f0b6e0ee395 100755 (executable)
Binary files a/BaseTools/Bin/Win32/TargetTool.exe and b/BaseTools/Bin/Win32/TargetTool.exe differ
index 1a718ecf57d0c3d684481e2fdf5fe74e8bb7b1a7..58d2972ec9952f16b08d109136b19bfc3b0fa157 100755 (executable)
Binary files a/BaseTools/Bin/Win32/TianoCompress.exe and b/BaseTools/Bin/Win32/TianoCompress.exe differ
index 71e8e352d07271ef9a3dc3292bf3b72ccc20af36..563ca64a864246c8c2bf45e06041618f9768c4c9 100755 (executable)
Binary files a/BaseTools/Bin/Win32/Trim.exe and b/BaseTools/Bin/Win32/Trim.exe differ
index ac13bfe6bb0f40bb2bbe1fa0896414f496f36c3a..3dad60db652f810bd6eb027196caec81ad39b453 100644 (file)
Binary files a/BaseTools/Bin/Win32/UPT.exe and b/BaseTools/Bin/Win32/UPT.exe differ
index 0b1c7bdaca92b830e472ae930610c8d7728fd077..4a14a12c0ae3e3d2e01c302bc74b727e127f5dad 100755 (executable)
Binary files a/BaseTools/Bin/Win32/VfrCompile.exe and b/BaseTools/Bin/Win32/VfrCompile.exe differ
index 0436a0c81504824fd9135806e62d6febd04f1608..ad84ee10dcc705d41a1cae40051c27ddd8f4be0f 100755 (executable)
Binary files a/BaseTools/Bin/Win32/VolInfo.exe and b/BaseTools/Bin/Win32/VolInfo.exe differ
index ef96aea362228772d2897371744743ef1454b6a6..4158f832ab5bec0609f6568214b5f5fbd28193a8 100755 (executable)
Binary files a/BaseTools/Bin/Win32/build.exe and b/BaseTools/Bin/Win32/build.exe differ
index 9f71b19323e9eea951bf984691e5cc73aa97c2bd..5152f38ba0599b46e3efdfddf2252f27f4c006fd 100644 (file)
@@ -1,6 +1,6 @@
 /** @file\r
 \r
-Copyright (c) 1999 - 2010, Intel Corporation. All rights reserved.<BR>\r
+Copyright (c) 1999 - 2011, Intel Corporation. All rights reserved.<BR>\r
 This program and the accompanying materials are licensed and made available \r
 under the terms and conditions of the BSD License which accompanies this \r
 distribution.  The full text of the license may be found at\r
@@ -1200,19 +1200,19 @@ Returns:
   //\r
   // Summary usage\r
   //\r
-  fprintf (stdout, "Usage: %s [options] [file name<s>] \n\n", UTILITY_NAME);\r
+  fprintf (stdout, "Usage: %s -f VendorId -i DeviceId [options] [file name<s>] \n\n", UTILITY_NAME);\r
   \r
   //\r
   // Copyright declaration\r
   // \r
-  fprintf (stdout, "Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.\n\n");\r
+  fprintf (stdout, "Copyright (c) 2007 - 2011, Intel Corporation. All rights reserved.\n\n");\r
 \r
   //\r
   // Details Option\r
   //\r
   fprintf (stdout, "Options:\n");\r
   fprintf (stdout, "  -o FileName, --output FileName\n\\r
-            File will be created to store the ouput content.\n");\r
+            File will be created to store the output content.\n");\r
   fprintf (stdout, "  -e EfiFileName\n\\r
             EFI PE32 image files.\n");\r
   fprintf (stdout, "  -ec EfiFileName\n\\r
@@ -1224,9 +1224,9 @@ Returns:
   fprintf (stdout, "  -r Rev    Hex Revision in the PCI data structure header.\n");\r
   fprintf (stdout, "  -n        Not to automatically set the LAST bit in the last file.\n");\r
   fprintf (stdout, "  -f VendorId\n\\r
-            Hex PCI Vendor ID for the device OpROM.\n");\r
+            Hex PCI Vendor ID for the device OpROM, must be specified\n");\r
   fprintf (stdout, "  -i DeviceId\n\\r
-            Hex PCI Device ID for the device OpROM.\n");\r
+            Hex PCI Device ID for the device OpROM, must be specified\n");\r
   fprintf (stdout, "  -p, --pci23\n\\r
             Default layout meets PCI 3.0 specifications\n\\r
             specifying this flag will for a PCI 2.3 layout.\n");\r
index 684933f64da0365e4495e16ea6a15f43dd71f94a..c01e504cbed99323738fbce6d105fe8818363786 100644 (file)
@@ -1021,7 +1021,7 @@ Returns:
   Status = VerifyFfsFile ((EFI_FFS_FILE_HEADER *)FileBuffer);\r
   if (EFI_ERROR (Status)) {\r
     free (FileBuffer);\r
-    Error (NULL, 0, 3000, "Invalid", "%s is a FFS file.", FvInfo->FvFiles[Index]);\r
+    Error (NULL, 0, 3000, "Invalid", "%s is not a valid FFS file.", FvInfo->FvFiles[Index]);\r
     return EFI_INVALID_PARAMETER;\r
   }\r
 \r
index bbcd09a918206a7e000cc9400593a0be5ddec7da..64bce09165ebef590f567309e1ea11ebbd2bd260 100644 (file)
@@ -1,3 +1,3 @@
 //This file is for build version number auto generation\r
 //\r
-#define __BUILD_VERSION "Build 2423"\r
+#define __BUILD_VERSION "Build 2460"\r
index 700b689a54fbda3f57c14a32498eecd4c4defd7f..cb3369ffdf315394a3c05534e9273789d07a7bf9 100644 (file)
@@ -184,6 +184,7 @@ class WorkspaceAutoGen(AutoGen):
         self.MetaFile       = ActivePlatform\r
         self.WorkspaceDir   = WorkspaceDir\r
         self.Platform       = self.BuildDatabase[self.MetaFile, 'COMMON', Target, Toolchain]\r
+        GlobalData.gActivePlatform = self.Platform\r
         self.BuildTarget    = Target\r
         self.ToolChain      = Toolchain\r
         self.ArchList       = ArchList\r
@@ -960,8 +961,7 @@ class PlatformAutoGen(AutoGen):
                                 "Fail to get FLASH_DEFINITION definition in DSC file %s which is required when DSC contains VPD PCD." % str(self.Platform.MetaFile))\r
             \r
             if VpdFile.GetCount() != 0:\r
-                WorkspaceDb = self.BuildDatabase.WorkspaceDb\r
-                DscTimeStamp = WorkspaceDb.GetTimeStamp(WorkspaceDb.GetFileId(str(self.Platform.MetaFile)))\r
+                DscTimeStamp = self.Platform.MetaFile.TimeStamp\r
                 FvPath = os.path.join(self.BuildDir, "FV")\r
                 if not os.path.exists(FvPath):\r
                     try:\r
@@ -1001,7 +1001,7 @@ class PlatformAutoGen(AutoGen):
                             # just pick the a value to determine whether is unicode string type\r
                             Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]]                        \r
                             if Sku.VpdOffset == "*":\r
-                                Sku.VpdOffset = VpdFile.GetOffset(Pcd)[0]\r
+                                Sku.VpdOffset = VpdFile.GetOffset(Pcd)[0].strip()\r
                     else:\r
                         EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)\r
             \r
@@ -1242,18 +1242,43 @@ class PlatformAutoGen(AutoGen):
         if self._PcdTokenNumber == None:\r
             self._PcdTokenNumber = sdict()\r
             TokenNumber = 1\r
+            #\r
+            # Make the Dynamic and DynamicEx PCD use within different TokenNumber area. \r
+            # Such as:\r
+            # \r
+            # Dynamic PCD:\r
+            # TokenNumber 0 ~ 10\r
+            # DynamicEx PCD:\r
+            # TokeNumber 11 ~ 20\r
+            #\r
             for Pcd in self.DynamicPcdList:\r
                 if Pcd.Phase == "PEI":\r
-                    EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))\r
-                    self._PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber\r
-                    TokenNumber += 1\r
-\r
+                    if Pcd.Type in ["Dynamic", "DynamicDefault", "DynamicVpd", "DynamicHii"]:\r
+                        EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))\r
+                        self._PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber\r
+                        TokenNumber += 1\r
+                        \r
+            for Pcd in self.DynamicPcdList:\r
+                if Pcd.Phase == "PEI":\r
+                    if Pcd.Type in ["DynamicEx", "DynamicExDefault", "DynamicExVpd", "DynamicExHii"]:\r
+                        EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))\r
+                        self._PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber\r
+                        TokenNumber += 1\r
+                        \r
             for Pcd in self.DynamicPcdList:\r
                 if Pcd.Phase == "DXE":\r
-                    EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))\r
-                    self._PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber\r
-                    TokenNumber += 1\r
-\r
+                    if Pcd.Type in ["Dynamic", "DynamicDefault", "DynamicVpd", "DynamicHii"]:\r
+                        EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))\r
+                        self._PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber\r
+                        TokenNumber += 1\r
+                        \r
+            for Pcd in self.DynamicPcdList:\r
+                if Pcd.Phase == "DXE":\r
+                    if Pcd.Type in ["DynamicEx", "DynamicExDefault", "DynamicExVpd", "DynamicExHii"]:\r
+                        EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))\r
+                        self._PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber\r
+                        TokenNumber += 1\r
+                        \r
             for Pcd in self.NonDynamicPcdList:\r
                 self._PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber\r
                 TokenNumber += 1\r
index 5638bfd043c41067b8827d565be46ae778868cb0..530dfd7c978dfc2b9dea7ee173dae904abee3e3b 100644 (file)
@@ -312,7 +312,7 @@ gAutoGenHPrologueString = TemplateString("""
 
 """)
 
-gAutoGenHCppPrologueString = """
+gAutoGenHCppPrologueString = """\
 #ifdef __cplusplus
 extern "C" {
 #endif
@@ -971,9 +971,9 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
 
         if Pcd.DatumType == 'BOOLEAN':
             BoolValue = Value.upper()
-            if BoolValue == 'TRUE':
+            if BoolValue == 'TRUE' or BoolValue == '1':
                 Value = '1U'
-            elif BoolValue == 'FALSE':
+            elif BoolValue == 'FALSE' or BoolValue == '0':
                 Value = '0U'
 
         if Pcd.DatumType in ['UINT64', 'UINT32', 'UINT16', 'UINT8']:
@@ -1193,14 +1193,14 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
 
     Dict = {
         'PHASE'                         : Phase,
-        'GUID_TABLE_SIZE'               : '1',
-        'STRING_TABLE_SIZE'             : '1',
-        'SKUID_TABLE_SIZE'              : '1',
-        'LOCAL_TOKEN_NUMBER_TABLE_SIZE' : '1',
-        'LOCAL_TOKEN_NUMBER'            : '0',
-        'EXMAPPING_TABLE_SIZE'          : '1',
-        'EX_TOKEN_NUMBER'               : '0',
-        'SIZE_TABLE_SIZE'               : '2',
+        'GUID_TABLE_SIZE'               : '1U',
+        'STRING_TABLE_SIZE'             : '1U',
+        'SKUID_TABLE_SIZE'              : '1U',
+        'LOCAL_TOKEN_NUMBER_TABLE_SIZE' : '1U',
+        'LOCAL_TOKEN_NUMBER'            : '0U',
+        'EXMAPPING_TABLE_SIZE'          : '1U',
+        'EX_TOKEN_NUMBER'               : '0U',
+        'SIZE_TABLE_SIZE'               : '2U',
         'GUID_TABLE_EMPTY'              : 'TRUE',
         'STRING_TABLE_EMPTY'            : 'TRUE',
         'SKUID_TABLE_EMPTY'             : 'TRUE',
@@ -1208,7 +1208,7 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
         'EXMAP_TABLE_EMPTY'             : 'TRUE',
         'PCD_DATABASE_UNINIT_EMPTY'     : '  UINT8  dummy; /* PCD_DATABASE_UNINIT is emptry */',
         'SYSTEM_SKU_ID'                 : '  SKU_ID             SystemSkuId;',
-        'SYSTEM_SKU_ID_VALUE'           : '0'
+        'SYSTEM_SKU_ID_VALUE'           : '0U'
     }
 
     for DatumType in ['UINT64','UINT32','UINT16','UINT8','BOOLEAN', "VOID*"]:
@@ -1313,10 +1313,10 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
             if SkuId == None or SkuId == '':
                 continue
 
-            if SkuId not in Dict['SKUID_VALUE']:
-                Dict['SKUID_VALUE'].append(SkuId)
+            if (SkuId + 'U') not in Dict['SKUID_VALUE']:
+                Dict['SKUID_VALUE'].append(SkuId + 'U')
 
-            SkuIdIndex =   Dict['SKUID_VALUE'].index(SkuId)
+            SkuIdIndex =   Dict['SKUID_VALUE'].index(SkuId + 'U')
             if len(Sku.VariableName) > 0:
                 Pcd.TokenTypeList += ['PCD_TYPE_HII']
                 Pcd.InitString = 'INIT'
@@ -1346,11 +1346,11 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
                 VariableHeadGuidIndex = GuidList.index(VariableGuid)
 
                 if "PCD_TYPE_STRING" in Pcd.TokenTypeList:
-                    VariableHeadValueList.append('%d, %d, %s, offsetof(%s_PCD_DATABASE, Init.%s_%s)' %
+                    VariableHeadValueList.append('%dU, %dU, %sU, offsetof(%s_PCD_DATABASE, Init.%s_%s)' %
                                                  (VariableHeadGuidIndex, VariableHeadStringIndex, Sku.VariableOffset,
                                                   Phase, CName, TokenSpaceGuid))
                 else:
-                    VariableHeadValueList.append('%d, %d, %s, offsetof(%s_PCD_DATABASE, Init.%s_%s_VariableDefault_%s)' %
+                    VariableHeadValueList.append('%dU, %dU, %sU, offsetof(%s_PCD_DATABASE, Init.%s_%s_VariableDefault_%s)' %
                                                  (VariableHeadGuidIndex, VariableHeadStringIndex, Sku.VariableOffset,
                                                   Phase, CName, TokenSpaceGuid, SkuIdIndex))
                 Dict['VARDEF_CNAME_'+Pcd.DatumType].append(CName)
@@ -1359,11 +1359,24 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
                 if "PCD_TYPE_STRING" in  Pcd.TokenTypeList:
                     Dict['VARDEF_VALUE_' + Pcd.DatumType].append("%s_%s[%d]" % (Pcd.TokenCName, TokenSpaceGuid, SkuIdIndex))
                 else:
-                    Dict['VARDEF_VALUE_'+Pcd.DatumType].append(Sku.HiiDefaultValue)
+                    #
+                    # ULL (for UINT64) or U(other integer type) should be append to avoid
+                    # warning under linux building environment.
+                    #
+                    if Pcd.DatumType == "UINT64":
+                        Dict['VARDEF_VALUE_'+Pcd.DatumType].append(Sku.HiiDefaultValue + "ULL")
+                    elif Pcd.DatumType in ("UINT32", "UINT16", "UINT8"):
+                        Dict['VARDEF_VALUE_'+Pcd.DatumType].append(Sku.HiiDefaultValue + "U")
+                    elif Pcd.DatumType == "BOOLEAN":
+                        if Sku.HiiDefaultValue in ["1", "0"]:
+                            Dict['VARDEF_VALUE_'+Pcd.DatumType].append(Sku.HiiDefaultValue + "U")
+                    else:
+                        Dict['VARDEF_VALUE_'+Pcd.DatumType].append(Sku.HiiDefaultValue)
+
             elif Sku.VpdOffset != '':
                 Pcd.TokenTypeList += ['PCD_TYPE_VPD']
                 Pcd.InitString = 'INIT'
-                VpdHeadOffsetList.append(Sku.VpdOffset)
+                VpdHeadOffsetList.append(str(Sku.VpdOffset) + 'U')
                 continue
           
             if Pcd.DatumType == 'VOID*':
@@ -1390,11 +1403,11 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
                         Size = len(Sku.DefaultValue.replace(',',' ').split())
                         Dict['STRING_TABLE_VALUE'].append(Sku.DefaultValue)
 
-                    StringHeadOffsetList.append(str(StringTableSize))
+                    StringHeadOffsetList.append(str(StringTableSize) + 'U')
                     Dict['SIZE_TABLE_CNAME'].append(CName)
                     Dict['SIZE_TABLE_GUID'].append(TokenSpaceGuid)
-                    Dict['SIZE_TABLE_CURRENT_LENGTH'].append(Size)
-                    Dict['SIZE_TABLE_MAXIMUM_LENGTH'].append(Pcd.MaxDatumSize)
+                    Dict['SIZE_TABLE_CURRENT_LENGTH'].append(str(Size) + 'U')
+                    Dict['SIZE_TABLE_MAXIMUM_LENGTH'].append(str(Pcd.MaxDatumSize) + 'U')
                     if Pcd.MaxDatumSize != '':
                         MaxDatumSize = int(Pcd.MaxDatumSize, 0)
                         if MaxDatumSize < Size:
@@ -1425,6 +1438,9 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
                     ValueList.append(Sku.DefaultValue + "ULL")
                 elif Pcd.DatumType in ("UINT32", "UINT16", "UINT8"):
                     ValueList.append(Sku.DefaultValue + "U")
+                elif Pcd.DatumType == "BOOLEAN":
+                    if Sku.DefaultValue in ["1", "0"]:
+                        ValueList.append(Sku.DefaultValue + "U")              
                 else:
                     ValueList.append(Sku.DefaultValue)
 
@@ -1489,8 +1505,19 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
         Dict['TOKEN_CNAME'][GeneratedTokenNumber] = CName
         Dict['TOKEN_GUID'][GeneratedTokenNumber] = TokenSpaceGuid
         Dict['TOKEN_TYPE'][GeneratedTokenNumber] = ' | '.join(Pcd.TokenTypeList)
+        
+        Pcd.TokenTypeList = list(set(Pcd.TokenTypeList))
+        #
+        # Update VARDEF_HEADER
+        #
+        if 'PCD_TYPE_HII' in Pcd.TokenTypeList:
+            Dict['VARDEF_HEADER'][GeneratedTokenNumber] = '_Variable_Header'
+        else:
+            Dict['VARDEF_HEADER'][GeneratedTokenNumber] = ''
+        
+        
         if Pcd.Type in gDynamicExPcd:
-            Dict['EXMAPPING_TABLE_EXTOKEN'].append(Pcd.TokenValue)
+            Dict['EXMAPPING_TABLE_EXTOKEN'].append(str(Pcd.TokenValue) + 'U')
             if Phase == 'DXE':
                 GeneratedTokenNumber += NumberOfPeiLocalTokens
             #
@@ -1502,12 +1529,12 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
             # Therefore, 1 is added to GeneratedTokenNumber to generate a PCD Token Number before being inserted
             # to the EXMAPPING_TABLE.
             #
-            Dict['EXMAPPING_TABLE_LOCAL_TOKEN'].append(GeneratedTokenNumber + 1)
-            Dict['EXMAPPING_TABLE_GUID_INDEX'].append(GuidList.index(TokenSpaceGuid))
+            Dict['EXMAPPING_TABLE_LOCAL_TOKEN'].append(str(GeneratedTokenNumber + 1) + 'U')
+            Dict['EXMAPPING_TABLE_GUID_INDEX'].append(str(GuidList.index(TokenSpaceGuid)) + 'U')
 
     if GuidList != []:
         Dict['GUID_TABLE_EMPTY'] = 'FALSE'
-        Dict['GUID_TABLE_SIZE'] = len(GuidList)
+        Dict['GUID_TABLE_SIZE'] = str(len(GuidList)) + 'U'
     else:
         Dict['GUID_STRUCTURE'] = [GuidStringToGuidStructureString('00000000-0000-0000-0000-000000000000')]
 
@@ -1519,13 +1546,13 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
         Dict['STRING_TABLE_VALUE'].append('{ 0 }')
     else:
         Dict['STRING_TABLE_EMPTY'] = 'FALSE'
-        Dict['STRING_TABLE_SIZE'] = StringTableSize
+        Dict['STRING_TABLE_SIZE'] = str(StringTableSize) + 'U'
 
     if Dict['SIZE_TABLE_CNAME'] == []:
         Dict['SIZE_TABLE_CNAME'].append('')
         Dict['SIZE_TABLE_GUID'].append('')
-        Dict['SIZE_TABLE_CURRENT_LENGTH'].append(0)
-        Dict['SIZE_TABLE_MAXIMUM_LENGTH'].append(0)
+        Dict['SIZE_TABLE_CURRENT_LENGTH'].append('0U')
+        Dict['SIZE_TABLE_MAXIMUM_LENGTH'].append('0U')
 
     if NumberOfLocalTokens != 0:
         Dict['DATABASE_EMPTY']                = 'FALSE'
@@ -1534,15 +1561,15 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
 
     if NumberOfExTokens != 0:
         Dict['EXMAP_TABLE_EMPTY']    = 'FALSE'
-        Dict['EXMAPPING_TABLE_SIZE'] = NumberOfExTokens
-        Dict['EX_TOKEN_NUMBER']      = NumberOfExTokens
+        Dict['EXMAPPING_TABLE_SIZE'] = str(NumberOfExTokens) + 'U'
+        Dict['EX_TOKEN_NUMBER']      = str(NumberOfExTokens) + 'U'
     else:
-        Dict['EXMAPPING_TABLE_EXTOKEN'].append(0)
-        Dict['EXMAPPING_TABLE_LOCAL_TOKEN'].append(0)
-        Dict['EXMAPPING_TABLE_GUID_INDEX'].append(0)
+        Dict['EXMAPPING_TABLE_EXTOKEN'].append('0U')
+        Dict['EXMAPPING_TABLE_LOCAL_TOKEN'].append('0U')
+        Dict['EXMAPPING_TABLE_GUID_INDEX'].append('0U')
 
     if NumberOfSizeItems != 0:
-        Dict['SIZE_TABLE_SIZE'] = NumberOfSizeItems * 2
+        Dict['SIZE_TABLE_SIZE'] = str(NumberOfSizeItems * 2) + 'U'
 
     AutoGenH.Append(gPcdDatabaseAutoGenH.Replace(Dict))
     if NumberOfLocalTokens == 0:
@@ -1716,8 +1743,8 @@ def CreateModuleEntryPointCode(Info, AutoGenC, AutoGenH):
         UefiSpecVersion = '0x00000000'
     Dict = {
         'Function'       :   Info.Module.ModuleEntryPointList,
-        'PiSpecVersion'  :   PiSpecVersion,
-        'UefiSpecVersion':   UefiSpecVersion
+        'PiSpecVersion'  :   PiSpecVersion + 'U',
+        'UefiSpecVersion':   UefiSpecVersion + 'U'
     }
 
     if Info.ModuleType in ['PEI_CORE', 'DXE_CORE', 'SMM_CORE']:
@@ -1777,7 +1804,7 @@ def CreateModuleUnloadImageCode(Info, AutoGenC, AutoGenH):
     # Unload Image Handlers
     #
     NumUnloadImage = len(Info.Module.ModuleUnloadImageList)
-    Dict = {'Count':NumUnloadImage, 'Function':Info.Module.ModuleUnloadImageList}
+    Dict = {'Count':str(NumUnloadImage) + 'U', 'Function':Info.Module.ModuleUnloadImageList}
     if NumUnloadImage < 2:
         AutoGenC.Append(gUefiUnloadImageString[NumUnloadImage].Replace(Dict))
     else:
index 4bb9a8b52116e76b261c1179336f88f5cf52982c..fc3239135cd175a87d23486a98b74eafa5573e62 100644 (file)
@@ -1,3 +1,3 @@
 #This file is for build version number auto generation\r
 #\r
-gBUILD_VERSION = "Build 2423"\r
+gBUILD_VERSION = "Build 2460"\r
index e2889a8dd30e7e3ca9c1b80b74b1254e943b42df..8b112d50b39b0627b6e09b271902de2c0ed5a007 100644 (file)
@@ -14,7 +14,6 @@
 #\r
 from Common.GlobalData import *\r
 from CommonDataClass.Exceptions import BadExpression\r
-from CommonDataClass.Exceptions import SymbolNotFound\r
 from CommonDataClass.Exceptions import WrnExpression\r
 from Misc import GuidStringToGuidStructureString\r
 \r
@@ -36,6 +35,7 @@ ERR_RELCMP_STR_OTHERS   = 'Operator taking Operand of string type and Boolean/Nu
 ERR_STRING_CMP          = 'Unicode string and general string cannot be compared: [%s %s %s]'\r
 ERR_ARRAY_TOKEN         = 'Bad C array or C format GUID token: [%s].'\r
 ERR_ARRAY_ELE           = 'This must be HEX value for NList or Array: [%s].'\r
+ERR_EMPTY_EXPR          = 'Empty expression is not allowed.'\r
 \r
 ## SplitString\r
 #  Split string to list according double quote\r
@@ -133,7 +133,7 @@ class ValueExpression(object):
     @staticmethod\r
     def Eval(Operator, Oprand1, Oprand2 = None):\r
         WrnExp = None\r
-        \r
+\r
         if Operator not in ["==", "!=", ">=", "<=", ">", "<", "in", "not in"] and \\r
             (type(Oprand1) == type('') or type(Oprand2) == type('')):\r
             raise BadExpression(ERR_STRING_EXPR % Operator)\r
@@ -166,13 +166,13 @@ class ValueExpression(object):
                     raise WrnExp\r
                 else:\r
                     raise BadExpression(ERR_RELCMP_STR_OTHERS % Operator)\r
-            elif TypeDict[type(Oprand1)] != TypeDict[type(Oprand2)]: \r
+            elif TypeDict[type(Oprand1)] != TypeDict[type(Oprand2)]:\r
                 if Operator in ["==", "!=", ">=", "<=", ">", "<"] and set((TypeDict[type(Oprand1)], TypeDict[type(Oprand2)])) == set((TypeDict[type(True)], TypeDict[type(0)])):\r
                     # comparison between number and boolean is allowed\r
                     pass\r
-                elif Operator in ['&', '|', '^', "&&", "||"] and set((TypeDict[type(Oprand1)], TypeDict[type(Oprand2)])) == set((TypeDict[type(True)], TypeDict[type(0)])):\r
+                elif Operator in ['&', '|', '^', "and", "or"] and set((TypeDict[type(Oprand1)], TypeDict[type(Oprand2)])) == set((TypeDict[type(True)], TypeDict[type(0)])):\r
                     # bitwise and logical operation between number and boolean is allowed\r
-                    pass                \r
+                    pass\r
                 else:\r
                     raise BadExpression(ERR_EXPR_TYPE)\r
             if type(Oprand1) == type('') and type(Oprand2) == type(''):\r
@@ -198,7 +198,7 @@ class ValueExpression(object):
                 Val = True\r
             else:\r
                 Val = False\r
-        \r
+\r
         if WrnExp:\r
             WrnExp.result = Val\r
             raise WrnExp\r
@@ -216,8 +216,7 @@ class ValueExpression(object):
                                   ['TARGET', 'TOOL_CHAIN_TAG', 'ARCH'])\r
 \r
         if not self._Expr.strip():\r
-            self._NoProcess = True\r
-            return\r
+            raise BadExpression(ERR_EMPTY_EXPR)\r
 \r
         #\r
         # The symbol table including PCD and macro mapping\r
@@ -227,25 +226,64 @@ class ValueExpression(object):
         self._Idx = 0\r
         self._Len = len(self._Expr)\r
         self._Token = ''\r
+        self._WarnExcept = None\r
 \r
         # Literal token without any conversion\r
         self._LiteralToken = ''\r
 \r
     # Public entry for this class\r
-    def __call__(self):\r
+    #   @param RealValue: False: only evaluate if the expression is true or false, used for conditional expression\r
+    #                     True : return the evaluated str(value), used for PCD value\r
+    #\r
+    #   @return: True or False if RealValue is False\r
+    #            Evaluated value of string format if RealValue is True\r
+    #\r
+    def __call__(self, RealValue=False):\r
         if self._NoProcess:\r
             return self._Expr\r
 \r
+        self._Expr = self._Expr.strip()\r
+        if RealValue:\r
+            self._Token = self._Expr\r
+            if self.__IsNumberToken():\r
+                return self._Expr\r
+\r
+            Token = self._GetToken()\r
+            if type(Token) == type('') and Token.startswith('{') and Token.endswith('}') and self._Idx >= self._Len:\r
+                return self._Expr\r
+\r
+            self._Idx = 0\r
+            self._Token = ''\r
+\r
         Val = self._OrExpr()\r
-        if type(Val) == type('') and Val == 'L""':\r
-            Val = ''\r
+        RealVal = Val\r
+        if type(Val) == type(''):\r
+            if Val == 'L""':\r
+                Val = False\r
+            elif not Val:\r
+                Val = False\r
+                RealVal = '""'\r
+            elif not Val.startswith('L"') and not Val.startswith('{'):\r
+                Val = True\r
+                RealVal = '"' + RealVal + '"'\r
 \r
         # The expression has been parsed, but the end of expression is not reached\r
         # It means the rest does not comply EBNF of <Expression>\r
         if self._Idx != self._Len:\r
             raise BadExpression(ERR_SNYTAX % self._Expr[self._Idx:])\r
 \r
-        return Val\r
+        if RealValue:\r
+            RetVal = str(RealVal)\r
+        elif Val:\r
+            RetVal = True\r
+        else:\r
+            RetVal = False\r
+\r
+        if self._WarnExcept:\r
+            self._WarnExcept.result = RetVal\r
+            raise self._WarnExcept\r
+        else:\r
+            return RetVal\r
 \r
     # Template function to parse binary operators which have same precedence\r
     # Expr [Operator Expr]*\r
@@ -253,7 +291,11 @@ class ValueExpression(object):
         Val = EvalFunc()\r
         while self._IsOperator(OpLst):\r
             Op = self._Token\r
-            Val = self.Eval(Op, Val, EvalFunc())\r
+            try:\r
+                Val = self.Eval(Op, Val, EvalFunc())\r
+            except WrnExpression, Warn:\r
+                self._WarnExcept = Warn\r
+                Val = Warn.result\r
         return Val\r
 \r
     # A [|| B]*\r
@@ -285,7 +327,11 @@ class ValueExpression(object):
                 if not self._IsOperator(["IN", "in"]):\r
                     raise BadExpression(ERR_REL_NOT_IN)\r
                 Op += ' ' + self._Token\r
-            Val = self.Eval(Op, Val, self._RelExpr())\r
+            try:\r
+                Val = self.Eval(Op, Val, self._RelExpr())\r
+            except WrnExpression, Warn:\r
+                self._WarnExcept = Warn\r
+                Val = Warn.result\r
         return Val\r
 \r
     # A [ > B]*\r
@@ -300,7 +346,11 @@ class ValueExpression(object):
     def _UnaryExpr(self):\r
         if self._IsOperator(["!", "NOT", "not"]):\r
             Val = self._UnaryExpr()\r
-            return self.Eval('not', Val)\r
+            try:\r
+                return self.Eval('not', Val)\r
+            except WrnExpression, Warn:\r
+                self._WarnExcept = Warn\r
+                return Warn.result\r
         return self._IdenExpr()\r
 \r
     # Parse identifier or encapsulated expression\r
@@ -407,8 +457,8 @@ class ValueExpression(object):
         # PCD token\r
         if self.PcdPattern.match(self._Token):\r
             if self._Token not in self._Symb:\r
-                raise SymbolNotFound(ERR_PCD_RESOLVE % self._Token)\r
-            self._Token = ValueExpression(self._Symb[self._Token], self._Symb)()\r
+                raise BadExpression(ERR_PCD_RESOLVE % self._Token)\r
+            self._Token = ValueExpression(self._Symb[self._Token], self._Symb)(True)\r
             if type(self._Token) != type(''):\r
                 self._LiteralToken = hex(self._Token)\r
                 return\r
@@ -459,7 +509,7 @@ class ValueExpression(object):
             if not Token:\r
                 self._LiteralToken = '0x0'\r
             else:\r
-                self._LiteralToken = '0x' + Token\r
+                self._LiteralToken = '0x' + Token.lower()\r
             return True\r
         return False\r
 \r
@@ -488,7 +538,7 @@ class ValueExpression(object):
             if Match and not Expr[Match.end():Match.end()+1].isalnum() \\r
                 and Expr[Match.end():Match.end()+1] != '_':\r
                 self._Idx += Match.end()\r
-                self._Token = ValueExpression(GuidStringToGuidStructureString(Expr[0:Match.end()]))()\r
+                self._Token = ValueExpression(GuidStringToGuidStructureString(Expr[0:Match.end()]))(True)\r
                 return self._Token\r
             elif self.__IsIdChar(Ch):\r
                 return self.__GetIdToken()\r
@@ -526,7 +576,7 @@ class ValueExpression(object):
         OpToken = ''\r
         for Ch in Expr:\r
             if Ch in self.NonLetterOpLst:\r
-                if '!' == Ch and OpToken in ['!=', '!']:\r
+                if '!' == Ch and OpToken:\r
                     break\r
                 self._Idx += 1\r
                 OpToken += Ch\r
@@ -551,5 +601,15 @@ class ValueExpression(object):
 \r
 if __name__ == '__main__':\r
     pass\r
-\r
-\r
+    while True:\r
+        input = raw_input('Input expr: ')\r
+        if input in 'qQ':\r
+            break\r
+        try:\r
+            print ValueExpression(input)(True)\r
+            print ValueExpression(input)(False)\r
+        except WrnExpression, Ex:\r
+            print Ex.result\r
+            print str(Ex)\r
+        except Exception, Ex:\r
+            print str(Ex)\r
index bc7e0476768aa7b82f6269adaa0fb7ceb54b1322..492aa3996282bcadf17f8b173b9c0fd97086507a 100644 (file)
@@ -26,6 +26,7 @@ gAllFiles = None
 
 gGlobalDefines = {}
 gPlatformDefines = {}
+gActivePlatform = None
 gCommandLineDefines = {}
 gEdkGlobal = {}
 gOverrideDir = {}
index 50504aa73ca9689eb406fb5a90bf0c6b60d09946..8b5598b5f0b36465f4593744714c716742c23f2b 100644 (file)
@@ -445,8 +445,10 @@ def RealPath2(File, Dir='', OverrideDir=''):
                 return NewFile[len(OverrideDir):], NewFile[0:len(OverrideDir)]
             else:
                 return NewFile[len(OverrideDir)+1:], NewFile[0:len(OverrideDir)]
-
-    NewFile = GlobalData.gAllFiles[os.path.normpath(os.path.join(Dir, File))]
+    if GlobalData.gAllFiles:
+        NewFile = GlobalData.gAllFiles[os.path.normpath(os.path.join(Dir, File))]
+    else:
+        NewFile = os.path.normpath(os.path.join(Dir, File))
     if NewFile:
         if Dir:
             if Dir[-1] == os.path.sep:
index 6f5f9fd0b5570e4318db377c5f78f023a900319c..73d7318de0dace245f214e5b8f0b0f6077c5970b 100644 (file)
@@ -15,6 +15,7 @@ import re
 from CommonDataClass.DataClass import *\r
 from Common.DataType import SUP_MODULE_LIST_STRING, TAB_VALUE_SPLIT\r
 from EccToolError import *\r
+from MetaDataParser import ParseHeaderCommentSection\r
 import EccGlobalData\r
 import c\r
 \r
@@ -48,7 +49,7 @@ class Check(object):
         if EccGlobalData.gConfig.GeneralCheckNonAcsii == '1' or EccGlobalData.gConfig.GeneralCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':\r
             EdkLogger.quiet("Checking Non-ACSII char in file ...")\r
             SqlCommand = """select ID, FullPath, ExtName from File"""\r
-            RecordSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)\r
+            RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)\r
             for Record in RecordSet:\r
                 if Record[2].upper() not in EccGlobalData.gConfig.BinaryExtList:\r
                     op = open(Record[1]).readlines()\r
@@ -415,13 +416,81 @@ class Check(object):
                     elif Ext in ('.inf', '.dec', '.dsc', '.fdf'):\r
                         FullName = os.path.join(Dirpath, F)\r
                         op = open(FullName).readlines()\r
-                        if not op[0].startswith('## @file') and op[6].startswith('## @file') and op[7].startswith('## @file'):\r
+                        FileLinesList = op\r
+                        LineNo             = 0\r
+                        CurrentSection     = MODEL_UNKNOWN \r
+                        HeaderSectionLines       = []\r
+                        HeaderCommentStart = False \r
+                        HeaderCommentEnd   = False\r
+                        \r
+                        for Line in FileLinesList:\r
+                            LineNo   = LineNo + 1\r
+                            Line     = Line.strip()\r
+                            if (LineNo < len(FileLinesList) - 1):\r
+                                NextLine = FileLinesList[LineNo].strip()\r
+            \r
+                            #\r
+                            # blank line\r
+                            #\r
+                            if (Line == '' or not Line) and LineNo == len(FileLinesList):\r
+                                LastSectionFalg = True\r
+\r
+                            #\r
+                            # check whether file header comment section started\r
+                            #\r
+                            if Line.startswith('#') and \\r
+                                (Line.find('@file') > -1) and \\r
+                                not HeaderCommentStart:\r
+                                if CurrentSection != MODEL_UNKNOWN:\r
+                                    SqlStatement = """ select ID from File where FullPath like '%s'""" % FullName\r
+                                    ResultSet = EccGlobalData.gDb.TblFile.Exec(SqlStatement)\r
+                                    for Result in ResultSet:\r
+                                        Msg = 'INF/DEC/DSC/FDF file header comment should begin with ""## @file"" or ""# @file"" at the very top file'\r
+                                        EccGlobalData.gDb.TblReport.Insert(ERROR_DOXYGEN_CHECK_FILE_HEADER, Msg, "File", Result[0])\r
+\r
+                                else:\r
+                                    CurrentSection = MODEL_IDENTIFIER_FILE_HEADER\r
+                                    #\r
+                                    # Append the first line to section lines.\r
+                                    #\r
+                                    HeaderSectionLines.append((Line, LineNo))\r
+                                    HeaderCommentStart = True\r
+                                    continue        \r
+            \r
+                            #\r
+                            # Collect Header content.\r
+                            #\r
+                            if (Line.startswith('#') and CurrentSection == MODEL_IDENTIFIER_FILE_HEADER) and\\r
+                                HeaderCommentStart and not Line.startswith('##') and not\\r
+                                HeaderCommentEnd and NextLine != '':\r
+                                HeaderSectionLines.append((Line, LineNo))\r
+                                continue\r
+                            #\r
+                            # Header content end\r
+                            #\r
+                            if (Line.startswith('##') or not Line.strip().startswith("#")) and HeaderCommentStart \\r
+                                and not HeaderCommentEnd:\r
+                                if Line.startswith('##'):\r
+                                    HeaderCommentEnd = True\r
+                                HeaderSectionLines.append((Line, LineNo))\r
+                                ParseHeaderCommentSection(HeaderSectionLines, FullName)\r
+                                break\r
+                        if HeaderCommentStart == False:\r
                             SqlStatement = """ select ID from File where FullPath like '%s'""" % FullName\r
                             ResultSet = EccGlobalData.gDb.TblFile.Exec(SqlStatement)\r
                             for Result in ResultSet:\r
-                                Msg = 'INF/DEC/DSC/FDF file header comment should begin with ""## @file""'\r
+                                Msg = 'INF/DEC/DSC/FDF file header comment should begin with ""## @file"" or ""# @file"" at the very top file'\r
                                 EccGlobalData.gDb.TblReport.Insert(ERROR_DOXYGEN_CHECK_FILE_HEADER, Msg, "File", Result[0])\r
+                        if HeaderCommentEnd == False:\r
+                            SqlStatement = """ select ID from File where FullPath like '%s'""" % FullName\r
+                            ResultSet = EccGlobalData.gDb.TblFile.Exec(SqlStatement)\r
+                            for Result in ResultSet:\r
+                                Msg = 'INF/DEC/DSC/FDF file header comment should end with ""##"" at the end of file header comment block'\r
+                                # Check whether File header Comment End with '##'\r
+                                if EccGlobalData.gConfig.HeaderCheckFileCommentEnd == '1' or EccGlobalData.gConfig.HeaderCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':\r
+                                    EccGlobalData.gDb.TblReport.Insert(ERROR_DOXYGEN_CHECK_FILE_HEADER, Msg, "File", Result[0])\r
 \r
+                                     \r
 \r
     # Check whether the function headers are followed Doxygen special documentation blocks in section 2.3.5\r
     def DoxygenCheckFunctionHeader(self):\r
@@ -504,9 +573,9 @@ class Check(object):
     def MetaDataFileCheckLibraryInstance(self):\r
         if EccGlobalData.gConfig.MetaDataFileCheckLibraryInstance == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':\r
             EdkLogger.quiet("Checking for library instance type issue ...")\r
-            SqlCommand = """select A.ID, A.Value2, B.Value2 from Inf as A left join Inf as B\r
-                            where A.Value1 = 'LIBRARY_CLASS' and A.Model = %s\r
-                            and B.Value1 = 'MODULE_TYPE' and B.Model = %s and A.BelongsToFile = B.BelongsToFile\r
+            SqlCommand = """select A.ID, A.Value3, B.Value3 from Inf as A left join Inf as B\r
+                            where A.Value2 = 'LIBRARY_CLASS' and A.Model = %s\r
+                            and B.Value2 = 'MODULE_TYPE' and B.Model = %s and A.BelongsToFile = B.BelongsToFile\r
                             group by A.BelongsToFile""" % (MODEL_META_DATA_HEADER, MODEL_META_DATA_HEADER)\r
             RecordSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)\r
             LibraryClasses = {}\r
@@ -528,8 +597,8 @@ class Check(object):
                 if Record[2] != 'BASE' and Record[2] not in SupModType:\r
                     EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_2, OtherMsg="The Library Class '%s' does not specify its supported module types" % (List[0]), BelongsToTable='Inf', BelongsToItem=Record[0])\r
 \r
-            SqlCommand = """select A.ID, A.Value1, B.Value2 from Inf as A left join Inf as B\r
-                            where A.Model = %s and B.Value1 = '%s' and B.Model = %s\r
+            SqlCommand = """select A.ID, A.Value1, B.Value3 from Inf as A left join Inf as B\r
+                            where A.Model = %s and B.Value2 = '%s' and B.Model = %s\r
                             and B.BelongsToFile = A.BelongsToFile""" \\r
                             % (MODEL_EFI_LIBRARY_CLASS, 'MODULE_TYPE', MODEL_META_DATA_HEADER)\r
             RecordSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)\r
@@ -558,11 +627,13 @@ class Check(object):
             SqlCommand = """select ID, Value1, Value2 from Dsc where Model = %s""" % MODEL_EFI_LIBRARY_CLASS\r
             LibraryClasses = EccGlobalData.gDb.TblDsc.Exec(SqlCommand)\r
             for LibraryClass in LibraryClasses:\r
-                if LibraryClass[1].upper() != 'NULL':\r
+                if LibraryClass[1].upper() == 'NULL' or LibraryClass[1].startswith('!ifdef') or LibraryClass[1].startswith('!ifndef') or LibraryClass[1].endswith('!endif'):\r
+                    continue\r
+                else:\r
                     LibraryIns = os.path.normpath(os.path.join(EccGlobalData.gWorkspace, LibraryClass[2]))\r
-                    SqlCommand = """select Value2 from Inf where BelongsToFile =\r
+                    SqlCommand = """select Value3 from Inf where BelongsToFile =\r
                                     (select ID from File where lower(FullPath) = lower('%s'))\r
-                                    and Value1 = '%s'""" % (LibraryIns, 'LIBRARY_CLASS')\r
+                                    and Value2 = '%s'""" % (LibraryIns, 'LIBRARY_CLASS')\r
                     RecordSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)\r
                     IsFound = False\r
                     for Record in RecordSet:\r
@@ -591,8 +662,8 @@ class Check(object):
                     EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_LIBRARY_NO_USE, OtherMsg="The Library Class [%s] is not used in any platform" % (Record[1]), BelongsToTable='Inf', BelongsToItem=Record[0])\r
             SqlCommand = """\r
                          select A.ID, A.Value1, A.BelongsToFile, A.StartLine, B.StartLine from Dsc as A left join Dsc as B\r
-                         where A.Model = %s and B.Model = %s and A.Value3 = B.Value3 and A.Arch = B.Arch and A.ID <> B.ID\r
-                         and A.Value1 = B.Value1 and A.StartLine <> B.StartLine and B.BelongsToFile = A.BelongsToFile""" \\r
+                         where A.Model = %s and B.Model = %s and A.Scope1 = B.Scope1 and A.Scope2 = B.Scope2 and A.ID <> B.ID\r
+                         and A.Value1 = B.Value1 and A.Value2 <> B.Value2 and A.BelongsToItem = -1 and B.BelongsToItem = -1 and A.StartLine <> B.StartLine and B.BelongsToFile = A.BelongsToFile""" \\r
                             % (MODEL_EFI_LIBRARY_CLASS, MODEL_EFI_LIBRARY_CLASS)\r
             RecordSet = EccGlobalData.gDb.TblDsc.Exec(SqlCommand)\r
             for Record in RecordSet:\r
@@ -631,9 +702,10 @@ class Check(object):
         if EccGlobalData.gConfig.MetaDataFileCheckPcdDuplicate == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':\r
             EdkLogger.quiet("Checking for duplicate PCDs defined in both DSC and FDF files ...")\r
             SqlCommand = """\r
-                         select A.ID, A.Value2, A.BelongsToFile, B.ID, B.Value2, B.BelongsToFile from Dsc as A, Fdf as B\r
+                         select A.ID, A.Value1, A.Value2, A.BelongsToFile, B.ID, B.Value1, B.Value2, B.BelongsToFile from Dsc as A, Fdf as B\r
                          where A.Model >= %s and A.Model < %s\r
                          and B.Model >= %s and B.Model < %s\r
+                         and A.Value1 = B.Value1\r
                          and A.Value2 = B.Value2\r
                          and A.Enabled > -1\r
                          and B.Enabled > -1\r
@@ -641,71 +713,74 @@ class Check(object):
                          """ % (MODEL_PCD, MODEL_META_DATA_HEADER, MODEL_PCD, MODEL_META_DATA_HEADER)\r
             RecordSet = EccGlobalData.gDb.TblDsc.Exec(SqlCommand)\r
             for Record in RecordSet:\r
-                SqlCommand1 = """select Name from File where ID = %s""" % Record[2]\r
-                SqlCommand2 = """select Name from File where ID = %s""" % Record[5]\r
+                SqlCommand1 = """select Name from File where ID = %s""" % Record[3]\r
+                SqlCommand2 = """select Name from File where ID = %s""" % Record[7]\r
                 DscFileName = os.path.splitext(EccGlobalData.gDb.TblDsc.Exec(SqlCommand1)[0][0])[0]\r
                 FdfFileName = os.path.splitext(EccGlobalData.gDb.TblDsc.Exec(SqlCommand2)[0][0])[0]\r
                 if DscFileName != FdfFileName:\r
                     continue\r
-                if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE, Record[1]):\r
-                    EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE, OtherMsg="The PCD [%s] is defined in both FDF file and DSC file" % (Record[1]), BelongsToTable='Dsc', BelongsToItem=Record[0])\r
-                if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE, Record[3]):\r
-                    EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE, OtherMsg="The PCD [%s] is defined in both FDF file and DSC file" % (Record[4]), BelongsToTable='Fdf', BelongsToItem=Record[3])\r
+                if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE, Record[1] + '.' + Record[2]):\r
+                    EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE, OtherMsg="The PCD [%s] is defined in both FDF file and DSC file" % (Record[1] + '.' + Record[2]), BelongsToTable='Dsc', BelongsToItem=Record[0])\r
+                if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE, Record[5] + '.' + Record[6]):\r
+                    EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE, OtherMsg="The PCD [%s] is defined in both FDF file and DSC file" % (Record[5] + '.' + Record[6]), BelongsToTable='Fdf', BelongsToItem=Record[4])\r
 \r
             EdkLogger.quiet("Checking for duplicate PCDs defined in DEC files ...")\r
             SqlCommand = """\r
-                         select A.ID, A.Value2 from Dec as A, Dec as B\r
+                         select A.ID, A.Value1, A.Value2, A.Model, B.Model from Dec as A left join Dec as B\r
                          where A.Model >= %s and A.Model < %s\r
                          and B.Model >= %s and B.Model < %s\r
+                         and A.Value1 = B.Value1\r
                          and A.Value2 = B.Value2\r
-                         and ((A.Arch = B.Arch) and (A.Arch != 'COMMON' or B.Arch != 'COMMON'))\r
-                         and A.ID != B.ID\r
+                         and A.Scope1 = B.Scope1\r
+                         and A.ID <> B.ID\r
+                         and A.Model = B.Model\r
                          and A.Enabled > -1\r
                          and B.Enabled > -1\r
                          and A.BelongsToFile = B.BelongsToFile\r
                          group by A.ID\r
                          """ % (MODEL_PCD, MODEL_META_DATA_HEADER, MODEL_PCD, MODEL_META_DATA_HEADER)\r
-            RecordSet = EccGlobalData.gDb.TblDsc.Exec(SqlCommand)\r
+            RecordSet = EccGlobalData.gDb.TblDec.Exec(SqlCommand)\r
             for Record in RecordSet:\r
-                if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE, Record[1]):\r
-                    EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE, OtherMsg="The PCD [%s] is defined duplicated in DEC file" % (Record[1]), BelongsToTable='Dec', BelongsToItem=Record[0])\r
+                RecordCat = Record[1] + '.' + Record[2]\r
+                if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE, RecordCat):\r
+                    EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE, OtherMsg="The PCD [%s] is defined duplicated in DEC file" % RecordCat, BelongsToTable='Dec', BelongsToItem=Record[0])\r
 \r
     # Check whether PCD settings in the FDF file can only be related to flash.\r
     def MetaDataFileCheckPcdFlash(self):\r
         if EccGlobalData.gConfig.MetaDataFileCheckPcdFlash == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':\r
             EdkLogger.quiet("Checking only Flash related PCDs are used in FDF ...")\r
             SqlCommand = """\r
-                         select ID, Value2, BelongsToFile from Fdf as A\r
+                         select ID, Value1, Value2, BelongsToFile from Fdf as A\r
                          where A.Model >= %s and Model < %s\r
                          and A.Enabled > -1\r
                          and A.Value2 not like '%%Flash%%'\r
                          """ % (MODEL_PCD, MODEL_META_DATA_HEADER)\r
             RecordSet = EccGlobalData.gDb.TblFdf.Exec(SqlCommand)\r
             for Record in RecordSet:\r
-                if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_PCD_FLASH, Record[1]):\r
-                    EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_PCD_FLASH, OtherMsg="The PCD [%s] defined in FDF file is not related to Flash" % (Record[1]), BelongsToTable='Fdf', BelongsToItem=Record[0])\r
+                if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_PCD_FLASH, Record[1] + '.' + Record[2]):\r
+                    EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_PCD_FLASH, OtherMsg="The PCD [%s] defined in FDF file is not related to Flash" % (Record[1] + '.' + Record[2]), BelongsToTable='Fdf', BelongsToItem=Record[0])\r
 \r
     # Check whether PCDs used in Inf files but not specified in Dsc or FDF files\r
     def MetaDataFileCheckPcdNoUse(self):\r
         if EccGlobalData.gConfig.MetaDataFileCheckPcdNoUse == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':\r
             EdkLogger.quiet("Checking for non-specified PCDs ...")\r
             SqlCommand = """\r
-                         select ID, Value2, BelongsToFile from Inf as A\r
+                         select ID, Value1, Value2, BelongsToFile from Inf as A\r
                          where A.Model >= %s and Model < %s\r
                          and A.Enabled > -1\r
-                         and A.Value2 not in\r
-                             (select Value2 from Dsc as B\r
+                         and (A.Value1, A.Value2) not in\r
+                             (select Value1, Value2 from Dsc as B\r
                               where B.Model >= %s and B.Model < %s\r
                               and B.Enabled > -1)\r
-                         and A.Value2 not in\r
-                             (select Value2 from Fdf as C\r
+                         and (A.Value1, A.Value2) not in\r
+                             (select Value1, Value2 from Fdf as C\r
                               where C.Model >= %s and C.Model < %s\r
                               and C.Enabled > -1)\r
                          """ % (MODEL_PCD, MODEL_META_DATA_HEADER, MODEL_PCD, MODEL_META_DATA_HEADER, MODEL_PCD, MODEL_META_DATA_HEADER)\r
             RecordSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)\r
             for Record in RecordSet:\r
-                if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_PCD_NO_USE, Record[1]):\r
-                    EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_PCD_NO_USE, OtherMsg="The PCD [%s] defined in INF file is not specified in either DSC or FDF files" % (Record[1]), BelongsToTable='Inf', BelongsToItem=Record[0])\r
+                if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_PCD_NO_USE, Record[1] + '.' + Record[2]):\r
+                    EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_PCD_NO_USE, OtherMsg="The PCD [%s] defined in INF file is not specified in either DSC or FDF files" % (Record[1] + '.' + Record[2]), BelongsToTable='Inf', BelongsToItem=Record[0])\r
 \r
     # Check whether having duplicate guids defined for Guid/Protocol/Ppi\r
     def MetaDataFileCheckGuidDuplicate(self):\r
@@ -729,7 +804,7 @@ class Check(object):
         if EccGlobalData.gConfig.MetaDataFileCheckModuleFileNoUse == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':\r
             EdkLogger.quiet("Checking for no used module files ...")\r
             SqlCommand = """\r
-                         select upper(Path) from File where ID in (select BelongsToFile from INF where BelongsToFile != -1)\r
+                         select upper(Path) from File where ID in (select BelongsToFile from Inf where BelongsToFile != -1)\r
                          """\r
             InfPathSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)\r
             InfPathList = []\r
@@ -756,15 +831,15 @@ class Check(object):
         if EccGlobalData.gConfig.MetaDataFileCheckPcdType == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':\r
             EdkLogger.quiet("Checking for pcd type in c code function usage ...")\r
             SqlCommand = """\r
-                         select ID, Model, Value1, BelongsToFile from INF where Model > %s and Model < %s\r
+                         select ID, Model, Value1, Value2, BelongsToFile from INF where Model > %s and Model < %s\r
                          """ % (MODEL_PCD, MODEL_META_DATA_HEADER)\r
             PcdSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)\r
             for Pcd in PcdSet:\r
                 Model = Pcd[1]\r
                 PcdName = Pcd[2]\r
-                if len(Pcd[2].split(".")) > 1:\r
-                    PcdName = Pcd[2].split(".")[1]\r
-                BelongsToFile = Pcd[3]\r
+                if Pcd[3]:\r
+                    PcdName = Pcd[3]\r
+                BelongsToFile = Pcd[4]\r
                 SqlCommand = """\r
                              select ID from File where FullPath in\r
                             (select B.Path || '\\' || A.Value1 from INF as A, File as B where A.Model = %s and A.BelongsToFile = %s\r
@@ -809,9 +884,9 @@ class Check(object):
             EdkLogger.quiet("Checking for pcd type in c code function usage ...")\r
             Table = EccGlobalData.gDb.TblInf\r
             SqlCommand = """\r
-                         select A.ID, A.Value2, A.BelongsToFile, B.BelongsToFile from %s as A, %s as B\r
-                         where A.Value1 = 'FILE_GUID' and B.Value1 = 'FILE_GUID' and\r
-                         A.Value2 = B.Value2 and A.ID <> B.ID group by A.ID\r
+                         select A.ID, A.Value3, A.BelongsToFile, B.BelongsToFile from %s as A, %s as B\r
+                         where A.Value2 = 'FILE_GUID' and B.Value2 = 'FILE_GUID' and\r
+                         A.Value3 = B.Value3 and A.ID <> B.ID group by A.ID\r
                          """ % (Table.Table, Table.Table)\r
             RecordSet = Table.Exec(SqlCommand)\r
             for Record in RecordSet:\r
@@ -836,7 +911,7 @@ class Check(object):
                      select A.ID, A.Value1 from %s as A, %s as B\r
                      where A.Model = %s and B.Model = %s\r
                      and A.Value1 = B.Value1 and A.ID <> B.ID\r
-                     and A.Arch = B.Arch\r
+                     and A.Scope1 = B.Scope1\r
                      and A.Enabled > -1\r
                      and B.Enabled > -1\r
                      group by A.ID\r
@@ -857,16 +932,16 @@ class Check(object):
         if Model == MODEL_EFI_PPI:\r
             Name = 'ppi'\r
         SqlCommand = """\r
-                     select A.ID, A.Value2 from %s as A, %s as B\r
+                     select A.ID, A.Value1, A.Value2 from %s as A, %s as B\r
                      where A.Model = %s and B.Model = %s\r
                      and A.Value2 = B.Value2 and A.ID <> B.ID\r
-                     and A.Arch = B.Arch\r
+                     and A.Scope1 = B.Scope1 and A.Value1 <> B.Value1\r
                      group by A.ID\r
                      """ % (Table.Table, Table.Table, Model, Model)\r
         RecordSet = Table.Exec(SqlCommand)\r
-        for Record in RecordSet:\r
-            if not EccGlobalData.gException.IsException(ErrorID, Record[1]):\r
-                EccGlobalData.gDb.TblReport.Insert(ErrorID, OtherMsg="The %s value [%s] is used more than one time" % (Name.upper(), Record[1]), BelongsToTable=Table.Table, BelongsToItem=Record[0])\r
+        for Record in RecordSet:     \r
+            if not EccGlobalData.gException.IsException(ErrorID, Record[1] + ':' + Record[2]):\r
+                EccGlobalData.gDb.TblReport.Insert(ErrorID, OtherMsg="The %s value [%s] is used more than one time" % (Name.upper(), Record[2]), BelongsToTable=Table.Table, BelongsToItem=Record[0])\r
 \r
     # Naming Convention Check\r
     def NamingConventionCheck(self):\r
index 1478ee6351bae5e3807ad3819b81ada3d872d874..0ba8d732d77d42747aaf62a9d94b6af139fd31c1 100644 (file)
@@ -100,7 +100,15 @@ class Configuration(object):
         self.HeaderCheckFile = 1\r
         # Check whether Function header exists\r
         self.HeaderCheckFunction = 1\r
-\r
+        # Check whether Meta data File header Comment End with '##'\r
+        self.HeaderCheckFileCommentEnd = 1\r
+        # Check whether C File header Comment content start with two spaces\r
+        self.HeaderCheckCFileCommentStartSpacesNum = 1\r
+        # Check whether each reference in the C File header Comment's reference list begins with a bullet character '-'\r
+        self.HeaderCheckCFileCommentReferenceFormat = 1\r
+        # Check whether the C File header Comment has the License immediately after the "Copyright" line\r
+        self.HeaderCheckCFileCommentLicenseFormat = 1\r
+  \r
         ## C Function Layout Checking\r
         self.CFunctionLayoutCheckAll = 0\r
 \r
index 4b79cb708fab74265139c26e99943eae7a8f8d1d..f3e2b5934ad0fb77f82ccb5772b5e1f501b6f85d 100644 (file)
@@ -26,9 +26,9 @@ from Table.TableFunction import TableFunction
 from Table.TablePcd import TablePcd\r
 from Table.TableIdentifier import TableIdentifier\r
 from Table.TableReport import TableReport\r
-from Table.TableInf import TableInf\r
-from Table.TableDec import TableDec\r
-from Table.TableDsc import TableDsc\r
+from MetaFileWorkspace.MetaFileTable import ModuleTable\r
+from MetaFileWorkspace.MetaFileTable import PackageTable\r
+from MetaFileWorkspace.MetaFileTable import PlatformTable\r
 from Table.TableFdf import TableFdf\r
 \r
 ##\r
@@ -92,9 +92,9 @@ class Database(object):
         self.TblIdentifier = TableIdentifier(self.Cur)\r
         self.TblPcd = TablePcd(self.Cur)\r
         self.TblReport = TableReport(self.Cur)\r
-        self.TblInf = TableInf(self.Cur)\r
-        self.TblDec = TableDec(self.Cur)\r
-        self.TblDsc = TableDsc(self.Cur)\r
+        self.TblInf = ModuleTable(self.Cur)\r
+        self.TblDec = PackageTable(self.Cur)\r
+        self.TblDsc = PlatformTable(self.Cur)\r
         self.TblFdf = TableFdf(self.Cur)\r
 \r
         #\r
index 05dba2568bcc36683b50edb6bb85628f57f27fc2..ab438c7e86d561fd7857e47feb395bcfc3368d38 100644 (file)
@@ -22,13 +22,17 @@ from MetaDataParser import *
 from optparse import OptionParser\r
 from Configuration import Configuration\r
 from Check import Check\r
-from Common.InfClassObject import Inf\r
-from Common.DecClassObject import Dec\r
-from Common.DscClassObject import Dsc\r
-from Common.FdfClassObject import Fdf\r
+\r
+\r
 from Common.String import NormPath\r
 from Common.BuildVersion import gBUILD_VERSION\r
 from Common import BuildToolError\r
+\r
+from MetaFileWorkspace.MetaFileParser import DscParser\r
+from MetaFileWorkspace.MetaFileParser import DecParser\r
+from MetaFileWorkspace.MetaFileParser import InfParser\r
+from MetaFileWorkspace.MetaFileParser import Fdf\r
+from MetaFileWorkspace.MetaFileTable import MetaFileStorage\r
 import c\r
 import re, string\r
 from Exception import *\r
@@ -53,6 +57,7 @@ class Ecc(object):
         self.IsInit = True\r
         self.ScanSourceCode = True\r
         self.ScanMetaData = True\r
+        self.MetaFile = ''\r
 \r
         # Parse the options and args\r
         self.ParseOption()\r
@@ -124,7 +129,6 @@ class Ecc(object):
         for Root, Dirs, Files in os.walk(EccGlobalData.gTarget):\r
             if p.match(Root.upper()):\r
                 continue\r
-\r
             for Dir in Dirs:\r
                 Dirname = os.path.join(Root, Dir)\r
                 if os.path.islink(Dirname):\r
@@ -139,19 +143,28 @@ class Ecc(object):
                     Filename = os.path.normpath(os.path.join(Root, File))\r
                     EdkLogger.quiet("Parsing %s" % Filename)\r
                     Op.write("%s\r" % Filename)\r
-                    Dec(Filename, True, True, EccGlobalData.gWorkspace, EccGlobalData.gDb)\r
+                    #Dec(Filename, True, True, EccGlobalData.gWorkspace, EccGlobalData.gDb)\r
+                    self.MetaFile = DecParser(Filename, MODEL_FILE_DEC, EccGlobalData.gDb.TblDec)\r
+                    self.MetaFile.Start()\r
                     continue\r
                 if len(File) > 4 and File[-4:].upper() == ".DSC":\r
                     Filename = os.path.normpath(os.path.join(Root, File))\r
                     EdkLogger.quiet("Parsing %s" % Filename)\r
                     Op.write("%s\r" % Filename)\r
-                    Dsc(Filename, True, True, EccGlobalData.gWorkspace, EccGlobalData.gDb)\r
+                    #Dsc(Filename, True, True, EccGlobalData.gWorkspace, EccGlobalData.gDb)\r
+                    self.MetaFile = DscParser(Filename, MODEL_FILE_DSC, MetaFileStorage(EccGlobalData.gDb.TblDsc.Cur, Filename, MODEL_FILE_DSC, True))\r
+                    # always do post-process, in case of macro changes\r
+                    self.MetaFile.DoPostProcess()\r
+                    self.MetaFile.Start()\r
+                    self.MetaFile._PostProcess()\r
                     continue\r
                 if len(File) > 4 and File[-4:].upper() == ".INF":\r
                     Filename = os.path.normpath(os.path.join(Root, File))\r
                     EdkLogger.quiet("Parsing %s" % Filename)\r
                     Op.write("%s\r" % Filename)\r
-                    Inf(Filename, True, True, EccGlobalData.gWorkspace, EccGlobalData.gDb)\r
+                    #Inf(Filename, True, True, EccGlobalData.gWorkspace, EccGlobalData.gDb)\r
+                    self.MetaFile = InfParser(Filename, MODEL_FILE_INF, EccGlobalData.gDb.TblInf)\r
+                    self.MetaFile.Start()\r
                     continue\r
                 if len(File) > 4 and File[-4:].upper() == ".FDF":\r
                     Filename = os.path.normpath(os.path.join(Root, File))\r
index 4dda2e53600126c91c84abda4fed36d018b581b4..bbafa00dddd127e0660c40d824a830d97afe87f3 100644 (file)
@@ -13,8 +13,9 @@
 \r
 import os\r
 from CommonDataClass.DataClass import *\r
-\r
-\r
+from EccToolError import *\r
+import EccGlobalData\r
+import re\r
 ## Get the inlcude path list for a source file\r
 #\r
 # 1. Find the source file belongs to which inf file\r
@@ -76,3 +77,188 @@ def GetTableList(FileModelList, Table, Db):
 \r
     return TableList\r
 \r
+## ParseHeaderCommentSection\r
+#\r
+# Parse Header comment section lines, extract Abstract, Description, Copyright\r
+# , License lines\r
+#\r
+# @param CommentList:   List of (Comment, LineNumber)\r
+# @param FileName:      FileName of the comment\r
+#\r
+def ParseHeaderCommentSection(CommentList, FileName = None):\r
+    \r
+    Abstract = ''\r
+    Description = ''\r
+    Copyright = ''\r
+    License = ''\r
+    EndOfLine = "\n"\r
+    STR_HEADER_COMMENT_START = "@file"\r
+    \r
+    #\r
+    # used to indicate the state of processing header comment section of dec, \r
+    # inf files\r
+    #\r
+    HEADER_COMMENT_NOT_STARTED = -1\r
+    HEADER_COMMENT_STARTED     = 0\r
+    HEADER_COMMENT_FILE        = 1\r
+    HEADER_COMMENT_ABSTRACT    = 2\r
+    HEADER_COMMENT_DESCRIPTION = 3\r
+    HEADER_COMMENT_COPYRIGHT   = 4\r
+    HEADER_COMMENT_LICENSE     = 5\r
+    HEADER_COMMENT_END         = 6\r
+    #\r
+    # first find the last copyright line\r
+    #\r
+    Last = 0\r
+    HeaderCommentStage = HEADER_COMMENT_NOT_STARTED\r
+    for Index in xrange(len(CommentList)-1, 0, -1):\r
+        Line = CommentList[Index][0]\r
+        if _IsCopyrightLine(Line):\r
+            Last = Index\r
+            break\r
+    \r
+    for Item in CommentList:\r
+        Line = Item[0]\r
+        LineNo = Item[1]\r
+        \r
+        if not Line.startswith('#') and Line:\r
+            SqlStatement = """ select ID from File where FullPath like '%s'""" % FileName\r
+            ResultSet = EccGlobalData.gDb.TblFile.Exec(SqlStatement)\r
+            for Result in ResultSet:\r
+                Msg = 'Comment must start with #'\r
+                EccGlobalData.gDb.TblReport.Insert(ERROR_DOXYGEN_CHECK_FILE_HEADER, Msg, "File", Result[0])\r
+        Comment = CleanString2(Line)[1]\r
+        Comment = Comment.strip()\r
+        #\r
+        # if there are blank lines between License or Description, keep them as they would be \r
+        # indication of different block; or in the position that Abstract should be, also keep it\r
+        # as it indicates that no abstract\r
+        #\r
+        if not Comment and HeaderCommentStage not in [HEADER_COMMENT_LICENSE, \\r
+                                                      HEADER_COMMENT_DESCRIPTION, HEADER_COMMENT_ABSTRACT]:\r
+            continue\r
+        \r
+        if HeaderCommentStage == HEADER_COMMENT_NOT_STARTED:\r
+            if Comment.startswith(STR_HEADER_COMMENT_START):\r
+                HeaderCommentStage = HEADER_COMMENT_ABSTRACT\r
+            else:\r
+                License += Comment + EndOfLine\r
+        else:\r
+            if HeaderCommentStage == HEADER_COMMENT_ABSTRACT:\r
+                #\r
+                # in case there is no abstract and description\r
+                #\r
+                if not Comment:\r
+                    Abstract = ''\r
+                    HeaderCommentStage = HEADER_COMMENT_DESCRIPTION\r
+                elif _IsCopyrightLine(Comment):                    \r
+                    Copyright += Comment + EndOfLine\r
+                    HeaderCommentStage = HEADER_COMMENT_COPYRIGHT\r
+                else:                    \r
+                    Abstract += Comment + EndOfLine\r
+                    HeaderCommentStage = HEADER_COMMENT_DESCRIPTION\r
+            elif HeaderCommentStage == HEADER_COMMENT_DESCRIPTION:\r
+                #\r
+                # in case there is no description\r
+                #                \r
+                if _IsCopyrightLine(Comment):                    \r
+                    Copyright += Comment + EndOfLine\r
+                    HeaderCommentStage = HEADER_COMMENT_COPYRIGHT\r
+                else:\r
+                    Description += Comment + EndOfLine                \r
+            elif HeaderCommentStage == HEADER_COMMENT_COPYRIGHT:\r
+                if _IsCopyrightLine(Comment):                    \r
+                    Copyright += Comment + EndOfLine\r
+                else:\r
+                    #\r
+                    # Contents after copyright line are license, those non-copyright lines in between\r
+                    # copyright line will be discarded \r
+                    #\r
+                    if LineNo > Last:\r
+                        if License:\r
+                            License += EndOfLine\r
+                        License += Comment + EndOfLine\r
+                        HeaderCommentStage = HEADER_COMMENT_LICENSE                \r
+            else:\r
+                if not Comment and not License:\r
+                    continue\r
+                License += Comment + EndOfLine\r
+    \r
+    if not Copyright:\r
+        SqlStatement = """ select ID from File where FullPath like '%s'""" % FileName\r
+        ResultSet = EccGlobalData.gDb.TblFile.Exec(SqlStatement)\r
+        for Result in ResultSet:\r
+            Msg = 'Header comment section must have copyright information'\r
+            EccGlobalData.gDb.TblReport.Insert(ERROR_DOXYGEN_CHECK_FILE_HEADER, Msg, "File", Result[0])\r
+\r
+    if not License:\r
+        SqlStatement = """ select ID from File where FullPath like '%s'""" % FileName\r
+        ResultSet = EccGlobalData.gDb.TblFile.Exec(SqlStatement)\r
+        for Result in ResultSet:\r
+            Msg = 'Header comment section must have license information'\r
+            EccGlobalData.gDb.TblReport.Insert(ERROR_DOXYGEN_CHECK_FILE_HEADER, Msg, "File", Result[0])\r
+                     \r
+    return Abstract.strip(), Description.strip(), Copyright.strip(), License.strip()\r
+\r
+## _IsCopyrightLine\r
+# check whether current line is copyright line, the criteria is whether there is case insensitive keyword "Copyright" \r
+# followed by zero or more white space characters followed by a "(" character \r
+#\r
+# @param LineContent:  the line need to be checked\r
+# @return: True if current line is copyright line, False else\r
+#\r
+def _IsCopyrightLine (LineContent):\r
+    LineContent = LineContent.upper()\r
+    Result = False\r
+    \r
+    ReIsCopyrightRe = re.compile(r"""(^|\s)COPYRIGHT *\(""", re.DOTALL)\r
+    if ReIsCopyrightRe.search(LineContent):\r
+        Result = True\r
+        \r
+    return Result\r
+\r
+\r
+## CleanString2\r
+#\r
+# Split comments in a string\r
+# Remove spaces\r
+#\r
+# @param Line:              The string to be cleaned\r
+# @param CommentCharacter:  Comment char, used to ignore comment content, \r
+#                           default is DataType.TAB_COMMENT_SPLIT\r
+#\r
+def CleanString2(Line, CommentCharacter='#', AllowCppStyleComment=False):\r
+    #\r
+    # remove whitespace\r
+    #\r
+    Line = Line.strip()\r
+    #\r
+    # Replace EDK1's comment character\r
+    #\r
+    if AllowCppStyleComment:\r
+        Line = Line.replace('//', CommentCharacter)\r
+    #\r
+    # separate comments and statements\r
+    #\r
+    LineParts = Line.split(CommentCharacter, 1)\r
+    #\r
+    # remove whitespace again\r
+    #\r
+    Line = LineParts[0].strip()\r
+    if len(LineParts) > 1:\r
+        Comment = LineParts[1].strip()\r
+        #\r
+        # Remove prefixed and trailing comment characters\r
+        #\r
+        Start = 0\r
+        End = len(Comment)\r
+        while Start < End and Comment.startswith(CommentCharacter, Start, End):\r
+            Start += 1\r
+        while End >= 0 and Comment.endswith(CommentCharacter, Start, End):\r
+            End -= 1\r
+        Comment = Comment[Start:End]\r
+        Comment = Comment.strip()\r
+    else:\r
+        Comment = ''\r
+\r
+    return Line, Comment\r
diff --git a/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaDataTable.py b/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaDataTable.py
new file mode 100644 (file)
index 0000000..3060698
--- /dev/null
@@ -0,0 +1,215 @@
+## @file\r
+# This file is used to create/update/query/erase table for files\r
+#\r
+# Copyright (c) 2008, Intel Corporation. All rights reserved.<BR>\r
+# This program and the accompanying materials\r
+# are licensed and made available under the terms and conditions of the BSD License\r
+# which accompanies this distribution.  The full text of the license may be found at\r
+# http://opensource.org/licenses/bsd-license.php\r
+#\r
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
+#\r
+\r
+##\r
+# Import Modules\r
+#\r
+import os\r
+\r
+import Common.EdkLogger as EdkLogger\r
+from CommonDataClass import DataClass\r
+from CommonDataClass.DataClass import FileClass\r
+\r
+## Convert to SQL required string format\r
+def ConvertToSqlString(StringList):\r
+    return map(lambda s: "'" + s.replace("'", "''") + "'", StringList)\r
+\r
+## TableFile\r
+#\r
+# This class defined a common table\r
+#\r
+# @param object:     Inherited from object class\r
+#\r
+# @param Cursor:     Cursor of the database\r
+# @param TableName:  Name of the table\r
+#\r
+class Table(object):\r
+    _COLUMN_ = ''\r
+    _ID_STEP_ = 1\r
+    _ID_MAX_ = 0x80000000\r
+    _DUMMY_ = 0\r
+\r
+    def __init__(self, Cursor, Name='', IdBase=0, Temporary=False):\r
+        self.Cur = Cursor\r
+        self.Table = Name\r
+        self.IdBase = int(IdBase)\r
+        self.ID = int(IdBase)\r
+        self.Temporary = Temporary\r
+\r
+    def __str__(self):\r
+        return self.Table\r
+\r
+    ## Create table\r
+    #\r
+    # Create a table\r
+    #\r
+    def Create(self, NewTable=True):\r
+        if NewTable:\r
+            self.Drop()\r
+\r
+        if self.Temporary:\r
+            SqlCommand = """create temp table IF NOT EXISTS %s (%s)""" % (self.Table, self._COLUMN_)\r
+        else:\r
+            SqlCommand = """create table IF NOT EXISTS %s (%s)""" % (self.Table, self._COLUMN_)\r
+        EdkLogger.debug(EdkLogger.DEBUG_8, SqlCommand)\r
+        self.Cur.execute(SqlCommand)\r
+        self.ID = self.GetId()\r
+\r
+    ## Insert table\r
+    #\r
+    # Insert a record into a table\r
+    #\r
+    def Insert(self, *Args):\r
+        self.ID = self.ID + self._ID_STEP_\r
+        if self.ID >= (self.IdBase + self._ID_MAX_):\r
+            self.ID = self.IdBase + self._ID_STEP_\r
+        Values = ", ".join([str(Arg) for Arg in Args])\r
+        SqlCommand = "insert into %s values(%s, %s)" % (self.Table, self.ID, Values)\r
+        EdkLogger.debug(EdkLogger.DEBUG_5, SqlCommand)\r
+        self.Cur.execute(SqlCommand)\r
+        return self.ID\r
+\r
+    ## Query table\r
+    #\r
+    # Query all records of the table\r
+    #\r
+    def Query(self):\r
+        SqlCommand = """select * from %s""" % self.Table\r
+        self.Cur.execute(SqlCommand)\r
+        for Rs in self.Cur:\r
+            EdkLogger.verbose(str(Rs))\r
+        TotalCount = self.GetId()\r
+\r
+    ## Drop a table\r
+    #\r
+    # Drop the table\r
+    #\r
+    def Drop(self):\r
+        SqlCommand = """drop table IF EXISTS %s""" % self.Table\r
+        self.Cur.execute(SqlCommand)\r
+\r
+    ## Get count\r
+    #\r
+    # Get a count of all records of the table\r
+    #\r
+    # @retval Count:  Total count of all records\r
+    #\r
+    def GetCount(self):\r
+        SqlCommand = """select count(ID) from %s""" % self.Table\r
+        Record = self.Cur.execute(SqlCommand).fetchall()\r
+        return Record[0][0]\r
+\r
+    def GetId(self):\r
+        SqlCommand = """select max(ID) from %s""" % self.Table\r
+        Record = self.Cur.execute(SqlCommand).fetchall()\r
+        Id = Record[0][0]\r
+        if Id == None:\r
+            Id = self.IdBase\r
+        return Id\r
+\r
+    ## Init the ID of the table\r
+    #\r
+    # Init the ID of the table\r
+    #\r
+    def InitID(self):\r
+        self.ID = self.GetId()\r
+\r
+    ## Exec\r
+    #\r
+    # Exec Sql Command, return result\r
+    #\r
+    # @param SqlCommand:  The SqlCommand to be executed\r
+    #\r
+    # @retval RecordSet:  The result after executed\r
+    #\r
+    def Exec(self, SqlCommand):\r
+        EdkLogger.debug(EdkLogger.DEBUG_5, SqlCommand)\r
+        self.Cur.execute(SqlCommand)\r
+        RecordSet = self.Cur.fetchall()\r
+        return RecordSet\r
+\r
+    def SetEndFlag(self):\r
+        pass\r
+\r
+    def IsIntegral(self):\r
+        Result = self.Exec("select min(ID) from %s" % (self.Table))\r
+        if Result[0][0] != -1:\r
+            return False\r
+        return True\r
+\r
+    def GetAll(self):\r
+        return self.Exec("select * from %s where ID > 0 order by ID" % (self.Table))\r
+\r
+\r
+## TableDataModel\r
+#\r
+# This class defined a table used for data model\r
+#\r
+# @param object:       Inherited from object class\r
+#\r
+#\r
+class TableDataModel(Table):\r
+    _COLUMN_ = """\r
+        ID INTEGER PRIMARY KEY,\r
+        CrossIndex INTEGER NOT NULL,\r
+        Name VARCHAR NOT NULL,\r
+        Description VARCHAR\r
+        """\r
+    def __init__(self, Cursor):\r
+        Table.__init__(self, Cursor, 'DataModel')\r
+\r
+    ## Insert table\r
+    #\r
+    # Insert a record into table DataModel\r
+    #\r
+    # @param ID:           ID of a ModelType\r
+    # @param CrossIndex:   CrossIndex of a ModelType\r
+    # @param Name:         Name of a ModelType\r
+    # @param Description:  Description of a ModelType\r
+    #\r
+    def Insert(self, CrossIndex, Name, Description):\r
+        (Name, Description) = ConvertToSqlString((Name, Description))\r
+        return Table.Insert(self, CrossIndex, Name, Description)\r
+\r
+    ## Init table\r
+    #\r
+    # Create all default records of table DataModel\r
+    #\r
+    def InitTable(self):\r
+        EdkLogger.verbose("\nInitialize table DataModel started ...")\r
+        Count = self.GetCount()\r
+        if Count != None and Count != 0:\r
+            return\r
+        for Item in DataClass.MODEL_LIST:\r
+            CrossIndex = Item[1]\r
+            Name = Item[0]\r
+            Description = Item[0]\r
+            self.Insert(CrossIndex, Name, Description)\r
+        EdkLogger.verbose("Initialize table DataModel ... DONE!")\r
+\r
+    ## Get CrossIndex\r
+    #\r
+    # Get a model's cross index from its name\r
+    #\r
+    # @param ModelName:    Name of the model\r
+    # @retval CrossIndex:  CrossIndex of the model\r
+    #\r
+    def GetCrossIndex(self, ModelName):\r
+        CrossIndex = -1\r
+        SqlCommand = """select CrossIndex from DataModel where name = '""" + ModelName + """'"""\r
+        self.Cur.execute(SqlCommand)\r
+        for Item in self.Cur:\r
+            CrossIndex = Item[0]\r
+\r
+        return CrossIndex\r
+\r
diff --git a/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileParser.py b/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileParser.py
new file mode 100644 (file)
index 0000000..fc29baf
--- /dev/null
@@ -0,0 +1,1849 @@
+## @file
+# This file is used to parse meta files
+#
+# Copyright (c) 2008 - 2010, Intel Corporation. All rights reserved.<BR>
+# This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution.  The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+import re
+import time
+import copy
+
+import Common.EdkLogger as EdkLogger
+import Common.GlobalData as GlobalData
+import EccGlobalData
+
+from CommonDataClass.DataClass import *
+from Common.DataType import *
+from Common.String import *
+from Common.Misc import GuidStructureStringToGuidString, CheckPcdDatum, PathClass, AnalyzePcdData
+from Common.Expression import *
+from CommonDataClass.Exceptions import *
+
+from MetaFileTable import MetaFileStorage
+from GenFds.FdfParser import FdfParser  
+
+## A decorator used to parse macro definition
+def ParseMacro(Parser):
+    def MacroParser(self):
+        Match = gMacroDefPattern.match(self._CurrentLine)
+        if not Match:
+            # Not 'DEFINE/EDK_GLOBAL' statement, call decorated method
+            Parser(self)
+            return
+
+        TokenList = GetSplitValueList(self._CurrentLine[Match.end(1):], TAB_EQUAL_SPLIT, 1)
+        # Syntax check
+        if not TokenList[0]:
+            EdkLogger.error('Parser', FORMAT_INVALID, "No macro name given",
+                            ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+        if len(TokenList) < 2:
+            TokenList.append('')
+
+        Type = Match.group(1)
+        Name, Value = TokenList
+        # Global macros can be only defined via environment variable
+        if Name in GlobalData.gGlobalDefines:
+            EdkLogger.error('Parser', FORMAT_INVALID, "%s can only be defined via environment variable" % Name,
+                            ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+        # Only upper case letters, digit and '_' are allowed
+        if not gMacroNamePattern.match(Name):
+            EdkLogger.error('Parser', FORMAT_INVALID, "The macro name must be in the pattern [A-Z][A-Z0-9_]*",
+                            ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+
+        Value = ReplaceMacro(Value, self._Macros)
+        self._ItemType = MODEL_META_DATA_DEFINE
+        # DEFINE defined macros
+        if Type == TAB_DSC_DEFINES_DEFINE:
+            if type(self) == DecParser:
+                if MODEL_META_DATA_HEADER in self._SectionType:
+                    self._FileLocalMacros[Name] = Value
+                else:
+                    for Scope in self._Scope:
+                        self._SectionsMacroDict.setdefault((Scope[2], Scope[0], Scope[1]), {})[Name] = Value
+            elif self._SectionType == MODEL_META_DATA_HEADER:
+                self._FileLocalMacros[Name] = Value
+            else:
+                SectionDictKey = self._SectionType, self._Scope[0][0], self._Scope[0][1]
+                if SectionDictKey not in self._SectionsMacroDict:
+                    self._SectionsMacroDict[SectionDictKey] = {}
+                SectionLocalMacros = self._SectionsMacroDict[SectionDictKey]
+                SectionLocalMacros[Name] = Value
+        # EDK_GLOBAL defined macros
+        elif type(self) != DscParser:
+            EdkLogger.error('Parser', FORMAT_INVALID, "EDK_GLOBAL can only be used in .dsc file",
+                            ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+        elif self._SectionType != MODEL_META_DATA_HEADER:
+            EdkLogger.error('Parser', FORMAT_INVALID, "EDK_GLOBAL can only be used under [Defines] section",
+                            ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+        elif (Name in self._FileLocalMacros) and (self._FileLocalMacros[Name] != Value):
+            EdkLogger.error('Parser', FORMAT_INVALID, "EDK_GLOBAL defined a macro with the same name and different value as one defined by 'DEFINE'",
+                            ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+            
+        self._ValueList = [Type, Name, Value]
+
+    return MacroParser
+
+## Base class of parser
+#
+#  This class is used for derivation purpose. The specific parser for one kind
+# type file must derive this class and implement some public interfaces.
+#
+#   @param      FilePath        The path of platform description file
+#   @param      FileType        The raw data of DSC file
+#   @param      Table           Database used to retrieve module/package information
+#   @param      Macros          Macros used for replacement in file
+#   @param      Owner           Owner ID (for sub-section parsing)
+#   @param      From            ID from which the data comes (for !INCLUDE directive)
+#
+class MetaFileParser(object):
+    # data type (file content) for specific file type
+    DataType = {}
+
+    # Parser objects used to implement singleton
+    MetaFiles = {}
+
+    ## Factory method
+    #
+    # One file, one parser object. This factory method makes sure that there's
+    # only one object constructed for one meta file.
+    #
+    #   @param  Class           class object of real AutoGen class
+    #                           (InfParser, DecParser or DscParser)
+    #   @param  FilePath        The path of meta file
+    #   @param  *args           The specific class related parameters
+    #   @param  **kwargs        The specific class related dict parameters
+    #
+    def __new__(Class, FilePath, *args, **kwargs):
+        if FilePath in Class.MetaFiles:
+            return Class.MetaFiles[FilePath]
+        else:
+            ParserObject = super(MetaFileParser, Class).__new__(Class)
+            Class.MetaFiles[FilePath] = ParserObject
+            return ParserObject
+
+    ## Constructor of MetaFileParser
+    #
+    #  Initialize object of MetaFileParser
+    #
+    #   @param      FilePath        The path of platform description file
+    #   @param      FileType        The raw data of DSC file
+    #   @param      Table           Database used to retrieve module/package information
+    #   @param      Macros          Macros used for replacement in file
+    #   @param      Owner           Owner ID (for sub-section parsing)
+    #   @param      From            ID from which the data comes (for !INCLUDE directive)
+    #
+    def __init__(self, FilePath, FileType, Table, Owner=-1, From=-1):
+        self._Table = Table
+        self._RawTable = Table
+        self._FileType = FileType
+        self.MetaFile = FilePath
+        self._Defines = {}
+        self._FileLocalMacros = {}
+        self._SectionsMacroDict = {}
+
+        # for recursive parsing
+        self._Owner = [Owner]
+        self._From = From
+
+        # parser status for parsing
+        self._ValueList = ['', '', '', '', '']
+        self._Scope = []
+        self._LineIndex = 0
+        self._CurrentLine = ''
+        self._SectionType = MODEL_UNKNOWN
+        self._SectionName = ''
+        self._InSubsection = False
+        self._SubsectionType = MODEL_UNKNOWN
+        self._SubsectionName = ''
+        self._ItemType = MODEL_UNKNOWN
+        self._LastItem = -1
+        self._Enabled = 0
+        self._Finished = False
+        self._PostProcessed = False
+        # Different version of meta-file has different way to parse.
+        self._Version = 0
+
+    ## Store the parsed data in table
+    def _Store(self, *Args):
+        return self._Table.Insert(*Args)
+
+    ## Virtual method for starting parse
+    def Start(self):
+        raise NotImplementedError
+
+    ## Notify a post-process is needed
+    def DoPostProcess(self):
+        self._PostProcessed = False
+
+    ## Set parsing complete flag in both class and table
+    def _Done(self):
+        self._Finished = True
+        ## Do not set end flag when processing included files
+        if self._From == -1:
+            self._Table.SetEndFlag()
+
+    def _PostProcess(self):
+        self._PostProcessed = True
+
+    ## Get the parse complete flag
+    def _GetFinished(self):
+        return self._Finished
+
+    ## Set the complete flag
+    def _SetFinished(self, Value):
+        self._Finished = Value
+
+    ## Use [] style to query data in table, just for readability
+    #
+    #   DataInfo = [data_type, scope1(arch), scope2(platform/moduletype)]
+    #
+    def __getitem__(self, DataInfo):
+        if type(DataInfo) != type(()):
+            DataInfo = (DataInfo,)
+
+        # Parse the file first, if necessary
+        if not self._Finished:
+            if self._RawTable.IsIntegrity():
+                self._Finished = True
+            else:
+                self._Table = self._RawTable
+                self._PostProcessed = False
+                self.Start()
+
+        # No specific ARCH or Platform given, use raw data
+        if self._RawTable and (len(DataInfo) == 1 or DataInfo[1] == None):
+            return self._RawTable.Query(*DataInfo)
+
+        # Do post-process if necessary
+        if not self._PostProcessed:
+            self._PostProcess()
+
+        return self._Table.Query(*DataInfo)
+
+    ## Data parser for the common format in different type of file
+    #
+    #   The common format in the meta file is like
+    #
+    #       xxx1 | xxx2 | xxx3
+    #
+    #   Each '|'-separated token fills one slot of self._ValueList in order.
+    #
+    @ParseMacro
+    def _CommonParser(self):
+        TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
+        self._ValueList[0:len(TokenList)] = TokenList
+
+    ## Data parser for the format in which there's path
+    #
+    #   Only path can have macro used. So we need to replace them before use.
+    #
+    @ParseMacro
+    def _PathParser(self):
+        TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
+        self._ValueList[0:len(TokenList)] = TokenList
+        # Don't do macro replacement for dsc file at this point
+        # (DscParser defers macro expansion to its post-process phase)
+        if type(self) != DscParser:
+            Macros = self._Macros
+            self._ValueList = [ReplaceMacro(Value, Macros) for Value in self._ValueList]
+
+    ## Skip unsupported data
+    def _Skip(self):
+        EdkLogger.warn("Parser", "Unrecognized content", File=self.MetaFile,
+                        Line=self._LineIndex+1, ExtraData=self._CurrentLine);
+        self._ValueList[0:1] = [self._CurrentLine]
+
+    ## Section header parser
+    #
+    #   The section header is always in following format:
+    #
+    #       [section_name.arch<.platform|module_type>]
+    #
+    #   Multiple comma-separated items may appear inside one pair of
+    #   brackets; each contributes an (arch, platform/module_type) pair to
+    #   self._Scope. All items must share the same section name.
+    #
+    def _SectionHeaderParser(self):
+        self._Scope = []
+        self._SectionName = ''
+        ArchList = set()
+        for Item in GetSplitValueList(self._CurrentLine[1:-1], TAB_COMMA_SPLIT):
+            if Item == '':
+                continue
+            ItemList = GetSplitValueList(Item, TAB_SPLIT)
+            # different section should not mix in one section
+            if self._SectionName != '' and self._SectionName != ItemList[0].upper():
+                EdkLogger.error('Parser', FORMAT_INVALID, "Different section names in the same section",
+                                File=self.MetaFile, Line=self._LineIndex+1, ExtraData=self._CurrentLine)
+            self._SectionName = ItemList[0].upper()
+            if self._SectionName in self.DataType:
+                self._SectionType = self.DataType[self._SectionName]
+            else:
+                # unknown section: warn but keep parsing with MODEL_UNKNOWN
+                self._SectionType = MODEL_UNKNOWN
+                EdkLogger.warn("Parser", "Unrecognized section", File=self.MetaFile,
+                                Line=self._LineIndex+1, ExtraData=self._CurrentLine)
+            # S1 is always Arch
+            if len(ItemList) > 1:
+                S1 = ItemList[1].upper()
+            else:
+                S1 = 'COMMON'
+            ArchList.add(S1)
+            # S2 may be Platform or ModuleType
+            if len(ItemList) > 2:
+                S2 = ItemList[2].upper()
+            else:
+                S2 = 'COMMON'
+            self._Scope.append([S1, S2])
+
+        # 'COMMON' must not be used with specific ARCHs at the same section
+        if 'COMMON' in ArchList and len(ArchList) > 1:
+            EdkLogger.error('Parser', FORMAT_INVALID, "'common' ARCH must not be used with specific ARCHs",
+                            File=self.MetaFile, Line=self._LineIndex+1, ExtraData=self._CurrentLine)
+        # If the section information is needed later, it should be stored in database
+        self._ValueList[0] = self._SectionName
+
+    ## [defines] section parser
+    #
+    #   Parses "NAME = VALUE" lines; both name and value are mandatory.
+    #   The resulting pair is recorded in self._Defines (and, for old EDK
+    #   INF files, also as a file-local macro).
+    #
+    @ParseMacro
+    def _DefineParser(self):
+        TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
+        self._ValueList[1:len(TokenList)] = TokenList
+        if not self._ValueList[1]:
+            EdkLogger.error('Parser', FORMAT_INVALID, "No name specified",
+                            ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+        if not self._ValueList[2]:
+            EdkLogger.error('Parser', FORMAT_INVALID, "No value specified",
+                            ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+
+        self._ValueList = [ReplaceMacro(Value, self._Macros) for Value in self._ValueList]
+        Name, Value = self._ValueList[1], self._ValueList[2]
+        # Sometimes, we need to make differences between EDK and EDK2 modules
+        if Name == 'INF_VERSION':
+            try:
+                # accepts decimal or 0x-prefixed hex (int with base 0)
+                self._Version = int(Value, 0)
+            except:
+                EdkLogger.error('Parser', FORMAT_INVALID, "Invalid version number",
+                                ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+
+        if type(self) == InfParser and self._Version < 0x00010005:
+            # EDK module allows using defines as macros
+            self._FileLocalMacros[Name] = Value
+        self._Defines[Name] = Value
+
+    ## [BuildOptions] section parser
+    #
+    #   Line format: [<family>:]<TARGET>_<TOOLCHAIN>_<ARCH>_<TOOL>_FLAGS = <value>
+    #   ValueList slots: [0] toolchain family, [1] key, [2] value.
+    #
+    @ParseMacro
+    def _BuildOptionParser(self):
+        TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
+        TokenList2 = GetSplitValueList(TokenList[0], ':', 1)
+        if len(TokenList2) == 2:
+            self._ValueList[0] = TokenList2[0]              # toolchain family
+            self._ValueList[1] = TokenList2[1]              # keys
+        else:
+            self._ValueList[1] = TokenList[0]
+        # DSC files defer macro expansion to post-process, so skip here
+        if len(TokenList) == 2 and type(self) != DscParser: # value
+            self._ValueList[2] = ReplaceMacro(TokenList[1], self._Macros)
+
+        # the key must have exactly five '_'-separated parts
+        if self._ValueList[1].count('_') != 4:
+            EdkLogger.error(
+                'Parser',
+                FORMAT_INVALID,
+                "'%s' must be in format of <TARGET>_<TOOLCHAIN>_<ARCH>_<TOOL>_FLAGS" % self._ValueList[1],
+                ExtraData=self._CurrentLine,
+                File=self.MetaFile,
+                Line=self._LineIndex+1
+                )
+
+    ## Collect all macros currently in effect (getter for _Macros property)
+    #
+    #   @retval dict    File-local macros overlaid with section-scope macros
+    def _GetMacros(self):
+        Macros = {}
+        Macros.update(self._FileLocalMacros)
+        Macros.update(self._GetApplicableSectionMacro())
+        return Macros
+
+
+    ## Get section Macros that are applicable to current line, which may come from other sections
+    ## that share the same name while scope is wider
+    #
+    #   Scopes are applied narrowest-last so a more specific (arch, module
+    #   type) definition overrides a COMMON one.
+    def _GetApplicableSectionMacro(self):
+        Macros = {}
+        for Scope1, Scope2 in [("COMMON", "COMMON"), ("COMMON", self._Scope[0][1]),
+                               (self._Scope[0][0], "COMMON"), (self._Scope[0][0], self._Scope[0][1])]:
+            if (self._SectionType, Scope1, Scope2) in self._SectionsMacroDict:
+                Macros.update(self._SectionsMacroDict[(self._SectionType, Scope1, Scope2)])
+        return Macros
+
+    # Section-type -> parser-method dispatch table; overridden per subclass
+    _SectionParser  = {}
+    # Public read/write access to the parse-complete flag
+    Finished        = property(_GetFinished, _SetFinished)
+    # Read-only view of the macros applicable at the current parse position
+    _Macros         = property(_GetMacros)
+
+
+## INF file parser class
+#
+#   @param      FilePath        The path of platform description file
+#   @param      FileType        The raw data type of the INF file
+#   @param      Table           Database used to retrieve module/package information
+#   @param      Macros          Macros used for replacement in file
+#
+class InfParser(MetaFileParser):
+    # INF file supported data types (one type per section)
+    DataType = {
+        TAB_UNKNOWN.upper() : MODEL_UNKNOWN,
+        TAB_INF_DEFINES.upper() : MODEL_META_DATA_HEADER,
+        TAB_DSC_DEFINES_DEFINE : MODEL_META_DATA_DEFINE,
+        TAB_BUILD_OPTIONS.upper() : MODEL_META_DATA_BUILD_OPTION,
+        TAB_INCLUDES.upper() : MODEL_EFI_INCLUDE,
+        TAB_LIBRARIES.upper() : MODEL_EFI_LIBRARY_INSTANCE,
+        TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS,
+        TAB_PACKAGES.upper() : MODEL_META_DATA_PACKAGE,
+        TAB_NMAKE.upper() : MODEL_META_DATA_NMAKE,
+        TAB_INF_FIXED_PCD.upper() : MODEL_PCD_FIXED_AT_BUILD,
+        TAB_INF_PATCH_PCD.upper() : MODEL_PCD_PATCHABLE_IN_MODULE,
+        TAB_INF_FEATURE_PCD.upper() : MODEL_PCD_FEATURE_FLAG,
+        TAB_INF_PCD_EX.upper() : MODEL_PCD_DYNAMIC_EX,
+        TAB_INF_PCD.upper() : MODEL_PCD_DYNAMIC,
+        TAB_SOURCES.upper() : MODEL_EFI_SOURCE_FILE,
+        TAB_GUIDS.upper() : MODEL_EFI_GUID,
+        TAB_PROTOCOLS.upper() : MODEL_EFI_PROTOCOL,
+        TAB_PPIS.upper() : MODEL_EFI_PPI,
+        TAB_DEPEX.upper() : MODEL_EFI_DEPEX,
+        TAB_BINARIES.upper() : MODEL_EFI_BINARY_FILE,
+        TAB_USER_EXTENSIONS.upper() : MODEL_META_DATA_USER_EXTENSION
+    }
+
+    ## Constructor of InfParser
+    #
+    #  Initialize object of InfParser
+    #
+    #   @param      FilePath        The path of module description file
+    #   @param      FileType        The raw data type of the INF file
+    #   @param      Table           Database used to retrieve module/package information
+    #
+    def __init__(self, FilePath, FileType, Table):
+        # prevent re-initialization
+        if hasattr(self, "_Table"):
+            return
+        MetaFileParser.__init__(self, FilePath, FileType, Table)
+        self.TblFile = EccGlobalData.gDb.TblFile
+        self.FileID = -1
+
+    ## Parser starter
+    #
+    #   Reads the INF file line by line, dispatching each line to the
+    #   appropriate section parser and storing the result for every
+    #   (Arch, Platform) pair in the current section scope.
+    def Start(self):
+        NmakeLine = ''
+        Content = ''
+        try:
+            Content = open(str(self.MetaFile), 'r').readlines()
+        except:
+            EdkLogger.error("Parser", FILE_READ_FAILURE, ExtraData=self.MetaFile)
+        #
+        # Insert a record for file
+        #
+        Filename = NormPath(self.MetaFile)
+        FileID = self.TblFile.GetFileId(Filename)
+        if FileID:
+            self.FileID = FileID
+        else:
+            self.FileID = self.TblFile.InsertFile(Filename, MODEL_FILE_INF)
+
+        # parse the file line by line
+        IsFindBlockComment = False
+
+        for Index in range(0, len(Content)):
+            # skip empty, commented, block commented lines
+            Line = CleanString(Content[Index], AllowCppStyleComment=True)
+            NextLine = ''
+            if Index + 1 < len(Content):
+                NextLine = CleanString(Content[Index + 1])
+            if Line == '':
+                continue
+            # /* ... */ style block comments (Edk.x) span multiple lines
+            if Line.find(DataType.TAB_COMMENT_EDK_START) > -1:
+                IsFindBlockComment = True
+                continue
+            if Line.find(DataType.TAB_COMMENT_EDK_END) > -1:
+                IsFindBlockComment = False
+                continue
+            if IsFindBlockComment:
+                continue
+
+            self._LineIndex = Index
+            self._CurrentLine = Line
+
+            # section header
+            if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END:
+                self._SectionHeaderParser()
+                # Check invalid sections
+                if self._Version < 0x00010005:
+                    # old (Edk.x) INF: EDK2-only sections are rejected
+                    if self._SectionType in [MODEL_META_DATA_BUILD_OPTION,
+                                             MODEL_EFI_LIBRARY_CLASS,
+                                             MODEL_META_DATA_PACKAGE,
+                                             MODEL_PCD_FIXED_AT_BUILD,
+                                             MODEL_PCD_PATCHABLE_IN_MODULE,
+                                             MODEL_PCD_FEATURE_FLAG,
+                                             MODEL_PCD_DYNAMIC_EX,
+                                             MODEL_PCD_DYNAMIC,
+                                             MODEL_EFI_GUID,
+                                             MODEL_EFI_PROTOCOL,
+                                             MODEL_EFI_PPI,
+                                             MODEL_META_DATA_USER_EXTENSION]:
+                        EdkLogger.error('Parser', FORMAT_INVALID,
+                                        "Section [%s] is not allowed in inf file without version" % (self._SectionName),
+                                        ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+                elif self._SectionType in [MODEL_EFI_INCLUDE,
+                                           MODEL_EFI_LIBRARY_INSTANCE,
+                                           MODEL_META_DATA_NMAKE]:
+                    # new (EDK2) INF: Edk.x-only sections are rejected
+                    EdkLogger.error('Parser', FORMAT_INVALID,
+                                    "Section [%s] is not allowed in inf file with version 0x%08x" % (self._SectionName, self._Version),
+                                    ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+                continue
+            # merge two lines specified by '\' in section NMAKE
+            elif self._SectionType == MODEL_META_DATA_NMAKE:
+                if Line[-1] == '\\':
+                    if NextLine == '':
+                        self._CurrentLine = NmakeLine + Line[0:-1]
+                        NmakeLine = ''
+                    else:
+                        if NextLine[0] == TAB_SECTION_START and NextLine[-1] == TAB_SECTION_END:
+                            # next line starts a new section: stop merging
+                            self._CurrentLine = NmakeLine + Line[0:-1]
+                            NmakeLine = ''
+                        else:
+                            NmakeLine = NmakeLine + ' ' + Line[0:-1]
+                            continue
+                else:
+                    self._CurrentLine = NmakeLine + Line
+                    NmakeLine = ''
+
+            # section content
+            self._ValueList = ['','','']
+            # parse current line, result will be put in self._ValueList
+            self._SectionParser[self._SectionType](self)
+            if self._ValueList == None or self._ItemType == MODEL_META_DATA_DEFINE:
+                self._ItemType = -1
+                continue
+            #
+            # Model, Value1, Value2, Value3, Arch, Platform, BelongsToItem=-1,
+            # LineBegin=-1, ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, Enabled=-1
+            #
+            self._ValueList[0] = self._ValueList[0].replace('/', '\\')
+            for Arch, Platform in self._Scope:
+                self._Store(self._SectionType,
+                            self._ValueList[0],
+                            self._ValueList[1],
+                            self._ValueList[2],
+                            Arch,
+                            Platform,
+                            self._Owner[-1],
+                            self.FileID,
+                            self._LineIndex+1,
+                            -1,
+                            self._LineIndex+1,
+                            -1,
+                            0
+                            )
+        if IsFindBlockComment:
+            EdkLogger.error("Parser", FORMAT_INVALID, "Open block comments (starting with /*) are expected to end with */",
+                            File=self.MetaFile)
+        self._Done()
+
+    ## Data parser for the format in which there's path
+    #
+    #   Only path can have macro used. So we need to replace them before use.
+    #   Bare paths (not rooted at a macro or '.') are assumed relative to
+    #   $(EFI_SOURCE); $(EFI_SOURCE)\Edk paths are redirected to $(EDK_SOURCE).
+    #
+    def _IncludeParser(self):
+        TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
+        self._ValueList[0:len(TokenList)] = TokenList
+        Macros = self._Macros
+        if Macros:
+            for Index in range(0, len(self._ValueList)):
+                Value = self._ValueList[Index]
+                if not Value:
+                    continue
+
+                if Value.upper().find('$(EFI_SOURCE)\Edk'.upper()) > -1 or Value.upper().find('$(EFI_SOURCE)/Edk'.upper()) > -1:
+                    Value = '$(EDK_SOURCE)' + Value[17:]
+                if Value.find('$(EFI_SOURCE)') > -1 or Value.find('$(EDK_SOURCE)') > -1:
+                    pass
+                elif Value.startswith('.'):
+                    pass
+                elif Value.startswith('$('):
+                    pass
+                else:
+                    Value = '$(EFI_SOURCE)/' + Value
+
+                self._ValueList[Index] = ReplaceMacro(Value, Macros)
+
+    ## Parse [Sources] section
+    #
+    #   Only path can have macro used. So we need to replace them before use.
+    #
+    @ParseMacro
+    def _SourceFileParser(self):
+        TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
+        self._ValueList[0:len(TokenList)] = TokenList
+        Macros = self._Macros
+        # For Acpi tables, remove macro like ' TABLE_NAME=Sata1'
+        if 'COMPONENT_TYPE' in Macros:
+            if self._Defines['COMPONENT_TYPE'].upper() == 'ACPITABLE':
+                self._ValueList[0] = GetSplitValueList(self._ValueList[0], ' ', 1)[0]
+        if self._Defines['BASE_NAME'] == 'Microcode':
+            pass
+        self._ValueList = [ReplaceMacro(Value, Macros) for Value in self._ValueList]
+
+    ## Parse [Binaries] section
+    #
+    #   Line format: <FileType> | <FilePath> [| <Target>]
+    #   Only path can have macro used. So we need to replace them before use.
+    #
+    @ParseMacro
+    def _BinaryFileParser(self):
+        TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 2)
+        if len(TokenList) < 2:
+            EdkLogger.error('Parser', FORMAT_INVALID, "No file type or path specified",
+                            ExtraData=self._CurrentLine + " (<FileType> | <FilePath> [| <Target>])",
+                            File=self.MetaFile, Line=self._LineIndex+1)
+        if not TokenList[0]:
+            EdkLogger.error('Parser', FORMAT_INVALID, "No file type specified",
+                            ExtraData=self._CurrentLine + " (<FileType> | <FilePath> [| <Target>])",
+                            File=self.MetaFile, Line=self._LineIndex+1)
+        if not TokenList[1]:
+            EdkLogger.error('Parser', FORMAT_INVALID, "No file path specified",
+                            ExtraData=self._CurrentLine + " (<FileType> | <FilePath> [| <Target>])",
+                            File=self.MetaFile, Line=self._LineIndex+1)
+        self._ValueList[0:len(TokenList)] = TokenList
+        self._ValueList[1] = ReplaceMacro(self._ValueList[1], self._Macros)
+
+    ## [nmake] section parser (Edk.x style only)
+    def _NmakeParser(self):
+        TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
+        self._ValueList[0:len(TokenList)] = TokenList
+        # remove macros
+        self._ValueList[1] = ReplaceMacro(self._ValueList[1], self._Macros)
+        # remove self-reference in macro setting
+        #self._ValueList[1] = ReplaceMacro(self._ValueList[1], {self._ValueList[0]:''})
+
+    ## [FixedPcd], [FeaturePcd], [PatchPcd], [Pcd] and [PcdEx] sections parser
+    #
+    #   Line format: <TokenSpaceGuidCName>.<PcdCName> [| <Value>]
+    #
+    @ParseMacro
+    def _PcdParser(self):
+        TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 1)
+        ValueList = GetSplitValueList(TokenList[0], TAB_SPLIT)
+        if len(ValueList) != 2:
+            EdkLogger.error('Parser', FORMAT_INVALID, "Illegal token space GUID and PCD name format",
+                            ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<PcdCName>)",
+                            File=self.MetaFile, Line=self._LineIndex+1)
+        self._ValueList[0:1] = ValueList
+        if len(TokenList) > 1:
+            self._ValueList[2] = TokenList[1]
+        if self._ValueList[0] == '' or self._ValueList[1] == '':
+            EdkLogger.error('Parser', FORMAT_INVALID, "No token space GUID or PCD name specified",
+                            ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<PcdCName>)",
+                            File=self.MetaFile, Line=self._LineIndex+1)
+
+        # if value are 'True', 'true', 'TRUE' or 'False', 'false', 'FALSE', replace with integer 1 or 0.
+        if self._ValueList[2] != '':
+            InfPcdValueList = GetSplitValueList(TokenList[1], TAB_VALUE_SPLIT, 1)
+            if InfPcdValueList[0] in ['True', 'true', 'TRUE']:
+                self._ValueList[2] = TokenList[1].replace(InfPcdValueList[0], '1', 1);
+            elif InfPcdValueList[0] in ['False', 'false', 'FALSE']:
+                self._ValueList[2] = TokenList[1].replace(InfPcdValueList[0], '0', 1);
+
+    ## [depex] section parser
+    #
+    #   The whole line is kept verbatim as the depex expression token.
+    @ParseMacro
+    def _DepexParser(self):
+        self._ValueList[0:1] = [self._CurrentLine]
+
+    # Section-type -> parser-method dispatch table used by Start()
+    _SectionParser = {
+        MODEL_UNKNOWN                   :   MetaFileParser._Skip,
+        MODEL_META_DATA_HEADER          :   MetaFileParser._DefineParser,
+        MODEL_META_DATA_BUILD_OPTION    :   MetaFileParser._BuildOptionParser,
+        MODEL_EFI_INCLUDE               :   _IncludeParser,                 # for Edk.x modules
+        MODEL_EFI_LIBRARY_INSTANCE      :   MetaFileParser._CommonParser,   # for Edk.x modules
+        MODEL_EFI_LIBRARY_CLASS         :   MetaFileParser._PathParser,
+        MODEL_META_DATA_PACKAGE         :   MetaFileParser._PathParser,
+        MODEL_META_DATA_NMAKE           :   _NmakeParser,                   # for Edk.x modules
+        MODEL_PCD_FIXED_AT_BUILD        :   _PcdParser,
+        MODEL_PCD_PATCHABLE_IN_MODULE   :   _PcdParser,
+        MODEL_PCD_FEATURE_FLAG          :   _PcdParser,
+        MODEL_PCD_DYNAMIC_EX            :   _PcdParser,
+        MODEL_PCD_DYNAMIC               :   _PcdParser,
+        MODEL_EFI_SOURCE_FILE           :   _SourceFileParser,
+        MODEL_EFI_GUID                  :   MetaFileParser._CommonParser,
+        MODEL_EFI_PROTOCOL              :   MetaFileParser._CommonParser,
+        MODEL_EFI_PPI                   :   MetaFileParser._CommonParser,
+        MODEL_EFI_DEPEX                 :   _DepexParser,
+        MODEL_EFI_BINARY_FILE           :   _BinaryFileParser,
+        MODEL_META_DATA_USER_EXTENSION  :   MetaFileParser._Skip,
+    }
+
+## DSC file parser class
+#
+#   @param      FilePath        The path of platform description file
+#   @param      FileType        The raw data of DSC file
+#   @param      Table           Database used to retrieve module/package information
+#   @param      Macros          Macros used for replacement in file
+#   @param      Owner           Owner ID (for sub-section parsing)
+#   @param      From            ID from which the data comes (for !INCLUDE directive)
+#
+class DscParser(MetaFileParser):
+    # DSC file supported data types (one type per section)
+    DataType = {
+        # section names
+        TAB_SKUIDS.upper()                          :   MODEL_EFI_SKU_ID,
+        TAB_LIBRARIES.upper()                       :   MODEL_EFI_LIBRARY_INSTANCE,
+        TAB_LIBRARY_CLASSES.upper()                 :   MODEL_EFI_LIBRARY_CLASS,
+        TAB_BUILD_OPTIONS.upper()                   :   MODEL_META_DATA_BUILD_OPTION,
+        TAB_PCDS_FIXED_AT_BUILD_NULL.upper()        :   MODEL_PCD_FIXED_AT_BUILD,
+        TAB_PCDS_PATCHABLE_IN_MODULE_NULL.upper()   :   MODEL_PCD_PATCHABLE_IN_MODULE,
+        TAB_PCDS_FEATURE_FLAG_NULL.upper()          :   MODEL_PCD_FEATURE_FLAG,
+        TAB_PCDS_DYNAMIC_DEFAULT_NULL.upper()       :   MODEL_PCD_DYNAMIC_DEFAULT,
+        TAB_PCDS_DYNAMIC_HII_NULL.upper()           :   MODEL_PCD_DYNAMIC_HII,
+        TAB_PCDS_DYNAMIC_VPD_NULL.upper()           :   MODEL_PCD_DYNAMIC_VPD,
+        TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL.upper()    :   MODEL_PCD_DYNAMIC_EX_DEFAULT,
+        TAB_PCDS_DYNAMIC_EX_HII_NULL.upper()        :   MODEL_PCD_DYNAMIC_EX_HII,
+        TAB_PCDS_DYNAMIC_EX_VPD_NULL.upper()        :   MODEL_PCD_DYNAMIC_EX_VPD,
+        TAB_COMPONENTS.upper()                      :   MODEL_META_DATA_COMPONENT,
+        TAB_COMPONENTS_SOURCE_OVERRIDE_PATH.upper() :   MODEL_META_DATA_COMPONENT_SOURCE_OVERRIDE_PATH,
+        TAB_DSC_DEFINES.upper()                     :   MODEL_META_DATA_HEADER,
+        TAB_DSC_DEFINES_DEFINE                      :   MODEL_META_DATA_DEFINE,
+        TAB_DSC_DEFINES_EDKGLOBAL                   :   MODEL_META_DATA_GLOBAL_DEFINE,
+        # directives (!include and conditionals)
+        TAB_INCLUDE.upper()                         :   MODEL_META_DATA_INCLUDE,
+        TAB_IF.upper()                              :   MODEL_META_DATA_CONDITIONAL_STATEMENT_IF,
+        TAB_IF_DEF.upper()                          :   MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF,
+        TAB_IF_N_DEF.upper()                        :   MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF,
+        TAB_ELSE_IF.upper()                         :   MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF,
+        TAB_ELSE.upper()                            :   MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE,
+        TAB_END_IF.upper()                          :   MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF,
+    }
+
+    # Valid names in define section
+    DefineKeywords = [
+        "DSC_SPECIFICATION",
+        "PLATFORM_NAME",
+        "PLATFORM_GUID",
+        "PLATFORM_VERSION",
+        "SKUID_IDENTIFIER",
+        "SUPPORTED_ARCHITECTURES",
+        "BUILD_TARGETS",
+        "OUTPUT_DIRECTORY",
+        "FLASH_DEFINITION",
+        "BUILD_NUMBER",
+        "RFC_LANGUAGES",
+        "ISO_LANGUAGES",
+        "TIME_STAMP_FILE",
+        "VPD_TOOL_GUID",
+        "FIX_LOAD_TOP_MEMORY_ADDRESS"
+    ]
+
+    # Regex for $(MACRO)-style symbols, reused from the expression evaluator
+    SymbolPattern = ValueExpression.SymbolPattern
+
+    ## Constructor of DscParser
+    #
+    #  Initialize object of DscParser
+    #
+    #   @param      FilePath        The path of platform description file
+    #   @param      FileType        The raw data type of the DSC file
+    #   @param      Table           Database used to retrieve module/package information
+    #   @param      Owner           Owner ID (for sub-section parsing)
+    #   @param      From            ID from which the data comes (for !INCLUDE directive)
+    #
+    def __init__(self, FilePath, FileType, Table, Owner=-1, From=-1):
+        # prevent re-initialization
+        if hasattr(self, "_Table"):
+            return
+        MetaFileParser.__init__(self, FilePath, FileType, Table, Owner, From)
+        self._Version = 0x00010005  # Only EDK2 dsc file is supported
+        # to store conditional directive evaluation result
+        self._DirectiveStack = []
+        self._DirectiveEvalStack = []
+        # 1 = records enabled (not inside a false conditional block)
+        self._Enabled = 1
+
+        # Final valid replacable symbols
+        self._Symbols = {}
+        #
+        #  Map the ID between the original table and new table to track
+        #  the owner item
+        #
+        self._IdMapping = {-1:-1}
+
+        self.TblFile = EccGlobalData.gDb.TblFile
+        self.FileID = -1
+
+    ## Parser starter
+    #
+    #   Reads the DSC file line by line, dispatching section headers,
+    #   subsection markers ({ }), directives (!...) and section content to
+    #   their parsers, and stores each record per (Arch, ModuleType) scope.
+    def Start(self):
+        Content = ''
+        try:
+            Content = open(str(self.MetaFile), 'r').readlines()
+        except:
+            EdkLogger.error("Parser", FILE_READ_FAILURE, ExtraData=self.MetaFile)
+        #
+        # Insert a record for file
+        #
+        Filename = NormPath(self.MetaFile)
+        FileID = self.TblFile.GetFileId(Filename)
+        if FileID:
+            self.FileID = FileID
+        else:
+            self.FileID = self.TblFile.InsertFile(Filename, MODEL_FILE_DSC)
+
+        for Index in range(0, len(Content)):
+            Line = CleanString(Content[Index])
+            # skip empty line
+            if Line == '':
+                continue
+
+            self._CurrentLine = Line
+            self._LineIndex = Index
+            # first line inside a component subsection: adopt the component
+            # record as the owner of subsequent records
+            if self._InSubsection and self._Owner[-1] == -1:
+                self._Owner.append(self._LastItem)
+
+            # section header
+            if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END:
+                self._SectionType = MODEL_META_DATA_SECTION_HEADER
+            # subsection ending
+            elif Line[0] == '}' and self._InSubsection:
+                self._InSubsection = False
+                self._SubsectionType = MODEL_UNKNOWN
+                self._SubsectionName = ''
+                self._Owner[-1] = -1
+                continue
+            # subsection header
+            elif Line[0] == TAB_OPTION_START and Line[-1] == TAB_OPTION_END:
+                self._SubsectionType = MODEL_META_DATA_SUBSECTION_HEADER
+            # directive line
+            elif Line[0] == '!':
+                self._DirectiveParser()
+                continue
+
+            if self._InSubsection:
+                SectionType = self._SubsectionType
+            else:
+                SectionType = self._SectionType
+            self._ItemType = SectionType
+
+            self._ValueList = ['', '', '']
+            self._SectionParser[SectionType](self)
+            if self._ValueList == None:
+                continue
+            #
+            # Model, Value1, Value2, Value3, Arch, ModuleType, BelongsToItem=-1, BelongsToFile=-1,
+            # LineBegin=-1, ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, Enabled=-1
+            #
+            for Arch, ModuleType in self._Scope:
+                self._LastItem = self._Store(
+                                        self._ItemType,
+                                        self._ValueList[0],
+                                        self._ValueList[1],
+                                        self._ValueList[2],
+                                        Arch,
+                                        ModuleType,
+                                        self._Owner[-1],
+                                        self.FileID,
+                                        self._From,
+                                        self._LineIndex+1,
+                                        -1,
+                                        self._LineIndex+1,
+                                        -1,
+                                        self._Enabled
+                                        )
+
+        # any directive left on the stack means an unterminated conditional
+        if self._DirectiveStack:
+            Type, Line, Text = self._DirectiveStack[-1]
+            EdkLogger.error('Parser', FORMAT_INVALID, "No matching '!endif' found",
+                            ExtraData=Text, File=self.MetaFile, Line=Line)
+        self._Done()
+
+    ## <subsection_header> parser
+    #
+    #   Handles the "<SectionName>" line that opens a component subsection;
+    #   unknown names are kept as MODEL_UNKNOWN with a warning.
+    def _SubsectionHeaderParser(self):
+        self._SubsectionName = self._CurrentLine[1:-1].upper()
+        if self._SubsectionName in self.DataType:
+            self._SubsectionType = self.DataType[self._SubsectionName]
+        else:
+            self._SubsectionType = MODEL_UNKNOWN
+            EdkLogger.warn("Parser", "Unrecognized sub-section", File=self.MetaFile,
+                           Line=self._LineIndex+1, ExtraData=self._CurrentLine)
+        self._ValueList[0] = self._SubsectionName
+
+    ## Directive statement parser
+    #
+    #   Parses !include and !if/!ifdef/!ifndef/!elseif/!else/!endif lines,
+    #   maintains the conditional-directive stack, and stores the directive
+    #   as a record for later evaluation in post-process.
+    def _DirectiveParser(self):
+        self._ValueList = ['','','']
+        TokenList = GetSplitValueList(self._CurrentLine, ' ', 1)
+        self._ValueList[0:len(TokenList)] = TokenList
+
+        # Syntax check
+        DirectiveName = self._ValueList[0].upper()
+        if DirectiveName not in self.DataType:
+            EdkLogger.error("Parser", FORMAT_INVALID, "Unknown directive [%s]" % DirectiveName,
+                            File=self.MetaFile, Line=self._LineIndex+1)
+        if DirectiveName in ['!IF', '!IFDEF', '!INCLUDE', '!IFNDEF', '!ELSEIF'] and self._ValueList[1] == '':
+            EdkLogger.error("Parser", FORMAT_INVALID, "Missing expression",
+                            File=self.MetaFile, Line=self._LineIndex+1,
+                            ExtraData=self._CurrentLine)
+
+        ItemType = self.DataType[DirectiveName]
+        if ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF:
+            # Remove all directives between !if and !endif, including themselves
+            while self._DirectiveStack:
+                # Remove any !else or !elseif
+                DirectiveInfo = self._DirectiveStack.pop()
+                if DirectiveInfo[0] in [MODEL_META_DATA_CONDITIONAL_STATEMENT_IF,
+                                        MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF,
+                                        MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF]:
+                    break
+            else:
+                # while-loop exhausted without break: no opening !if found
+                EdkLogger.error("Parser", FORMAT_INVALID, "Redundant '!endif'",
+                                File=self.MetaFile, Line=self._LineIndex+1,
+                                ExtraData=self._CurrentLine)
+        elif ItemType != MODEL_META_DATA_INCLUDE:
+            # Break if there's a !else is followed by a !elseif
+            if ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF and \
+               self._DirectiveStack and \
+               self._DirectiveStack[-1][0] == MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE:
+                EdkLogger.error("Parser", FORMAT_INVALID, "'!elseif' after '!else'",
+                                File=self.MetaFile, Line=self._LineIndex+1,
+                                ExtraData=self._CurrentLine)
+            self._DirectiveStack.append((ItemType, self._LineIndex+1, self._CurrentLine))
+        elif self._From > 0:
+            # nested !include (we are already parsing an included file)
+            EdkLogger.error('Parser', FORMAT_INVALID,
+                            "No '!include' allowed in included file",
+                            ExtraData=self._CurrentLine, File=self.MetaFile,
+                            Line=self._LineIndex+1)
+
+        #
+        # Model, Value1, Value2, Value3, Arch, ModuleType, BelongsToItem=-1, BelongsToFile=-1,
+        # LineBegin=-1, ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, Enabled=-1
+        #
+        self._LastItem = self._Store(
+                                ItemType,
+                                self._ValueList[0],
+                                self._ValueList[1],
+                                self._ValueList[2],
+                                'COMMON',
+                                'COMMON',
+                                self._Owner[-1],
+                                self.FileID,
+                                self._From,
+                                self._LineIndex+1,
+                                -1,
+                                self._LineIndex+1,
+                                -1,
+                                0
+                                )
+
+    ## [defines] section parser
+    #
+    #  Parses "Name = Value" lines; the name must be one of the known
+    #  keywords in self.DefineKeywords.  Values are recorded in self._Defines.
+    #  NOTE: EdkLogger.error raises, so execution continues only for valid lines.
+    @ParseMacro
+    def _DefineParser(self):
+        TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
+        self._ValueList[1:len(TokenList)] = TokenList
+
+        # Syntax check
+        if not self._ValueList[1]:
+            EdkLogger.error('Parser', FORMAT_INVALID, "No name specified",
+                            ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+        if not self._ValueList[2]:
+            EdkLogger.error('Parser', FORMAT_INVALID, "No value specified",
+                            ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+        if not self._ValueList[1] in self.DefineKeywords:
+            EdkLogger.error('Parser', FORMAT_INVALID,
+                            "Unknown keyword found: %s. "
+                            "If this is a macro you must "
+                            "add it as a DEFINE in the DSC" % self._ValueList[1],
+                            ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+        self._Defines[self._ValueList[1]] = self._ValueList[2]
+        # Reclassify the record as a [Defines] header item
+        self._ItemType = self.DataType[TAB_DSC_DEFINES.upper()]
+
+    ## [SkuIds] section parser: each entry must be "<Integer>|<UiName>"
+    @ParseMacro
+    def _SkuIdParser(self):
+        TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
+        if len(TokenList) != 2:
+            EdkLogger.error('Parser', FORMAT_INVALID, "Correct format is '<Integer>|<UiName>'",
+                            ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+        self._ValueList[0:len(TokenList)] = TokenList
+
+    ## Parse Edk style of library modules
+    #  The whole line is the library instance path; no further splitting.
+    def _LibraryInstanceParser(self):
+        self._ValueList[0] = self._CurrentLine
+
+    ## PCD sections parser
+    #
+    #   [PcdsFixedAtBuild]
+    #   [PcdsPatchableInModule]
+    #   [PcdsFeatureFlag]
+    #   [PcdsDynamicEx]
+    #   [PcdsDynamicExDefault]
+    #   [PcdsDynamicExVpd]
+    #   [PcdsDynamicExHii]
+    #   [PcdsDynamic]
+    #   [PcdsDynamicDefault]
+    #   [PcdsDynamicVpd]
+    #   [PcdsDynamicHii]
+    #
+    #  Line format: <TokenSpaceGuidCName>.<TokenCName>|<PcdValue>
+    @ParseMacro
+    def _PcdParser(self):
+        TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 1)
+        # TokenList[0] is "Guid.Name"; split it into ValueList[0]/[1]
+        self._ValueList[0:1] = GetSplitValueList(TokenList[0], TAB_SPLIT)
+        if len(TokenList) == 2:
+            self._ValueList[2] = TokenList[1]
+        if self._ValueList[0] == '' or self._ValueList[1] == '':
+            EdkLogger.error('Parser', FORMAT_INVALID, "No token space GUID or PCD name specified",
+                            ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<TokenCName>|<PcdValue>)",
+                            File=self.MetaFile, Line=self._LineIndex+1)
+        if self._ValueList[2] == '':
+            EdkLogger.error('Parser', FORMAT_INVALID, "No PCD value given",
+                            ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<TokenCName>|<PcdValue>)",
+                            File=self.MetaFile, Line=self._LineIndex+1)
+        # if value are 'True', 'true', 'TRUE' or 'False', 'false', 'FALSE', replace with integer 1 or 0.
+        # (TokenList[1] is safe here: the error above raises when the value is missing.)
+        DscPcdValueList = GetSplitValueList(TokenList[1], TAB_VALUE_SPLIT, 1)
+        if DscPcdValueList[0] in ['True', 'true', 'TRUE']:
+            self._ValueList[2] = TokenList[1].replace(DscPcdValueList[0], '1', 1);
+        elif DscPcdValueList[0] in ['False', 'false', 'FALSE']:
+            self._ValueList[2] = TokenList[1].replace(DscPcdValueList[0], '0', 1);
+
+    ## [components] section parser
+    #  A trailing '{' opens a sub-section (per-module overrides).
+    @ParseMacro
+    def _ComponentParser(self):
+        if self._CurrentLine[-1] == '{':
+            self._ValueList[0] = self._CurrentLine[0:-1].strip()
+            self._InSubsection = True
+        else:
+            self._ValueList[0] = self._CurrentLine
+
+    ## [LibraryClasses] section
+    #  Line format: <LibraryClassName>|<LibraryInstancePath>
+    @ParseMacro
+    def _LibraryClassParser(self):
+        TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
+        if len(TokenList) < 2:
+            EdkLogger.error('Parser', FORMAT_INVALID, "No library class or instance specified",
+                            ExtraData=self._CurrentLine + " (<LibraryClassName>|<LibraryInstancePath>)",
+                            File=self.MetaFile, Line=self._LineIndex+1)
+        if TokenList[0] == '':
+            EdkLogger.error('Parser', FORMAT_INVALID, "No library class specified",
+                            ExtraData=self._CurrentLine + " (<LibraryClassName>|<LibraryInstancePath>)",
+                            File=self.MetaFile, Line=self._LineIndex+1)
+        if TokenList[1] == '':
+            EdkLogger.error('Parser', FORMAT_INVALID, "No library instance specified",
+                            ExtraData=self._CurrentLine + " (<LibraryClassName>|<LibraryInstancePath>)",
+                            File=self.MetaFile, Line=self._LineIndex+1)
+
+        self._ValueList[0:len(TokenList)] = TokenList
+
+    ## SOURCE_OVERRIDE_PATH sub-section parser (whole line is the path)
+    #  NOTE: the "Compponent" spelling is a long-standing upstream typo; it is
+    #  referenced by name in _SectionParser, so it must not be renamed here alone.
+    def _CompponentSourceOverridePathParser(self):
+        self._ValueList[0] = self._CurrentLine
+
+    ## [BuildOptions] section parser
+    #  Line format: [<Family>:]<TARGET>_<TOOLCHAIN>_<ARCH>_<TOOL>_FLAGS = <value>
+    @ParseMacro
+    def _BuildOptionParser(self):
+        TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
+        TokenList2 = GetSplitValueList(TokenList[0], ':', 1)
+        if len(TokenList2) == 2:
+            self._ValueList[0] = TokenList2[0]  # toolchain family
+            self._ValueList[1] = TokenList2[1]  # keys
+        else:
+            self._ValueList[1] = TokenList[0]
+        if len(TokenList) == 2:                 # value
+            self._ValueList[2] = TokenList[1]
+
+        # A valid key has exactly 5 underscore-separated parts (4 underscores)
+        if self._ValueList[1].count('_') != 4:
+            EdkLogger.error(
+                'Parser',
+                FORMAT_INVALID,
+                "'%s' must be in format of <TARGET>_<TOOLCHAIN>_<ARCH>_<TOOL>_FLAGS" % self._ValueList[1],
+                ExtraData=self._CurrentLine,
+                File=self.MetaFile,
+                Line=self._LineIndex+1
+                )
+
+    ## Override parent's method since we'll do all macro replacements in parser
+    #
+    #  Later update() calls override earlier ones, so precedence (low->high) is:
+    #  file-local < section-local < gEdkGlobal < gPlatformDefines <
+    #  gCommandLineDefines, with PCD symbols merged in last when allowed.
+    def _GetMacros(self):
+        Macros = {}
+        Macros.update(self._FileLocalMacros)
+        Macros.update(self._GetApplicableSectionMacro())
+        Macros.update(GlobalData.gEdkGlobal)
+        Macros.update(GlobalData.gPlatformDefines)
+        Macros.update(GlobalData.gCommandLineDefines)
+        # PCD cannot be referenced in macro definition
+        if self._ItemType not in [MODEL_META_DATA_DEFINE, MODEL_META_DATA_GLOBAL_DEFINE]:
+            Macros.update(self._Symbols)
+        return Macros
+
+    ## Second pass over the raw records: resolve macros, evaluate directive
+    #  expressions and PCD values, then store the processed records (plus an
+    #  Enabled flag) into a fresh table and the ECC database.
+    def _PostProcess(self):
+        # Dispatch table: record model type -> post-processing handler
+        Processer = {
+            MODEL_META_DATA_SECTION_HEADER                  :   self.__ProcessSectionHeader,
+            MODEL_META_DATA_SUBSECTION_HEADER               :   self.__ProcessSubsectionHeader,
+            MODEL_META_DATA_HEADER                          :   self.__ProcessDefine,
+            MODEL_META_DATA_DEFINE                          :   self.__ProcessDefine,
+            MODEL_META_DATA_GLOBAL_DEFINE                   :   self.__ProcessDefine,
+            MODEL_META_DATA_INCLUDE                         :   self.__ProcessDirective,
+            MODEL_META_DATA_CONDITIONAL_STATEMENT_IF        :   self.__ProcessDirective,
+            MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE      :   self.__ProcessDirective,
+            MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF     :   self.__ProcessDirective,
+            MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF    :   self.__ProcessDirective,
+            MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF     :   self.__ProcessDirective,
+            MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF    :   self.__ProcessDirective,
+            MODEL_EFI_SKU_ID                                :   self.__ProcessSkuId,
+            MODEL_EFI_LIBRARY_INSTANCE                      :   self.__ProcessLibraryInstance,
+            MODEL_EFI_LIBRARY_CLASS                         :   self.__ProcessLibraryClass,
+            MODEL_PCD_FIXED_AT_BUILD                        :   self.__ProcessPcd,
+            MODEL_PCD_PATCHABLE_IN_MODULE                   :   self.__ProcessPcd,
+            MODEL_PCD_FEATURE_FLAG                          :   self.__ProcessPcd,
+            MODEL_PCD_DYNAMIC_DEFAULT                       :   self.__ProcessPcd,
+            MODEL_PCD_DYNAMIC_HII                           :   self.__ProcessPcd,
+            MODEL_PCD_DYNAMIC_VPD                           :   self.__ProcessPcd,
+            MODEL_PCD_DYNAMIC_EX_DEFAULT                    :   self.__ProcessPcd,
+            MODEL_PCD_DYNAMIC_EX_HII                        :   self.__ProcessPcd,
+            MODEL_PCD_DYNAMIC_EX_VPD                        :   self.__ProcessPcd,
+            MODEL_META_DATA_COMPONENT                       :   self.__ProcessComponent,
+            MODEL_META_DATA_COMPONENT_SOURCE_OVERRIDE_PATH  :   self.__ProcessSourceOverridePath,
+            MODEL_META_DATA_BUILD_OPTION                    :   self.__ProcessBuildOption,
+            MODEL_UNKNOWN                                   :   self._Skip,
+            MODEL_META_DATA_USER_EXTENSION                  :   self._Skip,
+        }
+        
+        # Keep the first-pass table as raw input and write processed records
+        # into a new storage on the same cursor
+        self._RawTable = self._Table
+        self._Table = MetaFileStorage(self._RawTable.Cur, self.MetaFile, MODEL_FILE_DSC, True)
+        self._DirectiveStack = []
+        self._DirectiveEvalStack = []
+        self._FileWithError = self.MetaFile
+        self._FileLocalMacros = {}
+        self._SectionsMacroDict = {}
+        GlobalData.gPlatformDefines = {}
+
+        # Get all macro and PCD which has straitforward value
+        self.__RetrievePcdValue()
+        self._Content = self._RawTable.GetAll()
+        self._ContentIndex = 0
+        while self._ContentIndex < len(self._Content) :
+            Id, self._ItemType, V1, V2, V3, S1, S2, Owner, BelongsToFile, self._From, \
+                LineStart, ColStart, LineEnd, ColEnd, Enabled = self._Content[self._ContentIndex]
+
+            # Records of the top-level file: errors point back at this DSC
+            if self._From < 0:
+                self._FileWithError = self.MetaFile
+
+            # Advance before processing: __ProcessDirective uses _ContentIndex
+            # to splice records of an included file at the right position
+            self._ContentIndex += 1
+
+            self._Scope = [[S1, S2]]
+            self._LineIndex = LineStart - 1
+            self._ValueList = [V1, V2, V3]
+
+            try:
+                Processer[self._ItemType]()
+            except EvaluationException, Excpt:
+                # 
+                # Only catch expression evaluation error here. We need to report
+                # the precise number of line on which the error occurred
+                #
+                EdkLogger.error('Parser', FORMAT_INVALID, "Invalid expression: %s" % str(Excpt),
+                                File=self._FileWithError, ExtraData=' '.join(self._ValueList), 
+                                Line=self._LineIndex+1)
+            except MacroException, Excpt:
+                EdkLogger.error('Parser', FORMAT_INVALID, str(Excpt),
+                                File=self._FileWithError, ExtraData=' '.join(self._ValueList), 
+                                Line=self._LineIndex+1)
+
+            # Handlers set _ValueList to None to drop the record (e.g. !include)
+            if self._ValueList == None:
+                continue 
+
+            NewOwner = self._IdMapping.get(Owner, -1)
+            # Enabled iff no enclosing conditional evaluated to False
+            self._Enabled = int((not self._DirectiveEvalStack) or (False not in self._DirectiveEvalStack))
+            self._LastItem = self._Store(
+                                self._ItemType,
+                                self._ValueList[0],
+                                self._ValueList[1],
+                                self._ValueList[2],
+                                S1,
+                                S2,
+                                NewOwner,
+                                BelongsToFile,
+                                self._From,
+                                self._LineIndex+1,
+                                -1,
+                                self._LineIndex+1,
+                                -1,
+                                self._Enabled
+                                )
+            self._IdMapping[Id] = self._LastItem
+
+        # Mirror all processed records into the ECC database
+        RecordList = self._Table.GetAll()
+        for Record in RecordList:
+            EccGlobalData.gDb.TblDsc.Insert(Record[1],Record[2],Record[3],Record[4],Record[5],Record[6],Record[7],Record[8],Record[9],Record[10],Record[11],Record[12],Record[13],Record[14])
+        GlobalData.gPlatformDefines.update(self._FileLocalMacros)
+        self._PostProcessed = True
+        self._Content = None
+
+    ## Resolve the current section name to its model type (MODEL_UNKNOWN if unrecognized)
+    def __ProcessSectionHeader(self):
+        self._SectionName = self._ValueList[0]
+        if self._SectionName in self.DataType:
+            self._SectionType = self.DataType[self._SectionName]
+        else:
+            self._SectionType = MODEL_UNKNOWN
+
+    ## Resolve the current sub-section name to its model type (MODEL_UNKNOWN if unrecognized)
+    def __ProcessSubsectionHeader(self):
+        self._SubsectionName = self._ValueList[0]
+        if self._SubsectionName in self.DataType:
+            self._SubsectionType = self.DataType[self._SubsectionName]
+        else:
+            self._SubsectionType = MODEL_UNKNOWN
+
+    ## Collect FeatureFlag and FixedAtBuild PCDs whose value contains no macro
+    #  or PCD reference into self._Symbols, so they can be used in expressions.
+    #  NOTE(review): BelongsToItem is passed as -1.0 (float) in both queries —
+    #  presumably matching how the raw table stores the column; confirm before
+    #  changing to an int.
+    def __RetrievePcdValue(self):
+        Records = self._RawTable.Query(MODEL_PCD_FEATURE_FLAG, BelongsToItem=-1.0)
+        for TokenSpaceGuid,PcdName,Value,Dummy2,Dummy3,ID,Line in Records:
+            Value, DatumType, MaxDatumSize = AnalyzePcdData(Value)
+            # Only use PCD whose value is straitforward (no macro and PCD)
+            if self.SymbolPattern.findall(Value):
+                continue
+            Name = TokenSpaceGuid + '.' + PcdName
+            # Don't use PCD with different values.
+            if Name in self._Symbols and self._Symbols[Name] != Value:
+                self._Symbols.pop(Name)
+                continue 
+            self._Symbols[Name] = Value
+
+        Records = self._RawTable.Query(MODEL_PCD_FIXED_AT_BUILD, BelongsToItem=-1.0)
+        for TokenSpaceGuid,PcdName,Value,Dummy2,Dummy3,ID,Line in Records:
+            Value, DatumType, MaxDatumSize = AnalyzePcdData(Value)
+            # Only use PCD whose value is straitforward (no macro and PCD)
+            if self.SymbolPattern.findall(Value):
+                continue 
+            Name = TokenSpaceGuid+'.'+PcdName
+            # Don't use PCD with different values.
+            if Name in self._Symbols and self._Symbols[Name] != Value:
+                self._Symbols.pop(Name)
+                continue 
+            self._Symbols[Name] = Value
+
+    ## Record a DEFINE/EDK_GLOBAL/[Defines] entry into the proper macro scope.
+    #  Skipped entirely inside a disabled conditional block.
+    def __ProcessDefine(self):
+        if not self._Enabled:
+            return
+
+        Type, Name, Value = self._ValueList
+        Value = ReplaceMacro(Value, self._Macros, False)
+        if self._ItemType == MODEL_META_DATA_DEFINE:
+            # DEFINE in the file header is file-local; inside a section it is
+            # scoped to (section type, arch, module type)
+            if self._SectionType == MODEL_META_DATA_HEADER:
+                self._FileLocalMacros[Name] = Value
+            else:
+                SectionDictKey = self._SectionType, self._Scope[0][0], self._Scope[0][1]
+                if SectionDictKey not in self._SectionsMacroDict:
+                    self._SectionsMacroDict[SectionDictKey] = {}
+                SectionLocalMacros = self._SectionsMacroDict[SectionDictKey]
+                SectionLocalMacros[Name] = Value
+        elif self._ItemType == MODEL_META_DATA_GLOBAL_DEFINE:
+            # EDK_GLOBAL is visible to every file in the build
+            GlobalData.gEdkGlobal[Name] = Value
+        
+        #
+        # Keyword in [Defines] section can be used as Macros
+        #
+        if (self._ItemType == MODEL_META_DATA_HEADER) and (self._SectionType == MODEL_META_DATA_HEADER):
+            self._FileLocalMacros[Name] = Value
+            
+        self._ValueList = [Type, Name, Value]
+
+    ## Evaluate a conditional directive or expand an !include during post-processing.
+    #
+    #  Conditionals push/pop self._DirectiveStack and self._DirectiveEvalStack;
+    #  !include parses the included file in place and splices its records into
+    #  self._Content at the current position.
+    def __ProcessDirective(self):
+        Result = None
+        if self._ItemType in [MODEL_META_DATA_CONDITIONAL_STATEMENT_IF,
+                              MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF]:
+            Macros = self._Macros
+            Macros.update(GlobalData.gGlobalDefines)
+            try:
+                Result = ValueExpression(self._ValueList[1], Macros)()
+            except SymbolNotFound, Exc:
+                # An undefined symbol in !if/!elseif evaluates the branch to False
+                EdkLogger.debug(EdkLogger.DEBUG_5, str(Exc), self._ValueList[1])
+                Result = False
+            except WrnExpression, Excpt:
+                # 
+                # Catch expression evaluation warning here. We need to report
+                # the precise number of line and return the evaluation result
+                #
+                EdkLogger.warn('Parser', "Suspicious expression: %s" % str(Excpt),
+                                File=self._FileWithError, ExtraData=' '.join(self._ValueList), 
+                                Line=self._LineIndex+1)
+                Result = Excpt.result
+
+        if self._ItemType in [MODEL_META_DATA_CONDITIONAL_STATEMENT_IF,
+                              MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF,
+                              MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF]:
+            self._DirectiveStack.append(self._ItemType)
+            if self._ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_IF:
+                Result = bool(Result)
+            else:
+                # !ifdef/!ifndef test macro existence; strip optional $(...) wrapper
+                Macro = self._ValueList[1]
+                Macro = Macro[2:-1] if (Macro.startswith("$(") and Macro.endswith(")")) else Macro
+                Result = Macro in self._Macros
+                if self._ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF:
+                    Result = not Result
+            self._DirectiveEvalStack.append(Result)
+        elif self._ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF:
+            # Invert the previous branch's result, then push this branch's
+            self._DirectiveStack.append(self._ItemType)
+            self._DirectiveEvalStack[-1] = not self._DirectiveEvalStack[-1]
+            self._DirectiveEvalStack.append(bool(Result))
+        elif self._ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE:
+            self._DirectiveStack[-1] = self._ItemType
+            self._DirectiveEvalStack[-1] = not self._DirectiveEvalStack[-1]
+        elif self._ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF:
+            # Back to the nearest !if/!ifdef/!ifndef
+            while self._DirectiveStack:
+                self._DirectiveEvalStack.pop()
+                Directive = self._DirectiveStack.pop()
+                if Directive in [MODEL_META_DATA_CONDITIONAL_STATEMENT_IF,
+                                 MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF,
+                                 MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE,
+                                 MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF]:
+                    break
+        elif self._ItemType == MODEL_META_DATA_INCLUDE:
+            # The included file must be relative to workspace or same directory as DSC file
+            __IncludeMacros = {}
+            #
+            # Allow using system environment variables  in path after !include
+            #
+            __IncludeMacros['WORKSPACE'] = GlobalData.gGlobalDefines['WORKSPACE']
+            if "ECP_SOURCE" in GlobalData.gGlobalDefines.keys():
+                __IncludeMacros['ECP_SOURCE'] = GlobalData.gGlobalDefines['ECP_SOURCE']
+            #
+            # During GenFds phase call DSC parser, will go into this branch.
+            #
+            elif "ECP_SOURCE" in GlobalData.gCommandLineDefines.keys():
+                __IncludeMacros['ECP_SOURCE'] = GlobalData.gCommandLineDefines['ECP_SOURCE']
+
+            __IncludeMacros['EFI_SOURCE'] = GlobalData.gGlobalDefines['EFI_SOURCE']
+            __IncludeMacros['EDK_SOURCE'] = GlobalData.gGlobalDefines['EDK_SOURCE']
+            #
+            # Allow using MACROs comes from [Defines] section to keep compatible. 
+            #
+            __IncludeMacros.update(self._Macros)
+            
+            IncludedFile = NormPath(ReplaceMacro(self._ValueList[1], __IncludeMacros, RaiseError=True))
+            #
+            # First search the include file under the same directory as DSC file
+            #
+            IncludedFile1 = PathClass(IncludedFile, self.MetaFile.Dir)
+            ErrorCode, ErrorInfo1 = IncludedFile1.Validate()
+            if ErrorCode != 0:
+                #
+                # Also search file under the WORKSPACE directory
+                #
+                IncludedFile1 = PathClass(IncludedFile, GlobalData.gWorkspace)
+                ErrorCode, ErrorInfo2 = IncludedFile1.Validate()
+                if ErrorCode != 0:
+                    EdkLogger.error('parser', ErrorCode, File=self._FileWithError, 
+                                    Line=self._LineIndex+1, ExtraData=ErrorInfo1 + "\n"+ ErrorInfo2)
+
+            self._FileWithError = IncludedFile1
+
+            IncludedFileTable = MetaFileStorage(self._Table.Cur, IncludedFile1, MODEL_FILE_DSC, True)
+            Owner = self._Content[self._ContentIndex-1][0]
+            Parser = DscParser(IncludedFile1, self._FileType, IncludedFileTable, 
+                               Owner=Owner, From=Owner)
+
+            # set the parser status with current status
+            Parser._SectionName = self._SectionName
+            Parser._SectionType = self._SectionType
+            Parser._Scope = self._Scope
+            Parser._Enabled = self._Enabled
+            # Parse the included file
+            Parser.Start()
+
+            # update current status with sub-parser's status
+            self._SectionName = Parser._SectionName
+            self._SectionType = Parser._SectionType
+            self._Scope       = Parser._Scope
+            self._Enabled     = Parser._Enabled
+
+            # Insert all records in the table for the included file into dsc file table
+            Records = IncludedFileTable.GetAll()
+            if Records:
+                # Replace the !include record itself with the included records;
+                # _ValueList=None tells _PostProcess to drop this record
+                self._Content[self._ContentIndex:self._ContentIndex] = Records
+                self._Content.pop(self._ContentIndex-1)
+                self._ValueList = None
+                self._ContentIndex -= 1
+                
+                
+    ## Replace macros in a [SkuIds] record; undefined macros are an error
+    def __ProcessSkuId(self):
+        self._ValueList = [ReplaceMacro(Value, self._Macros, RaiseError=True)
+                           for Value in self._ValueList]
+
+    ## Replace macros in an Edk-style library instance record (best effort)
+    def __ProcessLibraryInstance(self):
+        self._ValueList = [ReplaceMacro(Value, self._Macros) for Value in self._ValueList]
+
+    ## Replace macros in the library instance path of a [LibraryClasses] record
+    def __ProcessLibraryClass(self):
+        self._ValueList[1] = ReplaceMacro(self._ValueList[1], self._Macros, RaiseError=True)
+
+    ## Evaluate a PCD record's value, which may be an expression.
+    #  For "<value>|VOID*|..." forms the first field is the value; otherwise
+    #  the last field is.  True/False results are normalized to '1'/'0'.
+    def __ProcessPcd(self):
+        ValueList = GetSplitValueList(self._ValueList[2])
+        #
+        # PCD value can be an expression
+        #
+        if len(ValueList) > 1 and ValueList[1] == 'VOID*':
+            PcdValue = ValueList[0]      
+            try:
+                ValueList[0] = ValueExpression(PcdValue, self._Macros)(True)
+            except WrnExpression, Value:
+                # Keep the warning's partial evaluation result
+                ValueList[0] = Value.result          
+        else:
+            PcdValue = ValueList[-1]
+            try:
+                ValueList[-1] = ValueExpression(PcdValue, self._Macros)(True)
+            except WrnExpression, Value:
+                ValueList[-1] = Value.result
+            
+            if ValueList[-1] == 'True':
+                ValueList[-1] = '1'
+            if ValueList[-1] == 'False':
+                ValueList[-1] = '0'      
+
+        self._ValueList[2] = '|'.join(ValueList)
+
+    ## Replace macros in a [Components] module path (best effort)
+    def __ProcessComponent(self):
+        self._ValueList[0] = ReplaceMacro(self._ValueList[0], self._Macros)
+
+    ## Replace macros in a SOURCE_OVERRIDE_PATH value (best effort)
+    def __ProcessSourceOverridePath(self):
+        self._ValueList[0] = ReplaceMacro(self._ValueList[0], self._Macros)
+
+    ## Replace macros in a [BuildOptions] record; unresolved macros are kept as-is
+    def __ProcessBuildOption(self):
+        self._ValueList = [ReplaceMacro(Value, self._Macros, RaiseError=False)
+                           for Value in self._ValueList]
+
+    ## First-pass dispatch table: section model type -> parser method
+    _SectionParser = {
+        MODEL_META_DATA_HEADER                          :   _DefineParser,
+        MODEL_EFI_SKU_ID                                :   _SkuIdParser,
+        MODEL_EFI_LIBRARY_INSTANCE                      :   _LibraryInstanceParser,
+        MODEL_EFI_LIBRARY_CLASS                         :   _LibraryClassParser,
+        MODEL_PCD_FIXED_AT_BUILD                        :   _PcdParser,
+        MODEL_PCD_PATCHABLE_IN_MODULE                   :   _PcdParser,
+        MODEL_PCD_FEATURE_FLAG                          :   _PcdParser,
+        MODEL_PCD_DYNAMIC_DEFAULT                       :   _PcdParser,
+        MODEL_PCD_DYNAMIC_HII                           :   _PcdParser,
+        MODEL_PCD_DYNAMIC_VPD                           :   _PcdParser,
+        MODEL_PCD_DYNAMIC_EX_DEFAULT                    :   _PcdParser,
+        MODEL_PCD_DYNAMIC_EX_HII                        :   _PcdParser,
+        MODEL_PCD_DYNAMIC_EX_VPD                        :   _PcdParser,
+        MODEL_META_DATA_COMPONENT                       :   _ComponentParser,
+        MODEL_META_DATA_COMPONENT_SOURCE_OVERRIDE_PATH  :   _CompponentSourceOverridePathParser,
+        MODEL_META_DATA_BUILD_OPTION                    :   _BuildOptionParser,
+        MODEL_UNKNOWN                                   :   MetaFileParser._Skip,
+        MODEL_META_DATA_USER_EXTENSION                  :   MetaFileParser._Skip,
+        MODEL_META_DATA_SECTION_HEADER                  :   MetaFileParser._SectionHeaderParser,
+        MODEL_META_DATA_SUBSECTION_HEADER               :   _SubsectionHeaderParser,
+    }
+
+    ## Macros currently visible to the item being parsed (see _GetMacros)
+    _Macros     = property(_GetMacros)
+
+## DEC file parser class
+#
+#   @param      FilePath        The path of package declaration file
+#   @param      FileType        The raw data of DEC file
+#   @param      Table           Database used to retrieve module/package information
+#   @param      Macros          Macros used for replacement in file
+#
+class DecParser(MetaFileParser):
+    # DEC file supported data types (one type per section)
+    # Maps upper-cased section names to record model types; DEFINE reuses the
+    # same keyword constant as the DSC parser.
+    DataType = {
+        TAB_DEC_DEFINES.upper()                     :   MODEL_META_DATA_HEADER,
+        TAB_DSC_DEFINES_DEFINE                      :   MODEL_META_DATA_DEFINE,
+        TAB_INCLUDES.upper()                        :   MODEL_EFI_INCLUDE,
+        TAB_LIBRARY_CLASSES.upper()                 :   MODEL_EFI_LIBRARY_CLASS,
+        TAB_GUIDS.upper()                           :   MODEL_EFI_GUID,
+        TAB_PPIS.upper()                            :   MODEL_EFI_PPI,
+        TAB_PROTOCOLS.upper()                       :   MODEL_EFI_PROTOCOL,
+        TAB_PCDS_FIXED_AT_BUILD_NULL.upper()        :   MODEL_PCD_FIXED_AT_BUILD,
+        TAB_PCDS_PATCHABLE_IN_MODULE_NULL.upper()   :   MODEL_PCD_PATCHABLE_IN_MODULE,
+        TAB_PCDS_FEATURE_FLAG_NULL.upper()          :   MODEL_PCD_FEATURE_FLAG,
+        TAB_PCDS_DYNAMIC_NULL.upper()               :   MODEL_PCD_DYNAMIC,
+        TAB_PCDS_DYNAMIC_EX_NULL.upper()            :   MODEL_PCD_DYNAMIC_EX,
+    }
+
+    ## Constructor of DecParser
+    #
+    #  Initialize object of DecParser
+    #
+    #   @param      FilePath        The path of package declaration file
+    #   @param      FileType        The raw data of DEC file
+    #   @param      Table           Database used to retrieve module/package information
+    #   @param      Macros          Macros used for replacement in file
+    #
+    def __init__(self, FilePath, FileType, Table):
+        # prevent re-initialization
+        if hasattr(self, "_Table"):
+            return
+        MetaFileParser.__init__(self, FilePath, FileType, Table)
+        self._Comments = []
+        self._Version = 0x00010005  # Only EDK2 dec file is supported
+        self.TblFile = EccGlobalData.gDb.TblFile
+        self.FileID = -1  # -1 until Start() looks up/inserts the file record
+
+    ## Parser starter
+    #
+    #  Reads the DEC file line by line, dispatches each line to the section
+    #  parser for the current section, and stores records (plus preceding
+    #  comments) into the table.  Note _SectionType is a list for DEC files,
+    #  since several PCD types may share one section header.
+    def Start(self):
+        Content = ''
+        try:
+            Content = open(str(self.MetaFile), 'r').readlines()
+        except:
+            EdkLogger.error("Parser", FILE_READ_FAILURE, ExtraData=self.MetaFile)
+
+        #
+        # Insert a record for file
+        #
+        Filename = NormPath(self.MetaFile)
+        FileID = self.TblFile.GetFileId(Filename)
+        if FileID:
+            self.FileID = FileID
+        else:
+            self.FileID = self.TblFile.InsertFile(Filename, MODEL_FILE_DEC)
+        
+        for Index in range(0, len(Content)):
+            Line, Comment = CleanString2(Content[Index])
+            self._CurrentLine = Line
+            self._LineIndex = Index
+
+            # save comment for later use
+            if Comment:
+                self._Comments.append((Comment, self._LineIndex+1))
+            # skip empty line
+            if Line == '':
+                continue
+
+            # section header
+            if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END:
+                self._SectionHeaderParser()
+                self._Comments = []
+                continue
+            elif len(self._SectionType) == 0:
+                # Content before any recognized section header is ignored
+                self._Comments = []
+                continue
+
+            # section content
+            self._ValueList = ['','','']
+            self._SectionParser[self._SectionType[0]](self)
+            if self._ValueList == None or self._ItemType == MODEL_META_DATA_DEFINE:
+                self._ItemType = -1
+                self._Comments = []
+                continue
+
+            #
+            # Model, Value1, Value2, Value3, Arch, BelongsToItem=-1, LineBegin=-1,
+            # ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, FeatureFlag='', Enabled=-1
+            #
+            for Arch, ModuleType, Type in self._Scope:
+                self._LastItem = self._Store(
+                    Type,
+                    self._ValueList[0],
+                    self._ValueList[1],
+                    self._ValueList[2],
+                    Arch,
+                    ModuleType,
+                    self._Owner[-1],
+                    self.FileID,
+                    self._LineIndex+1,
+                    -1,
+                    self._LineIndex+1,
+                    -1,
+                    0
+                    )
+                # Attach the collected leading comments to the stored item
+                for Comment, LineNo in self._Comments:
+                    self._Store(
+                        MODEL_META_DATA_COMMENT,
+                        Comment,
+                        self._ValueList[0],
+                        self._ValueList[1],
+                        Arch,
+                        ModuleType,
+                        self._LastItem,
+                        self.FileID,
+                        LineNo,
+                        -1,
+                        LineNo,
+                        -1,
+                        0
+                        )
+            self._Comments = []
+        self._Done()
+
+    ## Collect section-local macros applicable to the current scope.
+    #  Checked from most generic (COMMON, COMMON) to most specific (S1, S2),
+    #  so more specific definitions override generic ones.
+    def _GetApplicableSectionMacro(self):
+        Macros = {}
+        for S1, S2, SectionType in self._Scope:
+            for Scope1, Scope2 in [("COMMON", "COMMON"), ("COMMON", S2), (S1, "COMMON"), (S1, S2)]:
+                if (SectionType, Scope1, Scope2) in self._SectionsMacroDict:
+                    Macros.update(self._SectionsMacroDict[(SectionType, Scope1, Scope2)])
+        return Macros
+
+    ## Section header parser
+    #
+    #   The section header is always in following format:
+    #
+    #       [section_name.arch<.platform|module_type>]
+    #
+    def _SectionHeaderParser(self):
+        self._Scope = []
+        self._SectionName = ''
+        self._SectionType = []
+        ArchList = set()
+        for Item in GetSplitValueList(self._CurrentLine[1:-1], TAB_COMMA_SPLIT):
+            if Item == '':
+                continue
+            ItemList = GetSplitValueList(Item, TAB_SPLIT)
+
+            # different types of PCD are permissible in one section
+            self._SectionName = ItemList[0].upper()
+            if self._SectionName in self.DataType:
+                if self.DataType[self._SectionName] not in self._SectionType:
+                    self._SectionType.append(self.DataType[self._SectionName])
+            else:
+                EdkLogger.warn("Parser", "Unrecognized section", File=self.MetaFile,
+                                Line=self._LineIndex+1, ExtraData=self._CurrentLine)
+                continue
+
+            if MODEL_PCD_FEATURE_FLAG in self._SectionType and len(self._SectionType) > 1:
+                EdkLogger.error(
+                            'Parser',
+                            FORMAT_INVALID,
+                            "%s must not be in the same section of other types of PCD" % TAB_PCDS_FEATURE_FLAG_NULL,
+                            File=self.MetaFile,
+                            Line=self._LineIndex+1,
+                            ExtraData=self._CurrentLine
+                            )
+            # S1 is always Arch
+            if len(ItemList) > 1:
+                S1 = ItemList[1].upper()
+            else:
+                S1 = 'COMMON'
+            ArchList.add(S1)
+            # S2 may be Platform or ModuleType
+            if len(ItemList) > 2:
+                S2 = ItemList[2].upper()
+            else:
+                S2 = 'COMMON'
+            if [S1, S2, self.DataType[self._SectionName]] not in self._Scope:
+                self._Scope.append([S1, S2, self.DataType[self._SectionName]])
+
+        # 'COMMON' must not be used with specific ARCHs at the same section
+        if 'COMMON' in ArchList and len(ArchList) > 1:
+            EdkLogger.error('Parser', FORMAT_INVALID, "'common' ARCH must not be used with specific ARCHs",
+                            File=self.MetaFile, Line=self._LineIndex+1, ExtraData=self._CurrentLine)
+
+    ## [guids], [ppis] and [protocols] section parser
+    @ParseMacro
+    def _GuidParser(self):
+        TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
+        if len(TokenList) < 2:
+            EdkLogger.error('Parser', FORMAT_INVALID, "No GUID name or value specified",
+                            ExtraData=self._CurrentLine + " (<CName> = <GuidValueInCFormat>)",
+                            File=self.MetaFile, Line=self._LineIndex+1)
+        if TokenList[0] == '':
+            EdkLogger.error('Parser', FORMAT_INVALID, "No GUID name specified",
+                            ExtraData=self._CurrentLine + " (<CName> = <GuidValueInCFormat>)",
+                            File=self.MetaFile, Line=self._LineIndex+1)
+        if TokenList[1] == '':
+            EdkLogger.error('Parser', FORMAT_INVALID, "No GUID value specified",
+                            ExtraData=self._CurrentLine + " (<CName> = <GuidValueInCFormat>)",
+                            File=self.MetaFile, Line=self._LineIndex+1)
+        if TokenList[1][0] != '{' or TokenList[1][-1] != '}' or GuidStructureStringToGuidString(TokenList[1]) == '':
+            EdkLogger.error('Parser', FORMAT_INVALID, "Invalid GUID value format",
+                            ExtraData=self._CurrentLine + \
+                                      " (<CName> = <GuidValueInCFormat:{8,4,4,{2,2,2,2,2,2,2,2}}>)",
+                            File=self.MetaFile, Line=self._LineIndex+1)
+        self._ValueList[0] = TokenList[0]
+        #Parse the Guid value format
+        GuidValueList = TokenList[1].strip(' {}').split(',')
+        Index = 0
+        HexList = []
+        if len(GuidValueList) == 11:
+            for GuidValue in GuidValueList:
+                GuidValue = GuidValue.strip()
+                if GuidValue.startswith('0x') or GuidValue.startswith('0X'):
+                    HexList.append('0x' + str(GuidValue[2:]))
+                    Index += 1
+                    continue
+                else:
+                    if GuidValue.startswith('{'):
+                        HexList.append('0x' + str(GuidValue[3:]))
+                        Index += 1
+            self._ValueList[1] = "{ %s, %s, %s, { %s, %s, %s, %s, %s, %s, %s, %s }}" % (HexList[0], HexList[1], HexList[2],HexList[3],HexList[4],HexList[5],HexList[6],HexList[7],HexList[8],HexList[9],HexList[10])
+        else:
+            EdkLogger.error('Parser', FORMAT_INVALID, "Invalid GUID value format",
+                            ExtraData=self._CurrentLine + \
+                                      " (<CName> = <GuidValueInCFormat:{8,4,4,{2,2,2,2,2,2,2,2}}>)",
+                            File=self.MetaFile, Line=self._LineIndex+1)
+            self._ValueList[0] = ''
+
+    ## PCD sections parser
+    #
+    #   [PcdsFixedAtBuild]
+    #   [PcdsPatchableInModule]
+    #   [PcdsFeatureFlag]
+    #   [PcdsDynamicEx
+    #   [PcdsDynamic]
+    #
+    @ParseMacro
+    def _PcdParser(self):
+        TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 1)
+        self._ValueList[0:1] = GetSplitValueList(TokenList[0], TAB_SPLIT)
+        # check PCD information
+        if self._ValueList[0] == '' or self._ValueList[1] == '':
+            EdkLogger.error('Parser', FORMAT_INVALID, "No token space GUID or PCD name specified",
+                            ExtraData=self._CurrentLine + \
+                                      " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
+                            File=self.MetaFile, Line=self._LineIndex+1)
+        # check PCD datum information
+        if len(TokenList) < 2 or TokenList[1] == '':
+            EdkLogger.error('Parser', FORMAT_INVALID, "No PCD Datum information given",
+                            ExtraData=self._CurrentLine + \
+                                      " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
+                            File=self.MetaFile, Line=self._LineIndex+1)
+
+        
+        ValueRe  = re.compile(r'^\s*L?\".*\|.*\"')
+        PtrValue = ValueRe.findall(TokenList[1])
+        
+        # Has VOID* type string, may contain "|" character in the string. 
+        if len(PtrValue) != 0:
+            ptrValueList = re.sub(ValueRe, '', TokenList[1])
+            ValueList    = GetSplitValueList(ptrValueList)
+            ValueList[0] = PtrValue[0]
+        else:
+            ValueList = GetSplitValueList(TokenList[1])
+            
+        
+        # check if there's enough datum information given
+        if len(ValueList) != 3:
+            EdkLogger.error('Parser', FORMAT_INVALID, "Invalid PCD Datum information given",
+                            ExtraData=self._CurrentLine + \
+                                      " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
+                            File=self.MetaFile, Line=self._LineIndex+1)
+        # check default value
+        if ValueList[0] == '':
+            EdkLogger.error('Parser', FORMAT_INVALID, "Missing DefaultValue in PCD Datum information",
+                            ExtraData=self._CurrentLine + \
+                                      " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
+                            File=self.MetaFile, Line=self._LineIndex+1)
+        # check datum type
+        if ValueList[1] == '':
+            EdkLogger.error('Parser', FORMAT_INVALID, "Missing DatumType in PCD Datum information",
+                            ExtraData=self._CurrentLine + \
+                                      " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
+                            File=self.MetaFile, Line=self._LineIndex+1)
+        # check token of the PCD
+        if ValueList[2] == '':
+            EdkLogger.error('Parser', FORMAT_INVALID, "Missing Token in PCD Datum information",
+                            ExtraData=self._CurrentLine + \
+                                      " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
+                            File=self.MetaFile, Line=self._LineIndex+1)
+        # check format of default value against the datum type
+        IsValid, Cause = CheckPcdDatum(ValueList[1], ValueList[0])
+        if not IsValid:
+            EdkLogger.error('Parser', FORMAT_INVALID, Cause, ExtraData=self._CurrentLine,
+                            File=self.MetaFile, Line=self._LineIndex+1)
+
+        if ValueList[0] in ['True', 'true', 'TRUE']:
+            ValueList[0] = '1'
+        elif ValueList[0] in ['False', 'false', 'FALSE']:
+            ValueList[0] = '0'
+
+        self._ValueList[2] = ValueList[0].strip() + '|' + ValueList[1].strip() + '|' + ValueList[2].strip()
+
+    _SectionParser = {
+        MODEL_META_DATA_HEADER          :   MetaFileParser._DefineParser,
+        MODEL_EFI_INCLUDE               :   MetaFileParser._PathParser,
+        MODEL_EFI_LIBRARY_CLASS         :   MetaFileParser._PathParser,
+        MODEL_EFI_GUID                  :   _GuidParser,
+        MODEL_EFI_PPI                   :   _GuidParser,
+        MODEL_EFI_PROTOCOL              :   _GuidParser,
+        MODEL_PCD_FIXED_AT_BUILD        :   _PcdParser,
+        MODEL_PCD_PATCHABLE_IN_MODULE   :   _PcdParser,
+        MODEL_PCD_FEATURE_FLAG          :   _PcdParser,
+        MODEL_PCD_DYNAMIC               :   _PcdParser,
+        MODEL_PCD_DYNAMIC_EX            :   _PcdParser,
+        MODEL_UNKNOWN                   :   MetaFileParser._Skip,
+        MODEL_META_DATA_USER_EXTENSION  :   MetaFileParser._Skip,
+    }
+
+
+## FdfObject
+#
+# This class defined basic Fdf object which is used by inheriting
+# 
+# @param object:       Inherited from object class
+#
+class FdfObject(object):
+    def __init__(self):
+        object.__init__()
+
+## Fdf
+#
+# This class defined the structure used in Fdf object
+# 
+# @param FdfObject:     Inherited from FdfObject class
+# @param Filename:      Input value for Ffilename of Fdf file, default is None
+# @param WorkspaceDir:  Input value for current workspace directory, default is None
+#
+class Fdf(FdfObject):
+    def __init__(self, Filename = None, IsToDatabase = False, WorkspaceDir = None, Database = None):
+        self.WorkspaceDir = WorkspaceDir
+        self.IsToDatabase = IsToDatabase
+        
+        self.Cur = Database.Cur
+        self.TblFile = Database.TblFile
+        self.TblFdf = Database.TblFdf
+        self.FileID = -1
+        self.FileList = {}
+
+        #
+        # Load Fdf file if filename is not None
+        #
+        if Filename != None:
+            self.LoadFdfFile(Filename)
+
+    #
+    # Insert a FDF file record into database
+    #
+    def InsertFile(self, Filename):
+        FileID = -1
+        Filename = NormPath(Filename)
+        if Filename not in self.FileList:
+            FileID = self.TblFile.InsertFile(Filename, MODEL_FILE_FDF)
+            self.FileList[Filename] = FileID
+
+        return self.FileList[Filename]
+            
+    
+    ## Load Fdf file
+    #
+    # Load the file if it exists
+    #
+    # @param Filename:  Input value for filename of Fdf file
+    #
+    def LoadFdfFile(self, Filename):     
+        FileList = []
+        #
+        # Parse Fdf file
+        #
+        Filename = NormPath(Filename)
+        Fdf = FdfParser(Filename)
+        Fdf.ParseFile()
+
+        #
+        # Insert inf file and pcd information
+        #
+        if self.IsToDatabase:
+            (Model, Value1, Value2, Value3, Scope1, Scope2, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled) = \
+            (0, '', '', '', 'COMMON', 'COMMON', -1, -1, -1, -1, -1, -1, 0)
+            for Index in range(0, len(Fdf.Profile.PcdDict)):
+                pass
+            for Key in Fdf.Profile.PcdDict.keys():
+                Model = MODEL_PCD
+                Value1 = Key[1]
+                Value2 = Key[0]
+                FileName = Fdf.Profile.PcdFileLineDict[Key][0]
+                StartLine = Fdf.Profile.PcdFileLineDict[Key][1]
+                BelongsToFile = self.InsertFile(FileName)
+                self.TblFdf.Insert(Model, Value1, Value2, Value3, Scope1, Scope2, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled)
+            for Index in range(0, len(Fdf.Profile.InfList)):
+                Model = MODEL_META_DATA_COMPONENT
+                Value1 = Fdf.Profile.InfList[Index]
+                Value2 = ''
+                FileName = Fdf.Profile.InfFileLineList[Index][0]
+                StartLine = Fdf.Profile.InfFileLineList[Index][1]
+                BelongsToFile = self.InsertFile(FileName)
+                self.TblFdf.Insert(Model, Value1, Value2, Value3, Scope1, Scope2, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled)
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
+if __name__ == '__main__':
+    pass
+
diff --git a/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileTable.py b/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileTable.py
new file mode 100644 (file)
index 0000000..89bc7f2
--- /dev/null
@@ -0,0 +1,332 @@
+## @file\r
+# This file is used to create/update/query/erase a meta file table\r
+#\r
+# Copyright (c) 2008, Intel Corporation. All rights reserved.<BR>\r
+# This program and the accompanying materials\r
+# are licensed and made available under the terms and conditions of the BSD License\r
+# which accompanies this distribution.  The full text of the license may be found at\r
+# http://opensource.org/licenses/bsd-license.php\r
+#\r
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
+#\r
+\r
+##\r
+# Import Modules\r
+#\r
+import uuid\r
+\r
+import Common.EdkLogger as EdkLogger\r
+import EccGlobalData\r
+\r
+from MetaDataTable import Table\r
+from MetaDataTable import ConvertToSqlString\r
+from CommonDataClass.DataClass import MODEL_FILE_DSC, MODEL_FILE_DEC, MODEL_FILE_INF, \\r
+                                      MODEL_FILE_OTHERS\r
+\r
+class MetaFileTable(Table):\r
+    ## Constructor \r
+    def __init__(self, Cursor, MetaFile, FileType, TableName, Temporary = False):\r
+        self.MetaFile = MetaFile\r
+        self.TblFile = EccGlobalData.gDb.TblFile\r
+        if (FileType == MODEL_FILE_INF):\r
+            TableName = "Inf"\r
+        if (FileType == MODEL_FILE_DSC):\r
+            if Temporary:\r
+                TableName = "_%s_%s" % ("Dsc", uuid.uuid4().hex)\r
+            else:\r
+                TableName = "Dsc"\r
+        if (FileType == MODEL_FILE_DEC):\r
+            TableName = "Dec"\r
+\r
+        Table.__init__(self, Cursor, TableName, 0, Temporary)\r
+        self.Create(False)\r
+\r
+\r
+## Python class representation of table storing module data\r
+class ModuleTable(MetaFileTable):\r
+    _COLUMN_ = '''\r
+        ID REAL PRIMARY KEY,\r
+        Model INTEGER NOT NULL,\r
+        Value1 TEXT NOT NULL,\r
+        Value2 TEXT,\r
+        Value3 TEXT,\r
+        Scope1 TEXT,\r
+        Scope2 TEXT,\r
+        BelongsToItem REAL NOT NULL,\r
+        BelongsToFile SINGLE NOT NULL,\r
+        StartLine INTEGER NOT NULL,\r
+        StartColumn INTEGER NOT NULL,\r
+        EndLine INTEGER NOT NULL,\r
+        EndColumn INTEGER NOT NULL,\r
+        Enabled INTEGER DEFAULT 0\r
+        '''\r
+    # used as table end flag, in case the changes to database is not committed to db file\r
+    _DUMMY_ = "-1, -1, '====', '====', '====', '====', '====', -1, -1, -1, -1, -1, -1, -1"\r
+\r
+    ## Constructor\r
+    def __init__(self, Cursor):\r
+        MetaFileTable.__init__(self, Cursor, '', MODEL_FILE_INF, "Inf", False)\r
+\r
+    ## Insert a record into table Inf\r
+    #\r
+    # @param Model:          Model of a Inf item\r
+    # @param Value1:         Value1 of a Inf item\r
+    # @param Value2:         Value2 of a Inf item\r
+    # @param Value3:         Value3 of a Inf item\r
+    # @param Scope1:         Arch of a Inf item\r
+    # @param Scope2          Platform os a Inf item\r
+    # @param BelongsToItem:  The item belongs to which another item\r
+    # @param StartLine:      StartLine of a Inf item\r
+    # @param StartColumn:    StartColumn of a Inf item\r
+    # @param EndLine:        EndLine of a Inf item\r
+    # @param EndColumn:      EndColumn of a Inf item\r
+    # @param Enabled:        If this item enabled\r
+    #\r
+    def Insert(self, Model, Value1, Value2, Value3, Scope1='COMMON', Scope2='COMMON',\r
+               BelongsToItem=-1, BelongsToFile = -1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=0):\r
+        (Value1, Value2, Value3, Scope1, Scope2) = ConvertToSqlString((Value1, Value2, Value3, Scope1, Scope2))\r
+        return Table.Insert(\r
+                        self, \r
+                        Model, \r
+                        Value1, \r
+                        Value2, \r
+                        Value3, \r
+                        Scope1, \r
+                        Scope2,\r
+                        BelongsToItem,\r
+                        BelongsToFile, \r
+                        StartLine, \r
+                        StartColumn, \r
+                        EndLine, \r
+                        EndColumn, \r
+                        Enabled\r
+                        )\r
+\r
+    ## Query table\r
+    #\r
+    # @param    Model:      The Model of Record \r
+    # @param    Arch:       The Arch attribute of Record \r
+    # @param    Platform    The Platform attribute of Record \r
+    #\r
+    # @retval:       A recordSet of all found records \r
+    #\r
+    def Query(self, Model, Arch=None, Platform=None):\r
+        ConditionString = "Model=%s AND Enabled>=0" % Model\r
+        ValueString = "Value1,Value2,Value3,Scope1,Scope2,ID,StartLine"\r
+\r
+        if Arch != None and Arch != 'COMMON':\r
+            ConditionString += " AND (Scope1='%s' OR Scope1='COMMON')" % Arch\r
+        if Platform != None and Platform != 'COMMON':\r
+            ConditionString += " AND (Scope2='%s' OR Scope2='COMMON' OR Scope2='DEFAULT')" % Platform\r
+\r
+        SqlCommand = "SELECT %s FROM %s WHERE %s" % (ValueString, self.Table, ConditionString)\r
+        return self.Exec(SqlCommand)\r
+\r
+## Python class representation of table storing package data\r
+class PackageTable(MetaFileTable):\r
+    _COLUMN_ = '''\r
+        ID REAL PRIMARY KEY,\r
+        Model INTEGER NOT NULL,\r
+        Value1 TEXT NOT NULL,\r
+        Value2 TEXT,\r
+        Value3 TEXT,\r
+        Scope1 TEXT,\r
+        Scope2 TEXT,\r
+        BelongsToItem REAL NOT NULL,\r
+        BelongsToFile SINGLE NOT NULL,\r
+        StartLine INTEGER NOT NULL,\r
+        StartColumn INTEGER NOT NULL,\r
+        EndLine INTEGER NOT NULL,\r
+        EndColumn INTEGER NOT NULL,\r
+        Enabled INTEGER DEFAULT 0\r
+        '''\r
+    # used as table end flag, in case the changes to database is not committed to db file\r
+    _DUMMY_ = "-1, -1, '====', '====', '====', '====', '====', -1, -1, -1, -1, -1, -1, -1"\r
+\r
+    ## Constructor\r
+    def __init__(self, Cursor):\r
+        MetaFileTable.__init__(self, Cursor, '', MODEL_FILE_DEC, "Dec", False)\r
+\r
+    ## Insert table\r
+    #\r
+    # Insert a record into table Dec\r
+    #\r
+    # @param Model:          Model of a Dec item\r
+    # @param Value1:         Value1 of a Dec item\r
+    # @param Value2:         Value2 of a Dec item\r
+    # @param Value3:         Value3 of a Dec item\r
+    # @param Scope1:         Arch of a Dec item\r
+    # @param Scope2:         Module type of a Dec item\r
+    # @param BelongsToItem:  The item belongs to which another item\r
+    # @param StartLine:      StartLine of a Dec item\r
+    # @param StartColumn:    StartColumn of a Dec item\r
+    # @param EndLine:        EndLine of a Dec item\r
+    # @param EndColumn:      EndColumn of a Dec item\r
+    # @param Enabled:        If this item enabled\r
+    #\r
+    def Insert(self, Model, Value1, Value2, Value3, Scope1='COMMON', Scope2='COMMON',\r
+               BelongsToItem=-1, BelongsToFile = -1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=0):\r
+        (Value1, Value2, Value3, Scope1, Scope2) = ConvertToSqlString((Value1, Value2, Value3, Scope1, Scope2))\r
+        return Table.Insert(\r
+                        self, \r
+                        Model, \r
+                        Value1, \r
+                        Value2, \r
+                        Value3, \r
+                        Scope1, \r
+                        Scope2,\r
+                        BelongsToItem,\r
+                        BelongsToFile, \r
+                        StartLine, \r
+                        StartColumn, \r
+                        EndLine, \r
+                        EndColumn, \r
+                        Enabled\r
+                        )\r
+\r
+    ## Query table\r
+    #\r
+    # @param    Model:  The Model of Record \r
+    # @param    Arch:   The Arch attribute of Record \r
+    #\r
+    # @retval:       A recordSet of all found records \r
+    #\r
+    def Query(self, Model, Arch=None):\r
+        ConditionString = "Model=%s AND Enabled>=0" % Model\r
+        ValueString = "Value1,Value2,Value3,Scope1,ID,StartLine"\r
+\r
+        if Arch != None and Arch != 'COMMON':\r
+            ConditionString += " AND (Scope1='%s' OR Scope1='COMMON')" % Arch\r
+\r
+        SqlCommand = "SELECT %s FROM %s WHERE %s" % (ValueString, self.Table, ConditionString)\r
+        return self.Exec(SqlCommand)\r
+\r
+## Python class representation of table storing platform data\r
+class PlatformTable(MetaFileTable):\r
+    _COLUMN_ = '''\r
+        ID REAL PRIMARY KEY,\r
+        Model INTEGER NOT NULL,\r
+        Value1 TEXT NOT NULL,\r
+        Value2 TEXT,\r
+        Value3 TEXT,\r
+        Scope1 TEXT,\r
+        Scope2 TEXT,\r
+        BelongsToItem REAL NOT NULL,\r
+        BelongsToFile SINGLE NOT NULL,\r
+        FromItem REAL NOT NULL,\r
+        StartLine INTEGER NOT NULL,\r
+        StartColumn INTEGER NOT NULL,\r
+        EndLine INTEGER NOT NULL,\r
+        EndColumn INTEGER NOT NULL,\r
+        Enabled INTEGER DEFAULT 0\r
+        '''\r
+    # used as table end flag, in case the changes to database is not committed to db file\r
+    _DUMMY_ = "-1, -1, '====', '====', '====', '====', '====', -1, -1, -1, -1, -1, -1, -1, -1"\r
+\r
+    ## Constructor\r
+    def __init__(self, Cursor, MetaFile = '', FileType = MODEL_FILE_DSC, Temporary = False):\r
+        MetaFileTable.__init__(self, Cursor, MetaFile, FileType, "Dsc", Temporary)\r
+\r
+    ## Insert table\r
+    #\r
+    # Insert a record into table Dsc\r
+    #\r
+    # @param Model:          Model of a Dsc item\r
+    # @param Value1:         Value1 of a Dsc item\r
+    # @param Value2:         Value2 of a Dsc item\r
+    # @param Value3:         Value3 of a Dsc item\r
+    # @param Scope1:         Arch of a Dsc item\r
+    # @param Scope2:         Module type of a Dsc item\r
+    # @param BelongsToItem:  The item belongs to which another item\r
+    # @param FromItem:       The item belongs to which dsc file\r
+    # @param StartLine:      StartLine of a Dsc item\r
+    # @param StartColumn:    StartColumn of a Dsc item\r
+    # @param EndLine:        EndLine of a Dsc item\r
+    # @param EndColumn:      EndColumn of a Dsc item\r
+    # @param Enabled:        If this item enabled\r
+    #\r
+    def Insert(self, Model, Value1, Value2, Value3, Scope1='COMMON', Scope2='COMMON', BelongsToItem=-1, BelongsToFile = -1,\r
+               FromItem=-1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=1):\r
+        (Value1, Value2, Value3, Scope1, Scope2) = ConvertToSqlString((Value1, Value2, Value3, Scope1, Scope2))\r
+        return Table.Insert(\r
+                        self, \r
+                        Model, \r
+                        Value1, \r
+                        Value2, \r
+                        Value3, \r
+                        Scope1, \r
+                        Scope2,\r
+                        BelongsToItem, \r
+                        BelongsToFile,\r
+                        FromItem,\r
+                        StartLine, \r
+                        StartColumn, \r
+                        EndLine, \r
+                        EndColumn, \r
+                        Enabled\r
+                        )\r
+\r
+    ## Query table\r
+    #\r
+    # @param Model:          The Model of Record \r
+    # @param Scope1:         Arch of a Dsc item\r
+    # @param Scope2:         Module type of a Dsc item\r
+    # @param BelongsToItem:  The item belongs to which another item\r
+    # @param FromItem:       The item belongs to which dsc file\r
+    #\r
+    # @retval:       A recordSet of all found records \r
+    #\r
+    def Query(self, Model, Scope1=None, Scope2=None, BelongsToItem=None, FromItem=None):\r
+        ConditionString = "Model=%s AND Enabled>0" % Model\r
+        ValueString = "Value1,Value2,Value3,Scope1,Scope2,ID,StartLine"\r
+\r
+        if Scope1 != None and Scope1 != 'COMMON':\r
+            ConditionString += " AND (Scope1='%s' OR Scope1='COMMON')" % Scope1\r
+        if Scope2 != None and Scope2 != 'COMMON':\r
+            ConditionString += " AND (Scope2='%s' OR Scope2='COMMON' OR Scope2='DEFAULT')" % Scope2\r
+\r
+        if BelongsToItem != None:\r
+            ConditionString += " AND BelongsToItem=%s" % BelongsToItem\r
+        else:\r
+            ConditionString += " AND BelongsToItem<0"\r
+\r
+        if FromItem != None:\r
+            ConditionString += " AND FromItem=%s" % FromItem\r
+\r
+        SqlCommand = "SELECT %s FROM %s WHERE %s" % (ValueString, self.Table, ConditionString)\r
+        return self.Exec(SqlCommand)\r
+\r
+## Factory class to produce different storage for different type of meta-file\r
+class MetaFileStorage(object):\r
+    _FILE_TABLE_ = {\r
+        MODEL_FILE_INF      :   ModuleTable,\r
+        MODEL_FILE_DEC      :   PackageTable,\r
+        MODEL_FILE_DSC      :   PlatformTable,\r
+        MODEL_FILE_OTHERS   :   MetaFileTable,\r
+    }\r
+\r
+    _FILE_TYPE_ = {\r
+        ".inf"  : MODEL_FILE_INF,\r
+        ".dec"  : MODEL_FILE_DEC,\r
+        ".dsc"  : MODEL_FILE_DSC,\r
+    }\r
+\r
+    ## Constructor\r
+    def __new__(Class, Cursor, MetaFile, FileType=None, Temporary=False):\r
+        # no type given, try to find one\r
+        if not FileType:\r
+            if MetaFile.Type in self._FILE_TYPE_:\r
+                FileType = Class._FILE_TYPE_[MetaFile.Type]\r
+            else:\r
+                FileType = MODEL_FILE_OTHERS\r
+\r
+        # don't pass the type around if it's well known\r
+        if FileType == MODEL_FILE_OTHERS:\r
+            Args = (Cursor, MetaFile, FileType, Temporary)\r
+        else:\r
+            Args = (Cursor, MetaFile, FileType, Temporary)\r
+\r
+        # create the storage object and return it to caller\r
+        return Class._FILE_TABLE_[FileType](*Args)\r
+\r
diff --git a/BaseTools/Source/Python/Ecc/MetaFileWorkspace/__init__.py b/BaseTools/Source/Python/Ecc/MetaFileWorkspace/__init__.py
new file mode 100644 (file)
index 0000000..05cd34b
--- /dev/null
@@ -0,0 +1,15 @@
+## @file\r
+# Python 'Workspace' package initialization file.\r
+#\r
+# This file is required to make Python interpreter treat the directory\r
+# as containing package.\r
+#\r
+# Copyright (c) 2008 - 2010, Intel Corporation. All rights reserved.<BR>\r
+# This program and the accompanying materials\r
+# are licensed and made available under the terms and conditions of the BSD License\r
+# which accompanies this distribution.  The full text of the license may be found at\r
+# http://opensource.org/licenses/bsd-license.php\r
+#\r
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
+#\r
index 532f4a0918a8a16f2fa642ea1d875a3a5b4533a6..ea7d99fecd22186bf7fd27118dee2b18845755c1 100644 (file)
@@ -2305,28 +2305,90 @@ def CheckFileHeaderDoxygenComments(FullFileName):
                    """ % (FileTable, DataClass.MODEL_IDENTIFIER_COMMENT)\r
     ResultSet = Db.TblFile.Exec(SqlStatement)\r
     if len(ResultSet) == 0:\r
-        PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'No Comment appear at the very beginning of file.', 'File', FileID)\r
+        PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'No File License header appear at the very beginning of file.', 'File', FileID)\r
         return ErrorMsgList\r
 \r
-    IsFoundError1 = True\r
-    IsFoundError2 = True\r
-    IsFoundError3 = True\r
+    NoHeaderCommentStartFlag = True\r
+    NoHeaderCommentEndFlag = True\r
+    NoHeaderCommentPeriodFlag = True\r
+    NoCopyrightFlag = True\r
+    NoLicenseFlag = True\r
+    NoRevReferFlag = True\r
+    NextLineIndex = 0\r
     for Result in ResultSet:\r
+        FileStartFlag = False\r
+        CommentStrList = []\r
         CommentStr = Result[0].strip()\r
+        CommentStrListTemp = CommentStr.split('\n')\r
+        if (len(CommentStrListTemp) <= 1):\r
+            # For Mac\r
+            CommentStrListTemp = CommentStr.split('\r')\r
+        # Skip the content before the file  header    \r
+        for CommentLine in CommentStrListTemp:\r
+            if CommentLine.strip().startswith('/** @file'):\r
+                FileStartFlag = True\r
+            if FileStartFlag ==  True:\r
+                CommentStrList.append(CommentLine)\r
+                       \r
         ID = Result[1]\r
-        if CommentStr.startswith('/** @file'):\r
-            IsFoundError1 = False\r
-        if CommentStr.endswith('**/'):\r
-            IsFoundError2 = False\r
-        if CommentStr.find('.') != -1:\r
-            IsFoundError3 = False\r
-\r
-    if IsFoundError1:\r
+        Index = 0\r
+        if CommentStrList and CommentStrList[0].strip().startswith('/** @file'):\r
+            NoHeaderCommentStartFlag = False\r
+        else:\r
+            continue\r
+        if CommentStrList and CommentStrList[-1].strip().endswith('**/'):\r
+            NoHeaderCommentEndFlag = False\r
+        else:\r
+            continue\r
+\r
+        for CommentLine in CommentStrList:\r
+            Index = Index + 1\r
+            NextLineIndex = Index\r
+            if CommentLine.startswith('/** @file'):\r
+                continue\r
+            if CommentLine.startswith('**/'):\r
+                break\r
+            # Check whether C File header Comment content start with two spaces.\r
+            if EccGlobalData.gConfig.HeaderCheckCFileCommentStartSpacesNum == '1' or EccGlobalData.gConfig.HeaderCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':\r
+                if CommentLine.startswith('/** @file') == False and CommentLine.startswith('**/') == False and CommentLine.strip() and CommentLine.startswith('  ') == False:\r
+                    PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'File header comment content should start with two spaces at each line', FileTable, ID)\r
+            \r
+            CommentLine = CommentLine.strip()\r
+            if CommentLine.startswith('Copyright'):\r
+                NoCopyrightFlag = False\r
+                if CommentLine.find('All rights reserved') == -1:\r
+                    PrintErrorMsg(ERROR_HEADER_CHECK_FILE, '""All rights reserved"" announcement should be following the ""Copyright"" at the same line', FileTable, ID)\r
+                if CommentLine.endswith('<BR>') == -1:\r
+                    PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'The ""<BR>"" at the end of the Copyright line is required', FileTable, ID)\r
+                if NextLineIndex < len(CommentStrList) and CommentStrList[NextLineIndex].strip().startswith('Copyright') == False and CommentStrList[NextLineIndex].strip():\r
+                    NoLicenseFlag = False\r
+            if CommentLine.startswith('@par Revision Reference:'):\r
+                NoRevReferFlag = False\r
+                RefListFlag = False\r
+                for RefLine in CommentStrList[NextLineIndex:]:\r
+                    if RefLine.strip() and (NextLineIndex + 1) < len(CommentStrList) and CommentStrList[NextLineIndex+1].strip() and CommentStrList[NextLineIndex+1].strip().startswith('**/') == False:\r
+                        RefListFlag = True\r
+                    if RefLine.strip() == False or RefLine.strip().startswith('**/'):\r
+                        RefListFlag = False\r
+                        break\r
+                    # Check whether C File header Comment's each reference at list should begin with a bullet character.\r
+                    if EccGlobalData.gConfig.HeaderCheckCFileCommentReferenceFormat == '1' or EccGlobalData.gConfig.HeaderCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':\r
+                        if RefListFlag == True:\r
+                            if RefLine.strip() and RefLine.strip().startswith('**/') == False and RefLine.startswith('  -') == False:                            \r
+                                PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'Each reference on a separate line should begin with a bullet character ""-"" ', FileTable, ID)                    \r
+    \r
+    if NoHeaderCommentStartFlag:\r
         PrintErrorMsg(ERROR_DOXYGEN_CHECK_FILE_HEADER, 'File header comment should begin with ""/** @file""', FileTable, ID)\r
-    if IsFoundError2:\r
+        return\r
+    if NoHeaderCommentEndFlag:\r
         PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'File header comment should end with ""**/""', FileTable, ID)\r
-    if IsFoundError3:\r
-        PrintErrorMsg(ERROR_DOXYGEN_CHECK_COMMENT_DESCRIPTION, 'Comment description should end with period "".""', FileTable, ID)\r
+        return\r
+    if NoCopyrightFlag:\r
+        PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'File header comment missing the ""Copyright""', FileTable, ID)\r
+    #Check whether C File header Comment have the License immediately after the ""Copyright"" line.\r
+    if EccGlobalData.gConfig.HeaderCheckCFileCommentLicenseFormat == '1' or EccGlobalData.gConfig.HeaderCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':\r
+        if NoLicenseFlag:\r
+            PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'File header comment should have the License immediately after the ""Copyright"" line', FileTable, ID)\r
 \r
 def CheckFuncHeaderDoxygenComments(FullFileName):\r
     ErrorMsgList = []\r
index 357c52ced0f39edc3c5add83a40e487a4e7151b9..c55276fce75bd073ae782a0aaa20fced7df4fe5f 100644 (file)
@@ -97,6 +97,14 @@ HeaderCheckAll = 0
 HeaderCheckFile = 1\r
 # Check whether Function header exists\r
 HeaderCheckFunction = 1\r
+# Check whether Meta data File header Comment End with '##'\r
+HeaderCheckFileCommentEnd = 0\r
+# Check whether C File header Comment content start with two spaces\r
+HeaderCheckCFileCommentStartSpacesNum = 0\r
+# Check whether C File header Comment's each reference at list should begin with a bullet character '-'\r
+HeaderCheckCFileCommentReferenceFormat = 0\r
+# Check whether C File header Comment has the License immediately after the ""Copyright"" line\r
+HeaderCheckCFileCommentLicenseFormat = 0\r
 \r
 #\r
 # C Function Layout Checking\r
index 5ad00cfbb00e431fcf38e90f42b7b70377478254..d5419d0e06618b3e8c1a93aef31cd3389b5b6306 100644 (file)
@@ -22,6 +22,7 @@ from CommonDataClass.DataClass import *
 from Common.String import CleanString, GetSplitValueList, ReplaceMacro\r
 import EotGlobalData\r
 from Common.Misc import sdict\r
+from Common.String import GetSplitList\r
 \r
 ## PreProcess() method\r
 #\r
index d3d50b638e771624b6f44613ee871999c22ab62e..bdb7a777707bc0e9f8443c6112673e81d2e1ffac 100644 (file)
@@ -52,6 +52,8 @@ from Common.Expression import *
 from Common import GlobalData\r
 from Common.String import ReplaceMacro\r
 \r
+from Common.Misc import tdict\r
+\r
 import re\r
 import os\r
 \r
@@ -77,10 +79,6 @@ RegionSizePattern = re.compile("\s*(?P<base>(?:0x|0X)?[a-fA-F0-9]+)\s*\|\s*(?P<s
 RegionSizeGuidPattern = re.compile("\s*(?P<base>\w+\.\w+)\s*\|\s*(?P<size>\w+\.\w+)\s*")\r
 \r
 IncludeFileList = []\r
-# Macro passed from command line, which has greatest priority and can NOT be overridden by those in FDF\r
-InputMacroDict = {}\r
-# All Macro values when parsing file, not replace existing Macro\r
-AllMacroList = []\r
 \r
 def GetRealFileLine (File, Line):\r
 \r
@@ -182,7 +180,10 @@ class FileProfile :
 \r
         self.PcdDict = {}\r
         self.InfList = []\r
-\r
+        # ECC will use this Dict and List information\r
+        self.PcdFileLineDict = {}\r
+        self.InfFileLineList = []\r
+        \r
         self.FdDict = {}\r
         self.FdNameNotSet = False\r
         self.FvDict = {}\r
@@ -215,14 +216,17 @@ class FdfParser:
         self.__Token = ""\r
         self.__SkippedChars = ""\r
 \r
+        # Used to record current section info\r
+        self.__CurSection = []\r
+        # Key: [section name, UI name, arch]\r
+        # Value: {MACRO_NAME : MACRO_VALUE}\r
+        self.__MacroDict = tdict(True, 3)\r
+        self.__PcdDict = {}\r
+\r
         self.__WipeOffArea = []\r
         if GenFdsGlobalVariable.WorkSpaceDir == '':\r
             GenFdsGlobalVariable.WorkSpaceDir = os.getenv("WORKSPACE")\r
 \r
-        InputMacroDict.update(GlobalData.gPlatformDefines)\r
-        InputMacroDict.update(GlobalData.gGlobalDefines)\r
-        InputMacroDict.update(GlobalData.gCommandLineDefines)\r
-\r
     ## __IsWhiteSpace() method\r
     #\r
     #   Whether char at current FileBufferPos is whitespace\r
@@ -381,30 +385,6 @@ class FdfParser:
         self.Profile.FileLinesList = [list(s) for s in self.Profile.FileLinesList]\r
         self.Profile.FileLinesList[-1].append(' ')\r
 \r
-    def __ReplaceMacros(self, Str, File, Line):\r
-        MacroEnd = 0\r
-        while Str.find('$(', MacroEnd) >= 0:\r
-            MacroStart = Str.find('$(', MacroEnd)\r
-            if Str.find(')', MacroStart) > 0:\r
-                MacroEnd = Str.find(')', MacroStart)\r
-                Name = Str[MacroStart + 2 : MacroEnd]\r
-                Value = None\r
-                if Name in InputMacroDict:\r
-                    Value = InputMacroDict[Name]\r
-\r
-                else:\r
-                    for Profile in AllMacroList:\r
-                        if Profile.FileName == File and Profile.MacroName == Name and Profile.DefinedAtLine <= Line:\r
-                            Value = Profile.MacroValue\r
-\r
-                if Value != None:\r
-                    Str = Str.replace('$(' + Name + ')', Value)\r
-                    MacroEnd = MacroStart + len(Value)\r
-\r
-            else:\r
-                raise Warning("Macro not complete", self.FileName, self.CurrentLineNumber)\r
-        return Str\r
-\r
     def __ReplaceFragment(self, StartPos, EndPos, Value = ' '):\r
         if StartPos[0] == EndPos[0]:\r
             Offset = StartPos[1]\r
@@ -446,7 +426,67 @@ class FdfParser:
                           self.FileName, self.CurrentLineNumber)\r
         MacroName = MacroName[2:-1]\r
         return MacroName, NotFlag\r
-    \r
+\r
+    def __SetMacroValue(self, Macro, Value):\r
+        if not self.__CurSection:\r
+            return\r
+\r
+        MacroDict = {}\r
+        if not self.__MacroDict[self.__CurSection[0], self.__CurSection[1], self.__CurSection[2]]:\r
+            self.__MacroDict[self.__CurSection[0], self.__CurSection[1], self.__CurSection[2]] = MacroDict\r
+        else:\r
+            MacroDict = self.__MacroDict[self.__CurSection[0], self.__CurSection[1], self.__CurSection[2]]\r
+        MacroDict[Macro] = Value\r
+\r
+    def __GetMacroValue(self, Macro):\r
+        # Highest priority\r
+        if Macro in GlobalData.gCommandLineDefines:\r
+            return GlobalData.gCommandLineDefines[Macro]\r
+        if Macro in GlobalData.gGlobalDefines:\r
+            return GlobalData.gGlobalDefines[Macro]\r
+\r
+        if self.__CurSection:\r
+            MacroDict = self.__MacroDict[\r
+                        self.__CurSection[0],\r
+                        self.__CurSection[1],\r
+                        self.__CurSection[2]\r
+            ]\r
+            if MacroDict and Macro in MacroDict:\r
+                return MacroDict[Macro]\r
+\r
+        # Lowest priority\r
+        if Macro in GlobalData.gPlatformDefines:\r
+            return GlobalData.gPlatformDefines[Macro]\r
+        return None\r
+\r
+    def __SectionHeaderParser(self, Section):\r
+        # [Defines]\r
+        # [FD.UiName]: use dummy instead if UI name is optional\r
+        # [FV.UiName]\r
+        # [Capsule.UiName]\r
+        # [Rule]: don't take rule section into account, macro is not allowed in this section\r
+        # [VTF.arch.UiName, arch]\r
+        # [OptionRom.DriverName]\r
+        self.__CurSection = []\r
+        Section = Section.strip()[1:-1].upper().replace(' ', '').strip('.')\r
+        ItemList = Section.split('.')\r
+        Item = ItemList[0]\r
+        if Item == '' or Item == 'RULE':\r
+            return\r
+\r
+        if Item == 'DEFINES':\r
+            self.__CurSection = ['COMMON', 'COMMON', 'COMMON']\r
+        elif Item == 'VTF' and len(ItemList) == 3:\r
+            UiName = ItemList[2]\r
+            Pos = UiName.find(',')\r
+            if Pos != -1:\r
+                UiName = UiName[:Pos]\r
+            self.__CurSection = ['VTF', UiName, ItemList[1]]\r
+        elif len(ItemList) > 1:\r
+            self.__CurSection = [ItemList[0], ItemList[1], 'COMMON']\r
+        elif len(ItemList) > 0:\r
+            self.__CurSection = [ItemList[0], 'DUMMY', 'COMMON']\r
+\r
     ## PreprocessFile() method\r
     #\r
     #   Preprocess file contents, replace comments with spaces.\r
@@ -530,12 +570,17 @@ class FdfParser:
                     raise Warning("expected include file name", self.FileName, self.CurrentLineNumber)\r
                 IncFileName = self.__Token\r
                 __IncludeMacros = {}\r
-                __IncludeMacros['WORKSPACE'] = InputMacroDict['WORKSPACE']\r
-                __IncludeMacros['ECP_SOURCE'] = InputMacroDict['ECP_SOURCE']\r
-                __IncludeMacros['EFI_SOURCE'] = InputMacroDict['EFI_SOURCE']\r
-                __IncludeMacros['EDK_SOURCE'] = InputMacroDict['EDK_SOURCE']\r
-                \r
-                IncludedFile = NormPath(ReplaceMacro(IncFileName, __IncludeMacros, RaiseError=True))\r
+                for Macro in ['WORKSPACE', 'ECP_SOURCE', 'EFI_SOURCE', 'EDK_SOURCE']:\r
+                    MacroVal = self.__GetMacroValue(Macro)\r
+                    if MacroVal:\r
+                        __IncludeMacros[Macro] = MacroVal\r
+\r
+                try:\r
+                    IncludedFile = NormPath(ReplaceMacro(IncFileName, __IncludeMacros, RaiseError=True))\r
+                except:\r
+                    raise Warning("only these system environment variables are permitted to start the path of the included file: "\r
+                                  "$(WORKSPACE), $(ECP_SOURCE), $(EFI_SOURCE), $(EDK_SOURCE)",\r
+                                  self.FileName, self.CurrentLineNumber)\r
                 #\r
                 # First search the include file under the same directory as FDF file\r
                 #\r
@@ -545,7 +590,12 @@ class FdfParser:
                     #\r
                     # Then search the include file under the same directory as DSC file\r
                     #\r
-                    IncludedFile1 = PathClass(IncludedFile, GenFdsGlobalVariable.ActivePlatform.Dir)\r
+                    PlatformDir = ''\r
+                    if GenFdsGlobalVariable.ActivePlatform:\r
+                        PlatformDir = GenFdsGlobalVariable.ActivePlatform.Dir\r
+                    elif GlobalData.gActivePlatform:\r
+                        PlatformDir = GlobalData.gActivePlatform.MetaFile.Dir\r
+                    IncludedFile1 = PathClass(IncludedFile, PlatformDir)\r
                     ErrorCode = IncludedFile1.Validate()[0]\r
                     if ErrorCode != 0:\r
                         #\r
@@ -554,7 +604,7 @@ class FdfParser:
                         IncludedFile1 = PathClass(IncludedFile, GlobalData.gWorkspace)\r
                         ErrorCode = IncludedFile1.Validate()[0]\r
                         if ErrorCode != 0:\r
-                            raise Warning("The include file does not exist under below directories: \n%s\n%s\n%s\n"%(os.path.dirname(self.FileName), GenFdsGlobalVariable.ActivePlatform.Dir, GlobalData.gWorkspace), \r
+                            raise Warning("The include file does not exist under below directories: \n%s\n%s\n%s\n"%(os.path.dirname(self.FileName), PlatformDir, GlobalData.gWorkspace), \r
                                           self.FileName, self.CurrentLineNumber)\r
 \r
                 IncFileProfile = IncludeFileProfile(IncludedFile1.Path)\r
@@ -608,9 +658,47 @@ class FdfParser:
         # IfList is a stack of if branches with elements of list [Pos, CondSatisfied, BranchDetermined]\r
         IfList = []\r
         RegionLayoutLine = 0\r
+        ReplacedLine = -1\r
         while self.__GetNextToken():\r
+            # Determine section name and the location dependent macro\r
+            if self.__GetIfListCurrentItemStat(IfList):\r
+                if self.__Token.startswith('['):\r
+                    Header = self.__Token\r
+                    if not self.__Token.endswith(']'):\r
+                        self.__SkipToToken(']')\r
+                        Header += self.__SkippedChars\r
+                    if Header.find('$(') != -1:\r
+                        raise Warning("macro cannot be used in section header", self.FileName, self.CurrentLineNumber)\r
+                    self.__SectionHeaderParser(Header)\r
+                    continue\r
+                # Replace macros except in RULE section or out of section\r
+                elif self.__CurSection and ReplacedLine != self.CurrentLineNumber:\r
+                    ReplacedLine = self.CurrentLineNumber\r
+                    self.__UndoToken()\r
+                    CurLine = self.Profile.FileLinesList[ReplacedLine - 1]\r
+                    PreIndex = 0\r
+                    StartPos = CurLine.find('$(', PreIndex)\r
+                    EndPos = CurLine.find(')', StartPos+2)\r
+                    while StartPos != -1 and EndPos != -1:\r
+                        MacroName = CurLine[StartPos+2 : EndPos]\r
+                        MacorValue = self.__GetMacroValue(MacroName)\r
+                        if MacorValue != None:\r
+                            CurLine = CurLine.replace('$(' + MacroName + ')', MacorValue, 1)\r
+                            if MacorValue.find('$(') != -1:\r
+                                PreIndex = StartPos\r
+                            else:\r
+                                PreIndex = StartPos + len(MacorValue)\r
+                        else:\r
+                            PreIndex = EndPos + 1\r
+                        StartPos = CurLine.find('$(', PreIndex)\r
+                        EndPos = CurLine.find(')', StartPos+2)\r
+                    self.Profile.FileLinesList[ReplacedLine - 1] = CurLine\r
+                    continue\r
+\r
             if self.__Token == 'DEFINE':\r
-                if self.__GetIfListCurrentItemStat(IfList): \r
+                if self.__GetIfListCurrentItemStat(IfList):\r
+                    if not self.__CurSection:\r
+                        raise Warning("macro cannot be defined in Rule section or out of section", self.FileName, self.CurrentLineNumber)\r
                     DefineLine = self.CurrentLineNumber - 1\r
                     DefineOffset = self.CurrentOffsetWithinLine - len('DEFINE')\r
                     if not self.__GetNextToken():\r
@@ -619,19 +707,8 @@ class FdfParser:
                     if not self.__IsToken( "="):\r
                         raise Warning("expected '='", self.FileName, self.CurrentLineNumber)\r
     \r
-                    if not self.__GetNextToken():\r
-                        raise Warning("expected value", self.FileName, self.CurrentLineNumber)\r
-    \r
-                    if self.__GetStringData():\r
-                        pass\r
-                    Value = self.__Token\r
-                    if not Macro in InputMacroDict:\r
-                        FileLineTuple = GetRealFileLine(self.FileName, DefineLine + 1)\r
-                        MacProfile = MacroProfile(FileLineTuple[0], FileLineTuple[1])\r
-                        MacProfile.MacroName = Macro\r
-                        MacProfile.MacroValue = Value\r
-                        AllMacroList.append(MacProfile)\r
-                        InputMacroDict[MacProfile.MacroName] = MacProfile.MacroValue\r
+                    Value = self.__GetExpression()\r
+                    self.__SetMacroValue(Macro, Value)\r
                     self.__WipeOffArea.append(((DefineLine, DefineOffset), (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))\r
             elif self.__Token == 'SET':\r
                 PcdPair = self.__GetNextPcdName()\r
@@ -639,17 +716,10 @@ class FdfParser:
                 if not self.__IsToken( "="):\r
                     raise Warning("expected '='", self.FileName, self.CurrentLineNumber)\r
 \r
-                if not self.__GetNextToken():\r
-                    raise Warning("expected value", self.FileName, self.CurrentLineNumber)\r
-\r
-                Value = self.__Token\r
-                if Value.startswith("{"):\r
-                    # deal with value with {}\r
-                    if not self.__SkipToToken( "}"):\r
-                        raise Warning("expected '}'", self.FileName, self.CurrentLineNumber)\r
-                    Value += self.__SkippedChars\r
+                Value = self.__GetExpression()\r
+                Value = self.__EvaluateConditional(Value, self.CurrentLineNumber, 'eval', True)\r
 \r
-                InputMacroDict[PcdName] = Value\r
+                self.__PcdDict[PcdName] = Value\r
             elif self.__Token in ('!ifdef', '!ifndef', '!if'):\r
                 IfStartPos = (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - len(self.__Token))\r
                 IfList.append([IfStartPos, None, None])\r
@@ -691,6 +761,8 @@ class FdfParser:
                             IfList[-1][2] = True\r
                             self.__WipeOffArea.append((IfList[-1][0], (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))\r
             elif self.__Token == '!endif':\r
+                if len(IfList) <= 0:\r
+                    raise Warning("Missing !if statement", self.FileName, self.CurrentLineNumber)\r
                 if IfList[-1][1]:\r
                     self.__WipeOffArea.append(((self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - len('!endif')), (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))\r
                 else:\r
@@ -709,21 +781,53 @@ class FdfParser:
                 if not RegionSizeGuid:\r
                     RegionLayoutLine = self.CurrentLineNumber + 1\r
                     continue\r
-                InputMacroDict[RegionSizeGuid.group('base')] = RegionSize.group('base')\r
-                InputMacroDict[RegionSizeGuid.group('size')] = RegionSize.group('size')\r
+                self.__PcdDict[RegionSizeGuid.group('base')] = RegionSize.group('base')\r
+                self.__PcdDict[RegionSizeGuid.group('size')] = RegionSize.group('size')\r
                 RegionLayoutLine = self.CurrentLineNumber + 1\r
 \r
         if IfList:\r
             raise Warning("Missing !endif", self.FileName, self.CurrentLineNumber)\r
         self.Rewind()\r
 \r
+    def __CollectMacroPcd(self):\r
+        MacroDict = {}\r
+\r
+        # PCD macro\r
+        MacroDict.update(self.__PcdDict)\r
+\r
+        # Lowest priority\r
+        MacroDict.update(GlobalData.gPlatformDefines)\r
+\r
+        if self.__CurSection:\r
+            # Defines macro\r
+            ScopeMacro = self.__MacroDict['COMMON', 'COMMON', 'COMMON']\r
+            if ScopeMacro:\r
+                MacroDict.update(ScopeMacro)\r
+    \r
+            # Section macro\r
+            ScopeMacro = self.__MacroDict[\r
+                        self.__CurSection[0],\r
+                        self.__CurSection[1],\r
+                        self.__CurSection[2]\r
+            ]\r
+            if ScopeMacro:\r
+                MacroDict.update(ScopeMacro)\r
+\r
+        MacroDict.update(GlobalData.gGlobalDefines)\r
+        MacroDict.update(GlobalData.gCommandLineDefines)\r
+        # Highest priority\r
+\r
+        return MacroDict\r
+\r
     def __EvaluateConditional(self, Expression, Line, Op = None, Value = None):\r
         FileLineTuple = GetRealFileLine(self.FileName, Line)\r
+        MacroPcdDict = self.__CollectMacroPcd()\r
         if Op == 'eval':\r
             try:\r
-                return ValueExpression(Expression, InputMacroDict)()\r
-            except SymbolNotFound:\r
-                return False\r
+                if Value:\r
+                    return ValueExpression(Expression, MacroPcdDict)(True)\r
+                else:\r
+                    return ValueExpression(Expression, MacroPcdDict)()\r
             except WrnExpression, Excpt:\r
                 # \r
                 # Catch expression evaluation warning here. We need to report\r
@@ -738,7 +842,7 @@ class FdfParser:
         else:\r
             if Expression.startswith('$(') and Expression[-1] == ')':\r
                 Expression = Expression[2:-1]            \r
-            return Expression in InputMacroDict\r
+            return Expression in MacroPcdDict\r
 \r
     ## __IsToken() method\r
     #\r
@@ -856,7 +960,7 @@ class FdfParser:
         # Record the token start position, the position of the first non-space char.\r
         StartPos = self.CurrentOffsetWithinLine\r
         StartLine = self.CurrentLineNumber\r
-        while not self.__EndOfLine():\r
+        while StartLine == self.CurrentLineNumber:\r
             TempChar = self.__CurrentChar()\r
             # Try to find the end char that is not a space and not in seperator tuple.\r
             # That is, when we got a space or any char in the tuple, we got the end of token.\r
@@ -946,7 +1050,7 @@ class FdfParser:
             # That is, when we got a space or any char in the tuple, we got the end of token.\r
             if not str(TempChar).isspace() and not TempChar in SEPERATOR_TUPLE:\r
                 if not self.__UndoOneChar():\r
-                    break\r
+                    return\r
             # if we happen to meet a seperator as the first char, we must proceed to get it.\r
             # That is, we get a token that is a seperator char. nomally it is the boundary of other tokens.\r
             elif StartPos == self.CurrentOffsetWithinLine and TempChar in SEPERATOR_TUPLE:\r
@@ -1150,12 +1254,6 @@ class FdfParser:
 \r
         while self.__GetDefines():\r
             pass\r
-        \r
-        Index = 0\r
-        while Index < len(self.Profile.FileLinesList):\r
-            FileLineTuple = GetRealFileLine(self.FileName, Index + 1)\r
-            self.Profile.FileLinesList[Index] = self.__ReplaceMacros(self.Profile.FileLinesList[Index], FileLineTuple[0], FileLineTuple[1])\r
-            Index += 1\r
 \r
     ## ParseFile() method\r
     #\r
@@ -1239,11 +1337,6 @@ class FdfParser:
             if not self.__GetNextToken() or self.__Token.startswith('['):\r
                 raise Warning("expected MACRO value", self.FileName, self.CurrentLineNumber)\r
             Value = self.__Token\r
-            FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)\r
-            MacProfile = MacroProfile(FileLineTuple[0], FileLineTuple[1])\r
-            MacProfile.MacroName = Macro\r
-            MacProfile.MacroValue = Value\r
-            AllMacroList.append(MacProfile)\r
 \r
         return False\r
 \r
@@ -1279,6 +1372,8 @@ class FdfParser:
         if FdName == "":\r
             if len (self.Profile.FdDict) == 0:\r
                 FdName = GenFdsGlobalVariable.PlatformName\r
+                if FdName == "" and GlobalData.gActivePlatform:\r
+                    FdName = GlobalData.gActivePlatform.PlatformName\r
                 self.Profile.FdNameNotSet = True\r
             else:\r
                 raise Warning("expected FdName in [FD.] section", self.FileName, self.CurrentLineNumber)\r
@@ -1373,6 +1468,8 @@ class FdfParser:
             pcdPair = self.__GetNextPcdName()\r
             Obj.BaseAddressPcd = pcdPair\r
             self.Profile.PcdDict[pcdPair] = Obj.BaseAddress\r
+            FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)\r
+            self.Profile.PcdFileLineDict[pcdPair] = FileLineTuple\r
 \r
         if not self.__IsKeyword( "Size"):\r
             raise Warning("Size missing", self.FileName, self.CurrentLineNumber)\r
@@ -1389,6 +1486,8 @@ class FdfParser:
             pcdPair = self.__GetNextPcdName()\r
             Obj.SizePcd = pcdPair\r
             self.Profile.PcdDict[pcdPair] = Size\r
+            FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)\r
+            self.Profile.PcdFileLineDict[pcdPair] = FileLineTuple\r
         Obj.Size = long(Size, 0)\r
 \r
         if not self.__IsKeyword( "ErasePolarity"):\r
@@ -1484,6 +1583,8 @@ class FdfParser:
             PcdPair = self.__GetNextPcdName()\r
             BlockSizePcd = PcdPair\r
             self.Profile.PcdDict[PcdPair] = BlockSize\r
+            FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)\r
+            self.Profile.PcdFileLineDict[PcdPair] = FileLineTuple\r
         BlockSize = long(BlockSize, 0)\r
 \r
         BlockNumber = None\r
@@ -1567,19 +1668,14 @@ class FdfParser:
             if not self.__IsToken( "="):\r
                 raise Warning("expected '='", self.FileName, self.CurrentLineNumber)\r
 \r
-            if not self.__GetNextToken():\r
-                raise Warning("expected value", self.FileName, self.CurrentLineNumber)\r
-\r
-            Value = self.__Token\r
-            if Value.startswith("{"):\r
-                # deal with value with {}\r
-                if not self.__SkipToToken( "}"):\r
-                    raise Warning("expected '}'", self.FileName, self.CurrentLineNumber)\r
-                Value += self.__SkippedChars\r
+            Value = self.__GetExpression()\r
+            Value = self.__EvaluateConditional(Value, self.CurrentLineNumber, 'eval', True)\r
 \r
             if Obj:\r
                 Obj.SetVarDict[PcdPair] = Value\r
             self.Profile.PcdDict[PcdPair] = Value\r
+            FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)\r
+            self.Profile.PcdFileLineDict[PcdPair] = FileLineTuple\r
             return True\r
 \r
         return False\r
@@ -1615,9 +1711,13 @@ class FdfParser:
             self.__UndoToken()\r
             RegionObj.PcdOffset = self.__GetNextPcdName()\r
             self.Profile.PcdDict[RegionObj.PcdOffset] = "0x%08X" % (RegionObj.Offset + long(Fd.BaseAddress, 0))\r
+            FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)\r
+            self.Profile.PcdFileLineDict[RegionObj.PcdOffset] = FileLineTuple\r
             if self.__IsToken( "|"):\r
                 RegionObj.PcdSize = self.__GetNextPcdName()\r
                 self.Profile.PcdDict[RegionObj.PcdSize] = "0x%08X" % RegionObj.Size\r
+                FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)\r
+                self.Profile.PcdFileLineDict[RegionObj.PcdSize] = FileLineTuple\r
 \r
             if not self.__GetNextWord():\r
                 return True\r
@@ -2195,6 +2295,8 @@ class FdfParser:
 \r
         if not ffsInf.InfFileName in self.Profile.InfList:\r
             self.Profile.InfList.append(ffsInf.InfFileName)\r
+            FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)\r
+            self.Profile.InfFileLineList.append(FileLineTuple)\r
 \r
         if self.__IsToken('|'):\r
             if self.__IsKeyword('RELOCS_STRIPPED'):\r
@@ -2420,7 +2522,7 @@ class FdfParser:
                     if ErrorCode != 0:\r
                         EdkLogger.error("GenFds", ErrorCode, ExtraData=ErrorInfo)\r
                 else:\r
-                    if not InputMacroDict["OUTPUT_DIRECTORY"] in FfsFileObj.FileName:\r
+                    if not self.__GetMacroValue("OUTPUT_DIRECTORY") in FfsFileObj.FileName:\r
                         #do case sensitive check for file path\r
                         ErrorCode, ErrorInfo = PathClass(NormPath(FfsFileObj.FileName), GenFdsGlobalVariable.WorkSpaceDir).Validate()\r
                         if ErrorCode != 0:\r
@@ -3872,6 +3974,8 @@ class FdfParser:
 \r
         if not ffsInf.InfFileName in self.Profile.InfList:\r
             self.Profile.InfList.append(ffsInf.InfFileName)\r
+            FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)\r
+            self.Profile.InfFileLineList.append(FileLineTuple)\r
 \r
         \r
         self.__GetOptRomOverrides (ffsInf)\r
index 0219783b03da07930c8884938d703b8ae642f387..8a742d95bef327697eaa4f4dddb200df035dac7a 100644 (file)
@@ -278,8 +278,7 @@ def main():
                     ExtraData="Please send email to edk2-buildtools-devel@lists.sourceforge.net for help, attaching following call stack trace!\n",
                     RaiseError=False
                     )
-        if Options.debug != None:
-            EdkLogger.quiet(traceback.format_exc())
+        EdkLogger.quiet(traceback.format_exc())
         ReturnCode = CODE_ERROR
     return ReturnCode
 
index 5fb8cd82322c54a15dfee508dddccd070f5e21f7..927b5d1a3be637833ebe55bfb5e4cb237c91697a 100644 (file)
@@ -55,7 +55,8 @@ class TableFdf(Table):
                                                        Value1 VARCHAR NOT NULL,\r
                                                        Value2 VARCHAR,\r
                                                        Value3 VARCHAR,\r
-                                                       Arch VarCHAR,\r
+                                                       Scope1 VarCHAR,\r
+                                                       Scope2 VarCHAR,\r
                                                        BelongsToItem SINGLE NOT NULL,\r
                                                        BelongsToFile SINGLE NOT NULL,\r
                                                        StartLine INTEGER NOT NULL,\r
@@ -84,11 +85,11 @@ class TableFdf(Table):
     # @param EndColumn:      EndColumn of a Fdf item\r
     # @param Enabled:        If this item enabled\r
     #\r
-    def Insert(self, Model, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled):\r
+    def Insert(self, Model, Value1, Value2, Value3, Scope1, Scope2, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled):\r
         self.ID = self.ID + 1\r
-        (Value1, Value2, Value3, Arch) = ConvertToSqlString((Value1, Value2, Value3, Arch))\r
-        SqlCommand = """insert into %s values(%s, %s, '%s', '%s', '%s', '%s', %s, %s, %s, %s, %s, %s, %s)""" \\r
-                     % (self.Table, self.ID, Model, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled)\r
+        (Value1, Value2, Value3, Scope1, Scope2) = ConvertToSqlString((Value1, Value2, Value3, Scope1, Scope2))\r
+        SqlCommand = """insert into %s values(%s, %s, '%s', '%s', '%s', '%s', '%s', %s, %s, %s, %s, %s, %s, %s)""" \\r
+                     % (self.Table, self.ID, Model, Value1, Value2, Value3, Scope1, Scope2, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled)\r
         Table.Insert(self, SqlCommand)\r
         \r
         return self.ID\r
@@ -100,7 +101,7 @@ class TableFdf(Table):
     # @retval:       A recordSet of all found records \r
     #\r
     def Query(self, Model):\r
-        SqlCommand = """select ID, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine from %s\r
+        SqlCommand = """select ID, Value1, Value2, Value3, Scope1, Scope2, BelongsToItem, BelongsToFile, StartLine from %s\r
                         where Model = %s\r
                         and Enabled > -1""" % (self.Table, Model)\r
         EdkLogger.debug(4, "SqlCommand: %s" % SqlCommand)\r
index 86dddd0f2b63dbd05e572fb417e8bec4ca38a4eb..e43802ef25dfdcd8f73d03a094fc7f18ca93f12d 100644 (file)
@@ -89,3 +89,16 @@ class TableFile(Table):
         TimeStamp = os.stat(FileFullPath)[8]\r
         File = FileClass(-1, Name, Ext, Filepath, FileFullPath, Model, '', [], [], [])\r
         return self.Insert(File.Name, File.ExtName, File.Path, File.FullPath, File.Model, TimeStamp)\r
+    \r
+    ## Get ID of a given file\r
+    #\r
+    #   @param  File        Path of file\r
+    #\r
+    #   @retval ID          ID value of given file in the table\r
+    #\r
+    def GetFileId(self, File):\r
+        QueryScript = "select ID from %s where FullPath = '%s'" % (self.Table, str(File))\r
+        RecordList = self.Exec(QueryScript)\r
+        if len(RecordList) == 0:\r
+            return None\r
+        return RecordList[0][0]\r
index 34f6284a875062f54a63172b90e7d3621560422f..0416ecdcacdf0c629bea0a1d6e3c641f5daf5a4c 100644 (file)
@@ -36,16 +36,23 @@ gLineControlDirective = re.compile('^\s*#(?:line)?\s+([0-9]+)\s+"*([^"]*)"')
 gTypedefPattern = re.compile("^\s*typedef\s+struct(\s+\w+)?\s*[{]*$", re.MULTILINE)
 ## Regular expression for matching "#pragma pack"
 gPragmaPattern = re.compile("^\s*#pragma\s+pack", re.MULTILINE)
+
+#
+# The following number pattern match will only match if following criteria is met:
+# There is leading non-(alphanumeric or _) character, and no following alphanumeric or _
+# as the pattern is greedily match, so it is ok for the gDecNumberPattern or gHexNumberPattern to grab the maximum match
+#
 ## Regular expression for matching HEX number
-gHexNumberPattern = re.compile("(0[xX])([0-9a-fA-F]+)U?")
+gHexNumberPattern = re.compile("(?<=[^a-zA-Z0-9_])(0[xX])([0-9a-fA-F]+)(U(?=$|[^a-zA-Z0-9_]))?")
 ## Regular expression for matching decimal number with 'U' postfix
-gDecNumberPattern = re.compile("([0-9]+)U")
+gDecNumberPattern = re.compile("(?<=[^a-zA-Z0-9_])([0-9]+)U(?=$|[^a-zA-Z0-9_])")
+## Regular expression for matching constant with 'ULL' 'LL' postfix
+gLongNumberPattern = re.compile("(?<=[^a-zA-Z0-9_])(0[xX][0-9a-fA-F]+|[0-9]+)U?LL(?=$|[^a-zA-Z0-9_])")
+
 ## Regular expression for matching "Include ()" in asl file
 gAslIncludePattern = re.compile("^(\s*)[iI]nclude\s*\(\"?([^\"\(\)]+)\"\)", re.MULTILINE)
 ## Regular expression for matching C style #include "XXX.asl" in asl file
 gAslCIncludePattern = re.compile(r'^(\s*)#include\s*[<"]\s*([-\\/\w.]+)\s*([>"])', re.MULTILINE)
-## Regular expression for matching constant with 'ULL' and 'UL', 'LL', 'L' postfix
-gLongNumberPattern = re.compile("(0[xX][0-9a-fA-F]+|[0-9]+)U?LL", re.MULTILINE)
 ## Patterns used to convert EDK conventions to EDK2 ECP conventions
 gImportCodePatterns = [
     [
index 4bb9a8b52116e76b261c1179336f88f5cf52982c..fc3239135cd175a87d23486a98b74eafa5573e62 100644 (file)
@@ -1,3 +1,3 @@
 #This file is for build version number auto generation\r
 #\r
-gBUILD_VERSION = "Build 2423"\r
+gBUILD_VERSION = "Build 2460"\r
index ac656bb02aaa214b24925aeb37fbbc7118542021..752d8e8f41051dd703546b2077b6fd4634ea5bb6 100644 (file)
@@ -55,12 +55,12 @@ class DependencyRules(object):
     # @param Guid:  Guid of a module\r
     # @param Version: Version of a module\r
     #\r
-    def CheckModuleExists(self, Guid, Version, ReturnCode=DEPEX_CHECK_SUCCESS):\r
+    def CheckModuleExists(self, Guid, Version, Name, Path, ReturnCode=DEPEX_CHECK_SUCCESS):\r
         if ReturnCode:\r
             pass\r
         Logger.Verbose(ST.MSG_CHECK_MODULE_EXIST)\r
-        ModuleList = self.IpiDb.GetModInPackage(Guid, Version)\r
-        ModuleList.extend(self.IpiDb.GetStandaloneModule(Guid, Version))\r
+        ModuleList = self.IpiDb.GetModInPackage(Guid, Version, Name, Path)\r
+        ModuleList.extend(self.IpiDb.GetStandaloneModule(Guid, Version, Name, Path))\r
         Logger.Verbose(ST.MSG_CHECK_MODULE_EXIST_FINISH)\r
         if len(ModuleList) > 0:\r
             return True\r
index 03872379518eb26c5b38ad7e9e8259a3703c84f8..8ac8d4ed5255253ef9415f0a6cc99ba74646bb9a 100644 (file)
@@ -95,7 +95,7 @@ class DistributionPackageClass(object):
         #\r
         self.PackageSurfaceArea = Sdict() \r
         #\r
-        # {(Guid, Version, Path) : ModuleObj}\r
+        # {(Guid, Version, Name, Path) : ModuleObj}\r
         #\r
         self.ModuleSurfaceArea = Sdict()  \r
         self.Tools = MiscFileObject()\r
@@ -149,6 +149,7 @@ class DistributionPackageClass(object):
                         ModuleDict = PackageObj.GetModuleDict()\r
                         ModuleDict[(ModuleObj.GetGuid(), \\r
                                     ModuleObj.GetVersion(), \\r
+                                    ModuleObj.GetName(), \\r
                                     ModuleObj.GetCombinePath())] = ModuleObj\r
                         PackageObj.SetModuleDict(ModuleDict)\r
                     except FatalError, ErrCode:\r
@@ -172,10 +173,11 @@ class DistributionPackageClass(object):
                 try:\r
                     ModuleObj = InfPomAlignment(ModuleFileFullPath, \r
                                                 WorkspaceDir)\r
-                    self.ModuleSurfaceArea[(ModuleObj.GetGuid(), \\r
-                                            ModuleObj.GetVersion(), \\r
-                                            ModuleObj.GetCombinePath())] = \\r
-                                            ModuleObj\r
+                    ModuleKey = (ModuleObj.GetGuid(), \r
+                                 ModuleObj.GetVersion(), \r
+                                 ModuleObj.GetName(), \r
+                                 ModuleObj.GetCombinePath())\r
+                    self.ModuleSurfaceArea[ModuleKey] = ModuleObj\r
                 except FatalError, ErrCode:\r
                     if ErrCode.message == EDK1_INF_ERROR:\r
                         Logger.Error("UPT",\r
@@ -207,16 +209,16 @@ class DistributionPackageClass(object):
         \r
             Module = None\r
             ModuleDict = Package.GetModuleDict()\r
-            for Guid, Version, Path in ModuleDict:\r
-                Module = ModuleDict[Guid, Version, Path]\r
+            for Guid, Version, Name, Path in ModuleDict:\r
+                Module = ModuleDict[Guid, Version, Name, Path]\r
                 ModulePath = Module.GetModulePath()\r
                 FullPath = Module.GetFullPath()\r
                 PkgRelPath = os.path.normpath(os.path.join(PackagePath, ModulePath))\r
                 MetaDataFileList.append(Path)\r
                 self.FileList += GetNonMetaDataFiles(os.path.dirname(FullPath), ['CVS', '.svn'], False, PkgRelPath)\r
      \r
-        for Guid, Version, Path in self.ModuleSurfaceArea:\r
-            Module = self.ModuleSurfaceArea[Guid, Version, Path]\r
+        for Guid, Version, Name, Path in self.ModuleSurfaceArea:\r
+            Module = self.ModuleSurfaceArea[Guid, Version, Name, Path]\r
             ModulePath = Module.GetModulePath()\r
             FullPath = Module.GetFullPath()\r
             MetaDataFileList.append(Path)\r
index 38f872c2addf46887030a3c845b89843eea381e4..e45acb7d48ab189001fc0db80189b5094e2d8c45 100644 (file)
@@ -26,6 +26,7 @@ import time
 import Logger.Log as Logger\r
 from Logger import StringTable as ST\r
 from Logger.ToolError import UPT_ALREADY_RUNNING_ERROR\r
+from Logger.ToolError import UPT_DB_UPDATE_ERROR\r
 \r
 ## IpiDb\r
 #\r
@@ -118,11 +119,12 @@ class IpiDatabase(object):
         create table IF NOT EXISTS %s (\r
         ModuleGuid TEXT NOT NULL,\r
         ModuleVersion TEXT NOT NULL,\r
+        ModuleName TEXT NOT NULL,\r
         InstallTime REAL NOT NULL,\r
         PackageGuid TEXT,\r
         PackageVersion TEXT,\r
         InstallPath TEXT NOT NULL,\r
-        PRIMARY KEY (ModuleGuid, ModuleVersion, InstallPath)\r
+        PRIMARY KEY (ModuleGuid, ModuleVersion, ModuleName, InstallPath)\r
         )""" % self.ModInPkgTable\r
         self.Cur.execute(SqlCommand)\r
         \r
@@ -130,11 +132,12 @@ class IpiDatabase(object):
         create table IF NOT EXISTS %s (\r
         ModuleGuid TEXT NOT NULL,\r
         ModuleVersion TEXT NOT NULL,\r
+        ModuleName TEXT NOT NULL,\r
         InstallTime REAL NOT NULL,\r
         DpGuid TEXT,\r
         DpVersion TEXT,\r
         InstallPath TEXT NOT NULL,\r
-        PRIMARY KEY (ModuleGuid, ModuleVersion, InstallPath)\r
+        PRIMARY KEY (ModuleGuid, ModuleVersion, ModuleName, InstallPath)\r
         )""" % self.StandaloneModTable\r
         self.Cur.execute(SqlCommand)\r
         \r
@@ -142,6 +145,7 @@ class IpiDatabase(object):
         create table IF NOT EXISTS %s (\r
         ModuleGuid TEXT NOT NULL,\r
         ModuleVersion TEXT NOT NULL,\r
+        ModuleName TEXT NOT NULL,\r
         InstallPath TEXT NOT NULL,\r
         DepexGuid TEXT,\r
         DepexVersion TEXT\r
@@ -160,64 +164,72 @@ class IpiDatabase(object):
     # @param RePackage: A RePackage\r
     #\r
     def AddDPObject(self, DpObj, NewDpPkgFileName, DpPkgFileName, RePackage):\r
-        \r
-        for PkgKey in DpObj.PackageSurfaceArea.keys():\r
-            PkgGuid = PkgKey[0]\r
-            PkgVersion = PkgKey[1]\r
-            PkgInstallPath = PkgKey[2]\r
-            self._AddPackage(PkgGuid, PkgVersion, DpObj.Header.GetGuid(), \\r
-                             DpObj.Header.GetVersion(), PkgInstallPath)\r
-            PkgObj = DpObj.PackageSurfaceArea[PkgKey]\r
-            for ModKey in PkgObj.GetModuleDict().keys():\r
+        try:\r
+            for PkgKey in DpObj.PackageSurfaceArea.keys():\r
+                PkgGuid = PkgKey[0]\r
+                PkgVersion = PkgKey[1]\r
+                PkgInstallPath = PkgKey[2]\r
+                self._AddPackage(PkgGuid, PkgVersion, DpObj.Header.GetGuid(), \\r
+                                 DpObj.Header.GetVersion(), PkgInstallPath)\r
+                PkgObj = DpObj.PackageSurfaceArea[PkgKey]\r
+                for ModKey in PkgObj.GetModuleDict().keys():\r
+                    ModGuid = ModKey[0]\r
+                    ModVersion = ModKey[1]\r
+                    ModName = ModKey[2]\r
+                    ModInstallPath = ModKey[3]\r
+                    ModInstallPath = \\r
+                    os.path.normpath(os.path.join(PkgInstallPath, ModInstallPath))\r
+                    self._AddModuleInPackage(ModGuid, ModVersion, ModName, PkgGuid, \\r
+                                             PkgVersion, ModInstallPath)\r
+                    ModObj = PkgObj.GetModuleDict()[ModKey]\r
+                    for Dep in ModObj.GetPackageDependencyList():\r
+                        DepexGuid = Dep.GetGuid()\r
+                        DepexVersion = Dep.GetVersion()\r
+                        self._AddModuleDepex(ModGuid, ModVersion, ModName, ModInstallPath, \\r
+                                             DepexGuid, DepexVersion)\r
+                for (FilePath, Md5Sum) in PkgObj.FileList:\r
+                    self._AddDpFilePathList(DpObj.Header.GetGuid(), \\r
+                                            DpObj.Header.GetVersion(), FilePath, \\r
+                                            Md5Sum)\r
+    \r
+            for ModKey in DpObj.ModuleSurfaceArea.keys():\r
                 ModGuid = ModKey[0]\r
                 ModVersion = ModKey[1]\r
-                ModInstallPath = ModKey[2]\r
-                ModInstallPath = \\r
-                os.path.normpath(os.path.join(PkgInstallPath, ModInstallPath))\r
-                self._AddModuleInPackage(ModGuid, ModVersion, PkgGuid, \\r
-                                         PkgVersion, ModInstallPath)\r
-                ModObj = PkgObj.GetModuleDict()[ModKey]\r
+                ModName = ModKey[2]\r
+                ModInstallPath = ModKey[3]\r
+                self._AddStandaloneModule(ModGuid, ModVersion, ModName, \\r
+                                          DpObj.Header.GetGuid(), \\r
+                                          DpObj.Header.GetVersion(), \\r
+                                          ModInstallPath)\r
+                ModObj = DpObj.ModuleSurfaceArea[ModKey]\r
                 for Dep in ModObj.GetPackageDependencyList():\r
                     DepexGuid = Dep.GetGuid()\r
                     DepexVersion = Dep.GetVersion()\r
-                    self._AddModuleDepex(ModGuid, ModVersion, ModInstallPath, \\r
+                    self._AddModuleDepex(ModGuid, ModVersion, ModName, ModInstallPath, \\r
                                          DepexGuid, DepexVersion)\r
-            for (FilePath, Md5Sum) in PkgObj.FileList:\r
-                self._AddDpFilePathList(DpObj.Header.GetGuid(), \\r
-                                        DpObj.Header.GetVersion(), FilePath, \\r
-                                        Md5Sum)\r
-\r
-        for ModKey in DpObj.ModuleSurfaceArea.keys():\r
-            ModGuid = ModKey[0]\r
-            ModVersion = ModKey[1]\r
-            ModInstallPath = ModKey[2]\r
-            self._AddStandaloneModule(ModGuid, ModVersion, \\r
-                                      DpObj.Header.GetGuid(), \\r
-                                      DpObj.Header.GetVersion(), \\r
-                                      ModInstallPath)\r
-            ModObj = DpObj.ModuleSurfaceArea[ModKey]\r
-            for Dep in ModObj.GetPackageDependencyList():\r
-                DepexGuid = Dep.GetGuid()\r
-                DepexVersion = Dep.GetVersion()\r
-                self._AddModuleDepex(ModGuid, ModVersion, ModInstallPath, \\r
-                                     DepexGuid, DepexVersion)\r
-            for (Path, Md5Sum) in ModObj.FileList:\r
+                for (Path, Md5Sum) in ModObj.FileList:\r
+                    self._AddDpFilePathList(DpObj.Header.GetGuid(), \\r
+                                            DpObj.Header.GetVersion(), \\r
+                                            Path, Md5Sum)\r
+    \r
+            #\r
+            # add tool/misc files\r
+            #\r
+            for (Path, Md5Sum) in DpObj.FileList:\r
                 self._AddDpFilePathList(DpObj.Header.GetGuid(), \\r
-                                        DpObj.Header.GetVersion(), \\r
-                                        Path, Md5Sum)\r
-\r
-        #\r
-        # add tool/misc files\r
-        #\r
-        for (Path, Md5Sum) in DpObj.FileList:\r
-            self._AddDpFilePathList(DpObj.Header.GetGuid(), \\r
-                                    DpObj.Header.GetVersion(), Path, Md5Sum)\r
-                                \r
-        self._AddDp(DpObj.Header.GetGuid(), DpObj.Header.GetVersion(), \\r
-                    NewDpPkgFileName, DpPkgFileName, RePackage)\r
+                                        DpObj.Header.GetVersion(), Path, Md5Sum)\r
+                                    \r
+            self._AddDp(DpObj.Header.GetGuid(), DpObj.Header.GetVersion(), \\r
+                        NewDpPkgFileName, DpPkgFileName, RePackage)\r
+    \r
+            self.Conn.commit()\r
+        except sqlite3.IntegrityError, DetailMsg:\r
+            Logger.Error("UPT",\r
+                         UPT_DB_UPDATE_ERROR,\r
+                         ST.ERR_UPT_DB_UPDATE_ERROR,\r
+                         ExtraData = DetailMsg\r
+                         )\r
 \r
-        self.Conn.commit()\r
-                    \r
     ## Add a distribution install information\r
     #\r
     # @param Guid         Guid of the distribution package  \r
@@ -290,12 +302,14 @@ class IpiDatabase(object):
         \r
     ## Add a module that from a package install information\r
     #\r
-    # @param Guid: A package guid \r
-    # @param Version: A package version\r
-    # @param PkgGuid: A package guid\r
-    # @param PkgFileName: A package File Name\r
+    # @param Guid:    Module Guid \r
+    # @param Version: Module version\r
+    # @param Name:    Module Name\r
+    # @param PkgGuid: Package Guid\r
+    # @param PkgVersion: Package version\r
+    # @param Path:    Package relative path that module installs\r
     #\r
-    def _AddModuleInPackage(self, Guid, Version, PkgGuid=None, \\r
+    def _AddModuleInPackage(self, Guid, Version, Name, PkgGuid=None, \\r
                             PkgVersion=None, Path=''):\r
         \r
         if Version == None or len(Version.strip()) == 0:\r
@@ -312,8 +326,8 @@ class IpiDatabase(object):
         #\r
         CurrentTime = time.time()\r
         SqlCommand = \\r
-        """insert into %s values('%s', '%s', %s, '%s', '%s', '%s')""" % \\r
-        (self.ModInPkgTable, Guid, Version, CurrentTime, PkgGuid, PkgVersion, \\r
+        """insert into %s values('%s', '%s', '%s', %s, '%s', '%s', '%s')""" % \\r
+        (self.ModInPkgTable, Guid, Version, Name, CurrentTime, PkgGuid, PkgVersion, \\r
          Path)\r
         self.Cur.execute(SqlCommand)\r
     \r
@@ -321,11 +335,12 @@ class IpiDatabase(object):
     #\r
     # @param Guid: a module Guid\r
     # @param Version: a module Version\r
+    # @param Name: a module name\r
     # @param DpGuid: a DpGuid\r
     # @param DpVersion: a DpVersion\r
     # @param Path: path\r
     #\r
-    def _AddStandaloneModule(self, Guid, Version, DpGuid=None, \\r
+    def _AddStandaloneModule(self, Guid, Version, Name, DpGuid=None, \\r
                              DpVersion=None, Path=''):\r
         \r
         if Version == None or len(Version.strip()) == 0:\r
@@ -342,8 +357,8 @@ class IpiDatabase(object):
         #\r
         CurrentTime = time.time()\r
         SqlCommand = \\r
-        """insert into %s values('%s', '%s', %s, '%s', '%s', '%s')""" % \\r
-        (self.StandaloneModTable, Guid, Version, CurrentTime, DpGuid, \\r
+        """insert into %s values('%s', '%s', '%s', %s, '%s', '%s', '%s')""" % \\r
+        (self.StandaloneModTable, Guid, Version, Name, CurrentTime, DpGuid, \\r
          DpVersion, Path)\r
         self.Cur.execute(SqlCommand)\r
     \r
@@ -351,10 +366,11 @@ class IpiDatabase(object):
     #\r
     # @param Guid: a module Guid\r
     # @param Version: a module Version\r
+    # @param Name: a module name\r
     # @param DepexGuid: a module DepexGuid\r
     # @param DepexVersion: a module DepexVersion\r
     #\r
-    def _AddModuleDepex(self, Guid, Version, Path, DepexGuid=None, \\r
+    def _AddModuleDepex(self, Guid, Version, Name, Path, DepexGuid=None, \\r
                         DepexVersion=None):\r
                 \r
         if DepexGuid == None or len(DepexGuid.strip()) == 0:\r
@@ -366,8 +382,8 @@ class IpiDatabase(object):
         #\r
         # Add module depex information to DB.\r
         #\r
-        SqlCommand = """insert into %s values('%s', '%s', '%s', '%s', '%s')"""\\r
-         % (self.ModDepexTable, Guid, Version, Path, DepexGuid, DepexVersion)\r
+        SqlCommand = """insert into %s values('%s', '%s', '%s', '%s', '%s', '%s')"""\\r
+         % (self.ModDepexTable, Guid, Version, Name, Path, DepexGuid, DepexVersion)\r
         self.Cur.execute(SqlCommand)\r
         \r
     ## Remove a distribution install information, if no version specified, \r
@@ -389,10 +405,13 @@ class IpiDatabase(object):
         and ModDepexInfo.ModuleVersion in\r
         (select ModuleVersion from StandaloneModInfo as B \r
         where B.DpGuid = '%s' and B.DpVersion = '%s')\r
+        and ModDepexInfo.ModuleName in\r
+        (select ModuleName from StandaloneModInfo as B \r
+        where B.DpGuid = '%s' and B.DpVersion = '%s')\r
         and ModDepexInfo.InstallPath in\r
         (select InstallPath from StandaloneModInfo as B \r
         where B.DpGuid = '%s' and B.DpVersion = '%s') """ % \\r
-        (DpGuid, DpVersion, DpGuid, DpVersion, DpGuid, DpVersion)\r
+        (DpGuid, DpVersion, DpGuid, DpVersion, DpGuid, DpVersion, DpGuid, DpVersion)\r
 \r
         self.Cur.execute(SqlCommand)\r
         #\r
@@ -409,11 +428,15 @@ class IpiDatabase(object):
             (select ModuleVersion from ModInPkgInfo \r
             where ModInPkgInfo.PackageGuid ='%s' and \r
             ModInPkgInfo.PackageVersion = '%s')\r
+            and ModDepexInfo.ModuleName in\r
+            (select ModuleName from ModInPkgInfo \r
+            where ModInPkgInfo.PackageGuid ='%s' and \r
+            ModInPkgInfo.PackageVersion = '%s')\r
             and ModDepexInfo.InstallPath in\r
             (select InstallPath from ModInPkgInfo where \r
             ModInPkgInfo.PackageGuid ='%s' \r
             and ModInPkgInfo.PackageVersion = '%s')""" \\r
-                            % (Pkg[0], Pkg[1],Pkg[0], Pkg[1],Pkg[0], Pkg[1])\r
+                            % (Pkg[0], Pkg[1], Pkg[0], Pkg[1], Pkg[0], Pkg[1],Pkg[0], Pkg[1])\r
             \r
             self.Cur.execute(SqlCommand)\r
         #\r
@@ -627,23 +650,21 @@ class IpiDatabase(object):
     # @param Guid: A module guid\r
     # @param Version: A module version\r
     #\r
-    def GetModInPackage(self, Guid, Version, PkgGuid='', PkgVersion=''):\r
-        \r
+    def GetModInPackage(self, Guid, Version, Name, Path, PkgGuid='', PkgVersion=''):\r
+        (ModuleGuid, ModuleVersion, ModuleName, InstallPath) = (Guid, Version, Name, Path)\r
         if PkgVersion == '' or PkgGuid == '':\r
-\r
-            (ModuleGuid, ModuleVersion) = (Guid, Version)\r
             SqlCommand = """select * from %s where ModuleGuid ='%s' and \r
-            ModuleVersion = '%s'""" % (self.ModInPkgTable, ModuleGuid, \\r
-                                       ModuleVersion)\r
+            ModuleVersion = '%s' and InstallPath = '%s' \r
+            and ModuleName = '%s'""" % (self.ModInPkgTable, ModuleGuid, \\r
+                                       ModuleVersion, InstallPath, ModuleName)\r
             self.Cur.execute(SqlCommand)\r
-        \r
         else:\r
-            (ModuleGuid, ModuleVersion) = (Guid, Version)\r
             SqlCommand = """select * from %s where ModuleGuid ='%s' and \r
-            ModuleVersion = '%s' and PackageGuid ='%s' \r
+            ModuleVersion = '%s' and InstallPath = '%s' \r
+            and ModuleName = '%s' and PackageGuid ='%s' \r
             and PackageVersion = '%s'\r
                             """ % (self.ModInPkgTable, ModuleGuid, \\r
-                                   ModuleVersion, PkgGuid, PkgVersion)\r
+                                   ModuleVersion, InstallPath, ModuleName, PkgGuid, PkgVersion)\r
             self.Cur.execute(SqlCommand)\r
 \r
         ModList = []\r
@@ -662,21 +683,20 @@ class IpiDatabase(object):
     # @param Guid: A module guid \r
     # @param Version: A module version \r
     #\r
-    def GetStandaloneModule(self, Guid, Version, DpGuid='', DpVersion=''):\r
-        \r
+    def GetStandaloneModule(self, Guid, Version, Name, Path, DpGuid='', DpVersion=''):\r
+        (ModuleGuid, ModuleVersion, ModuleName, InstallPath) = (Guid, Version, Name, Path)\r
         if DpGuid == '':\r
-            (ModuleGuid, ModuleVersion) = (Guid, Version)\r
             SqlCommand = """select * from %s where ModuleGuid ='%s' and \r
-            ModuleVersion = '%s'""" % (self.StandaloneModTable, ModuleGuid, \\r
-                                       ModuleVersion)\r
+            ModuleVersion = '%s' and InstallPath = '%s' \r
+            and ModuleName = '%s'""" % (self.StandaloneModTable, ModuleGuid, \\r
+                                       ModuleVersion, InstallPath, ModuleName)\r
             self.Cur.execute(SqlCommand)\r
         \r
         else:\r
-            (ModuleGuid, ModuleVersion) = (Guid, Version)\r
             SqlCommand = """select * from %s where ModuleGuid ='%s' and \r
-            ModuleVersion = '%s' and DpGuid ='%s' and DpVersion = '%s'\r
+            ModuleVersion = '%s' and InstallPath = '%s' and ModuleName = '%s' and DpGuid ='%s' and DpVersion = '%s' \r
                             """ % (self.StandaloneModTable, ModuleGuid, \\r
-                                   ModuleVersion, DpGuid, DpVersion)\r
+                                   ModuleVersion, ModuleName, InstallPath, DpGuid, DpVersion)\r
             self.Cur.execute(SqlCommand)\r
 \r
         ModList = []\r
index c258222d6dac2990d4965e20a9e40680c2ccd29d..1c75dad80b1684cb5072fc3fe47affef61eaa0fb 100644 (file)
@@ -265,11 +265,11 @@ def GetModuleList(DistPkg, Dep, WorkspaceDir, ContentZipFile, ModuleList):
     #\r
     Module = None\r
     NewDict = Sdict()        \r
-    for Guid, Version, Path in DistPkg.ModuleSurfaceArea:\r
+    for Guid, Version, Name, Path in DistPkg.ModuleSurfaceArea:\r
         ModulePath = Path\r
-        Module = DistPkg.ModuleSurfaceArea[Guid, Version, Path]\r
+        Module = DistPkg.ModuleSurfaceArea[Guid, Version, Name, Path]\r
         Logger.Info(ST.MSG_INSTALL_MODULE % Module.GetName())\r
-        if Dep.CheckModuleExists(Guid, Version):\r
+        if Dep.CheckModuleExists(Guid, Version, Name, Path):\r
             Logger.Quiet(ST.WRN_MODULE_EXISTED %Path)\r
         #\r
         # here check for the multiple inf share the same module path cases:\r
@@ -291,7 +291,7 @@ def GetModuleList(DistPkg, Dep, WorkspaceDir, ContentZipFile, ModuleList):
         #\r
         Module.SetModulePath(Module.GetModulePath().replace(Path, NewModulePath, 1))\r
         \r
-        NewDict[Guid, Version, Module.GetModulePath()] = Module\r
+        NewDict[Guid, Version, Name, Module.GetModulePath()] = Module\r
 \r
     #\r
     # generate all inf for modules\r
@@ -737,8 +737,8 @@ def InstallPackageContent(FromPath, ToPath, Package, ContentZipFile, Dep,
     #\r
     Module = None\r
     ModuleDict = Package.GetModuleDict()\r
-    for ModuleGuid, ModuleVersion, ModulePath in ModuleDict:\r
-        Module = ModuleDict[ModuleGuid, ModuleVersion, ModulePath]\r
+    for ModuleGuid, ModuleVersion, ModuleName, ModulePath in ModuleDict:\r
+        Module = ModuleDict[ModuleGuid, ModuleVersion, ModuleName, ModulePath]\r
         InstallModuleContent(FromPath, ToPath, ModulePath, Module,\r
             ContentZipFile, WorkspaceDir, ModuleList, Package, ReadOnly)\r
 \r
index b67cd102d1601e89ee6cfbe0d063366ce077d1b4..889b777d190ae0e6c7c90d2a1a5899c06e3c2092 100644 (file)
@@ -875,7 +875,7 @@ def ProcessEdkComment(LineList):
                 for Index in xrange(StartPos, EndPos+1):
                     LineList[Index] = ''
                 FindEdkBlockComment = False
-        elif Line.find("//") != -1:
+        elif Line.find("//") != -1 and not Line.startswith("#"):
             #
             # handling cpp style comment
             #
index 063ca52d2b7aa99ca25bf0a2a1f2214ecfb28b1d..8a94d710469de58807cf16c991ebb68b18722f46 100644 (file)
@@ -524,6 +524,7 @@ ERR_UNKNOWN_ERROR                = _("Unknown error")
 ERR_UPT_ALREADY_INSTALLED_ERROR  = _("Already installed")\r
 ERR_UPT_ENVIRON_MISSING_ERROR    = _("Environ missing")\r
 ERR_UPT_REPKG_ERROR              = _("File not allowed for RePackage")\r
+ERR_UPT_DB_UPDATE_ERROR          = _("Update database did not complete successfully")\r
 ERR_UPT_INI_PARSE_ERROR          = _("INI file parse error")\r
 ERR_COPYRIGHT_MISSING            = \\r
 _("Header comment section must have copyright information")\r
index 69600b2c0114bd8a15340a759cdd2033656d79ed..906d03337c3407f418ed017a46b945ca89d4be09 100644 (file)
@@ -97,6 +97,7 @@ UPT_ENVIRON_MISSING_ERROR = 0xD001
 UPT_REPKG_ERROR = 0xD002
 UPT_ALREADY_RUNNING_ERROR = 0xD003
 UPT_MUL_DEC_ERROR = 0xD004
+UPT_DB_UPDATE_ERROR = 0xD005
 UPT_INI_PARSE_ERROR = 0xE000
 
 ## Error message of each error code
index f340805f328fecf35461fa9c708bda579529a6fc..85062ac8838b26439569afb4761de560f040d0c3 100644 (file)
@@ -273,7 +273,8 @@ class PackageSurfaceAreaXml(object):
         for SubItem in XmlList(Item, '/PackageSurfaceArea/Modules/ModuleSurfaceArea'):\r
             Tmp = ModuleSurfaceAreaXml()\r
             Module = Tmp.FromXml(SubItem, 'ModuleSurfaceArea')\r
-            Package.ModuleDict[(Module.GetGuid(), Module.GetVersion(), Module.GetModulePath())] = Module\r
+            ModuleDictKey = (Module.GetGuid(), Module.GetVersion(), Module.GetName(), Module.GetModulePath())\r
+            Package.ModuleDict[ModuleDictKey] = Module\r
         #    \r
         # MiscellaneousFile\r
         #\r
index adfeca81a000328412ca8b5aa2949d3d56b807bd..5a2f0dc70571603a22f019ab975bd08e8c5c3030 100644 (file)
@@ -180,7 +180,9 @@ class DistributionPackageXml(object):
             for Item in XmlList(self.Pkg, '/DistributionPackage/ModuleSurfaceArea'):\r
                 Msa = ModuleSurfaceAreaXml()\r
                 Module = Msa.FromXml(Item, 'ModuleSurfaceArea', True)\r
-                self.DistP.ModuleSurfaceArea[(Module.GetGuid(), Module.GetVersion(), Module.GetModulePath())] = Module\r
+                ModuleKey = (Module.GetGuid(), Module.GetVersion(), Module.GetName(), Module.GetModulePath())\r
+                self.DistP.ModuleSurfaceArea[ModuleKey] = Module\r
+\r
             #    \r
             # Parse Tools\r
             #\r
index d907b11ba11ac07053fe9c1688deee8eeb1ddf64..e26b558de0b6c7858e70188161238bcf3d00a784 100644 (file)
@@ -59,17 +59,29 @@ def ParseMacro(Parser):
             EdkLogger.error('Parser', FORMAT_INVALID, "The macro name must be in the pattern [A-Z][A-Z0-9_]*",
                             ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
 
-        self._ItemType = self.DataType[Type]
+        Value = ReplaceMacro(Value, self._Macros)
+        if Type in self.DataType:
+            self._ItemType = self.DataType[Type]
+        else:
+            self._ItemType = MODEL_META_DATA_DEFINE
         # DEFINE defined macros
-        if self._ItemType == MODEL_META_DATA_DEFINE:
-            if self._SectionType == MODEL_META_DATA_HEADER:
-                self._FileLocalMacros[Name] = Value
+        if Type == TAB_DSC_DEFINES_DEFINE:
+            #
+            # First judge whether this DEFINE is in conditional directive statements or not.
+            #
+            if type(self) == DscParser and self._InDirective > -1:
+                pass
             else:
-                SectionDictKey = self._SectionType, self._Scope[0][0], self._Scope[0][1]
-                if SectionDictKey not in self._SectionsMacroDict:
-                    self._SectionsMacroDict[SectionDictKey] = {}
-                SectionLocalMacros = self._SectionsMacroDict[SectionDictKey]
-                SectionLocalMacros[Name] = Value
+                if type(self) == DecParser:
+                    if MODEL_META_DATA_HEADER in self._SectionType:
+                        self._FileLocalMacros[Name] = Value
+                    else:
+                        self._ConstructSectionMacroDict(Name, Value)
+                elif self._SectionType == MODEL_META_DATA_HEADER:
+                    self._FileLocalMacros[Name] = Value
+                else:
+                    self._ConstructSectionMacroDict(Name, Value)
+
         # EDK_GLOBAL defined macros
         elif type(self) != DscParser:
             EdkLogger.error('Parser', FORMAT_INVALID, "EDK_GLOBAL can only be used in .dsc file",
@@ -310,6 +322,7 @@ class MetaFileParser(object):
             EdkLogger.error('Parser', FORMAT_INVALID, "No value specified",
                             ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
 
+        self._ValueList = [ReplaceMacro(Value, self._Macros) for Value in self._ValueList]
         Name, Value = self._ValueList[1], self._ValueList[2]
         # Sometimes, we need to make differences between EDK and EDK2 modules 
         if Name == 'INF_VERSION':
@@ -319,7 +332,6 @@ class MetaFileParser(object):
                 EdkLogger.error('Parser', FORMAT_INVALID, "Invalid version number",
                                 ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
 
-        Value = ReplaceMacro(Value, self._Macros)
         if type(self) == InfParser and self._Version < 0x00010005:
             # EDK module allows using defines as macros
             self._FileLocalMacros[Name] = Value
@@ -354,15 +366,59 @@ class MetaFileParser(object):
         Macros.update(self._GetApplicableSectionMacro())
         return Macros
 
+    ## Construct section Macro dict 
+    def _ConstructSectionMacroDict(self, Name, Value):
+        ScopeKey = [(Scope[0], Scope[1]) for Scope in self._Scope]
+        ScopeKey = tuple(ScopeKey)
+        SectionDictKey = self._SectionType, ScopeKey
+        #
+        # DecParser SectionType is a list, will contain more than one item only in Pcd Section
+        # As Pcd section macro usage is not alllowed, so here it is safe
+        #
+        if type(self) == DecParser:
+            SectionDictKey = self._SectionType[0], ScopeKey
+        if SectionDictKey not in self._SectionsMacroDict:
+            self._SectionsMacroDict[SectionDictKey] = {}
+        SectionLocalMacros = self._SectionsMacroDict[SectionDictKey]
+        SectionLocalMacros[Name] = Value
 
     ## Get section Macros that are applicable to current line, which may come from other sections 
     ## that share the same name while scope is wider
     def _GetApplicableSectionMacro(self):
         Macros = {}
 
-        for SectionType, Scope1, Scope2 in self._SectionsMacroDict:
-            if (SectionType == self._SectionType) and (Scope1 == self._Scope[0][0] or Scope1 == "COMMON") and (Scope2 == self._Scope[0][1] or Scope2 == "COMMON"):
-                Macros.update(self._SectionsMacroDict[(SectionType, Scope1, Scope2)])
+        ComComMacroDict = {}
+        ComSpeMacroDict = {}
+        SpeSpeMacroDict = {}
+        
+        ActiveSectionType = self._SectionType
+        if type(self) == DecParser:
+            ActiveSectionType = self._SectionType[0]
+            
+        for (SectionType, Scope) in self._SectionsMacroDict:
+            if SectionType != ActiveSectionType:
+                continue
+
+            for ActiveScope in self._Scope:
+                Scope0, Scope1 = ActiveScope[0], ActiveScope[1]
+                if(Scope0, Scope1) not in Scope:
+                    break
+            else:
+                SpeSpeMacroDict.update(self._SectionsMacroDict[(SectionType, Scope)])
+            
+            for ActiveScope in self._Scope:
+                Scope0, Scope1 = ActiveScope[0], ActiveScope[1]
+                if(Scope0, Scope1) not in Scope and (Scope0, "COMMON") not in Scope and ("COMMON", Scope1) not in Scope:
+                    break
+            else:
+                ComSpeMacroDict.update(self._SectionsMacroDict[(SectionType, Scope)])
+
+            if ("COMMON", "COMMON") in Scope:
+                ComComMacroDict.update(self._SectionsMacroDict[(SectionType, Scope)])
+
+        Macros.update(ComComMacroDict)
+        Macros.update(ComSpeMacroDict)
+        Macros.update(SpeSpeMacroDict)
 
         return Macros
 
@@ -499,7 +555,8 @@ class InfParser(MetaFileParser):
             self._ValueList = ['','','']
             # parse current line, result will be put in self._ValueList
             self._SectionParser[self._SectionType](self)
-            if self._ValueList == None:
+            if self._ValueList == None or self._ItemType == MODEL_META_DATA_DEFINE:
+                self._ItemType = -1
                 continue
             #
             # Model, Value1, Value2, Value3, Arch, Platform, BelongsToItem=-1,
@@ -733,7 +790,12 @@ class DscParser(MetaFileParser):
         self._DirectiveStack = []
         self._DirectiveEvalStack = []
         self._Enabled = 1
-
+        
+        #
+        # Specify whether current line is in uncertain condition
+        #
+        self._InDirective = -1
+        
         # Final valid replacable symbols
         self._Symbols = {}
         #
@@ -838,6 +900,13 @@ class DscParser(MetaFileParser):
         if DirectiveName not in self.DataType:
             EdkLogger.error("Parser", FORMAT_INVALID, "Unknown directive [%s]" % DirectiveName,
                             File=self.MetaFile, Line=self._LineIndex+1)
+
+        if DirectiveName in ['!IF', '!IFDEF', '!IFNDEF']:
+            self._InDirective += 1
+
+        if DirectiveName in ['!ENDIF']:
+            self._InDirective -= 1
+
         if DirectiveName in ['!IF', '!IFDEF', '!INCLUDE', '!IFNDEF', '!ELSEIF'] and self._ValueList[1] == '':
             EdkLogger.error("Parser", FORMAT_INVALID, "Missing expression",
                             File=self.MetaFile, Line=self._LineIndex+1,
@@ -923,6 +992,7 @@ class DscParser(MetaFileParser):
         self._ValueList[0:len(TokenList)] = TokenList
 
     ## Parse Edk style of library modules
+    @ParseMacro
     def _LibraryInstanceParser(self):
         self._ValueList[0] = self._CurrentLine
 
@@ -1146,27 +1216,13 @@ class DscParser(MetaFileParser):
         Records = self._RawTable.Query(MODEL_PCD_FEATURE_FLAG, BelongsToItem=-1.0)
         for TokenSpaceGuid,PcdName,Value,Dummy2,Dummy3,ID,Line in Records:
             Value, DatumType, MaxDatumSize = AnalyzePcdData(Value)
-            # Only use PCD whose value is straitforward (no macro and PCD)
-            if self.SymbolPattern.findall(Value):
-                continue
             Name = TokenSpaceGuid + '.' + PcdName
-            # Don't use PCD with different values.
-            if Name in self._Symbols and self._Symbols[Name] != Value:
-                self._Symbols.pop(Name)
-                continue 
             self._Symbols[Name] = Value
 
         Records = self._RawTable.Query(MODEL_PCD_FIXED_AT_BUILD, BelongsToItem=-1.0)
         for TokenSpaceGuid,PcdName,Value,Dummy2,Dummy3,ID,Line in Records:
             Value, DatumType, MaxDatumSize = AnalyzePcdData(Value)
-            # Only use PCD whose value is straitforward (no macro and PCD)
-            if self.SymbolPattern.findall(Value):
-                continue 
-            Name = TokenSpaceGuid+'.'+PcdName
-            # Don't use PCD with different values.
-            if Name in self._Symbols and self._Symbols[Name] != Value:
-                self._Symbols.pop(Name)
-                continue 
+            Name = TokenSpaceGuid + '.' + PcdName
             self._Symbols[Name] = Value
 
     def __ProcessDefine(self):
@@ -1179,11 +1235,7 @@ class DscParser(MetaFileParser):
             if self._SectionType == MODEL_META_DATA_HEADER:
                 self._FileLocalMacros[Name] = Value
             else:
-                SectionDictKey = self._SectionType, self._Scope[0][0], self._Scope[0][1]
-                if SectionDictKey not in self._SectionsMacroDict:
-                    self._SectionsMacroDict[SectionDictKey] = {}
-                SectionLocalMacros = self._SectionsMacroDict[SectionDictKey]
-                SectionLocalMacros[Name] = Value
+                self._ConstructSectionMacroDict(Name, Value)
         elif self._ItemType == MODEL_META_DATA_GLOBAL_DEFINE:
             GlobalData.gEdkGlobal[Name] = Value
         
@@ -1234,8 +1286,9 @@ class DscParser(MetaFileParser):
             self._DirectiveEvalStack[-1] = not self._DirectiveEvalStack[-1]
             self._DirectiveEvalStack.append(bool(Result))
         elif self._ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE:
-            self._DirectiveStack[-1] = self._ItemType
+            self._DirectiveStack.append(self._ItemType)
             self._DirectiveEvalStack[-1] = not self._DirectiveEvalStack[-1]
+            self._DirectiveEvalStack.append(True)