BaseTools: Fix the issue that multiple SKUs are always disabled
[mirror_edk2.git] / BaseTools / Source / Python / AutoGen / AutoGen.py
index e89b61d19a62cfa6daf31f033812dc8950193bcd..28e575d2196ed659c0019c7d16a8aae145eb181a 100644 (file)
@@ -43,6 +43,8 @@ from Workspace.MetaFileCommentParser import UsageList
 from Common.MultipleWorkspace import MultipleWorkspace as mws\r
 import InfSectionParser\r
 import datetime\r
+import hashlib\r
+from GenVar import VariableMgr,var_info\r
 \r
 ## Regular expression for splitting Dependency Expression string into tokens\r
 gDepexTokenPattern = re.compile("(\(|\)|\w+| \S+\.inf)")\r
@@ -143,6 +145,8 @@ ${END}
 \r
 ${depexsection_item}\r
 \r
+${userextension_tianocore_item}\r
+\r
 ${tail_comments}\r
 \r
 [BuildOptions.${module_arch}]\r
@@ -263,6 +267,10 @@ class WorkspaceAutoGen(AutoGen):
         self.FvTargetList   = Fvs\r
         self.CapTargetList  = Caps\r
         self.AutoGenObjectList = []\r
+        self._BuildDir      = None\r
+        self._FvDir         = None\r
+        self._MakeFileDir   = None\r
+        self._BuildCommand  = None\r
 \r
         # there's many relative directory operations, so ...\r
         os.chdir(self.WorkspaceDir)\r
@@ -309,8 +317,8 @@ class WorkspaceAutoGen(AutoGen):
 \r
         EdkLogger.verbose("\nFLASH_DEFINITION = %s" % self.FdfFile)\r
 \r
-        if Progress:\r
-            Progress.Start("\nProcessing meta-data")\r
+        if Progress:\r
+            Progress.Start("\nProcessing meta-data")\r
 \r
         if self.FdfFile:\r
             #\r
@@ -642,6 +650,14 @@ class WorkspaceAutoGen(AutoGen):
             Pa.CollectFixedAtBuildPcds()\r
             self.AutoGenObjectList.append(Pa)\r
 \r
+            #\r
+            # Generate Package level hash value\r
+            #\r
+            GlobalData.gPackageHash[Arch] = {}\r
+            if GlobalData.gUseHashCache:\r
+                for Pkg in Pkgs:\r
+                    self._GenPkgLevelHash(Pkg)\r
+\r
         #\r
         # Check PCDs token value conflict in each DEC file.\r
         #\r
@@ -655,11 +671,6 @@ class WorkspaceAutoGen(AutoGen):
 #         if self.FdfFile:\r
 #             self._CheckDuplicateInFV(Fdf)\r
 \r
-        self._BuildDir = None\r
-        self._FvDir = None\r
-        self._MakeFileDir = None\r
-        self._BuildCommand = None\r
-\r
         #\r
         # Create BuildOptions Macro & PCD metafile, also add the Active Platform and FDF file.\r
         #\r
@@ -675,6 +686,7 @@ class WorkspaceAutoGen(AutoGen):
         if self.FdfFile:\r
             content += 'Flash Image Definition: '\r
             content += str(self.FdfFile)\r
+            content += os.linesep\r
         SaveFileOnChange(os.path.join(self.BuildDir, 'BuildOptions'), content, False)\r
 \r
         #\r
@@ -704,6 +716,18 @@ class WorkspaceAutoGen(AutoGen):
                 SrcTimeStamp = os.stat(f)[8]\r
         self._SrcTimeStamp = SrcTimeStamp\r
 \r
+        if GlobalData.gUseHashCache:\r
+            m = hashlib.md5()\r
+            for files in AllWorkSpaceMetaFiles:\r
+                if files.endswith('.dec'):\r
+                    continue\r
+                f = open(files, 'r')\r
+                Content = f.read()\r
+                f.close()\r
+                m.update(Content)\r
+            SaveFileOnChange(os.path.join(self.BuildDir, 'AutoGen.hash'), m.hexdigest(), True)\r
+            GlobalData.gPlatformHash = m.hexdigest()\r
+\r
         #\r
         # Write metafile list to build directory\r
         #\r
@@ -717,6 +741,29 @@ class WorkspaceAutoGen(AutoGen):
                 print >> file, f\r
         return True\r
 \r
+    def _GenPkgLevelHash(self, Pkg):\r
+        PkgDir = os.path.join(self.BuildDir, Pkg.Arch, Pkg.PackageName)\r
+        CreateDirectory(PkgDir)\r
+        HashFile = os.path.join(PkgDir, Pkg.PackageName + '.hash')\r
+        m = hashlib.md5()\r
+        # Get .dec file's hash value\r
+        f = open(Pkg.MetaFile.Path, 'r')\r
+        Content = f.read()\r
+        f.close()\r
+        m.update(Content)\r
+        # Get include files hash value\r
+        if Pkg.Includes:\r
+            for inc in Pkg.Includes:\r
+                for Root, Dirs, Files in os.walk(str(inc)):\r
+                    for File in Files:\r
+                        File_Path = os.path.join(Root, File)\r
+                        f = open(File_Path, 'r')\r
+                        Content = f.read()\r
+                        f.close()\r
+                        m.update(Content)\r
+        SaveFileOnChange(HashFile, m.hexdigest(), True)\r
+        if Pkg.PackageName not in GlobalData.gPackageHash[Pkg.Arch]:\r
+            GlobalData.gPackageHash[Pkg.Arch][Pkg.PackageName] = m.hexdigest()\r
 \r
     def _GetMetaFiles(self, Target, Toolchain, Arch):\r
         AllWorkSpaceMetaFiles = set()\r
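
The two-level hash cache added above can be summarized with a small standalone sketch (the helper names below are illustrative, not part of the patch): the platform-level hash covers every workspace meta file except package .dec files, because each package is hashed separately as its .dec file plus everything under its declared include paths.

    import hashlib
    import os

    def hash_files(paths):
        # MD5 over the raw content of the given files, in order.
        m = hashlib.md5()
        for path in paths:
            with open(path, 'rb') as f:
                m.update(f.read())
        return m.hexdigest()

    def platform_hash(workspace_meta_files):
        # Platform-level hash: every workspace meta file except package .dec
        # files, which are covered by the per-package hashes instead.
        return hash_files(f for f in workspace_meta_files if not f.endswith('.dec'))

    def package_hash(dec_path, include_dirs):
        # Package-level hash: the .dec file plus every file under its include paths.
        files = [dec_path]
        for inc in include_dirs:
            for root, _dirs, names in os.walk(inc):
                files.extend(os.path.join(root, name) for name in names)
        return hash_files(files)
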
@@ -954,7 +1001,8 @@ class WorkspaceAutoGen(AutoGen):
 \r
     ## Return the directory to store all intermediate and final files built\r
     def _GetBuildDir(self):\r
-        return self.AutoGenObjectList[0].BuildDir\r
+        if self._BuildDir == None:\r
+            return self.AutoGenObjectList[0].BuildDir\r
 \r
     ## Return the build output directory platform specifies\r
     def _GetOutputDir(self):\r
@@ -1177,6 +1225,7 @@ class PlatformAutoGen(AutoGen):
         self.AllPcdList = []\r
         # get the original module/package/platform objects\r
         self.BuildDatabase = Workspace.BuildDatabase\r
+        self.DscBuildDataObj = Workspace.Platform\r
 \r
         # flag indicating if the makefile/C-code file has been created or not\r
         self.IsMakeFileCreated  = False\r
@@ -1213,6 +1262,9 @@ class PlatformAutoGen(AutoGen):
         self._BuildCommand = None\r
         self._AsBuildInfList = []\r
         self._AsBuildModuleList = []\r
+\r
+        self.VariableInfo = None\r
+\r
         if GlobalData.gFdfParser != None:\r
             self._AsBuildInfList = GlobalData.gFdfParser.Profile.InfList\r
             for Inf in self._AsBuildInfList:\r
@@ -1224,6 +1276,7 @@ class PlatformAutoGen(AutoGen):
         # get library/modules for build\r
         self.LibraryBuildDirectoryList = []\r
         self.ModuleBuildDirectoryList = []\r
+\r
         return True\r
 \r
     def __repr__(self):\r
@@ -1257,12 +1310,15 @@ class PlatformAutoGen(AutoGen):
     #   @param      CreateModuleMakeFile    Flag indicating if the makefile for\r
     #                                       modules will be created as well\r
     #\r
-    def CreateMakeFile(self, CreateModuleMakeFile=False):\r
+    def CreateMakeFile(self, CreateModuleMakeFile=False, FfsCommand = {}):\r
         if CreateModuleMakeFile:\r
             for ModuleFile in self.Platform.Modules:\r
                 Ma = ModuleAutoGen(self.Workspace, ModuleFile, self.BuildTarget,\r
                                    self.ToolChain, self.Arch, self.MetaFile)\r
-                Ma.CreateMakeFile(True)\r
+                if (ModuleFile.File, self.Arch) in FfsCommand:\r
+                    Ma.CreateMakeFile(True, FfsCommand[ModuleFile.File, self.Arch])\r
+                else:\r
+                    Ma.CreateMakeFile(True)\r
                 #Ma.CreateAsBuiltInf()\r
 \r
         # no need to create makefile for the platform more than once\r
@@ -1302,6 +1358,67 @@ class PlatformAutoGen(AutoGen):
                 if key in ShareFixedAtBuildPcdsSameValue and ShareFixedAtBuildPcdsSameValue[key]:                    \r
                     LibAuto.ConstPcd[key] = Pcd.DefaultValue\r
 \r
+    def CollectVariables(self, DynamicPcdSet):\r
+\r
+        VpdRegionSize = 0\r
+        VpdRegionBase = 0\r
+        if self.Workspace.FdfFile:\r
+            FdDict = self.Workspace.FdfProfile.FdDict[GlobalData.gFdfParser.CurrentFdName]\r
+            for FdRegion in FdDict.RegionList:\r
+                for item in FdRegion.RegionDataList:\r
+                    if self.Platform.VpdToolGuid.strip() and self.Platform.VpdToolGuid in item:\r
+                        VpdRegionSize = FdRegion.Size\r
+                        VpdRegionBase = FdRegion.Offset\r
+                        break\r
+\r
+\r
+        VariableInfo = VariableMgr(self.DscBuildDataObj._GetDefaultStores(),self.DscBuildDataObj._GetSkuIds())\r
+        VariableInfo.SetVpdRegionMaxSize(VpdRegionSize)\r
+        VariableInfo.SetVpdRegionOffset(VpdRegionBase)\r
+        Index = 0\r
+        for Pcd in DynamicPcdSet:\r
+            pcdname = ".".join((Pcd.TokenSpaceGuidCName,Pcd.TokenCName))\r
+            for SkuName in Pcd.SkuInfoList:\r
+                Sku = Pcd.SkuInfoList[SkuName]\r
+                SkuId = Sku.SkuId\r
+                if SkuId == None or SkuId == '':\r
+                    continue\r
+                if len(Sku.VariableName) > 0:\r
+                    VariableGuidStructure = Sku.VariableGuidValue\r
+                    VariableGuid = GuidStructureStringToGuidString(VariableGuidStructure)\r
+                    if Pcd.Phase == "DXE":\r
+                        for StorageName in Sku.DefaultStoreDict:\r
+                            VariableInfo.append_variable(var_info(Index,pcdname,StorageName,SkuName, StringToArray(Sku.VariableName),VariableGuid, Sku.VariableAttribute , Sku.HiiDefaultValue,Sku.DefaultStoreDict[StorageName],Pcd.DatumType))\r
+            Index += 1\r
+        return VariableInfo\r
+\r
+    def UpdateNVStoreMaxSize(self,OrgVpdFile):\r
+        VpdMapFilePath = os.path.join(self.BuildDir, "FV", "%s.map" % self.Platform.VpdToolGuid)\r
+#         VpdFile = VpdInfoFile.VpdInfoFile()\r
+        PcdNvStoreDfBuffer = [item for item in self._DynamicPcdList if item.TokenCName == "PcdNvStoreDefaultValueBuffer" and item.TokenSpaceGuidCName == "gEfiMdeModulePkgTokenSpaceGuid"]\r
+\r
+        if PcdNvStoreDfBuffer:\r
+            if os.path.exists(VpdMapFilePath):\r
+                OrgVpdFile.Read(VpdMapFilePath)\r
+                PcdItems = OrgVpdFile.GetOffset(PcdNvStoreDfBuffer[0])\r
+                NvStoreOffset = PcdItems[0].strip() if PcdItems else 0\r
+            else:\r
+                EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)\r
+\r
+            NvStoreOffset = int(NvStoreOffset,16) if NvStoreOffset.upper().startswith("0X") else int(NvStoreOffset)\r
+            maxsize = self.VariableInfo.VpdRegionSize  - NvStoreOffset\r
+            var_data = self.VariableInfo.PatchNVStoreDefaultMaxSize(maxsize)\r
+            default_skuobj = PcdNvStoreDfBuffer[0].SkuInfoList.get("DEFAULT")\r
+\r
+            if var_data and default_skuobj:\r
+                default_skuobj.DefaultValue = var_data\r
+                PcdNvStoreDfBuffer[0].DefaultValue = var_data\r
+                PcdNvStoreDfBuffer[0].SkuInfoList.clear()\r
+                PcdNvStoreDfBuffer[0].SkuInfoList['DEFAULT'] = default_skuobj\r
+                PcdNvStoreDfBuffer[0].MaxDatumSize = str(len(default_skuobj.DefaultValue.split(",")))\r
+\r
+        return OrgVpdFile\r
+\r
     ## Collect dynamic PCDs\r
     #\r
     #  Gather dynamic PCDs list from each module and their settings from platform\r
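
UpdateNVStoreMaxSize above derives the space available for PcdNvStoreDefaultValueBuffer from the VPD region size and the offset the BPDG tool recorded in the .map file, where the offset may be written as hex or decimal text. A minimal sketch of that calculation (the function name is assumed for illustration):

    def nv_store_max_size(vpd_region_size, nv_store_offset_text):
        # The BPDG map file may record the offset as hex ("0x...") or decimal text.
        text = nv_store_offset_text.strip()
        offset = int(text, 16) if text.upper().startswith("0X") else int(text)
        return vpd_region_size - offset

    # e.g. a 0x10000-byte VPD region with the buffer placed at offset 0x400
    # leaves 0xFC00 bytes for the NV store default data.
    assert nv_store_max_size(0x10000, "0x400") == 0xFC00
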
@@ -1507,34 +1624,47 @@ class PlatformAutoGen(AutoGen):
             if pcd not in self._PlatformPcds.keys():\r
                 self._PlatformPcds[pcd] = self.Platform.Pcds[pcd]\r
 \r
+        for item in self._PlatformPcds:\r
+            if self._PlatformPcds[item].DatumType and self._PlatformPcds[item].DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:\r
+                self._PlatformPcds[item].DatumType = "VOID*"\r
+\r
         if (self.Workspace.ArchList[-1] == self.Arch): \r
             for Pcd in self._DynamicPcdList:\r
                 # just pick the a value to determine whether is unicode string type\r
                 Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]]\r
                 Sku.VpdOffset = Sku.VpdOffset.strip()\r
 \r
-                PcdValue = Sku.DefaultValue\r
-                if Pcd.DatumType == 'VOID*' and PcdValue.startswith("L"):\r
+                if Pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:\r
+                    Pcd.DatumType = "VOID*"\r
+\r
                     # if found PCD which datum value is unicode string the insert to left size of UnicodeIndex\r
-                    UnicodePcdArray.append(Pcd)\r
-                elif len(Sku.VariableName) > 0:\r
                     # if found HII type PCD then insert to right of UnicodeIndex\r
-                    HiiPcdArray.append(Pcd)\r
-                else:\r
-                    OtherPcdArray.append(Pcd)\r
                 if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:\r
                     VpdPcdDict[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName)] = Pcd\r
 \r
+            # Collect DynamicHii PCD values and assign them to the DynamicExVpd PCD gEfiMdeModulePkgTokenSpaceGuid.PcdNvStoreDefaultValueBuffer\r
+            PcdNvStoreDfBuffer = VpdPcdDict.get(("PcdNvStoreDefaultValueBuffer","gEfiMdeModulePkgTokenSpaceGuid"))\r
+            if PcdNvStoreDfBuffer:\r
+                self.VariableInfo = self.CollectVariables(self._DynamicPcdList)\r
+                vardump = self.VariableInfo.dump()\r
+                if vardump:\r
+                    PcdNvStoreDfBuffer.DefaultValue = vardump\r
+                    for skuname in PcdNvStoreDfBuffer.SkuInfoList:\r
+                        PcdNvStoreDfBuffer.SkuInfoList[skuname].DefaultValue = vardump\r
+                        PcdNvStoreDfBuffer.MaxDatumSize = str(len(vardump.split(",")))\r
+\r
             PlatformPcds = self._PlatformPcds.keys()\r
             PlatformPcds.sort()\r
             #\r
             # Add VPD type PCD into VpdFile and determine whether the VPD PCD need to be fixed up.\r
             #\r
+            VpdSkuMap = {}\r
             for PcdKey in PlatformPcds:\r
                 Pcd = self._PlatformPcds[PcdKey]\r
                 if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD] and \\r
                    PcdKey in VpdPcdDict:\r
                     Pcd = VpdPcdDict[PcdKey]\r
+                    SkuValueMap = {}\r
                     for (SkuName,Sku) in Pcd.SkuInfoList.items():\r
                         Sku.VpdOffset = Sku.VpdOffset.strip()\r
                         PcdValue = Sku.DefaultValue\r
@@ -1559,7 +1689,10 @@ class PlatformAutoGen(AutoGen):
                                     EdkLogger.warn("build", "The offset value of PCD %s.%s is not 8-byte aligned!" %(Pcd.TokenSpaceGuidCName, Pcd.TokenCName), File=self.MetaFile)\r
                                 else:\r
                                     EdkLogger.error("build", FORMAT_INVALID, 'The offset value of PCD %s.%s should be %s-byte aligned.' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, Alignment))\r
-                        VpdFile.Add(Pcd, Sku.VpdOffset)\r
+                        if PcdValue not in SkuValueMap:\r
+                            SkuValueMap[PcdValue] = []\r
+                            VpdFile.Add(Pcd, Sku.VpdOffset)\r
+                        SkuValueMap[PcdValue].append(Sku)\r
                         # if the offset of a VPD is *, then it need to be fixed up by third party tool.\r
                         if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":\r
                             NeedProcessVpdMapFile = True\r
@@ -1567,7 +1700,7 @@ class PlatformAutoGen(AutoGen):
                                 EdkLogger.error("Build", FILE_NOT_FOUND, \\r
                                                 "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")\r
 \r
-\r
+                    VpdSkuMap[PcdKey] = SkuValueMap\r
             #\r
             # Fix the PCDs define in VPD PCD section that never referenced by module.\r
             # An example is PCD for signature usage.\r
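
The SkuValueMap introduced in the hunks above deduplicates VPD entries: SKUs whose default values are identical are added to the VPD file only once, and the map from value to SKU list is kept so the offset assigned by the BPDG tool can later be written back to every SKU in the group. A simplified sketch of the grouping step (the callback and attribute names are assumptions):

    def group_skus_by_value(sku_info_list, add_to_vpd_file):
        # Add each distinct default value to the VPD file once and remember which
        # SKUs share it, so one fixed-up offset can later be applied to all of them.
        sku_value_map = {}
        for sku in sku_info_list.values():
            value = sku.DefaultValue
            if value not in sku_value_map:
                sku_value_map[value] = []
                add_to_vpd_file(sku)      # only the first SKU carrying this value
            sku_value_map[value].append(sku)
        return sku_value_map
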
@@ -1586,6 +1719,7 @@ class PlatformAutoGen(AutoGen):
                         # Not found, it should be signature\r
                         if not FoundFlag :\r
                             # just pick the a value to determine whether is unicode string type\r
+                            SkuValueMap = {}\r
                             for (SkuName,Sku) in DscPcdEntry.SkuInfoList.items():\r
                                 Sku.VpdOffset = Sku.VpdOffset.strip() \r
                                 \r
@@ -1611,7 +1745,6 @@ class PlatformAutoGen(AutoGen):
                                                                                                                     \r
                                 if DscPcdEntry not in self._DynamicPcdList:\r
                                     self._DynamicPcdList.append(DscPcdEntry)\r
-#                                Sku = DscPcdEntry.SkuInfoList[DscPcdEntry.SkuInfoList.keys()[0]]\r
                                 Sku.VpdOffset = Sku.VpdOffset.strip()\r
                                 PcdValue = Sku.DefaultValue\r
                                 if PcdValue == "":\r
@@ -1635,7 +1768,10 @@ class PlatformAutoGen(AutoGen):
                                             EdkLogger.warn("build", "The offset value of PCD %s.%s is not 8-byte aligned!" %(DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName), File=self.MetaFile)\r
                                         else:\r
                                             EdkLogger.error("build", FORMAT_INVALID, 'The offset value of PCD %s.%s should be %s-byte aligned.' % (DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName, Alignment))\r
-                                VpdFile.Add(DscPcdEntry, Sku.VpdOffset)\r
+                                if PcdValue not in SkuValueMap:\r
+                                    SkuValueMap[PcdValue] = []\r
+                                    VpdFile.Add(DscPcdEntry, Sku.VpdOffset)\r
+                                SkuValueMap[PcdValue].append(Sku)\r
                                 if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":\r
                                     NeedProcessVpdMapFile = True \r
                             if DscPcdEntry.DatumType == 'VOID*' and PcdValue.startswith("L"):\r
@@ -1646,38 +1782,17 @@ class PlatformAutoGen(AutoGen):
                                 OtherPcdArray.append(DscPcdEntry)\r
                                 \r
                                 # if the offset of a VPD is *, then it need to be fixed up by third party tool.\r
-                                                       \r
-                    \r
-                    \r
+                            VpdSkuMap[DscPcd] = SkuValueMap\r
             if (self.Platform.FlashDefinition == None or self.Platform.FlashDefinition == '') and \\r
                VpdFile.GetCount() != 0:\r
                 EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, \r
                                 "Fail to get FLASH_DEFINITION definition in DSC file %s which is required when DSC contains VPD PCD." % str(self.Platform.MetaFile))\r
 \r
             if VpdFile.GetCount() != 0:\r
-                FvPath = os.path.join(self.BuildDir, "FV")\r
-                if not os.path.exists(FvPath):\r
-                    try:\r
-                        os.makedirs(FvPath)\r
-                    except:\r
-                        EdkLogger.error("build", FILE_WRITE_FAILURE, "Fail to create FV folder under %s" % self.BuildDir)\r
-\r
-                VpdFilePath = os.path.join(FvPath, "%s.txt" % self.Platform.VpdToolGuid)\r
-\r
-                if VpdFile.Write(VpdFilePath):\r
-                    # retrieve BPDG tool's path from tool_def.txt according to VPD_TOOL_GUID defined in DSC file.\r
-                    BPDGToolName = None\r
-                    for ToolDef in self.ToolDefinition.values():\r
-                        if ToolDef.has_key("GUID") and ToolDef["GUID"] == self.Platform.VpdToolGuid:\r
-                            if not ToolDef.has_key("PATH"):\r
-                                EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "PATH attribute was not provided for BPDG guid tool %s in tools_def.txt" % self.Platform.VpdToolGuid)\r
-                            BPDGToolName = ToolDef["PATH"]\r
-                            break\r
-                    # Call third party GUID BPDG tool.\r
-                    if BPDGToolName != None:\r
-                        VpdInfoFile.CallExtenalBPDGTool(BPDGToolName, VpdFilePath)\r
-                    else:\r
-                        EdkLogger.error("Build", FILE_NOT_FOUND, "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")\r
+\r
+                self.FixVpdOffset(VpdFile)\r
+\r
+                self.FixVpdOffset(self.UpdateNVStoreMaxSize(VpdFile))\r
 \r
                 # Process VPD map file generated by third party BPDG tool\r
                 if NeedProcessVpdMapFile:\r
@@ -1686,23 +1801,76 @@ class PlatformAutoGen(AutoGen):
                         VpdFile.Read(VpdMapFilePath)\r
 \r
                         # Fixup "*" offset\r
-                        for Pcd in self._DynamicPcdList:\r
+                        for pcd in VpdSkuMap:\r
+                            vpdinfo = VpdFile.GetVpdInfo(pcd)\r
+                            if vpdinfo is None:\r
                             # just pick the a value to determine whether is unicode string type\r
-                            i = 0\r
-                            for (SkuName,Sku) in Pcd.SkuInfoList.items():                        \r
-                                if Sku.VpdOffset == "*":\r
-                                    Sku.VpdOffset = VpdFile.GetOffset(Pcd)[i].strip()\r
-                                i += 1\r
+                                continue\r
+                            for pcdvalue in VpdSkuMap[pcd]:\r
+                                for sku in VpdSkuMap[pcd][pcdvalue]:\r
+                                    for item in vpdinfo:\r
+                                        if item[2] == pcdvalue:\r
+                                            sku.VpdOffset = item[1]\r
                     else:\r
                         EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)\r
 \r
-            # Delete the DynamicPcdList At the last time enter into this function \r
+            # Delete the DynamicPcdList the last time this function is entered\r
+            for Pcd in self._DynamicPcdList:\r
+                # just pick one value to determine whether it is a unicode string type\r
+                Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]]\r
+                Sku.VpdOffset = Sku.VpdOffset.strip()\r
+\r
+                if Pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:\r
+                    Pcd.DatumType = "VOID*"\r
+\r
+                PcdValue = Sku.DefaultValue\r
+                if Pcd.DatumType == 'VOID*' and PcdValue.startswith("L"):\r
+                    # if a PCD whose datum value is a unicode string is found, insert it to the left of UnicodeIndex\r
+                    UnicodePcdArray.append(Pcd)\r
+                elif len(Sku.VariableName) > 0:\r
+                    # if an HII type PCD is found, insert it to the right of UnicodeIndex\r
+                    HiiPcdArray.append(Pcd)\r
+                else:\r
+                    OtherPcdArray.append(Pcd)\r
             del self._DynamicPcdList[:]\r
         self._DynamicPcdList.extend(UnicodePcdArray)\r
         self._DynamicPcdList.extend(HiiPcdArray)\r
         self._DynamicPcdList.extend(OtherPcdArray)\r
+        allskuset = [(SkuName,Sku.SkuId) for pcd in self._DynamicPcdList for (SkuName,Sku) in pcd.SkuInfoList.items()]\r
+        for pcd in self._DynamicPcdList:\r
+            if len(pcd.SkuInfoList) == 1:\r
+                for (SkuName,SkuId) in allskuset:\r
+                    if type(SkuId) in (str,unicode) and eval(SkuId) == 0 or SkuId == 0:\r
+                        continue\r
+                    pcd.SkuInfoList[SkuName] = copy.deepcopy(pcd.SkuInfoList['DEFAULT'])\r
+                    pcd.SkuInfoList[SkuName].SkuId = SkuId\r
         self.AllPcdList = self._NonDynamicPcdList + self._DynamicPcdList\r
-        \r
+\r
+    def FixVpdOffset(self,VpdFile ):\r
+        FvPath = os.path.join(self.BuildDir, "FV")\r
+        if not os.path.exists(FvPath):\r
+            try:\r
+                os.makedirs(FvPath)\r
+            except:\r
+                EdkLogger.error("build", FILE_WRITE_FAILURE, "Fail to create FV folder under %s" % self.BuildDir)\r
+\r
+        VpdFilePath = os.path.join(FvPath, "%s.txt" % self.Platform.VpdToolGuid)\r
+\r
+        if VpdFile.Write(VpdFilePath):\r
+            # retrieve BPDG tool's path from tool_def.txt according to VPD_TOOL_GUID defined in DSC file.\r
+            BPDGToolName = None\r
+            for ToolDef in self.ToolDefinition.values():\r
+                if ToolDef.has_key("GUID") and ToolDef["GUID"] == self.Platform.VpdToolGuid:\r
+                    if not ToolDef.has_key("PATH"):\r
+                        EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "PATH attribute was not provided for BPDG guid tool %s in tools_def.txt" % self.Platform.VpdToolGuid)\r
+                    BPDGToolName = ToolDef["PATH"]\r
+                    break\r
+            # Call third party GUID BPDG tool.\r
+            if BPDGToolName != None:\r
+                VpdInfoFile.CallExtenalBPDGTool(BPDGToolName, VpdFilePath)\r
+            else:\r
+                EdkLogger.error("Build", FILE_NOT_FOUND, "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")\r
+\r
     ## Return the platform build data object\r
     def _GetPlatform(self):\r
         if self._Platform == None:\r
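
The block extending _DynamicPcdList above is the fix named in the commit subject: a dynamic PCD that declares only the DEFAULT SKU is now cloned into every non-zero SKU the platform defines, so builds with multiple SKUs no longer behave as if the extra SKUs were disabled. A minimal sketch of that propagation under simplified, assumed data structures:

    import copy

    def propagate_default_sku(pcd_sku_info, all_sku_ids):
        # pcd_sku_info: {'DEFAULT': <sku info>} for a PCD that names only one SKU.
        # all_sku_ids:  {sku_name: sku_id} for every SKU the platform defines.
        if len(pcd_sku_info) != 1:
            return pcd_sku_info
        for name, sku_id in all_sku_ids.items():
            if int(str(sku_id), 0) == 0:  # SKU id 0 is the DEFAULT SKU itself
                continue
            clone = copy.deepcopy(pcd_sku_info['DEFAULT'])
            clone.SkuId = sku_id
            pcd_sku_info[name] = clone
        return pcd_sku_info
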
@@ -1748,6 +1916,7 @@ class PlatformAutoGen(AutoGen):
                                             self.OutputDir,\r
                                             self.BuildTarget + "_" + self.ToolChain,\r
                                             )\r
+            GlobalData.gBuildDirectory = self._BuildDir\r
         return self._BuildDir\r
 \r
     ## Return directory of platform makefile\r
@@ -2293,7 +2462,7 @@ class PlatformAutoGen(AutoGen):
             else:\r
                 SkuName = 'DEFAULT'\r
             ToPcd.SkuInfoList = {\r
-                SkuName : SkuInfoClass(SkuName, self.Platform.SkuIds[SkuName], '', '', '', '', '', ToPcd.DefaultValue)\r
+                SkuName : SkuInfoClass(SkuName, self.Platform.SkuIds[SkuName][0], '', '', '', '', '', ToPcd.DefaultValue)\r
             }\r
 \r
     ## Apply PCD setting defined platform to a module\r
@@ -2333,8 +2502,18 @@ class PlatformAutoGen(AutoGen):
         if Module in self.Platform.Modules:\r
             PlatformModule = self.Platform.Modules[str(Module)]\r
             for Key  in PlatformModule.Pcds:\r
+                Flag = False\r
                 if Key in Pcds:\r
-                    self._OverridePcd(Pcds[Key], PlatformModule.Pcds[Key], Module)\r
+                    ToPcd = Pcds[Key]\r
+                    Flag = True\r
+                elif Key in GlobalData.MixedPcd:\r
+                    for PcdItem in GlobalData.MixedPcd[Key]:\r
+                        if PcdItem in Pcds:\r
+                            ToPcd = Pcds[PcdItem]\r
+                            Flag = True\r
+                            break\r
+                if Flag:\r
+                    self._OverridePcd(ToPcd, PlatformModule.Pcds[Key], Module)\r
         return Pcds.values()\r
 \r
     ## Resolve library names to library modules\r
@@ -2683,6 +2862,7 @@ class ModuleAutoGen(AutoGen):
 \r
         self.BuildDatabase = self.Workspace.BuildDatabase\r
         self.BuildRuleOrder = None\r
+        self.BuildTime      = 0\r
 \r
         self._Module          = None\r
         self._Name            = None\r
@@ -2698,6 +2878,7 @@ class ModuleAutoGen(AutoGen):
 \r
         self._BuildDir        = None\r
         self._OutputDir       = None\r
+        self._FfsOutputDir    = None\r
         self._DebugDir        = None\r
         self._MakeFileDir     = None\r
 \r
@@ -2814,6 +2995,7 @@ class ModuleAutoGen(AutoGen):
             self._Macro["PLATFORM_RELATIVE_DIR" ] = self.PlatformInfo.SourceDir\r
             self._Macro["PLATFORM_DIR"          ] = mws.join(self.WorkspaceDir, self.PlatformInfo.SourceDir)\r
             self._Macro["PLATFORM_OUTPUT_DIR"   ] = self.PlatformInfo.OutputDir\r
+            self._Macro["FFS_OUTPUT_DIR"        ] = self.FfsOutputDir\r
         return self._Macro\r
 \r
     ## Return the module build data object\r
@@ -2904,6 +3086,15 @@ class ModuleAutoGen(AutoGen):
             CreateDirectory(self._OutputDir)\r
         return self._OutputDir\r
 \r
+    ## Return the directory to store the FFS file\r
+    def _GetFfsOutputDir(self):\r
+        if self._FfsOutputDir == None:\r
+            if GlobalData.gFdfParser != None:\r
+                self._FfsOutputDir = path.join(self.PlatformInfo.BuildDir, "FV", "Ffs", self.Guid + self.Name)\r
+            else:\r
+                self._FfsOutputDir = ''\r
+        return self._FfsOutputDir\r
+\r
     ## Return the directory to store auto-gened source files of the mdoule\r
     def _GetDebugDir(self):\r
         if self._DebugDir == None:\r
@@ -3088,6 +3279,30 @@ class ModuleAutoGen(AutoGen):
                 self._DepexExpressionList[ModuleType] = DepexExpressionList\r
         return self._DepexExpressionList\r
 \r
+    # Get the TianoCore user extensions; they contain the dependent libraries.\r
+    # @retval: a list containing the TianoCore user extensions.\r
+    #\r
+    def _GetTianoCoreUserExtensionList(self):\r
+        TianoCoreUserExtentionList = []\r
+        for M in [self.Module] + self.DependentLibraryList:\r
+            Filename = M.MetaFile.Path\r
+            InfObj = InfSectionParser.InfSectionParser(Filename)\r
+            TianoCoreUserExtenList = InfObj.GetUserExtensionTianoCore()\r
+            for TianoCoreUserExtent in TianoCoreUserExtenList:\r
+                for Section in TianoCoreUserExtent.keys():\r
+                    ItemList = Section.split(TAB_SPLIT)\r
+                    Arch = self.Arch\r
+                    if len(ItemList) == 4:\r
+                        Arch = ItemList[3]\r
+                    if Arch.upper() == TAB_ARCH_COMMON or Arch.upper() == self.Arch.upper():\r
+                        TianoCoreList = []\r
+                        TianoCoreList.extend([TAB_SECTION_START + Section + TAB_SECTION_END])\r
+                        TianoCoreList.extend(TianoCoreUserExtent[Section][:])\r
+                        TianoCoreList.append('\n')\r
+                        TianoCoreUserExtentionList.append(TianoCoreList)\r
+\r
+        return TianoCoreUserExtentionList\r
+\r
     ## Return the list of specification version required for the module\r
     #\r
     #   @retval     list    The list of specification defined in module file\r
@@ -3181,13 +3396,13 @@ class ModuleAutoGen(AutoGen):
                     EdkLogger.debug(EdkLogger.DEBUG_9, "The toolchain [%s] for processing file [%s] is found, "\r
                                     "but [%s] is needed" % (F.TagName, str(F), self.ToolChain))\r
                     continue\r
-                # match tool chain family\r
-                if F.ToolChainFamily not in ("", "*", self.ToolChainFamily):\r
+                # match tool chain family or build rule family\r
+                if F.ToolChainFamily not in ("", "*", self.ToolChainFamily, self.BuildRuleFamily):\r
                     EdkLogger.debug(\r
                                 EdkLogger.DEBUG_0,\r
                                 "The file [%s] must be built by tools of [%s], " \\r
-                                "but current toolchain family is [%s]" \\r
-                                    % (str(F), F.ToolChainFamily, self.ToolChainFamily))\r
+                                "but current toolchain family is [%s], buildrule family is [%s]" \\r
+                                    % (str(F), F.ToolChainFamily, self.ToolChainFamily, self.BuildRuleFamily))\r
                     continue\r
 \r
                 # add the file path into search path list for file including\r
@@ -3741,7 +3956,13 @@ class ModuleAutoGen(AutoGen):
 \r
     ## Create AsBuilt INF file the module\r
     #\r
-    def CreateAsBuiltInf(self):\r
+    def CreateAsBuiltInf(self, IsOnlyCopy = False):\r
+        self.OutputFile = []\r
+        if IsOnlyCopy:\r
+            if GlobalData.gBinCacheDest:\r
+                self.CopyModuleToCache()\r
+                return\r
+\r
         if self.IsAsBuiltInfCreated:\r
             return\r
             \r
@@ -3855,6 +4076,11 @@ class ModuleAutoGen(AutoGen):
           'libraryclasses_item'               : []\r
         }\r
 \r
+        if 'MODULE_UNI_FILE' in MDefs:\r
+            UNIFile = os.path.join(self.MetaFile.Dir, MDefs['MODULE_UNI_FILE'])\r
+            if os.path.isfile(UNIFile):\r
+                shutil.copy2(UNIFile, self.OutputDir)\r
+\r
         if self.AutoGenVersion > int(gInfSpecVersion, 0):\r
             AsBuiltInfDict['module_inf_version'] = '0x%08x' % self.AutoGenVersion\r
         else:\r
@@ -3869,9 +4095,10 @@ class ModuleAutoGen(AutoGen):
             AsBuiltInfDict['module_pi_specification_version'] += [self.Specification['PI_SPECIFICATION_VERSION']]\r
 \r
         OutputDir = self.OutputDir.replace('\\', '/').strip('/')\r
-\r
         for Item in self.CodaTargetList:\r
             File = Item.Target.Path.replace('\\', '/').strip('/').replace(OutputDir, '').strip('/')\r
+            if File not in self.OutputFile:\r
+                self.OutputFile.append(File)\r
             if Item.Target.Ext.lower() == '.aml':\r
                 AsBuiltInfDict['binary_item'] += ['ASL|' + File]\r
             elif Item.Target.Ext.lower() == '.acpi':\r
@@ -3881,6 +4108,8 @@ class ModuleAutoGen(AutoGen):
             else:\r
                 AsBuiltInfDict['binary_item'] += ['BIN|' + File]\r
         if self.DepexGenerated:\r
+            if self.Name + '.depex' not in self.OutputFile:\r
+                self.OutputFile.append(self.Name + '.depex')\r
             if self.ModuleType in ['PEIM']:\r
                 AsBuiltInfDict['binary_item'] += ['PEI_DEPEX|' + self.Name + '.depex']\r
             if self.ModuleType in ['DXE_DRIVER', 'DXE_RUNTIME_DRIVER', 'DXE_SAL_DRIVER', 'UEFI_DRIVER']:\r
@@ -3891,11 +4120,15 @@ class ModuleAutoGen(AutoGen):
         Bin = self._GenOffsetBin()\r
         if Bin:\r
             AsBuiltInfDict['binary_item'] += ['BIN|%s' % Bin]\r
+            if Bin not in self.OutputFile:\r
+                self.OutputFile.append(Bin)\r
 \r
         for Root, Dirs, Files in os.walk(OutputDir):\r
             for File in Files:\r
                 if File.lower().endswith('.pdb'):\r
                     AsBuiltInfDict['binary_item'] += ['DISPOSABLE|' + File]\r
+                    if File not in self.OutputFile:\r
+                        self.OutputFile.append(File)\r
         HeaderComments = self.Module.HeaderComments\r
         StartPos = 0\r
         for Index in range(len(HeaderComments)):\r
@@ -3944,7 +4177,7 @@ class ModuleAutoGen(AutoGen):
                     elif BoolValue == 'FALSE':\r
                         Pcd.DefaultValue = '0'\r
 \r
-                if Pcd.DatumType != 'VOID*':\r
+                if Pcd.DatumType in ['UINT8', 'UINT16', 'UINT32', 'UINT64', 'BOOLEAN']:\r
                     HexFormat = '0x%02x'\r
                     if Pcd.DatumType == 'UINT16':\r
                         HexFormat = '0x%04x'\r
@@ -4054,6 +4287,16 @@ class ModuleAutoGen(AutoGen):
         for Library in self.LibraryAutoGenList:\r
             AsBuiltInfDict['libraryclasses_item'] += [Library.MetaFile.File.replace('\\', '/')]\r
         \r
+        # Generate the UserExtensions TianoCore section.\r
+        # All TianoCore user extension files are copied.\r
+        UserExtStr = ''\r
+        for TianoCore in self._GetTianoCoreUserExtensionList():\r
+            UserExtStr += '\n'.join(TianoCore)\r
+            ExtensionFile = os.path.join(self.MetaFile.Dir, TianoCore[1])\r
+            if os.path.isfile(ExtensionFile):\r
+                shutil.copy2(ExtensionFile, self.OutputDir)\r
+        AsBuiltInfDict['userextension_tianocore_item'] = UserExtStr\r
+\r
         # Generated depex expression section in comments.\r
         AsBuiltInfDict['depexsection_item'] = ''\r
         DepexExpresion = self._GetDepexExpresionString()\r
@@ -4066,26 +4309,72 @@ class ModuleAutoGen(AutoGen):
         SaveFileOnChange(os.path.join(self.OutputDir, self.Name + '.inf'), str(AsBuiltInf), False)\r
         \r
         self.IsAsBuiltInfCreated = True\r
-        \r
+        if GlobalData.gBinCacheDest:\r
+            self.CopyModuleToCache()\r
+\r
+    def CopyModuleToCache(self):\r
+        FileDir = path.join(GlobalData.gBinCacheDest, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
+        CreateDirectory (FileDir)\r
+        HashFile = path.join(self.BuildDir, self.Name + '.hash')\r
+        ModuleFile = path.join(self.OutputDir, self.Name + '.inf')\r
+        if os.path.exists(HashFile):\r
+            shutil.copy2(HashFile, FileDir)\r
+        if os.path.exists(ModuleFile):\r
+            shutil.copy2(ModuleFile, FileDir)\r
+        if not self.OutputFile:\r
+            Ma = self.Workspace.BuildDatabase[PathClass(ModuleFile), self.Arch, self.BuildTarget, self.ToolChain]\r
+            self.OutputFile = Ma.Binaries\r
+        if self.OutputFile:\r
+            for File in self.OutputFile:\r
+                File = str(File)\r
+                if not os.path.isabs(File):\r
+                    File = os.path.join(self.OutputDir, File)\r
+                if os.path.exists(File):\r
+                    shutil.copy2(File, FileDir)\r
+\r
+    def AttemptModuleCacheCopy(self):\r
+        if self.IsBinaryModule:\r
+            return False\r
+        FileDir = path.join(GlobalData.gBinCacheSource, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
+        HashFile = path.join(FileDir, self.Name + '.hash')\r
+        if os.path.exists(HashFile):\r
+            f = open(HashFile, 'r')\r
+            CacheHash = f.read()\r
+            f.close()\r
+            if GlobalData.gModuleHash[self.Arch][self.Name]:\r
+                if CacheHash == GlobalData.gModuleHash[self.Arch][self.Name]:\r
+                    for root, dir, files in os.walk(FileDir):\r
+                        for f in files:\r
+                            if self.Name + '.hash' in f:\r
+                                shutil.copy2(HashFile, self.BuildDir)\r
+                            else:\r
+                                File = path.join(root, f)\r
+                                shutil.copy2(File, self.OutputDir)\r
+                    if self.Name == "PcdPeim" or self.Name == "PcdDxe":\r
+                        CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r
+                    return True\r
+        return False\r
+\r
     ## Create makefile for the module and its dependent libraries\r
     #\r
     #   @param      CreateLibraryMakeFile   Flag indicating if or not the makefiles of\r
     #                                       dependent libraries will be created\r
     #\r
-    def CreateMakeFile(self, CreateLibraryMakeFile=True):\r
+    def CreateMakeFile(self, CreateLibraryMakeFile=True, GenFfsList = []):\r
         # Ignore generating makefile when it is a binary module\r
         if self.IsBinaryModule:\r
             return\r
 \r
         if self.IsMakeFileCreated:\r
             return\r
-        if self.CanSkip():\r
-            return\r
-\r
+        self.GenFfsList = GenFfsList\r
         if not self.IsLibrary and CreateLibraryMakeFile:\r
             for LibraryAutoGen in self.LibraryAutoGenList:\r
                 LibraryAutoGen.CreateMakeFile()\r
 \r
+        if self.CanSkip():\r
+            return\r
+\r
         if len(self.CustomMakefile) == 0:\r
             Makefile = GenMake.ModuleMakefile(self)\r
         else:\r
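
CopyModuleToCache and AttemptModuleCacheCopy above implement the binary cache: with a cache destination configured, a module's .hash file, as-built INF and output binaries are stored under <cache>/<arch>/<source dir>/<INF base name>; with a cache source configured, a module whose freshly computed hash matches the cached .hash file is restored from that directory instead of being rebuilt. A sketch of the restore-side check (paths and the helper name are illustrative):

    import os
    import shutil

    def try_restore_from_cache(cache_root, arch, source_dir, inf_base, name,
                               current_hash, build_dir, output_dir):
        # Restore a previously built module when its cached hash matches the hash
        # computed for the current sources; return True on a cache hit.
        cache_dir = os.path.join(cache_root, arch, source_dir, inf_base)
        hash_file = os.path.join(cache_dir, name + '.hash')
        if not os.path.exists(hash_file):
            return False
        with open(hash_file, 'r') as f:
            if f.read() != current_hash:
                return False
        for root, _dirs, files in os.walk(cache_dir):
            for fname in files:
                src = os.path.join(root, fname)
                # the hash record goes to the build dir, binaries to the output dir
                dst = build_dir if fname == name + '.hash' else output_dir
                shutil.copy2(src, dst)
        return True
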
@@ -4113,8 +4402,6 @@ class ModuleAutoGen(AutoGen):
     def CreateCodeFile(self, CreateLibraryCodeFile=True):\r
         if self.IsCodeFileCreated:\r
             return\r
-        if self.CanSkip():\r
-            return\r
 \r
         # Need to generate PcdDatabase even PcdDriver is binarymodule\r
         if self.IsBinaryModule and self.PcdIsDriver != '':\r
@@ -4129,6 +4416,9 @@ class ModuleAutoGen(AutoGen):
             for LibraryAutoGen in self.LibraryAutoGenList:\r
                 LibraryAutoGen.CreateCodeFile()\r
 \r
+        if self.CanSkip():\r
+            return\r
+\r
         AutoGenList = []\r
         IgoredAutoGenList = []\r
 \r
@@ -4194,8 +4484,54 @@ class ModuleAutoGen(AutoGen):
                         self._ApplyBuildRule(Lib.Target, TAB_UNKNOWN_FILE)\r
         return self._LibraryAutoGenList\r
 \r
+    def GenModuleHash(self):\r
+        if self.Arch not in GlobalData.gModuleHash:\r
+            GlobalData.gModuleHash[self.Arch] = {}\r
+        m = hashlib.md5()\r
+        # Add Platform level hash\r
+        m.update(GlobalData.gPlatformHash)\r
+        # Add Package level hash\r
+        if self.DependentPackageList:\r
+            for Pkg in self.DependentPackageList:\r
+                if Pkg.PackageName in GlobalData.gPackageHash[self.Arch]:\r
+                    m.update(GlobalData.gPackageHash[self.Arch][Pkg.PackageName])\r
+\r
+        # Add Library hash\r
+        if self.LibraryAutoGenList:\r
+            for Lib in self.LibraryAutoGenList:\r
+                if Lib.Name not in GlobalData.gModuleHash[self.Arch]:\r
+                    Lib.GenModuleHash()\r
+                m.update(GlobalData.gModuleHash[self.Arch][Lib.Name])\r
+\r
+        # Add Module self\r
+        f = open(str(self.MetaFile), 'r')\r
+        Content = f.read()\r
+        f.close()\r
+        m.update(Content)\r
+        # Add Module's source files\r
+        if self.SourceFileList:\r
+            for File in self.SourceFileList:\r
+                f = open(str(File), 'r')\r
+                Content = f.read()\r
+                f.close()\r
+                m.update(Content)\r
+\r
+        ModuleHashFile = path.join(self.BuildDir, self.Name + ".hash")\r
+        if self.Name not in GlobalData.gModuleHash[self.Arch]:\r
+            GlobalData.gModuleHash[self.Arch][self.Name] = m.hexdigest()\r
+        if GlobalData.gBinCacheSource:\r
+            CacheValid = self.AttemptModuleCacheCopy()\r
+            if CacheValid:\r
+                return False\r
+        return SaveFileOnChange(ModuleHashFile, m.hexdigest(), True)\r
+\r
     ## Decide whether we can skip the ModuleAutoGen process\r
-    #  If any source file is newer than the modeule than we cannot skip\r
+    def CanSkipbyHash(self):\r
+        if GlobalData.gUseHashCache:\r
+            return not self.GenModuleHash()\r
+\r
+    ## Decide whether we can skip the ModuleAutoGen process\r
+    #  If any source file is newer than the module then we cannot skip\r
     #\r
     def CanSkip(self):\r
         if not os.path.exists(self.GetTimeStampPath()):\r
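
GenModuleHash above chains the hashes: the module hash folds in the platform hash, the hashes of the dependent packages, the (recursively computed) hashes of linked libraries, the module INF itself and every source file, and CanSkipbyHash treats an unchanged hash file as permission to skip, because SaveFileOnChange reports False when nothing was written. A simplified sketch of that skip decision (save_file_on_change here is a stand-in for the BaseTools helper):

    import os

    def save_file_on_change(path, content):
        # Write only when the content differs; report whether a write happened.
        if os.path.exists(path):
            with open(path, 'r') as f:
                if f.read() == content:
                    return False
        with open(path, 'w') as f:
            f.write(content)
        return True

    def can_skip_by_hash(hash_file_path, new_hash, use_hash_cache=True):
        # Skip when hashing is enabled and the recorded hash already equals the
        # freshly computed one (nothing changed, so no write was needed).
        return use_hash_cache and not save_file_on_change(hash_file_path, new_hash)
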
@@ -4210,6 +4546,8 @@ class ModuleAutoGen(AutoGen):
         with open(self.GetTimeStampPath(),'r') as f:\r
             for source in f:\r
                 source = source.rstrip('\n')\r
+                if not os.path.exists(source):\r
+                    return False\r
                 if source not in ModuleAutoGen.TimeDict :\r
                     ModuleAutoGen.TimeDict[source] = os.stat(source)[8]\r
                 if ModuleAutoGen.TimeDict[source] > DstTimeStamp:\r
@@ -4257,6 +4595,7 @@ class ModuleAutoGen(AutoGen):
     IsBinaryModule  = property(_IsBinaryModule)\r
     BuildDir        = property(_GetBuildDir)\r
     OutputDir       = property(_GetOutputDir)\r
+    FfsOutputDir    = property(_GetFfsOutputDir)\r
     DebugDir        = property(_GetDebugDir)\r
     MakeFileDir     = property(_GetMakeFileDir)\r
     CustomMakefile  = property(_GetCustomMakefile)\r