]> git.proxmox.com Git - mirror_edk2.git/blobdiff - BaseTools/Source/Python/AutoGen/ModuleAutoGen.py
UefiCpuPkg: Move AsmRelocateApLoopStart from Mpfuncs.nasm to AmdSev.nasm
[mirror_edk2.git] / BaseTools / Source / Python / AutoGen / ModuleAutoGen.py
index 383078c376c95f83ce348f36e870a991a035977d..d05410b32966bfbeaf3e514f1a68ea72be90e873 100755 (executable)
@@ -6,7 +6,7 @@
 #\r
 from __future__ import absolute_import\r
 from AutoGen.AutoGen import AutoGen\r
-from Common.LongFilePathSupport import CopyLongFilePath\r
+from Common.LongFilePathSupport import LongFilePath, CopyLongFilePath\r
 from Common.BuildToolError import *\r
 from Common.DataType import *\r
 from Common.Misc import *\r
@@ -26,13 +26,13 @@ from Workspace.MetaFileCommentParser import UsageList
 from .GenPcdDb import CreatePcdDatabaseCode\r
 from Common.caching import cached_class_function\r
 from AutoGen.ModuleAutoGenHelper import PlatformInfo,WorkSpaceInfo\r
-from AutoGen.CacheIR import ModuleBuildCacheIR\r
 import json\r
+import tempfile\r
 \r
 ## Mapping Makefile type\r
 gMakeTypeMap = {TAB_COMPILER_MSFT:"nmake", "GCC":"gmake"}\r
 #\r
-# Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT\r
+# Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC\r
 # is the former use /I , the Latter used -I to specify include directories\r
 #\r
 gBuildOptIncludePatternMsft = re.compile(r"(?:.*?)/I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)\r
@@ -254,7 +254,6 @@ class ModuleAutoGen(AutoGen):
         self.AutoGenDepSet = set()\r
         self.ReferenceModules = []\r
         self.ConstPcd                  = {}\r
-        self.Makefile         = None\r
         self.FileDependCache  = {}\r
 \r
     def __init_platform_info__(self):\r
@@ -270,7 +269,7 @@ class ModuleAutoGen(AutoGen):
     #\r
     @cached_class_function\r
     def __hash__(self):\r
-        return hash((self.MetaFile, self.Arch))\r
+        return hash((self.MetaFile, self.Arch, self.ToolChain,self.BuildTarget))\r
     def __repr__(self):\r
         return "%s [%s]" % (self.MetaFile, self.Arch)\r
 \r
@@ -461,14 +460,31 @@ class ModuleAutoGen(AutoGen):
     def BuildCommand(self):\r
         return self.PlatformInfo.BuildCommand\r
 \r
-    ## Get object list of all packages the module and its dependent libraries belong to\r
+    ## Get Module package and Platform package\r
+    #\r
+    #   @retval list The list of package object\r
+    #\r
+    @cached_property\r
+    def PackageList(self):\r
+        PkagList = []\r
+        if self.Module.Packages:\r
+            PkagList.extend(self.Module.Packages)\r
+        Platform = self.BuildDatabase[self.PlatformInfo.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]\r
+        for Package in Platform.Packages:\r
+            if Package in PkagList:\r
+                continue\r
+            PkagList.append(Package)\r
+        return PkagList\r
+\r
+    ## Get object list of all packages the module and its dependent libraries belong to and the Platform depends on\r
     #\r
     #   @retval     list    The list of package object\r
     #\r
     @cached_property\r
     def DerivedPackageList(self):\r
         PackageList = []\r
-        for M in [self.Module] + self.DependentLibraryList:\r
+        PackageList.extend(self.PackageList)\r
+        for M in self.DependentLibraryList:\r
             for Package in M.Packages:\r
                 if Package in PackageList:\r
                     continue\r
@@ -668,12 +684,12 @@ class ModuleAutoGen(AutoGen):
     @cached_property\r
     def BuildOptionIncPathList(self):\r
         #\r
-        # Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT\r
+        # Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC\r
         # is the former use /I , the Latter used -I to specify include directories\r
         #\r
         if self.PlatformInfo.ToolChainFamily in (TAB_COMPILER_MSFT):\r
             BuildOptIncludeRegEx = gBuildOptIncludePatternMsft\r
-        elif self.PlatformInfo.ToolChainFamily in ('INTEL', 'GCC', 'RVCT'):\r
+        elif self.PlatformInfo.ToolChainFamily in ('INTEL', 'GCC'):\r
             BuildOptIncludeRegEx = gBuildOptIncludePatternOther\r
         else:\r
             #\r
@@ -688,16 +704,7 @@ class ModuleAutoGen(AutoGen):
             except KeyError:\r
                 FlagOption = ''\r
 \r
-            if self.ToolChainFamily != 'RVCT':\r
-                IncPathList = [NormPath(Path, self.Macros) for Path in BuildOptIncludeRegEx.findall(FlagOption)]\r
-            else:\r
-                #\r
-                # RVCT may specify a list of directory seperated by commas\r
-                #\r
-                IncPathList = []\r
-                for Path in BuildOptIncludeRegEx.findall(FlagOption):\r
-                    PathList = GetSplitList(Path, TAB_COMMA_SPLIT)\r
-                    IncPathList.extend(NormPath(PathEntry, self.Macros) for PathEntry in PathList)\r
+            IncPathList = [NormPath(Path, self.Macros) for Path in BuildOptIncludeRegEx.findall(FlagOption)]\r
 \r
             #\r
             # EDK II modules must not reference header files outside of the packages they depend on or\r
@@ -843,7 +850,8 @@ class ModuleAutoGen(AutoGen):
         SubDirectory = os.path.join(self.OutputDir, File.SubDir)\r
         if not os.path.exists(SubDirectory):\r
             CreateDirectory(SubDirectory)\r
-        LastTarget = None\r
+        TargetList = set()\r
+        FinalTargetName = set()\r
         RuleChain = set()\r
         SourceList = [File]\r
         Index = 0\r
@@ -853,6 +861,9 @@ class ModuleAutoGen(AutoGen):
         self.BuildOption\r
 \r
         while Index < len(SourceList):\r
+            # Reset the FileType if not the first iteration.\r
+            if Index > 0:\r
+                FileType = TAB_UNKNOWN_FILE\r
             Source = SourceList[Index]\r
             Index = Index + 1\r
 \r
@@ -869,29 +880,25 @@ class ModuleAutoGen(AutoGen):
             elif Source.Ext in self.BuildRules:\r
                 RuleObject = self.BuildRules[Source.Ext]\r
             else:\r
-                # stop at no more rules\r
-                if LastTarget:\r
-                    self._FinalBuildTargetList.add(LastTarget)\r
-                break\r
+                # No more rule to apply: Source is a final target.\r
+                FinalTargetName.add(Source)\r
+                continue\r
 \r
             FileType = RuleObject.SourceFileType\r
             self._FileTypes[FileType].add(Source)\r
 \r
             # stop at STATIC_LIBRARY for library\r
             if self.IsLibrary and FileType == TAB_STATIC_LIBRARY:\r
-                if LastTarget:\r
-                    self._FinalBuildTargetList.add(LastTarget)\r
-                break\r
+                FinalTargetName.add(Source)\r
+                continue\r
 \r
             Target = RuleObject.Apply(Source, self.BuildRuleOrder)\r
             if not Target:\r
-                if LastTarget:\r
-                    self._FinalBuildTargetList.add(LastTarget)\r
-                break\r
-            elif not Target.Outputs:\r
-                # Only do build for target with outputs\r
-                self._FinalBuildTargetList.add(Target)\r
+                # No Target: Source is a final target.\r
+                FinalTargetName.add(Source)\r
+                continue\r
 \r
+            TargetList.add(Target)\r
             self._BuildTargets[FileType].add(Target)\r
 \r
             if not Source.IsBinary and Source == File:\r
@@ -899,12 +906,16 @@ class ModuleAutoGen(AutoGen):
 \r
             # to avoid cyclic rule\r
             if FileType in RuleChain:\r
-                break\r
+                EdkLogger.error("build", ERROR_STATEMENT, "Cyclic dependency detected while generating rule for %s" % str(Source))\r
 \r
             RuleChain.add(FileType)\r
             SourceList.extend(Target.Outputs)\r
-            LastTarget = Target\r
-            FileType = TAB_UNKNOWN_FILE\r
+\r
+        # For each final target name, retrieve the corresponding TargetDescBlock instance.\r
+        for FTargetName in FinalTargetName:\r
+            for Target in TargetList:\r
+                if FTargetName == Target.Target:\r
+                    self._FinalBuildTargetList.add(Target)\r
 \r
     @cached_property\r
     def Targets(self):\r
@@ -937,13 +948,13 @@ class ModuleAutoGen(AutoGen):
         self.Targets\r
         return self._FileTypes\r
 \r
-    ## Get the list of package object the module depends on\r
+    ## Get the list of package object the module depends on and the Platform depends on\r
     #\r
     #   @retval     list    The package object list\r
     #\r
     @cached_property\r
     def DependentPackageList(self):\r
-        return self.Module.Packages\r
+        return self.PackageList\r
 \r
     ## Return the list of auto-generated code file\r
     #\r
@@ -1011,7 +1022,7 @@ class ModuleAutoGen(AutoGen):
     @cached_property\r
     def ModulePcdList(self):\r
         # apply PCD settings from platform\r
-        RetVal = self.PlatformInfo.ApplyPcdSetting(self.Module, self.Module.Pcds)\r
+        RetVal = self.PlatformInfo.ApplyPcdSetting(self, self.Module.Pcds)\r
 \r
         return RetVal\r
     @cached_property\r
@@ -1042,7 +1053,7 @@ class ModuleAutoGen(AutoGen):
                     continue\r
                 Pcds.add(Key)\r
                 PcdsInLibrary[Key] = copy.copy(Library.Pcds[Key])\r
-            RetVal.extend(self.PlatformInfo.ApplyPcdSetting(self.Module, PcdsInLibrary, Library=Library))\r
+            RetVal.extend(self.PlatformInfo.ApplyPcdSetting(self, PcdsInLibrary, Library=Library))\r
         return RetVal\r
 \r
     ## Get the GUID value mapping\r
@@ -1100,7 +1111,7 @@ class ModuleAutoGen(AutoGen):
         RetVal.append(self.MetaFile.Dir)\r
         RetVal.append(self.DebugDir)\r
 \r
-        for Package in self.Module.Packages:\r
+        for Package in self.PackageList:\r
             PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)\r
             if PackageDir not in RetVal:\r
                 RetVal.append(PackageDir)\r
@@ -1111,8 +1122,31 @@ class ModuleAutoGen(AutoGen):
             for Inc in IncludesList:\r
                 if Inc not in RetVal:\r
                     RetVal.append(str(Inc))\r
+        RetVal.extend(self.IncPathFromBuildOptions)\r
         return RetVal\r
 \r
+    @cached_property\r
+    def IncPathFromBuildOptions(self):\r
+        IncPathList = []\r
+        for tool in self.BuildOption:\r
+            if 'FLAGS' in self.BuildOption[tool]:\r
+                flags = self.BuildOption[tool]['FLAGS']\r
+                whitespace = False\r
+                for flag in flags.split(" "):\r
+                    flag = flag.strip()\r
+                    if flag.startswith(("/I","-I")):\r
+                        if len(flag)>2:\r
+                            if os.path.exists(flag[2:]):\r
+                                IncPathList.append(flag[2:])\r
+                        else:\r
+                            whitespace = True\r
+                            continue\r
+                    if whitespace and flag:\r
+                        if os.path.exists(flag):\r
+                            IncPathList.append(flag)\r
+                            whitespace = False\r
+        return IncPathList\r
+\r
     @cached_property\r
     def IncludePathLength(self):\r
         return sum(len(inc)+1 for inc in self.IncludePathList)\r
@@ -1124,7 +1158,7 @@ class ModuleAutoGen(AutoGen):
     @cached_property\r
     def PackageIncludePathList(self):\r
         IncludesList = []\r
-        for Package in self.Module.Packages:\r
+        for Package in self.PackageList:\r
             PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)\r
             IncludesList = Package.Includes\r
             if Package._PrivateIncludes:\r
@@ -1262,25 +1296,22 @@ class ModuleAutoGen(AutoGen):
         fStringIO.close ()\r
         fInputfile.close ()\r
         return OutputName\r
+\r
     @cached_property\r
     def OutputFile(self):\r
         retVal = set()\r
-        OutputDir = self.OutputDir.replace('\\', '/').strip('/')\r
-        DebugDir = self.DebugDir.replace('\\', '/').strip('/')\r
-        for Item in self.CodaTargetList:\r
-            File = Item.Target.Path.replace('\\', '/').strip('/').replace(DebugDir, '').replace(OutputDir, '').strip('/')\r
-            retVal.add(File)\r
-        if self.DepexGenerated:\r
-            retVal.add(self.Name + '.depex')\r
 \r
-        Bin = self._GenOffsetBin()\r
-        if Bin:\r
-            retVal.add(Bin)\r
+        for Root, Dirs, Files in os.walk(self.BuildDir):\r
+            for File in Files:\r
+                # Skip intermediate .obj and .debug files; all other files under BuildDir are outputs.\r
+                if not (File.lower().endswith('.obj') or File.lower().endswith('.debug')):\r
+                    NewFile = path.join(Root, File)\r
+                    retVal.add(NewFile)\r
 \r
-        for Root, Dirs, Files in os.walk(OutputDir):\r
+        for Root, Dirs, Files in os.walk(self.FfsOutputDir):\r
             for File in Files:\r
-                if File.lower().endswith('.pdb'):\r
-                    retVal.add(File)\r
+                NewFile = path.join(Root, File)\r
+                retVal.add(NewFile)\r
 \r
         return retVal\r
 \r
@@ -1612,9 +1643,12 @@ class ModuleAutoGen(AutoGen):
 \r
         self.IsAsBuiltInfCreated = True\r
 \r
-    def CacheCopyFile(self, OriginDir, CopyDir, File):\r
-        sub_dir = os.path.relpath(File, CopyDir)\r
-        destination_file = os.path.join(OriginDir, sub_dir)\r
+    def CacheCopyFile(self, DestDir, SourceDir, File):\r
+        if os.path.isdir(File):\r
+            return\r
+\r
+        sub_dir = os.path.relpath(File, SourceDir)\r
+        destination_file = os.path.join(DestDir, sub_dir)\r
         destination_dir = os.path.dirname(destination_file)\r
         CreateDirectory(destination_dir)\r
         try:\r
@@ -1624,110 +1658,73 @@ class ModuleAutoGen(AutoGen):
             return\r
 \r
     def CopyModuleToCache(self):\r
-        self.GenPreMakefileHash(GlobalData.gCacheIR)\r
-        if not (self.MetaFile.Path, self.Arch) in GlobalData.gCacheIR or \\r
-           not GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:\r
-            EdkLogger.quiet("[cache warning]: Cannot generate PreMakefileHash for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
-            return False\r
-\r
-        self.GenMakeHash(GlobalData.gCacheIR)\r
-        if not (self.MetaFile.Path, self.Arch) in GlobalData.gCacheIR or \\r
-           not GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashChain or \\r
-           not GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:\r
-            EdkLogger.quiet("[cache warning]: Cannot generate MakeHashChain for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
-            return False\r
-\r
-        MakeHashStr = str(GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest)\r
-        FileDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName, MakeHashStr)\r
-        FfsDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name, MakeHashStr)\r
-\r
-        CreateDirectory (FileDir)\r
-        self.SaveHashChainFileToCache(GlobalData.gCacheIR)\r
-        ModuleFile = path.join(self.OutputDir, self.Name + '.inf')\r
-        if os.path.exists(ModuleFile):\r
-            CopyFileOnChange(ModuleFile, FileDir)\r
-        if not self.OutputFile:\r
-            Ma = self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]\r
-            self.OutputFile = Ma.Binaries\r
-        for File in self.OutputFile:\r
-            File = str(File)\r
-            if not os.path.isabs(File):\r
-                File = os.path.join(self.OutputDir, File)\r
-            if os.path.exists(File):\r
-                sub_dir = os.path.relpath(File, self.OutputDir)\r
-                destination_file = os.path.join(FileDir, sub_dir)\r
-                destination_dir = os.path.dirname(destination_file)\r
-                CreateDirectory(destination_dir)\r
-                CopyFileOnChange(File, destination_dir)\r
-\r
-    def SaveHashChainFileToCache(self, gDict):\r
-        if not GlobalData.gBinCacheDest:\r
-            return False\r
+        # Find the MakeHashStr and PreMakeHashStr from latest MakeHashFileList\r
+        # and PreMakeHashFileList files\r
+        MakeHashStr = None\r
+        PreMakeHashStr = None\r
+        MakeTimeStamp = 0\r
+        PreMakeTimeStamp = 0\r
+        Files = [f for f in os.listdir(LongFilePath(self.BuildDir)) if path.isfile(LongFilePath(path.join(self.BuildDir, f)))]\r
+        for File in Files:\r
+            if ".MakeHashFileList." in File:\r
+                # Find the latest file by timestamp.\r
+                FileTimeStamp = os.stat(LongFilePath(path.join(self.BuildDir, File)))[8]\r
+                if FileTimeStamp > MakeTimeStamp:\r
+                    MakeTimeStamp = FileTimeStamp\r
+                    MakeHashStr = File.split('.')[-1]\r
+                    if len(MakeHashStr) != 32:\r
+                        EdkLogger.quiet("[cache error]: wrong MakeHashFileList file:%s" % (File))\r
+            if ".PreMakeHashFileList." in File:\r
+                FileTimeStamp = os.stat(LongFilePath(path.join(self.BuildDir, File)))[8]\r
+                if FileTimeStamp > PreMakeTimeStamp:\r
+                    PreMakeTimeStamp = FileTimeStamp\r
+                    PreMakeHashStr = File.split('.')[-1]\r
+                    if len(PreMakeHashStr) != 32:\r
+                        EdkLogger.quiet("[cache error]: wrong PreMakeHashFileList file:%s" % (File))\r
 \r
-        self.GenPreMakefileHash(gDict)\r
-        if not (self.MetaFile.Path, self.Arch) in gDict or \\r
-           not gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:\r
-            EdkLogger.quiet("[cache warning]: Cannot generate PreMakefileHash for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
-            return False\r
+        if not MakeHashStr:\r
+            EdkLogger.quiet("[cache error]: No MakeHashFileList file for module:%s[%s]" % (self.MetaFile.Path, self.Arch))\r
+            return\r
+        if not PreMakeHashStr:\r
+            EdkLogger.quiet("[cache error]: No PreMakeHashFileList file for module:%s[%s]" % (self.MetaFile.Path, self.Arch))\r
+            return\r
 \r
-        self.GenMakeHash(gDict)\r
-        if not (self.MetaFile.Path, self.Arch) in gDict or \\r
-           not gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain or \\r
-           not gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:\r
-            EdkLogger.quiet("[cache warning]: Cannot generate MakeHashChain for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
-            return False\r
+        # Create Cache destination dirs\r
+        FileDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
+        FfsDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)\r
+        CacheFileDir = path.join(FileDir, MakeHashStr)\r
+        CacheFfsDir = path.join(FfsDir, MakeHashStr)\r
+        CreateDirectory (CacheFileDir)\r
+        CreateDirectory (CacheFfsDir)\r
 \r
-        # save the hash chain list as cache file\r
-        MakeHashStr = str(GlobalData.gCacheIR[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest)\r
-        CacheDestDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
-        CacheHashDestDir = path.join(CacheDestDir, MakeHashStr)\r
-        ModuleHashPair = path.join(CacheDestDir, self.Name + ".ModuleHashPair")\r
-        MakeHashChain = path.join(CacheHashDestDir, self.Name + ".MakeHashChain")\r
-        ModuleFilesChain = path.join(CacheHashDestDir, self.Name + ".ModuleFilesChain")\r
-\r
-        # save the HashChainDict as json file\r
-        CreateDirectory (CacheDestDir)\r
-        CreateDirectory (CacheHashDestDir)\r
-        try:\r
-            ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]\r
-            if os.path.exists(ModuleHashPair):\r
-                f = open(ModuleHashPair, 'r')\r
+        # Create the ModuleHashPair file so multiple cache versions can coexist.\r
+        ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")\r
+        ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]\r
+        if os.path.exists(ModuleHashPair):\r
+            with open(ModuleHashPair, 'r') as f:\r
                 ModuleHashPairList = json.load(f)\r
-                f.close()\r
-            PreMakeHash = gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest\r
-            MakeHash = gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest\r
-            ModuleHashPairList.append((PreMakeHash, MakeHash))\r
-            ModuleHashPairList = list(set(map(tuple, ModuleHashPairList)))\r
+        if not (PreMakeHashStr, MakeHashStr) in set(map(tuple, ModuleHashPairList)):\r
+            ModuleHashPairList.insert(0, (PreMakeHashStr, MakeHashStr))\r
             with open(ModuleHashPair, 'w') as f:\r
                 json.dump(ModuleHashPairList, f, indent=2)\r
-        except:\r
-            EdkLogger.quiet("[cache warning]: fail to save ModuleHashPair file in cache: %s" % ModuleHashPair)\r
-            return False\r
-\r
-        try:\r
-            with open(MakeHashChain, 'w') as f:\r
-                json.dump(gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain, f, indent=2)\r
-        except:\r
-            EdkLogger.quiet("[cache warning]: fail to save MakeHashChain file in cache: %s" % MakeHashChain)\r
-            return False\r
-\r
-        try:\r
-            with open(ModuleFilesChain, 'w') as f:\r
-                json.dump(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain, f, indent=2)\r
-        except:\r
-            EdkLogger.quiet("[cache warning]: fail to save ModuleFilesChain file in cache: %s" % ModuleFilesChain)\r
-            return False\r
-\r
-        # save the autogenfile and makefile for debug usage\r
-        CacheDebugDir = path.join(CacheHashDestDir, "CacheDebug")\r
-        CreateDirectory (CacheDebugDir)\r
-        CopyFileOnChange(gDict[(self.MetaFile.Path, self.Arch)].MakefilePath, CacheDebugDir)\r
-        if gDict[(self.MetaFile.Path, self.Arch)].AutoGenFileList:\r
-            for File in gDict[(self.MetaFile.Path, self.Arch)].AutoGenFileList:\r
-                CopyFileOnChange(str(File), CacheDebugDir)\r
-\r
-        return True\r
 \r
+        # Copy files to Cache destination dirs\r
+        if not self.OutputFile:\r
+            Ma = self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]\r
+            self.OutputFile = Ma.Binaries\r
+        for File in self.OutputFile:\r
+            if File.startswith(os.path.abspath(self.FfsOutputDir)+os.sep):\r
+                self.CacheCopyFile(CacheFfsDir, self.FfsOutputDir, File)\r
+            else:\r
+                if  self.Name + ".autogen.hash." in File or \\r
+                    self.Name + ".autogen.hashchain." in File or \\r
+                    self.Name + ".hash." in File or \\r
+                    self.Name + ".hashchain." in File or \\r
+                    self.Name + ".PreMakeHashFileList." in File or \\r
+                    self.Name + ".MakeHashFileList." in File:\r
+                    self.CacheCopyFile(FileDir, self.BuildDir, File)\r
+                else:\r
+                    self.CacheCopyFile(CacheFileDir, self.BuildDir, File)\r
     ## Create makefile for the module and its dependent libraries\r
     #\r
     #   @param      CreateLibraryMakeFile   Flag indicating if or not the makefiles of\r
@@ -1735,10 +1732,6 @@ class ModuleAutoGen(AutoGen):
     #\r
     @cached_class_function\r
     def CreateMakeFile(self, CreateLibraryMakeFile=True, GenFfsList = []):\r
-        gDict = GlobalData.gCacheIR\r
-        if (self.MetaFile.Path, self.Arch) in gDict and \\r
-          gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:\r
-            return\r
 \r
         # nest this function inside it's only caller.\r
         def CreateTimeStamp():\r
@@ -1755,10 +1748,8 @@ class ModuleAutoGen(AutoGen):
 \r
             if os.path.exists (self.TimeStampPath):\r
                 os.remove (self.TimeStampPath)\r
-            with open(self.TimeStampPath, 'w+') as fd:\r
-                for f in FileSet:\r
-                    fd.write(f)\r
-                    fd.write("\n")\r
+\r
+            SaveFileOnChange(self.TimeStampPath, "\n".join(FileSet), False)\r
 \r
         # Ignore generating makefile when it is a binary module\r
         if self.IsBinaryModule:\r
@@ -1790,20 +1781,8 @@ class ModuleAutoGen(AutoGen):
         MakefileType = Makefile._FileType\r
         MakefileName = Makefile._FILE_NAME_[MakefileType]\r
         MakefilePath = os.path.join(self.MakeFileDir, MakefileName)\r
-\r
-        MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)\r
-        MewIR.MakefilePath = MakefilePath\r
-        MewIR.DependencyHeaderFileSet = Makefile.DependencyHeaderFileSet\r
-        MewIR.CreateMakeFileDone = True\r
-        with GlobalData.file_lock:\r
-            try:\r
-                IR = gDict[(self.MetaFile.Path, self.Arch)]\r
-                IR.MakefilePath = MakefilePath\r
-                IR.DependencyHeaderFileSet = Makefile.DependencyHeaderFileSet\r
-                IR.CreateMakeFileDone = True\r
-                gDict[(self.MetaFile.Path, self.Arch)] = IR\r
-            except:\r
-                gDict[(self.MetaFile.Path, self.Arch)] = MewIR\r
+        FilePath = path.join(self.BuildDir, self.Name + ".makefile")\r
+        SaveFileOnChange(FilePath, MakefilePath, False)\r
 \r
     def CopyBinaryFiles(self):\r
         for File in self.Module.Binaries:\r
@@ -1816,10 +1795,6 @@ class ModuleAutoGen(AutoGen):
     #                                       dependent libraries will be created\r
     #\r
     def CreateCodeFile(self, CreateLibraryCodeFile=True):\r
-        gDict = GlobalData.gCacheIR\r
-        if (self.MetaFile.Path, self.Arch) in gDict and \\r
-          gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone:\r
-            return\r
 \r
         if self.IsCodeFileCreated:\r
             return\r
@@ -1837,10 +1812,7 @@ class ModuleAutoGen(AutoGen):
             for LibraryAutoGen in self.LibraryAutoGenList:\r
                 LibraryAutoGen.CreateCodeFile()\r
 \r
-        # CanSkip uses timestamps to determine build skipping\r
-        if self.CanSkip():\r
-            return\r
-\r
+        self.LibraryAutoGenList\r
         AutoGenList = []\r
         IgoredAutoGenList = []\r
 \r
@@ -1878,15 +1850,6 @@ class ModuleAutoGen(AutoGen):
                             (" ".join(AutoGenList), " ".join(IgoredAutoGenList), self.Name, self.Arch))\r
 \r
         self.IsCodeFileCreated = True\r
-        MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)\r
-        MewIR.CreateCodeFileDone = True\r
-        with GlobalData.file_lock:\r
-            try:\r
-                IR = gDict[(self.MetaFile.Path, self.Arch)]\r
-                IR.CreateCodeFileDone = True\r
-                gDict[(self.MetaFile.Path, self.Arch)] = IR\r
-            except:\r
-                gDict[(self.MetaFile.Path, self.Arch)] = MewIR\r
 \r
         return AutoGenList\r
 \r
@@ -1911,503 +1874,539 @@ class ModuleAutoGen(AutoGen):
                     self._ApplyBuildRule(Lib.Target, TAB_UNKNOWN_FILE)\r
         return RetVal\r
 \r
-    def GenModuleHash(self):\r
-        # Initialize a dictionary for each arch type\r
-        if self.Arch not in GlobalData.gModuleHash:\r
-            GlobalData.gModuleHash[self.Arch] = {}\r
+    def GenCMakeHash(self):\r
+        # GenCMakeHash can only be called in --binary-destination\r
+        # Never called in multiprocessing and always directly save result in main process,\r
+        # so no need remote dict to share the gCMakeHashFile result with main process\r
 \r
-        # Early exit if module or library has been hashed and is in memory\r
-        if self.Name in GlobalData.gModuleHash[self.Arch]:\r
-            return GlobalData.gModuleHash[self.Arch][self.Name].encode('utf-8')\r
+        DependencyFileSet = set()\r
+        # Add AutoGen files\r
+        if self.AutoGenFileList:\r
+            for File in set(self.AutoGenFileList):\r
+                DependencyFileSet.add(File)\r
 \r
+        # Add Makefile\r
+        abspath = path.join(self.BuildDir, self.Name + ".makefile")\r
+        try:\r
+            with open(LongFilePath(abspath),"r") as fd:\r
+                lines = fd.readlines()\r
+        except Exception as e:\r
+            EdkLogger.error("build",FILE_NOT_FOUND, "%s doesn't exist" % abspath, ExtraData=str(e), RaiseError=False)\r
+        if lines:\r
+            DependencyFileSet.update(lines)\r
+\r
+        # Caculate all above dependency files hash\r
         # Initialze hash object\r
+        FileList = []\r
         m = hashlib.md5()\r
-\r
-        # Add Platform level hash\r
-        m.update(GlobalData.gPlatformHash.encode('utf-8'))\r
-\r
-        # Add Package level hash\r
-        if self.DependentPackageList:\r
-            for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):\r
-                if Pkg.PackageName in GlobalData.gPackageHash:\r
-                    m.update(GlobalData.gPackageHash[Pkg.PackageName].encode('utf-8'))\r
-\r
-        # Add Library hash\r
-        if self.LibraryAutoGenList:\r
-            for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):\r
-                if Lib.Name not in GlobalData.gModuleHash[self.Arch]:\r
-                    Lib.GenModuleHash()\r
-                m.update(GlobalData.gModuleHash[self.Arch][Lib.Name].encode('utf-8'))\r
-\r
-        # Add Module self\r
-        f = open(str(self.MetaFile), 'rb')\r
-        Content = f.read()\r
-        f.close()\r
-        m.update(Content)\r
-\r
-        # Add Module's source files\r
-        if self.SourceFileList:\r
-            for File in sorted(self.SourceFileList, key=lambda x: str(x)):\r
-                f = open(str(File), 'rb')\r
+        for File in sorted(DependencyFileSet, key=lambda x: str(x)):\r
+            if not path.exists(LongFilePath(str(File))):\r
+                EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))\r
+                continue\r
+            with open(LongFilePath(str(File)), 'rb') as f:\r
                 Content = f.read()\r
-                f.close()\r
-                m.update(Content)\r
-\r
-        GlobalData.gModuleHash[self.Arch][self.Name] = m.hexdigest()\r
+            m.update(Content)\r
+            FileList.append((str(File), hashlib.md5(Content).hexdigest()))\r
 \r
-        return GlobalData.gModuleHash[self.Arch][self.Name].encode('utf-8')\r
+        HashChainFile = path.join(self.BuildDir, self.Name + ".autogen.hashchain." + m.hexdigest())\r
+        GlobalData.gCMakeHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile\r
+        try:\r
+            with open(LongFilePath(HashChainFile), 'w') as f:\r
+                json.dump(FileList, f, indent=2)\r
+        except:\r
+            EdkLogger.quiet("[cache warning]: fail to save hashchain file:%s" % HashChainFile)\r
+            return False\r
 \r
-    def GenModuleFilesHash(self, gDict):\r
-        # Early exit if module or library has been hashed and is in memory\r
-        if (self.MetaFile.Path, self.Arch) in gDict:\r
-            if gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain:\r
-                return gDict[(self.MetaFile.Path, self.Arch)]\r
+    def GenModuleHash(self):\r
+        # GenModuleHash only called after autogen phase\r
+        # Never called in multiprocessing and always directly save result in main process,\r
+        # so no need remote dict to share the gModuleHashFile result with main process\r
+        #\r
+        # GenPreMakefileHashList consume no dict.\r
+        # GenPreMakefileHashList produce local gModuleHashFile dict.\r
 \r
         DependencyFileSet = set()\r
         # Add Module Meta file\r
-        DependencyFileSet.add(self.MetaFile)\r
+        DependencyFileSet.add(self.MetaFile.Path)\r
 \r
         # Add Module's source files\r
         if self.SourceFileList:\r
             for File in set(self.SourceFileList):\r
-                DependencyFileSet.add(File)\r
+                DependencyFileSet.add(File.Path)\r
 \r
         # Add modules's include header files\r
-        # Search dependency file list for each source file\r
-        SourceFileList = []\r
-        OutPutFileList = []\r
-        for Target in self.IntroTargetList:\r
-            SourceFileList.extend(Target.Inputs)\r
-            OutPutFileList.extend(Target.Outputs)\r
-        if OutPutFileList:\r
-            for Item in OutPutFileList:\r
-                if Item in SourceFileList:\r
-                    SourceFileList.remove(Item)\r
-        SearchList = []\r
-        for file_path in self.IncludePathList + self.BuildOptionIncPathList:\r
-            # skip the folders in platform BuildDir which are not been generated yet\r
-            if file_path.startswith(os.path.abspath(self.PlatformInfo.BuildDir)+os.sep):\r
-                continue\r
-            SearchList.append(file_path)\r
-        FileDependencyDict = {}\r
-        ForceIncludedFile = []\r
-        for F in SourceFileList:\r
-            # skip the files which are not been generated yet, because\r
-            # the SourceFileList usually contains intermediate build files, e.g. AutoGen.c\r
-            if not os.path.exists(F.Path):\r
-                continue\r
-            FileDependencyDict[F] = GenMake.GetDependencyList(self, self.FileDependCache, F, ForceIncludedFile, SearchList)\r
+        # Directly use the deps.txt file in the module BuildDir\r
+        abspath = path.join(self.BuildDir, "deps.txt")\r
+        rt = None\r
+        try:\r
+            with open(LongFilePath(abspath),"r") as fd:\r
+                lines = fd.readlines()\r
+                if lines:\r
+                    rt = set([item.lstrip().strip("\n") for item in lines if item.strip("\n").endswith(".h")])\r
+        except Exception as e:\r
+            EdkLogger.error("build",FILE_NOT_FOUND, "%s doesn't exist" % abspath, ExtraData=str(e), RaiseError=False)\r
+\r
+        if rt:\r
+            DependencyFileSet.update(rt)\r
 \r
-        if FileDependencyDict:\r
-            for Dependency in FileDependencyDict.values():\r
-                DependencyFileSet.update(set(Dependency))\r
 \r
         # Caculate all above dependency files hash\r
         # Initialze hash object\r
         FileList = []\r
         m = hashlib.md5()\r
+        BuildDirStr = path.abspath(self.BuildDir).lower()\r
         for File in sorted(DependencyFileSet, key=lambda x: str(x)):\r
-            if not os.path.exists(str(File)):\r
+            # Skip the AutoGen files in BuildDir which already been\r
+            # included in .autogen.hash. file\r
+            if BuildDirStr in path.abspath(File).lower():\r
+                continue\r
+            if not path.exists(LongFilePath(File)):\r
                 EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))\r
                 continue\r
-            f = open(str(File), 'rb')\r
-            Content = f.read()\r
-            f.close()\r
+            with open(LongFilePath(File), 'rb') as f:\r
+                Content = f.read()\r
             m.update(Content)\r
-            FileList.append((str(File), hashlib.md5(Content).hexdigest()))\r
-\r
-\r
-        MewIR = ModuleBuildCacheIR(self.MetaFile.Path, self.Arch)\r
-        MewIR.ModuleFilesHashDigest = m.digest()\r
-        MewIR.ModuleFilesHashHexDigest = m.hexdigest()\r
-        MewIR.ModuleFilesChain = FileList\r
-        with GlobalData.file_lock:\r
-            try:\r
-                IR = gDict[(self.MetaFile.Path, self.Arch)]\r
-                IR.ModuleFilesHashDigest = m.digest()\r
-                IR.ModuleFilesHashHexDigest = m.hexdigest()\r
-                IR.ModuleFilesChain = FileList\r
-                gDict[(self.MetaFile.Path, self.Arch)] = IR\r
-            except:\r
-                gDict[(self.MetaFile.Path, self.Arch)] = MewIR\r
+            FileList.append((File, hashlib.md5(Content).hexdigest()))\r
 \r
-        return gDict[(self.MetaFile.Path, self.Arch)]\r
+        HashChainFile = path.join(self.BuildDir, self.Name + ".hashchain." + m.hexdigest())\r
+        GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile\r
+        try:\r
+            with open(LongFilePath(HashChainFile), 'w') as f:\r
+                json.dump(FileList, f, indent=2)\r
+        except:\r
+            EdkLogger.quiet("[cache warning]: fail to save hashchain file:%s" % HashChainFile)\r
+            return False\r
 \r
-    def GenPreMakefileHash(self, gDict):\r
-        # Early exit if module or library has been hashed and is in memory\r
-        if (self.MetaFile.Path, self.Arch) in gDict and \\r
-          gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:\r
-            return gDict[(self.MetaFile.Path, self.Arch)]\r
+    def GenPreMakefileHashList(self):\r
+        # GenPreMakefileHashList consume below dicts:\r
+        #     gPlatformHashFile\r
+        #     gPackageHashFile\r
+        #     gModuleHashFile\r
+        # GenPreMakefileHashList produce no dict.\r
+        # gModuleHashFile items might be produced in multiprocessing, so\r
+        # need check gModuleHashFile remote dict\r
 \r
         # skip binary module\r
         if self.IsBinaryModule:\r
             return\r
 \r
-        if not (self.MetaFile.Path, self.Arch) in gDict or \\r
-           not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:\r
-            self.GenModuleFilesHash(gDict)\r
-\r
-        if not (self.MetaFile.Path, self.Arch) in gDict or \\r
-           not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:\r
-           EdkLogger.quiet("[cache warning]: Cannot generate ModuleFilesHashDigest for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
-           return\r
-\r
-        # Initialze hash object\r
+        FileList = []\r
         m = hashlib.md5()\r
-\r
         # Add Platform level hash\r
-        if ('PlatformHash') in gDict:\r
-            m.update(gDict[('PlatformHash')].encode('utf-8'))\r
+        HashFile = GlobalData.gPlatformHashFile\r
+        if path.exists(LongFilePath(HashFile)):\r
+            FileList.append(HashFile)\r
+            m.update(HashFile.encode('utf-8'))\r
         else:\r
-            EdkLogger.quiet("[cache warning]: PlatformHash is missing")\r
+            EdkLogger.quiet("[cache warning]: No Platform HashFile: %s" % HashFile)\r
 \r
         # Add Package level hash\r
         if self.DependentPackageList:\r
             for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):\r
-                if (Pkg.PackageName, 'PackageHash') in gDict:\r
-                    m.update(gDict[(Pkg.PackageName, 'PackageHash')].encode('utf-8'))\r
+                if not (Pkg.PackageName, Pkg.Arch) in GlobalData.gPackageHashFile:\r
+                    EdkLogger.quiet("[cache warning]:No Package %s for module %s[%s]" % (Pkg.PackageName, self.MetaFile.Path, self.Arch))\r
+                    continue\r
+                HashFile = GlobalData.gPackageHashFile[(Pkg.PackageName, Pkg.Arch)]\r
+                if path.exists(LongFilePath(HashFile)):\r
+                    FileList.append(HashFile)\r
+                    m.update(HashFile.encode('utf-8'))\r
                 else:\r
-                    EdkLogger.quiet("[cache warning]: %s PackageHash needed by %s[%s] is missing" %(Pkg.PackageName, self.MetaFile.Name, self.Arch))\r
-\r
-        # Add Library hash\r
-        if self.LibraryAutoGenList:\r
-            for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):\r
-                if not (Lib.MetaFile.Path, Lib.Arch) in gDict or \\r
-                   not gDict[(Lib.MetaFile.Path, Lib.Arch)].ModuleFilesHashDigest:\r
-                    Lib.GenPreMakefileHash(gDict)\r
-                m.update(gDict[(Lib.MetaFile.Path, Lib.Arch)].ModuleFilesHashDigest)\r
+                    EdkLogger.quiet("[cache warning]:No Package HashFile: %s" % HashFile)\r
 \r
         # Add Module self\r
-        m.update(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest)\r
-\r
-        with GlobalData.file_lock:\r
-            IR = gDict[(self.MetaFile.Path, self.Arch)]\r
-            IR.PreMakefileHashHexDigest = m.hexdigest()\r
-            gDict[(self.MetaFile.Path, self.Arch)] = IR\r
-\r
-        return gDict[(self.MetaFile.Path, self.Arch)]\r
-\r
-    def GenMakeHeaderFilesHash(self, gDict):\r
-        # Early exit if module or library has been hashed and is in memory\r
-        if (self.MetaFile.Path, self.Arch) in gDict and \\r
-          gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest:\r
-            return gDict[(self.MetaFile.Path, self.Arch)]\r
-\r
-        # skip binary module\r
-        if self.IsBinaryModule:\r
-            return\r
-\r
-        if not (self.MetaFile.Path, self.Arch) in gDict or \\r
-           not gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone:\r
-            if self.IsLibrary:\r
-                if (self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path) in GlobalData.libConstPcd:\r
-                    self.ConstPcd = GlobalData.libConstPcd[(self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path)]\r
-                if (self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path) in GlobalData.Refes:\r
-                    self.ReferenceModules = GlobalData.Refes[(self.MetaFile.File,self.MetaFile.Root,self.Arch,self.MetaFile.Path)]\r
-            self.CreateCodeFile()\r
-        if not (self.MetaFile.Path, self.Arch) in gDict or \\r
-           not gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:\r
-            self.CreateMakeFile(GenFfsList=GlobalData.FfsCmd.get((self.MetaFile.File, self.Arch),[]))\r
-\r
-        if not (self.MetaFile.Path, self.Arch) in gDict or \\r
-           not gDict[(self.MetaFile.Path, self.Arch)].CreateCodeFileDone or \\r
-           not gDict[(self.MetaFile.Path, self.Arch)].CreateMakeFileDone:\r
-           EdkLogger.quiet("[cache warning]: Cannot create CodeFile or Makefile for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
-           return\r
-\r
-        DependencyFileSet = set()\r
-        # Add Makefile\r
-        if gDict[(self.MetaFile.Path, self.Arch)].MakefilePath:\r
-            DependencyFileSet.add(gDict[(self.MetaFile.Path, self.Arch)].MakefilePath)\r
+        # GenPreMakefileHashList needed in both --binary-destination\r
+        # and --hash. And --hash might save ModuleHashFile in remote dict\r
+        # during multiprocessing.\r
+        if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleHashFile:\r
+            HashFile = GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)]\r
         else:\r
-            EdkLogger.quiet("[cache warning]: makefile is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
-\r
-        # Add header files\r
-        if gDict[(self.MetaFile.Path, self.Arch)].DependencyHeaderFileSet:\r
-            for File in gDict[(self.MetaFile.Path, self.Arch)].DependencyHeaderFileSet:\r
-                DependencyFileSet.add(File)\r
+            EdkLogger.quiet("[cache error]:No ModuleHashFile for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
+        if path.exists(LongFilePath(HashFile)):\r
+            FileList.append(HashFile)\r
+            m.update(HashFile.encode('utf-8'))\r
         else:\r
-            EdkLogger.quiet("[cache warning]: No dependency header found for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
+            EdkLogger.quiet("[cache warning]:No Module HashFile: %s" % HashFile)\r
 \r
-        # Add AutoGen files\r
-        if self.AutoGenFileList:\r
-            for File in set(self.AutoGenFileList):\r
-                DependencyFileSet.add(File)\r
-\r
-        # Caculate all above dependency files hash\r
-        # Initialze hash object\r
-        FileList = []\r
-        m = hashlib.md5()\r
-        for File in sorted(DependencyFileSet, key=lambda x: str(x)):\r
-            if not os.path.exists(str(File)):\r
-                EdkLogger.quiet("[cache warning]: header file: %s doesn't exist for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))\r
-                continue\r
-            f = open(str(File), 'rb')\r
-            Content = f.read()\r
-            f.close()\r
-            m.update(Content)\r
-            FileList.append((str(File), hashlib.md5(Content).hexdigest()))\r
+        # Add Library hash\r
+        if self.LibraryAutoGenList:\r
+            for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.MetaFile.Path):\r
 \r
-        with GlobalData.file_lock:\r
-            IR = gDict[(self.MetaFile.Path, self.Arch)]\r
-            IR.AutoGenFileList = self.AutoGenFileList.keys()\r
-            IR.MakeHeaderFilesHashChain = FileList\r
-            IR.MakeHeaderFilesHashDigest = m.digest()\r
-            gDict[(self.MetaFile.Path, self.Arch)] = IR\r
+                if (Lib.MetaFile.Path, Lib.Arch) in GlobalData.gModuleHashFile:\r
+                    HashFile = GlobalData.gModuleHashFile[(Lib.MetaFile.Path, Lib.Arch)]\r
+                else:\r
+                    EdkLogger.quiet("[cache error]:No ModuleHashFile for lib: %s[%s]" % (Lib.MetaFile.Path, Lib.Arch))\r
+                if path.exists(LongFilePath(HashFile)):\r
+                    FileList.append(HashFile)\r
+                    m.update(HashFile.encode('utf-8'))\r
+                else:\r
+                    EdkLogger.quiet("[cache warning]:No Lib HashFile: %s" % HashFile)\r
 \r
-        return gDict[(self.MetaFile.Path, self.Arch)]\r
+        # Save PreMakeHashFileList\r
+        FilePath = path.join(self.BuildDir, self.Name + ".PreMakeHashFileList." + m.hexdigest())\r
+        try:\r
+            with open(LongFilePath(FilePath), 'w') as f:\r
+                json.dump(FileList, f, indent=0)\r
+        except:\r
+            EdkLogger.quiet("[cache warning]: fail to save PreMake HashFileList: %s" % FilePath)\r
 \r
-    def GenMakeHash(self, gDict):\r
-        # Early exit if module or library has been hashed and is in memory\r
-        if (self.MetaFile.Path, self.Arch) in gDict and \\r
-          gDict[(self.MetaFile.Path, self.Arch)].MakeHashChain:\r
-            return gDict[(self.MetaFile.Path, self.Arch)]\r
+    def GenMakefileHashList(self):\r
+        # GenMakefileHashList only need in --binary-destination which will\r
+        # everything in local dict. So don't need check remote dict.\r
 \r
         # skip binary module\r
         if self.IsBinaryModule:\r
             return\r
 \r
-        if not (self.MetaFile.Path, self.Arch) in gDict or \\r
-           not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest:\r
-            self.GenModuleFilesHash(gDict)\r
-        if not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest:\r
-            self.GenMakeHeaderFilesHash(gDict)\r
-\r
-        if not (self.MetaFile.Path, self.Arch) in gDict or \\r
-           not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest or \\r
-           not gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain or \\r
-           not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest or \\r
-           not gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashChain:\r
-           EdkLogger.quiet("[cache warning]: Cannot generate ModuleFilesHash or MakeHeaderFilesHash for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
-           return\r
-\r
-        # Initialze hash object\r
+        FileList = []\r
         m = hashlib.md5()\r
-        MakeHashChain = []\r
+        # Add AutoGen hash\r
+        HashFile = GlobalData.gCMakeHashFile[(self.MetaFile.Path, self.Arch)]\r
+        if path.exists(LongFilePath(HashFile)):\r
+            FileList.append(HashFile)\r
+            m.update(HashFile.encode('utf-8'))\r
+        else:\r
+            EdkLogger.quiet("[cache warning]:No AutoGen HashFile: %s" % HashFile)\r
 \r
-        # Add hash of makefile and dependency header files\r
-        m.update(gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashDigest)\r
-        New = list(set(gDict[(self.MetaFile.Path, self.Arch)].MakeHeaderFilesHashChain) - set(MakeHashChain))\r
-        New.sort(key=lambda x: str(x))\r
-        MakeHashChain += New\r
+        # Add Module self\r
+        if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleHashFile:\r
+            HashFile = GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)]\r
+        else:\r
+            EdkLogger.quiet("[cache error]:No ModuleHashFile for module: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
+        if path.exists(LongFilePath(HashFile)):\r
+            FileList.append(HashFile)\r
+            m.update(HashFile.encode('utf-8'))\r
+        else:\r
+            EdkLogger.quiet("[cache warning]:No Module HashFile: %s" % HashFile)\r
 \r
         # Add Library hash\r
         if self.LibraryAutoGenList:\r
-            for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.Name):\r
-                if not (Lib.MetaFile.Path, Lib.Arch) in gDict or \\r
-                   not gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashChain:\r
-                    Lib.GenMakeHash(gDict)\r
-                if not gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashDigest:\r
-                    print("Cannot generate MakeHash for lib module:", Lib.MetaFile.Path, Lib.Arch)\r
-                    continue\r
-                m.update(gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashDigest)\r
-                New = list(set(gDict[(Lib.MetaFile.Path, Lib.Arch)].MakeHashChain) - set(MakeHashChain))\r
-                New.sort(key=lambda x: str(x))\r
-                MakeHashChain += New\r
+            for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.MetaFile.Path):\r
+                if (Lib.MetaFile.Path, Lib.Arch) in GlobalData.gModuleHashFile:\r
+                    HashFile = GlobalData.gModuleHashFile[(Lib.MetaFile.Path, Lib.Arch)]\r
+                else:\r
+                    EdkLogger.quiet("[cache error]:No ModuleHashFile for lib: %s[%s]" % (Lib.MetaFile.Path, Lib.Arch))\r
+                if path.exists(LongFilePath(HashFile)):\r
+                    FileList.append(HashFile)\r
+                    m.update(HashFile.encode('utf-8'))\r
+                else:\r
+                    EdkLogger.quiet("[cache warning]:No Lib HashFile: %s" % HashFile)\r
 \r
-        # Add Module self\r
-        m.update(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesHashDigest)\r
-        New = list(set(gDict[(self.MetaFile.Path, self.Arch)].ModuleFilesChain) - set(MakeHashChain))\r
-        New.sort(key=lambda x: str(x))\r
-        MakeHashChain += New\r
+        # Save MakeHashFileList\r
+        FilePath = path.join(self.BuildDir, self.Name + ".MakeHashFileList." + m.hexdigest())\r
+        try:\r
+            with open(LongFilePath(FilePath), 'w') as f:\r
+                json.dump(FileList, f, indent=0)\r
+        except:\r
+            EdkLogger.quiet("[cache warning]: fail to save Make HashFileList: %s" % FilePath)\r
+\r
+    def CheckHashChainFile(self, HashChainFile):\r
+        # Assume the HashChainFile basename format is the 'x.hashchain.16BytesHexStr'\r
+        # The x is module name and the 16BytesHexStr is md5 hexdigest of\r
+        # all hashchain files content\r
+        HashStr = HashChainFile.split('.')[-1]\r
+        if len(HashStr) != 32:\r
+            EdkLogger.quiet("[cache error]: wrong format HashChainFile:%s" % (File))\r
+            return False\r
 \r
-        with GlobalData.file_lock:\r
-            IR = gDict[(self.MetaFile.Path, self.Arch)]\r
-            IR.MakeHashDigest = m.digest()\r
-            IR.MakeHashHexDigest = m.hexdigest()\r
-            IR.MakeHashChain = MakeHashChain\r
-            gDict[(self.MetaFile.Path, self.Arch)] = IR\r
+        try:\r
+            with open(LongFilePath(HashChainFile), 'r') as f:\r
+                HashChainList = json.load(f)\r
+        except:\r
+            EdkLogger.quiet("[cache error]: fail to load HashChainFile: %s" % HashChainFile)\r
+            return False\r
 \r
-        return gDict[(self.MetaFile.Path, self.Arch)]\r
+        # Print the different file info\r
+        # print(HashChainFile)\r
+        for idx, (SrcFile, SrcHash) in enumerate (HashChainList):\r
+            if SrcFile in GlobalData.gFileHashDict:\r
+                DestHash = GlobalData.gFileHashDict[SrcFile]\r
+            else:\r
+                try:\r
+                    with open(LongFilePath(SrcFile), 'rb') as f:\r
+                        Content = f.read()\r
+                        DestHash = hashlib.md5(Content).hexdigest()\r
+                        GlobalData.gFileHashDict[SrcFile] = DestHash\r
+                except IOError as X:\r
+                    # cache miss if SrcFile is removed in new version code\r
+                    GlobalData.gFileHashDict[SrcFile] = 0\r
+                    EdkLogger.quiet("[cache insight]: first cache miss file in %s is %s" % (HashChainFile, SrcFile))\r
+                    return False\r
+            if SrcHash != DestHash:\r
+                EdkLogger.quiet("[cache insight]: first cache miss file in %s is %s" % (HashChainFile, SrcFile))\r
+                return False\r
+\r
+        return True\r
 \r
     ## Decide whether we can skip the left autogen and make process\r
-    def CanSkipbyPreMakefileCache(self, gDict):\r
+    def CanSkipbyMakeCache(self):\r
+        # For --binary-source only\r
+        # CanSkipbyMakeCache consume below dicts:\r
+        #     gModuleMakeCacheStatus\r
+        #     gHashChainStatus\r
+        # GenPreMakefileHashList produce gModuleMakeCacheStatus, gModuleHashFile dict.\r
+        # all these dicts might be produced in multiprocessing, so\r
+        # need check these remote dict\r
+\r
         if not GlobalData.gBinCacheSource:\r
             return False\r
 \r
-        # If Module is binary, do not skip by cache\r
+        if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleMakeCacheStatus:\r
+            return GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)]\r
+\r
+        # If Module is binary, which has special build rule, do not skip by cache.\r
         if self.IsBinaryModule:\r
+            print("[cache miss]: MakeCache: Skip BinaryModule:", self.MetaFile.Path, self.Arch)\r
+            GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
             return False\r
 \r
-        # .inc is contains binary information so do not skip by hash as well\r
+        # see .inc as binary file, do not skip by hash\r
         for f_ext in self.SourceFileList:\r
             if '.inc' in str(f_ext):\r
+                print("[cache miss]: MakeCache: Skip '.inc' File:", self.MetaFile.Path, self.Arch)\r
+                GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
                 return False\r
 \r
-        # Get the module hash values from stored cache and currrent build\r
-        # then check whether cache hit based on the hash values\r
-        # if cache hit, restore all the files from cache\r
-        FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
+        ModuleCacheDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
         FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)\r
 \r
         ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]\r
-        ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")\r
-        if not os.path.exists(ModuleHashPair):\r
-            EdkLogger.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair)\r
-            return False\r
-\r
+        ModuleHashPair = path.join(ModuleCacheDir, self.Name + ".ModuleHashPair")\r
         try:\r
-            f = open(ModuleHashPair, 'r')\r
-            ModuleHashPairList = json.load(f)\r
-            f.close()\r
+            with open(LongFilePath(ModuleHashPair), 'r') as f:\r
+                ModuleHashPairList = json.load(f)\r
         except:\r
+            # ModuleHashPair might not exist for new added module\r
+            GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
             EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)\r
+            print("[cache miss]: MakeCache:", self.MetaFile.Path, self.Arch)\r
             return False\r
 \r
-        self.GenPreMakefileHash(gDict)\r
-        if not (self.MetaFile.Path, self.Arch) in gDict or \\r
-           not gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest:\r
-            EdkLogger.quiet("[cache warning]: PreMakefileHashHexDigest is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
-            return False\r
-\r
-        MakeHashStr = None\r
-        CurrentPreMakeHash = gDict[(self.MetaFile.Path, self.Arch)].PreMakefileHashHexDigest\r
+        # Check the PreMakeHash in ModuleHashPairList one by one\r
         for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):\r
-            if PreMakefileHash == CurrentPreMakeHash:\r
-                MakeHashStr = str(MakeHash)\r
+            SourceHashDir = path.join(ModuleCacheDir, MakeHash)\r
+            SourceFfsHashDir = path.join(FfsDir, MakeHash)\r
+            PreMakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".PreMakeHashFileList." + PreMakefileHash)\r
+            MakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".MakeHashFileList." + MakeHash)\r
 \r
-        if not MakeHashStr:\r
-            return False\r
+            try:\r
+                with open(LongFilePath(MakeHashFileList_FilePah), 'r') as f:\r
+                    MakeHashFileList = json.load(f)\r
+            except:\r
+                EdkLogger.quiet("[cache error]: fail to load MakeHashFileList file: %s" % MakeHashFileList_FilePah)\r
+                continue\r
 \r
-        TargetHashDir = path.join(FileDir, MakeHashStr)\r
-        TargetFfsHashDir = path.join(FfsDir, MakeHashStr)\r
+            HashMiss = False\r
+            for HashChainFile in MakeHashFileList:\r
+                HashChainStatus = None\r
+                if HashChainFile in GlobalData.gHashChainStatus:\r
+                    HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]\r
+                if HashChainStatus == False:\r
+                    HashMiss = True\r
+                    break\r
+                elif HashChainStatus == True:\r
+                    continue\r
+                # Convert to path start with cache source dir\r
+                RelativePath = os.path.relpath(HashChainFile, self.WorkspaceDir)\r
+                NewFilePath = os.path.join(GlobalData.gBinCacheSource, RelativePath)\r
+                if self.CheckHashChainFile(NewFilePath):\r
+                    GlobalData.gHashChainStatus[HashChainFile] = True\r
+                    # Save the module self HashFile for GenPreMakefileHashList later usage\r
+                    if self.Name + ".hashchain." in HashChainFile:\r
+                        GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile\r
+                else:\r
+                    GlobalData.gHashChainStatus[HashChainFile] = False\r
+                    HashMiss = True\r
+                    break\r
 \r
-        if not os.path.exists(TargetHashDir):\r
-            EdkLogger.quiet("[cache warning]: Cache folder is missing: %s" % TargetHashDir)\r
-            return False\r
+            if HashMiss:\r
+                continue\r
 \r
-        for root, dir, files in os.walk(TargetHashDir):\r
-            for f in files:\r
-                File = path.join(root, f)\r
-                self.CacheCopyFile(self.OutputDir, TargetHashDir, File)\r
-        if os.path.exists(TargetFfsHashDir):\r
-            for root, dir, files in os.walk(TargetFfsHashDir):\r
+            # PreMakefile cache hit, restore the module build result\r
+            for root, dir, files in os.walk(SourceHashDir):\r
                 for f in files:\r
                     File = path.join(root, f)\r
-                    self.CacheCopyFile(self.FfsOutputDir, TargetFfsHashDir, File)\r
-\r
-        if self.Name == "PcdPeim" or self.Name == "PcdDxe":\r
-            CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r
+                    self.CacheCopyFile(self.BuildDir, SourceHashDir, File)\r
+            if os.path.exists(SourceFfsHashDir):\r
+                for root, dir, files in os.walk(SourceFfsHashDir):\r
+                    for f in files:\r
+                        File = path.join(root, f)\r
+                        self.CacheCopyFile(self.FfsOutputDir, SourceFfsHashDir, File)\r
+\r
+            if self.Name == "PcdPeim" or self.Name == "PcdDxe":\r
+                CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r
+\r
+            print("[cache hit]: MakeCache:", self.MetaFile.Path, self.Arch)\r
+            GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True\r
+            return True\r
 \r
-        with GlobalData.file_lock:\r
-            IR = gDict[(self.MetaFile.Path, self.Arch)]\r
-            IR.PreMakeCacheHit = True\r
-            gDict[(self.MetaFile.Path, self.Arch)] = IR\r
-        print("[cache hit]: checkpoint_PreMakefile:", self.MetaFile.Path, self.Arch)\r
-        #EdkLogger.quiet("cache hit: %s[%s]" % (self.MetaFile.Path, self.Arch))\r
-        return True\r
+        print("[cache miss]: MakeCache:", self.MetaFile.Path, self.Arch)\r
+        GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
+        return False\r
 \r
-    ## Decide whether we can skip the make process\r
-    def CanSkipbyMakeCache(self, gDict):\r
-        if not GlobalData.gBinCacheSource:\r
+    ## Decide whether we can skip the left autogen and make process\r
+    def CanSkipbyPreMakeCache(self):\r
+        # CanSkipbyPreMakeCache consume below dicts:\r
+        #     gModulePreMakeCacheStatus\r
+        #     gHashChainStatus\r
+        #     gModuleHashFile\r
+        # GenPreMakefileHashList produce gModulePreMakeCacheStatus dict.\r
+        # all these dicts might be produced in multiprocessing, so\r
+        # need check these remote dicts\r
+\r
+        if not GlobalData.gUseHashCache or GlobalData.gBinCacheDest:\r
             return False\r
 \r
-        # If Module is binary, do not skip by cache\r
+        if (self.MetaFile.Path, self.Arch) in GlobalData.gModulePreMakeCacheStatus:\r
+            return GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)]\r
+\r
+        # If Module is binary, which has special build rule, do not skip by cache.\r
         if self.IsBinaryModule:\r
-            print("[cache miss]: checkpoint_Makefile: binary module:", self.MetaFile.Path, self.Arch)\r
+            print("[cache miss]: PreMakeCache: Skip BinaryModule:", self.MetaFile.Path, self.Arch)\r
+            GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
             return False\r
 \r
-        # .inc is contains binary information so do not skip by hash as well\r
+        # see .inc as binary file, do not skip by hash\r
         for f_ext in self.SourceFileList:\r
             if '.inc' in str(f_ext):\r
-                with GlobalData.file_lock:\r
-                    IR = gDict[(self.MetaFile.Path, self.Arch)]\r
-                    IR.MakeCacheHit = False\r
-                    gDict[(self.MetaFile.Path, self.Arch)] = IR\r
-                print("[cache miss]: checkpoint_Makefile: .inc module:", self.MetaFile.Path, self.Arch)\r
+                print("[cache miss]: PreMakeCache: Skip '.inc' File:", self.MetaFile.Path, self.Arch)\r
+                GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
                 return False\r
 \r
-        # Get the module hash values from stored cache and currrent build\r
-        # then check whether cache hit based on the hash values\r
-        # if cache hit, restore all the files from cache\r
-        FileDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
+        # For --hash only in the incremental build\r
+        if not GlobalData.gBinCacheSource:\r
+            Files = [path.join(self.BuildDir, f) for f in os.listdir(self.BuildDir) if path.isfile(path.join(self.BuildDir, f))]\r
+            PreMakeHashFileList_FilePah = None\r
+            MakeTimeStamp = 0\r
+            # Find latest PreMakeHashFileList file in self.BuildDir folder\r
+            for File in Files:\r
+                if ".PreMakeHashFileList." in File:\r
+                    FileTimeStamp = os.stat(path.join(self.BuildDir, File))[8]\r
+                    if FileTimeStamp > MakeTimeStamp:\r
+                        MakeTimeStamp = FileTimeStamp\r
+                        PreMakeHashFileList_FilePah = File\r
+            if not PreMakeHashFileList_FilePah:\r
+                GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
+                return False\r
+\r
+            try:\r
+                with open(LongFilePath(PreMakeHashFileList_FilePah), 'r') as f:\r
+                    PreMakeHashFileList = json.load(f)\r
+            except:\r
+                EdkLogger.quiet("[cache error]: fail to load PreMakeHashFileList file: %s" % PreMakeHashFileList_FilePah)\r
+                print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)\r
+                GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
+                return False\r
+\r
+            HashMiss = False\r
+            for HashChainFile in PreMakeHashFileList:\r
+                HashChainStatus = None\r
+                if HashChainFile in GlobalData.gHashChainStatus:\r
+                    HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]\r
+                if HashChainStatus == False:\r
+                    HashMiss = True\r
+                    break\r
+                elif HashChainStatus == True:\r
+                    continue\r
+                if self.CheckHashChainFile(HashChainFile):\r
+                    GlobalData.gHashChainStatus[HashChainFile] = True\r
+                    # Save the module self HashFile for GenPreMakefileHashList later usage\r
+                    if self.Name + ".hashchain." in HashChainFile:\r
+                        GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile\r
+                else:\r
+                    GlobalData.gHashChainStatus[HashChainFile] = False\r
+                    HashMiss = True\r
+                    break\r
+\r
+            if HashMiss:\r
+                print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)\r
+                GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
+                return False\r
+            else:\r
+                print("[cache hit]: PreMakeCache:", self.MetaFile.Path, self.Arch)\r
+                GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True\r
+                return True\r
+\r
+        ModuleCacheDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)\r
         FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)\r
 \r
         ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]\r
-        ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")\r
-        if not os.path.exists(ModuleHashPair):\r
-            EdkLogger.quiet("[cache warning]: Cannot find ModuleHashPair file: %s" % ModuleHashPair)\r
-            return False\r
-\r
+        ModuleHashPair = path.join(ModuleCacheDir, self.Name + ".ModuleHashPair")\r
         try:\r
-            f = open(ModuleHashPair, 'r')\r
-            ModuleHashPairList = json.load(f)\r
-            f.close()\r
+            with open(LongFilePath(ModuleHashPair), 'r') as f:\r
+                ModuleHashPairList = json.load(f)\r
         except:\r
+            # ModuleHashPair might not exist for new added module\r
+            GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
             EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)\r
+            print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)\r
             return False\r
 \r
-        self.GenMakeHash(gDict)\r
-        if not (self.MetaFile.Path, self.Arch) in gDict or \\r
-           not gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest:\r
-            EdkLogger.quiet("[cache warning]: MakeHashHexDigest is missing for module %s[%s]" %(self.MetaFile.Path, self.Arch))\r
-            return False\r
-\r
-        MakeHashStr = None\r
-        CurrentMakeHash = gDict[(self.MetaFile.Path, self.Arch)].MakeHashHexDigest\r
+        # Check the PreMakeHash in ModuleHashPairList one by one\r
         for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):\r
-            if MakeHash == CurrentMakeHash:\r
-                MakeHashStr = str(MakeHash)\r
+            SourceHashDir = path.join(ModuleCacheDir, MakeHash)\r
+            SourceFfsHashDir = path.join(FfsDir, MakeHash)\r
+            PreMakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".PreMakeHashFileList." + PreMakefileHash)\r
+            MakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".MakeHashFileList." + MakeHash)\r
 \r
-        if not MakeHashStr:\r
-            print("[cache miss]: checkpoint_Makefile:", self.MetaFile.Path, self.Arch)\r
-            return False\r
+            try:\r
+                with open(LongFilePath(PreMakeHashFileList_FilePah), 'r') as f:\r
+                    PreMakeHashFileList = json.load(f)\r
+            except:\r
+                EdkLogger.quiet("[cache error]: fail to load PreMakeHashFileList file: %s" % PreMakeHashFileList_FilePah)\r
+                continue\r
 \r
-        TargetHashDir = path.join(FileDir, MakeHashStr)\r
-        TargetFfsHashDir = path.join(FfsDir, MakeHashStr)\r
-        if not os.path.exists(TargetHashDir):\r
-            EdkLogger.quiet("[cache warning]: Cache folder is missing: %s" % TargetHashDir)\r
-            return False\r
+            HashMiss = False\r
+            for HashChainFile in PreMakeHashFileList:\r
+                HashChainStatus = None\r
+                if HashChainFile in GlobalData.gHashChainStatus:\r
+                    HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]\r
+                if HashChainStatus == False:\r
+                    HashMiss = True\r
+                    break\r
+                elif HashChainStatus == True:\r
+                    continue\r
+                # Convert to path start with cache source dir\r
+                RelativePath = os.path.relpath(HashChainFile, self.WorkspaceDir)\r
+                NewFilePath = os.path.join(GlobalData.gBinCacheSource, RelativePath)\r
+                if self.CheckHashChainFile(NewFilePath):\r
+                    GlobalData.gHashChainStatus[HashChainFile] = True\r
+                else:\r
+                    GlobalData.gHashChainStatus[HashChainFile] = False\r
+                    HashMiss = True\r
+                    break\r
 \r
-        for root, dir, files in os.walk(TargetHashDir):\r
-            for f in files:\r
-                File = path.join(root, f)\r
-                self.CacheCopyFile(self.OutputDir, TargetHashDir, File)\r
+            if HashMiss:\r
+                continue\r
 \r
-        if os.path.exists(TargetFfsHashDir):\r
-            for root, dir, files in os.walk(TargetFfsHashDir):\r
+            # PreMakefile cache hit, restore the module build result\r
+            for root, dir, files in os.walk(SourceHashDir):\r
                 for f in files:\r
                     File = path.join(root, f)\r
-                    self.CacheCopyFile(self.FfsOutputDir, TargetFfsHashDir, File)\r
+                    self.CacheCopyFile(self.BuildDir, SourceHashDir, File)\r
+            if os.path.exists(SourceFfsHashDir):\r
+                for root, dir, files in os.walk(SourceFfsHashDir):\r
+                    for f in files:\r
+                        File = path.join(root, f)\r
+                        self.CacheCopyFile(self.FfsOutputDir, SourceFfsHashDir, File)\r
+\r
+            if self.Name == "PcdPeim" or self.Name == "PcdDxe":\r
+                CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r
+\r
+            print("[cache hit]: PreMakeCache:", self.MetaFile.Path, self.Arch)\r
+            GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True\r
+            return True\r
 \r
-        if self.Name == "PcdPeim" or self.Name == "PcdDxe":\r
-            CreatePcdDatabaseCode(self, TemplateString(), TemplateString())\r
-        with GlobalData.file_lock:\r
-            IR = gDict[(self.MetaFile.Path, self.Arch)]\r
-            IR.MakeCacheHit = True\r
-            gDict[(self.MetaFile.Path, self.Arch)] = IR\r
-        print("[cache hit]: checkpoint_Makefile:", self.MetaFile.Path, self.Arch)\r
-        return True\r
+        print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)\r
+        GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False\r
+        return False\r
 \r
-    ## Decide whether we can skip the ModuleAutoGen process\r
-    def CanSkipbyCache(self, gDict):\r
+    ## Decide whether we can skip the Module build\r
+    def CanSkipbyCache(self, gHitSet):\r
         # Hashing feature is off\r
         if not GlobalData.gBinCacheSource:\r
             return False\r
 \r
-        if self in GlobalData.gBuildHashSkipTracking:\r
-            return GlobalData.gBuildHashSkipTracking[self]\r
-\r
-        # If library or Module is binary do not skip by hash\r
-        if self.IsBinaryModule:\r
-            GlobalData.gBuildHashSkipTracking[self] = False\r
-            return False\r
-\r
-        # .inc is contains binary information so do not skip by hash as well\r
-        for f_ext in self.SourceFileList:\r
-            if '.inc' in str(f_ext):\r
-                GlobalData.gBuildHashSkipTracking[self] = False\r
-                return False\r
-\r
-        if not (self.MetaFile.Path, self.Arch) in gDict:\r
-            return False\r
-\r
-        if gDict[(self.MetaFile.Path, self.Arch)].PreMakeCacheHit:\r
-            GlobalData.gBuildHashSkipTracking[self] = True\r
-            return True\r
-\r
-        if gDict[(self.MetaFile.Path, self.Arch)].MakeCacheHit:\r
-            GlobalData.gBuildHashSkipTracking[self] = True\r
+        if self in gHitSet:\r
             return True\r
 \r
         return False\r