\r
# validate the options\r
errors = []\r
- if options.WorkspacePath == None:\r
+ if options.WorkspacePath is None:\r
errors.append('- Please specify workspace path via option -w!')\r
elif not os.path.exists(options.WorkspacePath):\r
errors.append("- Invalid workspace path %s! The workspace path should be exist in absolute path!" % options.WorkspacePath)\r
\r
- if options.PackagePath == None:\r
+ if options.PackagePath is None:\r
errors.append('- Please specify package DEC file path via option -p!')\r
elif not os.path.exists(options.PackagePath):\r
errors.append("- Invalid package's DEC file path %s! The DEC path should be exist in absolute path!" % options.PackagePath)\r
\r
default = "C:\\Program Files\\doxygen\\bin\\doxygen.exe"\r
- if options.DoxygenPath == None:\r
+ if options.DoxygenPath is None:\r
if os.path.exists(default):\r
print "Warning: Assume doxygen tool is installed at %s. If not, please specify via -x" % default\r
options.DoxygenPath = default\r
elif not os.path.exists(options.DoxygenPath):\r
errors.append("- Invalid doxygen tool path %s! The doxygen tool path should be exist in absolute path!" % options.DoxygenPath)\r
\r
- if options.OutputPath != None:\r
+ if options.OutputPath is not None:\r
if not os.path.exists(options.OutputPath):\r
# create output\r
try:\r
except:\r
errors.append('- Fail to create the output directory %s' % options.OutputPath)\r
else:\r
- if options.PackagePath != None and os.path.exists(options.PackagePath):\r
+ if options.PackagePath is not None and os.path.exists(options.PackagePath):\r
dirpath = os.path.dirname(options.PackagePath)\r
default = os.path.join (dirpath, "Document")\r
print 'Warning: Assume document output at %s. If not, please specify via option -o' % default\r
else:\r
errors.append('- Please specify document output path via option -o!')\r
\r
- if options.Arch == None:\r
+ if options.Arch is None:\r
options.Arch = 'ALL'\r
print "Warning: Assume arch is \"ALL\". If not, specify via -a"\r
\r
- if options.DocumentMode == None:\r
+ if options.DocumentMode is None:\r
options.DocumentMode = "HTML"\r
print "Warning: Assume document mode is \"HTML\". If not, specify via -m"\r
\r
- if options.IncludeOnly == None:\r
+ if options.IncludeOnly is None:\r
options.IncludeOnly = False\r
print "Warning: Assume generate package document for all package\'s source including publich interfaces and implementation libraries and modules."\r
\r
if options.DocumentMode.lower() == 'chm':\r
default = "C:\\Program Files\\HTML Help Workshop\\hhc.exe"\r
- if options.HtmlWorkshopPath == None:\r
+ if options.HtmlWorkshopPath is None:\r
if os.path.exists(default):\r
print 'Warning: Assume the installation path of Microsoft HTML Workshop is %s. If not, specify via option -c.' % default\r
options.HtmlWorkshopPath = default\r
\r
# create package model object firstly\r
pkgObj = createPackageObject(wspath, pkgpath)\r
- if pkgObj == None:\r
+ if pkgObj is None:\r
sys.exit(-1)\r
\r
# create doxygen action model\r
return subpage\r
\r
def AddPages(self, pageArray):\r
- if pageArray == None:\r
+ if pageArray is None:\r
return\r
for page in pageArray:\r
self.AddPage(page)\r
self.mWarningFile = str.replace('\\', '/')\r
\r
def FileExists(self, path):\r
- if path == None:\r
+ if path is None:\r
return False\r
if len(path) == 0:\r
return False\r
return False\r
\r
def AddFile(self, path):\r
- if path == None:\r
+ if path is None:\r
return\r
\r
if len(path) == 0:\r
if line[0] != ' ':\r
# new entry\r
ret = rMapEntry.match(line)\r
- if ret != None:\r
+ if ret is not None:\r
name = ret.groups()[0]\r
baseaddr = int(ret.groups()[1], 16)\r
entry = int(ret.groups()[2], 16)\r
if key not in cls._objs.keys():\r
cls._objs[key] = object.__new__(cls, *args, **kwargs)\r
\r
- if parent != None:\r
+ if parent is not None:\r
cls._objs[key].AddParent(parent)\r
\r
return cls._objs[key]\r
self._isModify = True\r
\r
def AddParent(self, parent):\r
- if parent == None: return\r
+ if parent is None: return\r
if not hasattr(self, "_parents"):\r
self._parents = []\r
\r
continue\r
\r
m = section_re.match(templine)\r
- if m!= None: # found a section\r
+ if m is not None: # found a section\r
inGlobal = False\r
# Finish the latest section first\r
if len(sObjs) != 0:\r
def Destroy(self, parent):\r
\r
# check referenced parent\r
- if parent != None:\r
+ if parent is not None:\r
assert parent in self._parents, "when destory ini object, can not found parent reference!"\r
self._parents.remove(parent)\r
\r
visit += 1\r
continue\r
line = line.split('#')[0].strip()\r
- if iniObj != None:\r
+ if iniObj is not None:\r
if line.endswith('}'):\r
iniObj._end = visit - self._start\r
if not iniObj.Parse():\r
def NormalMessage(type, mess, fName=None, fNo=None):\r
strMsg = type\r
\r
- if fName != None:\r
+ if fName is not None:\r
strMsg += ' %s' % fName.replace('/', '\\')\r
- if fNo != None:\r
+ if fNo is not None:\r
strMsg += '(%d):' % fNo\r
else:\r
strMsg += ' :'\r
\r
- if fName == None and fNo == None:\r
+ if fName is None and fNo is None:\r
strMsg += ' '\r
strMsg += mess\r
\r
\r
def Load(self, relativePath):\r
# if has been loaded, directly return\r
- if self._fileObj != None: return True\r
+ if self._fileObj is not None: return True\r
\r
relativePath = os.path.normpath(relativePath)\r
fullPath = os.path.join(self._workspace, relativePath)\r
return dsc.DSCFile\r
\r
def GetModuleCount(self):\r
- if self.GetFileObj() == None:\r
+ if self.GetFileObj() is None:\r
ErrorMsg("Fail to get module count because DSC file has not been load!")\r
\r
return len(self.GetFileObj().GetComponents())\r
def LoadModules(self, precallback=None, postcallback=None):\r
for obj in self.GetFileObj().GetComponents():\r
mFilename = obj.GetFilename()\r
- if precallback != None:\r
+ if precallback is not None:\r
precallback(self, mFilename)\r
arch = obj.GetArch()\r
if arch.lower() == 'common':\r
module = Module(self, self.GetWorkspace())\r
if module.Load(mFilename, arch, obj.GetOveridePcds(), obj.GetOverideLibs()):\r
self._modules.append(module)\r
- if postcallback != None:\r
+ if postcallback is not None:\r
postcallback(self, module)\r
else:\r
del module\r
for obj in objs:\r
if obj.GetPcdName().lower() == name.lower():\r
arr.append(obj)\r
- if arch != None:\r
+ if arch is not None:\r
arr = self.FilterObjsByArch(arr, arch)\r
return arr\r
\r
newSect = newDsc.AddNewSection(oldSect.GetName())\r
for oldComObj in oldSect.GetObjects():\r
module = self.GetModuleObject(oldComObj.GetFilename(), oldSect.GetArch())\r
- if module == None: continue\r
+ if module is None: continue\r
\r
newComObj = dsc.DSCComponentObject(newSect)\r
newComObj.SetFilename(oldComObj.GetFilename())\r
# add all library instance for override section\r
libdict = module.GetLibraries()\r
for libclass in libdict.keys():\r
- if libdict[libclass] != None:\r
+ if libdict[libclass] is not None:\r
newComObj.AddOverideLib(libclass, libdict[libclass].GetRelativeFilename().replace('\\', '/'))\r
\r
# add all pcds for override section\r
\r
def Destroy(self):\r
for lib in self._libs.values():\r
- if lib != None:\r
+ if lib is not None:\r
lib.Destroy()\r
self._libs.clear()\r
\r
del self._ppis[:]\r
\r
for protocol in self._protocols:\r
- if protocol != None:\r
+ if protocol is not None:\r
protocol.DeRef(self)\r
del self._protocols[:]\r
\r
for guid in self._guids:\r
- if guid != None:\r
+ if guid is not None:\r
guid.DeRef(self)\r
del self._guids[:]\r
\r
return False\r
\r
self._arch = arch\r
- if overidePcds != None:\r
+ if overidePcds is not None:\r
self._overideLibs = overideLibs\r
- if overideLibs != None:\r
+ if overideLibs is not None:\r
self._overidePcds = overidePcds\r
\r
self._SearchLibraries()\r
def GetPcds(self):\r
pcds = self._pcds.copy()\r
for lib in self._libs.values():\r
- if lib == None: continue\r
+ if lib is None: continue\r
for name in lib._pcds.keys():\r
pcds[name] = lib._pcds[name]\r
return pcds\r
ppis = []\r
ppis += self._ppis\r
for lib in self._libs.values():\r
- if lib == None: continue\r
+ if lib is None: continue\r
ppis += lib._ppis\r
return ppis\r
\r
pros = []\r
pros = self._protocols\r
for lib in self._libs.values():\r
- if lib == None: continue\r
+ if lib is None: continue\r
pros += lib._protocols\r
return pros\r
\r
guids = []\r
guids += self._guids\r
for lib in self._libs.values():\r
- if lib == None: continue\r
+ if lib is None: continue\r
guids += lib._guids\r
return guids\r
\r
deps = []\r
deps += self._depexs\r
for lib in self._libs.values():\r
- if lib == None: continue\r
+ if lib is None: continue\r
deps += lib._depexs\r
return deps\r
\r
def IsLibrary(self):\r
- return self.GetFileObj().GetDefine("LIBRARY_CLASS") != None\r
+ return self.GetFileObj().GetDefine("LIBRARY_CLASS") is not None\r
\r
def GetLibraryInstance(self, classname, arch, type):\r
if classname not in self._libs.keys():\r
parent = self.GetParent()\r
if issubclass(parent.__class__, Platform):\r
path = parent.GetLibraryPath(classname, arch, type)\r
- if path == None:\r
+ if path is None:\r
ErrorMsg('Fail to get library instance for %s' % classname, self.GetFilename())\r
return None\r
self._libs[classname] = Library(self, self.GetWorkspace())\r
continue\r
classname = obj.GetClass()\r
instance = self.GetLibraryInstance(classname, arch, type)\r
- if not self.IsLibrary() and instance != None:\r
+ if not self.IsLibrary() and instance is not None:\r
instance._isInherit = False\r
\r
if classname not in self._libs.keys():\r
pros = []\r
deps = []\r
guids = []\r
- if self.GetFileObj() != None:\r
+ if self.GetFileObj() is not None:\r
pcds = self.FilterObjsByArch(self.GetFileObj().GetSectionObjectsByName('pcd'),\r
self.GetArch())\r
for pcd in pcds:\r
objs = self.GetFileObj().GetSectionObjectsByName('packages')\r
for obj in objs:\r
package = self.GetPlatform().GetPackage(obj.GetPath())\r
- if package != None:\r
+ if package is not None:\r
self._packages.append(package)\r
\r
def GetPackages(self):\r
return self._packages\r
\r
def GetPcdObjects(self):\r
- if self.GetFileObj() == None:\r
+ if self.GetFileObj() is None:\r
return []\r
\r
return self.GetFileObj().GetSectionObjectsByName('pcd')\r
\r
def GetLibraryClassHeaderFilePath(self):\r
lcname = self.GetFileObj().GetProduceLibraryClass()\r
- if lcname == None: return None\r
+ if lcname is None: return None\r
\r
pkgs = self.GetPackages()\r
for package in pkgs:\r
path = package.GetLibraryClassHeaderPathByName(lcname)\r
- if path != None:\r
+ if path is not None:\r
return os.path.realpath(os.path.join(package.GetFileObj().GetPackageRootPath(), path))\r
return None\r
\r
def Reload(self, force=False, callback=None):\r
- if callback != None:\r
+ if callback is not None:\r
callback(self, "Starting reload...")\r
\r
ret = SurfaceObject.Reload(self, force)\r
return True\r
\r
for lib in self._libs.values():\r
- if lib != None:\r
+ if lib is not None:\r
lib.Destroy()\r
self._libs.clear()\r
\r
del self._packages[:]\r
del self._depexs[:]\r
\r
- if callback != None:\r
+ if callback is not None:\r
callback(self, "Searching libraries...")\r
self._SearchLibraries()\r
- if callback != None:\r
+ if callback is not None:\r
callback(self, "Searching packages...")\r
self._SearchPackage()\r
- if callback != None:\r
+ if callback is not None:\r
callback(self, "Searching surface items...")\r
self._SearchSurfaceItems()\r
\r
\r
def Destroy(self):\r
for pcd in self._pcds.values():\r
- if pcd != None:\r
+ if pcd is not None:\r
pcd.Destroy()\r
for guid in self._guids.values():\r
- if guid != None:\r
+ if guid is not None:\r
guid.Destroy()\r
for protocol in self._protocols.values():\r
- if protocol != None:\r
+ if protocol is not None:\r
protocol.Destroy()\r
for ppi in self._ppis.values():\r
- if ppi != None:\r
+ if ppi is not None:\r
ppi.Destroy()\r
self._pcds.clear()\r
self._guids.clear()\r
pcds = self.GetFileObj().GetSectionObjectsByName('pcds')\r
for pcd in pcds:\r
if pcd.GetPcdName() in self._pcds.keys():\r
- if self._pcds[pcd.GetPcdName()] != None:\r
+ if self._pcds[pcd.GetPcdName()] is not None:\r
self._pcds[pcd.GetPcdName()].AddDecObj(pcd)\r
else:\r
self._pcds[pcd.GetPcdName()] = PcdItem(pcd.GetPcdName(), self, pcd)\r
def GetPcdDefineObjs(self, name=None):\r
arr = []\r
objs = self.GetFileObj().GetSectionObjectsByName('pcds')\r
- if name == None: return objs\r
+ if name is None: return objs\r
\r
for obj in objs:\r
if obj.GetPcdName().lower() == name.lower():\r
\r
def __init__(self, parent, name, infObj, pcdItem):\r
assert issubclass(parent.__class__, Module), "Module's PCD's parent must be module!"\r
- assert pcdItem != None, 'Pcd %s does not in some package!' % name\r
+ assert pcdItem is not None, 'Pcd %s does not in some package!' % name\r
\r
self._name = name\r
self._parent = parent\r
return arr[1]\r
\r
def IsArchMatch(self, arch):\r
- if arch == None or self.GetArch() == 'common':\r
+ if arch is None or self.GetArch() == 'common':\r
return True\r
\r
if self.GetArch().lower() != arch.lower():\r
self._chmCallback = None\r
\r
def Log(self, message, level='info'):\r
- if self._log != None:\r
+ if self._log is not None:\r
self._log(message, level)\r
\r
def IsVerbose(self):\r
\r
self.Log(" >>>>>> Generate doxygen index page file...Zzz...\n")\r
indexPagePath = self.GenerateIndexPage()\r
- if indexPagePath == None:\r
+ if indexPagePath is None:\r
self.Log("Fail to generate index page!\n", 'error')\r
return False\r
else:\r
self.Log(" <<<<<< Success Save doxygen config file to %s...\n" % configFilePath)\r
\r
# launch doxygen tool to generate document\r
- if self._doxygenCallback != None:\r
+ if self._doxygenCallback is not None:\r
self.Log(" >>>>>> Start doxygen process...Zzz...\n")\r
if not self._doxygenCallback(self._doxPath, configFilePath):\r
return False\r
self._configFile.AddPreDefined('MDE_CPU_ARM')\r
\r
namestr = self._pObj.GetName()\r
- if self._arch != None:\r
+ if self._arch is not None:\r
namestr += '[%s]' % self._arch\r
- if self._tooltag != None:\r
+ if self._tooltag is not None:\r
namestr += '[%s]' % self._tooltag\r
self._configFile.SetProjectName(namestr)\r
self._configFile.SetStripPath(self._pObj.GetWorkspace())\r
objs = pObj.GetFileObj().GetSectionObjectsByName('libraryclass', self._arch)\r
if len(objs) == 0: return []\r
\r
- if self._arch != None:\r
+ if self._arch is not None:\r
for obj in objs:\r
classPage = doxygen.Page(obj.GetClassName(),\r
"lc_%s" % obj.GetClassName())\r
mo = re.match(r"^[#\w\s]+[<\"]([\\/\w.]+)[>\"]$", lines[no].strip())\r
filePath = mo.groups()[0]\r
\r
- if filePath == None or len(filePath) == 0:\r
+ if filePath is None or len(filePath) == 0:\r
continue\r
\r
# find header file in module's path firstly.\r
if os.path.exists(incPath):\r
fullPath = incPath\r
break\r
- if infObj != None:\r
+ if infObj is not None:\r
pkgInfObjs = infObj.GetSectionObjectsByName('packages')\r
for obj in pkgInfObjs:\r
decObj = dec.DECFile(os.path.join(pObj.GetWorkspace(), obj.GetPath()))\r
if os.path.exists(os.path.join(incPath, filePath)):\r
fullPath = os.path.join(os.path.join(incPath, filePath))\r
break\r
- if fullPath != None:\r
+ if fullPath is not None:\r
break\r
\r
- if fullPath == None and self.IsVerbose():\r
+ if fullPath is None and self.IsVerbose():\r
self.Log('Can not resolve header file %s for file %s in package %s\n' % (filePath, path, pObj.GetFileObj().GetFilename()), 'error')\r
return\r
else:\r
typeRootPageDict[obj.GetPcdType()] = doxygen.Page(obj.GetPcdType(), 'pcd_%s_root_page' % obj.GetPcdType())\r
pcdRootPage.AddPage(typeRootPageDict[obj.GetPcdType()])\r
typeRoot = typeRootPageDict[obj.GetPcdType()]\r
- if self._arch != None:\r
+ if self._arch is not None:\r
pcdPage = doxygen.Page('%s' % obj.GetPcdName(),\r
'pcd_%s_%s_%s' % (obj.GetPcdType(), obj.GetArch(), obj.GetPcdName().split('.')[1]))\r
pcdPage.AddDescription('<br>\n'.join(obj.GetComment()) + '<br>\n')\r
pageRoot = doxygen.Page('GUID', 'guid_root_page')\r
objs = pObj.GetFileObj().GetSectionObjectsByName('guids', self._arch)\r
if len(objs) == 0: return []\r
- if self._arch != None:\r
+ if self._arch is not None:\r
for obj in objs:\r
pageRoot.AddPage(self._GenerateGuidSubPage(pObj, obj, configFile))\r
else:\r
pageRoot = doxygen.Page('PPI', 'ppi_root_page')\r
objs = pObj.GetFileObj().GetSectionObjectsByName('ppis', self._arch)\r
if len(objs) == 0: return []\r
- if self._arch != None:\r
+ if self._arch is not None:\r
for obj in objs:\r
pageRoot.AddPage(self._GeneratePpiSubPage(pObj, obj, configFile))\r
else:\r
pageRoot = doxygen.Page('PROTOCOL', 'protocol_root_page')\r
objs = pObj.GetFileObj().GetSectionObjectsByName('protocols', self._arch)\r
if len(objs) == 0: return []\r
- if self._arch != None:\r
+ if self._arch is not None:\r
for obj in objs:\r
pageRoot.AddPage(self._GenerateProtocolSubPage(pObj, obj, configFile))\r
else:\r
if not infObj.Parse():\r
self.Log('Fail to load INF file %s' % inf)\r
continue\r
- if infObj.GetProduceLibraryClass() != None:\r
+ if infObj.GetProduceLibraryClass() is not None:\r
libObjs.append(infObj)\r
else:\r
modObjs.append(infObj)\r
retarr = self.SearchLibraryClassHeaderFile(lcObj.GetClass(),\r
workspace,\r
refDecObjs)\r
- if retarr != None:\r
+ if retarr is not None:\r
pkgname, hPath = retarr\r
else:\r
self.Log('Fail find the library class %s definition from module %s dependent package!' % (lcObj.GetClass(), infObj.GetFilename()), 'error')\r
self._chmCallback = None\r
\r
def Log(self, message, level='info'):\r
- if self._log != None:\r
+ if self._log is not None:\r
self._log(message, level)\r
\r
def IsVerbose(self):\r
\r
self.Log(" >>>>>> Generate doxygen index page file...Zzz...\n")\r
indexPagePath = self.GenerateIndexPage()\r
- if indexPagePath == None:\r
+ if indexPagePath is None:\r
self.Log("Fail to generate index page!\n", 'error')\r
return False\r
else:\r
self.Log(" <<<<<< Success Save doxygen config file to %s...\n" % configFilePath)\r
\r
# launch doxygen tool to generate document\r
- if self._doxygenCallback != None:\r
+ if self._doxygenCallback is not None:\r
self.Log(" >>>>>> Start doxygen process...Zzz...\n")\r
if not self._doxygenCallback(self._doxPath, configFilePath):\r
return False\r
self._configFile.AddPreDefined(macro)\r
\r
namestr = self._pObj.GetName()\r
- if self._arch != None:\r
+ if self._arch is not None:\r
namestr += '[%s]' % self._arch\r
- if self._tooltag != None:\r
+ if self._tooltag is not None:\r
namestr += '[%s]' % self._tooltag\r
self._configFile.SetProjectName(namestr)\r
self._configFile.SetStripPath(self._pObj.GetWorkspace())\r
objs = pObj.GetFileObj().GetSectionObjectsByName('libraryclass', self._arch)\r
if len(objs) == 0: return []\r
\r
- if self._arch != None:\r
+ if self._arch is not None:\r
for obj in objs:\r
classPage = doxygen.Page(obj.GetClassName(),\r
"lc_%s" % obj.GetClassName())\r
mo = re.match(r"^[#\w\s]+[<\"]([\\/\w.]+)[>\"]$", lines[no].strip())\r
filePath = mo.groups()[0]\r
\r
- if filePath == None or len(filePath) == 0:\r
+ if filePath is None or len(filePath) == 0:\r
continue\r
\r
# find header file in module's path firstly.\r
if os.path.exists(incPath):\r
fullPath = incPath\r
break\r
- if infObj != None:\r
+ if infObj is not None:\r
pkgInfObjs = infObj.GetSectionObjectsByName('packages')\r
for obj in pkgInfObjs:\r
decObj = dec.DECFile(os.path.join(pObj.GetWorkspace(), obj.GetPath()))\r
if os.path.exists(os.path.join(incPath, filePath)):\r
fullPath = os.path.join(os.path.join(incPath, filePath))\r
break\r
- if fullPath != None:\r
+ if fullPath is not None:\r
break\r
\r
- if fullPath == None and self.IsVerbose():\r
+ if fullPath is None and self.IsVerbose():\r
self.Log('Can not resolve header file %s for file %s in package %s\n' % (filePath, path, pObj.GetFileObj().GetFilename()), 'error')\r
return\r
else:\r
typeRootPageDict[obj.GetPcdType()] = doxygen.Page(obj.GetPcdType(), 'pcd_%s_root_page' % obj.GetPcdType())\r
pcdRootPage.AddPage(typeRootPageDict[obj.GetPcdType()])\r
typeRoot = typeRootPageDict[obj.GetPcdType()]\r
- if self._arch != None:\r
+ if self._arch is not None:\r
pcdPage = doxygen.Page('%s' % obj.GetPcdName(),\r
'pcd_%s_%s_%s' % (obj.GetPcdType(), obj.GetArch(), obj.GetPcdName().split('.')[1]))\r
pcdPage.AddDescription('<br>\n'.join(obj.GetComment()) + '<br>\n')\r
pageRoot = doxygen.Page('GUID', 'guid_root_page')\r
objs = pObj.GetFileObj().GetSectionObjectsByName('guids', self._arch)\r
if len(objs) == 0: return []\r
- if self._arch != None:\r
+ if self._arch is not None:\r
for obj in objs:\r
pageRoot.AddPage(self._GenerateGuidSubPage(pObj, obj, configFile))\r
else:\r
pageRoot = doxygen.Page('PPI', 'ppi_root_page')\r
objs = pObj.GetFileObj().GetSectionObjectsByName('ppis', self._arch)\r
if len(objs) == 0: return []\r
- if self._arch != None:\r
+ if self._arch is not None:\r
for obj in objs:\r
pageRoot.AddPage(self._GeneratePpiSubPage(pObj, obj, configFile))\r
else:\r
pageRoot = doxygen.Page('PROTOCOL', 'protocol_root_page')\r
objs = pObj.GetFileObj().GetSectionObjectsByName('protocols', self._arch)\r
if len(objs) == 0: return []\r
- if self._arch != None:\r
+ if self._arch is not None:\r
for obj in objs:\r
pageRoot.AddPage(self._GenerateProtocolSubPage(pObj, obj, configFile))\r
else:\r
if not infObj.Parse():\r
self.Log('Fail to load INF file %s' % inf)\r
continue\r
- if infObj.GetProduceLibraryClass() != None:\r
+ if infObj.GetProduceLibraryClass() is not None:\r
libObjs.append(infObj)\r
else:\r
modObjs.append(infObj)\r
retarr = self.SearchLibraryClassHeaderFile(lcObj.GetClass(),\r
workspace,\r
refDecObjs)\r
- if retarr != None:\r
+ if retarr is not None:\r
pkgname, hPath = retarr\r
else:\r
self.Log('Fail find the library class %s definition from module %s dependent package!' % (lcObj.GetClass(), infObj.GetFilename()), 'error')\r
lines.append(' <%s>\n' % key)\r
\r
for name, value in self._OveridePcds[key]:\r
- if value != None:\r
+ if value is not None:\r
lines.append(' %s|%s\n' % (name, value))\r
else:\r
lines.append(' %s\n' % name)\r
\r
def GetProduceLibraryClass(self):\r
obj = self.GetDefine("LIBRARY_CLASS")\r
- if obj == None: return None\r
+ if obj is None: return None\r
\r
return obj.split('|')[0].strip()\r
\r
if not ini.BaseINIFile.Parse(self):\r
return False\r
classname = self.GetProduceLibraryClass()\r
- if classname != None:\r
+ if classname is not None:\r
libobjdict = INFFile._libobjs\r
if libobjdict.has_key(classname):\r
if self not in libobjdict[classname]:\r
\r
def Clear(self):\r
classname = self.GetProduceLibraryClass()\r
- if classname != None:\r
+ if classname is not None:\r
libobjdict = INFFile._libobjs\r
libobjdict[classname].remove(self)\r
if len(libobjdict[classname]) == 0:\r
return arr[1]\r
\r
def IsArchMatch(self, arch):\r
- if arch == None or self.GetArch() == 'common':\r
+ if arch is None or self.GetArch() == 'common':\r
return True\r
\r
if self.GetArch().lower() != arch.lower():\r
del objdict[self.mFilename]\r
\r
def IsMatchFamily(self, family):\r
- if family == None:\r
+ if family is None:\r
return True\r
- if self.mFamily != None:\r
+ if self.mFamily is not None:\r
if family.strip().lower() == self.mFamily.lower():\r
return True\r
else:\r
for Fv in Fdf.Profile.FvDict:\r
_GuidDict = {}\r
for FfsFile in Fdf.Profile.FvDict[Fv].FfsList:\r
- if FfsFile.InfFileName and FfsFile.NameGuid == None:\r
+ if FfsFile.InfFileName and FfsFile.NameGuid is None:\r
#\r
# Get INF file GUID\r
#\r
ExtraData=self.FdfFile)\r
InfFoundFlag = False\r
\r
- if FfsFile.NameGuid != None:\r
+ if FfsFile.NameGuid is not None:\r
_CheckPCDAsGuidPattern = re.compile("^PCD\(.+\..+\)$")\r
\r
#\r
\r
## Return the directory to store FV files\r
def _GetFvDir(self):\r
- if self._FvDir == None:\r
+ if self._FvDir is None:\r
self._FvDir = path.join(self.BuildDir, 'FV')\r
return self._FvDir\r
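Many of the `_Get*` accessors touched in this patch follow the same lazy-initialization pattern: `None` marks "not computed yet", the first call fills the cache, and later calls return the cached value. That is exactly where an identity check is wanted, because the sentinel is the `None` singleton rather than any value that merely compares equal to it. A minimal standalone sketch of the pattern, assuming nothing about the real AutoGen classes (the class and attribute names below are illustrative only):

import os.path as path

class PlatformInfoSketch(object):
    """Illustrative only: mirrors the cached-accessor style used above."""
    def __init__(self, build_dir):
        self.BuildDir = build_dir
        self._FvDir = None          # None acts as the "not computed yet" sentinel

    def _GetFvDir(self):
        # Identity check against the None singleton, per the change above
        if self._FvDir is None:
            self._FvDir = path.join(self.BuildDir, 'FV')
        return self._FvDir

    FvDir = property(_GetFvDir)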
\r
## Return the directory to store all intermediate and final files built\r
def _GetBuildDir(self):\r
- if self._BuildDir == None:\r
+ if self._BuildDir is None:\r
return self.AutoGenObjectList[0].BuildDir\r
\r
## Return the build output directory platform specifies\r
# @retval string Makefile directory\r
#\r
def _GetMakeFileDir(self):\r
- if self._MakeFileDir == None:\r
+ if self._MakeFileDir is None:\r
self._MakeFileDir = self.BuildDir\r
return self._MakeFileDir\r
\r
# @retval string Build command string\r
#\r
def _GetBuildCommand(self):\r
- if self._BuildCommand == None:\r
+ if self._BuildCommand is None:\r
# BuildCommand should be all the same. So just get one from platform AutoGen\r
self._BuildCommand = self.AutoGenObjectList[0].BuildCommand\r
return self._BuildCommand\r
\r
self.VariableInfo = None\r
\r
- if GlobalData.gFdfParser != None:\r
+ if GlobalData.gFdfParser is not None:\r
self._AsBuildInfList = GlobalData.gFdfParser.Profile.InfList\r
for Inf in self._AsBuildInfList:\r
InfClass = PathClass(NormPath(Inf), GlobalData.gWorkspace, self.Arch)\r
for SkuName in Pcd.SkuInfoList:\r
Sku = Pcd.SkuInfoList[SkuName]\r
SkuId = Sku.SkuId\r
- if SkuId == None or SkuId == '':\r
+ if SkuId is None or SkuId == '':\r
continue\r
if len(Sku.VariableName) > 0:\r
VariableGuidStructure = Sku.VariableGuidValue\r
# if the offset of a VPD is *, then it need to be fixed up by third party tool.\r
if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":\r
NeedProcessVpdMapFile = True\r
- if self.Platform.VpdToolGuid == None or self.Platform.VpdToolGuid == '':\r
+ if self.Platform.VpdToolGuid is None or self.Platform.VpdToolGuid == '':\r
EdkLogger.error("Build", FILE_NOT_FOUND, \\r
"Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")\r
\r
for DscPcd in PlatformPcds:\r
DscPcdEntry = self._PlatformPcds[DscPcd]\r
if DscPcdEntry.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:\r
- if not (self.Platform.VpdToolGuid == None or self.Platform.VpdToolGuid == ''):\r
+ if not (self.Platform.VpdToolGuid is None or self.Platform.VpdToolGuid == ''):\r
FoundFlag = False\r
for VpdPcd in VpdFile._VpdArray.keys():\r
# This PCD has been referenced by module\r
\r
# if the offset of a VPD is *, then it need to be fixed up by third party tool.\r
VpdSkuMap[DscPcd] = SkuValueMap\r
- if (self.Platform.FlashDefinition == None or self.Platform.FlashDefinition == '') and \\r
+ if (self.Platform.FlashDefinition is None or self.Platform.FlashDefinition == '') and \\r
VpdFile.GetCount() != 0:\r
EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, \r
"Fail to get FLASH_DEFINITION definition in DSC file %s which is required when DSC contains VPD PCD." % str(self.Platform.MetaFile))\r
BPDGToolName = ToolDef["PATH"]\r
break\r
# Call third party GUID BPDG tool.\r
- if BPDGToolName != None:\r
+ if BPDGToolName is not None:\r
VpdInfoFile.CallExtenalBPDGTool(BPDGToolName, VpdFilePath)\r
else:\r
EdkLogger.error("Build", FILE_NOT_FOUND, "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")\r
\r
## Return the platform build data object\r
def _GetPlatform(self):\r
- if self._Platform == None:\r
+ if self._Platform is None:\r
self._Platform = self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]\r
return self._Platform\r
\r
\r
## Return the FDF file name\r
def _GetFdfFile(self):\r
- if self._FdfFile == None:\r
+ if self._FdfFile is None:\r
if self.Workspace.FdfFile != "":\r
self._FdfFile= mws.join(self.WorkspaceDir, self.Workspace.FdfFile)\r
else:\r
\r
## Return the directory to store all intermediate and final files built\r
def _GetBuildDir(self):\r
- if self._BuildDir == None:\r
+ if self._BuildDir is None:\r
if os.path.isabs(self.OutputDir):\r
self._BuildDir = path.join(\r
path.abspath(self.OutputDir),\r
# @retval string Makefile directory\r
#\r
def _GetMakeFileDir(self):\r
- if self._MakeFileDir == None:\r
+ if self._MakeFileDir is None:\r
self._MakeFileDir = path.join(self.BuildDir, self.Arch)\r
return self._MakeFileDir\r
\r
# @retval string Build command string\r
#\r
def _GetBuildCommand(self):\r
- if self._BuildCommand == None:\r
+ if self._BuildCommand is None:\r
self._BuildCommand = []\r
if "MAKE" in self.ToolDefinition and "PATH" in self.ToolDefinition["MAKE"]:\r
self._BuildCommand += SplitOption(self.ToolDefinition["MAKE"]["PATH"])\r
# Get each tool defition for given tool chain from tools_def.txt and platform\r
#\r
def _GetToolDefinition(self):\r
- if self._ToolDefinitions == None:\r
+ if self._ToolDefinitions is None:\r
ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDictionary\r
if TAB_TOD_DEFINES_COMMAND_TYPE not in self.Workspace.ToolDef.ToolsDefTxtDatabase:\r
EdkLogger.error('build', RESOURCE_NOT_AVAILABLE, "No tools found in configuration",\r
\r
## Return the paths of tools\r
def _GetToolDefFile(self):\r
- if self._ToolDefFile == None:\r
+ if self._ToolDefFile is None:\r
self._ToolDefFile = os.path.join(self.MakeFileDir, "TOOLS_DEF." + self.Arch)\r
return self._ToolDefFile\r
\r
## Retrieve the toolchain family of given toolchain tag. Default to 'MSFT'.\r
def _GetToolChainFamily(self):\r
- if self._ToolChainFamily == None:\r
+ if self._ToolChainFamily is None:\r
ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDatabase\r
if TAB_TOD_DEFINES_FAMILY not in ToolDefinition \\r
or self.ToolChain not in ToolDefinition[TAB_TOD_DEFINES_FAMILY] \\r
return self._ToolChainFamily\r
\r
def _GetBuildRuleFamily(self):\r
- if self._BuildRuleFamily == None:\r
+ if self._BuildRuleFamily is None:\r
ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDatabase\r
if TAB_TOD_DEFINES_BUILDRULEFAMILY not in ToolDefinition \\r
or self.ToolChain not in ToolDefinition[TAB_TOD_DEFINES_BUILDRULEFAMILY] \\r
\r
## Return the build options specific for all modules in this platform\r
def _GetBuildOptions(self):\r
- if self._BuildOption == None:\r
+ if self._BuildOption is None:\r
self._BuildOption = self._ExpandBuildOption(self.Platform.BuildOptions)\r
return self._BuildOption\r
\r
## Return the build options specific for EDK modules in this platform\r
def _GetEdkBuildOptions(self):\r
- if self._EdkBuildOption == None:\r
+ if self._EdkBuildOption is None:\r
self._EdkBuildOption = self._ExpandBuildOption(self.Platform.BuildOptions, EDK_NAME)\r
return self._EdkBuildOption\r
\r
## Return the build options specific for EDKII modules in this platform\r
def _GetEdkIIBuildOptions(self):\r
- if self._EdkIIBuildOption == None:\r
+ if self._EdkIIBuildOption is None:\r
self._EdkIIBuildOption = self._ExpandBuildOption(self.Platform.BuildOptions, EDKII_NAME)\r
return self._EdkIIBuildOption\r
\r
# @retval BuildRule object\r
#\r
def _GetBuildRule(self):\r
- if self._BuildRule == None:\r
+ if self._BuildRule is None:\r
BuildRuleFile = None\r
if TAB_TAT_DEFINES_BUILD_RULE_CONF in self.Workspace.TargetTxt.TargetTxtDictionary:\r
BuildRuleFile = self.Workspace.TargetTxt.TargetTxtDictionary[TAB_TAT_DEFINES_BUILD_RULE_CONF]\r
\r
## Summarize the packages used by modules in this platform\r
def _GetPackageList(self):\r
- if self._PackageList == None:\r
+ if self._PackageList is None:\r
self._PackageList = set()\r
for La in self.LibraryAutoGenList:\r
self._PackageList.update(La.DependentPackageList)\r
\r
## Get list of non-dynamic PCDs\r
def _GetNonDynamicPcdList(self):\r
- if self._NonDynamicPcdList == None:\r
+ if self._NonDynamicPcdList is None:\r
self.CollectPlatformDynamicPcds()\r
return self._NonDynamicPcdList\r
\r
## Get list of dynamic PCDs\r
def _GetDynamicPcdList(self):\r
- if self._DynamicPcdList == None:\r
+ if self._DynamicPcdList is None:\r
self.CollectPlatformDynamicPcds()\r
return self._DynamicPcdList\r
\r
## Generate Token Number for all PCD\r
def _GetPcdTokenNumbers(self):\r
- if self._PcdTokenNumber == None:\r
+ if self._PcdTokenNumber is None:\r
self._PcdTokenNumber = sdict()\r
TokenNumber = 1\r
#\r
\r
## Summarize ModuleAutoGen objects of all modules to be built for this platform\r
def _GetModuleAutoGenList(self):\r
- if self._ModuleAutoGenList == None:\r
+ if self._ModuleAutoGenList is None:\r
self._GetAutoGenObjectList()\r
return self._ModuleAutoGenList\r
\r
## Summarize ModuleAutoGen objects of all libraries to be built for this platform\r
def _GetLibraryAutoGenList(self):\r
- if self._LibraryAutoGenList == None:\r
+ if self._LibraryAutoGenList is None:\r
self._GetAutoGenObjectList()\r
return self._LibraryAutoGenList\r
\r
LibraryPath = PlatformModule.LibraryClasses[LibraryClassName]\r
else:\r
LibraryPath = self.Platform.LibraryClasses[LibraryClassName, ModuleType]\r
- if LibraryPath == None or LibraryPath == "":\r
+ if LibraryPath is None or LibraryPath == "":\r
LibraryPath = M.LibraryClasses[LibraryClassName]\r
- if LibraryPath == None or LibraryPath == "":\r
+ if LibraryPath is None or LibraryPath == "":\r
EdkLogger.error("build", RESOURCE_NOT_AVAILABLE,\r
"Instance of library class [%s] is not found" % LibraryClassName,\r
File=self.MetaFile,\r
# for those forced library instance (NULL library), add a fake library class\r
if LibraryClassName.startswith("NULL"):\r
LibraryModule.LibraryClass.append(LibraryClassObject(LibraryClassName, [ModuleType]))\r
- elif LibraryModule.LibraryClass == None \\r
+ elif LibraryModule.LibraryClass is None \\r
or len(LibraryModule.LibraryClass) == 0 \\r
or (ModuleType != 'USER_DEFINED'\r
and ModuleType not in LibraryModule.LibraryClass[0].SupModList):\r
else:\r
LibraryModule = LibraryInstance[LibraryClassName]\r
\r
- if LibraryModule == None:\r
+ if LibraryModule is None:\r
continue\r
\r
if LibraryModule.ConstructorList != [] and LibraryModule not in Constructor:\r
if (ToPcd.TokenCName, ToPcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:\r
TokenCName = PcdItem[0]\r
break\r
- if FromPcd != None:\r
+ if FromPcd is not None:\r
if ToPcd.Pending and FromPcd.Type not in [None, '']:\r
ToPcd.Type = FromPcd.Type\r
elif (ToPcd.Type not in [None, '']) and (FromPcd.Type not in [None, ''])\\r
ToPcd.validlists = FromPcd.validlists\r
ToPcd.expressions = FromPcd.expressions\r
\r
- if FromPcd != None and ToPcd.DatumType == "VOID*" and ToPcd.MaxDatumSize in ['', None]:\r
+ if FromPcd is not None and ToPcd.DatumType == "VOID*" and ToPcd.MaxDatumSize in ['', None]:\r
EdkLogger.debug(EdkLogger.DEBUG_9, "No MaxDatumSize specified for PCD %s.%s" \\r
% (ToPcd.TokenSpaceGuidCName, TokenCName))\r
Value = ToPcd.DefaultValue\r
Sku = PcdInModule.SkuInfoList[SkuId]\r
if Sku.VariableGuid == '': continue\r
Sku.VariableGuidValue = GuidValue(Sku.VariableGuid, self.PackageList, self.MetaFile.Path)\r
- if Sku.VariableGuidValue == None:\r
+ if Sku.VariableGuidValue is None:\r
PackageList = "\n\t".join([str(P) for P in self.PackageList])\r
EdkLogger.error(\r
'build',\r
M = LibraryConsumerList.pop()\r
for LibraryName in M.Libraries:\r
Library = self.Platform.LibraryClasses[LibraryName, ':dummy:']\r
- if Library == None:\r
+ if Library is None:\r
for Key in self.Platform.LibraryClasses.data.keys():\r
if LibraryName.upper() == Key.upper():\r
Library = self.Platform.LibraryClasses[Key, ':dummy:']\r
break\r
- if Library == None:\r
+ if Library is None:\r
EdkLogger.warn("build", "Library [%s] is not found" % LibraryName, File=str(M),\r
ExtraData="\t%s [%s]" % (str(Module), self.Arch))\r
continue\r
# Key[1] -- TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE\r
#\r
if (Key[0] == self.BuildRuleFamily and\r
- (ModuleStyle == None or len(Key) < 3 or (len(Key) > 2 and Key[2] == ModuleStyle))):\r
+ (ModuleStyle is None or len(Key) < 3 or (len(Key) > 2 and Key[2] == ModuleStyle))):\r
Target, ToolChain, Arch, CommandType, Attr = Key[1].split('_')\r
if Target == self.BuildTarget or Target == "*":\r
if ToolChain == self.ToolChain or ToolChain == "*":\r
if Arch == self.Arch or Arch == "*":\r
if Options[Key].startswith("="):\r
- if OverrideList.get(Key[1]) != None:\r
+ if OverrideList.get(Key[1]) is not None:\r
OverrideList.pop(Key[1])\r
OverrideList[Key[1]] = Options[Key]\r
\r
if CommandType1 == CommandType2 or CommandType1 == "*" or CommandType2 == "*":\r
if Attr1 == Attr2 or Attr1 == "*" or Attr2 == "*":\r
if self.CalculatePriorityValue(NowKey) > self.CalculatePriorityValue(NextKey):\r
- if Options.get((self.BuildRuleFamily, NextKey)) != None:\r
+ if Options.get((self.BuildRuleFamily, NextKey)) is not None:\r
Options.pop((self.BuildRuleFamily, NextKey))\r
else:\r
- if Options.get((self.BuildRuleFamily, NowKey)) != None:\r
+ if Options.get((self.BuildRuleFamily, NowKey)) is not None:\r
Options.pop((self.BuildRuleFamily, NowKey))\r
\r
for Key in Options:\r
- if ModuleStyle != None and len (Key) > 2:\r
+ if ModuleStyle is not None and len (Key) > 2:\r
# Check Module style is EDK or EDKII.\r
# Only append build option for the matched style module.\r
if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:\r
return BuildOptions\r
\r
for Key in Options:\r
- if ModuleStyle != None and len (Key) > 2:\r
+ if ModuleStyle is not None and len (Key) > 2:\r
# Check Module style is EDK or EDKII.\r
# Only append build option for the matched style module.\r
if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:\r
BuildOptions[Tool][Attr] += " " + Value\r
else:\r
BuildOptions[Tool][Attr] = Value\r
- if Module.AutoGenVersion < 0x00010005 and self.Workspace.UniFlag != None:\r
+ if Module.AutoGenVersion < 0x00010005 and self.Workspace.UniFlag is not None:\r
#\r
# Override UNI flag only for EDK module.\r
#\r
\r
# Macros could be used in build_rule.txt (also Makefile)\r
def _GetMacros(self):\r
- if self._Macro == None:\r
+ if self._Macro is None:\r
self._Macro = sdict()\r
self._Macro["WORKSPACE" ] = self.WorkspaceDir\r
self._Macro["MODULE_NAME" ] = self.Name\r
\r
## Return the module build data object\r
def _GetModule(self):\r
- if self._Module == None:\r
+ if self._Module is None:\r
self._Module = self.Workspace.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]\r
return self._Module\r
\r
\r
## Check if the module is library or not\r
def _IsLibrary(self):\r
- if self._LibraryFlag == None:\r
- if self.Module.LibraryClass != None and self.Module.LibraryClass != []:\r
+ if self._LibraryFlag is None:\r
+ if self.Module.LibraryClass is not None and self.Module.LibraryClass != []:\r
self._LibraryFlag = True\r
else:\r
self._LibraryFlag = False\r
\r
## Return the directory to store intermediate files of the module\r
def _GetBuildDir(self):\r
- if self._BuildDir == None:\r
+ if self._BuildDir is None:\r
self._BuildDir = path.join(\r
self.PlatformInfo.BuildDir,\r
self.Arch,\r
\r
## Return the directory to store the intermediate object files of the mdoule\r
def _GetOutputDir(self):\r
- if self._OutputDir == None:\r
+ if self._OutputDir is None:\r
self._OutputDir = path.join(self.BuildDir, "OUTPUT")\r
CreateDirectory(self._OutputDir)\r
return self._OutputDir\r
\r
## Return the directory to store ffs file\r
def _GetFfsOutputDir(self):\r
- if self._FfsOutputDir == None:\r
- if GlobalData.gFdfParser != None:\r
+ if self._FfsOutputDir is None:\r
+ if GlobalData.gFdfParser is not None:\r
self._FfsOutputDir = path.join(self.PlatformInfo.BuildDir, "FV", "Ffs", self.Guid + self.Name)\r
else:\r
self._FfsOutputDir = ''\r
\r
## Return the directory to store auto-gened source files of the mdoule\r
def _GetDebugDir(self):\r
- if self._DebugDir == None:\r
+ if self._DebugDir is None:\r
self._DebugDir = path.join(self.BuildDir, "DEBUG")\r
CreateDirectory(self._DebugDir)\r
return self._DebugDir\r
\r
## Return the path of custom file\r
def _GetCustomMakefile(self):\r
- if self._CustomMakefile == None:\r
+ if self._CustomMakefile is None:\r
self._CustomMakefile = {}\r
for Type in self.Module.CustomMakefile:\r
if Type in gMakeTypeMap:\r
MakeType = gMakeTypeMap[Type]\r
else:\r
MakeType = 'nmake'\r
- if self.SourceOverrideDir != None:\r
+ if self.SourceOverrideDir is not None:\r
File = os.path.join(self.SourceOverrideDir, self.Module.CustomMakefile[Type])\r
if not os.path.exists(File):\r
File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])\r
# @retval list The token list of the dependency expression after parsed\r
#\r
def _GetDepexTokenList(self):\r
- if self._DepexList == None:\r
+ if self._DepexList is None:\r
self._DepexList = {}\r
if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:\r
return self._DepexList\r
# @retval list The token list of the dependency expression after parsed\r
#\r
def _GetDepexExpressionTokenList(self):\r
- if self._DepexExpressionList == None:\r
+ if self._DepexExpressionList is None:\r
self._DepexExpressionList = {}\r
if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:\r
return self._DepexExpressionList\r
# @retval dict The dict containing valid options\r
#\r
def _GetModuleBuildOption(self):\r
- if self._BuildOption == None:\r
+ if self._BuildOption is None:\r
self._BuildOption, self.BuildRuleOrder = self.PlatformInfo.ApplyBuildOption(self.Module)\r
if self.BuildRuleOrder:\r
self.BuildRuleOrder = ['.%s' % Ext for Ext in self.BuildRuleOrder.split()]\r
# @retval list The include path list\r
#\r
def _GetBuildOptionIncPathList(self):\r
- if self._BuildOptionIncPathList == None:\r
+ if self._BuildOptionIncPathList is None:\r
#\r
# Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC/RVCT\r
# is the former use /I , the Latter used -I to specify include directories\r
# $(CONF_DIRECTORY)/build_rule.txt and toolchain family.\r
#\r
def _GetSourceFileList(self):\r
- if self._SourceFileList == None:\r
+ if self._SourceFileList is None:\r
self._SourceFileList = []\r
for F in self.Module.Sources:\r
# match tool chain\r
\r
## Return the list of unicode files\r
def _GetUnicodeFileList(self):\r
- if self._UnicodeFileList == None:\r
+ if self._UnicodeFileList is None:\r
if TAB_UNICODE_FILE in self.FileTypes:\r
self._UnicodeFileList = self.FileTypes[TAB_UNICODE_FILE]\r
else:\r
\r
## Return the list of vfr files\r
def _GetVfrFileList(self):\r
- if self._VfrFileList == None:\r
+ if self._VfrFileList is None:\r
if TAB_VFR_FILE in self.FileTypes:\r
self._VfrFileList = self.FileTypes[TAB_VFR_FILE]\r
else:\r
\r
## Return the list of Image Definition files\r
def _GetIdfFileList(self):\r
- if self._IdfFileList == None:\r
+ if self._IdfFileList is None:\r
if TAB_IMAGE_FILE in self.FileTypes:\r
self._IdfFileList = self.FileTypes[TAB_IMAGE_FILE]\r
else:\r
# @retval list The list of files which can be built later\r
#\r
def _GetBinaryFiles(self):\r
- if self._BinaryFileList == None:\r
+ if self._BinaryFileList is None:\r
self._BinaryFileList = []\r
for F in self.Module.Binaries:\r
if F.Target not in ['COMMON', '*'] and F.Target != self.BuildTarget:\r
return self._BinaryFileList\r
\r
def _GetBuildRules(self):\r
- if self._BuildRules == None:\r
+ if self._BuildRules is None:\r
BuildRules = {}\r
BuildRuleDatabase = self.PlatformInfo.BuildRule\r
for Type in BuildRuleDatabase.FileTypeList:\r
return self._BuildRules\r
\r
def _ApplyBuildRule(self, File, FileType):\r
- if self._BuildTargets == None:\r
+ if self._BuildTargets is None:\r
self._IntroBuildTargetList = set()\r
self._FinalBuildTargetList = set()\r
self._BuildTargets = {}\r
if Source != File:\r
CreateDirectory(Source.Dir)\r
\r
- if File.IsBinary and File == Source and self._BinaryFileList != None and File in self._BinaryFileList:\r
+ if File.IsBinary and File == Source and self._BinaryFileList is not None and File in self._BinaryFileList:\r
# Skip all files that are not binary libraries\r
if not self.IsLibrary:\r
continue\r
FileType = TAB_UNKNOWN_FILE\r
\r
def _GetTargets(self):\r
- if self._BuildTargets == None:\r
+ if self._BuildTargets is None:\r
self._IntroBuildTargetList = set()\r
self._FinalBuildTargetList = set()\r
self._BuildTargets = {}\r
if self.BuildType == 'UEFI_HII':\r
UniStringAutoGenC = False\r
IdfStringAutoGenC = False\r
- if self._AutoGenFileList == None:\r
+ if self._AutoGenFileList is None:\r
self._AutoGenFileList = {}\r
AutoGenC = TemplateString()\r
AutoGenH = TemplateString()\r
AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)\r
self._AutoGenFileList[AutoFile] = str(StringH)\r
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)\r
- if UniStringBinBuffer != None and UniStringBinBuffer.getvalue() != "":\r
+ if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != "":\r
AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir)\r
self._AutoGenFileList[AutoFile] = UniStringBinBuffer.getvalue()\r
AutoFile.IsBinary = True\r
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)\r
- if UniStringBinBuffer != None:\r
+ if UniStringBinBuffer is not None:\r
UniStringBinBuffer.close()\r
if str(StringIdf) != "":\r
AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir)\r
self._AutoGenFileList[AutoFile] = str(StringIdf)\r
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)\r
- if IdfGenBinBuffer != None and IdfGenBinBuffer.getvalue() != "":\r
+ if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != "":\r
AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir)\r
self._AutoGenFileList[AutoFile] = IdfGenBinBuffer.getvalue()\r
AutoFile.IsBinary = True\r
self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)\r
- if IdfGenBinBuffer != None:\r
+ if IdfGenBinBuffer is not None:\r
IdfGenBinBuffer.close()\r
return self._AutoGenFileList\r
\r
## Return the list of library modules explicitly or implicityly used by this module\r
def _GetLibraryList(self):\r
- if self._DependentLibraryList == None:\r
+ if self._DependentLibraryList is None:\r
# only merge library classes and PCD for non-library module\r
if self.IsLibrary:\r
self._DependentLibraryList = []\r
# @retval list The list of PCD\r
#\r
def _GetModulePcdList(self):\r
- if self._ModulePcdList == None:\r
+ if self._ModulePcdList is None:\r
# apply PCD settings from platform\r
self._ModulePcdList = self.PlatformInfo.ApplyPcdSetting(self.Module, self.Module.Pcds)\r
self.UpdateComments(self._PcdComments, self.Module.PcdComments)\r
# @retval list The list of PCD\r
#\r
def _GetLibraryPcdList(self):\r
- if self._LibraryPcdList == None:\r
+ if self._LibraryPcdList is None:\r
Pcds = sdict()\r
if not self.IsLibrary:\r
# get PCDs from dependent libraries\r
# @retval dict The mapping between GUID cname and its value\r
#\r
def _GetGuidList(self):\r
- if self._GuidList == None:\r
+ if self._GuidList is None:\r
self._GuidList = sdict()\r
self._GuidList.update(self.Module.Guids)\r
for Library in self.DependentLibraryList:\r
return self._GuidList\r
\r
def GetGuidsUsedByPcd(self):\r
- if self._GuidsUsedByPcd == None:\r
+ if self._GuidsUsedByPcd is None:\r
self._GuidsUsedByPcd = sdict()\r
self._GuidsUsedByPcd.update(self.Module.GetGuidsUsedByPcd())\r
for Library in self.DependentLibraryList:\r
# @retval dict The mapping between protocol cname and its value\r
#\r
def _GetProtocolList(self):\r
- if self._ProtocolList == None:\r
+ if self._ProtocolList is None:\r
self._ProtocolList = sdict()\r
self._ProtocolList.update(self.Module.Protocols)\r
for Library in self.DependentLibraryList:\r
# @retval dict The mapping between PPI cname and its value\r
#\r
def _GetPpiList(self):\r
- if self._PpiList == None:\r
+ if self._PpiList is None:\r
self._PpiList = sdict()\r
self._PpiList.update(self.Module.Ppis)\r
for Library in self.DependentLibraryList:\r
# @retval list The list path\r
#\r
def _GetIncludePathList(self):\r
- if self._IncludePathList == None:\r
+ if self._IncludePathList is None:\r
self._IncludePathList = []\r
if self.AutoGenVersion < 0x00010005:\r
for Inc in self.Module.Includes:\r
return\r
\r
# Skip the following code for modules with no source files\r
- if self.SourceFileList == None or self.SourceFileList == []:\r
+ if self.SourceFileList is None or self.SourceFileList == []:\r
return\r
\r
# Skip the following code for modules without any binary files\r
HexFormat = '0x%016x'\r
PcdValue = HexFormat % int(Pcd.DefaultValue, 0)\r
else:\r
- if Pcd.MaxDatumSize == None or Pcd.MaxDatumSize == '':\r
+ if Pcd.MaxDatumSize is None or Pcd.MaxDatumSize == '':\r
EdkLogger.error("build", AUTOGEN_ERROR,\r
"Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, TokenCName)\r
)\r
\r
## Summarize the ModuleAutoGen objects of all libraries used by this module\r
def _GetLibraryAutoGenList(self):\r
- if self._LibraryAutoGenList == None:\r
+ if self._LibraryAutoGenList is None:\r
self._LibraryAutoGenList = []\r
for Library in self.DependentLibraryList:\r
La = ModuleAutoGen(\r
return True\r
\r
def GetTimeStampPath(self):\r
- if self._TimeStampPath == None:\r
+ if self._TimeStampPath is None:\r
self._TimeStampPath = os.path.join(self.MakeFileDir, 'AutoGenTimeStamp')\r
return self._TimeStampPath\r
def CreateTimeStamp(self, Makefile):\r
def __init__(self, File=None, Content=None, LineIndex=0, SupportedFamily=["MSFT", "INTEL", "GCC", "RVCT"]):\r
self.RuleFile = File\r
# Read build rules from file if it's not none\r
- if File != None:\r
+ if File is not None:\r
try:\r
self.RuleContent = open(File, 'r').readlines()\r
except:\r
EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=File)\r
- elif Content != None:\r
+ elif Content is not None:\r
self.RuleContent = Content\r
else:\r
EdkLogger.error("build", PARAMETER_MISSING, ExtraData="No rule file or string given")\r
EdkLogger.error("build", FORMAT_INVALID, "No file type given",\r
File=self.RuleFile, Line=LineIndex + 1,\r
ExtraData=self.RuleContent[LineIndex])\r
- if self._FileTypePattern.match(FileType) == None:\r
+ if self._FileTypePattern.match(FileType) is None:\r
EdkLogger.error("build", FORMAT_INVALID, File=self.RuleFile, Line=LineIndex + 1,\r
ExtraData="Only character, number (non-first character), '_' and '-' are allowed in file type")\r
# new format: File-Type.Build-Type.Arch\r
FileList = [File.strip() for File in self.RuleContent[LineIndex].split(",")]\r
for ToolChainFamily in self._FamilyList:\r
InputFiles = self._RuleInfo[ToolChainFamily, self._State]\r
- if InputFiles == None:\r
+ if InputFiles is None:\r
InputFiles = []\r
self._RuleInfo[ToolChainFamily, self._State] = InputFiles\r
InputFiles.extend(FileList)\r
def ParseCommon(self, LineIndex):\r
for ToolChainFamily in self._FamilyList:\r
Items = self._RuleInfo[ToolChainFamily, self._State]\r
- if Items == None:\r
+ if Items is None:\r
Items = []\r
self._RuleInfo[ToolChainFamily, self._State] = Items\r
Items.append(self.RuleContent[LineIndex])\r
if not Value.endswith('U'):\r
Value += 'U'\r
if Pcd.DatumType not in ['UINT8', 'UINT16', 'UINT32', 'UINT64', 'BOOLEAN']:\r
- if Pcd.MaxDatumSize == None or Pcd.MaxDatumSize == '':\r
+ if Pcd.MaxDatumSize is None or Pcd.MaxDatumSize == '':\r
EdkLogger.error("build", AUTOGEN_ERROR,\r
"Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, TokenCName),\r
ExtraData="[%s]" % str(Info))\r
\r
if Pcd.DatumType not in ['UINT8', 'UINT16', 'UINT32', 'UINT64', 'BOOLEAN', 'VOID*']:\r
# handle structure PCD\r
- if Pcd.MaxDatumSize == None or Pcd.MaxDatumSize == '':\r
+ if Pcd.MaxDatumSize is None or Pcd.MaxDatumSize == '':\r
EdkLogger.error("build", AUTOGEN_ERROR,\r
"Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, TokenCName),\r
ExtraData="[%s]" % str(Info))\r
\r
FilePath = ""\r
FileChangeFlag = True\r
- if File == None:\r
+ if File is None:\r
sys.stdout.write(Buffer.getvalue())\r
FilePath = "STDOUT"\r
else:\r
EdkLogger.SetLevel(EdkLogger.QUIET)\r
elif Option.verbose:\r
EdkLogger.SetLevel(EdkLogger.VERBOSE)\r
- elif Option.debug != None:\r
+ elif Option.debug is not None:\r
EdkLogger.SetLevel(Option.debug + 1)\r
else:\r
EdkLogger.SetLevel(EdkLogger.INFO)\r
\r
try:\r
- if Option.ModuleType == None or Option.ModuleType not in gType2Phase:\r
+ if Option.ModuleType is None or Option.ModuleType not in gType2Phase:\r
EdkLogger.error("GenDepex", OPTION_MISSING, "Module type is not specified or supported")\r
\r
DxsFile = ''\r
EdkLogger.error("GenDepex", OPTION_MISSING, "No expression string or file given")\r
\r
Dpx = DependencyExpression(DxsString, Option.ModuleType, Option.Optimize)\r
- if Option.OutputFile != None:\r
+ if Option.OutputFile is not None:\r
FileChangeFlag = Dpx.Generate(Option.OutputFile)\r
if not FileChangeFlag and DxsFile:\r
#\r
Dpx.Generate()\r
except BaseException, X:\r
EdkLogger.quiet("")\r
- if Option != None and Option.debug != None:\r
+ if Option is not None and Option.debug is not None:\r
EdkLogger.quiet(traceback.format_exc())\r
else:\r
EdkLogger.quiet(str(X))\r
# skip non-C files\r
if File.Ext not in [".c", ".C"] or File.Name == "AutoGen.c":\r
continue\r
- elif DepSet == None:\r
+ elif DepSet is None:\r
DepSet = set(self.FileDependency[File])\r
else:\r
DepSet &= set(self.FileDependency[File])\r
# in case nothing in SourceFileList\r
- if DepSet == None:\r
+ if DepSet is None:\r
DepSet = set()\r
#\r
# Extract common files list in the dependency files\r
\r
# TRICK: for not generating GenFds call in makefile if no FDF file\r
MacroList = []\r
- if PlatformInfo.FdfFile != None and PlatformInfo.FdfFile != "":\r
+ if PlatformInfo.FdfFile is not None and PlatformInfo.FdfFile != "":\r
FdfFileList = [PlatformInfo.FdfFile]\r
# macros passed to GenFds\r
MacroList.append('"%s=%s"' % ("EFI_SOURCE", GlobalData.gEfiSource.replace('\\', '\\\\')))\r
for SkuName in Pcd.SkuInfoList:\r
Sku = Pcd.SkuInfoList[SkuName]\r
SkuId = Sku.SkuId\r
- if SkuId == None or SkuId == '':\r
+ if SkuId is None or SkuId == '':\r
continue\r
\r
\r
self.LoadIdfFile(File)\r
\r
def LoadIdfFile(self, File = None):\r
- if File == None:\r
+ if File is None:\r
EdkLogger.error("Image Definition File Parser", PARSER_ERROR, 'No Image definition file is given.')\r
self.File = File\r
\r
if Len == 4 and LineDetails[2] != 'TRANSPARENT':\r
EdkLogger.error("Image Definition File Parser", PARSER_ERROR, 'Please use the keyword "TRANSPARENT" to describe the transparency setting in Line %s of File %s.' % (LineNo, File.Path))\r
MatchString = re.match('^[a-zA-Z][a-zA-Z0-9_]*$', LineDetails[1], re.UNICODE)\r
- if MatchString == None or MatchString.end(0) != len(LineDetails[1]):\r
+ if MatchString is None or MatchString.end(0) != len(LineDetails[1]):\r
EdkLogger.error('Image Definition File Parser', FORMAT_INVALID, 'The Image token name %s defined in Idf file %s contains the invalid character.' % (LineDetails[1], File.Path))\r
if LineDetails[1] not in self.ImageIDList:\r
self.ImageIDList.append(LineDetails[1])\r
Name = StringItem.StringName\r
Token = StringItem.Token\r
Referenced = StringItem.Referenced\r
- if Name != None:\r
+ if Name is not None:\r
Line = ''\r
if Referenced == True:\r
if (ValueStartPtr - len(DEFINE_STR + Name)) <= 0:\r
# @retval FileList: A list of all files found\r
#\r
def GetFileList(SourceFileList, IncludeList, SkipList):\r
- if IncludeList == None:\r
+ if IncludeList is None:\r
EdkLogger.error("UnicodeStringGather", AUTOGEN_ERROR, "Include path for unicode file is not defined")\r
\r
FileList = []\r
- if SkipList == None:\r
+ if SkipList is None:\r
SkipList = []\r
\r
for File in SourceFileList:\r
if IsCompatibleMode:\r
if length == 3 and LangName.isalpha():\r
TempLangName = LangConvTable.get(LangName.lower())\r
- if TempLangName != None:\r
+ if TempLangName is not None:\r
return TempLangName\r
return LangName\r
else:\r
if LangName.isalpha():\r
return LangName\r
elif length == 3:\r
- if LangName.isalpha() and LangConvTable.get(LangName.lower()) == None:\r
+ if LangName.isalpha() and LangConvTable.get(LangName.lower()) is None:\r
return LangName\r
elif length == 5:\r
if LangName[0:2].isalpha() and LangName[2] == '-':\r
elif length >= 6:\r
if LangName[0:2].isalpha() and LangName[2] == '-':\r
return LangName\r
- if LangName[0:3].isalpha() and LangConvTable.get(LangName.lower()) == None and LangName[3] == '-':\r
+ if LangName[0:3].isalpha() and LangConvTable.get(LangName.lower()) is None and LangName[3] == '-':\r
return LangName\r
\r
EdkLogger.error("Unicode File Parser", FORMAT_INVALID, "Invalid RFC 4646 language code : %s" % LangName, File)\r
self.UseOtherLangDef = UseOtherLangDef\r
self.Length = 0\r
\r
- if Name != None:\r
+ if Name is not None:\r
self.StringName = Name\r
self.StringNameByteList = UniToHexList(Name)\r
- if Value != None:\r
+ if Value is not None:\r
self.StringValue = Value + u'\x00' # Add a NULL at string tail\r
self.StringValueByteList = UniToHexList(self.StringValue)\r
self.Length = len(self.StringValueByteList)\r
- if Token != None:\r
+ if Token is not None:\r
self.Token = Token\r
\r
def __str__(self):\r
repr(self.UseOtherLangDef)\r
\r
def UpdateValue(self, Value = None):\r
- if Value != None:\r
+ if Value is not None:\r
self.StringValue = Value + u'\x00' # Add a NULL at string tail\r
self.StringValueByteList = UniToHexList(self.StringValue)\r
self.Length = len(self.StringValueByteList)\r
# Check the string name\r
if Name != '':\r
MatchString = re.match('^[a-zA-Z][a-zA-Z0-9_]*$', Name, re.UNICODE)\r
- if MatchString == None or MatchString.end(0) != len(Name):\r
+ if MatchString is None or MatchString.end(0) != len(Name):\r
EdkLogger.error('Unicode File Parser', FORMAT_INVALID, 'The string token name %s defined in UNI file %s contains the invalid character.' % (Name, self.File))\r
LanguageList = Item.split(u'#language ')\r
for IndexI in range(len(LanguageList)):\r
# Load a .uni file\r
#\r
def LoadUniFile(self, File = None):\r
- if File == None:\r
+ if File is None:\r
EdkLogger.error("Unicode File Parser", PARSER_ERROR, 'No unicode file is given')\r
self.File = File\r
#\r
# Check the string name\r
if not self.IsCompatibleMode and Name != '':\r
MatchString = re.match('^[a-zA-Z][a-zA-Z0-9_]*$', Name, re.UNICODE)\r
- if MatchString == None or MatchString.end(0) != len(Name):\r
+ if MatchString is None or MatchString.end(0) != len(Name):\r
EdkLogger.error('Unicode File Parser', FORMAT_INVALID, 'The string token name %s defined in UNI file %s contains the invalid character.' % (Name, self.File))\r
self.AddStringToList(Name, Language, Value)\r
continue\r
IsAdded = True\r
if Name in self.OrderedStringDict[Language]:\r
IsAdded = False\r
- if Value != None:\r
+ if Value is not None:\r
ItemIndexInList = self.OrderedStringDict[Language][Name]\r
Item = self.OrderedStringList[Language][ItemIndexInList]\r
Item.UpdateValue(Value)\r
EdkLogger.SetLevel(EdkLogger.VERBOSE)\r
elif Options.opt_quiet:\r
EdkLogger.SetLevel(EdkLogger.QUIET)\r
- elif Options.debug_level != None:\r
+ elif Options.debug_level is not None:\r
EdkLogger.SetLevel(Options.debug_level + 1) \r
else:\r
EdkLogger.SetLevel(EdkLogger.INFO)\r
\r
- if Options.bin_filename == None:\r
+ if Options.bin_filename is None:\r
EdkLogger.error("BPDG", ATTRIBUTE_NOT_AVAILABLE, "Please use the -o option to specify the file name for the VPD binary file") \r
- if Options.filename == None:\r
+ if Options.filename is None:\r
EdkLogger.error("BPDG", ATTRIBUTE_NOT_AVAILABLE, "Please use the -m option to specify the file name for the mapping file") \r
\r
Force = False\r
- if Options.opt_force != None:\r
+ if Options.opt_force is not None:\r
Force = True\r
\r
- if (Args[0] != None) :\r
+ if (Args[0] is not None) :\r
StartBpdg(Args[0], Options.filename, Options.bin_filename, Force)\r
else :\r
EdkLogger.error("BPDG", ATTRIBUTE_NOT_AVAILABLE, "Please specify the file which contain the VPD pcd info.",\r
# Delete useless lines\r
while (True) :\r
try :\r
- if (self.FileLinesList[count] == None) :\r
+ if (self.FileLinesList[count] is None) :\r
del(self.FileLinesList[count])\r
else :\r
count += 1\r
# Process the pcds one by one base on the pcd's value and size\r
count = 0\r
for line in self.FileLinesList:\r
- if line != None :\r
+ if line is not None :\r
PCD = PcdEntry(line[0], line[1], line[2], line[3], line[4],line[5], self.InputFileName) \r
# Strip the space char\r
PCD.PcdCName = PCD.PcdCName.strip(' ')\r
#\r
# Load Dec file if filename is not None\r
#\r
- if Filename != None:\r
+ if Filename is not None:\r
self.LoadDecFile(Filename)\r
\r
#\r
# @param Dict: The dictionary to be printed\r
#\r
def printDict(Dict):\r
- if Dict != None:\r
+ if Dict is not None:\r
KeyList = Dict.keys()\r
for Key in KeyList:\r
if Dict[Key] != '':\r
#\r
# Load Dsc file if filename is not None\r
#\r
- if Filename != None:\r
+ if Filename is not None:\r
self.LoadDscFile(Filename)\r
\r
#\r
#\r
def GenSkuInfoList(self, SkuNameList, SkuInfo, VariableName='', VariableGuid='', VariableOffset='', HiiDefaultValue='', VpdOffset='', DefaultValue=''):\r
SkuNameList = GetSplitValueList(SkuNameList)\r
- if SkuNameList == None or SkuNameList == [] or SkuNameList == ['']:\r
+ if SkuNameList is None or SkuNameList == [] or SkuNameList == ['']:\r
SkuNameList = ['DEFAULT']\r
SkuInfoList = {}\r
for Item in SkuNameList:\r
#\r
# Check environment valiable 'WORKSPACE'\r
#\r
- if os.environ.get('WORKSPACE') == None:\r
+ if os.environ.get('WORKSPACE') is None:\r
print 'ERROR: WORKSPACE not defined. Please run EdkSetup from the EDK II install directory.'\r
return False\r
\r
# @retval True The two pcds are the same\r
#\r
def __eq__(self, Other):\r
- return Other != None and self.TokenCName == Other.TokenCName and self.TokenSpaceGuidCName == Other.TokenSpaceGuidCName\r
+ return Other is not None and self.TokenCName == Other.TokenCName and self.TokenSpaceGuidCName == Other.TokenSpaceGuidCName\r
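
This __eq__ override is itself a good illustration of why the patch prefers identity tests: x == None dispatches to whatever __eq__ the left-hand object defines, while x is None is a plain identity check that nothing can intercept. A minimal sketch with an intentionally careless override (the class is illustrative, not taken from the tree):

    class Tagged(object):
        def __init__(self, Tag):
            self.Tag = Tag
        def __eq__(self, Other):
            # careless: assumes Other always has a .Tag attribute
            return self.Tag == Other.Tag

    T = Tagged('PcdFoo')
    print T is None                    # False: identity test, __eq__ never runs
    try:
        print T == None                # dispatches to Tagged.__eq__ ...
    except AttributeError, Err:
        print 'AttributeError:', Err   # ... which dereferences None.Tag

The __eq__ shown just above already guards with Other is not None, which is exactly the safe form.
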
\r
## Override __hash__ function\r
#\r
def __init__(self, Name = None, SupModList = [], Type = None):\r
self.LibraryClass = Name\r
self.SupModList = SupModList\r
- if Type != None:\r
+ if Type is not None:\r
self.SupModList = CleanString(Type).split(DataType.TAB_SPACE_SPLIT)\r
\r
## ModuleBuildClassObject\r
for Libs in Pb.LibraryClass:\r
for Type in Libs.SupModList:\r
Instance = self.FindLibraryClassInstanceOfLibrary(Lib, Arch, Type)\r
- if Instance == None:\r
+ if Instance is None:\r
Instance = RecommendedInstance\r
Pb.LibraryClasses[(Lib, Type)] = Instance\r
else:\r
# For Module\r
#\r
Instance = self.FindLibraryClassInstanceOfModule(Lib, Arch, Pb.ModuleType, Inf)\r
- if Instance == None:\r
+ if Instance is None:\r
Instance = RecommendedInstance\r
Pb.LibraryClasses[(Lib, Pb.ModuleType)] = Instance\r
\r
if not self.IsModuleDefinedInPlatform(Inf, Arch, InfList):\r
continue\r
Module = self.Build[Arch].ModuleDatabase[Inf]\r
- if Module.LibraryClass == None or Module.LibraryClass == []:\r
+ if Module.LibraryClass is None or Module.LibraryClass == []:\r
self.UpdateLibrariesOfModule(Platform, Module, Arch)\r
for Key in Module.LibraryClasses:\r
Lib = Module.LibraryClasses[Key]\r
continue\r
\r
LibraryClassName = Key[0]\r
- if LibraryClassName not in LibraryInstance or LibraryInstance[LibraryClassName] == None:\r
- if LibraryPath == None or LibraryPath == "":\r
+ if LibraryClassName not in LibraryInstance or LibraryInstance[LibraryClassName] is None:\r
+ if LibraryPath is None or LibraryPath == "":\r
LibraryInstance[LibraryClassName] = None\r
continue\r
LibraryModule = ModuleDatabase[LibraryPath]\r
LibraryInstance[LibraryClassName] = LibraryModule\r
LibraryConsumerList.append(LibraryModule)\r
EdkLogger.verbose("\t" + LibraryClassName + " : " + str(LibraryModule))\r
- elif LibraryPath == None or LibraryPath == "":\r
+ elif LibraryPath is None or LibraryPath == "":\r
continue\r
else:\r
LibraryModule = LibraryInstance[LibraryClassName]\r
Q = []\r
for LibraryClassName in LibraryInstance:\r
M = LibraryInstance[LibraryClassName]\r
- if M == None:\r
+ if M is None:\r
EdkLogger.error("AutoGen", AUTOGEN_ERROR,\r
"Library instance for library class [%s] is not found" % LibraryClassName,\r
ExtraData="\t%s [%s]" % (str(Module), Arch))\r
# check if there're duplicate library classes\r
#\r
for Lc in M.LibraryClass:\r
- if Lc.SupModList != None and ModuleType not in Lc.SupModList:\r
+ if Lc.SupModList is not None and ModuleType not in Lc.SupModList:\r
EdkLogger.error("AutoGen", AUTOGEN_ERROR,\r
"Module type [%s] is not supported by library instance [%s]" % (ModuleType, str(M)),\r
ExtraData="\t%s" % str(Module))\r
if (Name, Guid) in Pcds:\r
OwnerPlatform = Dsc\r
Pcd = Pcds[(Name, Guid)]\r
- if Pcd.Type != '' and Pcd.Type != None:\r
+ if Pcd.Type != '' and Pcd.Type is not None:\r
NewType = Pcd.Type\r
if NewType in DataType.PCD_DYNAMIC_TYPE_LIST:\r
NewType = DataType.TAB_PCDS_DYNAMIC\r
EdkLogger.error("AutoGen", PARSER_ERROR, ErrorMsg)\r
\r
\r
- if Pcd.DatumType != '' and Pcd.DatumType != None:\r
+ if Pcd.DatumType != '' and Pcd.DatumType is not None:\r
DatumType = Pcd.DatumType\r
- if Pcd.TokenValue != '' and Pcd.TokenValue != None:\r
+ if Pcd.TokenValue != '' and Pcd.TokenValue is not None:\r
Token = Pcd.TokenValue\r
- if Pcd.DefaultValue != '' and Pcd.DefaultValue != None:\r
+ if Pcd.DefaultValue != '' and Pcd.DefaultValue is not None:\r
Value = Pcd.DefaultValue\r
- if Pcd.MaxDatumSize != '' and Pcd.MaxDatumSize != None:\r
+ if Pcd.MaxDatumSize != '' and Pcd.MaxDatumSize is not None:\r
MaxDatumSize = Pcd.MaxDatumSize\r
SkuInfoList = Pcd.SkuInfoList\r
\r
"msg" : Message,\r
}\r
\r
- if ExtraData != None:\r
+ if ExtraData is not None:\r
LogText = _DebugMessageTemplate % TemplateDict + "\n %s" % ExtraData\r
else:\r
LogText = _DebugMessageTemplate % TemplateDict\r
return\r
\r
# if no tool name given, use caller's source file name as tool name\r
- if ToolName == None or ToolName == "":\r
+ if ToolName is None or ToolName == "":\r
ToolName = os.path.basename(traceback.extract_stack()[-2][0])\r
\r
- if Line == None:\r
+ if Line is None:\r
Line = "..."\r
else:\r
Line = "%d" % Line\r
"msg" : Message,\r
}\r
\r
- if File != None:\r
+ if File is not None:\r
LogText = _WarningMessageTemplate % TemplateDict\r
else:\r
LogText = _WarningMessageTemplateWithoutFile % TemplateDict\r
\r
- if ExtraData != None:\r
+ if ExtraData is not None:\r
LogText += "\n %s" % ExtraData\r
\r
_InfoLogger.log(WARN, LogText)\r
# it's True. This is the default behavior.\r
#\r
def error(ToolName, ErrorCode, Message=None, File=None, Line=None, ExtraData=None, RaiseError=IsRaiseError):\r
- if Line == None:\r
+ if Line is None:\r
Line = "..."\r
else:\r
Line = "%d" % Line\r
\r
- if Message == None:\r
+ if Message is None:\r
if ErrorCode in gErrorMessage:\r
Message = gErrorMessage[ErrorCode]\r
else:\r
Message = gErrorMessage[UNKNOWN_ERROR]\r
\r
- if ExtraData == None:\r
+ if ExtraData is None:\r
ExtraData = ""\r
\r
TemplateDict = {\r
"extra" : ExtraData\r
}\r
\r
- if File != None:\r
+ if File is not None:\r
LogText = _ErrorMessageTemplate % TemplateDict\r
else:\r
LogText = _ErrorMessageTemplateWithoutFile % TemplateDict\r
#\r
# Load Fdf file if filename is not None\r
#\r
- if Filename != None:\r
+ if Filename is not None:\r
self.LoadFdfFile(Filename)\r
\r
#\r
if Profile.FileName == File and Profile.MacroName == Name and Profile.DefinedAtLine <= Line:\r
Value = Profile.MacroValue\r
\r
- if Value != None:\r
+ if Value is not None:\r
Str = Str.replace('$(' + Name + ')', Value)\r
MacroEnd = MacroStart + len(Value) \r
\r
FileLineTuple = GetRealFileLine(self.FileName, Line)\r
if Name in InputMacroDict:\r
MacroValue = InputMacroDict[Name]\r
- if Op == None:\r
- if Value == 'Bool' and MacroValue == None or MacroValue.upper() == 'FALSE':\r
+ if Op is None:\r
+ if Value == 'Bool' and MacroValue is None or MacroValue.upper() == 'FALSE':\r
return False\r
return True\r
elif Op == '!=':\r
else:\r
return False\r
else:\r
- if (self.__IsHex(Value) or Value.isdigit()) and (self.__IsHex(MacroValue) or (MacroValue != None and MacroValue.isdigit())):\r
+ if (self.__IsHex(Value) or Value.isdigit()) and (self.__IsHex(MacroValue) or (MacroValue is not None and MacroValue.isdigit())):\r
InputVal = long(Value, 0)\r
MacroVal = long(MacroValue, 0)\r
if Op == '>':\r
\r
for Profile in AllMacroList:\r
if Profile.FileName == FileLineTuple[0] and Profile.MacroName == Name and Profile.DefinedAtLine <= FileLineTuple[1]:\r
- if Op == None:\r
- if Value == 'Bool' and Profile.MacroValue == None or Profile.MacroValue.upper() == 'FALSE':\r
+ if Op is None:\r
+ if Value == 'Bool' and Profile.MacroValue is None or Profile.MacroValue.upper() == 'FALSE':\r
return False\r
return True\r
elif Op == '!=':\r
else:\r
return False\r
else:\r
- if (self.__IsHex(Value) or Value.isdigit()) and (self.__IsHex(Profile.MacroValue) or (Profile.MacroValue != None and Profile.MacroValue.isdigit())):\r
+ if (self.__IsHex(Value) or Value.isdigit()) and (self.__IsHex(Profile.MacroValue) or (Profile.MacroValue is not None and Profile.MacroValue.isdigit())):\r
InputVal = long(Value, 0)\r
MacroVal = long(Profile.MacroValue, 0)\r
if Op == '>':\r
\r
if not self.__GetNextToken():\r
return False\r
- if gGuidPattern.match(self.__Token) != None:\r
+ if gGuidPattern.match(self.__Token) is not None:\r
return True\r
else:\r
self.__UndoToken()\r
pass\r
\r
for Item in Obj.BlockSizeList:\r
- if Item[0] == None or Item[1] == None:\r
+ if Item[0] is None or Item[1] is None:\r
raise Warning("expected block statement for Fd Section", self.FileName, self.CurrentLineNumber)\r
\r
return True\r
\r
FvImageSectionObj = CommonDataClass.FdfClass.FvImageSectionClassObject()\r
FvImageSectionObj.Alignment = AlignValue\r
- if FvObj != None:\r
+ if FvObj is not None:\r
FvImageSectionObj.Fv = FvObj\r
FvImageSectionObj.FvName = None\r
else:\r
Rule.CheckSum = CheckSum\r
Rule.Fixed = Fixed\r
Rule.KeyStringList = KeyStringList\r
- if KeepReloc != None:\r
+ if KeepReloc is not None:\r
Rule.KeepReloc = KeepReloc\r
\r
while True:\r
Rule.Fixed = Fixed\r
Rule.FileExtension = Ext\r
Rule.KeyStringList = KeyStringList\r
- if KeepReloc != None:\r
+ if KeepReloc is not None:\r
Rule.KeepReloc = KeepReloc\r
\r
return Rule\r
Rule.Fixed = Fixed\r
Rule.FileName = self.__Token\r
Rule.KeyStringList = KeyStringList\r
- if KeepReloc != None:\r
+ if KeepReloc is not None:\r
Rule.KeepReloc = KeepReloc\r
return Rule\r
\r
EfiSectionObj.KeepReloc = False\r
else:\r
EfiSectionObj.KeepReloc = True\r
- if Obj.KeepReloc != None and Obj.KeepReloc != EfiSectionObj.KeepReloc:\r
+ if Obj.KeepReloc is not None and Obj.KeepReloc != EfiSectionObj.KeepReloc:\r
raise Warning("Section type %s has reloc strip flag conflict with Rule At Line %d" % (EfiSectionObj.SectionType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)\r
else:\r
raise Warning("Section type %s could not have reloc strip flag At Line %d" % (EfiSectionObj.SectionType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)\r
raise Warning("expected Component version At Line ", self.FileName, self.CurrentLineNumber)\r
\r
Pattern = re.compile('-$|[0-9]{0,1}[0-9]{1}\.[0-9]{0,1}[0-9]{1}')\r
- if Pattern.match(self.__Token) == None:\r
+ if Pattern.match(self.__Token) is None:\r
raise Warning("Unknown version format At line ", self.FileName, self.CurrentLineNumber)\r
CompStatementObj.CompVer = self.__Token\r
\r
for elementRegion in FdObj.RegionList:\r
if elementRegion.RegionType == 'FV':\r
for elementRegionData in elementRegion.RegionDataList:\r
- if elementRegionData != None and elementRegionData.upper() not in FvList:\r
+ if elementRegionData is not None and elementRegionData.upper() not in FvList:\r
FvList.append(elementRegionData.upper())\r
return FvList\r
\r
\r
for FfsObj in FvObj.FfsList:\r
if isinstance(FfsObj, FfsFileStatement.FileStatement):\r
- if FfsObj.FvName != None and FfsObj.FvName.upper() not in RefFvList:\r
+ if FfsObj.FvName is not None and FfsObj.FvName.upper() not in RefFvList:\r
RefFvList.append(FfsObj.FvName.upper())\r
- elif FfsObj.FdName != None and FfsObj.FdName.upper() not in RefFdList:\r
+ elif FfsObj.FdName is not None and FfsObj.FdName.upper() not in RefFdList:\r
RefFdList.append(FfsObj.FdName.upper())\r
else:\r
self.__GetReferencedFdFvTupleFromSection(FfsObj, RefFdList, RefFvList) \r
while SectionStack != []:\r
SectionObj = SectionStack.pop()\r
if isinstance(SectionObj, FvImageSection.FvImageSection):\r
- if SectionObj.FvName != None and SectionObj.FvName.upper() not in FvList:\r
+ if SectionObj.FvName is not None and SectionObj.FvName.upper() not in FvList:\r
FvList.append(SectionObj.FvName.upper())\r
- if SectionObj.Fv != None and SectionObj.Fv.UiFvName != None and SectionObj.Fv.UiFvName.upper() not in FvList:\r
+ if SectionObj.Fv is not None and SectionObj.Fv.UiFvName is not None and SectionObj.Fv.UiFvName.upper() not in FvList:\r
FvList.append(SectionObj.Fv.UiFvName.upper())\r
self.__GetReferencedFdFvTuple(SectionObj.Fv, FdList, FvList)\r
\r
#\r
# Load Inf file if filename is not None\r
#\r
- if Filename != None:\r
+ if Filename is not None:\r
self.LoadInfFile(Filename)\r
\r
#\r
for varname in varnames:\r
if varname in line:\r
m = re.match('^([\da-fA-FxX]+)([\s\S]*)([_]*%s)$' % varname, line)\r
- if m != None:\r
+ if m is not None:\r
ret.append((varname, m.group(1)))\r
return ret\r
\r
# status handler\r
if status == 3:\r
m = re.match('^([\w_\.]+) +([\da-fA-Fx]+) +([\da-fA-Fx]+)$', line)\r
- if m != None:\r
+ if m is not None:\r
sections.append(m.groups(0))\r
for varname in varnames:\r
Str = ''\r
m = re.match("^.data.(%s)" % varname, line)\r
- if m != None:\r
+ if m is not None:\r
m = re.match(".data.(%s)$" % varname, line)\r
- if m != None:\r
+ if m is not None:\r
Str = lines[index + 1]\r
else:\r
Str = line[len(".data.%s" % varname):]\r
if Str:\r
m = re.match('^([\da-fA-Fx]+) +([\da-fA-Fx]+)', Str.strip())\r
- if m != None:\r
+ if m is not None:\r
varoffset.append((varname, int(m.groups(0)[0], 16) , int(sections[-1][1], 16), sections[-1][0]))\r
\r
if not varoffset:\r
return []\r
# get section information from efi file\r
efisecs = PeImageClass(efifilepath).SectionHeaderList\r
- if efisecs == None or len(efisecs) == 0:\r
+ if efisecs is None or len(efisecs) == 0:\r
return []\r
#redirection\r
redirection = 0\r
continue \r
if status == 1 and len(line) != 0:\r
m = secRe.match(line)\r
- assert m != None, "Fail to parse the section in map file , line is %s" % line\r
+ assert m is not None, "Fail to parse the section in map file , line is %s" % line\r
sec_no, sec_start, sec_length, sec_name, sec_class = m.groups(0)\r
secs.append([int(sec_no, 16), int(sec_start, 16), int(sec_length, 16), sec_name, sec_class])\r
if status == 2 and len(line) != 0:\r
for varname in varnames:\r
m = symRe.match(line)\r
- assert m != None, "Fail to parse the symbol in map file, line is %s" % line\r
+ assert m is not None, "Fail to parse the symbol in map file, line is %s" % line\r
sec_no, sym_offset, sym_name, vir_addr = m.groups(0)\r
sec_no = int(sec_no, 16)\r
sym_offset = int(sym_offset, 16)\r
vir_addr = int(vir_addr, 16)\r
m2 = re.match('^[_]*(%s)' % varname, sym_name)\r
- if m2 != None:\r
+ if m2 is not None:\r
# found a binary pcd entry in map file\r
for sec in secs:\r
if sec[0] == sec_no and (sym_offset >= sec[1] and sym_offset < sec[1] + sec[2]):\r
\r
# get section information from efi file\r
efisecs = PeImageClass(efifilepath).SectionHeaderList\r
- if efisecs == None or len(efisecs) == 0:\r
+ if efisecs is None or len(efisecs) == 0:\r
return []\r
\r
ret = []\r
# @param Directory The directory name\r
#\r
def CreateDirectory(Directory):\r
- if Directory == None or Directory.strip() == "":\r
+ if Directory is None or Directory.strip() == "":\r
return True\r
try:\r
if not os.access(Directory, os.F_OK):\r
# @param Directory The directory name\r
#\r
def RemoveDirectory(Directory, Recursively=False):\r
- if Directory == None or Directory.strip() == "" or not os.path.exists(Directory):\r
+ if Directory is None or Directory.strip() == "" or not os.path.exists(Directory):\r
return\r
if Recursively:\r
CurrentDirectory = os.getcwd()\r
except:\r
EdkLogger.error("", FILE_OPEN_FAILURE, ExtraData=File, RaiseError=False)\r
finally:\r
- if Fd != None:\r
+ if Fd is not None:\r
Fd.close()\r
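
The handle is pre-set to None and only rebound once open() succeeds, so the is not None guard keeps the finally block from calling close() on nothing when the open itself failed. A minimal sketch of the idiom, assuming a simple pickle-style dump helper (the function and path names are made up):

    import cPickle

    def DumpObject(Obj, Path):
        Fd = None
        try:
            Fd = open(Path, 'wb')
            cPickle.dump(Obj, Fd)
        except IOError:
            print 'failed to write %s' % Path
        finally:
            if Fd is not None:    # open() may have failed before Fd was bound
                Fd.close()
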
\r
## Restore a Python object from a file\r
EdkLogger.verbose("Failed to load [%s]\n\t%s" % (File, str(e)))\r
Data = None\r
finally:\r
- if Fd != None:\r
+ if Fd is not None:\r
Fd.close()\r
return Data\r
\r
# @retval False if file doesn't exists\r
#\r
def ValidFile(File, Ext=None):\r
- if Ext != None:\r
+ if Ext is not None:\r
Dummy, FileExt = os.path.splitext(File)\r
if FileExt.lower() != Ext.lower():\r
return False\r
#\r
def ValidFile2(AllFiles, File, Ext=None, Workspace='', EfiSource='', EdkSource='', Dir='.', OverrideDir=''):\r
NewFile = File\r
- if Ext != None:\r
+ if Ext is not None:\r
Dummy, FileExt = os.path.splitext(File)\r
if FileExt.lower() != Ext.lower():\r
return False, File\r
\r
# Replace the Edk macros\r
- if OverrideDir != '' and OverrideDir != None:\r
+ if OverrideDir != '' and OverrideDir is not None:\r
if OverrideDir.find('$(EFI_SOURCE)') > -1:\r
OverrideDir = OverrideDir.replace('$(EFI_SOURCE)', EfiSource)\r
if OverrideDir.find('$(EDK_SOURCE)') > -1:\r
NewFile = File.replace('$(EFI_SOURCE)', EfiSource)\r
NewFile = NewFile.replace('$(EDK_SOURCE)', EdkSource)\r
NewFile = AllFiles[os.path.normpath(NewFile)]\r
- if NewFile != None:\r
+ if NewFile is not None:\r
return True, NewFile\r
\r
# Second check the path with override value\r
- if OverrideDir != '' and OverrideDir != None:\r
+ if OverrideDir != '' and OverrideDir is not None:\r
NewFile = AllFiles[os.path.normpath(os.path.join(OverrideDir, File))]\r
- if NewFile != None:\r
+ if NewFile is not None:\r
return True, NewFile\r
\r
# Last check the path with normal definitions\r
File = os.path.join(Dir, File)\r
NewFile = AllFiles[os.path.normpath(File)]\r
- if NewFile != None:\r
+ if NewFile is not None:\r
return True, NewFile\r
\r
return False, File\r
#\r
def ValidFile3(AllFiles, File, Workspace='', EfiSource='', EdkSource='', Dir='.', OverrideDir=''):\r
# Replace the Edk macros\r
- if OverrideDir != '' and OverrideDir != None:\r
+ if OverrideDir != '' and OverrideDir is not None:\r
if OverrideDir.find('$(EFI_SOURCE)') > -1:\r
OverrideDir = OverrideDir.replace('$(EFI_SOURCE)', EfiSource)\r
if OverrideDir.find('$(EDK_SOURCE)') > -1:\r
File = File.replace('$(EFI_SOURCE)', EfiSource)\r
File = File.replace('$(EDK_SOURCE)', EdkSource)\r
NewFile = AllFiles[os.path.normpath(File)]\r
- if NewFile != None:\r
+ if NewFile is not None:\r
NewRelaPath = os.path.dirname(NewFile)\r
File = os.path.basename(NewFile)\r
#NewRelaPath = NewFile[:len(NewFile) - len(File.replace("..\\", '').replace("../", '')) - 1]\r
break\r
\r
# Second check the path with override value\r
- if OverrideDir != '' and OverrideDir != None:\r
+ if OverrideDir != '' and OverrideDir is not None:\r
NewFile = AllFiles[os.path.normpath(os.path.join(OverrideDir, File))]\r
- if NewFile != None:\r
+ if NewFile is not None:\r
#NewRelaPath = os.path.dirname(NewFile)\r
NewRelaPath = NewFile[:len(NewFile) - len(File.replace("..\\", '').replace("../", '')) - 1]\r
break\r
\r
# Last check the path with normal definitions\r
NewFile = AllFiles[os.path.normpath(os.path.join(Dir, File))]\r
- if NewFile != None:\r
+ if NewFile is not None:\r
break\r
\r
# No file found\r
self.CodaMessage = CloseMessage\r
self.ProgressChar = ProgressChar\r
self.Interval = Interval\r
- if Progressor._StopFlag == None:\r
+ if Progressor._StopFlag is None:\r
Progressor._StopFlag = threading.Event()\r
\r
## Start to print progress charater\r
# @param OpenMessage The string printed before progress charaters\r
#\r
def Start(self, OpenMessage=None):\r
- if OpenMessage != None:\r
+ if OpenMessage is not None:\r
self.PromptMessage = OpenMessage\r
Progressor._StopFlag.clear()\r
- if Progressor._ProgressThread == None:\r
+ if Progressor._ProgressThread is None:\r
Progressor._ProgressThread = threading.Thread(target=self._ProgressThreadEntry)\r
Progressor._ProgressThread.setDaemon(False)\r
Progressor._ProgressThread.start()\r
#\r
def Stop(self, CloseMessage=None):\r
OriginalCodaMessage = self.CodaMessage\r
- if CloseMessage != None:\r
+ if CloseMessage is not None:\r
self.CodaMessage = CloseMessage\r
self.Abort()\r
self.CodaMessage = OriginalCodaMessage\r
## Abort the progress display\r
@staticmethod\r
def Abort():\r
- if Progressor._StopFlag != None:\r
+ if Progressor._StopFlag is not None:\r
Progressor._StopFlag.set()\r
- if Progressor._ProgressThread != None:\r
+ if Progressor._ProgressThread is not None:\r
Progressor._ProgressThread.join()\r
Progressor._ProgressThread = None\r
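
_StopFlag and _ProgressThread are class-level attributes that start as None and are created on first use; None is simply the "not created yet" sentinel, and the static Abort() re-checks it because it can legitimately be called before any instance ever existed. A compact sketch of the lazy-creation pattern, with illustrative names:

    import threading

    class Spinner(object):
        _StopFlag = None                        # shared, created lazily

        def __init__(self):
            if Spinner._StopFlag is None:
                Spinner._StopFlag = threading.Event()

        @staticmethod
        def Abort():
            if Spinner._StopFlag is not None:   # may run before any __init__
                Spinner._StopFlag.set()
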
\r
return key, value\r
\r
def update(self, dict=None, **kwargs):\r
- if dict != None:\r
+ if dict is not None:\r
for k, v in dict.items():\r
self[k] = v\r
if len(kwargs):\r
if self._Level_ > 1:\r
RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)]\r
\r
- if FirstKey == None or str(FirstKey).upper() in self._ValidWildcardList:\r
+ if FirstKey is None or str(FirstKey).upper() in self._ValidWildcardList:\r
FirstKey = self._Wildcard\r
\r
if self._Single_:\r
if FirstKey == self._Wildcard:\r
if FirstKey in self.data:\r
Value = self.data[FirstKey][RestKeys]\r
- if Value == None:\r
+ if Value is None:\r
for Key in self.data:\r
Value = self.data[Key][RestKeys]\r
- if Value != None: break\r
+ if Value is not None: break\r
else:\r
if FirstKey in self.data:\r
Value = self.data[FirstKey][RestKeys]\r
- if Value == None and self._Wildcard in self.data:\r
+ if Value is None and self._Wildcard in self.data:\r
#print "Value=None"\r
Value = self.data[self._Wildcard][RestKeys]\r
else:\r
if FirstKey == self._Wildcard:\r
if FirstKey in self.data:\r
Value = self.data[FirstKey]\r
- if Value == None:\r
+ if Value is None:\r
for Key in self.data:\r
Value = self.data[Key]\r
- if Value != None: break\r
+ if Value is not None: break\r
else:\r
if FirstKey in self.data:\r
Value = self.data[FirstKey]\r
return hash(self.Path)\r
\r
def _GetFileKey(self):\r
- if self._Key == None:\r
+ if self._Key is None:\r
self._Key = self.Path.upper() # + self.ToolChainFamily + self.TagName + self.ToolCode + self.Target\r
return self._Key\r
\r
#\r
def CheckPcdTokenInfo(TokenInfoString, Section, File, LineNo = -1):\r
Format = '<TokenSpaceGuidCName>.<PcdCName>'\r
- if TokenInfoString != '' and TokenInfoString != None:\r
+ if TokenInfoString != '' and TokenInfoString is not None:\r
TokenInfoList = GetSplitValueList(TokenInfoString, TAB_SPLIT)\r
if len(TokenInfoList) == 2:\r
return True\r
LineList = Lines.split('\n')\r
for Line in LineList:\r
Line = CleanString(Line, CommentCharacter)\r
- if Line == None or Line == '':\r
+ if Line is None or Line == '':\r
continue\r
\r
if findBlock == False:\r
# @retval True The file type is correct\r
#\r
def CheckFileType(CheckFilename, ExtName, ContainerFilename, SectionName, Line, LineNo= -1):\r
- if CheckFilename != '' and CheckFilename != None:\r
+ if CheckFilename != '' and CheckFilename is not None:\r
(Root, Ext) = os.path.splitext(CheckFilename)\r
if Ext.upper() != ExtName.upper():\r
ContainerFile = open(ContainerFilename, 'r').read()\r
#\r
def CheckFileExist(WorkspaceDir, CheckFilename, ContainerFilename, SectionName, Line, LineNo= -1):\r
CheckFile = ''\r
- if CheckFilename != '' and CheckFilename != None:\r
+ if CheckFilename != '' and CheckFilename is not None:\r
CheckFile = WorkspaceFile(WorkspaceDir, CheckFilename)\r
if not os.path.isfile(CheckFile):\r
ContainerFile = open(ContainerFilename, 'r').read()\r
DataType.TAB_TAT_DEFINES_BUILD_RULE_CONF : '',\r
}\r
self.ConfDirectoryPath = ""\r
- if Filename != None:\r
+ if Filename is not None:\r
self.LoadTargetTxtFile(Filename)\r
\r
## LoadTargetTxtFile\r
self.ConfDirectoryPath = os.path.dirname(FileName)\r
except:\r
EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=FileName)\r
- if F != None:\r
+ if F is not None:\r
F.close()\r
\r
for Line in F:\r
# @param Dict: The dictionary to be printed\r
#\r
def printDict(Dict):\r
- if Dict != None:\r
+ if Dict is not None:\r
KeyList = Dict.keys()\r
for Key in KeyList:\r
if Dict[Key] != '':\r
for Env in os.environ:\r
self.MacroDictionary["ENV(%s)" % Env] = os.environ[Env]\r
\r
- if FileName != None:\r
+ if FileName is not None:\r
self.LoadToolDefFile(FileName)\r
\r
## LoadToolDefFile\r
# @param offset integer value for VPD's offset in specific SKU.\r
#\r
def Add(self, Vpd, skuname,Offset):\r
- if (Vpd == None):\r
+ if (Vpd is None):\r
EdkLogger.error("VpdInfoFile", BuildToolError.ATTRIBUTE_UNKNOWN_ERROR, "Invalid VPD PCD entry.")\r
\r
if not (Offset >= 0 or Offset == "*"):\r
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID, \r
"Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName))\r
elif Vpd.DatumType in ["BOOLEAN", "UINT8", "UINT16", "UINT32", "UINT64"]: \r
- if Vpd.MaxDatumSize == None or Vpd.MaxDatumSize == "":\r
+ if Vpd.MaxDatumSize is None or Vpd.MaxDatumSize == "":\r
Vpd.MaxDatumSize = VpdInfoFile._MAX_SIZE_TYPE[Vpd.DatumType]\r
else:\r
if Vpd.MaxDatumSize <= 0:\r
# If \r
# @param FilePath The given file path which would hold VPD information\r
def Write(self, FilePath):\r
- if not (FilePath != None or len(FilePath) != 0):\r
+ if not (FilePath is not None or len(FilePath) != 0):\r
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID, \r
"Invalid parameter FilePath: %s." % FilePath) \r
\r
# @param VpdFileName The string path name for VPD information guid.txt\r
# \r
def CallExtenalBPDGTool(ToolPath, VpdFileName):\r
- assert ToolPath != None, "Invalid parameter ToolPath"\r
- assert VpdFileName != None and os.path.exists(VpdFileName), "Invalid parameter VpdFileName"\r
+ assert ToolPath is not None, "Invalid parameter ToolPath"\r
+ assert VpdFileName is not None and os.path.exists(VpdFileName), "Invalid parameter VpdFileName"\r
\r
OutputDir = os.path.dirname(VpdFileName)\r
FileName = os.path.basename(VpdFileName)\r
EdkLogger.error("BPDG", BuildToolError.COMMAND_FAILURE, ExtraData="%s" % (str(X)))\r
(out, error) = PopenObject.communicate()\r
print out\r
- while PopenObject.returncode == None :\r
+ while PopenObject.returncode is None :\r
PopenObject.wait()\r
\r
if PopenObject.returncode != 0:\r
class CommonClass(object):\r
def __init__(self, Usage = None, FeatureFlag = '', SupArchList = None, HelpText = ''):\r
self.Usage = Usage\r
- if self.Usage == None:\r
+ if self.Usage is None:\r
self.Usage = []\r
self.FeatureFlag = FeatureFlag\r
self.SupArchList = SupArchList\r
- if self.SupArchList == None:\r
+ if self.SupArchList is None:\r
self.SupArchList = []\r
self.HelpText = HelpText\r
self.HelpTextList = []\r
self.PcdCName = ''\r
self.Value = ''\r
self.Offset = ''\r
- if self.ValidUsage == None:\r
+ if self.ValidUsage is None:\r
self.ValidUsage = []\r
self.SkuInfoList = SkuInfoList\r
- if self.SkuInfoList == None:\r
+ if self.SkuInfoList is None:\r
self.SkuInfoList = {}\r
self.SupModuleList = SupModuleList\r
- if self.SupModuleList == None:\r
+ if self.SupModuleList is None:\r
self.SupModuleList = []\r
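
The repeated "if self.X is None: self.X = []" lines are the usual None-sentinel idiom for mutable defaults: spelling SupArchList=[] directly in the signature (as the SupModList = [] default seen earlier does) would share a single list object across every instance built without that argument. A minimal sketch of the pitfall and the fix, with made-up class names:

    class Bad(object):
        def __init__(self, Items=[]):          # one shared default list
            self.Items = Items

    class Good(object):
        def __init__(self, Items=None):        # None sentinel, fresh list each time
            if Items is None:
                Items = []
            self.Items = Items

    A, B = Bad(), Bad()
    A.Items.append(1)
    print B.Items    # [1] -- the default list leaked between instances

    C, D = Good(), Good()
    C.Items.append(1)
    print D.Items    # []  -- each instance got its own list
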
CommonClass.__init__(self)\r
self.PcdErrors = []\r
\r
if self.backtracking == 0:\r
\r
- if d != None:\r
+ if d is not None:\r
self.function_definition_stack[-1].ModifierText = self.input.toString(d.start,d.stop)\r
else:\r
self.function_definition_stack[-1].ModifierText = ''\r
self.function_definition_stack[-1].DeclText = self.input.toString(declarator1.start,declarator1.stop)\r
self.function_definition_stack[-1].DeclLine = declarator1.start.line\r
self.function_definition_stack[-1].DeclOffset = declarator1.start.charPositionInLine\r
- if a != None:\r
+ if a is not None:\r
self.function_definition_stack[-1].LBLine = a.start.line\r
self.function_definition_stack[-1].LBOffset = a.start.charPositionInLine\r
else:\r
return \r
if self.backtracking == 0:\r
\r
- if b != None:\r
+ if b is not None:\r
self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, self.input.toString(b.start,b.stop), self.input.toString(c.start,c.stop))\r
else:\r
self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, '', self.input.toString(c.start,c.stop))\r
return \r
if self.backtracking == 0:\r
\r
- if t != None:\r
+ if t is not None:\r
self.StoreVariableDeclaration(s.start.line, s.start.charPositionInLine, t.start.line, t.start.charPositionInLine, self.input.toString(s.start,s.stop), self.input.toString(t.start,t.stop))\r
\r
\r
return \r
if self.backtracking == 0:\r
\r
- if s.stop != None:\r
+ if s.stop is not None:\r
self.StoreStructUnionDefinition(s.start.line, s.start.charPositionInLine, s.stop.line, s.stop.charPositionInLine, self.input.toString(s.start,s.stop))\r
\r
\r
return \r
if self.backtracking == 0:\r
\r
- if e.stop != None:\r
+ if e.stop is not None:\r
self.StoreEnumerationDefinition(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start,e.stop))\r
\r
\r
RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)\r
for Record in RecordSet:\r
Name = Record[1].strip()\r
- if Name != '' and Name != None:\r
+ if Name != '' and Name is not None:\r
if Name[0] == '(':\r
Name = Name[1:Name.find(')')]\r
if Name.find('(') > -1:\r
InCharLiteral = not InCharLiteral\r
# meet new line, then no longer in a comment for // and '#'\r
if self.__CurrentChar() == T_CHAR_LF:\r
- if HashComment and PPDirectiveObj != None:\r
+ if HashComment and PPDirectiveObj is not None:\r
if PPDirectiveObj.Content.rstrip(T_CHAR_CR).endswith(T_CHAR_BACKSLASH):\r
PPDirectiveObj.Content += T_CHAR_LF\r
PPExtend = True\r
InCharLiteral = not InCharLiteral\r
# meet new line, then no longer in a comment for // and '#'\r
if self.__CurrentChar() == T_CHAR_LF:\r
- if HashComment and PPDirectiveObj != None:\r
+ if HashComment and PPDirectiveObj is not None:\r
if PPDirectiveObj.Content.rstrip(T_CHAR_CR).endswith(T_CHAR_BACKSLASH):\r
PPDirectiveObj.Content += T_CHAR_LF\r
PPExtend = True\r
self.BuildMetaDataFileDatabase(SpeciDirs)\r
if self.ScanSourceCode:\r
EdkLogger.quiet("Building database for Meta Data File Done!")\r
- if SpeciDirs == None:\r
+ if SpeciDirs is None:\r
c.CollectSourceCodeDataIntoDB(EccGlobalData.gTarget)\r
else:\r
for specificDir in SpeciDirs:\r
#\r
def BuildMetaDataFileDatabase(self, SpecificDirs = None):\r
ScanFolders = []\r
- if SpecificDirs == None:\r
+ if SpecificDirs is None:\r
ScanFolders.append(EccGlobalData.gTarget)\r
else:\r
for specificDir in SpecificDirs: \r
self.SetLogLevel(Options)\r
\r
# Set other options\r
- if Options.ConfigFile != None:\r
+ if Options.ConfigFile is not None:\r
self.ConfigFile = Options.ConfigFile\r
- if Options.OutputFile != None:\r
+ if Options.OutputFile is not None:\r
self.OutputFile = Options.OutputFile\r
- if Options.ReportFile != None:\r
+ if Options.ReportFile is not None:\r
self.ReportFile = Options.ReportFile\r
- if Options.ExceptionFile != None:\r
+ if Options.ExceptionFile is not None:\r
self.ExceptionFile = Options.ExceptionFile\r
- if Options.Target != None:\r
+ if Options.Target is not None:\r
if not os.path.isdir(Options.Target):\r
EdkLogger.error("ECC", BuildToolError.OPTION_VALUE_INVALID, ExtraData="Target [%s] does NOT exist" % Options.Target)\r
else:\r
else:\r
EdkLogger.warn("Ecc", EdkLogger.ECC_ERROR, "The target source tree was not specified, using current WORKSPACE instead!")\r
EccGlobalData.gTarget = os.path.normpath(os.getenv("WORKSPACE"))\r
- if Options.keepdatabase != None:\r
+ if Options.keepdatabase is not None:\r
self.IsInit = False\r
- if Options.metadata != None and Options.sourcecode != None:\r
+ if Options.metadata is not None and Options.sourcecode is not None:\r
EdkLogger.error("ECC", BuildToolError.OPTION_CONFLICT, ExtraData="-m and -s can't be specified at one time")\r
- if Options.metadata != None:\r
+ if Options.metadata is not None:\r
self.ScanSourceCode = False\r
- if Options.sourcecode != None:\r
+ if Options.sourcecode is not None:\r
self.ScanMetaData = False\r
- if Options.folders != None:\r
+ if Options.folders is not None:\r
self.OnlyScan = True\r
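
These checks work because optparse leaves an option attribute at None unless the flag appeared on the command line (or an explicit default was declared), so is not None is the reliable "was it passed?" test; plain truthiness would also treat an empty string or 0 as "not passed". A minimal sketch (the flags are illustrative, not the tool's real option table):

    from optparse import OptionParser

    Parser = OptionParser()
    Parser.add_option('-c', dest='ConfigFile')
    Parser.add_option('-k', dest='keepdatabase', action='store_true')

    Options, Args = Parser.parse_args(['-c', 'exception.xml'])
    print Options.ConfigFile is not None     # True  -- '-c' was supplied
    print Options.keepdatabase is not None   # False -- '-k' was not supplied
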
\r
## SetLogLevel\r
# @param Option: The option list including log level setting\r
#\r
def SetLogLevel(self, Option):\r
- if Option.verbose != None:\r
+ if Option.verbose is not None:\r
EdkLogger.SetLevel(EdkLogger.VERBOSE)\r
- elif Option.quiet != None:\r
+ elif Option.quiet is not None:\r
EdkLogger.SetLevel(EdkLogger.QUIET)\r
- elif Option.debug != None:\r
+ elif Option.debug is not None:\r
EdkLogger.SetLevel(Option.debug + 1)\r
else:\r
EdkLogger.SetLevel(EdkLogger.INFO)\r
SqlCommand = """select max(ID) from %s""" % self.Table\r
Record = self.Cur.execute(SqlCommand).fetchall()\r
Id = Record[0][0]\r
- if Id == None:\r
+ if Id is None:\r
Id = self.IdBase\r
return Id\r
\r
def InitTable(self):\r
EdkLogger.verbose("\nInitialize table DataModel started ...")\r
Count = self.GetCount()\r
- if Count != None and Count != 0:\r
+ if Count is not None and Count != 0:\r
return\r
for Item in DataClass.MODEL_LIST:\r
CrossIndex = Item[1]\r
self.Start()\r
\r
# No specific ARCH or Platform given, use raw data\r
- if self._RawTable and (len(DataInfo) == 1 or DataInfo[1] == None):\r
+ if self._RawTable and (len(DataInfo) == 1 or DataInfo[1] is None):\r
return self._RawTable.Query(*DataInfo)\r
\r
# Do post-process if necessary\r
self._ValueList = ['','','']\r
# parse current line, result will be put in self._ValueList\r
self._SectionParser[self._SectionType](self)\r
- if self._ValueList == None or self._ItemType == MODEL_META_DATA_DEFINE:\r
+ if self._ValueList is None or self._ItemType == MODEL_META_DATA_DEFINE:\r
self._ItemType = -1\r
continue\r
#\r
\r
self._ValueList = ['', '', '']\r
self._SectionParser[SectionType](self)\r
- if self._ValueList == None:\r
+ if self._ValueList is None:\r
continue\r
#\r
# Model, Value1, Value2, Value3, Arch, ModuleType, BelongsToItem=-1, BelongsToFile=-1,\r
File=self._FileWithError, ExtraData=' '.join(self._ValueList), \r
Line=self._LineIndex+1)\r
\r
- if self._ValueList == None:\r
+ if self._ValueList is None:\r
continue \r
\r
NewOwner = self._IdMapping.get(Owner, -1)\r
# section content\r
self._ValueList = ['','','']\r
self._SectionParser[self._SectionType[0]](self)\r
- if self._ValueList == None or self._ItemType == MODEL_META_DATA_DEFINE:\r
+ if self._ValueList is None or self._ItemType == MODEL_META_DATA_DEFINE:\r
self._ItemType = -1\r
self._Comments = []\r
continue\r
#\r
# Load Fdf file if filename is not None\r
#\r
- if Filename != None:\r
+ if Filename is not None:\r
try:\r
self.LoadFdfFile(Filename)\r
except Exception:\r
ConditionString = "Model=%s AND Enabled>=0" % Model\r
ValueString = "Value1,Value2,Value3,Usage,Scope1,Scope2,ID,StartLine"\r
\r
- if Arch != None and Arch != 'COMMON':\r
+ if Arch is not None and Arch != 'COMMON':\r
ConditionString += " AND (Scope1='%s' OR Scope1='COMMON')" % Arch\r
- if Platform != None and Platform != 'COMMON':\r
+ if Platform is not None and Platform != 'COMMON':\r
ConditionString += " AND (Scope2='%s' OR Scope2='COMMON' OR Scope2='DEFAULT')" % Platform\r
\r
SqlCommand = "SELECT %s FROM %s WHERE %s" % (ValueString, self.Table, ConditionString)\r
ConditionString = "Model=%s AND Enabled>=0" % Model\r
ValueString = "Value1,Value2,Value3,Scope1,ID,StartLine"\r
\r
- if Arch != None and Arch != 'COMMON':\r
+ if Arch is not None and Arch != 'COMMON':\r
ConditionString += " AND (Scope1='%s' OR Scope1='COMMON')" % Arch\r
\r
SqlCommand = "SELECT %s FROM %s WHERE %s" % (ValueString, self.Table, ConditionString)\r
ConditionString = "Model=%s AND Enabled>0" % Model\r
ValueString = "Value1,Value2,Value3,Scope1,Scope2,ID,StartLine"\r
\r
- if Scope1 != None and Scope1 != 'COMMON':\r
+ if Scope1 is not None and Scope1 != 'COMMON':\r
ConditionString += " AND (Scope1='%s' OR Scope1='COMMON')" % Scope1\r
- if Scope2 != None and Scope2 != 'COMMON':\r
+ if Scope2 is not None and Scope2 != 'COMMON':\r
ConditionString += " AND (Scope2='%s' OR Scope2='COMMON' OR Scope2='DEFAULT')" % Scope2\r
\r
- if BelongsToItem != None:\r
+ if BelongsToItem is not None:\r
ConditionString += " AND BelongsToItem=%s" % BelongsToItem\r
else:\r
ConditionString += " AND BelongsToItem<0"\r
\r
- if FromItem != None:\r
+ if FromItem is not None:\r
ConditionString += " AND FromItem=%s" % FromItem\r
\r
SqlCommand = "SELECT %s FROM %s WHERE %s" % (ValueString, self.Table, ConditionString)\r
def CreateXmlElement(Name, String, NodeList, AttributeList):\r
Doc = xml.dom.minidom.Document()\r
Element = Doc.createElement(Name)\r
- if String != '' and String != None:\r
+ if String != '' and String is not None:\r
Element.appendChild(Doc.createTextNode(String))\r
\r
for Item in NodeList:\r
if type(Item) == type([]):\r
Key = Item[0]\r
Value = Item[1]\r
- if Key != '' and Key != None and Value != '' and Value != None:\r
+ if Key != '' and Key is not None and Value != '' and Value is not None:\r
Node = Doc.createElement(Key)\r
Node.appendChild(Doc.createTextNode(Value))\r
Element.appendChild(Node)\r
for Item in AttributeList:\r
Key = Item[0]\r
Value = Item[1]\r
- if Key != '' and Key != None and Value != '' and Value != None:\r
+ if Key != '' and Key is not None and Value != '' and Value is not None:\r
Element.setAttribute(Key, Value)\r
\r
return Element\r
# @revel Nodes A list of XML nodes matching XPath style Sting.\r
#\r
def XmlList(Dom, String):\r
- if String == None or String == "" or Dom == None or Dom == "":\r
+ if String is None or String == "" or Dom is None or Dom == "":\r
return []\r
if Dom.nodeType == Dom.DOCUMENT_NODE:\r
Dom = Dom.documentElement\r
# @revel Node A single XML node matching XPath style Sting.\r
#\r
def XmlNode(Dom, String):\r
- if String == None or String == "" or Dom == None or Dom == "":\r
+ if String is None or String == "" or Dom is None or Dom == "":\r
return ""\r
if Dom.nodeType == Dom.DOCUMENT_NODE:\r
Dom = Dom.documentElement\r
Db.UpdateIdentifierBelongsToFunction()\r
\r
def GetTableID(FullFileName, ErrorMsgList=None):\r
- if ErrorMsgList == None:\r
+ if ErrorMsgList is None:\r
ErrorMsgList = []\r
\r
Db = GetDB()\r
if os.path.splitext(FullFileName)[1].upper() not in ('.H'):\r
return []\r
IFList = IncludeFileListDict.get(FullFileName)\r
- if IFList != None:\r
+ if IFList is not None:\r
return IFList\r
\r
FileID = GetTableID(FullFileName)\r
return None\r
\r
def GetAllIncludeFiles(FullFileName):\r
- if AllIncludeFileListDict.get(FullFileName) != None:\r
+ if AllIncludeFileListDict.get(FullFileName) is not None:\r
return AllIncludeFileListDict.get(FullFileName)\r
\r
FileDirName = os.path.dirname(FullFileName)\r
IncludePathList = IncludePathListDict.get(FileDirName)\r
- if IncludePathList == None:\r
+ if IncludePathList is None:\r
IncludePathList = MetaDataParser.GetIncludeListOfFile(EccGlobalData.gWorkspace, FullFileName, GetDB())\r
if FileDirName not in IncludePathList:\r
IncludePathList.insert(0, FileDirName)\r
FileName = FileName.strip('\"')\r
FileName = FileName.lstrip('<').rstrip('>').strip()\r
FullPath = GetFullPathOfIncludeFile(FileName, IncludePathList)\r
- if FullPath != None:\r
+ if FullPath is not None:\r
IncludeFileQueue.append(FullPath)\r
\r
i = 0\r
FileName = FileName.strip('\"')\r
FileName = FileName.lstrip('<').rstrip('>').strip()\r
FullPath = GetFullPathOfIncludeFile(FileName, IncludePathList)\r
- if FullPath != None and FullPath not in IncludeFileQueue:\r
+ if FullPath is not None and FullPath not in IncludeFileQueue:\r
IncludeFileQueue.insert(i + 1, FullPath)\r
i += 1\r
\r
def GetTypedefDict(FullFileName):\r
\r
Dict = ComplexTypeDict.get(FullFileName)\r
- if Dict != None:\r
+ if Dict is not None:\r
return Dict\r
\r
FileID = GetTableID(FullFileName)\r
def GetSUDict(FullFileName):\r
\r
Dict = SUDict.get(FullFileName)\r
- if Dict != None:\r
+ if Dict is not None:\r
return Dict\r
\r
FileID = GetTableID(FullFileName)\r
\r
def GetFinalTypeValue(Type, FieldName, TypedefDict, SUDict):\r
Value = TypedefDict.get(Type)\r
- if Value == None:\r
+ if Value is None:\r
Value = SUDict.get(Type)\r
- if Value == None:\r
+ if Value is None:\r
return None\r
\r
LBPos = Value.find('{')\r
for FT in FTList:\r
if FT not in ('struct', 'union'):\r
Value = TypedefDict.get(FT)\r
- if Value == None:\r
+ if Value is None:\r
Value = SUDict.get(FT)\r
break\r
\r
- if Value == None:\r
+ if Value is None:\r
return None\r
\r
LBPos = Value.find('{')\r
return None\r
\r
def GetRealType(Type, TypedefDict, TargetType=None):\r
- if TargetType != None and Type == TargetType:\r
+ if TargetType is not None and Type == TargetType:\r
return Type\r
while TypedefDict.get(Type):\r
Type = TypedefDict.get(Type)\r
- if TargetType != None and Type == TargetType:\r
+ if TargetType is not None and Type == TargetType:\r
return Type\r
return Type\r
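
The typedef and struct/union caches above lean on dict.get() returning None for a missing key, so is None / is not None cleanly separates "not cached yet" from a cached value, while the while TypedefDict.get(Type) loop relies on None being falsy to stop once the alias chain runs out. A small sketch of that chain-following, with made-up table contents:

    TypedefDict = {'EFI_STATUS': 'RETURN_STATUS', 'RETURN_STATUS': 'UINTN'}

    def ResolveTypedef(Type, TypedefDict):
        # Follow typedef aliases until the name no longer resolves
        while TypedefDict.get(Type) is not None:
            Type = TypedefDict[Type]
        return Type

    print ResolveTypedef('EFI_STATUS', TypedefDict)   # UINTN
    print ResolveTypedef('VOID', TypedefDict)         # VOID (not in the table)
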
\r
while Index < len(RefList):\r
FieldName = RefList[Index]\r
FromType = GetFinalTypeValue(Type, FieldName, TypedefDict, SUDict)\r
- if FromType == None:\r
+ if FromType is None:\r
return None\r
# we want to determine the exact type.\r
- if TargetType != None:\r
+ if TargetType is not None:\r
Type = FromType.split()[0]\r
# we only want to check if it is a pointer\r
else:\r
# Type = GetDataTypeFromModifier(Result[0]).split()[-1]\r
TypeList = GetDataTypeFromModifier(Result[0]).split()\r
Type = TypeList[-1]\r
- if len(TypeList) > 1 and StarList != None:\r
+ if len(TypeList) > 1 and StarList is not None:\r
for Star in StarList:\r
Type = Type.strip()\r
Type = Type.rstrip(Star)\r
Type = TypeList[-1]\r
if Type == '*' and len(TypeList) >= 2:\r
Type = TypeList[-2]\r
- if len(TypeList) > 1 and StarList != None:\r
+ if len(TypeList) > 1 and StarList is not None:\r
for Star in StarList:\r
Type = Type.strip()\r
Type = Type.rstrip(Star)\r
else:\r
TypeList = GetDataTypeFromModifier(Result[0]).split()\r
Type = TypeList[-1]\r
- if len(TypeList) > 1 and StarList != None:\r
+ if len(TypeList) > 1 and StarList is not None:\r
for Star in StarList:\r
Type = Type.strip()\r
Type = Type.rstrip(Star)\r
else:\r
TypeList = GetDataTypeFromModifier(Result[0]).split()\r
Type = TypeList[-1]\r
- if len(TypeList) > 1 and StarList != None:\r
+ if len(TypeList) > 1 and StarList is not None:\r
for Star in StarList:\r
Type = Type.strip()\r
Type = Type.rstrip(Star)\r
p = GetFuncDeclPattern()\r
for Str in PSL:\r
FuncRecord = GetFuncContainsPE(Str[1], FL)\r
- if FuncRecord == None:\r
+ if FuncRecord is None:\r
continue\r
\r
for Exp in GetPredicateListFromPredicateExpStr(Str[0]):\r
PredInfo = SplitPredicateStr(Exp)\r
- if PredInfo[1] == None:\r
+ if PredInfo[1] is None:\r
PredVarStr = PredInfo[0][0].strip()\r
IsFuncCall = False\r
SearchInCache = False\r
continue\r
if SearchInCache:\r
Type = FuncReturnTypeDict.get(PredVarStr)\r
- if Type != None:\r
+ if Type is not None:\r
if Type.find('*') != -1 and Type != 'BOOLEAN*':\r
PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_COMPARISON_NULL_TYPE, 'Predicate Expression: %s' % Exp, FileTable, Str[2])\r
continue\r
Type = GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall, None, StarList)\r
if SearchInCache:\r
FuncReturnTypeDict[PredVarStr] = Type\r
- if Type == None:\r
+ if Type is None:\r
continue\r
Type = GetTypeFromArray(Type, PredVarStr)\r
if Type.find('*') != -1 and Type != 'BOOLEAN*':\r
p = GetFuncDeclPattern()\r
for Str in PSL:\r
FuncRecord = GetFuncContainsPE(Str[1], FL)\r
- if FuncRecord == None:\r
+ if FuncRecord is None:\r
continue\r
\r
for Exp in GetPredicateListFromPredicateExpStr(Str[0]):\r
PredInfo = SplitPredicateStr(Exp)\r
- if PredInfo[1] == None:\r
+ if PredInfo[1] is None:\r
PredVarStr = PredInfo[0][0].strip()\r
IsFuncCall = False\r
SearchInCache = False\r
\r
if SearchInCache:\r
Type = FuncReturnTypeDict.get(PredVarStr)\r
- if Type != None:\r
+ if Type is not None:\r
if Type.find('BOOLEAN') == -1:\r
PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_NO_BOOLEAN_OPERATOR, 'Predicate Expression: %s' % Exp, FileTable, Str[2])\r
continue\r
Type = GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall, 'BOOLEAN', StarList)\r
if SearchInCache:\r
FuncReturnTypeDict[PredVarStr] = Type\r
- if Type == None:\r
+ if Type is None:\r
continue\r
if Type.find('BOOLEAN') == -1:\r
PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_NO_BOOLEAN_OPERATOR, 'Predicate Expression: %s' % Exp, FileTable, Str[2])\r
p = GetFuncDeclPattern()\r
for Str in PSL:\r
FuncRecord = GetFuncContainsPE(Str[1], FL)\r
- if FuncRecord == None:\r
+ if FuncRecord is None:\r
continue\r
\r
for Exp in GetPredicateListFromPredicateExpStr(Str[0]):\r
\r
if SearchInCache:\r
Type = FuncReturnTypeDict.get(PredVarStr)\r
- if Type != None:\r
+ if Type is not None:\r
if Type.find('BOOLEAN') != -1:\r
PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_BOOLEAN_VALUE, 'Predicate Expression: %s' % Exp, FileTable, Str[2])\r
continue\r
Type = GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall, 'BOOLEAN', StarList)\r
if SearchInCache:\r
FuncReturnTypeDict[PredVarStr] = Type\r
- if Type == None:\r
+ if Type is None:\r
continue\r
if Type.find('BOOLEAN') != -1:\r
PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_BOOLEAN_VALUE, 'Predicate Expression: %s' % Exp, FileTable, Str[2])\r
\r
if self.backtracking == 0:\r
\r
- if d != None:\r
+ if d is not None:\r
self.function_definition_stack[-1].ModifierText = self.input.toString(d.start,d.stop)\r
else:\r
self.function_definition_stack[-1].ModifierText = ''\r
self.function_definition_stack[-1].DeclText = self.input.toString(declarator1.start,declarator1.stop)\r
self.function_definition_stack[-1].DeclLine = declarator1.start.line\r
self.function_definition_stack[-1].DeclOffset = declarator1.start.charPositionInLine\r
- if a != None:\r
+ if a is not None:\r
self.function_definition_stack[-1].LBLine = a.start.line\r
self.function_definition_stack[-1].LBOffset = a.start.charPositionInLine\r
else:\r
return \r
if self.backtracking == 0:\r
\r
- if b != None:\r
+ if b is not None:\r
self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, self.input.toString(b.start,b.stop), self.input.toString(c.start,c.stop))\r
else:\r
self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, '', self.input.toString(c.start,c.stop))\r
return \r
if self.backtracking == 0:\r
\r
- if t != None:\r
+ if t is not None:\r
self.StoreVariableDeclaration(s.start.line, s.start.charPositionInLine, t.start.line, t.start.charPositionInLine, self.input.toString(s.start,s.stop), self.input.toString(t.start,t.stop))\r
\r
\r
return \r
if self.backtracking == 0:\r
\r
- if s.stop != None:\r
+ if s.stop is not None:\r
self.StoreStructUnionDefinition(s.start.line, s.start.charPositionInLine, s.stop.line, s.stop.charPositionInLine, self.input.toString(s.start,s.stop))\r
\r
\r
return \r
if self.backtracking == 0:\r
\r
- if e.stop != None:\r
+ if e.stop is not None:\r
self.StoreEnumerationDefinition(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start,e.stop))\r
\r
\r
InCharLiteral = not InCharLiteral\r
# meet new line, then no longer in a comment for // and '#'\r
if self.__CurrentChar() == T_CHAR_LF:\r
- if HashComment and PPDirectiveObj != None:\r
+ if HashComment and PPDirectiveObj is not None:\r
if PPDirectiveObj.Content.rstrip(T_CHAR_CR).endswith(T_CHAR_BACKSLASH):\r
PPDirectiveObj.Content += T_CHAR_LF\r
PPExtend = True\r
# @param Option: The option list including log level setting\r
#\r
def SetLogLevel(self, Option):\r
- if Option.verbose != None:\r
+ if Option.verbose is not None:\r
EdkLogger.SetLevel(EdkLogger.VERBOSE)\r
- elif Option.quiet != None:\r
+ elif Option.quiet is not None:\r
EdkLogger.SetLevel(EdkLogger.QUIET)\r
- elif Option.debug != None:\r
+ elif Option.debug is not None:\r
EdkLogger.SetLevel(Option.debug + 1)\r
else:\r
EdkLogger.SetLevel(EdkLogger.INFO)\r
return array.__new__(cls, 'B')\r
\r
def __init__(m, ID=None):\r
- if ID == None:\r
+ if ID is None:\r
m._ID_ = str(uuid.uuid1()).upper()\r
else:\r
m._ID_ = ID\r
return (CouldBeLoaded, DepexString, FileDepex)\r
\r
def Dispatch(self, Db = None):\r
- if Db == None:\r
+ if Db is None:\r
return False\r
self.UnDispatchedFfsDict = copy.copy(self.FfsDict)\r
# Find PeiCore, DexCore, PeiPriori, DxePriori first\r
continue\r
\r
# Parse SEC_CORE first\r
- if FfsSecCoreGuid != None:\r
+ if FfsSecCoreGuid is not None:\r
self.OrderedFfsDict[FfsSecCoreGuid] = self.UnDispatchedFfsDict.pop(FfsSecCoreGuid)\r
self.LoadPpi(Db, FfsSecCoreGuid)\r
\r
# Parse PEI first\r
- if FfsPeiCoreGuid != None:\r
+ if FfsPeiCoreGuid is not None:\r
self.OrderedFfsDict[FfsPeiCoreGuid] = self.UnDispatchedFfsDict.pop(FfsPeiCoreGuid)\r
self.LoadPpi(Db, FfsPeiCoreGuid)\r
- if FfsPeiPrioriGuid != None:\r
+ if FfsPeiPrioriGuid is not None:\r
# Load PEIM described in priori file\r
FfsPeiPriori = self.UnDispatchedFfsDict.pop(FfsPeiPrioriGuid)\r
if len(FfsPeiPriori.Sections) == 1:\r
self.DisPatchPei(Db)\r
\r
# Parse DXE then\r
- if FfsDxeCoreGuid != None:\r
+ if FfsDxeCoreGuid is not None:\r
self.OrderedFfsDict[FfsDxeCoreGuid] = self.UnDispatchedFfsDict.pop(FfsDxeCoreGuid)\r
self.LoadProtocol(Db, FfsDxeCoreGuid)\r
- if FfsDxePrioriGuid != None:\r
+ if FfsDxePrioriGuid is not None:\r
# Load PEIM described in priori file\r
FfsDxePriori = self.UnDispatchedFfsDict.pop(FfsDxePrioriGuid)\r
if len(FfsDxePriori.Sections) == 1:\r
IsInstalled = True\r
NewFfs = self.UnDispatchedFfsDict.pop(FfsID)\r
NewFfs.Depex = DepexString\r
- if FileDepex != None:\r
+ if FileDepex is not None:\r
ScheduleList.insert(FileDepex[1], FfsID, NewFfs, FileDepex[0])\r
else:\r
ScheduleList[FfsID] = NewFfs\r
FfsId = repr(FfsObj)\r
if ((self.Attributes & 0x00000800) != 0 and len(FfsObj) == 0xFFFFFF) \\r
or ((self.Attributes & 0x00000800) == 0 and len(FfsObj) == 0):\r
- if LastFfsObj != None:\r
+ if LastFfsObj is not None:\r
LastFfsObj.FreeSpace = EndOfFv - LastFfsObj._OFF_ - len(LastFfsObj)\r
else:\r
if FfsId in self.FfsDict:\r
% (FfsObj.Guid, FfsObj.Offset,\r
self.FfsDict[FfsId].Guid, self.FfsDict[FfsId].Offset))\r
self.FfsDict[FfsId] = FfsObj\r
- if LastFfsObj != None:\r
+ if LastFfsObj is not None:\r
LastFfsObj.FreeSpace = FfsStartAddress - LastFfsObj._OFF_ - len(LastFfsObj)\r
\r
FfsStartAddress += len(FfsObj)\r
\r
def __init__(m, CompressedData=None, CompressionType=None, UncompressedLength=None):\r
Image.__init__(m)\r
- if UncompressedLength != None:\r
+ if UncompressedLength is not None:\r
m.UncompressedLength = UncompressedLength\r
- if CompressionType != None:\r
+ if CompressionType is not None:\r
m.CompressionType = CompressionType\r
- if CompressedData != None:\r
+ if CompressedData is not None:\r
m.Data = CompressedData\r
\r
def __str__(m):\r
\r
def __init__(m, SectionDefinitionGuid=None, DataOffset=None, Attributes=None, Data=None):\r
Image.__init__(m)\r
- if SectionDefinitionGuid != None:\r
+ if SectionDefinitionGuid is not None:\r
m.SectionDefinitionGuid = SectionDefinitionGuid\r
- if DataOffset != None:\r
+ if DataOffset is not None:\r
m.DataOffset = DataOffset\r
- if Attributes != None:\r
+ if Attributes is not None:\r
m.Attributes = Attributes\r
- if Data != None:\r
+ if Data is not None:\r
m.Data = Data\r
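
In these constructors the None default means "keep the class-level value": a field is overwritten only when the caller passed something explicitly. is not None matters here because perfectly legal values such as 0 (a zero DataOffset, an empty data buffer) would be dropped by a truthiness test. A minimal sketch with illustrative names:

    class Section(object):
        DataOffset = 0x18                  # class-level default

        def __init__(self, DataOffset=None):
            if DataOffset is not None:     # 0 is a legal, explicit offset
                self.DataOffset = DataOffset

    print Section().DataOffset    # 24 -- the class default
    print Section(0).DataOffset   # 0  -- kept, because the test is 'is not None'
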
\r
def __str__(m):\r
else:\r
CurrentData = m._OPCODE_\r
m._ExprList.append(Token)\r
- if CurrentData == None:\r
+ if CurrentData is None:\r
break\r
return m._ExprList\r
\r
def __init__(m, Type=None, Size=None):\r
Image.__init__(m)\r
m._Alignment = 1\r
- if Type != None:\r
+ if Type is not None:\r
m.Type = Type\r
- if Size != None:\r
+ if Size is not None:\r
m.Size = Size\r
\r
def __str__(m):\r
for Line in MapFile:\r
Line = Line.strip()\r
if not MappingStart:\r
- if MappingTitle.match(Line) != None:\r
+ if MappingTitle.match(Line) is not None:\r
MappingStart = True\r
continue\r
ResultList = MappingFormat.findall(Line)\r
self.SourceOverridePath = SourceOverridePath\r
\r
# Load Inf file if filename is not None\r
- if Filename != None:\r
+ if Filename is not None:\r
self.LoadInfFile(Filename)\r
\r
if SourceFileList:\r
#\r
def GenerateFfs(self, FfsObj):\r
self.FfsIndex = self.FfsIndex + 1\r
- if FfsObj != None and FfsObj.Type in [0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0xA]:\r
+ if FfsObj is not None and FfsObj.Type in [0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0xA]:\r
FfsGuid = FfsObj.Guid\r
FfsOffset = FfsObj._OFF_\r
FfsName = 'Unknown-Module'\r
InfFileName = NormPath(FfsObj.InfFileName)\r
Arch = FfsObj.GetCurrentArch()\r
\r
- if Arch != None:\r
+ if Arch is not None:\r
Dict['$(ARCH)'] = Arch\r
InfFileName = GenFdsGlobalVariable.MacroExtend(InfFileName, Dict, Arch)\r
\r
- if Arch != None:\r
+ if Arch is not None:\r
Inf = GenFdsGlobalVariable.WorkSpace.BuildObject[PathClass(InfFileName, GenFdsGlobalVariable.WorkSpaceDir), Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]\r
Guid = Inf.Guid\r
\r
if not os.path.isabs(fmp.ImageFile):\r
CapInputFile = os.path.join(GenFdsGlobalVariable.WorkSpaceDir, fmp.ImageFile)\r
CapOutputTmp = os.path.join(GenFdsGlobalVariable.FvDir, self.UiCapsuleName) + '.tmp'\r
- if ExternalTool == None:\r
+ if ExternalTool is None:\r
EdkLogger.error("GenFds", GENFDS_ERROR, "No tool found with GUID %s" % fmp.Certificate_Guid)\r
else:\r
CmdOption += ExternalTool\r
#\r
def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf = None, Dict = {}, IsMakefile = False):\r
\r
- if FfsInf != None:\r
+ if FfsInf is not None:\r
self.CompType = FfsInf.__ExtendMacro__(self.CompType)\r
self.Alignment = FfsInf.__ExtendMacro__(self.Alignment)\r
\r
Index = Index + 1\r
SecIndex = '%s.%d' %(SecNum, Index)\r
ReturnSectList, AlignValue = Sect.GenSection(OutputPath, ModuleName, SecIndex, KeyStringList, FfsInf, Dict, IsMakefile=IsMakefile)\r
- if AlignValue != None:\r
- if MaxAlign == None:\r
+ if AlignValue is not None:\r
+ if MaxAlign is None:\r
MaxAlign = AlignValue\r
if GenFdsGlobalVariable.GetAlignment (AlignValue) > GenFdsGlobalVariable.GetAlignment (MaxAlign):\r
MaxAlign = AlignValue\r
if ReturnSectList != []:\r
- if AlignValue == None:\r
+ if AlignValue is None:\r
AlignValue = "1"\r
for FileData in ReturnSectList:\r
SectFiles += (FileData,)\r
#\r
# Prepare the parameter of GenSection\r
#\r
- if FfsFile != None:\r
+ if FfsFile is not None:\r
self.SectFileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.SectFileName)\r
self.SectFileName = GenFdsGlobalVariable.MacroExtend(self.SectFileName, Dict, FfsFile.CurrentArch)\r
else:\r
\r
NoStrip = True\r
if self.SecType in ('TE', 'PE32'):\r
- if self.KeepReloc != None:\r
+ if self.KeepReloc is not None:\r
NoStrip = self.KeepReloc\r
\r
if not NoStrip:\r
for Exp in ExpList:\r
if Exp.upper() not in ('AND', 'OR', 'NOT', 'TRUE', 'FALSE', 'SOR', 'BEFORE', 'AFTER', 'END'):\r
GuidStr = self.__FindGuidValue(Exp)\r
- if GuidStr == None:\r
+ if GuidStr is None:\r
EdkLogger.error("GenFds", RESOURCE_NOT_AVAILABLE,\r
"Depex GUID %s could not be found in build DB! (ModuleName: %s)" % (Exp, ModuleName))\r
\r
#\r
def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf = None, Dict = {}, IsMakefile = False) :\r
\r
- if self.FileName != None and self.FileName.startswith('PCD('):\r
+ if self.FileName is not None and self.FileName.startswith('PCD('):\r
self.FileName = GenFdsGlobalVariable.GetPcdValue(self.FileName)\r
"""Prepare the parameter of GenSection"""\r
- if FfsInf != None :\r
+ if FfsInf is not None :\r
InfFileName = FfsInf.InfFileName\r
SectionType = FfsInf.__ExtendMacro__(self.SectionType)\r
Filename = FfsInf.__ExtendMacro__(self.FileName)\r
StringData = FfsInf.__ExtendMacro__(self.StringData)\r
NoStrip = True\r
if FfsInf.ModuleType in ('SEC', 'PEI_CORE', 'PEIM') and SectionType in ('TE', 'PE32'):\r
- if FfsInf.KeepReloc != None:\r
+ if FfsInf.KeepReloc is not None:\r
NoStrip = FfsInf.KeepReloc\r
- elif FfsInf.KeepRelocFromRule != None:\r
+ elif FfsInf.KeepRelocFromRule is not None:\r
NoStrip = FfsInf.KeepRelocFromRule\r
- elif self.KeepReloc != None:\r
+ elif self.KeepReloc is not None:\r
NoStrip = self.KeepReloc\r
- elif FfsInf.ShadowFromInfFile != None:\r
+ elif FfsInf.ShadowFromInfFile is not None:\r
NoStrip = FfsInf.ShadowFromInfFile\r
else:\r
EdkLogger.error("GenFds", GENFDS_ERROR, "Module %s apply rule for None!" %ModuleName)\r
\r
"""If the file name was pointed out, add it in FileList"""\r
FileList = []\r
- if Filename != None:\r
+ if Filename is not None:\r
Filename = GenFdsGlobalVariable.MacroExtend(Filename, Dict)\r
# check if the path is absolute or relative\r
if os.path.isabs(Filename):\r
if SectionType == 'VERSION':\r
\r
InfOverrideVerString = False\r
- if FfsInf.Version != None:\r
+ if FfsInf.Version is not None:\r
#StringData = FfsInf.Version\r
BuildNum = FfsInf.Version\r
InfOverrideVerString = True\r
\r
if InfOverrideVerString:\r
#VerTuple = ('-n', '"' + StringData + '"')\r
- if BuildNum != None and BuildNum != '':\r
+ if BuildNum is not None and BuildNum != '':\r
BuildNumTuple = ('-j', BuildNum)\r
else:\r
BuildNumTuple = tuple()\r
VerString = f.read()\r
f.close()\r
BuildNum = VerString\r
- if BuildNum != None and BuildNum != '':\r
+ if BuildNum is not None and BuildNum != '':\r
BuildNumTuple = ('-j', BuildNum)\r
GenFdsGlobalVariable.GenerateSection(OutputFile, [], 'EFI_SECTION_VERSION',\r
#Ui=VerString,\r
\r
else:\r
BuildNum = StringData\r
- if BuildNum != None and BuildNum != '':\r
+ if BuildNum is not None and BuildNum != '':\r
BuildNumTuple = ('-j', BuildNum)\r
else:\r
BuildNumTuple = tuple()\r
elif SectionType == 'UI':\r
\r
InfOverrideUiString = False\r
- if FfsInf.Ui != None:\r
+ if FfsInf.Ui is not None:\r
StringData = FfsInf.Ui\r
InfOverrideUiString = True\r
\r
Ui=UiString, IsMakefile=IsMakefile)\r
OutputFileList.append(OutputFile)\r
else:\r
- if StringData != None and len(StringData) > 0:\r
+ if StringData is not None and len(StringData) > 0:\r
UiTuple = ('-n', '"' + StringData + '"')\r
else:\r
UiTuple = tuple()\r
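
The `BuildNum is not None and BuildNum != ''` and `StringData is not None and len(StringData) > 0` tests above stay explicit rather than collapsing into a bare `if BuildNum:`; the explicit form is what the patch preserves, and it behaves differently from plain truthiness if the value could ever be falsy without being None or an empty string. A small illustrative sketch:

```python
# Sketch: explicit "is not None and != ''" versus bare truthiness.
def build_num_tuple_explicit(build_num):
    if build_num is not None and build_num != '':
        return ('-j', build_num)
    return tuple()

def build_num_tuple_truthy(build_num):
    return ('-j', build_num) if build_num else tuple()

print(build_num_tuple_explicit('0100'))  # ('-j', '0100')
print(build_num_tuple_truthy('0100'))    # ('-j', '0100') -- same for non-empty strings
print(build_num_tuple_explicit(0))       # ('-j', 0)       -- numeric zero is kept
print(build_num_tuple_truthy(0))         # ()              -- truthiness silently drops it
```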
if not MacroVal:\r
if Macro in MacroDict:\r
MacroVal = MacroDict[Macro]\r
- if MacroVal != None:\r
+ if MacroVal is not None:\r
IncFileName = IncFileName.replace('$(' + Macro + ')', MacroVal, 1)\r
if MacroVal.find('$(') != -1:\r
PreIndex = StartPos\r
# list index of the insertion, note that line number is 'CurrentLine + 1'\r
InsertAtLine = CurrentLine\r
ParentProfile = GetParentAtLine (CurrentLine)\r
- if ParentProfile != None:\r
+ if ParentProfile is not None:\r
ParentProfile.IncludeFileList.insert(0, IncFileProfile)\r
IncFileProfile.Level = ParentProfile.Level + 1\r
IncFileProfile.InsertStartLineNumber = InsertAtLine + 1\r
while StartPos != -1 and EndPos != -1 and self.__Token not in ['!ifdef', '!ifndef', '!if', '!elseif']:\r
MacroName = CurLine[StartPos+2 : EndPos]\r
MacorValue = self.__GetMacroValue(MacroName)\r
- if MacorValue != None:\r
+ if MacorValue is not None:\r
CurLine = CurLine.replace('$(' + MacroName + ')', MacorValue, 1)\r
if MacorValue.find('$(') != -1:\r
PreIndex = StartPos\r
\r
if not self.__GetNextToken():\r
return False\r
- if gGuidPattern.match(self.__Token) != None:\r
+ if gGuidPattern.match(self.__Token) is not None:\r
return True\r
else:\r
self.__UndoToken()\r
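
`gGuidPattern.match(self.__Token) is not None` works because the `re` match functions return a match object on success and None on failure, so an identity test against None (or using the result directly as the condition) is the standard way to branch. A minimal sketch with an illustrative pattern; the real `gGuidPattern` in the parser may differ:

```python
import re

# Illustrative GUID pattern; the actual gGuidPattern may be defined differently.
guid_pattern = re.compile(
    r'[0-9a-fA-F]{8}-([0-9a-fA-F]{4}-){3}[0-9a-fA-F]{12}$')

def is_guid(token):
    # re.match() returns a match object on success and None on failure,
    # so "is not None" (or truth-testing the result) is the idiom.
    return guid_pattern.match(token) is not None

print(is_guid('8c8ce578-8a3d-4f1c-9935-896185c32dd3'))  # True
print(is_guid('not-a-guid'))                            # False
```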
#'\n\tGot Token: \"%s\" from File %s\n' % (self.__Token, FileLineTuple[0]) + \\r
# At this point, the closest parent would be the included file itself\r
Profile = GetParentAtLine(X.OriginalLineNumber)\r
- if Profile != None:\r
+ if Profile is not None:\r
X.Message += ' near line %d, column %d: %s' \\r
% (X.LineNumber, 0, Profile.FileLinesList[X.LineNumber-1])\r
else:\r
while self.__GetTokenStatements(FdObj):\r
pass\r
for Attr in ("BaseAddress", "Size", "ErasePolarity"):\r
- if getattr(FdObj, Attr) == None:\r
+ if getattr(FdObj, Attr) is None:\r
self.__GetNextToken()\r
raise Warning("Keyword %s missing" % Attr, self.FileName, self.CurrentLineNumber)\r
\r
IsBlock = True\r
\r
Item = Obj.BlockSizeList[-1]\r
- if Item[0] == None or Item[1] == None:\r
+ if Item[0] is None or Item[1] is None:\r
raise Warning("expected block statement", self.FileName, self.CurrentLineNumber)\r
return IsBlock\r
\r
#\r
def __GetRegionLayout(self, Fd):\r
Offset = self.__CalcRegionExpr() \r
- if Offset == None:\r
+ if Offset is None:\r
return False\r
\r
RegionObj = Region.Region()\r
raise Warning("expected '|'", self.FileName, self.CurrentLineNumber)\r
\r
Size = self.__CalcRegionExpr()\r
- if Size == None:\r
+ if Size is None:\r
raise Warning("expected Region Size", self.FileName, self.CurrentLineNumber)\r
RegionObj.Size = Size\r
\r
\r
FvImageSectionObj = FvImageSection.FvImageSection()\r
FvImageSectionObj.Alignment = AlignValue\r
- if FvObj != None:\r
+ if FvObj is not None:\r
FvImageSectionObj.Fv = FvObj\r
FvImageSectionObj.FvName = None\r
else:\r
Rule.CheckSum = CheckSum\r
Rule.Fixed = Fixed\r
Rule.KeyStringList = KeyStringList\r
- if KeepReloc != None:\r
+ if KeepReloc is not None:\r
Rule.KeepReloc = KeepReloc\r
\r
while True:\r
Rule.CheckSum = CheckSum\r
Rule.Fixed = Fixed\r
Rule.KeyStringList = KeyStringList\r
- if KeepReloc != None:\r
+ if KeepReloc is not None:\r
Rule.KeepReloc = KeepReloc\r
Rule.FileExtension = Ext\r
Rule.FileName = self.__Token\r
EfiSectionObj.KeepReloc = False\r
else:\r
EfiSectionObj.KeepReloc = True\r
- if Obj.KeepReloc != None and Obj.KeepReloc != EfiSectionObj.KeepReloc:\r
+ if Obj.KeepReloc is not None and Obj.KeepReloc != EfiSectionObj.KeepReloc:\r
raise Warning("Section type %s has reloc strip flag conflict with Rule" % EfiSectionObj.SectionType, self.FileName, self.CurrentLineNumber)\r
else:\r
raise Warning("Section type %s could not have reloc strip flag" % EfiSectionObj.SectionType, self.FileName, self.CurrentLineNumber)\r
raise Warning("expected Component version", self.FileName, self.CurrentLineNumber)\r
\r
Pattern = re.compile('-$|[0-9a-fA-F]{1,2}\.[0-9a-fA-F]{1,2}$', re.DOTALL)\r
- if Pattern.match(self.__Token) == None:\r
+ if Pattern.match(self.__Token) is None:\r
raise Warning("Unknown version format '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)\r
CompStatementObj.CompVer = self.__Token\r
\r
for elementRegionData in elementRegion.RegionDataList:\r
if elementRegionData.endswith(".cap"):\r
continue\r
- if elementRegionData != None and elementRegionData.upper() not in CapList:\r
+ if elementRegionData is not None and elementRegionData.upper() not in CapList:\r
CapList.append(elementRegionData.upper())\r
return CapList\r
\r
def __GetReferencedFdCapTuple(self, CapObj, RefFdList = [], RefFvList = []):\r
\r
for CapsuleDataObj in CapObj.CapsuleDataList :\r
- if hasattr(CapsuleDataObj, 'FvName') and CapsuleDataObj.FvName != None and CapsuleDataObj.FvName.upper() not in RefFvList:\r
+ if hasattr(CapsuleDataObj, 'FvName') and CapsuleDataObj.FvName is not None and CapsuleDataObj.FvName.upper() not in RefFvList:\r
RefFvList.append (CapsuleDataObj.FvName.upper())\r
- elif hasattr(CapsuleDataObj, 'FdName') and CapsuleDataObj.FdName != None and CapsuleDataObj.FdName.upper() not in RefFdList:\r
+ elif hasattr(CapsuleDataObj, 'FdName') and CapsuleDataObj.FdName is not None and CapsuleDataObj.FdName.upper() not in RefFdList:\r
RefFdList.append (CapsuleDataObj.FdName.upper()) \r
- elif CapsuleDataObj.Ffs != None:\r
+ elif CapsuleDataObj.Ffs is not None:\r
if isinstance(CapsuleDataObj.Ffs, FfsFileStatement.FileStatement):\r
- if CapsuleDataObj.Ffs.FvName != None and CapsuleDataObj.Ffs.FvName.upper() not in RefFvList:\r
+ if CapsuleDataObj.Ffs.FvName is not None and CapsuleDataObj.Ffs.FvName.upper() not in RefFvList:\r
RefFvList.append(CapsuleDataObj.Ffs.FvName.upper())\r
- elif CapsuleDataObj.Ffs.FdName != None and CapsuleDataObj.Ffs.FdName.upper() not in RefFdList:\r
+ elif CapsuleDataObj.Ffs.FdName is not None and CapsuleDataObj.Ffs.FdName.upper() not in RefFdList:\r
RefFdList.append(CapsuleDataObj.Ffs.FdName.upper())\r
else:\r
self.__GetReferencedFdFvTupleFromSection(CapsuleDataObj.Ffs, RefFdList, RefFvList)\r
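
`hasattr(CapsuleDataObj, 'FvName') and CapsuleDataObj.FvName is not None` guards first against the attribute being absent and then against it being present but unset. A more compact equivalent uses `getattr` with a None default, in the spirit of the `getattr(FdObj, Attr) is None` loop earlier in the patch; sketched below with a throwaway class, purely for illustration:

```python
# Sketch: two equivalent ways to ask "does the attribute exist and is it set?".
class CapsuleDataStub(object):
    def __init__(self, fv_name=None):
        if fv_name is not None:
            self.FvName = fv_name          # attribute may be missing entirely

def has_fv_name_verbose(obj):
    return hasattr(obj, 'FvName') and obj.FvName is not None

def has_fv_name_compact(obj):
    return getattr(obj, 'FvName', None) is not None

for stub in (CapsuleDataStub(), CapsuleDataStub('FVMAIN')):
    assert has_fv_name_verbose(stub) == has_fv_name_compact(stub)
print(has_fv_name_compact(CapsuleDataStub('FVMAIN')))   # True
```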
for elementRegionData in elementRegion.RegionDataList:\r
if elementRegionData.endswith(".fv"):\r
continue\r
- if elementRegionData != None and elementRegionData.upper() not in FvList:\r
+ if elementRegionData is not None and elementRegionData.upper() not in FvList:\r
FvList.append(elementRegionData.upper())\r
return FvList\r
\r
\r
for FfsObj in FvObj.FfsList:\r
if isinstance(FfsObj, FfsFileStatement.FileStatement):\r
- if FfsObj.FvName != None and FfsObj.FvName.upper() not in RefFvList:\r
+ if FfsObj.FvName is not None and FfsObj.FvName.upper() not in RefFvList:\r
RefFvList.append(FfsObj.FvName.upper())\r
- elif FfsObj.FdName != None and FfsObj.FdName.upper() not in RefFdList:\r
+ elif FfsObj.FdName is not None and FfsObj.FdName.upper() not in RefFdList:\r
RefFdList.append(FfsObj.FdName.upper())\r
else:\r
self.__GetReferencedFdFvTupleFromSection(FfsObj, RefFdList, RefFvList)\r
while SectionStack != []:\r
SectionObj = SectionStack.pop()\r
if isinstance(SectionObj, FvImageSection.FvImageSection):\r
- if SectionObj.FvName != None and SectionObj.FvName.upper() not in FvList:\r
+ if SectionObj.FvName is not None and SectionObj.FvName.upper() not in FvList:\r
FvList.append(SectionObj.FvName.upper())\r
- if SectionObj.Fv != None and SectionObj.Fv.UiFvName != None and SectionObj.Fv.UiFvName.upper() not in FvList:\r
+ if SectionObj.Fv is not None and SectionObj.Fv.UiFvName is not None and SectionObj.Fv.UiFvName.upper() not in FvList:\r
FvList.append(SectionObj.Fv.UiFvName.upper())\r
self.__GetReferencedFdFvTuple(SectionObj.Fv, FdList, FvList)\r
\r
#\r
def GenFfs(self, Dict = {}, FvChildAddr=[], FvParentAddr=None, IsMakefile=False, FvName=None):\r
\r
- if self.NameGuid != None and self.NameGuid.startswith('PCD('):\r
+ if self.NameGuid is not None and self.NameGuid.startswith('PCD('):\r
PcdValue = GenFdsGlobalVariable.GetPcdValue(self.NameGuid)\r
if len(PcdValue) == 0:\r
EdkLogger.error("GenFds", GENFDS_ERROR, '%s NOT defined.' \\r
\r
Dict.update(self.DefineVarDict)\r
SectionAlignments = None\r
- if self.FvName != None :\r
+ if self.FvName is not None :\r
Buffer = StringIO.StringIO('')\r
if self.FvName.upper() not in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys():\r
EdkLogger.error("GenFds", GENFDS_ERROR, "FV (%s) is NOT described in FDF file!" % (self.FvName))\r
FileName = Fv.AddToBuffer(Buffer)\r
SectionFiles = [FileName]\r
\r
- elif self.FdName != None:\r
+ elif self.FdName is not None:\r
if self.FdName.upper() not in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys():\r
EdkLogger.error("GenFds", GENFDS_ERROR, "FD (%s) is NOT described in FDF file!" % (self.FdName))\r
Fd = GenFdsGlobalVariable.FdfParser.Profile.FdDict.get(self.FdName.upper())\r
FileName = Fd.GenFd()\r
SectionFiles = [FileName]\r
\r
- elif self.FileName != None:\r
+ elif self.FileName is not None:\r
if hasattr(self, 'FvFileType') and self.FvFileType == 'RAW':\r
if isinstance(self.FileName, list) and isinstance(self.SubAlignment, list) and len(self.FileName) == len(self.SubAlignment):\r
FileContent = ''\r
Content = f.read()\r
f.close()\r
AlignValue = 1\r
- if self.SubAlignment[Index] != None:\r
+ if self.SubAlignment[Index] is not None:\r
AlignValue = GenFdsGlobalVariable.GetAlignment(self.SubAlignment[Index])\r
if AlignValue > MaxAlignValue:\r
MaxAlignIndex = Index\r
section.FvAddr = FvChildAddr.pop(0)\r
elif isinstance(section, GuidSection):\r
section.FvAddr = FvChildAddr\r
- if FvParentAddr != None and isinstance(section, GuidSection):\r
+ if FvParentAddr is not None and isinstance(section, GuidSection):\r
section.FvParentAddr = FvParentAddr\r
\r
if self.KeepReloc == False:\r
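
The last context line, `if self.KeepReloc == False:`, is left untouched, which is consistent with the scope of the patch: only comparisons against None become identity tests. KeepReloc behaves as a tri-state flag (None for "not specified", True or False for an explicit choice), as the NoStrip cascades earlier in the patch show, so simplifying either check to bare truthiness would conflate None with False. A sketch of the distinction (names are illustrative):

```python
# Sketch: a tri-state flag where None means "not specified".
def resolve_no_strip(keep_reloc):
    # Mirrors the NoStrip pattern above: the default survives unless the
    # flag was set explicitly to True or False.
    no_strip = True
    if keep_reloc is not None:
        no_strip = keep_reloc
    return no_strip

print(resolve_no_strip(None))    # True  -- unset, default kept
print(resolve_no_strip(False))   # False -- explicit False, relocations may be stripped
print(resolve_no_strip(True))    # True  -- explicit True

# "if not keep_reloc:" would treat None and False identically, which is exactly
# the distinction the explicit comparisons preserve.
```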