## @file
# process data section generation
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
from . import Section
from .GenFdsGlobalVariable import GenFdsGlobalVariable
import subprocess
from .Ffs import SectionSuffix
import Common.LongFilePathOs as os
from CommonDataClass.FdfClass import DataSectionClassObject
from Common.Misc import PeImageClass
from Common.LongFilePathSupport import CopyLongFilePath
from Common.DataType import *
## generate data section
#
#
class DataSection (DataSectionClassObject):
## The constructor
#
# @param self The object pointer
#
def __init__(self):
DataSectionClassObject.__init__(self)
## GenSection() method
#
# Generate data section
#
# @param self The object pointer
# @param OutputPath Where to place output file
# @param ModuleName Which module this section belongs to
# @param SecNum Index of section
# @param KeyStringList Filter for inputs of section generation
# @param FfsFile FfsInfStatement object that contains this section data
# @param Dict dictionary contains macro and its value
# @retval tuple (Generated file name list, section alignment)
#
def GenSection(self, OutputPath, ModuleName, SecNum, keyStringList, FfsFile = None, Dict = None, IsMakefile = False):
#
# Prepare the parameter of GenSection
#
if Dict is None:
Dict = {}
if FfsFile is not None:
self.SectFileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.SectFileName)
self.SectFileName = GenFdsGlobalVariable.MacroExtend(self.SectFileName, Dict, FfsFile.CurrentArch)
else:
self.SectFileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.SectFileName)
self.SectFileName = GenFdsGlobalVariable.MacroExtend(self.SectFileName, Dict)
"""Check Section file exist or not !"""
if not os.path.exists(self.SectFileName):
self.SectFileName = os.path.join (GenFdsGlobalVariable.WorkSpaceDir,
self.SectFileName)
"""Copy Map file to Ffs output"""
Filename = GenFdsGlobalVariable.MacroExtend(self.SectFileName)
if Filename[(len(Filename)-4):] == '.efi':
MapFile = Filename.replace('.efi', '.map')
CopyMapFile = os.path.join(OutputPath, ModuleName + '.map')
if IsMakefile:
if GenFdsGlobalVariable.CopyList == []:
GenFdsGlobalVariable.CopyList = [(MapFile, CopyMapFile)]
else:
GenFdsGlobalVariable.CopyList.append((MapFile, CopyMapFile))
else:
if os.path.exists(MapFile):
if not os.path.exists(CopyMapFile) or (os.path.getmtime(MapFile) > os.path.getmtime(CopyMapFile)):
CopyLongFilePath(MapFile, CopyMapFile)
#Get PE Section alignment when align is set to AUTO
if self.Alignment == 'Auto' and self.SecType in (BINARY_FILE_TYPE_TE, BINARY_FILE_TYPE_PE32):
self.Alignment = "0"
NoStrip = True
if self.SecType in (BINARY_FILE_TYPE_TE, BINARY_FILE_TYPE_PE32):
if self.KeepReloc is not None:
NoStrip = self.KeepReloc
if not NoStrip:
FileBeforeStrip = os.path.join(OutputPath, ModuleName + '.efi')
if not os.path.exists(FileBeforeStrip) or \
(os.path.getmtime(self.SectFileName) > os.path.getmtime(FileBeforeStrip)):
CopyLongFilePath(self.SectFileName, FileBeforeStrip)
StrippedFile = os.path.join(OutputPath, ModuleName + '.stripped')
GenFdsGlobalVariable.GenerateFirmwareImage(
StrippedFile,
[GenFdsGlobalVariable.MacroExtend(self.SectFileName, Dict)],
Strip=True,
IsMakefile = IsMakefile
)
self.SectFileName = StrippedFile
if self.SecType == BINARY_FILE_TYPE_TE:
TeFile = os.path.join( OutputPath, ModuleName + 'Te.raw')
GenFdsGlobalVariable.GenerateFirmwareImage(
TeFile,
[GenFdsGlobalVariable.MacroExtend(self.SectFileName, Dict)],
Type='te',
IsMakefile = IsMakefile
)
self.SectFileName = TeFile
OutputFile = os.path.join (OutputPath, ModuleName + SUP_MODULE_SEC + SecNum + SectionSuffix.get(self.SecType))
OutputFile = os.path.normpath(OutputFile)
GenFdsGlobalVariable.GenerateSection(OutputFile, [self.SectFileName], Section.Section.SectionType.get(self.SecType), IsMakefile = IsMakefile)
FileList = [OutputFile]
return FileList, self.Alignment
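# Illustrative usage sketch (not part of the original source; the object and
# argument values below are hypothetical). A DataSection is normally driven by
# the FDF parser, roughly like this:
#
#   Sect = DataSection()
#   Sect.SectFileName = '$(WORKSPACE)/SomePkg/Logo/Logo.bmp'
#   Sect.SecType = 'RAW'
#   Sect.Alignment = '8'
#   FileList, Alignment = Sect.GenSection(OutputPath, 'LogoModule', '1', [])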
# --- End of file: BaseTools/Source/Python/GenFds/DataSection.py (edk2-master) ---
## @file
# Global variables for GenFds
#
# Copyright (c) 2007 - 2021, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import print_function
from __future__ import absolute_import
import Common.LongFilePathOs as os
import sys
from sys import stdout
from subprocess import PIPE,Popen
from struct import Struct
from array import array
from Common.BuildToolError import COMMAND_FAILURE,GENFDS_ERROR
from Common import EdkLogger
from Common.Misc import SaveFileOnChange
from Common.TargetTxtClassObject import TargetTxtDict
from Common.ToolDefClassObject import ToolDefDict,gDefaultToolsDefFile
from AutoGen.BuildEngine import ToolBuildRule
import Common.DataType as DataType
from Common.Misc import PathClass,CreateDirectory
from Common.LongFilePathSupport import OpenLongFilePath as open
from Common.MultipleWorkspace import MultipleWorkspace as mws
import Common.GlobalData as GlobalData
from Common.BuildToolError import *
from AutoGen.AutoGen import CalculatePriorityValue
## Global variables
#
#
class GenFdsGlobalVariable:
FvDir = ''
OutputDirDict = {}
BinDir = ''
# will be FvDir + os.sep + 'Ffs'
FfsDir = ''
FdfParser = None
LibDir = ''
WorkSpace = None
WorkSpaceDir = ''
ConfDir = ''
OutputDirFromDscDict = {}
TargetName = ''
ToolChainTag = ''
RuleDict = {}
ArchList = None
ActivePlatform = None
FvAddressFileName = ''
VerboseMode = False
DebugLevel = -1
SharpCounter = 0
SharpNumberPerLine = 40
FdfFile = ''
FdfFileTimeStamp = 0
FixedLoadAddress = False
PlatformName = ''
BuildRuleFamily = DataType.TAB_COMPILER_MSFT
ToolChainFamily = DataType.TAB_COMPILER_MSFT
__BuildRuleDatabase = None
GuidToolDefinition = {}
FfsCmdDict = {}
SecCmdList = []
CopyList = []
ModuleFile = ''
EnableGenfdsMultiThread = True
#
# The list whose elements are flags indicating whether large FFS or SECTION files exist in an FV.
# At the beginning of each FV generation a False flag is appended to the list;
# after the call to GenerateSection returns, the size of the output file is checked,
# and if it is greater than 0xFFFFFF the tail flag in the list is set to True
# and EFI_FIRMWARE_FILE_SYSTEM3_GUID is passed to the C GenFv tool.
# At the end of FV generation the flag is popped.
# The list is used as a stack to handle nested FV generation.
#
LargeFileInFvFlags = []
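# A minimal sketch of that stack protocol, as used by Fv.AddToBuffer later in
# this file set (illustrative comment only, not part of the original source):
#
#   GenFdsGlobalVariable.LargeFileInFvFlags.append(False)   # entering an FV
#   ...                                                     # GenerateSection() may set [-1] = True
#   if GenFdsGlobalVariable.LargeFileInFvFlags[-1]:
#       FileSystemGuid = GenFdsGlobalVariable.EFI_FIRMWARE_FILE_SYSTEM3_GUID
#   GenFdsGlobalVariable.LargeFileInFvFlags.pop()           # leaving the FV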
EFI_FIRMWARE_FILE_SYSTEM3_GUID = '5473C07A-3DCB-4dca-BD6F-1E9689E7349A'
LARGE_FILE_SIZE = 0x1000000
SectionHeader = Struct("3B 1B")
# FvName, FdName, CapName in FDF, Image file name
ImageBinDict = {}
## LoadBuildRule
#
@staticmethod
def _LoadBuildRule():
if GenFdsGlobalVariable.__BuildRuleDatabase:
return GenFdsGlobalVariable.__BuildRuleDatabase
BuildRule = ToolBuildRule()
GenFdsGlobalVariable.__BuildRuleDatabase = BuildRule.ToolBuildRule
TargetObj = TargetTxtDict()
ToolDefinitionFile = TargetObj.Target.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF]
if ToolDefinitionFile == '':
ToolDefinitionFile = os.path.join('Conf', gDefaultToolsDefFile)
if os.path.isfile(ToolDefinitionFile):
ToolDefObj = ToolDefDict((os.path.join(os.getenv("WORKSPACE"), "Conf")))
ToolDefinition = ToolDefObj.ToolDef.ToolsDefTxtDatabase
if DataType.TAB_TOD_DEFINES_BUILDRULEFAMILY in ToolDefinition \
and GenFdsGlobalVariable.ToolChainTag in ToolDefinition[DataType.TAB_TOD_DEFINES_BUILDRULEFAMILY] \
and ToolDefinition[DataType.TAB_TOD_DEFINES_BUILDRULEFAMILY][GenFdsGlobalVariable.ToolChainTag]:
GenFdsGlobalVariable.BuildRuleFamily = ToolDefinition[DataType.TAB_TOD_DEFINES_BUILDRULEFAMILY][GenFdsGlobalVariable.ToolChainTag]
if DataType.TAB_TOD_DEFINES_FAMILY in ToolDefinition \
and GenFdsGlobalVariable.ToolChainTag in ToolDefinition[DataType.TAB_TOD_DEFINES_FAMILY] \
and ToolDefinition[DataType.TAB_TOD_DEFINES_FAMILY][GenFdsGlobalVariable.ToolChainTag]:
GenFdsGlobalVariable.ToolChainFamily = ToolDefinition[DataType.TAB_TOD_DEFINES_FAMILY][GenFdsGlobalVariable.ToolChainTag]
return GenFdsGlobalVariable.__BuildRuleDatabase
## GetBuildRules
# @param Inf: object of InfBuildData
# @param Arch: current arch
#
@staticmethod
def GetBuildRules(Inf, Arch):
if not Arch:
Arch = DataType.TAB_COMMON
if not Arch in GenFdsGlobalVariable.OutputDirDict:
return {}
BuildRuleDatabase = GenFdsGlobalVariable._LoadBuildRule()
if not BuildRuleDatabase:
return {}
PathClassObj = PathClass(Inf.MetaFile.File,
GenFdsGlobalVariable.WorkSpaceDir)
BuildDir = os.path.join(
GenFdsGlobalVariable.OutputDirDict[Arch],
Arch,
PathClassObj.SubDir,
PathClassObj.BaseName
)
BinDir = os.path.join(GenFdsGlobalVariable.OutputDirDict[Arch], Arch)
Macro = {
"WORKSPACE":GenFdsGlobalVariable.WorkSpaceDir,
"MODULE_NAME":Inf.BaseName,
"MODULE_GUID":Inf.Guid,
"MODULE_VERSION":Inf.Version,
"MODULE_TYPE":Inf.ModuleType,
"MODULE_FILE":str(PathClassObj),
"MODULE_FILE_BASE_NAME":PathClassObj.BaseName,
"MODULE_RELATIVE_DIR":PathClassObj.SubDir,
"MODULE_DIR":PathClassObj.SubDir,
"BASE_NAME":Inf.BaseName,
"ARCH":Arch,
"TOOLCHAIN":GenFdsGlobalVariable.ToolChainTag,
"TOOLCHAIN_TAG":GenFdsGlobalVariable.ToolChainTag,
"TOOL_CHAIN_TAG":GenFdsGlobalVariable.ToolChainTag,
"TARGET":GenFdsGlobalVariable.TargetName,
"BUILD_DIR":GenFdsGlobalVariable.OutputDirDict[Arch],
"BIN_DIR":BinDir,
"LIB_DIR":BinDir,
"MODULE_BUILD_DIR":BuildDir,
"OUTPUT_DIR":os.path.join(BuildDir, "OUTPUT"),
"DEBUG_DIR":os.path.join(BuildDir, "DEBUG")
}
BuildRules = {}
for Type in BuildRuleDatabase.FileTypeList:
#first try getting build rule by BuildRuleFamily
RuleObject = BuildRuleDatabase[Type, Inf.BuildType, Arch, GenFdsGlobalVariable.BuildRuleFamily]
if not RuleObject:
# build type is always module type, but ...
if Inf.ModuleType != Inf.BuildType:
RuleObject = BuildRuleDatabase[Type, Inf.ModuleType, Arch, GenFdsGlobalVariable.BuildRuleFamily]
#second try getting build rule by ToolChainFamily
if not RuleObject:
RuleObject = BuildRuleDatabase[Type, Inf.BuildType, Arch, GenFdsGlobalVariable.ToolChainFamily]
if not RuleObject:
# build type is always module type, but ...
if Inf.ModuleType != Inf.BuildType:
RuleObject = BuildRuleDatabase[Type, Inf.ModuleType, Arch, GenFdsGlobalVariable.ToolChainFamily]
if not RuleObject:
continue
RuleObject = RuleObject.Instantiate(Macro)
BuildRules[Type] = RuleObject
for Ext in RuleObject.SourceFileExtList:
BuildRules[Ext] = RuleObject
return BuildRules
## GetModuleCodaTargetList
#
# @param Inf: object of InfBuildData
# @param Arch: current arch
#
@staticmethod
def GetModuleCodaTargetList(Inf, Arch):
BuildRules = GenFdsGlobalVariable.GetBuildRules(Inf, Arch)
if not BuildRules:
return []
TargetList = set()
FileList = []
if not Inf.IsBinaryModule:
for File in Inf.Sources:
if File.TagName in {"", DataType.TAB_STAR, GenFdsGlobalVariable.ToolChainTag} and \
File.ToolChainFamily in {"", DataType.TAB_STAR, GenFdsGlobalVariable.ToolChainFamily}:
FileList.append((File, DataType.TAB_UNKNOWN_FILE))
for File in Inf.Binaries:
if File.Target in {DataType.TAB_COMMON, DataType.TAB_STAR, GenFdsGlobalVariable.TargetName}:
FileList.append((File, File.Type))
for File, FileType in FileList:
LastTarget = None
RuleChain = []
SourceList = [File]
Index = 0
while Index < len(SourceList):
Source = SourceList[Index]
Index = Index + 1
if File.IsBinary and File == Source and Inf.Binaries and File in Inf.Binaries:
# Skip all files that are not binary libraries
if not Inf.LibraryClass:
continue
RuleObject = BuildRules[DataType.TAB_DEFAULT_BINARY_FILE]
elif FileType in BuildRules:
RuleObject = BuildRules[FileType]
elif Source.Ext in BuildRules:
RuleObject = BuildRules[Source.Ext]
else:
# stop at no more rules
if LastTarget:
TargetList.add(str(LastTarget))
break
FileType = RuleObject.SourceFileType
# stop at STATIC_LIBRARY for library
if Inf.LibraryClass and FileType == DataType.TAB_STATIC_LIBRARY:
if LastTarget:
TargetList.add(str(LastTarget))
break
Target = RuleObject.Apply(Source)
if not Target:
if LastTarget:
TargetList.add(str(LastTarget))
break
elif not Target.Outputs:
# Only do build for target with outputs
TargetList.add(str(Target))
# to avoid cyclic rule
if FileType in RuleChain:
break
RuleChain.append(FileType)
SourceList.extend(Target.Outputs)
LastTarget = Target
FileType = DataType.TAB_UNKNOWN_FILE
for Cmd in Target.Commands:
if "$(CP)" == Cmd.split()[0]:
CpTarget = Cmd.split()[2]
TargetList.add(CpTarget)
return list(TargetList)
## SetDir()
#
# @param OutputDir Output directory
# @param FdfParser FDF contents parser
# @param Workspace The directory of workspace
# @param ArchList The Arch list of platform
#
@staticmethod
def SetDir (OutputDir, FdfParser, WorkSpace, ArchList):
GenFdsGlobalVariable.VerboseLogger("GenFdsGlobalVariable.OutputDir:%s" % OutputDir)
GenFdsGlobalVariable.FdfParser = FdfParser
GenFdsGlobalVariable.WorkSpace = WorkSpace
GenFdsGlobalVariable.FvDir = os.path.join(GenFdsGlobalVariable.OutputDirDict[ArchList[0]], DataType.TAB_FV_DIRECTORY)
if not os.path.exists(GenFdsGlobalVariable.FvDir):
os.makedirs(GenFdsGlobalVariable.FvDir)
GenFdsGlobalVariable.FfsDir = os.path.join(GenFdsGlobalVariable.FvDir, 'Ffs')
if not os.path.exists(GenFdsGlobalVariable.FfsDir):
os.makedirs(GenFdsGlobalVariable.FfsDir)
#
# Create FV Address inf file
#
GenFdsGlobalVariable.FvAddressFileName = os.path.join(GenFdsGlobalVariable.FfsDir, 'FvAddress.inf')
FvAddressFile = open(GenFdsGlobalVariable.FvAddressFileName, 'w')
#
# Add [Options]
#
FvAddressFile.writelines("[options]" + DataType.TAB_LINE_BREAK)
BsAddress = '0'
for Arch in ArchList:
if GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag].BsBaseAddress:
BsAddress = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag].BsBaseAddress
break
FvAddressFile.writelines("EFI_BOOT_DRIVER_BASE_ADDRESS = " + \
BsAddress + \
DataType.TAB_LINE_BREAK)
RtAddress = '0'
for Arch in reversed(ArchList):
temp = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag].RtBaseAddress
if temp:
RtAddress = temp
break
FvAddressFile.writelines("EFI_RUNTIME_DRIVER_BASE_ADDRESS = " + \
RtAddress + \
DataType.TAB_LINE_BREAK)
FvAddressFile.close()
@staticmethod
def SetEnv(FdfParser, WorkSpace, ArchList, GlobalData):
GenFdsGlobalVariable.ModuleFile = WorkSpace.ModuleFile
GenFdsGlobalVariable.FdfParser = FdfParser
GenFdsGlobalVariable.WorkSpace = WorkSpace.Db
GenFdsGlobalVariable.ArchList = ArchList
GenFdsGlobalVariable.ToolChainTag = GlobalData.gGlobalDefines["TOOL_CHAIN_TAG"]
GenFdsGlobalVariable.TargetName = GlobalData.gGlobalDefines["TARGET"]
GenFdsGlobalVariable.ActivePlatform = GlobalData.gActivePlatform
GenFdsGlobalVariable.ConfDir = GlobalData.gConfDirectory
GenFdsGlobalVariable.EnableGenfdsMultiThread = GlobalData.gEnableGenfdsMultiThread
for Arch in ArchList:
GenFdsGlobalVariable.OutputDirDict[Arch] = os.path.normpath(
os.path.join(GlobalData.gWorkspace,
WorkSpace.Db.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, GlobalData.gGlobalDefines['TARGET'],
GlobalData.gGlobalDefines['TOOLCHAIN']].OutputDirectory,
GlobalData.gGlobalDefines['TARGET'] +'_' + GlobalData.gGlobalDefines['TOOLCHAIN']))
GenFdsGlobalVariable.OutputDirFromDscDict[Arch] = os.path.normpath(
WorkSpace.Db.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch,
GlobalData.gGlobalDefines['TARGET'], GlobalData.gGlobalDefines['TOOLCHAIN']].OutputDirectory)
GenFdsGlobalVariable.PlatformName = WorkSpace.Db.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch,
GlobalData.gGlobalDefines['TARGET'],
GlobalData.gGlobalDefines['TOOLCHAIN']].PlatformName
GenFdsGlobalVariable.FvDir = os.path.join(GenFdsGlobalVariable.OutputDirDict[ArchList[0]], DataType.TAB_FV_DIRECTORY)
if not os.path.exists(GenFdsGlobalVariable.FvDir):
os.makedirs(GenFdsGlobalVariable.FvDir)
GenFdsGlobalVariable.FfsDir = os.path.join(GenFdsGlobalVariable.FvDir, 'Ffs')
if not os.path.exists(GenFdsGlobalVariable.FfsDir):
os.makedirs(GenFdsGlobalVariable.FfsDir)
#
# Create FV Address inf file
#
GenFdsGlobalVariable.FvAddressFileName = os.path.join(GenFdsGlobalVariable.FfsDir, 'FvAddress.inf')
FvAddressFile = open(GenFdsGlobalVariable.FvAddressFileName, 'w')
#
# Add [Options]
#
FvAddressFile.writelines("[options]" + DataType.TAB_LINE_BREAK)
BsAddress = '0'
for Arch in ArchList:
BsAddress = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch,
GlobalData.gGlobalDefines['TARGET'],
GlobalData.gGlobalDefines["TOOL_CHAIN_TAG"]].BsBaseAddress
if BsAddress:
break
FvAddressFile.writelines("EFI_BOOT_DRIVER_BASE_ADDRESS = " + \
BsAddress + \
DataType.TAB_LINE_BREAK)
RtAddress = '0'
for Arch in reversed(ArchList):
temp = GenFdsGlobalVariable.WorkSpace.BuildObject[
GenFdsGlobalVariable.ActivePlatform, Arch, GlobalData.gGlobalDefines['TARGET'],
GlobalData.gGlobalDefines["TOOL_CHAIN_TAG"]].RtBaseAddress
if temp:
RtAddress = temp
break
FvAddressFile.writelines("EFI_RUNTIME_DRIVER_BASE_ADDRESS = " + \
RtAddress + \
DataType.TAB_LINE_BREAK)
FvAddressFile.close()
## ReplaceWorkspaceMacro()
#
# @param String String that may contain macro
#
@staticmethod
def ReplaceWorkspaceMacro(String):
String = mws.handleWsMacro(String)
Str = String.replace('$(WORKSPACE)', GenFdsGlobalVariable.WorkSpaceDir)
if os.path.exists(Str):
if not os.path.isabs(Str):
Str = os.path.abspath(Str)
else:
Str = mws.join(GenFdsGlobalVariable.WorkSpaceDir, String)
return os.path.normpath(Str)
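# Illustrative example (not part of the original source): a string such as
# '$(WORKSPACE)/MdePkg/MdePkg.dec' has '$(WORKSPACE)' replaced with WorkSpaceDir;
# if the resulting path does not exist it is re-joined against the (multiple)
# workspace via mws.join() before being normalized.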
## Check if the input files are newer than output files
#
# @param Output Path of output file
# @param Input Path list of input files
#
# @retval True if Output doesn't exist, or any Input is newer
# @retval False if all Input is older than Output
#
@staticmethod
def NeedsUpdate(Output, Input):
if not os.path.exists(Output):
return True
# always update "Output" if no "Input" given
if not Input:
return True
# if the FDF file has changed after the 'Output' was generated, update the 'Output'
OutputTime = os.path.getmtime(Output)
if GenFdsGlobalVariable.FdfFileTimeStamp > OutputTime:
return True
for F in Input:
# always update "Output" if any "Input" doesn't exist
if not os.path.exists(F):
return True
# always update "Output" if any "Input" is newer than "Output"
if os.path.getmtime(F) > OutputTime:
return True
return False
@staticmethod
def GenerateSection(Output, Input, Type=None, CompressionType=None, Guid=None,
GuidHdrLen=None, GuidAttr=[], Ui=None, Ver=None, InputAlign=[], BuildNumber=None, DummyFile=None, IsMakefile=False):
Cmd = ["GenSec"]
if Type:
Cmd += ("-s", Type)
if CompressionType:
Cmd += ("-c", CompressionType)
if Guid:
Cmd += ("-g", Guid)
if DummyFile:
Cmd += ("--dummy", DummyFile)
if GuidHdrLen:
Cmd += ("-l", GuidHdrLen)
#Add each guided attribute
for Attr in GuidAttr:
Cmd += ("-r", Attr)
#Section Align is only for dummy section without section type
for SecAlign in InputAlign:
Cmd += ("--sectionalign", SecAlign)
CommandFile = Output + '.txt'
if Ui:
if IsMakefile:
if Ui == "$(MODULE_NAME)":
Cmd += ('-n', Ui)
else:
Cmd += ("-n", '"' + Ui + '"')
Cmd += ("-o", Output)
if ' '.join(Cmd).strip() not in GenFdsGlobalVariable.SecCmdList:
GenFdsGlobalVariable.SecCmdList.append(' '.join(Cmd).strip())
else:
SectionData = array('B', [0, 0, 0, 0])
SectionData.fromlist(array('B',Ui.encode('utf-16-le')).tolist())
SectionData.append(0)
SectionData.append(0)
Len = len(SectionData)
GenFdsGlobalVariable.SectionHeader.pack_into(SectionData, 0, Len & 0xff, (Len >> 8) & 0xff, (Len >> 16) & 0xff, 0x15)
DirName = os.path.dirname(Output)
if not CreateDirectory(DirName):
EdkLogger.error(None, FILE_CREATE_FAILURE, "Could not create directory %s" % DirName)
else:
if DirName == '':
DirName = os.getcwd()
if not os.access(DirName, os.W_OK):
EdkLogger.error(None, PERMISSION_FAILURE, "Do not have write permission on directory %s" % DirName)
try:
with open(Output, "wb") as Fd:
SectionData.tofile(Fd)
Fd.flush()
except IOError as X:
EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s' % X)
elif Ver:
Cmd += ("-n", Ver)
if BuildNumber:
Cmd += ("-j", BuildNumber)
Cmd += ("-o", Output)
SaveFileOnChange(CommandFile, ' '.join(Cmd), False)
if IsMakefile:
if ' '.join(Cmd).strip() not in GenFdsGlobalVariable.SecCmdList:
GenFdsGlobalVariable.SecCmdList.append(' '.join(Cmd).strip())
else:
if not GenFdsGlobalVariable.NeedsUpdate(Output, list(Input) + [CommandFile]):
return
GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to generate section")
else:
Cmd += ("-o", Output)
Cmd += Input
SaveFileOnChange(CommandFile, ' '.join(Cmd), False)
if IsMakefile:
if sys.platform == "win32":
Cmd = ['if', 'exist', Input[0]] + Cmd
else:
Cmd = ['-test', '-e', Input[0], "&&"] + Cmd
if ' '.join(Cmd).strip() not in GenFdsGlobalVariable.SecCmdList:
GenFdsGlobalVariable.SecCmdList.append(' '.join(Cmd).strip())
elif GenFdsGlobalVariable.NeedsUpdate(Output, list(Input) + [CommandFile]):
GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, Input))
GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to generate section")
if (os.path.getsize(Output) >= GenFdsGlobalVariable.LARGE_FILE_SIZE and
GenFdsGlobalVariable.LargeFileInFvFlags):
GenFdsGlobalVariable.LargeFileInFvFlags[-1] = True
@staticmethod
def GetAlignment (AlignString):
if not AlignString:
return 0
if AlignString.endswith('K'):
return int (AlignString.rstrip('K')) * 1024
if AlignString.endswith('M'):
return int (AlignString.rstrip('M')) * 1024 * 1024
if AlignString.endswith('G'):
return int (AlignString.rstrip('G')) * 1024 * 1024 * 1024
return int (AlignString)
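# Illustrative examples (not part of the original source):
#   GetAlignment('')   -> 0
#   GetAlignment('8')  -> 8
#   GetAlignment('4K') -> 4096
#   GetAlignment('1M') -> 1048576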
@staticmethod
def GenerateFfs(Output, Input, Type, Guid, Fixed=False, CheckSum=False, Align=None,
SectionAlign=None, MakefilePath=None):
Cmd = ["GenFfs", "-t", Type, "-g", Guid]
mFfsValidAlign = ["0", "8", "16", "128", "512", "1K", "4K", "32K", "64K", "128K", "256K", "512K", "1M", "2M", "4M", "8M", "16M"]
if Fixed == True:
Cmd.append("-x")
if CheckSum:
Cmd.append("-s")
if Align:
if Align not in mFfsValidAlign:
Align = GenFdsGlobalVariable.GetAlignment (Align)
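# Round a non-standard value up to the next valid FFS alignment; for example
# (illustrative) a raw value of 3000 falls between '1K' and '4K' and is
# therefore snapped up to the '4K' entry.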
for index in range(0, len(mFfsValidAlign) - 1):
if ((Align > GenFdsGlobalVariable.GetAlignment(mFfsValidAlign[index])) and (Align <= GenFdsGlobalVariable.GetAlignment(mFfsValidAlign[index + 1]))):
break
Align = mFfsValidAlign[index + 1]
Cmd += ("-a", Align)
Cmd += ("-o", Output)
for I in range(0, len(Input)):
if MakefilePath:
Cmd += ("-oi", Input[I])
else:
Cmd += ("-i", Input[I])
if SectionAlign and SectionAlign[I]:
Cmd += ("-n", SectionAlign[I])
CommandFile = Output + '.txt'
SaveFileOnChange(CommandFile, ' '.join(Cmd), False)
GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, Input))
if MakefilePath:
if (tuple(Cmd), tuple(GenFdsGlobalVariable.SecCmdList), tuple(GenFdsGlobalVariable.CopyList)) not in GenFdsGlobalVariable.FfsCmdDict:
GenFdsGlobalVariable.FfsCmdDict[tuple(Cmd), tuple(GenFdsGlobalVariable.SecCmdList), tuple(GenFdsGlobalVariable.CopyList)] = MakefilePath
GenFdsGlobalVariable.SecCmdList = []
GenFdsGlobalVariable.CopyList = []
else:
if not GenFdsGlobalVariable.NeedsUpdate(Output, list(Input) + [CommandFile]):
return
GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to generate FFS")
@staticmethod
def GenerateFirmwareVolume(Output, Input, BaseAddress=None, ForceRebase=None, Capsule=False, Dump=False,
AddressFile=None, MapFile=None, FfsList=[], FileSystemGuid=None):
if not GenFdsGlobalVariable.NeedsUpdate(Output, Input+FfsList):
return
GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, Input))
Cmd = ["GenFv"]
if BaseAddress:
Cmd += ("-r", BaseAddress)
if ForceRebase == False:
Cmd += ("-F", "FALSE")
elif ForceRebase == True:
Cmd += ("-F", "TRUE")
if Capsule:
Cmd.append("-c")
if Dump:
Cmd.append("-p")
if AddressFile:
Cmd += ("-a", AddressFile)
if MapFile:
Cmd += ("-m", MapFile)
if FileSystemGuid:
Cmd += ("-g", FileSystemGuid)
Cmd += ("-o", Output)
for I in Input:
Cmd += ("-i", I)
GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to generate FV")
@staticmethod
def GenerateFirmwareImage(Output, Input, Type="efi", SubType=None, Zero=False,
Strip=False, Replace=False, TimeStamp=None, Join=False,
Align=None, Padding=None, Convert=False, IsMakefile=False):
if not GenFdsGlobalVariable.NeedsUpdate(Output, Input) and not IsMakefile:
return
GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, Input))
Cmd = ["GenFw"]
if Type.lower() == "te":
Cmd.append("-t")
if SubType:
Cmd += ("-e", SubType)
if TimeStamp:
Cmd += ("-s", TimeStamp)
if Align:
Cmd += ("-a", Align)
if Padding:
Cmd += ("-p", Padding)
if Zero:
Cmd.append("-z")
if Strip:
Cmd.append("-l")
if Replace:
Cmd.append("-r")
if Join:
Cmd.append("-j")
if Convert:
Cmd.append("-m")
Cmd += ("-o", Output)
Cmd += Input
if IsMakefile:
if " ".join(Cmd).strip() not in GenFdsGlobalVariable.SecCmdList:
GenFdsGlobalVariable.SecCmdList.append(" ".join(Cmd).strip())
else:
GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to generate firmware image")
@staticmethod
def GenerateOptionRom(Output, EfiInput, BinaryInput, Compress=False, ClassCode=None,
Revision=None, DeviceId=None, VendorId=None, IsMakefile=False):
InputList = []
Cmd = ["EfiRom"]
if EfiInput:
if Compress:
Cmd.append("-ec")
else:
Cmd.append("-e")
for EfiFile in EfiInput:
Cmd.append(EfiFile)
InputList.append (EfiFile)
if BinaryInput:
Cmd.append("-b")
for BinFile in BinaryInput:
Cmd.append(BinFile)
InputList.append (BinFile)
# Check List
if not GenFdsGlobalVariable.NeedsUpdate(Output, InputList) and not IsMakefile:
return
GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, InputList))
if ClassCode:
Cmd += ("-l", ClassCode)
if Revision:
Cmd += ("-r", Revision)
if DeviceId:
Cmd += ("-i", DeviceId)
if VendorId:
Cmd += ("-f", VendorId)
Cmd += ("-o", Output)
if IsMakefile:
if " ".join(Cmd).strip() not in GenFdsGlobalVariable.SecCmdList:
GenFdsGlobalVariable.SecCmdList.append(" ".join(Cmd).strip())
else:
GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to generate option rom")
@staticmethod
def GuidTool(Output, Input, ToolPath, Options='', returnValue=[], IsMakefile=False):
if not GenFdsGlobalVariable.NeedsUpdate(Output, Input) and not IsMakefile:
return
GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, Input))
Cmd = [ToolPath, ]
Cmd += Options.split(' ')
Cmd += ("-o", Output)
Cmd += Input
if IsMakefile:
if " ".join(Cmd).strip() not in GenFdsGlobalVariable.SecCmdList:
GenFdsGlobalVariable.SecCmdList.append(" ".join(Cmd).strip())
else:
GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to call " + ToolPath, returnValue)
@staticmethod
def CallExternalTool (cmd, errorMess, returnValue=[]):
if type(cmd) not in (tuple, list):
GenFdsGlobalVariable.ErrorLogger("ToolError! Invalid parameter type in call to CallExternalTool")
if GenFdsGlobalVariable.DebugLevel != -1:
cmd += ('--debug', str(GenFdsGlobalVariable.DebugLevel))
GenFdsGlobalVariable.InfLogger (cmd)
if GenFdsGlobalVariable.VerboseMode:
cmd += ('-v',)
GenFdsGlobalVariable.InfLogger (cmd)
else:
stdout.write ('#')
stdout.flush()
GenFdsGlobalVariable.SharpCounter = GenFdsGlobalVariable.SharpCounter + 1
if GenFdsGlobalVariable.SharpCounter % GenFdsGlobalVariable.SharpNumberPerLine == 0:
stdout.write('\n')
try:
PopenObject = Popen(' '.join(cmd), stdout=PIPE, stderr=PIPE, shell=True)
except Exception as X:
EdkLogger.error("GenFds", COMMAND_FAILURE, ExtraData="%s: %s" % (str(X), cmd[0]))
(out, error) = PopenObject.communicate()
while PopenObject.returncode is None:
PopenObject.wait()
if returnValue != [] and returnValue[0] != 0:
#get command return value
returnValue[0] = PopenObject.returncode
return
if PopenObject.returncode != 0 or GenFdsGlobalVariable.VerboseMode or GenFdsGlobalVariable.DebugLevel != -1:
GenFdsGlobalVariable.InfLogger ("Return Value = %d" % PopenObject.returncode)
GenFdsGlobalVariable.InfLogger(out.decode(encoding='utf-8', errors='ignore'))
GenFdsGlobalVariable.InfLogger(error.decode(encoding='utf-8', errors='ignore'))
if PopenObject.returncode != 0:
print("###", cmd)
EdkLogger.error("GenFds", COMMAND_FAILURE, errorMess)
@staticmethod
def VerboseLogger (msg):
EdkLogger.verbose(msg)
@staticmethod
def InfLogger (msg):
EdkLogger.info(msg)
@staticmethod
def ErrorLogger (msg, File=None, Line=None, ExtraData=None):
EdkLogger.error('GenFds', GENFDS_ERROR, msg, File, Line, ExtraData)
@staticmethod
def DebugLogger (Level, msg):
EdkLogger.debug(Level, msg)
## MacroExtend()
#
# @param Str String that may contain macro
# @param MacroDict Dictionary that contains macro value pair
#
@staticmethod
def MacroExtend (Str, MacroDict=None, Arch=DataType.TAB_COMMON):
if Str is None:
return None
Dict = {'$(WORKSPACE)': GenFdsGlobalVariable.WorkSpaceDir,
# '$(OUTPUT_DIRECTORY)': GenFdsGlobalVariable.OutputDirFromDsc,
'$(TARGET)': GenFdsGlobalVariable.TargetName,
'$(TOOL_CHAIN_TAG)': GenFdsGlobalVariable.ToolChainTag,
'$(SPACE)': ' '
}
if Arch != DataType.TAB_COMMON and Arch in GenFdsGlobalVariable.ArchList:
OutputDir = GenFdsGlobalVariable.OutputDirFromDscDict[Arch]
else:
OutputDir = GenFdsGlobalVariable.OutputDirFromDscDict[GenFdsGlobalVariable.ArchList[0]]
Dict['$(OUTPUT_DIRECTORY)'] = OutputDir
if MacroDict:
Dict.update(MacroDict)
for key in Dict:
if Str.find(key) >= 0:
Str = Str.replace (key, Dict[key])
if Str.find('$(ARCH)') >= 0:
if len(GenFdsGlobalVariable.ArchList) == 1:
Str = Str.replace('$(ARCH)', GenFdsGlobalVariable.ArchList[0])
else:
EdkLogger.error("GenFds", GENFDS_ERROR, "No way to determine $(ARCH) for %s" % Str)
return Str
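# Illustrative example (not part of the original source): for a single-arch
# build, MacroExtend('$(OUTPUT_DIRECTORY)/Foo.efi') replaces
# '$(OUTPUT_DIRECTORY)' with the DSC output directory of that arch, and a
# '$(ARCH)' occurrence would be replaced with ArchList[0]; 'Foo.efi' is a
# hypothetical file name.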
## GetPcdValue()
#
# @param PcdPattern pattern that labels a PCD.
#
@staticmethod
def GetPcdValue (PcdPattern):
if PcdPattern is None:
return None
if PcdPattern.startswith('PCD('):
PcdPair = PcdPattern[4:].rstrip(')').strip().split('.')
else:
PcdPair = PcdPattern.strip().split('.')
TokenSpace = PcdPair[0]
TokenCName = PcdPair[1]
for Arch in GenFdsGlobalVariable.ArchList:
Platform = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
PcdDict = Platform.Pcds
for Key in PcdDict:
PcdObj = PcdDict[Key]
if (PcdObj.TokenCName == TokenCName) and (PcdObj.TokenSpaceGuidCName == TokenSpace):
if PcdObj.Type != DataType.TAB_PCDS_FIXED_AT_BUILD:
EdkLogger.error("GenFds", GENFDS_ERROR, "%s is not FixedAtBuild type." % PcdPattern)
if PcdObj.DatumType != DataType.TAB_VOID:
EdkLogger.error("GenFds", GENFDS_ERROR, "%s is not VOID* datum type." % PcdPattern)
return PcdObj.DefaultValue
for Package in GenFdsGlobalVariable.WorkSpace.GetPackageList(GenFdsGlobalVariable.ActivePlatform,
Arch,
GenFdsGlobalVariable.TargetName,
GenFdsGlobalVariable.ToolChainTag):
PcdDict = Package.Pcds
for Key in PcdDict:
PcdObj = PcdDict[Key]
if (PcdObj.TokenCName == TokenCName) and (PcdObj.TokenSpaceGuidCName == TokenSpace):
if PcdObj.Type != DataType.TAB_PCDS_FIXED_AT_BUILD:
EdkLogger.error("GenFds", GENFDS_ERROR, "%s is not FixedAtBuild type." % PcdPattern)
if PcdObj.DatumType != DataType.TAB_VOID:
EdkLogger.error("GenFds", GENFDS_ERROR, "%s is not VOID* datum type." % PcdPattern)
return PcdObj.DefaultValue
return ''
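# Illustrative example (not part of the original source):
#   GetPcdValue('PCD(gSomeTokenSpaceGuid.PcdSomeVoidValue)')
# splits the pattern into token space 'gSomeTokenSpaceGuid' and token name
# 'PcdSomeVoidValue' (both hypothetical) and returns the FixedAtBuild default
# value of the first matching VOID* PCD found in the platform or its packages.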
## FindExtendTool()
#
# Find location of tools to process data
#
# @param KeyStringList Filter for inputs of section generation
# @param CurrentArchList Arch list
# @param NameGuid The Guid name
#
def FindExtendTool(KeyStringList, CurrentArchList, NameGuid):
if GenFdsGlobalVariable.GuidToolDefinition:
if NameGuid in GenFdsGlobalVariable.GuidToolDefinition:
return GenFdsGlobalVariable.GuidToolDefinition[NameGuid]
ToolDefObj = ToolDefDict((os.path.join(os.getenv("WORKSPACE"), "Conf")))
ToolDef = ToolDefObj.ToolDef
ToolDb = ToolDef.ToolsDefTxtDatabase
# if the user did not specify a filter, try to deduce it from global data.
if KeyStringList is None or KeyStringList == []:
Target = GenFdsGlobalVariable.TargetName
ToolChain = GenFdsGlobalVariable.ToolChainTag
if ToolChain not in ToolDb['TOOL_CHAIN_TAG']:
EdkLogger.error("GenFds", GENFDS_ERROR, "Can not find external tool because tool tag %s is not defined in tools_def.txt!" % ToolChain)
KeyStringList = [Target + '_' + ToolChain + '_' + CurrentArchList[0]]
for Arch in CurrentArchList:
if Target + '_' + ToolChain + '_' + Arch not in KeyStringList:
KeyStringList.append(Target + '_' + ToolChain + '_' + Arch)
ToolPathTmp = None
ToolOption = None
for Arch in CurrentArchList:
MatchItem = None
MatchPathItem = None
MatchOptionsItem = None
for KeyString in KeyStringList:
KeyStringBuildTarget, KeyStringToolChain, KeyStringArch = KeyString.split('_')
if KeyStringArch != Arch:
continue
for Item in ToolDef.ToolsDefTxtDictionary:
if len(Item.split('_')) < 5:
continue
ItemTarget, ItemToolChain, ItemArch, ItemTool, ItemAttr = Item.split('_')
if ItemTarget == DataType.TAB_STAR:
ItemTarget = KeyStringBuildTarget
if ItemToolChain == DataType.TAB_STAR:
ItemToolChain = KeyStringToolChain
if ItemArch == DataType.TAB_STAR:
ItemArch = KeyStringArch
if ItemTarget != KeyStringBuildTarget:
continue
if ItemToolChain != KeyStringToolChain:
continue
if ItemArch != KeyStringArch:
continue
if ItemAttr != DataType.TAB_GUID:
# Not GUID attribute
continue
if ToolDef.ToolsDefTxtDictionary[Item].lower() != NameGuid.lower():
# No GUID value match
continue
if MatchItem:
if MatchItem.split('_')[3] == ItemTool:
# Tool name is the same
continue
if CalculatePriorityValue(MatchItem) > CalculatePriorityValue(Item):
# Current MatchItem is higher priority than new match item
continue
MatchItem = Item
if not MatchItem:
continue
ToolName = MatchItem.split('_')[3]
for Item in ToolDef.ToolsDefTxtDictionary:
if len(Item.split('_')) < 5:
continue
ItemTarget, ItemToolChain, ItemArch, ItemTool, ItemAttr = Item.split('_')
if ItemTarget == DataType.TAB_STAR:
ItemTarget = KeyStringBuildTarget
if ItemToolChain == DataType.TAB_STAR:
ItemToolChain = KeyStringToolChain
if ItemArch == DataType.TAB_STAR:
ItemArch = KeyStringArch
if ItemTarget != KeyStringBuildTarget:
continue
if ItemToolChain != KeyStringToolChain:
continue
if ItemArch != KeyStringArch:
continue
if ItemTool != ToolName:
continue
if ItemAttr == 'PATH':
if MatchPathItem:
if CalculatePriorityValue(MatchPathItem) <= CalculatePriorityValue(Item):
MatchPathItem = Item
else:
MatchPathItem = Item
if ItemAttr == 'FLAGS':
if MatchOptionsItem:
if CalculatePriorityValue(MatchOptionsItem) <= CalculatePriorityValue(Item):
MatchOptionsItem = Item
else:
MatchOptionsItem = Item
if MatchPathItem:
ToolPathTmp = ToolDef.ToolsDefTxtDictionary[MatchPathItem]
if MatchOptionsItem:
ToolOption = ToolDef.ToolsDefTxtDictionary[MatchOptionsItem]
for Arch in CurrentArchList:
MatchItem = None
MatchPathItem = None
MatchOptionsItem = None
for KeyString in KeyStringList:
KeyStringBuildTarget, KeyStringToolChain, KeyStringArch = KeyString.split('_')
if KeyStringArch != Arch:
continue
Platform = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, KeyStringBuildTarget, KeyStringToolChain]
for Item in Platform.BuildOptions:
if len(Item[1].split('_')) < 5:
continue
ItemTarget, ItemToolChain, ItemArch, ItemTool, ItemAttr = Item[1].split('_')
if ItemTarget == DataType.TAB_STAR:
ItemTarget = KeyStringBuildTarget
if ItemToolChain == DataType.TAB_STAR:
ItemToolChain = KeyStringToolChain
if ItemArch == DataType.TAB_STAR:
ItemArch = KeyStringArch
if ItemTarget != KeyStringBuildTarget:
continue
if ItemToolChain != KeyStringToolChain:
continue
if ItemArch != KeyStringArch:
continue
if ItemAttr != DataType.TAB_GUID:
# Not GUID attribute match
continue
if Platform.BuildOptions[Item].lower() != NameGuid.lower():
# No GUID value match
continue
if MatchItem:
if MatchItem[1].split('_')[3] == ItemTool:
# Tool name is the same
continue
if CalculatePriorityValue(MatchItem[1]) > CalculatePriorityValue(Item[1]):
# Current MatchItem is higher priority than new match item
continue
MatchItem = Item
if not MatchItem:
continue
ToolName = MatchItem[1].split('_')[3]
for Item in Platform.BuildOptions:
if len(Item[1].split('_')) < 5:
continue
ItemTarget, ItemToolChain, ItemArch, ItemTool, ItemAttr = Item[1].split('_')
if ItemTarget == DataType.TAB_STAR:
ItemTarget = KeyStringBuildTarget
if ItemToolChain == DataType.TAB_STAR:
ItemToolChain = KeyStringToolChain
if ItemArch == DataType.TAB_STAR:
ItemArch = KeyStringArch
if ItemTarget != KeyStringBuildTarget:
continue
if ItemToolChain != KeyStringToolChain:
continue
if ItemArch != KeyStringArch:
continue
if ItemTool != ToolName:
continue
if ItemAttr == 'PATH':
if MatchPathItem:
if CalculatePriorityValue(MatchPathItem[1]) <= CalculatePriorityValue(Item[1]):
MatchPathItem = Item
else:
MatchPathItem = Item
if ItemAttr == 'FLAGS':
if MatchOptionsItem:
if CalculatePriorityValue(MatchOptionsItem[1]) <= CalculatePriorityValue(Item[1]):
MatchOptionsItem = Item
else:
MatchOptionsItem = Item
if MatchPathItem:
ToolPathTmp = Platform.BuildOptions[MatchPathItem]
if MatchOptionsItem:
ToolOption = Platform.BuildOptions[MatchOptionsItem]
GenFdsGlobalVariable.GuidToolDefinition[NameGuid] = (ToolPathTmp, ToolOption)
return ToolPathTmp, ToolOption
# --- End of file: BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py (edk2-master) ---
## @file
# process FV generation
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
import Common.LongFilePathOs as os
import subprocess
from io import BytesIO
from struct import *
from . import FfsFileStatement
from .GenFdsGlobalVariable import GenFdsGlobalVariable
from Common.Misc import SaveFileOnChange, PackGUID
from Common.LongFilePathSupport import CopyLongFilePath
from Common.LongFilePathSupport import OpenLongFilePath as open
from Common.DataType import *
FV_UI_EXT_ENTY_GUID = 'A67DF1FA-8DE8-4E98-AF09-4BDF2EFFBC7C'
## generate FV
#
#
class FV (object):
## The constructor
#
# @param self The object pointer
#
def __init__(self, Name=None):
self.UiFvName = Name
self.CreateFileName = None
self.BlockSizeList = []
self.DefineVarDict = {}
self.SetVarDict = {}
self.FvAlignment = None
self.FvAttributeDict = {}
self.FvNameGuid = None
self.FvNameString = None
self.AprioriSectionList = []
self.FfsList = []
self.BsBaseAddress = None
self.RtBaseAddress = None
self.FvInfFile = None
self.FvAddressFile = None
self.BaseAddress = None
self.InfFileName = None
self.FvAddressFileName = None
self.CapsuleName = None
self.FvBaseAddress = None
self.FvForceRebase = None
self.FvRegionInFD = None
self.UsedSizeEnable = False
self.FvExtEntryTypeValue = []
self.FvExtEntryType = []
self.FvExtEntryData = []
## AddToBuffer()
#
# Generate Fv and add it to the Buffer
#
# @param self The object pointer
# @param Buffer The buffer generated FV data will be put
# @param BaseAddress base address of FV
# @param BlockSize block size of FV
# @param BlockNum How many blocks in FV
# @param ErasePolarity Flash erase polarity
# @param MacroDict macro value pair
# @retval string Generated FV file path
#
def AddToBuffer (self, Buffer, BaseAddress=None, BlockSize= None, BlockNum=None, ErasePloarity='1', MacroDict = None, Flag=False):
if BaseAddress is None and self.UiFvName.upper() + 'fv' in GenFdsGlobalVariable.ImageBinDict:
return GenFdsGlobalVariable.ImageBinDict[self.UiFvName.upper() + 'fv']
if MacroDict is None:
MacroDict = {}
#
# Check whether FV in Capsule is in FD flash region.
# If yes, report an error: an FV in a Capsule image that is also placed in an FD flash region is not supported.
#
if self.CapsuleName is not None:
for FdObj in GenFdsGlobalVariable.FdfParser.Profile.FdDict.values():
for RegionObj in FdObj.RegionList:
if RegionObj.RegionType == BINARY_FILE_TYPE_FV:
for RegionData in RegionObj.RegionDataList:
if RegionData.endswith(".fv"):
continue
elif RegionData.upper() + 'fv' in GenFdsGlobalVariable.ImageBinDict:
continue
elif self.UiFvName.upper() == RegionData.upper():
GenFdsGlobalVariable.ErrorLogger("Capsule %s in FD region can't contain a FV %s in FD region." % (self.CapsuleName, self.UiFvName.upper()))
if not Flag:
GenFdsGlobalVariable.InfLogger( "\nGenerating %s FV" %self.UiFvName)
GenFdsGlobalVariable.LargeFileInFvFlags.append(False)
FFSGuid = None
if self.FvBaseAddress is not None:
BaseAddress = self.FvBaseAddress
if not Flag:
self._InitializeInf(BaseAddress, BlockSize, BlockNum, ErasePloarity)
#
# First Process the Apriori section
#
MacroDict.update(self.DefineVarDict)
GenFdsGlobalVariable.VerboseLogger('First generate Apriori file !')
FfsFileList = []
for AprSection in self.AprioriSectionList:
FileName = AprSection.GenFfs (self.UiFvName, MacroDict, IsMakefile=Flag)
FfsFileList.append(FileName)
# Add Apriori file name to Inf file
if not Flag:
self.FvInfFile.append("EFI_FILE_NAME = " + \
FileName + \
TAB_LINE_BREAK)
# Process Modules in FfsList
for FfsFile in self.FfsList:
if Flag:
if isinstance(FfsFile, FfsFileStatement.FileStatement):
continue
if GenFdsGlobalVariable.EnableGenfdsMultiThread and GenFdsGlobalVariable.ModuleFile and GenFdsGlobalVariable.ModuleFile.Path.find(os.path.normpath(FfsFile.InfFileName)) == -1:
continue
FileName = FfsFile.GenFfs(MacroDict, FvParentAddr=BaseAddress, IsMakefile=Flag, FvName=self.UiFvName)
FfsFileList.append(FileName)
if not Flag:
self.FvInfFile.append("EFI_FILE_NAME = " + \
FileName + \
TAB_LINE_BREAK)
if not Flag:
FvInfFile = ''.join(self.FvInfFile)
SaveFileOnChange(self.InfFileName, FvInfFile, False)
#
# Call GenFv tool
#
FvOutputFile = os.path.join(GenFdsGlobalVariable.FvDir, self.UiFvName)
FvOutputFile = FvOutputFile + '.Fv'
# BUGBUG: FvOutputFile could be specified from FDF file (FV section, CreateFile statement)
if self.CreateFileName is not None:
FvOutputFile = self.CreateFileName
if Flag:
GenFdsGlobalVariable.ImageBinDict[self.UiFvName.upper() + 'fv'] = FvOutputFile
return FvOutputFile
FvInfoFileName = os.path.join(GenFdsGlobalVariable.FfsDir, self.UiFvName + '.inf')
if not Flag:
CopyLongFilePath(GenFdsGlobalVariable.FvAddressFileName, FvInfoFileName)
OrigFvInfo = None
if os.path.exists (FvInfoFileName):
OrigFvInfo = open(FvInfoFileName, 'r').read()
if GenFdsGlobalVariable.LargeFileInFvFlags[-1]:
FFSGuid = GenFdsGlobalVariable.EFI_FIRMWARE_FILE_SYSTEM3_GUID
GenFdsGlobalVariable.GenerateFirmwareVolume(
FvOutputFile,
[self.InfFileName],
AddressFile=FvInfoFileName,
FfsList=FfsFileList,
ForceRebase=self.FvForceRebase,
FileSystemGuid=FFSGuid
)
NewFvInfo = None
if os.path.exists (FvInfoFileName):
NewFvInfo = open(FvInfoFileName, 'r').read()
if NewFvInfo is not None and NewFvInfo != OrigFvInfo:
FvChildAddr = []
AddFileObj = open(FvInfoFileName, 'r')
AddrStrings = AddFileObj.readlines()
AddrKeyFound = False
for AddrString in AddrStrings:
if AddrKeyFound:
#get base address for the inside FvImage
FvChildAddr.append (AddrString)
elif AddrString.find ("[FV_BASE_ADDRESS]") != -1:
AddrKeyFound = True
AddFileObj.close()
if FvChildAddr != []:
# Update Ffs again
for FfsFile in self.FfsList:
FileName = FfsFile.GenFfs(MacroDict, FvChildAddr, BaseAddress, IsMakefile=Flag, FvName=self.UiFvName)
if GenFdsGlobalVariable.LargeFileInFvFlags[-1]:
FFSGuid = GenFdsGlobalVariable.EFI_FIRMWARE_FILE_SYSTEM3_GUID
#Update GenFv again
GenFdsGlobalVariable.GenerateFirmwareVolume(
FvOutputFile,
[self.InfFileName],
AddressFile=FvInfoFileName,
FfsList=FfsFileList,
ForceRebase=self.FvForceRebase,
FileSystemGuid=FFSGuid
)
#
# Write the Fv contents to Buffer
#
if os.path.isfile(FvOutputFile) and os.path.getsize(FvOutputFile) >= 0x48:
FvFileObj = open(FvOutputFile, 'rb')
# PI FvHeader is 0x48 byte
FvHeaderBuffer = FvFileObj.read(0x48)
Signature = FvHeaderBuffer[0x28:0x32]
if Signature and Signature.startswith(b'_FVH'):
GenFdsGlobalVariable.VerboseLogger("\nGenerate %s FV Successfully" % self.UiFvName)
GenFdsGlobalVariable.SharpCounter = 0
FvFileObj.seek(0)
Buffer.write(FvFileObj.read())
# FV alignment position.
FvAlignmentValue = 1 << (ord(FvHeaderBuffer[0x2E:0x2F]) & 0x1F)
if FvAlignmentValue >= 0x400:
if FvAlignmentValue >= 0x100000:
if FvAlignmentValue >= 0x1000000:
#The max alignment supported by FFS is 16M.
self.FvAlignment = "16M"
else:
self.FvAlignment = str(FvAlignmentValue // 0x100000) + "M"
else:
self.FvAlignment = str(FvAlignmentValue // 0x400) + "K"
else:
# FvAlignmentValue is less than 1K
self.FvAlignment = str (FvAlignmentValue)
FvFileObj.close()
GenFdsGlobalVariable.ImageBinDict[self.UiFvName.upper() + 'fv'] = FvOutputFile
GenFdsGlobalVariable.LargeFileInFvFlags.pop()
else:
GenFdsGlobalVariable.ErrorLogger("Invalid FV file %s." % self.UiFvName)
else:
GenFdsGlobalVariable.ErrorLogger("Failed to generate %s FV file." %self.UiFvName)
return FvOutputFile
## _GetBlockSize()
#
# Calculate FV's block size
# Inherit block size from FD if no block size specified in FV
#
def _GetBlockSize(self):
if self.BlockSizeList:
return True
for FdObj in GenFdsGlobalVariable.FdfParser.Profile.FdDict.values():
for RegionObj in FdObj.RegionList:
if RegionObj.RegionType != BINARY_FILE_TYPE_FV:
continue
for RegionData in RegionObj.RegionDataList:
#
# Found the FD and region that contain this FV
#
if self.UiFvName.upper() == RegionData.upper():
RegionObj.BlockInfoOfRegion(FdObj.BlockSizeList, self)
if self.BlockSizeList:
return True
return False
## _InitializeInf()
#
# Initialize the inf file to create FV
#
# @param self The object pointer
# @param BaseAddress base address of FV
# @param BlockSize block size of FV
# @param BlockNum How many blocks in FV
# @param ErasePolarity Flash erase polarity
#
def _InitializeInf (self, BaseAddress = None, BlockSize= None, BlockNum = None, ErasePloarity='1'):
#
# Create FV inf file
#
self.InfFileName = os.path.join(GenFdsGlobalVariable.FvDir,
self.UiFvName + '.inf')
self.FvInfFile = []
#
# Add [Options]
#
self.FvInfFile.append("[options]" + TAB_LINE_BREAK)
if BaseAddress is not None:
self.FvInfFile.append("EFI_BASE_ADDRESS = " + \
BaseAddress + \
TAB_LINE_BREAK)
if BlockSize is not None:
self.FvInfFile.append("EFI_BLOCK_SIZE = " + \
'0x%X' %BlockSize + \
TAB_LINE_BREAK)
if BlockNum is not None:
self.FvInfFile.append("EFI_NUM_BLOCKS = " + \
' 0x%X' %BlockNum + \
TAB_LINE_BREAK)
else:
if self.BlockSizeList == []:
if not self._GetBlockSize():
# set the default block size to 1
self.FvInfFile.append("EFI_BLOCK_SIZE = 0x1" + TAB_LINE_BREAK)
for BlockSize in self.BlockSizeList:
if BlockSize[0] is not None:
self.FvInfFile.append("EFI_BLOCK_SIZE = " + \
'0x%X' %BlockSize[0] + \
TAB_LINE_BREAK)
if BlockSize[1] is not None:
self.FvInfFile.append("EFI_NUM_BLOCKS = " + \
' 0x%X' %BlockSize[1] + \
TAB_LINE_BREAK)
if self.BsBaseAddress is not None:
self.FvInfFile.append('EFI_BOOT_DRIVER_BASE_ADDRESS = ' + \
'0x%X' %self.BsBaseAddress)
if self.RtBaseAddress is not None:
self.FvInfFile.append('EFI_RUNTIME_DRIVER_BASE_ADDRESS = ' + \
'0x%X' %self.RtBaseAddress)
#
# Add attribute
#
self.FvInfFile.append("[attributes]" + TAB_LINE_BREAK)
self.FvInfFile.append("EFI_ERASE_POLARITY = " + \
' %s' %ErasePloarity + \
TAB_LINE_BREAK)
if not (self.FvAttributeDict is None):
for FvAttribute in self.FvAttributeDict.keys():
if FvAttribute == "FvUsedSizeEnable":
if self.FvAttributeDict[FvAttribute].upper() in ('TRUE', '1'):
self.UsedSizeEnable = True
continue
self.FvInfFile.append("EFI_" + \
FvAttribute + \
' = ' + \
self.FvAttributeDict[FvAttribute] + \
TAB_LINE_BREAK )
if self.FvAlignment is not None:
self.FvInfFile.append("EFI_FVB2_ALIGNMENT_" + \
self.FvAlignment.strip() + \
" = TRUE" + \
TAB_LINE_BREAK)
#
# Generate FV extension header file
#
if not self.FvNameGuid:
if len(self.FvExtEntryType) > 0 or self.UsedSizeEnable:
GenFdsGlobalVariable.ErrorLogger("FV Extension Header Entries declared for %s with no FvNameGuid declaration." % (self.UiFvName))
else:
TotalSize = 16 + 4
Buffer = bytearray()
if self.UsedSizeEnable:
TotalSize += (4 + 4)
## define EFI_FV_EXT_TYPE_USED_SIZE_TYPE 0x03
#typedef struct
# {
# EFI_FIRMWARE_VOLUME_EXT_ENTRY Hdr;
# UINT32 UsedSize;
# } EFI_FIRMWARE_VOLUME_EXT_ENTRY_USED_SIZE_TYPE;
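# Explanatory note (added, not part of the original source): the pack below
# encodes that structure as ExtEntrySize = 8, ExtEntryType = 0x03
# (EFI_FV_EXT_TYPE_USED_SIZE_TYPE) and a UsedSize placeholder of 0.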
Buffer += pack('HHL', 8, 3, 0)
if self.FvNameString == 'TRUE':
#
# Create EXT entry for FV UI name
# This GUID is used: A67DF1FA-8DE8-4E98-AF09-4BDF2EFFBC7C
#
FvUiLen = len(self.UiFvName)
TotalSize += (FvUiLen + 16 + 4)
Guid = FV_UI_EXT_ENTY_GUID.split('-')
#
# Layout:
# EFI_FIRMWARE_VOLUME_EXT_ENTRY: size 4
# GUID: size 16
# FV UI name
#
Buffer += (pack('HH', (FvUiLen + 16 + 4), 0x0002)
+ PackGUID(Guid)
+ self.UiFvName.encode('utf-8'))
for Index in range (0, len(self.FvExtEntryType)):
if self.FvExtEntryType[Index] == 'FILE':
# check if the path is absolute or relative
if os.path.isabs(self.FvExtEntryData[Index]):
FileFullPath = os.path.normpath(self.FvExtEntryData[Index])
else:
FileFullPath = os.path.normpath(os.path.join(GenFdsGlobalVariable.WorkSpaceDir, self.FvExtEntryData[Index]))
# check if the file path exists or not
if not os.path.isfile(FileFullPath):
GenFdsGlobalVariable.ErrorLogger("Error opening FV Extension Header Entry file %s." % (self.FvExtEntryData[Index]))
FvExtFile = open (FileFullPath, 'rb')
FvExtFile.seek(0, 2)
Size = FvExtFile.tell()
if Size >= 0x10000:
GenFdsGlobalVariable.ErrorLogger("The size of FV Extension Header Entry file %s exceeds 0x10000." % (self.FvExtEntryData[Index]))
TotalSize += (Size + 4)
FvExtFile.seek(0)
Buffer += pack('HH', (Size + 4), int(self.FvExtEntryTypeValue[Index], 16))
Buffer += FvExtFile.read()
FvExtFile.close()
if self.FvExtEntryType[Index] == 'DATA':
ByteList = self.FvExtEntryData[Index].split(',')
Size = len (ByteList)
if Size >= 0x10000:
GenFdsGlobalVariable.ErrorLogger("The size of FV Extension Header Entry data %s exceeds 0x10000." % (self.FvExtEntryData[Index]))
TotalSize += (Size + 4)
Buffer += pack('HH', (Size + 4), int(self.FvExtEntryTypeValue[Index], 16))
for Index1 in range (0, Size):
Buffer += pack('B', int(ByteList[Index1], 16))
Guid = self.FvNameGuid.split('-')
Buffer = PackGUID(Guid) + pack('=L', TotalSize) + Buffer
#
# Generate FV extension header file if the total size is not zero
#
if TotalSize > 0:
FvExtHeaderFileName = os.path.join(GenFdsGlobalVariable.FvDir, self.UiFvName + '.ext')
FvExtHeaderFile = BytesIO()
FvExtHeaderFile.write(Buffer)
Changed = SaveFileOnChange(FvExtHeaderFileName, FvExtHeaderFile.getvalue(), True)
FvExtHeaderFile.close()
if Changed:
if os.path.exists (self.InfFileName):
os.remove (self.InfFileName)
self.FvInfFile.append("EFI_FV_EXT_HEADER_FILE_NAME = " + \
FvExtHeaderFileName + \
TAB_LINE_BREAK)
#
# Add [Files]
#
self.FvInfFile.append("[files]" + TAB_LINE_BREAK)
# --- End of file: BaseTools/Source/Python/GenFds/Fv.py (edk2-master) ---
## @file
# process FFS generation from INF statement
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2014-2016 Hewlett-Packard Development Company, L.P.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
from . import Rule
import Common.LongFilePathOs as os
from io import BytesIO
from struct import *
from .GenFdsGlobalVariable import GenFdsGlobalVariable
from .Ffs import SectionSuffix,FdfFvFileTypeToFileType
import subprocess
import sys
from . import Section
from . import RuleSimpleFile
from . import RuleComplexFile
from CommonDataClass.FdfClass import FfsInfStatementClassObject
from Common.MultipleWorkspace import MultipleWorkspace as mws
from Common.DataType import SUP_MODULE_USER_DEFINED
from Common.DataType import SUP_MODULE_HOST_APPLICATION
from Common.StringUtils import *
from Common.Misc import PathClass
from Common.Misc import GuidStructureByteArrayToGuidString
from Common.Misc import ProcessDuplicatedInf
from Common.Misc import GetVariableOffset
from Common import EdkLogger
from Common.BuildToolError import *
from .GuidSection import GuidSection
from .FvImageSection import FvImageSection
from Common.Misc import PeImageClass
from AutoGen.GenDepex import DependencyExpression
from PatchPcdValue.PatchPcdValue import PatchBinaryFile
from Common.LongFilePathSupport import CopyLongFilePath
from Common.LongFilePathSupport import OpenLongFilePath as open
import Common.GlobalData as GlobalData
from .DepexSection import DepexSection
from Common.Misc import SaveFileOnChange
from Common.Expression import *
from Common.DataType import *
## generate FFS from INF
#
#
class FfsInfStatement(FfsInfStatementClassObject):
## The constructor
#
# @param self The object pointer
#
def __init__(self):
FfsInfStatementClassObject.__init__(self)
self.TargetOverrideList = []
self.ShadowFromInfFile = None
self.KeepRelocFromRule = None
self.InDsc = True
self.OptRomDefs = {}
self.PiSpecVersion = '0x00000000'
self.InfModule = None
self.FinalTargetSuffixMap = {}
self.CurrentLineNum = None
self.CurrentLineContent = None
self.FileName = None
self.InfFileName = None
self.OverrideGuid = None
self.PatchedBinFile = ''
self.MacroDict = {}
self.Depex = False
## GetFinalTargetSuffixMap() method
#
# Get final build target list
def GetFinalTargetSuffixMap(self):
if not self.InfModule or not self.CurrentArch:
return []
if not self.FinalTargetSuffixMap:
FinalBuildTargetList = GenFdsGlobalVariable.GetModuleCodaTargetList(self.InfModule, self.CurrentArch)
for File in FinalBuildTargetList:
self.FinalTargetSuffixMap.setdefault(os.path.splitext(File)[1], []).append(File)
# Check if current INF module has DEPEX
if '.depex' not in self.FinalTargetSuffixMap and self.InfModule.ModuleType != SUP_MODULE_USER_DEFINED and self.InfModule.ModuleType != SUP_MODULE_HOST_APPLICATION \
and not self.InfModule.DxsFile and not self.InfModule.LibraryClass:
ModuleType = self.InfModule.ModuleType
PlatformDataBase = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, self.CurrentArch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
if ModuleType != SUP_MODULE_USER_DEFINED and ModuleType != SUP_MODULE_HOST_APPLICATION:
for LibraryClass in PlatformDataBase.LibraryClasses.GetKeys():
if LibraryClass.startswith("NULL") and PlatformDataBase.LibraryClasses[LibraryClass, ModuleType]:
self.InfModule.LibraryClasses[LibraryClass] = PlatformDataBase.LibraryClasses[LibraryClass, ModuleType]
StrModule = str(self.InfModule)
PlatformModule = None
if StrModule in PlatformDataBase.Modules:
PlatformModule = PlatformDataBase.Modules[StrModule]
for LibraryClass in PlatformModule.LibraryClasses:
if LibraryClass.startswith("NULL"):
self.InfModule.LibraryClasses[LibraryClass] = PlatformModule.LibraryClasses[LibraryClass]
DependencyList = [self.InfModule]
LibraryInstance = {}
DepexList = []
while len(DependencyList) > 0:
Module = DependencyList.pop(0)
if not Module:
continue
for Dep in Module.Depex[self.CurrentArch, ModuleType]:
if DepexList != []:
DepexList.append('AND')
DepexList.append('(')
DepexList.extend(Dep)
if DepexList[-1] == 'END': # no need for an END at this point
DepexList.pop()
DepexList.append(')')
if 'BEFORE' in DepexList or 'AFTER' in DepexList:
break
for LibName in Module.LibraryClasses:
if LibName in LibraryInstance:
continue
if PlatformModule and LibName in PlatformModule.LibraryClasses:
LibraryPath = PlatformModule.LibraryClasses[LibName]
else:
LibraryPath = PlatformDataBase.LibraryClasses[LibName, ModuleType]
if not LibraryPath:
LibraryPath = Module.LibraryClasses[LibName]
if not LibraryPath:
continue
LibraryModule = GenFdsGlobalVariable.WorkSpace.BuildObject[LibraryPath, self.CurrentArch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
LibraryInstance[LibName] = LibraryModule
DependencyList.append(LibraryModule)
if DepexList:
Dpx = DependencyExpression(DepexList, ModuleType, True)
if len(Dpx.PostfixNotation) != 0:
# It means this module has DEPEX
self.FinalTargetSuffixMap['.depex'] = [os.path.join(self.EfiOutputPath, self.BaseName) + '.depex']
return self.FinalTargetSuffixMap
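# Illustrative example of the resulting map (module name and paths hypothetical):
#   {'.efi':   ['<BUILD_DIR>/.../FooDxe/DEBUG/FooDxe.efi'],
#    '.depex': ['<BUILD_DIR>/.../FooDxe/OUTPUT/FooDxe.depex']}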
## __InfParse() method
#
# Parse inf file to get module information
#
# @param self The object pointer
# @param Dict dictionary contains macro and value pair
#
def __InfParse__(self, Dict = None, IsGenFfs=False):
GenFdsGlobalVariable.VerboseLogger( " Begine parsing INf file : %s" %self.InfFileName)
self.InfFileName = self.InfFileName.replace('$(WORKSPACE)', '')
if len(self.InfFileName) > 1 and self.InfFileName[0] == '\\' and self.InfFileName[1] == '\\':
pass
elif self.InfFileName[0] == '\\' or self.InfFileName[0] == '/' :
self.InfFileName = self.InfFileName[1:]
if self.InfFileName.find('$') == -1:
InfPath = NormPath(self.InfFileName)
if not os.path.exists(InfPath):
InfPath = GenFdsGlobalVariable.ReplaceWorkspaceMacro(InfPath)
if not os.path.exists(InfPath):
EdkLogger.error("GenFds", GENFDS_ERROR, "Non-existant Module %s !" % (self.InfFileName))
self.CurrentArch = self.GetCurrentArch()
#
# Get the InfClass object
#
PathClassObj = PathClass(self.InfFileName, GenFdsGlobalVariable.WorkSpaceDir)
ErrorCode, ErrorInfo = PathClassObj.Validate(".inf")
if ErrorCode != 0:
EdkLogger.error("GenFds", ErrorCode, ExtraData=ErrorInfo)
#
# Cache lower case version of INF path before processing FILE_GUID override
#
InfLowerPath = str(PathClassObj).lower()
if self.OverrideGuid:
PathClassObj = ProcessDuplicatedInf(PathClassObj, self.OverrideGuid, GenFdsGlobalVariable.WorkSpaceDir)
if self.CurrentArch is not None:
Inf = GenFdsGlobalVariable.WorkSpace.BuildObject[PathClassObj, self.CurrentArch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
#
# Set Ffs BaseName, ModuleGuid, ModuleType, Version, OutputPath
#
self.BaseName = Inf.BaseName
self.ModuleGuid = Inf.Guid
self.ModuleType = Inf.ModuleType
if Inf.Specification is not None and 'PI_SPECIFICATION_VERSION' in Inf.Specification:
self.PiSpecVersion = Inf.Specification['PI_SPECIFICATION_VERSION']
if Inf.AutoGenVersion < 0x00010005:
self.ModuleType = Inf.ComponentType
self.VersionString = Inf.Version
self.BinFileList = Inf.Binaries
self.SourceFileList = Inf.Sources
if self.KeepReloc is None and Inf.Shadow:
self.ShadowFromInfFile = Inf.Shadow
else:
Inf = GenFdsGlobalVariable.WorkSpace.BuildObject[PathClassObj, TAB_COMMON, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
self.BaseName = Inf.BaseName
self.ModuleGuid = Inf.Guid
self.ModuleType = Inf.ModuleType
if Inf.Specification is not None and 'PI_SPECIFICATION_VERSION' in Inf.Specification:
self.PiSpecVersion = Inf.Specification['PI_SPECIFICATION_VERSION']
self.VersionString = Inf.Version
self.BinFileList = Inf.Binaries
self.SourceFileList = Inf.Sources
if self.BinFileList == []:
EdkLogger.error("GenFds", GENFDS_ERROR,
"INF %s specified in FDF could not be found in build ARCH %s!" \
% (self.InfFileName, GenFdsGlobalVariable.ArchList))
if self.OverrideGuid:
self.ModuleGuid = self.OverrideGuid
if len(self.SourceFileList) != 0 and not self.InDsc:
EdkLogger.warn("GenFds", GENFDS_ERROR, "Module %s NOT found in DSC file; Is it really a binary module?" % (self.InfFileName))
if self.ModuleType == SUP_MODULE_SMM_CORE and int(self.PiSpecVersion, 16) < 0x0001000A:
EdkLogger.error("GenFds", FORMAT_NOT_SUPPORTED, "SMM_CORE module type can't be used in the module with PI_SPECIFICATION_VERSION less than 0x0001000A", File=self.InfFileName)
if self.ModuleType == SUP_MODULE_MM_CORE_STANDALONE and int(self.PiSpecVersion, 16) < 0x00010032:
EdkLogger.error("GenFds", FORMAT_NOT_SUPPORTED, "MM_CORE_STANDALONE module type can't be used in the module with PI_SPECIFICATION_VERSION less than 0x00010032", File=self.InfFileName)
if Inf._Defs is not None and len(Inf._Defs) > 0:
self.OptRomDefs.update(Inf._Defs)
self.PatchPcds = []
InfPcds = Inf.Pcds
Platform = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, self.CurrentArch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
FdfPcdDict = GenFdsGlobalVariable.FdfParser.Profile.PcdDict
PlatformPcds = Platform.Pcds
# Workaround here: both build and GenFds tool convert the workspace path to lower case
# But INF file path in FDF and DSC file may have real case characters.
        # Try to convert the path to lower case to see if PCD values are overridden by the DSC.
DscModules = {}
for DscModule in Platform.Modules:
DscModules[str(DscModule).lower()] = Platform.Modules[DscModule]
for PcdKey in InfPcds:
Pcd = InfPcds[PcdKey]
if not hasattr(Pcd, 'Offset'):
continue
if Pcd.Type != TAB_PCDS_PATCHABLE_IN_MODULE:
continue
# Override Patchable PCD value by the value from DSC
PatchPcd = None
if InfLowerPath in DscModules and PcdKey in DscModules[InfLowerPath].Pcds:
PatchPcd = DscModules[InfLowerPath].Pcds[PcdKey]
elif PcdKey in Platform.Pcds:
PatchPcd = Platform.Pcds[PcdKey]
DscOverride = False
if PatchPcd and Pcd.Type == PatchPcd.Type:
DefaultValue = PatchPcd.DefaultValue
DscOverride = True
# Override Patchable PCD value by the value from FDF
FdfOverride = False
if PcdKey in FdfPcdDict:
DefaultValue = FdfPcdDict[PcdKey]
FdfOverride = True
# Override Patchable PCD value by the value from Build Option
BuildOptionOverride = False
if GlobalData.BuildOptionPcd:
for pcd in GlobalData.BuildOptionPcd:
if PcdKey == (pcd[1], pcd[0]):
if pcd[2]:
continue
DefaultValue = pcd[3]
BuildOptionOverride = True
break
if not DscOverride and not FdfOverride and not BuildOptionOverride:
continue
# Support Flexible PCD format
if DefaultValue:
try:
DefaultValue = ValueExpressionEx(DefaultValue, Pcd.DatumType, Platform._GuidDict)(True)
except BadExpression:
EdkLogger.error("GenFds", GENFDS_ERROR, 'PCD [%s.%s] Value "%s"' %(Pcd.TokenSpaceGuidCName, Pcd.TokenCName, DefaultValue), File=self.InfFileName)
if Pcd.InfDefaultValue:
try:
Pcd.InfDefaultValue = ValueExpressionEx(Pcd.InfDefaultValue, Pcd.DatumType, Platform._GuidDict)(True)
except BadExpression:
EdkLogger.error("GenFds", GENFDS_ERROR, 'PCD [%s.%s] Value "%s"' %(Pcd.TokenSpaceGuidCName, Pcd.TokenCName, Pcd.DefaultValue), File=self.InfFileName)
            # Check value; if the values are equal, there is no need to patch
if Pcd.DatumType == TAB_VOID:
if Pcd.InfDefaultValue == DefaultValue or not DefaultValue:
continue
# Get the string size from FDF or DSC
if DefaultValue[0] == 'L':
# Remove L"", but the '\0' must be appended
MaxDatumSize = str((len(DefaultValue) - 2) * 2)
elif DefaultValue[0] == '{':
MaxDatumSize = str(len(DefaultValue.split(',')))
else:
MaxDatumSize = str(len(DefaultValue) - 1)
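                # Illustrative size calculations for the three VOID* forms above
                # (hypothetical values, not taken from any real platform):
                #   L"Abc"       -> (6 - 2) * 2 = 8 bytes (3 UCS-2 chars plus terminator)
                #   {0x01, 0x02} -> 2 comma-separated bytes -> "2"
                #   "Abc"        -> 5 - 1 = 4 bytes (3 ASCII chars plus terminator)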
if DscOverride:
Pcd.MaxDatumSize = PatchPcd.MaxDatumSize
                # If the maximum size is not defined in the DSC, try to get the current size from the INF
if not Pcd.MaxDatumSize:
Pcd.MaxDatumSize = str(len(Pcd.InfDefaultValue.split(',')))
else:
Base1 = Base2 = 10
if Pcd.InfDefaultValue.upper().startswith('0X'):
Base1 = 16
if DefaultValue.upper().startswith('0X'):
Base2 = 16
try:
PcdValueInImg = int(Pcd.InfDefaultValue, Base1)
PcdValueInDscOrFdf = int(DefaultValue, Base2)
if PcdValueInImg == PcdValueInDscOrFdf:
continue
except:
continue
# Check the Pcd size and data type
if Pcd.DatumType == TAB_VOID:
if int(MaxDatumSize) > int(Pcd.MaxDatumSize):
                    EdkLogger.error("GenFds", GENFDS_ERROR, "The size of VOID* type PCD '%s.%s' exceeds its maximum size by %d bytes." \
% (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, int(MaxDatumSize) - int(Pcd.MaxDatumSize)))
else:
if PcdValueInDscOrFdf > MAX_VAL_TYPE[Pcd.DatumType] \
or PcdValueInImg > MAX_VAL_TYPE[Pcd.DatumType]:
EdkLogger.error("GenFds", GENFDS_ERROR, "The size of %s type PCD '%s.%s' doesn't match its data type." \
% (Pcd.DatumType, Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
self.PatchPcds.append((Pcd, DefaultValue))
self.InfModule = Inf
self.PcdIsDriver = Inf.PcdIsDriver
self.IsBinaryModule = Inf.IsBinaryModule
if len(Inf.Depex.data) > 0 and len(Inf.DepexExpression.data) > 0:
self.Depex = True
GenFdsGlobalVariable.VerboseLogger("BaseName : %s" % self.BaseName)
GenFdsGlobalVariable.VerboseLogger("ModuleGuid : %s" % self.ModuleGuid)
GenFdsGlobalVariable.VerboseLogger("ModuleType : %s" % self.ModuleType)
GenFdsGlobalVariable.VerboseLogger("VersionString : %s" % self.VersionString)
GenFdsGlobalVariable.VerboseLogger("InfFileName :%s" % self.InfFileName)
#
        # Set OutputPath = ${WorkSpace}\Build\Fv\Ffs\${ModuleGuid}${ModuleName}\
#
if IsGenFfs:
Rule = self.__GetRule__()
if GlobalData.gGuidPatternEnd.match(Rule.NameGuid):
self.ModuleGuid = Rule.NameGuid
self.OutputPath = os.path.join(GenFdsGlobalVariable.FfsDir, \
self.ModuleGuid + self.BaseName)
if not os.path.exists(self.OutputPath) :
os.makedirs(self.OutputPath)
self.EfiOutputPath, self.EfiDebugPath = self.__GetEFIOutPutPath__()
        GenFdsGlobalVariable.VerboseLogger( "ModuleEFIPath: " + self.EfiOutputPath)
## PatchEfiFile
#
# Patch EFI file with patch PCD
#
# @param EfiFile: EFI file needs to be patched.
# @retval: Full path of patched EFI file: self.OutputPath + EfiFile base name
    # If the module has no patchable PCDs, or the file is not a PE32 image for a patchable module type, the input file path is returned as is
#
def PatchEfiFile(self, EfiFile, FileType):
#
# If the module does not have any patches, then return path to input file
#
if not self.PatchPcds:
return EfiFile
#
# Only patch file if FileType is PE32 or ModuleType is USER_DEFINED
#
if FileType != BINARY_FILE_TYPE_PE32 and self.ModuleType != SUP_MODULE_USER_DEFINED and self.ModuleType != SUP_MODULE_HOST_APPLICATION:
return EfiFile
#
# Generate path to patched output file
#
Basename = os.path.basename(EfiFile)
Output = os.path.normpath (os.path.join(self.OutputPath, Basename))
#
# If this file has already been patched, then return the path to the patched file
#
if self.PatchedBinFile == Output:
return Output
#
# If a different file from the same module has already been patched, then generate an error
#
if self.PatchedBinFile:
EdkLogger.error("GenFds", GENFDS_ERROR,
'Only one binary file can be patched:\n'
' a binary file has been patched: %s\n'
' current file: %s' % (self.PatchedBinFile, EfiFile),
File=self.InfFileName)
#
# Copy unpatched file contents to output file location to perform patching
#
CopyLongFilePath(EfiFile, Output)
#
# Apply patches to patched output file
#
for Pcd, Value in self.PatchPcds:
RetVal, RetStr = PatchBinaryFile(Output, int(Pcd.Offset, 0), Pcd.DatumType, Value, Pcd.MaxDatumSize)
if RetVal:
EdkLogger.error("GenFds", GENFDS_ERROR, RetStr, File=self.InfFileName)
#
# Save the path of the patched output file
#
self.PatchedBinFile = Output
#
# Return path to patched output file
#
return Output
## GenFfs() method
#
# Generate FFS
#
# @param self The object pointer
# @param Dict dictionary contains macro and value pair
# @param FvChildAddr Array of the inside FvImage base address
# @param FvParentAddr Parent Fv base address
# @retval string Generated FFS file name
#
def GenFfs(self, Dict = None, FvChildAddr = [], FvParentAddr=None, IsMakefile=False, FvName=None):
#
# Parse Inf file get Module related information
#
if Dict is None:
Dict = {}
self.__InfParse__(Dict, IsGenFfs=True)
Arch = self.GetCurrentArch()
SrcFile = mws.join( GenFdsGlobalVariable.WorkSpaceDir, self.InfFileName);
DestFile = os.path.join( self.OutputPath, self.ModuleGuid + '.ffs')
SrcFileDir = "."
SrcPath = os.path.dirname(SrcFile)
SrcFileName = os.path.basename(SrcFile)
SrcFileBase, SrcFileExt = os.path.splitext(SrcFileName)
DestPath = os.path.dirname(DestFile)
DestFileName = os.path.basename(DestFile)
DestFileBase, DestFileExt = os.path.splitext(DestFileName)
self.MacroDict = {
# source file
"${src}" : SrcFile,
"${s_path}" : SrcPath,
"${s_dir}" : SrcFileDir,
"${s_name}" : SrcFileName,
"${s_base}" : SrcFileBase,
"${s_ext}" : SrcFileExt,
# destination file
"${dst}" : DestFile,
"${d_path}" : DestPath,
"${d_name}" : DestFileName,
"${d_base}" : DestFileBase,
"${d_ext}" : DestFileExt
}
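        # Illustrative expansion of the macros above (hypothetical module): for an INF
        # at MdeModulePkg/Foo/Foo.inf, ${s_name} is "Foo.inf", ${s_base} "Foo" and
        # ${s_ext} ".inf", while ${dst} points at <FfsDir>/<ModuleGuid>.ffs so that
        # ${d_base} is the module GUID and ${d_ext} is ".ffs".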
#
# Allow binary type module not specify override rule in FDF file.
#
if len(self.BinFileList) > 0:
if self.Rule is None or self.Rule == "":
self.Rule = "BINARY"
if not IsMakefile and GenFdsGlobalVariable.EnableGenfdsMultiThread and self.Rule != 'BINARY':
IsMakefile = True
#
# Get the rule of how to generate Ffs file
#
Rule = self.__GetRule__()
GenFdsGlobalVariable.VerboseLogger( "Packing binaries from inf file : %s" %self.InfFileName)
#
# Convert Fv File Type for PI1.1 SMM driver.
#
if self.ModuleType == SUP_MODULE_DXE_SMM_DRIVER and int(self.PiSpecVersion, 16) >= 0x0001000A:
if Rule.FvFileType == 'DRIVER':
Rule.FvFileType = 'SMM'
#
# Framework SMM Driver has no SMM FV file type
#
if self.ModuleType == SUP_MODULE_DXE_SMM_DRIVER and int(self.PiSpecVersion, 16) < 0x0001000A:
if Rule.FvFileType == 'SMM' or Rule.FvFileType == SUP_MODULE_SMM_CORE:
EdkLogger.error("GenFds", FORMAT_NOT_SUPPORTED, "Framework SMM module doesn't support SMM or SMM_CORE FV file type", File=self.InfFileName)
#
# For the rule only has simpleFile
#
MakefilePath = None
if self.IsBinaryModule:
IsMakefile = False
if IsMakefile:
PathClassObj = PathClass(self.InfFileName, GenFdsGlobalVariable.WorkSpaceDir)
if self.OverrideGuid:
PathClassObj = ProcessDuplicatedInf(PathClassObj, self.OverrideGuid, GenFdsGlobalVariable.WorkSpaceDir)
MakefilePath = PathClassObj.Path, Arch
if isinstance (Rule, RuleSimpleFile.RuleSimpleFile):
SectionOutputList = self.__GenSimpleFileSection__(Rule, IsMakefile=IsMakefile)
FfsOutput = self.__GenSimpleFileFfs__(Rule, SectionOutputList, MakefilePath=MakefilePath)
return FfsOutput
#
# For Rule has ComplexFile
#
elif isinstance(Rule, RuleComplexFile.RuleComplexFile):
InputSectList, InputSectAlignments = self.__GenComplexFileSection__(Rule, FvChildAddr, FvParentAddr, IsMakefile=IsMakefile)
FfsOutput = self.__GenComplexFileFfs__(Rule, InputSectList, InputSectAlignments, MakefilePath=MakefilePath)
return FfsOutput
## __ExtendMacro__() method
#
# Replace macro with its value
#
# @param self The object pointer
# @param String The string to be replaced
# @retval string Macro replaced string
#
def __ExtendMacro__ (self, String):
MacroDict = {
'$(INF_OUTPUT)' : self.EfiOutputPath,
'$(MODULE_NAME)' : self.BaseName,
'$(BUILD_NUMBER)': self.BuildNum,
'$(INF_VERSION)' : self.VersionString,
'$(NAMED_GUID)' : self.ModuleGuid
}
String = GenFdsGlobalVariable.MacroExtend(String, MacroDict)
String = GenFdsGlobalVariable.MacroExtend(String, self.MacroDict)
return String
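    # Illustrative use (hypothetical rule text): an FDF rule line such as
    #   PE32 PE32 $(INF_OUTPUT)/$(MODULE_NAME).efi
    # would have its file path expanded here to something like
    #   <build output dir>/.../OUTPUT/MyDriver.efi
    # before that file is handed to GenSection.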
## __GetRule__() method
#
# Get correct rule for generating FFS for this INF
#
# @param self The object pointer
# @retval Rule Rule object
#
def __GetRule__ (self) :
CurrentArchList = []
if self.CurrentArch is None:
CurrentArchList = ['common']
else:
CurrentArchList.append(self.CurrentArch)
for CurrentArch in CurrentArchList:
RuleName = 'RULE' + \
'.' + \
CurrentArch.upper() + \
'.' + \
self.ModuleType.upper()
if self.Rule is not None:
RuleName = RuleName + \
'.' + \
self.Rule.upper()
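            # Illustrative lookup keys built above (hypothetical values):
            #   RULE.X64.DXE_DRIVER           - no RuleOverride on the INF statement
            #   RULE.COMMON.PEIM.ACPITABLE    - INF statement written with "RuleOverride = ACPITABLE"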
Rule = GenFdsGlobalVariable.FdfParser.Profile.RuleDict.get(RuleName)
if Rule is not None:
                GenFdsGlobalVariable.VerboseLogger ("Found Rule Name : " + RuleName)
return Rule
RuleName = 'RULE' + \
'.' + \
TAB_COMMON + \
'.' + \
self.ModuleType.upper()
if self.Rule is not None:
RuleName = RuleName + \
'.' + \
self.Rule.upper()
GenFdsGlobalVariable.VerboseLogger ('Trying to apply common rule %s for INF %s' % (RuleName, self.InfFileName))
Rule = GenFdsGlobalVariable.FdfParser.Profile.RuleDict.get(RuleName)
if Rule is not None:
            GenFdsGlobalVariable.VerboseLogger ("Found Rule Name : " + RuleName)
return Rule
if Rule is None :
            EdkLogger.error("GenFds", GENFDS_ERROR, 'Cannot find common rule %s for INF %s' \
% (RuleName, self.InfFileName))
## __GetPlatformArchList__() method
#
    # Get the Arch list this INF is built under
#
# @param self The object pointer
# @retval list Arch list
#
def __GetPlatformArchList__(self):
InfFileKey = os.path.normpath(mws.join(GenFdsGlobalVariable.WorkSpaceDir, self.InfFileName))
DscArchList = []
for Arch in GenFdsGlobalVariable.ArchList :
PlatformDataBase = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
if PlatformDataBase is not None:
if InfFileKey in PlatformDataBase.Modules:
DscArchList.append (Arch)
else:
#
# BaseTools support build same module more than once, the module path with FILE_GUID overridden has
# the file name FILE_GUIDmodule.inf, then PlatformDataBase.Modules use FILE_GUIDmodule.inf as key,
# but the path (self.MetaFile.Path) is the real path
#
for key in PlatformDataBase.Modules:
if InfFileKey == str((PlatformDataBase.Modules[key]).MetaFile.Path):
DscArchList.append (Arch)
break
return DscArchList
## GetCurrentArch() method
#
    # Get the Arch of the module from this INF that is to be placed into flash
#
# @param self The object pointer
    # @retval string Arch of the module
#
def GetCurrentArch(self) :
TargetArchList = GenFdsGlobalVariable.ArchList
PlatformArchList = self.__GetPlatformArchList__()
CurArchList = TargetArchList
if PlatformArchList != []:
CurArchList = list(set (TargetArchList) & set (PlatformArchList))
        GenFdsGlobalVariable.VerboseLogger ("Valid target architecture(s): " + " ".join(CurArchList))
ArchList = []
if self.KeyStringList != []:
for Key in self.KeyStringList:
Key = GenFdsGlobalVariable.MacroExtend(Key)
Target, Tag, Arch = Key.split('_')
if Arch in CurArchList:
ArchList.append(Arch)
if Target not in self.TargetOverrideList:
self.TargetOverrideList.append(Target)
else:
ArchList = CurArchList
UseArchList = TargetArchList
if self.UseArch is not None:
UseArchList = []
UseArchList.append(self.UseArch)
ArchList = list(set (UseArchList) & set (ArchList))
self.InfFileName = NormPath(self.InfFileName)
if len(PlatformArchList) == 0:
self.InDsc = False
PathClassObj = PathClass(self.InfFileName, GenFdsGlobalVariable.WorkSpaceDir)
ErrorCode, ErrorInfo = PathClassObj.Validate(".inf")
if ErrorCode != 0:
EdkLogger.error("GenFds", ErrorCode, ExtraData=ErrorInfo)
if len(ArchList) == 1:
Arch = ArchList[0]
return Arch
elif len(ArchList) > 1:
if len(PlatformArchList) == 0:
EdkLogger.error("GenFds", GENFDS_ERROR, "GenFds command line option has multiple ARCHs %s. Not able to determine which ARCH is valid for Module %s !" % (str(ArchList), self.InfFileName))
else:
EdkLogger.error("GenFds", GENFDS_ERROR, "Module built under multiple ARCHs %s. Not able to determine which output to put into flash for Module %s !" % (str(ArchList), self.InfFileName))
else:
EdkLogger.error("GenFds", GENFDS_ERROR, "Module %s appears under ARCH %s in platform %s, but current deduced ARCH is %s, so NO build output could be put into flash." \
% (self.InfFileName, str(PlatformArchList), GenFdsGlobalVariable.ActivePlatform, str(set (UseArchList) & set (TargetArchList))))
## __GetEFIOutPutPath__() method
#
# Get the output path for generated files
#
# @param self The object pointer
# @retval string Path that output files from this INF go to
#
def __GetEFIOutPutPath__(self):
Arch = ''
OutputPath = ''
DebugPath = ''
(ModulePath, FileName) = os.path.split(self.InfFileName)
Index = FileName.rfind('.')
FileName = FileName[0:Index]
if self.OverrideGuid:
FileName = self.OverrideGuid
Arch = "NoneArch"
if self.CurrentArch is not None:
Arch = self.CurrentArch
OutputPath = os.path.join(GenFdsGlobalVariable.OutputDirDict[Arch],
Arch,
ModulePath,
FileName,
'OUTPUT'
)
DebugPath = os.path.join(GenFdsGlobalVariable.OutputDirDict[Arch],
Arch,
ModulePath,
FileName,
'DEBUG'
)
OutputPath = os.path.abspath(OutputPath)
DebugPath = os.path.abspath(DebugPath)
return OutputPath, DebugPath
## __GenSimpleFileSection__() method
#
# Generate section by specified file name or a list of files with file extension
#
# @param self The object pointer
# @param Rule The rule object used to generate section
# @retval string File name of the generated section file
#
def __GenSimpleFileSection__(self, Rule, IsMakefile = False):
#
# Prepare the parameter of GenSection
#
FileList = []
OutputFileList = []
GenSecInputFile = None
if Rule.FileName is not None:
GenSecInputFile = self.__ExtendMacro__(Rule.FileName)
if os.path.isabs(GenSecInputFile):
GenSecInputFile = os.path.normpath(GenSecInputFile)
else:
GenSecInputFile = os.path.normpath(os.path.join(self.EfiOutputPath, GenSecInputFile))
else:
FileList, IsSect = Section.Section.GetFileList(self, '', Rule.FileExtension)
Index = 1
SectionType = Rule.SectionType
#
# Convert Fv Section Type for PI1.1 SMM driver.
#
if self.ModuleType == SUP_MODULE_DXE_SMM_DRIVER and int(self.PiSpecVersion, 16) >= 0x0001000A:
if SectionType == BINARY_FILE_TYPE_DXE_DEPEX:
SectionType = BINARY_FILE_TYPE_SMM_DEPEX
#
# Framework SMM Driver has no SMM_DEPEX section type
#
if self.ModuleType == SUP_MODULE_DXE_SMM_DRIVER and int(self.PiSpecVersion, 16) < 0x0001000A:
if SectionType == BINARY_FILE_TYPE_SMM_DEPEX:
EdkLogger.error("GenFds", FORMAT_NOT_SUPPORTED, "Framework SMM module doesn't support SMM_DEPEX section type", File=self.InfFileName)
NoStrip = True
if self.ModuleType in (SUP_MODULE_SEC, SUP_MODULE_PEI_CORE, SUP_MODULE_PEIM):
if self.KeepReloc is not None:
NoStrip = self.KeepReloc
elif Rule.KeepReloc is not None:
NoStrip = Rule.KeepReloc
elif self.ShadowFromInfFile is not None:
NoStrip = self.ShadowFromInfFile
if FileList != [] :
for File in FileList:
SecNum = '%d' %Index
GenSecOutputFile= self.__ExtendMacro__(Rule.NameGuid) + \
SectionSuffix[SectionType] + SUP_MODULE_SEC + SecNum
Index = Index + 1
OutputFile = os.path.join(self.OutputPath, GenSecOutputFile)
File = GenFdsGlobalVariable.MacroExtend(File, Dict, self.CurrentArch)
#Get PE Section alignment when align is set to AUTO
if self.Alignment == 'Auto' and (SectionType == BINARY_FILE_TYPE_PE32 or SectionType == BINARY_FILE_TYPE_TE):
ImageObj = PeImageClass (File)
if ImageObj.SectionAlignment < 0x400:
self.Alignment = str (ImageObj.SectionAlignment)
elif ImageObj.SectionAlignment < 0x100000:
self.Alignment = str (ImageObj.SectionAlignment // 0x400) + 'K'
else:
self.Alignment = str (ImageObj.SectionAlignment // 0x100000) + 'M'
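                    # Illustrative conversions performed above (hypothetical alignments):
                    #   0x200    -> "512"
                    #   0x1000   -> "4K"
                    #   0x200000 -> "2M"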
if not NoStrip:
FileBeforeStrip = os.path.join(self.OutputPath, ModuleName + '.reloc')
if not os.path.exists(FileBeforeStrip) or \
(os.path.getmtime(File) > os.path.getmtime(FileBeforeStrip)):
CopyLongFilePath(File, FileBeforeStrip)
                    StrippedFile = os.path.join(self.OutputPath, ModuleName + '.stripped')
GenFdsGlobalVariable.GenerateFirmwareImage(
StrippedFile,
[File],
Strip=True,
IsMakefile=IsMakefile
)
File = StrippedFile
if SectionType == BINARY_FILE_TYPE_TE:
TeFile = os.path.join( self.OutputPath, self.ModuleGuid + 'Te.raw')
GenFdsGlobalVariable.GenerateFirmwareImage(
TeFile,
[File],
Type='te',
IsMakefile=IsMakefile
)
File = TeFile
GenFdsGlobalVariable.GenerateSection(OutputFile, [File], Section.Section.SectionType[SectionType], IsMakefile=IsMakefile)
OutputFileList.append(OutputFile)
else:
SecNum = '%d' %Index
GenSecOutputFile= self.__ExtendMacro__(Rule.NameGuid) + \
SectionSuffix[SectionType] + SUP_MODULE_SEC + SecNum
OutputFile = os.path.join(self.OutputPath, GenSecOutputFile)
GenSecInputFile = GenFdsGlobalVariable.MacroExtend(GenSecInputFile, Dict, self.CurrentArch)
#Get PE Section alignment when align is set to AUTO
if self.Alignment == 'Auto' and (SectionType == BINARY_FILE_TYPE_PE32 or SectionType == BINARY_FILE_TYPE_TE):
ImageObj = PeImageClass (GenSecInputFile)
if ImageObj.SectionAlignment < 0x400:
self.Alignment = str (ImageObj.SectionAlignment)
elif ImageObj.SectionAlignment < 0x100000:
self.Alignment = str (ImageObj.SectionAlignment // 0x400) + 'K'
else:
self.Alignment = str (ImageObj.SectionAlignment // 0x100000) + 'M'
if not NoStrip:
FileBeforeStrip = os.path.join(self.OutputPath, ModuleName + '.reloc')
if not os.path.exists(FileBeforeStrip) or \
(os.path.getmtime(GenSecInputFile) > os.path.getmtime(FileBeforeStrip)):
CopyLongFilePath(GenSecInputFile, FileBeforeStrip)
                StrippedFile = os.path.join(self.OutputPath, ModuleName + '.stripped')
GenFdsGlobalVariable.GenerateFirmwareImage(
StrippedFile,
[GenSecInputFile],
Strip=True,
IsMakefile=IsMakefile
)
GenSecInputFile = StrippedFile
if SectionType == BINARY_FILE_TYPE_TE:
TeFile = os.path.join( self.OutputPath, self.ModuleGuid + 'Te.raw')
GenFdsGlobalVariable.GenerateFirmwareImage(
TeFile,
[GenSecInputFile],
Type='te',
IsMakefile=IsMakefile
)
GenSecInputFile = TeFile
GenFdsGlobalVariable.GenerateSection(OutputFile, [GenSecInputFile], Section.Section.SectionType[SectionType], IsMakefile=IsMakefile)
OutputFileList.append(OutputFile)
return OutputFileList
## __GenSimpleFileFfs__() method
#
# Generate FFS
#
# @param self The object pointer
# @param Rule The rule object used to generate section
# @param InputFileList The output file list from GenSection
# @retval string Generated FFS file name
#
def __GenSimpleFileFfs__(self, Rule, InputFileList, MakefilePath = None):
FfsOutput = self.OutputPath + \
os.sep + \
self.__ExtendMacro__(Rule.NameGuid) + \
'.ffs'
GenFdsGlobalVariable.VerboseLogger(self.__ExtendMacro__(Rule.NameGuid))
InputSection = []
SectionAlignments = []
for InputFile in InputFileList:
InputSection.append(InputFile)
SectionAlignments.append(Rule.SectAlignment)
if Rule.NameGuid is not None and Rule.NameGuid.startswith('PCD('):
PcdValue = GenFdsGlobalVariable.GetPcdValue(Rule.NameGuid)
if len(PcdValue) == 0:
EdkLogger.error("GenFds", GENFDS_ERROR, '%s NOT defined.' \
% (Rule.NameGuid))
if PcdValue.startswith('{'):
PcdValue = GuidStructureByteArrayToGuidString(PcdValue)
RegistryGuidStr = PcdValue
if len(RegistryGuidStr) == 0:
                EdkLogger.error("GenFds", GENFDS_ERROR, 'GUID value for %s is in the wrong format.' \
% (Rule.NameGuid))
self.ModuleGuid = RegistryGuidStr
GenFdsGlobalVariable.GenerateFfs(FfsOutput, InputSection,
FdfFvFileTypeToFileType[Rule.FvFileType],
self.ModuleGuid, Fixed=Rule.Fixed,
CheckSum=Rule.CheckSum, Align=Rule.Alignment,
SectionAlign=SectionAlignments,
MakefilePath=MakefilePath
)
return FfsOutput
## __GenComplexFileSection__() method
#
# Generate section by sections in Rule
#
# @param self The object pointer
# @param Rule The rule object used to generate section
# @param FvChildAddr Array of the inside FvImage base address
# @param FvParentAddr Parent Fv base address
# @retval string File name of the generated section file
#
def __GenComplexFileSection__(self, Rule, FvChildAddr, FvParentAddr, IsMakefile = False):
if self.ModuleType in (SUP_MODULE_SEC, SUP_MODULE_PEI_CORE, SUP_MODULE_PEIM, SUP_MODULE_MM_CORE_STANDALONE):
if Rule.KeepReloc is not None:
self.KeepRelocFromRule = Rule.KeepReloc
SectFiles = []
SectAlignments = []
Index = 1
HasGeneratedFlag = False
if self.PcdIsDriver == 'PEI_PCD_DRIVER':
if self.IsBinaryModule:
PcdExDbFileName = os.path.join(GenFdsGlobalVariable.FvDir, "PEIPcdDataBase.raw")
else:
PcdExDbFileName = os.path.join(self.EfiOutputPath, "PEIPcdDataBase.raw")
PcdExDbSecName = os.path.join(self.OutputPath, "PEIPcdDataBaseSec.raw")
GenFdsGlobalVariable.GenerateSection(PcdExDbSecName,
[PcdExDbFileName],
"EFI_SECTION_RAW",
IsMakefile = IsMakefile
)
SectFiles.append(PcdExDbSecName)
SectAlignments.append(None)
elif self.PcdIsDriver == 'DXE_PCD_DRIVER':
if self.IsBinaryModule:
PcdExDbFileName = os.path.join(GenFdsGlobalVariable.FvDir, "DXEPcdDataBase.raw")
else:
PcdExDbFileName = os.path.join(self.EfiOutputPath, "DXEPcdDataBase.raw")
PcdExDbSecName = os.path.join(self.OutputPath, "DXEPcdDataBaseSec.raw")
GenFdsGlobalVariable.GenerateSection(PcdExDbSecName,
[PcdExDbFileName],
"EFI_SECTION_RAW",
IsMakefile = IsMakefile
)
SectFiles.append(PcdExDbSecName)
SectAlignments.append(None)
for Sect in Rule.SectionList:
SecIndex = '%d' %Index
SectList = []
#
# Convert Fv Section Type for PI1.1 SMM driver.
#
if self.ModuleType == SUP_MODULE_DXE_SMM_DRIVER and int(self.PiSpecVersion, 16) >= 0x0001000A:
if Sect.SectionType == BINARY_FILE_TYPE_DXE_DEPEX:
Sect.SectionType = BINARY_FILE_TYPE_SMM_DEPEX
#
# Framework SMM Driver has no SMM_DEPEX section type
#
if self.ModuleType == SUP_MODULE_DXE_SMM_DRIVER and int(self.PiSpecVersion, 16) < 0x0001000A:
if Sect.SectionType == BINARY_FILE_TYPE_SMM_DEPEX:
EdkLogger.error("GenFds", FORMAT_NOT_SUPPORTED, "Framework SMM module doesn't support SMM_DEPEX section type", File=self.InfFileName)
#
# process the inside FvImage from FvSection or GuidSection
#
if FvChildAddr != []:
if isinstance(Sect, FvImageSection):
Sect.FvAddr = FvChildAddr.pop(0)
elif isinstance(Sect, GuidSection):
Sect.FvAddr = FvChildAddr
if FvParentAddr is not None and isinstance(Sect, GuidSection):
Sect.FvParentAddr = FvParentAddr
if Rule.KeyStringList != []:
SectList, Align = Sect.GenSection(self.OutputPath, self.ModuleGuid, SecIndex, Rule.KeyStringList, self, IsMakefile = IsMakefile)
else :
SectList, Align = Sect.GenSection(self.OutputPath, self.ModuleGuid, SecIndex, self.KeyStringList, self, IsMakefile = IsMakefile)
if not HasGeneratedFlag:
UniVfrOffsetFileSection = ""
ModuleFileName = mws.join(GenFdsGlobalVariable.WorkSpaceDir, self.InfFileName)
InfData = GenFdsGlobalVariable.WorkSpace.BuildObject[PathClass(ModuleFileName), self.CurrentArch]
#
                # Search the source list in InfData to check whether any .vfr files exist.
#
VfrUniBaseName = {}
VfrUniOffsetList = []
for SourceFile in InfData.Sources:
if SourceFile.Type.upper() == ".VFR" :
#
# search the .map file to find the offset of vfr binary in the PE32+/TE file.
#
VfrUniBaseName[SourceFile.BaseName] = (SourceFile.BaseName + "Bin")
if SourceFile.Type.upper() == ".UNI" :
#
# search the .map file to find the offset of Uni strings binary in the PE32+/TE file.
#
VfrUniBaseName["UniOffsetName"] = (self.BaseName + "Strings")
if len(VfrUniBaseName) > 0:
if IsMakefile:
if InfData.BuildType != 'UEFI_HII':
UniVfrOffsetFileName = os.path.join(self.OutputPath, self.BaseName + '.offset')
UniVfrOffsetFileSection = os.path.join(self.OutputPath, self.BaseName + 'Offset' + '.raw')
UniVfrOffsetFileNameList = []
UniVfrOffsetFileNameList.append(UniVfrOffsetFileName)
TrimCmd = "Trim --Vfr-Uni-Offset -o %s --ModuleName=%s --DebugDir=%s " % (UniVfrOffsetFileName, self.BaseName, self.EfiDebugPath)
GenFdsGlobalVariable.SecCmdList.append(TrimCmd)
GenFdsGlobalVariable.GenerateSection(UniVfrOffsetFileSection,
[UniVfrOffsetFileName],
"EFI_SECTION_RAW",
IsMakefile = True
)
else:
VfrUniOffsetList = self.__GetBuildOutputMapFileVfrUniInfo(VfrUniBaseName)
#
# Generate the Raw data of raw section
#
if VfrUniOffsetList:
UniVfrOffsetFileName = os.path.join(self.OutputPath, self.BaseName + '.offset')
UniVfrOffsetFileSection = os.path.join(self.OutputPath, self.BaseName + 'Offset' + '.raw')
FfsInfStatement.__GenUniVfrOffsetFile (VfrUniOffsetList, UniVfrOffsetFileName)
UniVfrOffsetFileNameList = []
UniVfrOffsetFileNameList.append(UniVfrOffsetFileName)
"""Call GenSection"""
GenFdsGlobalVariable.GenerateSection(UniVfrOffsetFileSection,
UniVfrOffsetFileNameList,
"EFI_SECTION_RAW"
)
#os.remove(UniVfrOffsetFileName)
if UniVfrOffsetFileSection:
SectList.append(UniVfrOffsetFileSection)
HasGeneratedFlag = True
for SecName in SectList :
SectFiles.append(SecName)
SectAlignments.append(Align)
Index = Index + 1
return SectFiles, SectAlignments
## __GenComplexFileFfs__() method
#
# Generate FFS
#
# @param self The object pointer
# @param Rule The rule object used to generate section
# @param InputFileList The output file list from GenSection
# @retval string Generated FFS file name
#
def __GenComplexFileFfs__(self, Rule, InputFile, Alignments, MakefilePath = None):
if Rule.NameGuid is not None and Rule.NameGuid.startswith('PCD('):
PcdValue = GenFdsGlobalVariable.GetPcdValue(Rule.NameGuid)
if len(PcdValue) == 0:
EdkLogger.error("GenFds", GENFDS_ERROR, '%s NOT defined.' \
% (Rule.NameGuid))
if PcdValue.startswith('{'):
PcdValue = GuidStructureByteArrayToGuidString(PcdValue)
RegistryGuidStr = PcdValue
if len(RegistryGuidStr) == 0:
                EdkLogger.error("GenFds", GENFDS_ERROR, 'GUID value for %s is in the wrong format.' \
% (Rule.NameGuid))
self.ModuleGuid = RegistryGuidStr
FfsOutput = os.path.join( self.OutputPath, self.ModuleGuid + '.ffs')
GenFdsGlobalVariable.GenerateFfs(FfsOutput, InputFile,
FdfFvFileTypeToFileType[Rule.FvFileType],
self.ModuleGuid, Fixed=Rule.Fixed,
CheckSum=Rule.CheckSum, Align=Rule.Alignment,
SectionAlign=Alignments,
MakefilePath=MakefilePath
)
return FfsOutput
## __GetBuildOutputMapFileVfrUniInfo() method
#
    # Find the offsets of the UNI/VFR objects in the EFI image file.
#
# @param self The object pointer
    # @param VfrUniBaseName A dictionary containing the UNI/VFR object names.
    # @retval RetValue A list containing the offsets of the UNI/VFR objects.
#
def __GetBuildOutputMapFileVfrUniInfo(self, VfrUniBaseName):
MapFileName = os.path.join(self.EfiOutputPath, self.BaseName + ".map")
EfiFileName = os.path.join(self.EfiOutputPath, self.BaseName + ".efi")
return GetVariableOffset(MapFileName, EfiFileName, list(VfrUniBaseName.values()))
## __GenUniVfrOffsetFile() method
#
    # Generate the offset file for the module which contains VFR or UNI files.
    #
    # @param VfrUniOffsetList A list containing the VFR/UNI offsets in the EFI image file.
# @param UniVfrOffsetFileName The output offset file name.
#
@staticmethod
def __GenUniVfrOffsetFile(VfrUniOffsetList, UniVfrOffsetFileName):
        # Use an instance of BytesIO to cache data
fStringIO = BytesIO()
for Item in VfrUniOffsetList:
if (Item[0].find("Strings") != -1):
#
# UNI offset in image.
# GUID + Offset
# { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
#
UniGuid = b'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'
fStringIO.write(UniGuid)
UniValue = pack ('Q', int (Item[1], 16))
fStringIO.write (UniValue)
else:
#
# VFR binary offset in image.
# GUID + Offset
# { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
#
VfrGuid = b'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'
fStringIO.write(VfrGuid)
VfrValue = pack ('Q', int (Item[1], 16))
fStringIO.write (VfrValue)
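        # Illustrative record layout (hypothetical offset): each entry written above is
        # 24 bytes - a 16-byte GUID followed by a 64-bit offset packed in native
        # (typically little-endian) byte order, so an offset of 0x2340 on an x86 host
        # is emitted as 40 23 00 00 00 00 00 00 right after its GUID.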
#
# write data into file.
#
try :
SaveFileOnChange(UniVfrOffsetFileName, fStringIO.getvalue())
except:
            EdkLogger.error("GenFds", FILE_WRITE_FAILURE, "Failed to write data to file %s; please check whether the file is locked or in use by another application." %UniVfrOffsetFileName, None)
fStringIO.close ()
| edk2-master | BaseTools/Source/Python/GenFds/FfsInfStatement.py |
## @file
# process APRIORI file data and generate PEI/DXE APRIORI file
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
from struct import pack
import Common.LongFilePathOs as os
from io import BytesIO
from .FfsFileStatement import FileStatement
from .GenFdsGlobalVariable import GenFdsGlobalVariable
from Common.StringUtils import NormPath
from Common.Misc import SaveFileOnChange, PathClass
from Common.EdkLogger import error as EdkLoggerError
from Common.BuildToolError import RESOURCE_NOT_AVAILABLE
from Common.DataType import TAB_COMMON
DXE_APRIORI_GUID = "FC510EE7-FFDC-11D4-BD41-0080C73C8881"
PEI_APRIORI_GUID = "1B45CC0A-156A-428A-AF62-49864DA0E6E6"
## process APRIORI file data and generate PEI/DXE APRIORI file
#
#
class AprioriSection (object):
## The constructor
#
# @param self The object pointer
#
def __init__(self):
self.DefineVarDict = {}
self.FfsList = []
self.AprioriType = ""
## GenFfs() method
#
# Generate FFS for APRIORI file
#
# @param self The object pointer
    # @param FvName The FV for which the apriori file is generated
# @param Dict dictionary contains macro and its value
# @retval string Generated file name
#
def GenFfs (self, FvName, Dict = None, IsMakefile = False):
if Dict is None:
Dict = {}
Buffer = BytesIO()
if self.AprioriType == "PEI":
AprioriFileGuid = PEI_APRIORI_GUID
else:
AprioriFileGuid = DXE_APRIORI_GUID
OutputAprFilePath = os.path.join (GenFdsGlobalVariable.WorkSpaceDir, \
GenFdsGlobalVariable.FfsDir,\
AprioriFileGuid + FvName)
if not os.path.exists(OutputAprFilePath):
os.makedirs(OutputAprFilePath)
OutputAprFileName = os.path.join( OutputAprFilePath, \
AprioriFileGuid + FvName + '.Apri' )
AprFfsFileName = os.path.join (OutputAprFilePath,\
AprioriFileGuid + FvName + '.Ffs')
Dict.update(self.DefineVarDict)
InfFileName = None
for FfsObj in self.FfsList:
Guid = ""
if isinstance(FfsObj, FileStatement):
Guid = FfsObj.NameGuid
else:
InfFileName = NormPath(FfsObj.InfFileName)
Arch = FfsObj.GetCurrentArch()
if Arch:
Dict['$(ARCH)'] = Arch
InfFileName = GenFdsGlobalVariable.MacroExtend(InfFileName, Dict, Arch)
if Arch:
Inf = GenFdsGlobalVariable.WorkSpace.BuildObject[PathClass(InfFileName, GenFdsGlobalVariable.WorkSpaceDir), Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
Guid = Inf.Guid
else:
Inf = GenFdsGlobalVariable.WorkSpace.BuildObject[PathClass(InfFileName, GenFdsGlobalVariable.WorkSpaceDir), TAB_COMMON, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
Guid = Inf.Guid
if not Inf.Module.Binaries:
EdkLoggerError("GenFds", RESOURCE_NOT_AVAILABLE,
"INF %s not found in build ARCH %s!" \
% (InfFileName, GenFdsGlobalVariable.ArchList))
GuidPart = Guid.split('-')
Buffer.write(pack('I', int(GuidPart[0], 16)))
Buffer.write(pack('H', int(GuidPart[1], 16)))
Buffer.write(pack('H', int(GuidPart[2], 16)))
for Num in range(2):
Char = GuidPart[3][Num*2:Num*2+2]
Buffer.write(pack('B', int(Char, 16)))
for Num in range(6):
Char = GuidPart[4][Num*2:Num*2+2]
Buffer.write(pack('B', int(Char, 16)))
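            # Illustrative serialization (using the PEI apriori GUID purely as sample
            # input): on a little-endian host, "1B45CC0A-156A-428A-AF62-49864DA0E6E6"
            # is written field by field as
            #   0A CC 45 1B  6A 15  8A 42  AF 62  49 86 4D A0 E6 E6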
SaveFileOnChange(OutputAprFileName, Buffer.getvalue())
RawSectionFileName = os.path.join( OutputAprFilePath, \
AprioriFileGuid + FvName + '.raw' )
MakefilePath = None
if IsMakefile:
if not InfFileName:
return None
MakefilePath = InfFileName, Arch
GenFdsGlobalVariable.GenerateSection(RawSectionFileName, [OutputAprFileName], 'EFI_SECTION_RAW', IsMakefile=IsMakefile)
GenFdsGlobalVariable.GenerateFfs(AprFfsFileName, [RawSectionFileName],
'EFI_FV_FILETYPE_FREEFORM', AprioriFileGuid, MakefilePath=MakefilePath)
return AprFfsFileName
| edk2-master | BaseTools/Source/Python/GenFds/AprioriSection.py |
## @file
# process OptionROM generation from INF statement
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
from . import RuleSimpleFile
from . import RuleComplexFile
from . import Section
import Common.GlobalData as GlobalData
from Common.DataType import *
from Common.StringUtils import *
from .FfsInfStatement import FfsInfStatement
from .GenFdsGlobalVariable import GenFdsGlobalVariable
##
#
#
class OptRomInfStatement (FfsInfStatement):
## The constructor
#
# @param self The object pointer
#
def __init__(self):
FfsInfStatement.__init__(self)
self.OverrideAttribs = None
## __GetOptRomParams() method
#
# Parse inf file to get option ROM related parameters
#
# @param self The object pointer
#
def __GetOptRomParams(self):
if self.OverrideAttribs is None:
self.OverrideAttribs = OverrideAttribs()
if self.OverrideAttribs.NeedCompress is None:
self.OverrideAttribs.NeedCompress = self.OptRomDefs.get ('PCI_COMPRESS')
if self.OverrideAttribs.NeedCompress is not None:
if self.OverrideAttribs.NeedCompress.upper() not in ('TRUE', 'FALSE'):
GenFdsGlobalVariable.ErrorLogger( "Expected TRUE/FALSE for PCI_COMPRESS: %s" %self.InfFileName)
self.OverrideAttribs.NeedCompress = \
self.OverrideAttribs.NeedCompress.upper() == 'TRUE'
if self.OverrideAttribs.PciVendorId is None:
self.OverrideAttribs.PciVendorId = self.OptRomDefs.get ('PCI_VENDOR_ID')
if self.OverrideAttribs.PciClassCode is None:
self.OverrideAttribs.PciClassCode = self.OptRomDefs.get ('PCI_CLASS_CODE')
if self.OverrideAttribs.PciDeviceId is None:
self.OverrideAttribs.PciDeviceId = self.OptRomDefs.get ('PCI_DEVICE_ID')
if self.OverrideAttribs.PciRevision is None:
self.OverrideAttribs.PciRevision = self.OptRomDefs.get ('PCI_REVISION')
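        # Illustrative INF [Defines] entries that would feed OptRomDefs here
        # (hypothetical values):
        #   PCI_VENDOR_ID  = 0x8086
        #   PCI_DEVICE_ID  = 0x1234
        #   PCI_CLASS_CODE = 0x020000
        #   PCI_REVISION   = 0x01
        #   PCI_COMPRESS   = TRUE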
# InfObj = GenFdsGlobalVariable.WorkSpace.BuildObject[self.PathClassObj, self.CurrentArch]
# RecordList = InfObj._RawData[MODEL_META_DATA_HEADER, InfObj._Arch, InfObj._Platform]
# for Record in RecordList:
# Record = ReplaceMacros(Record, GlobalData.gEdkGlobal, False)
# Name = Record[0]
## GenFfs() method
#
# Generate FFS
#
# @param self The object pointer
# @retval string Generated .efi file name
#
def GenFfs(self, IsMakefile=False):
#
# Parse Inf file get Module related information
#
self.__InfParse__()
self.__GetOptRomParams()
#
# Get the rule of how to generate Ffs file
#
Rule = self.__GetRule__()
GenFdsGlobalVariable.VerboseLogger( "Packing binaries from inf file : %s" %self.InfFileName)
#
# For the rule only has simpleFile
#
if isinstance (Rule, RuleSimpleFile.RuleSimpleFile) :
EfiOutputList = self.__GenSimpleFileSection__(Rule, IsMakefile=IsMakefile)
return EfiOutputList
#
# For Rule has ComplexFile
#
elif isinstance(Rule, RuleComplexFile.RuleComplexFile):
EfiOutputList = self.__GenComplexFileSection__(Rule, IsMakefile=IsMakefile)
return EfiOutputList
## __GenSimpleFileSection__() method
#
# Get .efi files according to simple rule.
#
# @param self The object pointer
# @param Rule The rule object used to generate section
# @retval string File name of the generated section file
#
def __GenSimpleFileSection__(self, Rule, IsMakefile = False):
#
# Prepare the parameter of GenSection
#
OutputFileList = []
if Rule.FileName is not None:
GenSecInputFile = self.__ExtendMacro__(Rule.FileName)
OutputFileList.append(GenSecInputFile)
else:
OutputFileList, IsSect = Section.Section.GetFileList(self, '', Rule.FileExtension)
return OutputFileList
## __GenComplexFileSection__() method
#
# Get .efi by sections in complex Rule
#
# @param self The object pointer
# @param Rule The rule object used to generate section
# @retval string File name of the generated section file
#
def __GenComplexFileSection__(self, Rule, IsMakefile=False):
OutputFileList = []
for Sect in Rule.SectionList:
if Sect.SectionType == BINARY_FILE_TYPE_PE32:
if Sect.FileName is not None:
GenSecInputFile = self.__ExtendMacro__(Sect.FileName)
OutputFileList.append(GenSecInputFile)
else:
FileList, IsSect = Section.Section.GetFileList(self, '', Sect.FileExtension)
OutputFileList.extend(FileList)
return OutputFileList
class OverrideAttribs:
## The constructor
#
# @param self The object pointer
#
def __init__(self):
self.PciVendorId = None
self.PciClassCode = None
self.PciDeviceId = None
self.PciRevision = None
self.NeedCompress = None
| edk2-master | BaseTools/Source/Python/GenFds/OptRomInfStatement.py |
## @file
# Rule object for generating FFS
#
# Copyright (c) 2007, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from CommonDataClass.FdfClass import RuleClassObject
## Rule base class
#
#
class Rule(RuleClassObject):
## The constructor
#
# @param self The object pointer
#
def __init__(self):
RuleClassObject.__init__(self)
| edk2-master | BaseTools/Source/Python/GenFds/Rule.py |
## @file
# Simple Rule object for generating FFS
#
# Copyright (c) 2007, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
from . import Rule
from CommonDataClass.FdfClass import RuleSimpleFileClassObject
## simple rule
#
#
class RuleSimpleFile (RuleSimpleFileClassObject) :
## The constructor
#
# @param self The object pointer
#
def __init__(self):
RuleSimpleFileClassObject.__init__(self)
| edk2-master | BaseTools/Source/Python/GenFds/RuleSimpleFile.py |
## @file
# process rule section generation
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
from struct import *
from . import Section
from .GenFdsGlobalVariable import GenFdsGlobalVariable
import subprocess
from .Ffs import SectionSuffix
import Common.LongFilePathOs as os
from CommonDataClass.FdfClass import EfiSectionClassObject
from Common import EdkLogger
from Common.BuildToolError import *
from Common.Misc import PeImageClass
from Common.LongFilePathSupport import OpenLongFilePath as open
from Common.LongFilePathSupport import CopyLongFilePath
from Common.DataType import *
## generate rule section
#
#
class EfiSection (EfiSectionClassObject):
## The constructor
#
# @param self The object pointer
#
def __init__(self):
EfiSectionClassObject.__init__(self)
## GenSection() method
#
# Generate rule section
#
# @param self The object pointer
# @param OutputPath Where to place output file
# @param ModuleName Which module this section belongs to
# @param SecNum Index of section
# @param KeyStringList Filter for inputs of section generation
# @param FfsInf FfsInfStatement object that contains this section data
# @param Dict dictionary contains macro and its value
# @retval tuple (Generated file name list, section alignment)
#
def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf = None, Dict = None, IsMakefile = False) :
if self.FileName is not None and self.FileName.startswith('PCD('):
self.FileName = GenFdsGlobalVariable.GetPcdValue(self.FileName)
"""Prepare the parameter of GenSection"""
if FfsInf is not None :
InfFileName = FfsInf.InfFileName
SectionType = FfsInf.__ExtendMacro__(self.SectionType)
Filename = FfsInf.__ExtendMacro__(self.FileName)
BuildNum = FfsInf.__ExtendMacro__(self.BuildNum)
StringData = FfsInf.__ExtendMacro__(self.StringData)
ModuleNameStr = FfsInf.__ExtendMacro__('$(MODULE_NAME)')
NoStrip = True
if FfsInf.ModuleType in (SUP_MODULE_SEC, SUP_MODULE_PEI_CORE, SUP_MODULE_PEIM, SUP_MODULE_MM_CORE_STANDALONE) and SectionType in (BINARY_FILE_TYPE_TE, BINARY_FILE_TYPE_PE32):
if FfsInf.KeepReloc is not None:
NoStrip = FfsInf.KeepReloc
elif FfsInf.KeepRelocFromRule is not None:
NoStrip = FfsInf.KeepRelocFromRule
elif self.KeepReloc is not None:
NoStrip = self.KeepReloc
elif FfsInf.ShadowFromInfFile is not None:
NoStrip = FfsInf.ShadowFromInfFile
else:
            EdkLogger.error("GenFds", GENFDS_ERROR, "Module %s applies a rule but no FfsInf object was provided!" %ModuleName)
"""If the file name was pointed out, add it in FileList"""
FileList = []
if Dict is None:
Dict = {}
if Filename is not None:
Filename = GenFdsGlobalVariable.MacroExtend(Filename, Dict)
# check if the path is absolute or relative
if os.path.isabs(Filename):
Filename = os.path.normpath(Filename)
else:
Filename = os.path.normpath(os.path.join(FfsInf.EfiOutputPath, Filename))
if not self.Optional:
FileList.append(Filename)
elif os.path.exists(Filename):
FileList.append(Filename)
elif IsMakefile:
SuffixMap = FfsInf.GetFinalTargetSuffixMap()
if '.depex' in SuffixMap:
FileList.append(Filename)
else:
FileList, IsSect = Section.Section.GetFileList(FfsInf, self.FileType, self.FileExtension, Dict, IsMakefile=IsMakefile, SectionType=SectionType)
if IsSect :
return FileList, self.Alignment
Index = 0
Align = self.Alignment
""" If Section type is 'VERSION'"""
OutputFileList = []
if SectionType == 'VERSION':
InfOverrideVerString = False
if FfsInf.Version is not None:
#StringData = FfsInf.Version
BuildNum = FfsInf.Version
InfOverrideVerString = True
if InfOverrideVerString:
#VerTuple = ('-n', '"' + StringData + '"')
if BuildNum is not None and BuildNum != '':
BuildNumTuple = ('-j', BuildNum)
else:
BuildNumTuple = tuple()
Num = SecNum
OutputFile = os.path.join( OutputPath, ModuleName + SUP_MODULE_SEC + str(Num) + SectionSuffix.get(SectionType))
GenFdsGlobalVariable.GenerateSection(OutputFile, [], 'EFI_SECTION_VERSION',
#Ui=StringData,
Ver=BuildNum,
IsMakefile=IsMakefile)
OutputFileList.append(OutputFile)
elif FileList != []:
for File in FileList:
Index = Index + 1
Num = '%s.%d' %(SecNum, Index)
OutputFile = os.path.join(OutputPath, ModuleName + SUP_MODULE_SEC + Num + SectionSuffix.get(SectionType))
f = open(File, 'r')
VerString = f.read()
f.close()
BuildNum = VerString
if BuildNum is not None and BuildNum != '':
BuildNumTuple = ('-j', BuildNum)
GenFdsGlobalVariable.GenerateSection(OutputFile, [], 'EFI_SECTION_VERSION',
#Ui=VerString,
Ver=BuildNum,
IsMakefile=IsMakefile)
OutputFileList.append(OutputFile)
else:
BuildNum = StringData
if BuildNum is not None and BuildNum != '':
BuildNumTuple = ('-j', BuildNum)
else:
BuildNumTuple = tuple()
BuildNumString = ' ' + ' '.join(BuildNumTuple)
#if VerString == '' and
if BuildNumString == '':
if self.Optional == True :
                        GenFdsGlobalVariable.VerboseLogger( "Optional Section doesn't exist!")
return [], None
else:
                        EdkLogger.error("GenFds", GENFDS_ERROR, "File: %s is missing Version Section value" %InfFileName)
Num = SecNum
OutputFile = os.path.join( OutputPath, ModuleName + SUP_MODULE_SEC + str(Num) + SectionSuffix.get(SectionType))
GenFdsGlobalVariable.GenerateSection(OutputFile, [], 'EFI_SECTION_VERSION',
#Ui=VerString,
Ver=BuildNum,
IsMakefile=IsMakefile)
OutputFileList.append(OutputFile)
#
# If Section Type is BINARY_FILE_TYPE_UI
#
elif SectionType == BINARY_FILE_TYPE_UI:
InfOverrideUiString = False
if FfsInf.Ui is not None:
StringData = FfsInf.Ui
InfOverrideUiString = True
if InfOverrideUiString:
Num = SecNum
if IsMakefile and StringData == ModuleNameStr:
StringData = "$(MODULE_NAME)"
OutputFile = os.path.join( OutputPath, ModuleName + SUP_MODULE_SEC + str(Num) + SectionSuffix.get(SectionType))
GenFdsGlobalVariable.GenerateSection(OutputFile, [], 'EFI_SECTION_USER_INTERFACE',
Ui=StringData, IsMakefile=IsMakefile)
OutputFileList.append(OutputFile)
elif FileList != []:
for File in FileList:
Index = Index + 1
Num = '%s.%d' %(SecNum, Index)
OutputFile = os.path.join(OutputPath, ModuleName + SUP_MODULE_SEC + Num + SectionSuffix.get(SectionType))
f = open(File, 'r')
UiString = f.read()
f.close()
if IsMakefile and UiString == ModuleNameStr:
UiString = "$(MODULE_NAME)"
GenFdsGlobalVariable.GenerateSection(OutputFile, [], 'EFI_SECTION_USER_INTERFACE',
Ui=UiString, IsMakefile=IsMakefile)
OutputFileList.append(OutputFile)
else:
if StringData is not None and len(StringData) > 0:
UiTuple = ('-n', '"' + StringData + '"')
else:
UiTuple = tuple()
if self.Optional == True :
GenFdsGlobalVariable.VerboseLogger( "Optional Section don't exist!")
return '', None
else:
EdkLogger.error("GenFds", GENFDS_ERROR, "File: %s miss UI Section value" %InfFileName)
Num = SecNum
if IsMakefile and StringData == ModuleNameStr:
StringData = "$(MODULE_NAME)"
OutputFile = os.path.join( OutputPath, ModuleName + SUP_MODULE_SEC + str(Num) + SectionSuffix.get(SectionType))
GenFdsGlobalVariable.GenerateSection(OutputFile, [], 'EFI_SECTION_USER_INTERFACE',
Ui=StringData, IsMakefile=IsMakefile)
OutputFileList.append(OutputFile)
#
# If Section Type is BINARY_FILE_TYPE_RAW
#
elif SectionType == BINARY_FILE_TYPE_RAW:
"""If File List is empty"""
if FileList == []:
if self.Optional == True:
                    GenFdsGlobalVariable.VerboseLogger("Optional Section doesn't exist!")
return [], None
else:
EdkLogger.error("GenFds", GENFDS_ERROR, "Output file for %s section could not be found for %s" % (SectionType, InfFileName))
elif len(FileList) > 1:
EdkLogger.error("GenFds", GENFDS_ERROR,
"Files suffixed with %s are not allowed to have more than one file in %s[Binaries] section" % (
self.FileExtension, InfFileName))
else:
for File in FileList:
File = GenFdsGlobalVariable.MacroExtend(File, Dict)
OutputFileList.append(File)
else:
"""If File List is empty"""
if FileList == [] :
if self.Optional == True:
                    GenFdsGlobalVariable.VerboseLogger("Optional Section doesn't exist!")
return [], None
else:
EdkLogger.error("GenFds", GENFDS_ERROR, "Output file for %s section could not be found for %s" % (SectionType, InfFileName))
else:
"""Convert the File to Section file one by one """
for File in FileList:
""" Copy Map file to FFS output path """
Index = Index + 1
Num = '%s.%d' %(SecNum, Index)
OutputFile = os.path.join( OutputPath, ModuleName + SUP_MODULE_SEC + Num + SectionSuffix.get(SectionType))
File = GenFdsGlobalVariable.MacroExtend(File, Dict)
#Get PE Section alignment when align is set to AUTO
if self.Alignment == 'Auto' and (SectionType == BINARY_FILE_TYPE_PE32 or SectionType == BINARY_FILE_TYPE_TE):
Align = "0"
if File[(len(File)-4):] == '.efi' and FfsInf.InfModule.BaseName == os.path.basename(File)[:-4]:
MapFile = File.replace('.efi', '.map')
CopyMapFile = os.path.join(OutputPath, ModuleName + '.map')
if IsMakefile:
if GenFdsGlobalVariable.CopyList == []:
GenFdsGlobalVariable.CopyList = [(MapFile, CopyMapFile)]
else:
GenFdsGlobalVariable.CopyList.append((MapFile, CopyMapFile))
else:
if os.path.exists(MapFile):
if not os.path.exists(CopyMapFile) or \
(os.path.getmtime(MapFile) > os.path.getmtime(CopyMapFile)):
CopyLongFilePath(MapFile, CopyMapFile)
if not NoStrip:
FileBeforeStrip = os.path.join(OutputPath, ModuleName + '.efi')
if IsMakefile:
if GenFdsGlobalVariable.CopyList == []:
GenFdsGlobalVariable.CopyList = [(File, FileBeforeStrip)]
else:
GenFdsGlobalVariable.CopyList.append((File, FileBeforeStrip))
else:
if not os.path.exists(FileBeforeStrip) or \
(os.path.getmtime(File) > os.path.getmtime(FileBeforeStrip)):
CopyLongFilePath(File, FileBeforeStrip)
StrippedFile = os.path.join(OutputPath, ModuleName + '.stripped')
GenFdsGlobalVariable.GenerateFirmwareImage(
StrippedFile,
[File],
Strip=True,
IsMakefile = IsMakefile
)
File = StrippedFile
"""For TE Section call GenFw to generate TE image"""
if SectionType == BINARY_FILE_TYPE_TE:
TeFile = os.path.join( OutputPath, ModuleName + 'Te.raw')
GenFdsGlobalVariable.GenerateFirmwareImage(
TeFile,
[File],
Type='te',
IsMakefile = IsMakefile
)
File = TeFile
"""Call GenSection"""
GenFdsGlobalVariable.GenerateSection(OutputFile,
[File],
Section.Section.SectionType.get (SectionType),
IsMakefile=IsMakefile
)
OutputFileList.append(OutputFile)
return OutputFileList, Align
| edk2-master | BaseTools/Source/Python/GenFds/EfiSection.py |
## @file
# process Version section generation
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
from .Ffs import SectionSuffix
import Common.LongFilePathOs as os
from .GenFdsGlobalVariable import GenFdsGlobalVariable
from CommonDataClass.FdfClass import VerSectionClassObject
from Common.LongFilePathSupport import OpenLongFilePath as open
from Common.DataType import SUP_MODULE_SEC
## generate version section
#
#
class VerSection (VerSectionClassObject):
## The constructor
#
# @param self The object pointer
#
def __init__(self):
VerSectionClassObject.__init__(self)
## GenSection() method
#
# Generate version section
#
# @param self The object pointer
# @param OutputPath Where to place output file
# @param ModuleName Which module this section belongs to
# @param SecNum Index of section
# @param KeyStringList Filter for inputs of section generation
# @param FfsInf FfsInfStatement object that contains this section data
# @param Dict dictionary contains macro and its value
# @retval tuple (Generated file name, section alignment)
#
def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf=None, Dict=None, IsMakefile = False):
#
# Prepare the parameter of GenSection
#
if FfsInf:
self.Alignment = FfsInf.__ExtendMacro__(self.Alignment)
self.BuildNum = FfsInf.__ExtendMacro__(self.BuildNum)
self.StringData = FfsInf.__ExtendMacro__(self.StringData)
self.FileName = FfsInf.__ExtendMacro__(self.FileName)
OutputFile = os.path.join(OutputPath,
ModuleName + SUP_MODULE_SEC + SecNum + SectionSuffix.get('VERSION'))
OutputFile = os.path.normpath(OutputFile)
# Get String Data
StringData = ''
if self.StringData:
StringData = self.StringData
elif self.FileName:
if Dict is None:
Dict = {}
FileNameStr = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FileName)
FileNameStr = GenFdsGlobalVariable.MacroExtend(FileNameStr, Dict)
FileObj = open(FileNameStr, 'r')
StringData = FileObj.read()
StringData = '"' + StringData + '"'
FileObj.close()
GenFdsGlobalVariable.GenerateSection(OutputFile, [], 'EFI_SECTION_VERSION',
Ver=StringData, BuildNumber=self.BuildNum, IsMakefile=IsMakefile)
OutputFileList = []
OutputFileList.append(OutputFile)
return OutputFileList, self.Alignment
| edk2-master | BaseTools/Source/Python/GenFds/VerSection.py |
## @file
# process OptionROM generation from FILE statement
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
import Common.LongFilePathOs as os
from .GenFdsGlobalVariable import GenFdsGlobalVariable
##
#
#
class OptRomFileStatement:
## The constructor
#
# @param self The object pointer
#
def __init__(self):
self.FileName = None
self.FileType = None
self.OverrideAttribs = None
## GenFfs() method
#
# Generate FFS
#
# @param self The object pointer
# @param Dict dictionary contains macro and value pair
# @retval string Generated FFS file name
#
def GenFfs(self, Dict = None, IsMakefile=False):
if Dict is None:
Dict = {}
if self.FileName is not None:
self.FileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FileName)
return self.FileName
| edk2-master | BaseTools/Source/Python/GenFds/OptRomFileStatement.py |
## @file
# generate flash image
#
# Copyright (c) 2007 - 2019, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import print_function
from __future__ import absolute_import
from re import compile
from optparse import OptionParser
from sys import exit
from glob import glob
from struct import unpack
from linecache import getlines
from io import BytesIO
import Common.LongFilePathOs as os
from Common.TargetTxtClassObject import TargetTxtDict,gDefaultTargetTxtFile
from Common.DataType import *
import Common.GlobalData as GlobalData
from Common import EdkLogger
from Common.StringUtils import NormPath
from Common.Misc import DirCache, PathClass, GuidStructureStringToGuidString
from Common.Misc import SaveFileOnChange, ClearDuplicatedInf
from Common.BuildVersion import gBUILD_VERSION
from Common.MultipleWorkspace import MultipleWorkspace as mws
from Common.BuildToolError import FatalError, GENFDS_ERROR, CODE_ERROR, FORMAT_INVALID, RESOURCE_NOT_AVAILABLE, FILE_NOT_FOUND, OPTION_MISSING, FORMAT_NOT_SUPPORTED, OPTION_VALUE_INVALID, PARAMETER_INVALID
from Workspace.WorkspaceDatabase import WorkspaceDatabase
from .FdfParser import FdfParser, Warning
from .GenFdsGlobalVariable import GenFdsGlobalVariable
from .FfsFileStatement import FileStatement
import Common.DataType as DataType
from struct import Struct
## Version and Copyright
versionNumber = "1.0" + ' ' + gBUILD_VERSION
__version__ = "%prog Version " + versionNumber
__copyright__ = "Copyright (c) 2007 - 2018, Intel Corporation All rights reserved."
## Tool entrance method
#
# This method mainly dispatch specific methods per the command line options.
# If no error found, return zero value so the caller of this tool can know
# if it's executed successfully or not.
#
# @retval 0 Tool was successful
# @retval 1 Tool failed
#
def main():
global Options
Options = myOptionParser()
EdkLogger.Initialize()
return GenFdsApi(OptionsToCommandDict(Options))
def resetFdsGlobalVariable():
GenFdsGlobalVariable.FvDir = ''
GenFdsGlobalVariable.OutputDirDict = {}
GenFdsGlobalVariable.BinDir = ''
# will be FvDir + os.sep + 'Ffs'
GenFdsGlobalVariable.FfsDir = ''
GenFdsGlobalVariable.FdfParser = None
GenFdsGlobalVariable.LibDir = ''
GenFdsGlobalVariable.WorkSpace = None
GenFdsGlobalVariable.WorkSpaceDir = ''
GenFdsGlobalVariable.ConfDir = ''
GenFdsGlobalVariable.OutputDirFromDscDict = {}
GenFdsGlobalVariable.TargetName = ''
GenFdsGlobalVariable.ToolChainTag = ''
GenFdsGlobalVariable.RuleDict = {}
GenFdsGlobalVariable.ArchList = None
GenFdsGlobalVariable.ActivePlatform = None
GenFdsGlobalVariable.FvAddressFileName = ''
GenFdsGlobalVariable.VerboseMode = False
GenFdsGlobalVariable.DebugLevel = -1
GenFdsGlobalVariable.SharpCounter = 0
GenFdsGlobalVariable.SharpNumberPerLine = 40
GenFdsGlobalVariable.FdfFile = ''
GenFdsGlobalVariable.FdfFileTimeStamp = 0
GenFdsGlobalVariable.FixedLoadAddress = False
GenFdsGlobalVariable.PlatformName = ''
GenFdsGlobalVariable.BuildRuleFamily = DataType.TAB_COMPILER_MSFT
GenFdsGlobalVariable.ToolChainFamily = DataType.TAB_COMPILER_MSFT
GenFdsGlobalVariable.__BuildRuleDatabase = None
GenFdsGlobalVariable.GuidToolDefinition = {}
GenFdsGlobalVariable.FfsCmdDict = {}
GenFdsGlobalVariable.SecCmdList = []
GenFdsGlobalVariable.CopyList = []
GenFdsGlobalVariable.ModuleFile = ''
GenFdsGlobalVariable.EnableGenfdsMultiThread = True
GenFdsGlobalVariable.LargeFileInFvFlags = []
GenFdsGlobalVariable.EFI_FIRMWARE_FILE_SYSTEM3_GUID = '5473C07A-3DCB-4dca-BD6F-1E9689E7349A'
GenFdsGlobalVariable.LARGE_FILE_SIZE = 0x1000000
GenFdsGlobalVariable.SectionHeader = Struct("3B 1B")
# FvName, FdName, CapName in FDF, Image file name
GenFdsGlobalVariable.ImageBinDict = {}
def GenFdsApi(FdsCommandDict, WorkSpaceDataBase=None):
global Workspace
Workspace = ""
ArchList = None
ReturnCode = 0
resetFdsGlobalVariable()
try:
if FdsCommandDict.get("verbose"):
EdkLogger.SetLevel(EdkLogger.VERBOSE)
GenFdsGlobalVariable.VerboseMode = True
if FdsCommandDict.get("FixedAddress"):
GenFdsGlobalVariable.FixedLoadAddress = True
if FdsCommandDict.get("quiet"):
EdkLogger.SetLevel(EdkLogger.QUIET)
if FdsCommandDict.get("debug"):
EdkLogger.SetLevel(FdsCommandDict.get("debug") + 1)
GenFdsGlobalVariable.DebugLevel = FdsCommandDict.get("debug")
else:
EdkLogger.SetLevel(EdkLogger.INFO)
if not FdsCommandDict.get("Workspace",os.environ.get('WORKSPACE')):
EdkLogger.error("GenFds", OPTION_MISSING, "WORKSPACE not defined",
ExtraData="Please use '-w' switch to pass it or set the WORKSPACE environment variable.")
elif not os.path.exists(FdsCommandDict.get("Workspace",os.environ.get('WORKSPACE'))):
EdkLogger.error("GenFds", PARAMETER_INVALID, "WORKSPACE is invalid",
ExtraData="Please use '-w' switch to pass it or set the WORKSPACE environment variable.")
else:
Workspace = os.path.normcase(FdsCommandDict.get("Workspace",os.environ.get('WORKSPACE')))
GenFdsGlobalVariable.WorkSpaceDir = Workspace
if FdsCommandDict.get("debug"):
GenFdsGlobalVariable.VerboseLogger("Using Workspace:" + Workspace)
if FdsCommandDict.get("GenfdsMultiThread"):
GenFdsGlobalVariable.EnableGenfdsMultiThread = True
else:
GenFdsGlobalVariable.EnableGenfdsMultiThread = False
os.chdir(GenFdsGlobalVariable.WorkSpaceDir)
# set multiple workspace
PackagesPath = os.getenv("PACKAGES_PATH")
mws.setWs(GenFdsGlobalVariable.WorkSpaceDir, PackagesPath)
if FdsCommandDict.get("fdf_file"):
FdfFilename = FdsCommandDict.get("fdf_file")[0].Path
FdfFilename = GenFdsGlobalVariable.ReplaceWorkspaceMacro(FdfFilename)
if FdfFilename[0:2] == '..':
FdfFilename = os.path.abspath(FdfFilename)
if not os.path.isabs(FdfFilename):
FdfFilename = mws.join(GenFdsGlobalVariable.WorkSpaceDir, FdfFilename)
if not os.path.exists(FdfFilename):
EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=FdfFilename)
GenFdsGlobalVariable.FdfFile = FdfFilename
GenFdsGlobalVariable.FdfFileTimeStamp = os.path.getmtime(FdfFilename)
else:
EdkLogger.error("GenFds", OPTION_MISSING, "Missing FDF filename")
if FdsCommandDict.get("build_target"):
GenFdsGlobalVariable.TargetName = FdsCommandDict.get("build_target")
if FdsCommandDict.get("toolchain_tag"):
GenFdsGlobalVariable.ToolChainTag = FdsCommandDict.get("toolchain_tag")
if FdsCommandDict.get("active_platform"):
ActivePlatform = FdsCommandDict.get("active_platform")
ActivePlatform = GenFdsGlobalVariable.ReplaceWorkspaceMacro(ActivePlatform)
if ActivePlatform[0:2] == '..':
ActivePlatform = os.path.abspath(ActivePlatform)
if not os.path.isabs (ActivePlatform):
ActivePlatform = mws.join(GenFdsGlobalVariable.WorkSpaceDir, ActivePlatform)
if not os.path.exists(ActivePlatform):
EdkLogger.error("GenFds", FILE_NOT_FOUND, "ActivePlatform doesn't exist!")
else:
EdkLogger.error("GenFds", OPTION_MISSING, "Missing active platform")
GenFdsGlobalVariable.ActivePlatform = PathClass(NormPath(ActivePlatform))
if FdsCommandDict.get("conf_directory"):
# Get alternate Conf location, if it is absolute, then just use the absolute directory name
ConfDirectoryPath = os.path.normpath(FdsCommandDict.get("conf_directory"))
if ConfDirectoryPath.startswith('"'):
ConfDirectoryPath = ConfDirectoryPath[1:]
if ConfDirectoryPath.endswith('"'):
ConfDirectoryPath = ConfDirectoryPath[:-1]
if not os.path.isabs(ConfDirectoryPath):
# Since alternate directory name is not absolute, the alternate directory is located within the WORKSPACE
# This also handles someone specifying the Conf directory in the workspace. Using --conf=Conf
ConfDirectoryPath = os.path.join(GenFdsGlobalVariable.WorkSpaceDir, ConfDirectoryPath)
else:
if "CONF_PATH" in os.environ:
ConfDirectoryPath = os.path.normcase(os.environ["CONF_PATH"])
else:
# Get standard WORKSPACE/Conf, use the absolute path to the WORKSPACE/Conf
ConfDirectoryPath = mws.join(GenFdsGlobalVariable.WorkSpaceDir, 'Conf')
GenFdsGlobalVariable.ConfDir = ConfDirectoryPath
if not GlobalData.gConfDirectory:
GlobalData.gConfDirectory = GenFdsGlobalVariable.ConfDir
BuildConfigurationFile = os.path.normpath(os.path.join(ConfDirectoryPath, gDefaultTargetTxtFile))
if os.path.isfile(BuildConfigurationFile) == True:
# if no build target given in command line, get it from target.txt
TargetObj = TargetTxtDict()
TargetTxt = TargetObj.Target
if not GenFdsGlobalVariable.TargetName:
BuildTargetList = TargetTxt.TargetTxtDictionary[TAB_TAT_DEFINES_TARGET]
if len(BuildTargetList) != 1:
EdkLogger.error("GenFds", OPTION_VALUE_INVALID, ExtraData="Only allows one instance for Target.")
GenFdsGlobalVariable.TargetName = BuildTargetList[0]
# if no tool chain given in command line, get it from target.txt
if not GenFdsGlobalVariable.ToolChainTag:
ToolChainList = TargetTxt.TargetTxtDictionary[TAB_TAT_DEFINES_TOOL_CHAIN_TAG]
if ToolChainList is None or len(ToolChainList) == 0:
EdkLogger.error("GenFds", RESOURCE_NOT_AVAILABLE, ExtraData="No toolchain given. Don't know how to build.")
if len(ToolChainList) != 1:
EdkLogger.error("GenFds", OPTION_VALUE_INVALID, ExtraData="Only allows one instance for ToolChain.")
GenFdsGlobalVariable.ToolChainTag = ToolChainList[0]
else:
EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=BuildConfigurationFile)
#Set global flag for build mode
GlobalData.gIgnoreSource = FdsCommandDict.get("IgnoreSources")
if FdsCommandDict.get("macro"):
for Pair in FdsCommandDict.get("macro"):
if Pair.startswith('"'):
Pair = Pair[1:]
if Pair.endswith('"'):
Pair = Pair[:-1]
List = Pair.split('=')
if len(List) == 2:
if not List[1].strip():
EdkLogger.error("GenFds", OPTION_VALUE_INVALID, ExtraData="No Value given for Macro %s" %List[0])
if List[0].strip() in ["WORKSPACE", "TARGET", "TOOLCHAIN"]:
GlobalData.gGlobalDefines[List[0].strip()] = List[1].strip()
else:
GlobalData.gCommandLineDefines[List[0].strip()] = List[1].strip()
else:
GlobalData.gCommandLineDefines[List[0].strip()] = "TRUE"
os.environ["WORKSPACE"] = Workspace
# Use the -t and -b option as gGlobalDefines's TOOLCHAIN and TARGET if they are not defined
if "TARGET" not in GlobalData.gGlobalDefines:
GlobalData.gGlobalDefines["TARGET"] = GenFdsGlobalVariable.TargetName
if "TOOLCHAIN" not in GlobalData.gGlobalDefines:
GlobalData.gGlobalDefines["TOOLCHAIN"] = GenFdsGlobalVariable.ToolChainTag
if "TOOL_CHAIN_TAG" not in GlobalData.gGlobalDefines:
GlobalData.gGlobalDefines['TOOL_CHAIN_TAG'] = GenFdsGlobalVariable.ToolChainTag
"""call Workspace build create database"""
GlobalData.gDatabasePath = os.path.normpath(os.path.join(ConfDirectoryPath, GlobalData.gDatabasePath))
if WorkSpaceDataBase:
BuildWorkSpace = WorkSpaceDataBase
else:
BuildWorkSpace = WorkspaceDatabase()
#
# Get files real name in workspace dir
#
GlobalData.gAllFiles = DirCache(Workspace)
GlobalData.gWorkspace = Workspace
if FdsCommandDict.get("build_architecture_list"):
ArchList = FdsCommandDict.get("build_architecture_list").split(',')
else:
ArchList = BuildWorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, TAB_COMMON, FdsCommandDict.get("build_target"), FdsCommandDict.get("toolchain_tag")].SupArchList
TargetArchList = set(BuildWorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, TAB_COMMON, FdsCommandDict.get("build_target"), FdsCommandDict.get("toolchain_tag")].SupArchList) & set(ArchList)
if len(TargetArchList) == 0:
EdkLogger.error("GenFds", GENFDS_ERROR, "Target ARCH %s not in platform supported ARCH %s" % (str(ArchList), str(BuildWorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, TAB_COMMON].SupArchList)))
for Arch in ArchList:
GenFdsGlobalVariable.OutputDirFromDscDict[Arch] = NormPath(BuildWorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, FdsCommandDict.get("build_target"), FdsCommandDict.get("toolchain_tag")].OutputDirectory)
# assign platform name based on last entry in ArchList
GenFdsGlobalVariable.PlatformName = BuildWorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, ArchList[-1], FdsCommandDict.get("build_target"), FdsCommandDict.get("toolchain_tag")].PlatformName
if FdsCommandDict.get("platform_build_directory"):
OutputDirFromCommandLine = GenFdsGlobalVariable.ReplaceWorkspaceMacro(FdsCommandDict.get("platform_build_directory"))
if not os.path.isabs (OutputDirFromCommandLine):
OutputDirFromCommandLine = os.path.join(GenFdsGlobalVariable.WorkSpaceDir, OutputDirFromCommandLine)
for Arch in ArchList:
GenFdsGlobalVariable.OutputDirDict[Arch] = OutputDirFromCommandLine
else:
for Arch in ArchList:
GenFdsGlobalVariable.OutputDirDict[Arch] = os.path.join(GenFdsGlobalVariable.OutputDirFromDscDict[Arch], GenFdsGlobalVariable.TargetName + '_' + GenFdsGlobalVariable.ToolChainTag)
for Key in GenFdsGlobalVariable.OutputDirDict:
OutputDir = GenFdsGlobalVariable.OutputDirDict[Key]
if OutputDir[0:2] == '..':
OutputDir = os.path.abspath(OutputDir)
if OutputDir[1] != ':':
OutputDir = os.path.join (GenFdsGlobalVariable.WorkSpaceDir, OutputDir)
if not os.path.exists(OutputDir):
EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=OutputDir)
GenFdsGlobalVariable.OutputDirDict[Key] = OutputDir
""" Parse Fdf file, has to place after build Workspace as FDF may contain macros from DSC file """
if WorkSpaceDataBase:
FdfParserObj = GlobalData.gFdfParser
else:
FdfParserObj = FdfParser(FdfFilename)
FdfParserObj.ParseFile()
if FdfParserObj.CycleReferenceCheck():
EdkLogger.error("GenFds", FORMAT_NOT_SUPPORTED, "Cycle Reference Detected in FDF file")
if FdsCommandDict.get("fd"):
if FdsCommandDict.get("fd")[0].upper() in FdfParserObj.Profile.FdDict:
GenFds.OnlyGenerateThisFd = FdsCommandDict.get("fd")[0]
else:
EdkLogger.error("GenFds", OPTION_VALUE_INVALID,
"No such an FD in FDF file: %s" % FdsCommandDict.get("fd")[0])
if FdsCommandDict.get("fv"):
if FdsCommandDict.get("fv")[0].upper() in FdfParserObj.Profile.FvDict:
GenFds.OnlyGenerateThisFv = FdsCommandDict.get("fv")[0]
else:
EdkLogger.error("GenFds", OPTION_VALUE_INVALID,
"No such an FV in FDF file: %s" % FdsCommandDict.get("fv")[0])
if FdsCommandDict.get("cap"):
if FdsCommandDict.get("cap")[0].upper() in FdfParserObj.Profile.CapsuleDict:
GenFds.OnlyGenerateThisCap = FdsCommandDict.get("cap")[0]
else:
EdkLogger.error("GenFds", OPTION_VALUE_INVALID,
"No such a Capsule in FDF file: %s" % FdsCommandDict.get("cap")[0])
GenFdsGlobalVariable.WorkSpace = BuildWorkSpace
if ArchList:
GenFdsGlobalVariable.ArchList = ArchList
# Dsc Build Data will handle Pcd Settings from CommandLine.
"""Modify images from build output if the feature of loading driver at fixed address is on."""
if GenFdsGlobalVariable.FixedLoadAddress:
GenFds.PreprocessImage(BuildWorkSpace, GenFdsGlobalVariable.ActivePlatform)
# Record the FV Region info that may specific in the FD
if FdfParserObj.Profile.FvDict and FdfParserObj.Profile.FdDict:
for FvObj in FdfParserObj.Profile.FvDict.values():
for FdObj in FdfParserObj.Profile.FdDict.values():
for RegionObj in FdObj.RegionList:
if RegionObj.RegionType != BINARY_FILE_TYPE_FV:
continue
for RegionData in RegionObj.RegionDataList:
if FvObj.UiFvName.upper() == RegionData.upper():
if not FvObj.BaseAddress:
FvObj.BaseAddress = '0x%x' % (int(FdObj.BaseAddress, 0) + RegionObj.Offset)
if FvObj.FvRegionInFD:
if FvObj.FvRegionInFD != RegionObj.Size:
EdkLogger.error("GenFds", FORMAT_INVALID, "The FV %s's region is specified in multiple FD with different value." %FvObj.UiFvName)
else:
FvObj.FvRegionInFD = RegionObj.Size
RegionObj.BlockInfoOfRegion(FdObj.BlockSizeList, FvObj)
"""Call GenFds"""
GenFds.GenFd('', FdfParserObj, BuildWorkSpace, ArchList)
"""Generate GUID cross reference file"""
GenFds.GenerateGuidXRefFile(BuildWorkSpace, ArchList, FdfParserObj)
"""Display FV space info."""
GenFds.DisplayFvSpaceInfo(FdfParserObj)
except Warning as X:
EdkLogger.error(X.ToolName, FORMAT_INVALID, File=X.FileName, Line=X.LineNumber, ExtraData=X.Message, RaiseError=False)
ReturnCode = FORMAT_INVALID
except FatalError as X:
if FdsCommandDict.get("debug") is not None:
import traceback
EdkLogger.quiet(traceback.format_exc())
ReturnCode = X.args[0]
except:
import traceback
EdkLogger.error(
"\nPython",
CODE_ERROR,
"Tools code failure",
ExtraData="Please send email to %s for help, attaching following call stack trace!\n" % MSG_EDKII_MAIL_ADDR,
RaiseError=False
)
EdkLogger.quiet(traceback.format_exc())
ReturnCode = CODE_ERROR
finally:
ClearDuplicatedInf()
return ReturnCode
def OptionsToCommandDict(Options):
FdsCommandDict = {}
FdsCommandDict["verbose"] = Options.verbose
FdsCommandDict["FixedAddress"] = Options.FixedAddress
FdsCommandDict["quiet"] = Options.quiet
FdsCommandDict["debug"] = Options.debug
FdsCommandDict["Workspace"] = Options.Workspace
FdsCommandDict["GenfdsMultiThread"] = not Options.NoGenfdsMultiThread
FdsCommandDict["fdf_file"] = [PathClass(Options.filename)] if Options.filename else []
FdsCommandDict["build_target"] = Options.BuildTarget
FdsCommandDict["toolchain_tag"] = Options.ToolChain
FdsCommandDict["active_platform"] = Options.activePlatform
FdsCommandDict["OptionPcd"] = Options.OptionPcd
FdsCommandDict["conf_directory"] = Options.ConfDirectory
FdsCommandDict["IgnoreSources"] = Options.IgnoreSources
FdsCommandDict["macro"] = Options.Macros
FdsCommandDict["build_architecture_list"] = Options.archList
FdsCommandDict["platform_build_directory"] = Options.outputDir
FdsCommandDict["fd"] = [Options.uiFdName] if Options.uiFdName else []
FdsCommandDict["fv"] = [Options.uiFvName] if Options.uiFvName else []
FdsCommandDict["cap"] = [Options.uiCapName] if Options.uiCapName else []
return FdsCommandDict
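# Illustrative sketch only (not part of the original tool): GenFdsApi() consumes a
# dictionary shaped like the one built above. All paths and names below are
# placeholders, not real files.
#
#   FdsCommandDict = {
#       "Workspace": "/path/to/edk2",
#       "fdf_file": [PathClass("/path/to/edk2/SamplePkg/Sample.fdf")],
#       "active_platform": "/path/to/edk2/SamplePkg/Sample.dsc",
#       "build_target": "DEBUG",
#       "toolchain_tag": "GCC5",
#       "build_architecture_list": "X64",
#       "fd": [], "fv": [], "cap": [], "macro": None, "conf_directory": None,
#   }
#   ReturnCode = GenFdsApi(FdsCommandDict)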
gParamCheck = []
def SingleCheckCallback(option, opt_str, value, parser):
if option not in gParamCheck:
setattr(parser.values, option.dest, value)
gParamCheck.append(option)
else:
parser.error("Option %s only allows one instance in command line!" % option)
## Parse command line options
#
# Using standard Python module optparse to parse command line option of this tool.
#
# @retval Opt A optparse.Values object containing the parsed options
#
def myOptionParser():
usage = "%prog [options] -f input_file -a arch_list -b build_target -p active_platform -t tool_chain_tag -D \"MacroName [= MacroValue]\""
Parser = OptionParser(usage=usage, description=__copyright__, version="%prog " + str(versionNumber))
Parser.add_option("-f", "--file", dest="filename", type="string", help="Name of FDF file to convert", action="callback", callback=SingleCheckCallback)
Parser.add_option("-a", "--arch", dest="archList", help="comma separated list containing one or more of: IA32, X64, IPF, ARM, AARCH64 or EBC which should be built, overrides target.txt?s TARGET_ARCH")
Parser.add_option("-q", "--quiet", action="store_true", type=None, help="Disable all messages except FATAL ERRORS.")
Parser.add_option("-v", "--verbose", action="store_true", type=None, help="Turn on verbose output with informational messages printed.")
Parser.add_option("-d", "--debug", action="store", type="int", help="Enable debug messages at specified level.")
Parser.add_option("-p", "--platform", type="string", dest="activePlatform", help="Set the ACTIVE_PLATFORM, overrides target.txt ACTIVE_PLATFORM setting.",
action="callback", callback=SingleCheckCallback)
Parser.add_option("-w", "--workspace", type="string", dest="Workspace", default=os.environ.get('WORKSPACE'), help="Set the WORKSPACE",
action="callback", callback=SingleCheckCallback)
Parser.add_option("-o", "--outputDir", type="string", dest="outputDir", help="Name of Build Output directory",
action="callback", callback=SingleCheckCallback)
Parser.add_option("-r", "--rom_image", dest="uiFdName", help="Build the image using the [FD] section named by FdUiName.")
Parser.add_option("-i", "--FvImage", dest="uiFvName", help="Build the FV image using the [FV] section named by UiFvName")
Parser.add_option("-C", "--CapsuleImage", dest="uiCapName", help="Build the Capsule image using the [Capsule] section named by UiCapName")
Parser.add_option("-b", "--buildtarget", type="string", dest="BuildTarget", help="Set the build TARGET, overrides target.txt TARGET setting.",
action="callback", callback=SingleCheckCallback)
Parser.add_option("-t", "--tagname", type="string", dest="ToolChain", help="Using the tools: TOOL_CHAIN_TAG name to build the platform.",
action="callback", callback=SingleCheckCallback)
Parser.add_option("-D", "--define", action="append", type="string", dest="Macros", help="Macro: \"Name [= Value]\".")
Parser.add_option("-s", "--specifyaddress", dest="FixedAddress", action="store_true", type=None, help="Specify driver load address.")
Parser.add_option("--conf", action="store", type="string", dest="ConfDirectory", help="Specify the customized Conf directory.")
Parser.add_option("--ignore-sources", action="store_true", dest="IgnoreSources", default=False, help="Focus to a binary build and ignore all source files")
Parser.add_option("--pcd", action="append", dest="OptionPcd", help="Set PCD value by command line. Format: \"PcdName=Value\" ")
Parser.add_option("--genfds-multi-thread", action="store_true", dest="GenfdsMultiThread", default=True, help="Enable GenFds multi thread to generate ffs file.")
Parser.add_option("--no-genfds-multi-thread", action="store_true", dest="NoGenfdsMultiThread", default=False, help="Disable GenFds multi thread to generate ffs file.")
Options, _ = Parser.parse_args()
return Options
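# Example invocation (illustrative only; the FDF, DSC and workspace paths are
# placeholders, not real files):
#   GenFds -f SamplePkg/Sample.fdf -p SamplePkg/Sample.dsc \
#          -b DEBUG -t GCC5 -a X64 -w /path/to/edk2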
## The class implementing the EDK2 flash image generation process
#
# This process includes:
# 1. Collect workspace information, includes platform and module information
# 2. Call methods of Fd class to generate FD
# 3. Call methods of Fv class to generate FV that not belong to FD
#
class GenFds(object):
FdfParsef = None
OnlyGenerateThisFd = None
OnlyGenerateThisFv = None
OnlyGenerateThisCap = None
## GenFd()
#
# @param OutputDir Output directory
# @param FdfParserObject FDF contents parser
# @param Workspace The directory of workspace
# @param ArchList The Arch list of platform
#
@staticmethod
def GenFd (OutputDir, FdfParserObject, WorkSpace, ArchList):
GenFdsGlobalVariable.SetDir ('', FdfParserObject, WorkSpace, ArchList)
GenFdsGlobalVariable.VerboseLogger(" Generate all Fd images and their required FV and Capsule images!")
if GenFds.OnlyGenerateThisCap is not None and GenFds.OnlyGenerateThisCap.upper() in GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict:
CapsuleObj = GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict[GenFds.OnlyGenerateThisCap.upper()]
if CapsuleObj is not None:
CapsuleObj.GenCapsule()
return
if GenFds.OnlyGenerateThisFd is not None and GenFds.OnlyGenerateThisFd.upper() in GenFdsGlobalVariable.FdfParser.Profile.FdDict:
FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict[GenFds.OnlyGenerateThisFd.upper()]
if FdObj is not None:
FdObj.GenFd()
return
elif GenFds.OnlyGenerateThisFd is None and GenFds.OnlyGenerateThisFv is None:
for FdObj in GenFdsGlobalVariable.FdfParser.Profile.FdDict.values():
FdObj.GenFd()
GenFdsGlobalVariable.VerboseLogger("\n Generate other FV images! ")
if GenFds.OnlyGenerateThisFv is not None and GenFds.OnlyGenerateThisFv.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict:
FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict[GenFds.OnlyGenerateThisFv.upper()]
if FvObj is not None:
Buffer = BytesIO()
FvObj.AddToBuffer(Buffer)
Buffer.close()
return
elif GenFds.OnlyGenerateThisFv is None:
for FvObj in GenFdsGlobalVariable.FdfParser.Profile.FvDict.values():
Buffer = BytesIO()
FvObj.AddToBuffer(Buffer)
Buffer.close()
if GenFds.OnlyGenerateThisFv is None and GenFds.OnlyGenerateThisFd is None and GenFds.OnlyGenerateThisCap is None:
if GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict != {}:
GenFdsGlobalVariable.VerboseLogger("\n Generate other Capsule images!")
for CapsuleObj in GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict.values():
CapsuleObj.GenCapsule()
if GenFdsGlobalVariable.FdfParser.Profile.OptRomDict != {}:
GenFdsGlobalVariable.VerboseLogger("\n Generate all Option ROM!")
for OptRomObj in GenFdsGlobalVariable.FdfParser.Profile.OptRomDict.values():
OptRomObj.AddToBuffer(None)
@staticmethod
def GenFfsMakefile(OutputDir, FdfParserObject, WorkSpace, ArchList, GlobalData):
GenFdsGlobalVariable.SetEnv(FdfParserObject, WorkSpace, ArchList, GlobalData)
for FdObj in GenFdsGlobalVariable.FdfParser.Profile.FdDict.values():
FdObj.GenFd(Flag=True)
for FvObj in GenFdsGlobalVariable.FdfParser.Profile.FvDict.values():
FvObj.AddToBuffer(Buffer=None, Flag=True)
if GenFdsGlobalVariable.FdfParser.Profile.OptRomDict != {}:
for OptRomObj in GenFdsGlobalVariable.FdfParser.Profile.OptRomDict.values():
OptRomObj.AddToBuffer(Buffer=None, Flag=True)
return GenFdsGlobalVariable.FfsCmdDict
## GetFvBlockSize()
#
# @param FvObj Whose block size to get
# @retval int Block size value
#
@staticmethod
def GetFvBlockSize(FvObj):
DefaultBlockSize = 0x1
FdObj = None
if GenFds.OnlyGenerateThisFd is not None and GenFds.OnlyGenerateThisFd.upper() in GenFdsGlobalVariable.FdfParser.Profile.FdDict:
FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict[GenFds.OnlyGenerateThisFd.upper()]
if FdObj is None:
for ElementFd in GenFdsGlobalVariable.FdfParser.Profile.FdDict.values():
for ElementRegion in ElementFd.RegionList:
if ElementRegion.RegionType == BINARY_FILE_TYPE_FV:
for ElementRegionData in ElementRegion.RegionDataList:
if ElementRegionData is not None and ElementRegionData.upper() == FvObj.UiFvName:
if FvObj.BlockSizeList != []:
return FvObj.BlockSizeList[0][0]
else:
return ElementRegion.BlockSizeOfRegion(ElementFd.BlockSizeList)
if FvObj.BlockSizeList != []:
return FvObj.BlockSizeList[0][0]
return DefaultBlockSize
else:
for ElementRegion in FdObj.RegionList:
if ElementRegion.RegionType == BINARY_FILE_TYPE_FV:
for ElementRegionData in ElementRegion.RegionDataList:
if ElementRegionData is not None and ElementRegionData.upper() == FvObj.UiFvName:
if FvObj.BlockSizeList != []:
return FvObj.BlockSizeList[0][0]
else:
                                return ElementRegion.BlockSizeOfRegion(FdObj.BlockSizeList)
return DefaultBlockSize
## DisplayFvSpaceInfo()
#
# @param FvObj Whose block size to get
# @retval None
#
@staticmethod
def DisplayFvSpaceInfo(FdfParserObject):
FvSpaceInfoList = []
MaxFvNameLength = 0
for FvName in FdfParserObject.Profile.FvDict:
if len(FvName) > MaxFvNameLength:
MaxFvNameLength = len(FvName)
FvSpaceInfoFileName = os.path.join(GenFdsGlobalVariable.FvDir, FvName.upper() + '.Fv.map')
if os.path.exists(FvSpaceInfoFileName):
FileLinesList = getlines(FvSpaceInfoFileName)
TotalFound = False
Total = ''
UsedFound = False
Used = ''
FreeFound = False
Free = ''
for Line in FileLinesList:
NameValue = Line.split('=')
if len(NameValue) == 2:
if NameValue[0].strip() == 'EFI_FV_TOTAL_SIZE':
TotalFound = True
Total = NameValue[1].strip()
if NameValue[0].strip() == 'EFI_FV_TAKEN_SIZE':
UsedFound = True
Used = NameValue[1].strip()
if NameValue[0].strip() == 'EFI_FV_SPACE_SIZE':
FreeFound = True
Free = NameValue[1].strip()
if TotalFound and UsedFound and FreeFound:
FvSpaceInfoList.append((FvName, Total, Used, Free))
GenFdsGlobalVariable.InfLogger('\nFV Space Information')
for FvSpaceInfo in FvSpaceInfoList:
Name = FvSpaceInfo[0]
TotalSizeValue = int(FvSpaceInfo[1], 0)
UsedSizeValue = int(FvSpaceInfo[2], 0)
FreeSizeValue = int(FvSpaceInfo[3], 0)
if UsedSizeValue == TotalSizeValue:
Percentage = '100'
else:
Percentage = str((UsedSizeValue + 0.0) / TotalSizeValue)[0:4].lstrip('0.')
GenFdsGlobalVariable.InfLogger(Name + ' ' + '[' + Percentage + '%Full] '\
+ str(TotalSizeValue) + ' (' + hex(TotalSizeValue) + ')' + ' total, '\
+ str(UsedSizeValue) + ' (' + hex(UsedSizeValue) + ')' + ' used, '\
+ str(FreeSizeValue) + ' (' + hex(FreeSizeValue) + ')' + ' free')
## PreprocessImage()
#
# @param BuildDb Database from build meta data files
# @param DscFile modules from dsc file will be preprocessed
# @retval None
#
@staticmethod
def PreprocessImage(BuildDb, DscFile):
PcdDict = BuildDb.BuildObject[DscFile, TAB_COMMON, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag].Pcds
PcdValue = ''
for Key in PcdDict:
PcdObj = PcdDict[Key]
if PcdObj.TokenCName == 'PcdBsBaseAddress':
PcdValue = PcdObj.DefaultValue
break
if PcdValue == '':
return
Int64PcdValue = int(PcdValue, 0)
if Int64PcdValue == 0 or Int64PcdValue < -1:
return
TopAddress = 0
if Int64PcdValue > 0:
TopAddress = Int64PcdValue
ModuleDict = BuildDb.BuildObject[DscFile, TAB_COMMON, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag].Modules
for Key in ModuleDict:
ModuleObj = BuildDb.BuildObject[Key, TAB_COMMON, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
print(ModuleObj.BaseName + ' ' + ModuleObj.ModuleType)
@staticmethod
def GenerateGuidXRefFile(BuildDb, ArchList, FdfParserObj):
GuidXRefFileName = os.path.join(GenFdsGlobalVariable.FvDir, "Guid.xref")
GuidXRefFile = []
PkgGuidDict = {}
GuidDict = {}
ModuleList = []
FileGuidList = []
VariableGuidSet = set()
for Arch in ArchList:
PlatformDataBase = BuildDb.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
PkgList = GenFdsGlobalVariable.WorkSpace.GetPackageList(GenFdsGlobalVariable.ActivePlatform, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag)
for P in PkgList:
PkgGuidDict.update(P.Guids)
for Name, Guid in PlatformDataBase.Pcds:
Pcd = PlatformDataBase.Pcds[Name, Guid]
if Pcd.Type in [TAB_PCDS_DYNAMIC_HII, TAB_PCDS_DYNAMIC_EX_HII]:
for SkuId in Pcd.SkuInfoList:
Sku = Pcd.SkuInfoList[SkuId]
if Sku.VariableGuid in VariableGuidSet:continue
VariableGuidSet.add(Sku.VariableGuid)
if Sku.VariableGuid and Sku.VariableGuid in PkgGuidDict.keys():
GuidDict[Sku.VariableGuid] = PkgGuidDict[Sku.VariableGuid]
for ModuleFile in PlatformDataBase.Modules:
Module = BuildDb.BuildObject[ModuleFile, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
if Module in ModuleList:
continue
else:
ModuleList.append(Module)
if GlobalData.gGuidPattern.match(ModuleFile.BaseName):
GuidXRefFile.append("%s %s\n" % (ModuleFile.BaseName, Module.BaseName))
else:
GuidXRefFile.append("%s %s\n" % (Module.Guid, Module.BaseName))
GuidDict.update(Module.Protocols)
GuidDict.update(Module.Guids)
GuidDict.update(Module.Ppis)
for FvName in FdfParserObj.Profile.FvDict:
for FfsObj in FdfParserObj.Profile.FvDict[FvName].FfsList:
if not isinstance(FfsObj, FileStatement):
InfPath = PathClass(NormPath(mws.join(GenFdsGlobalVariable.WorkSpaceDir, FfsObj.InfFileName)))
FdfModule = BuildDb.BuildObject[InfPath, Arch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
if FdfModule in ModuleList:
continue
else:
ModuleList.append(FdfModule)
GuidXRefFile.append("%s %s\n" % (FdfModule.Guid, FdfModule.BaseName))
GuidDict.update(FdfModule.Protocols)
GuidDict.update(FdfModule.Guids)
GuidDict.update(FdfModule.Ppis)
else:
FileStatementGuid = FfsObj.NameGuid
if FileStatementGuid in FileGuidList:
continue
else:
FileGuidList.append(FileStatementGuid)
Name = []
FfsPath = os.path.join(GenFdsGlobalVariable.FvDir, 'Ffs')
FfsPath = glob(os.path.join(FfsPath, FileStatementGuid) + TAB_STAR)
if not FfsPath:
continue
if not os.path.exists(FfsPath[0]):
continue
MatchDict = {}
ReFileEnds = compile('\S+(.ui)$|\S+(fv.sec.txt)$|\S+(.pe32.txt)$|\S+(.te.txt)$|\S+(.pic.txt)$|\S+(.raw.txt)$|\S+(.ffs.txt)$')
FileList = os.listdir(FfsPath[0])
for File in FileList:
Match = ReFileEnds.search(File)
if Match:
for Index in range(1, 8):
if Match.group(Index) and Match.group(Index) in MatchDict:
MatchDict[Match.group(Index)].append(File)
elif Match.group(Index):
MatchDict[Match.group(Index)] = [File]
if not MatchDict:
continue
if '.ui' in MatchDict:
for File in MatchDict['.ui']:
with open(os.path.join(FfsPath[0], File), 'rb') as F:
F.read()
length = F.tell()
F.seek(4)
TmpStr = unpack('%dh' % ((length - 4) // 2), F.read())
Name = ''.join(chr(c) for c in TmpStr[:-1])
else:
FileList = []
if 'fv.sec.txt' in MatchDict:
FileList = MatchDict['fv.sec.txt']
elif '.pe32.txt' in MatchDict:
FileList = MatchDict['.pe32.txt']
elif '.te.txt' in MatchDict:
FileList = MatchDict['.te.txt']
elif '.pic.txt' in MatchDict:
FileList = MatchDict['.pic.txt']
elif '.raw.txt' in MatchDict:
FileList = MatchDict['.raw.txt']
elif '.ffs.txt' in MatchDict:
FileList = MatchDict['.ffs.txt']
else:
pass
for File in FileList:
with open(os.path.join(FfsPath[0], File), 'r') as F:
Name.append((F.read().split()[-1]))
if not Name:
continue
Name = ' '.join(Name) if isinstance(Name, type([])) else Name
GuidXRefFile.append("%s %s\n" %(FileStatementGuid, Name))
# Append GUIDs, Protocols, and PPIs to the Xref file
GuidXRefFile.append("\n")
for key, item in GuidDict.items():
GuidXRefFile.append("%s %s\n" % (GuidStructureStringToGuidString(item).upper(), key))
if GuidXRefFile:
GuidXRefFile = ''.join(GuidXRefFile)
SaveFileOnChange(GuidXRefFileName, GuidXRefFile, False)
GenFdsGlobalVariable.InfLogger("\nGUID cross reference file can be found at %s" % GuidXRefFileName)
elif os.path.exists(GuidXRefFileName):
os.remove(GuidXRefFileName)
if __name__ == '__main__':
r = main()
## 0-127 is a safe return range, and 1 is a standard default error
if r < 0 or r > 127:
r = 1
exit(r)
| edk2-master | BaseTools/Source/Python/GenFds/GenFds.py |
## @file
# process FD generation
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
from . import Region
from . import Fv
import Common.LongFilePathOs as os
from io import BytesIO
import sys
from struct import *
from .GenFdsGlobalVariable import GenFdsGlobalVariable
from CommonDataClass.FdfClass import FDClassObject
from Common import EdkLogger
from Common.BuildToolError import *
from Common.Misc import SaveFileOnChange
from Common.DataType import BINARY_FILE_TYPE_FV
## generate FD
#
#
class FD(FDClassObject):
## The constructor
#
# @param self The object pointer
#
def __init__(self):
FDClassObject.__init__(self)
## GenFd() method
#
# Generate FD
#
# @retval string Generated FD file name
#
def GenFd (self, Flag = False):
if self.FdUiName.upper() + 'fd' in GenFdsGlobalVariable.ImageBinDict:
return GenFdsGlobalVariable.ImageBinDict[self.FdUiName.upper() + 'fd']
#
# Print Information
#
FdFileName = os.path.join(GenFdsGlobalVariable.FvDir, self.FdUiName + '.fd')
if not Flag:
GenFdsGlobalVariable.InfLogger("\nFd File Name:%s (%s)" %(self.FdUiName, FdFileName))
Offset = 0x00
for item in self.BlockSizeList:
Offset = Offset + item[0] * item[1]
if Offset != self.Size:
EdkLogger.error("GenFds", GENFDS_ERROR, 'FD %s Size not consistent with block array' % self.FdUiName)
        GenFdsGlobalVariable.VerboseLogger('The following FVs will be added to the FD:')
for FvObj in GenFdsGlobalVariable.FdfParser.Profile.FvDict:
GenFdsGlobalVariable.VerboseLogger(FvObj)
HasCapsuleRegion = False
for RegionObj in self.RegionList:
if RegionObj.RegionType == 'CAPSULE':
HasCapsuleRegion = True
break
if HasCapsuleRegion:
TempFdBuffer = BytesIO()
PreviousRegionStart = -1
PreviousRegionSize = 1
for RegionObj in self.RegionList :
if RegionObj.RegionType == 'CAPSULE':
continue
if RegionObj.Offset + RegionObj.Size <= PreviousRegionStart:
pass
elif RegionObj.Offset <= PreviousRegionStart or (RegionObj.Offset >=PreviousRegionStart and RegionObj.Offset < PreviousRegionStart + PreviousRegionSize):
pass
elif RegionObj.Offset > PreviousRegionStart + PreviousRegionSize:
if not Flag:
GenFdsGlobalVariable.InfLogger('Padding region starting from offset 0x%X, with size 0x%X' %(PreviousRegionStart + PreviousRegionSize, RegionObj.Offset - (PreviousRegionStart + PreviousRegionSize)))
PadRegion = Region.Region()
PadRegion.Offset = PreviousRegionStart + PreviousRegionSize
PadRegion.Size = RegionObj.Offset - PadRegion.Offset
if not Flag:
PadRegion.AddToBuffer(TempFdBuffer, self.BaseAddress, self.BlockSizeList, self.ErasePolarity, GenFdsGlobalVariable.ImageBinDict, self.DefineVarDict)
PreviousRegionStart = RegionObj.Offset
PreviousRegionSize = RegionObj.Size
#
# Call each region's AddToBuffer function
#
if PreviousRegionSize > self.Size:
pass
GenFdsGlobalVariable.VerboseLogger('Call each region\'s AddToBuffer function')
RegionObj.AddToBuffer (TempFdBuffer, self.BaseAddress, self.BlockSizeList, self.ErasePolarity, GenFdsGlobalVariable.ImageBinDict, self.DefineVarDict)
FdBuffer = BytesIO()
PreviousRegionStart = -1
PreviousRegionSize = 1
for RegionObj in self.RegionList :
if RegionObj.Offset + RegionObj.Size <= PreviousRegionStart:
EdkLogger.error("GenFds", GENFDS_ERROR,
                'Region offset 0x%X is out of order with the region starting at 0x%X, size 0x%X\nRegions in FDF must have offsets in ascending order.'\
% (RegionObj.Offset, PreviousRegionStart, PreviousRegionSize))
elif RegionObj.Offset <= PreviousRegionStart or (RegionObj.Offset >=PreviousRegionStart and RegionObj.Offset < PreviousRegionStart + PreviousRegionSize):
EdkLogger.error("GenFds", GENFDS_ERROR,
'Region offset 0x%X overlaps with Region starting from 0x%X, size 0x%X' \
% (RegionObj.Offset, PreviousRegionStart, PreviousRegionSize))
elif RegionObj.Offset > PreviousRegionStart + PreviousRegionSize:
if not Flag:
GenFdsGlobalVariable.InfLogger('Padding region starting from offset 0x%X, with size 0x%X' %(PreviousRegionStart + PreviousRegionSize, RegionObj.Offset - (PreviousRegionStart + PreviousRegionSize)))
PadRegion = Region.Region()
PadRegion.Offset = PreviousRegionStart + PreviousRegionSize
PadRegion.Size = RegionObj.Offset - PadRegion.Offset
if not Flag:
PadRegion.AddToBuffer(FdBuffer, self.BaseAddress, self.BlockSizeList, self.ErasePolarity, GenFdsGlobalVariable.ImageBinDict, self.DefineVarDict)
PreviousRegionStart = RegionObj.Offset
PreviousRegionSize = RegionObj.Size
#
# Verify current region fits within allocated FD section Size
#
if PreviousRegionStart + PreviousRegionSize > self.Size:
EdkLogger.error("GenFds", GENFDS_ERROR,
'FD %s size too small to fit region with offset 0x%X and size 0x%X'
% (self.FdUiName, PreviousRegionStart, PreviousRegionSize))
#
# Call each region's AddToBuffer function
#
GenFdsGlobalVariable.VerboseLogger('Call each region\'s AddToBuffer function')
RegionObj.AddToBuffer (FdBuffer, self.BaseAddress, self.BlockSizeList, self.ErasePolarity, GenFdsGlobalVariable.ImageBinDict, self.DefineVarDict, Flag=Flag)
#
# Write the buffer contents to Fd file
#
GenFdsGlobalVariable.VerboseLogger('Write the buffer contents to Fd file')
if not Flag:
SaveFileOnChange(FdFileName, FdBuffer.getvalue())
FdBuffer.close()
GenFdsGlobalVariable.ImageBinDict[self.FdUiName.upper() + 'fd'] = FdFileName
return FdFileName
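    # Illustrative note (not from the original source): the block-map consistency
    # check near the top of GenFd() sums item[0] * item[1] over BlockSizeList, so a
    # list such as [(0x1000, 16)] (sixteen 0x1000-byte blocks) requires self.Size
    # to be exactly 0x10000.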
## generate flash map file
#
# @param self The object pointer
#
def GenFlashMap (self):
pass
| edk2-master | BaseTools/Source/Python/GenFds/Fd.py |
## @file
# process Subtype GUIDed section generation
#
# Copyright (c) 2022, Konstantin Aladyshev <[email protected]>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
from . import Section
import subprocess
from .Ffs import SectionSuffix
import Common.LongFilePathOs as os
from .GenFdsGlobalVariable import GenFdsGlobalVariable
from .GenFdsGlobalVariable import FindExtendTool
from CommonDataClass.FdfClass import SubTypeGuidSectionClassObject
import sys
from Common import EdkLogger
from Common.BuildToolError import *
from .FvImageSection import FvImageSection
from Common.LongFilePathSupport import OpenLongFilePath as open
from Common.DataType import *
## generate SubType GUIDed section
#
#
class SubTypeGuidSection(SubTypeGuidSectionClassObject) :
## The constructor
#
# @param self The object pointer
#
def __init__(self):
SubTypeGuidSectionClassObject.__init__(self)
## GenSection() method
#
# Generate GUIDed section
#
# @param self The object pointer
# @param OutputPath Where to place output file
# @param ModuleName Which module this section belongs to
# @param SecNum Index of section
# @param KeyStringList Filter for inputs of section generation
# @param FfsInf FfsInfStatement object that contains this section data
# @param Dict dictionary contains macro and its value
# @retval tuple (Generated file name, section alignment)
#
def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf=None, Dict=None, IsMakefile=False):
#
# Generate all section
#
self.KeyStringList = KeyStringList
self.CurrentArchList = GenFdsGlobalVariable.ArchList
if FfsInf is not None:
self.Alignment = FfsInf.__ExtendMacro__(self.Alignment)
self.SubTypeGuid = FfsInf.__ExtendMacro__(self.SubTypeGuid)
self.SectionType = FfsInf.__ExtendMacro__(self.SectionType)
self.CurrentArchList = [FfsInf.CurrentArch]
if Dict is None:
Dict = {}
self.SectFileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.SectFileName)
self.SectFileName = GenFdsGlobalVariable.MacroExtend(self.SectFileName, Dict)
OutputFile = os.path.join(OutputPath, ModuleName + SUP_MODULE_SEC + SecNum + SectionSuffix.get("SUBTYPE_GUID"))
GenFdsGlobalVariable.GenerateSection(OutputFile, [self.SectFileName], 'EFI_SECTION_FREEFORM_SUBTYPE_GUID', Guid=self.SubTypeGuid, IsMakefile=IsMakefile)
OutputFileList = []
OutputFileList.append(OutputFile)
return OutputFileList, self.Alignment
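# Illustrative FDF usage sketch (not taken from this file; the GUID and file name
# are placeholders). A leaf SUBTYPE_GUID section inside an FDF FILE statement is
# typically written along the lines of:
#   SECTION SUBTYPE_GUID 11223344-5566-7788-99AA-BBCCDDEEFF00 = Data/SomeBlob.bin
# which this class wraps into an EFI_SECTION_FREEFORM_SUBTYPE_GUID section.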
| edk2-master | BaseTools/Source/Python/GenFds/SubTypeGuidSection.py |
# @file
# Split a file into two pieces at the requested offset.
#
# Copyright (c) 2021, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
| edk2-master | BaseTools/Source/Python/Split/__init__.py |
# @file
# Split a file into two pieces at the requested offset.
#
# Copyright (c) 2021, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
import argparse
import os
import io
import shutil
import logging
import sys
import tempfile
parser = argparse.ArgumentParser(description='''
SplitFile creates two binary files, either in the current working directory or in the specified directory.
''')
parser.add_argument("-f", "--filename", dest="inputfile",
required=True, help="The input file to split tool.")
parser.add_argument("-s", "--split", dest="position",
required=True, help="The number of bytes in the first file. The valid format are HEX, Decimal and Decimal[KMG].")
parser.add_argument("-p", "--prefix", dest="output",
help="The output folder.")
parser.add_argument("-o", "--firstfile", help="The first file name")
parser.add_argument("-t", "--secondfile", help="The second file name")
parser.add_argument("--version", action="version", version='%(prog)s Version 2.0',
help="Print debug information.")
group = parser.add_mutually_exclusive_group()
group.add_argument("-v", "--verbose", action="store_true",
help="Print debug information.")
group.add_argument("-q", "--quiet", action="store_true",
help="Disable all messages except fatal errors")
SizeDict = {
"K": 1024,
"M": 1024*1024,
"G": 1024*1024*1024
}
def GetPositionValue(position):
'''
Parse the string of the argument position and return a decimal number.
The valid position formats are
1. HEX
e.g. 0x1000 or 0X1000
2. Decimal
e.g. 100
3. Decimal[KMG]
e.g. 100K or 100M or 100G or 100k or 100m or 100g
'''
logger = logging.getLogger('Split')
PosVal = 0
header = position[:2].upper()
tailer = position[-1].upper()
try:
if tailer in SizeDict:
PosVal = int(position[:-1]) * SizeDict[tailer]
else:
if header == "0X":
PosVal = int(position, 16)
else:
PosVal = int(position)
except Exception as e:
logger.error(
"The parameter %s format is incorrect. The valid format is HEX, Decimal and Decimal[KMG]." % position)
raise(e)
return PosVal
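# Illustrative examples of the accepted formats (sketch only, not executed here):
#   GetPositionValue("0x1000")  # -> 4096   (HEX)
#   GetPositionValue("100")     # -> 100    (Decimal)
#   GetPositionValue("64K")     # -> 65536  (Decimal[KMG], K = 1024)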
def getFileSize(filename):
'''
Read the input file and return the file size.
'''
logger = logging.getLogger('Split')
length = 0
try:
with open(filename, "rb") as fin:
fin.seek(0, io.SEEK_END)
length = fin.tell()
except Exception as e:
logger.error("Access file failed: %s", filename)
raise(e)
return length
def getoutputfileabs(inputfile, prefix, outputfile,index):
inputfile = os.path.abspath(inputfile)
if outputfile is None:
if prefix is None:
outputfileabs = os.path.join(os.path.dirname(inputfile), "{}{}".format(os.path.basename(inputfile),index))
else:
if os.path.isabs(prefix):
outputfileabs = os.path.join(prefix, "{}{}".format(os.path.basename(inputfile),index))
else:
outputfileabs = os.path.join(os.getcwd(), prefix, "{}{}".format(os.path.basename(inputfile),index))
elif not os.path.isabs(outputfile):
if prefix is None:
outputfileabs = os.path.join(os.getcwd(), outputfile)
else:
if os.path.isabs(prefix):
outputfileabs = os.path.join(prefix, outputfile)
else:
outputfileabs = os.path.join(os.getcwd(), prefix, outputfile)
else:
outputfileabs = outputfile
return outputfileabs
def splitFile(inputfile, position, outputdir=None, outputfile1=None, outputfile2=None):
'''
Split the inputfile into outputfile1 and outputfile2 from the position.
'''
logger = logging.getLogger('Split')
if not os.path.exists(inputfile):
logger.error("File Not Found: %s" % inputfile)
raise(Exception)
if outputfile1 and outputfile2 and outputfile1 == outputfile2:
logger.error(
"The firstfile and the secondfile can't be the same: %s" % outputfile1)
raise(Exception)
# Create dir for the output files
try:
outputfile1 = getoutputfileabs(inputfile, outputdir, outputfile1,1)
outputfolder = os.path.dirname(outputfile1)
if not os.path.exists(outputfolder):
os.makedirs(outputfolder)
outputfile2 = getoutputfileabs(inputfile, outputdir, outputfile2,2)
outputfolder = os.path.dirname(outputfile2)
if not os.path.exists(outputfolder):
os.makedirs(outputfolder)
except Exception as e:
logger.error("Can't make dir: %s" % outputfolder)
raise(e)
if position <= 0:
if outputfile2 != os.path.abspath(inputfile):
shutil.copyfile(os.path.abspath(inputfile), outputfile2)
with open(outputfile1, "wb") as fout:
fout.write(b'')
else:
inputfilesize = getFileSize(inputfile)
if position >= inputfilesize:
if outputfile1 != os.path.abspath(inputfile):
shutil.copyfile(os.path.abspath(inputfile), outputfile1)
with open(outputfile2, "wb") as fout:
fout.write(b'')
else:
try:
tempdir = tempfile.mkdtemp()
tempfile1 = os.path.join(tempdir, "file1.bin")
tempfile2 = os.path.join(tempdir, "file2.bin")
with open(inputfile, "rb") as fin:
content1 = fin.read(position)
with open(tempfile1, "wb") as fout1:
fout1.write(content1)
content2 = fin.read(inputfilesize - position)
with open(tempfile2, "wb") as fout2:
fout2.write(content2)
shutil.copyfile(tempfile1, outputfile1)
shutil.copyfile(tempfile2, outputfile2)
except Exception as e:
logger.error("Split file failed")
raise(e)
finally:
if os.path.exists(tempdir):
shutil.rmtree(tempdir)
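# Illustrative call (sketch only; the file name is a placeholder):
#   splitFile("BIOS.fd", GetPositionValue("2M"))
# With no output directory or names given, this writes BIOS.fd1 (the first 2 MiB)
# and BIOS.fd2 (the remainder) next to the input file.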
def main():
args = parser.parse_args()
status = 0
logger = logging.getLogger('Split')
if args.quiet:
logger.setLevel(logging.CRITICAL)
if args.verbose:
logger.setLevel(logging.DEBUG)
lh = logging.StreamHandler(sys.stdout)
lf = logging.Formatter("%(levelname)-8s: %(message)s")
lh.setFormatter(lf)
logger.addHandler(lh)
try:
position = GetPositionValue(args.position)
splitFile(args.inputfile, position, args.output,
args.firstfile, args.secondfile)
except Exception as e:
status = 1
return status
if __name__ == "__main__":
exit(main())
| edk2-master | BaseTools/Source/Python/Split/Split.py |
## @file
#
# Convert an AML file to a .c file containing the AML bytecode stored in a
# C array.
# By default, "Tables\Dsdt.aml" will generate "Tables\Dsdt.c".
# "Tables\Dsdt.c" will contain a C array named "dsdt_aml_code" that contains
# the AML bytecode.
#
# Copyright (c) 2020, ARM Limited. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
import argparse
import Common.EdkLogger as EdkLogger
from Common.BuildToolError import *
import sys
import os
__description__ = """
Convert an AML file to a .c file containing the AML bytecode stored in a C
array. By default, Tables\Dsdt.aml will generate Tables\Dsdt.c.
Tables\Dsdt.c will contain a C array named "dsdt_aml_code" that contains
the AML bytecode.
"""
## Parse the command line arguments.
#
# @retval A argparse.NameSpace instance, containing parsed values.
#
def ParseArgs():
# Initialize the parser.
Parser = argparse.ArgumentParser(description=__description__)
# Define the possible arguments.
Parser.add_argument(dest="InputFile",
help="Path to an input AML file to generate a .c file from.")
Parser.add_argument("-o", "--out-dir", dest="OutDir",
help="Output directory where the .c file will be generated. Default is the input file's directory.")
# Parse the input arguments.
Args = Parser.parse_args()
SplitInputName = ""
if not os.path.exists(Args.InputFile):
EdkLogger.error(__file__, FILE_OPEN_FAILURE,
ExtraData=Args.InputFile)
return None
else:
with open(Args.InputFile, "rb") as fIn:
Signature = str(fIn.read(4))
if ("DSDT" not in Signature) and ("SSDT" not in Signature):
EdkLogger.info("Invalid file type. File does not have a valid DSDT or SSDT signature: {}".format(Args.InputFile))
return None
# Get the basename of the input file.
SplitInputName = os.path.splitext(Args.InputFile)
BaseName = os.path.basename(SplitInputName[0])
# If no output directory is specified, output to the input directory.
if not Args.OutDir:
Args.OutputFile = os.path.join(os.path.dirname(Args.InputFile),
BaseName + ".c")
else:
if not os.path.exists(Args.OutDir):
os.mkdir(Args.OutDir)
Args.OutputFile = os.path.join(Args.OutDir, BaseName + ".c")
Args.BaseName = BaseName
return Args
## Convert an AML file to a .c file containing the AML bytecode stored
# in a C array.
#
# @param InputFile Path to the input AML file.
# @param OutputFile Path to the output .c file to generate.
# @param BaseName Base name of the input file.
# This is also the name of the generated .c file.
#
def AmlToC(InputFile, OutputFile, BaseName):
ArrayName = BaseName.lower() + "_aml_code"
FileHeader =\
"""
// This file has been generated from:
// -Python script: {}
// -Input AML file: {}
"""
with open(InputFile, "rb") as fIn, open(OutputFile, "w") as fOut:
# Write header.
        fOut.write(FileHeader.format(os.path.abspath(__file__), os.path.abspath(InputFile)))
# Write the array and its content.
fOut.write("unsigned char {}[] = {{\n ".format(ArrayName))
cnt = 0
byte = fIn.read(1)
while len(byte) != 0:
fOut.write("0x{0:02X}, ".format(ord(byte)))
cnt += 1
if (cnt % 8) == 0:
fOut.write("\n ")
byte = fIn.read(1)
fOut.write("\n};\n")
## Main method
#
# This method:
# 1- Initialize an EdkLogger instance.
# 2- Parses the input arguments.
# 3- Converts an AML file to a .c file containing the AML bytecode stored
# in a C array.
#
# @retval 0 Success.
# @retval 1 Error.
#
def Main():
# Initialize an EdkLogger instance.
EdkLogger.Initialize()
try:
# Parse the input arguments.
CommandArguments = ParseArgs()
if not CommandArguments:
return 1
# Convert an AML file to a .c file containing the AML bytecode stored
# in a C array.
AmlToC(CommandArguments.InputFile, CommandArguments.OutputFile, CommandArguments.BaseName)
except Exception as e:
print(e)
return 1
return 0
if __name__ == '__main__':
r = Main()
# 0-127 is a safe return range, and 1 is a standard default error
if r < 0 or r > 127: r = 1
sys.exit(r)
| edk2-master | BaseTools/Source/Python/AmlToC/AmlToC.py |
## @file
# This file is used to define the Firmware Storage Format.
#
# Copyright (c) 2021-, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
| edk2-master | BaseTools/Source/Python/FirmwareStorageFormat/__init__.py |
## @file
# This file is used to define the common C struct and functions.
#
# Copyright (c) 2021-, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
from ctypes import *
import uuid
# ZeroGuid = uuid.UUID('{00000000-0000-0000-0000-000000000000}')
# EFI_FIRMWARE_FILE_SYSTEM2_GUID = uuid.UUID('{8C8CE578-8A3D-4f1c-9935-896185C32DD3}')
# EFI_FIRMWARE_FILE_SYSTEM3_GUID = uuid.UUID('{5473C07A-3DCB-4dca-BD6F-1E9689E7349A}')
# EFI_FFS_VOLUME_TOP_FILE_GUID = uuid.UUID('{1BA0062E-C779-4582-8566-336AE8F78F09}')
EFI_FIRMWARE_FILE_SYSTEM2_GUID = uuid.UUID("8c8ce578-8a3d-4f1c-9935-896185c32dd3")
EFI_FIRMWARE_FILE_SYSTEM2_GUID_BYTE = b'x\xe5\x8c\x8c=\x8a\x1cO\x995\x89a\x85\xc3-\xd3'
# EFI_FIRMWARE_FILE_SYSTEM2_GUID_BYTE = EFI_FIRMWARE_FILE_SYSTEM2_GUID.bytes
EFI_FIRMWARE_FILE_SYSTEM3_GUID = uuid.UUID("5473C07A-3DCB-4dca-BD6F-1E9689E7349A")
# EFI_FIRMWARE_FILE_SYSTEM3_GUID_BYTE = b'x\xe5\x8c\x8c=\x8a\x1cO\x995\x89a\x85\xc3-\xd3'
EFI_FIRMWARE_FILE_SYSTEM3_GUID_BYTE = b'z\xc0sT\xcb=\xcaM\xbdo\x1e\x96\x89\xe74\x9a'
EFI_SYSTEM_NVDATA_FV_GUID = uuid.UUID("fff12b8d-7696-4c8b-a985-2747075b4f50")
EFI_SYSTEM_NVDATA_FV_GUID_BYTE = b"\x8d+\xf1\xff\x96v\x8bL\xa9\x85'G\x07[OP"
EFI_FFS_VOLUME_TOP_FILE_GUID = uuid.UUID("1ba0062e-c779-4582-8566-336ae8f78f09")
EFI_FFS_VOLUME_TOP_FILE_GUID_BYTE = b'.\x06\xa0\x1by\xc7\x82E\x85f3j\xe8\xf7\x8f\t'
ZEROVECTOR_BYTE = b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
PADVECTOR = uuid.UUID("ffffffff-ffff-ffff-ffff-ffffffffffff")
FVH_SIGNATURE = b'_FVH'
#Alignment
SECTION_COMMON_ALIGNMENT = 4
FFS_COMMON_ALIGNMENT = 8
class GUID(Structure):
_pack_ = 1
_fields_ = [
('Guid1', c_uint32),
('Guid2', c_uint16),
('Guid3', c_uint16),
('Guid4', ARRAY(c_uint8, 8)),
]
def from_list(self, listformat: list) -> None:
self.Guid1 = listformat[0]
self.Guid2 = listformat[1]
self.Guid3 = listformat[2]
for i in range(8):
self.Guid4[i] = listformat[i+3]
def __cmp__(self, otherguid) -> bool:
if not isinstance(otherguid, GUID):
return 'Input is not the GUID instance!'
rt = False
if self.Guid1 == otherguid.Guid1 and self.Guid2 == otherguid.Guid2 and self.Guid3 == otherguid.Guid3:
rt = True
for i in range(8):
rt = rt & (self.Guid4[i] == otherguid.Guid4[i])
return rt
def ModifyGuidFormat(target_guid: str) -> GUID:
target_guid = target_guid.replace('-', '')
target_list = []
start = [0,8,12,16,18,20,22,24,26,28,30]
end = [8,12,16,18,20,22,24,26,28,30,32]
num = len(start)
for pos in range(num):
new_value = int(target_guid[start[pos]:end[pos]], 16)
target_list.append(new_value)
new_format = GUID()
new_format.from_list(target_list)
return new_format
# Get data from ctypes to bytes.
def struct2stream(s) -> bytes:
length = sizeof(s)
p = cast(pointer(s), POINTER(c_char * length))
return p.contents.raw
def GetPadSize(Size: int, alignment: int) -> int:
if Size % alignment == 0:
return 0
Pad_Size = alignment - Size % alignment
return Pad_Size
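# Minimal self-test sketch (not part of the original module); it only runs when
# this file is executed directly and exercises the helpers defined above.
if __name__ == '__main__':
    # Convert a registry-format GUID string into the packed GUID structure.
    DemoGuid = ModifyGuidFormat("8c8ce578-8a3d-4f1c-9935-896185c32dd3")
    # Serializing the ctypes structure yields the 16 little-endian GUID bytes.
    assert struct2stream(DemoGuid) == EFI_FIRMWARE_FILE_SYSTEM2_GUID_BYTE
    # Padding needed to align a 10-byte item to the 8-byte FFS boundary is 6.
    assert GetPadSize(10, FFS_COMMON_ALIGNMENT) == 6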
| edk2-master | BaseTools/Source/Python/FirmwareStorageFormat/Common.py |
## @file
# This file is used to define the FV Header C Struct.
#
# Copyright (c) 2021-, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
from struct import *
from ctypes import *
from FirmwareStorageFormat.Common import *
class EFI_FV_BLOCK_MAP_ENTRY(Structure):
_pack_ = 1
_fields_ = [
('NumBlocks', c_uint32),
('Length', c_uint32),
]
class EFI_FIRMWARE_VOLUME_HEADER(Structure):
_fields_ = [
('ZeroVector', ARRAY(c_uint8, 16)),
('FileSystemGuid', GUID),
('FvLength', c_uint64),
('Signature', c_uint32),
('Attributes', c_uint32),
('HeaderLength', c_uint16),
('Checksum', c_uint16),
('ExtHeaderOffset', c_uint16),
('Reserved', c_uint8),
('Revision', c_uint8),
('BlockMap', ARRAY(EFI_FV_BLOCK_MAP_ENTRY, 1)),
]
def Refine_FV_Header(nums):
class EFI_FIRMWARE_VOLUME_HEADER(Structure):
_fields_ = [
('ZeroVector', ARRAY(c_uint8, 16)),
('FileSystemGuid', GUID),
('FvLength', c_uint64),
('Signature', c_uint32),
('Attributes', c_uint32),
('HeaderLength', c_uint16),
('Checksum', c_uint16),
('ExtHeaderOffset', c_uint16),
('Reserved', c_uint8),
('Revision', c_uint8),
('BlockMap', ARRAY(EFI_FV_BLOCK_MAP_ENTRY, nums)),
]
return EFI_FIRMWARE_VOLUME_HEADER
class EFI_FIRMWARE_VOLUME_EXT_HEADER(Structure):
_fields_ = [
('FvName', GUID),
('ExtHeaderSize', c_uint32)
]
class EFI_FIRMWARE_VOLUME_EXT_ENTRY(Structure):
_fields_ = [
('ExtEntrySize', c_uint16),
('ExtEntryType', c_uint16)
]
class EFI_FIRMWARE_VOLUME_EXT_ENTRY_OEM_TYPE_0(Structure):
_fields_ = [
('Hdr', EFI_FIRMWARE_VOLUME_EXT_ENTRY),
('TypeMask', c_uint32)
]
class EFI_FIRMWARE_VOLUME_EXT_ENTRY_OEM_TYPE(Structure):
_fields_ = [
('Hdr', EFI_FIRMWARE_VOLUME_EXT_ENTRY),
('TypeMask', c_uint32),
('Types', ARRAY(GUID, 1))
]
def Refine_FV_EXT_ENTRY_OEM_TYPE_Header(nums: int) -> EFI_FIRMWARE_VOLUME_EXT_ENTRY_OEM_TYPE:
class EFI_FIRMWARE_VOLUME_EXT_ENTRY_OEM_TYPE(Structure):
_fields_ = [
('Hdr', EFI_FIRMWARE_VOLUME_EXT_ENTRY),
('TypeMask', c_uint32),
('Types', ARRAY(GUID, nums))
]
    return EFI_FIRMWARE_VOLUME_EXT_ENTRY_OEM_TYPE
class EFI_FIRMWARE_VOLUME_EXT_ENTRY_GUID_TYPE_0(Structure):
_fields_ = [
('Hdr', EFI_FIRMWARE_VOLUME_EXT_ENTRY),
('FormatType', GUID)
]
class EFI_FIRMWARE_VOLUME_EXT_ENTRY_GUID_TYPE(Structure):
_fields_ = [
('Hdr', EFI_FIRMWARE_VOLUME_EXT_ENTRY),
('FormatType', GUID),
('Data', ARRAY(c_uint8, 1))
]
def Refine_FV_EXT_ENTRY_GUID_TYPE_Header(nums: int) -> EFI_FIRMWARE_VOLUME_EXT_ENTRY_GUID_TYPE:
class EFI_FIRMWARE_VOLUME_EXT_ENTRY_GUID_TYPE(Structure):
_fields_ = [
('Hdr', EFI_FIRMWARE_VOLUME_EXT_ENTRY),
('FormatType', GUID),
('Data', ARRAY(c_uint8, nums))
]
    return EFI_FIRMWARE_VOLUME_EXT_ENTRY_GUID_TYPE
class EFI_FIRMWARE_VOLUME_EXT_ENTRY_USED_SIZE_TYPE(Structure):
_fields_ = [
('Hdr', EFI_FIRMWARE_VOLUME_EXT_ENTRY),
('UsedSize', c_uint32)
]
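# Minimal self-test sketch (not part of the original module); it only runs when
# this file is executed directly and shows how the variable-length header
# factory above is meant to be used.
if __name__ == '__main__':
    # A header refined for two block-map entries is exactly one extra
    # EFI_FV_BLOCK_MAP_ENTRY (8 bytes) larger than the single-entry default.
    TwoEntryHeader = Refine_FV_Header(2)
    assert sizeof(TwoEntryHeader) - sizeof(EFI_FIRMWARE_VOLUME_HEADER) == sizeof(EFI_FV_BLOCK_MAP_ENTRY)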
| edk2-master | BaseTools/Source/Python/FirmwareStorageFormat/FvHeader.py |
## @file
# This file is used to define the Ffs Header C Struct.
#
# Copyright (c) 2021-, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
from struct import *
from ctypes import *
from FirmwareStorageFormat.Common import *
EFI_FFS_FILE_HEADER_LEN = 24
EFI_FFS_FILE_HEADER2_LEN = 32
class CHECK_SUM(Structure):
_pack_ = 1
_fields_ = [
('Header', c_uint8),
('File', c_uint8),
]
class EFI_FFS_INTEGRITY_CHECK(Union):
_pack_ = 1
_fields_ = [
('Checksum', CHECK_SUM),
('Checksum16', c_uint16),
]
class EFI_FFS_FILE_HEADER(Structure):
_pack_ = 1
_fields_ = [
('Name', GUID),
('IntegrityCheck', EFI_FFS_INTEGRITY_CHECK),
('Type', c_uint8),
('Attributes', c_uint8),
('Size', ARRAY(c_uint8, 3)),
('State', c_uint8),
]
@property
def FFS_FILE_SIZE(self) -> int:
return self.Size[0] | self.Size[1] << 8 | self.Size[2] << 16
@property
def HeaderLength(self) -> int:
return 24
class EFI_FFS_FILE_HEADER2(Structure):
_pack_ = 1
_fields_ = [
('Name', GUID),
('IntegrityCheck', EFI_FFS_INTEGRITY_CHECK),
('Type', c_uint8),
('Attributes', c_uint8),
('Size', ARRAY(c_uint8, 3)),
('State', c_uint8),
('ExtendedSize', c_uint64),
]
@property
def FFS_FILE_SIZE(self) -> int:
return self.ExtendedSize
@property
def HeaderLength(self) -> int:
return 32
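# Minimal self-test sketch (not part of the original module); it only runs when
# this file is executed directly. The 3-byte Size field is little-endian, so a
# file size of 0x012345 is stored as the bytes [0x45, 0x23, 0x01].
if __name__ == '__main__':
    DemoHeader = EFI_FFS_FILE_HEADER()
    for Index, Byte in enumerate((0x45, 0x23, 0x01)):
        DemoHeader.Size[Index] = Byte
    assert DemoHeader.FFS_FILE_SIZE == 0x012345
    assert sizeof(DemoHeader) == EFI_FFS_FILE_HEADER_LEN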
| edk2-master | BaseTools/Source/Python/FirmwareStorageFormat/FfsFileHeader.py |
## @file
# This file is used to define the Section Header C Struct.
#
# Copyright (c) 2021-, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
from struct import *
from ctypes import *
from FirmwareStorageFormat.Common import *
EFI_COMMON_SECTION_HEADER_LEN = 4
EFI_COMMON_SECTION_HEADER2_LEN = 8
class EFI_COMMON_SECTION_HEADER(Structure):
_pack_ = 1
_fields_ = [
('Size', ARRAY(c_uint8, 3)),
('Type', c_uint8),
]
@property
def SECTION_SIZE(self) -> int:
return self.Size[0] | self.Size[1] << 8 | self.Size[2] << 16
def Common_Header_Size(self) -> int:
return 4
class EFI_COMMON_SECTION_HEADER2(Structure):
_pack_ = 1
_fields_ = [
('Size', ARRAY(c_uint8, 3)),
('Type', c_uint8),
('ExtendedSize', c_uint32),
]
@property
def SECTION_SIZE(self) -> int:
return self.ExtendedSize
def Common_Header_Size(self) -> int:
return 8
class EFI_COMPRESSION_SECTION(Structure):
_pack_ = 1
_fields_ = [
('UncompressedLength', c_uint32),
('CompressionType', c_uint8),
]
def ExtHeaderSize(self) -> int:
return 5
class EFI_FREEFORM_SUBTYPE_GUID_SECTION(Structure):
_pack_ = 1
_fields_ = [
('SubTypeGuid', GUID),
]
def ExtHeaderSize(self) -> int:
return 16
class EFI_GUID_DEFINED_SECTION(Structure):
_pack_ = 1
_fields_ = [
('SectionDefinitionGuid', GUID),
('DataOffset', c_uint16),
('Attributes', c_uint16),
]
def ExtHeaderSize(self) -> int:
return 20
def Get_USER_INTERFACE_Header(nums: int):
class EFI_SECTION_USER_INTERFACE(Structure):
_pack_ = 1
_fields_ = [
('FileNameString', ARRAY(c_uint16, nums)),
]
def ExtHeaderSize(self) -> int:
return 2 * nums
def GetUiString(self) -> str:
UiString = ''
for i in range(nums):
if self.FileNameString[i]:
UiString += chr(self.FileNameString[i])
return UiString
return EFI_SECTION_USER_INTERFACE
def Get_VERSION_Header(nums: int):
class EFI_SECTION_VERSION(Structure):
_pack_ = 1
_fields_ = [
('BuildNumber', c_uint16),
('VersionString', ARRAY(c_uint16, nums)),
]
def ExtHeaderSize(self) -> int:
return 2 * (nums+1)
def GetVersionString(self) -> str:
VersionString = ''
for i in range(nums):
if self.VersionString[i]:
VersionString += chr(self.VersionString[i])
return VersionString
return EFI_SECTION_VERSION
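# Hedged usage sketch, not part of the original module: Get_USER_INTERFACE_Header()
# builds a UI-section class sized for 'nums' UCS-2 characters.  The instance
# below is filled by hand purely to demonstrate GetUiString() decoding; the
# string content is illustrative.
if __name__ == '__main__':
    UiCls = Get_USER_INTERFACE_Header(5)
    UiSection = UiCls()
    for Index, Char in enumerate('Hello'):
        UiSection.FileNameString[Index] = ord(Char)
    print(UiSection.GetUiString())       # -> Hello
    print(UiSection.ExtHeaderSize())     # -> 10 (2 bytes per character)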
| edk2-master | BaseTools/Source/Python/FirmwareStorageFormat/SectionHeader.py |
## @file
# Install distribution package.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
MkPkg
'''
##
# Import Modules
#
from os import remove
from os import getcwd
from os import chdir
import os.path
from sys import stdin
from sys import platform
from traceback import format_exc
from platform import python_version
from hashlib import md5
from time import strftime
from time import localtime
from uuid import uuid4
from Logger import StringTable as ST
from Logger.ToolError import OPTION_UNKNOWN_ERROR
from Logger.ToolError import OPTION_VALUE_INVALID
from Logger.ToolError import ABORT_ERROR
from Logger.ToolError import UPT_REPKG_ERROR
from Logger.ToolError import CODE_ERROR
from Logger.ToolError import FatalError
from Logger.ToolError import FILE_NOT_FOUND
import Logger.Log as Logger
from Xml.XmlParser import DistributionPackageXml
from Xml.IniToXml import IniToXml
from Library import GlobalData
from Library.ParserValidate import IsValidPath
from Core.DistributionPackageClass import DistributionPackageClass
from Core.PackageFile import PackageFile
from Common.MultipleWorkspace import MultipleWorkspace as mws
## CheckForExistingDp
#
# Check if there is a same name DP file existing
# @param Path: The path to be checked
#
def CheckForExistingDp(Path):
if os.path.exists(Path):
Logger.Info(ST.MSG_DISTRIBUTION_PACKAGE_FILE_EXISTS % Path)
Input = stdin.readline()
Input = Input.replace('\r', '').replace('\n', '')
if Input.upper() != "Y":
Logger.Error("\nMkPkg", ABORT_ERROR, ST.ERR_USER_ABORT, RaiseError=True)
## Tool entrance method
#
# This method mainly dispatch specific methods per the command line options.
# If no error found, return zero value so the caller of this tool can know
# if it's executed successfully or not.
#
#
def Main(Options = None):
if Options is None:
Logger.Error("\nMkPkg", OPTION_UNKNOWN_ERROR, ST.ERR_OPTION_NOT_FOUND)
try:
DataBase = GlobalData.gDB
ContentFileClosed = True
WorkspaceDir = GlobalData.gWORKSPACE
#
# Init PackFileToCreate
#
if not Options.PackFileToCreate:
Logger.Error("\nMkPkg", OPTION_UNKNOWN_ERROR, ST.ERR_OPTION_NOT_FOUND)
#
# Handle if the distribution package file already exists
#
CheckForExistingDp(Options.PackFileToCreate)
#
# Check package file existing and valid
#
CheckFileList('.DEC', Options.PackageFileList, ST.ERR_INVALID_PACKAGE_NAME, ST.ERR_INVALID_PACKAGE_PATH)
#
# Check module file existing and valid
#
CheckFileList('.INF', Options.ModuleFileList, ST.ERR_INVALID_MODULE_NAME, ST.ERR_INVALID_MODULE_PATH)
#
# Get list of files that installed with RePackage attribute available
#
RePkgDict = DataBase.GetRePkgDict()
ContentFile = PackageFile(GlobalData.gCONTENT_FILE, "w")
ContentFileClosed = False
#
# Add temp distribution header
#
if Options.PackageInformationDataFile:
XmlFile = IniToXml(Options.PackageInformationDataFile)
DistPkg = DistributionPackageXml().FromXml(XmlFile)
remove(XmlFile)
#
# add distribution level tool/misc files
# before pack, current dir should be workspace dir, else the full
# path will be in the pack file
#
Cwd = getcwd()
chdir(WorkspaceDir)
ToolObject = DistPkg.Tools
MiscObject = DistPkg.MiscellaneousFiles
FileList = []
if ToolObject:
FileList += ToolObject.GetFileList()
if MiscObject:
FileList += MiscObject.GetFileList()
for FileObject in FileList:
#
# If you have unicode file names, please convert them to byte
# strings in your desired encoding before passing them to
# write().
#
FromFile = os.path.normpath(FileObject.GetURI()).encode('utf_8')
FileFullPath = mws.join(WorkspaceDir, FromFile)
if FileFullPath in RePkgDict:
(DpGuid, DpVersion, DpName, Repackage) = RePkgDict[FileFullPath]
if not Repackage:
Logger.Error("\nMkPkg",
UPT_REPKG_ERROR,
ST.ERR_UPT_REPKG_ERROR,
ExtraData=ST.MSG_REPKG_CONFLICT %\
(FileFullPath, DpGuid, DpVersion, DpName)
)
else:
DistPkg.Header.RePackage = True
ContentFile.PackFile(FromFile)
chdir(Cwd)
#
# Add init dp information
#
else:
DistPkg = DistributionPackageClass()
DistPkg.Header.Name = 'Distribution Package'
DistPkg.Header.Guid = str(uuid4())
DistPkg.Header.Version = '1.0'
DistPkg.GetDistributionPackage(WorkspaceDir, Options.PackageFileList, \
Options.ModuleFileList)
FileList, MetaDataFileList = DistPkg.GetDistributionFileList()
for File in FileList + MetaDataFileList:
FileFullPath = os.path.normpath(os.path.join(WorkspaceDir, File))
#
# check whether file was included in a distribution that can not
# be repackaged
#
if FileFullPath in RePkgDict:
(DpGuid, DpVersion, DpName, Repackage) = RePkgDict[FileFullPath]
if not Repackage:
Logger.Error("\nMkPkg",
UPT_REPKG_ERROR,
ST.ERR_UPT_REPKG_ERROR,
ExtraData = \
ST.MSG_REPKG_CONFLICT %(FileFullPath, DpName, \
DpGuid, DpVersion)
)
else:
DistPkg.Header.RePackage = True
Cwd = getcwd()
chdir(WorkspaceDir)
ContentFile.PackFiles(FileList)
chdir(Cwd)
Logger.Verbose(ST.MSG_COMPRESS_DISTRIBUTION_PKG)
ContentFile.Close()
ContentFileClosed = True
#
# Add Md5Signature
#
DistPkg.Header.Signature = md5(open(str(ContentFile), 'rb').read()).hexdigest()
#
# Add current Date
#
DistPkg.Header.Date = str(strftime("%Y-%m-%dT%H:%M:%S", localtime()))
#
# Finish final dp file
#
DistPkgFile = PackageFile(Options.PackFileToCreate, "w")
DistPkgFile.PackFile(str(ContentFile))
DistPkgXml = DistributionPackageXml()
DistPkgFile.PackData(DistPkgXml.ToXml(DistPkg), GlobalData.gDESC_FILE)
DistPkgFile.Close()
Logger.Quiet(ST.MSG_FINISH)
ReturnCode = 0
except FatalError as XExcept:
ReturnCode = XExcept.args[0]
if Logger.GetLevel() <= Logger.DEBUG_9:
Logger.Quiet(ST.MSG_PYTHON_ON % \
(python_version(), platform) + format_exc())
except KeyboardInterrupt:
ReturnCode = ABORT_ERROR
if Logger.GetLevel() <= Logger.DEBUG_9:
Logger.Quiet(ST.MSG_PYTHON_ON % \
(python_version(), platform) + format_exc())
except OSError:
pass
except:
Logger.Error(
"\nMkPkg",
CODE_ERROR,
ST.ERR_UNKNOWN_FATAL_CREATING_ERR % \
Options.PackFileToCreate,
ExtraData=ST.MSG_SEARCH_FOR_HELP % ST.MSG_EDKII_MAIL_ADDR,
RaiseError=False
)
Logger.Quiet(ST.MSG_PYTHON_ON % \
(python_version(), platform) + format_exc())
ReturnCode = CODE_ERROR
finally:
if os.path.exists(GlobalData.gCONTENT_FILE):
if not ContentFileClosed:
ContentFile.Close()
os.remove(GlobalData.gCONTENT_FILE)
return ReturnCode
## CheckFileList
#
# @param QualifiedExt: QualifiedExt
# @param FileList: FileList
# @param ErrorStringExt: ErrorStringExt
# @param ErrorStringFullPath: ErrorStringFullPath
#
def CheckFileList(QualifiedExt, FileList, ErrorStringExt, ErrorStringFullPath):
if not FileList:
return
WorkspaceDir = GlobalData.gWORKSPACE
WorkspaceDir = os.path.normpath(WorkspaceDir)
for Item in FileList:
Ext = os.path.splitext(Item)[1]
if Ext.upper() != QualifiedExt.upper():
Logger.Error("\nMkPkg", OPTION_VALUE_INVALID, \
ErrorStringExt % Item)
Item = os.path.normpath(Item)
Path = mws.join(WorkspaceDir, Item)
if not os.path.exists(Path):
Logger.Error("\nMkPkg", FILE_NOT_FOUND, ST.ERR_NOT_FOUND % Item)
elif Item == Path:
Logger.Error("\nMkPkg", OPTION_VALUE_INVALID,
ErrorStringFullPath % Item)
elif not IsValidPath(Item, WorkspaceDir):
Logger.Error("\nMkPkg", OPTION_VALUE_INVALID, \
ErrorStringExt % Item)
if not os.path.split(Item)[0]:
Logger.Error("\nMkPkg", OPTION_VALUE_INVALID, \
ST.ERR_INVALID_METAFILE_PATH % Item)
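# Hedged sketch, not part of the original tool: CheckFileList() above accepts
# only workspace-relative meta files carrying the expected extension.  The loop
# below mirrors just the extension test on made-up sample names.
if __name__ == '__main__':
    for Candidate in ['MyPkg/MyPkg.dec', 'MyModule/MyModule.inf', 'ReadMe.txt']:
        Ext = os.path.splitext(Candidate)[1]
        Verdict = 'DEC' if Ext.upper() == '.DEC' else 'not a DEC file'
        print('%s -> %s' % (Candidate, Verdict))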
| edk2-master | BaseTools/Source/Python/UPT/MkPkg.py |
## @file
# Replace distribution package.
#
# Copyright (c) 2014 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
"""
Replace a distribution package
"""
##
# Import Modules
#
from shutil import rmtree
from traceback import format_exc
from platform import python_version
from sys import platform
from Logger import StringTable as ST
from Logger.ToolError import UNKNOWN_ERROR
from Logger.ToolError import FatalError
from Logger.ToolError import ABORT_ERROR
from Logger.ToolError import CODE_ERROR
from Logger.ToolError import UPT_ALREADY_INSTALLED_ERROR
import Logger.Log as Logger
from Core.DependencyRules import DependencyRules
from Library import GlobalData
from InstallPkg import UnZipDp
from InstallPkg import InstallDp
from RmPkg import GetInstalledDpInfo
from RmPkg import RemoveDist
## Tool entrance method
#
# This method mainly dispatch specific methods per the command line options.
# If no error found, return zero value so the caller of this tool can know
# if it's executed successfully or not.
#
# @param Options: command Options
#
def Main(Options = None):
ContentZipFile, DistFile = None, None
try:
DataBase = GlobalData.gDB
WorkspaceDir = GlobalData.gWORKSPACE
Dep = DependencyRules(DataBase)
DistPkg, ContentZipFile, DpPkgFileName, DistFile = UnZipDp(WorkspaceDir, Options.PackFileToReplace)
StoredDistFile, OrigDpGuid, OrigDpVersion = GetInstalledDpInfo(Options.PackFileToBeReplaced, \
Dep, DataBase, WorkspaceDir)
#
# check dependency
#
CheckReplaceDpx(Dep, DistPkg, OrigDpGuid, OrigDpVersion)
#
# Remove the old distribution
#
RemoveDist(OrigDpGuid, OrigDpVersion, StoredDistFile, DataBase, WorkspaceDir, Options.Yes)
#
# Install the new distribution
#
InstallDp(DistPkg, DpPkgFileName, ContentZipFile, Options, Dep, WorkspaceDir, DataBase)
ReturnCode = 0
except FatalError as XExcept:
ReturnCode = XExcept.args[0]
if Logger.GetLevel() <= Logger.DEBUG_9:
Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(),
platform) + format_exc())
except KeyboardInterrupt:
ReturnCode = ABORT_ERROR
if Logger.GetLevel() <= Logger.DEBUG_9:
Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(),
platform) + format_exc())
except:
ReturnCode = CODE_ERROR
Logger.Error(
"\nReplacePkg",
CODE_ERROR,
ST.ERR_UNKNOWN_FATAL_REPLACE_ERR % (Options.PackFileToReplace, Options.PackFileToBeReplaced),
ExtraData=ST.MSG_SEARCH_FOR_HELP % ST.MSG_EDKII_MAIL_ADDR,
RaiseError=False
)
Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(),
platform) + format_exc())
finally:
Logger.Quiet(ST.MSG_REMOVE_TEMP_FILE_STARTED)
if DistFile:
DistFile.Close()
if ContentZipFile:
ContentZipFile.Close()
for TempDir in GlobalData.gUNPACK_DIR:
rmtree(TempDir)
GlobalData.gUNPACK_DIR = []
Logger.Quiet(ST.MSG_REMOVE_TEMP_FILE_DONE)
if ReturnCode == 0:
Logger.Quiet(ST.MSG_FINISH)
return ReturnCode
def CheckReplaceDpx(Dep, DistPkg, OrigDpGuid, OrigDpVersion):
NewDpPkgList = []
for PkgInfo in DistPkg.PackageSurfaceArea:
Guid, Version = PkgInfo[0], PkgInfo[1]
NewDpPkgList.append((Guid, Version))
NewDpInfo = "%s %s" % (DistPkg.Header.GetGuid(), DistPkg.Header.GetVersion())
OrigDpInfo = "%s %s" % (OrigDpGuid, OrigDpVersion)
#
# check whether new distribution is already installed and not replacing itself
#
if (NewDpInfo != OrigDpInfo):
if Dep.CheckDpExists(DistPkg.Header.GetGuid(), DistPkg.Header.GetVersion()):
Logger.Error("\nReplacePkg", UPT_ALREADY_INSTALLED_ERROR,
ST.WRN_DIST_PKG_INSTALLED,
ExtraData=ST.MSG_REPLACE_ALREADY_INSTALLED_DP)
#
# check whether the original distribution could be replaced by new distribution
#
Logger.Verbose(ST.MSG_CHECK_DP_FOR_REPLACE%(NewDpInfo, OrigDpInfo))
DepInfoResult = Dep.CheckDpDepexForReplace(OrigDpGuid, OrigDpVersion, NewDpPkgList)
Replaceable = DepInfoResult[0]
if not Replaceable:
Logger.Error("\nReplacePkg", UNKNOWN_ERROR,
ST.ERR_PACKAGE_NOT_MATCH_DEPENDENCY)
#
# check whether new distribution could be installed by dependency rule
#
Logger.Verbose(ST.MSG_CHECK_DP_FOR_INSTALL%str(NewDpInfo))
if not Dep.ReplaceCheckNewDpDepex(DistPkg, OrigDpGuid, OrigDpVersion):
Logger.Error("\nReplacePkg", UNKNOWN_ERROR,
ST.ERR_PACKAGE_NOT_MATCH_DEPENDENCY,
ExtraData=DistPkg.Header.Name)
| edk2-master | BaseTools/Source/Python/UPT/ReplacePkg.py |
## @file
#
# This file is for build version number auto generation
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
Build version information
'''
gBUILD_VERSION = "Developer Build based on Revision: Unknown"
| edk2-master | BaseTools/Source/Python/UPT/BuildVersion.py |
## @file
#
# This file is the main entry for UPT
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
UPT
'''
## import modules
#
import locale
import sys
from imp import reload
encoding = locale.getdefaultlocale()[1]
if encoding:
reload(sys)
sys.setdefaultencoding(encoding)
from Core import FileHook
import os.path
from sys import platform
import platform as pf
from optparse import OptionParser
from traceback import format_exc
from platform import python_version
from Logger import StringTable as ST
import Logger.Log as Logger
from Logger.StringTable import MSG_VERSION
from Logger.StringTable import MSG_DESCRIPTION
from Logger.StringTable import MSG_USAGE
from Logger.ToolError import FILE_NOT_FOUND
from Logger.ToolError import OPTION_MISSING
from Logger.ToolError import FILE_TYPE_MISMATCH
from Logger.ToolError import OPTION_CONFLICT
from Logger.ToolError import FatalError
from Logger.ToolError import UPT_ALREADY_INSTALLED_ERROR
from Common.MultipleWorkspace import MultipleWorkspace as mws
import MkPkg
import InstallPkg
import RmPkg
import InventoryWs
import ReplacePkg
import TestInstall
from Library.Misc import GetWorkspace
from Library import GlobalData
from Core.IpiDb import IpiDatabase
from BuildVersion import gBUILD_VERSION
## CheckConflictOption
#
# CheckConflictOption
#
def CheckConflictOption(Opt):
if (Opt.PackFileToCreate or Opt.PackFileToInstall or Opt.PackFileToRemove or Opt.PackFileToReplace) \
and Opt.InventoryWs:
Logger.Error("UPT", OPTION_CONFLICT, ExtraData=ST.ERR_L_OA_EXCLUSIVE)
elif Opt.PackFileToReplace and (Opt.PackFileToCreate or Opt.PackFileToInstall or Opt.PackFileToRemove):
Logger.Error("UPT", OPTION_CONFLICT, ExtraData=ST.ERR_U_ICR_EXCLUSIVE)
elif (Opt.PackFileToCreate and Opt.PackFileToInstall and Opt.PackFileToRemove):
Logger.Error("UPT", OPTION_CONFLICT, ExtraData=ST.ERR_REQUIRE_I_C_R_OPTION)
elif Opt.PackFileToCreate and Opt.PackFileToInstall:
Logger.Error("UPT", OPTION_CONFLICT, ExtraData=ST.ERR_I_C_EXCLUSIVE)
elif Opt.PackFileToInstall and Opt.PackFileToRemove:
Logger.Error("UPT", OPTION_CONFLICT, ExtraData=ST.ERR_I_R_EXCLUSIVE)
elif Opt.PackFileToCreate and Opt.PackFileToRemove:
Logger.Error("UPT", OPTION_CONFLICT, ExtraData=ST.ERR_C_R_EXCLUSIVE)
elif Opt.TestDistFiles and (Opt.PackFileToCreate or Opt.PackFileToInstall \
or Opt.PackFileToRemove or Opt.PackFileToReplace):
Logger.Error("UPT", OPTION_CONFLICT, ExtraData=ST.ERR_C_R_EXCLUSIVE)
if Opt.CustomPath and Opt.UseGuidedPkgPath:
Logger.Warn("UPT", ST.WARN_CUSTOMPATH_OVERRIDE_USEGUIDEDPATH)
Opt.UseGuidedPkgPath = False
## SetLogLevel
#
def SetLogLevel(Opt):
if Opt.opt_verbose:
Logger.SetLevel(Logger.VERBOSE)
elif Opt.opt_quiet:
Logger.SetLevel(Logger.QUIET + 1)
elif Opt.debug_level is not None:
if Opt.debug_level < 0 or Opt.debug_level > 9:
Logger.Warn("UPT", ST.ERR_DEBUG_LEVEL)
Logger.SetLevel(Logger.INFO)
else:
Logger.SetLevel(Opt.debug_level + 1)
elif Opt.opt_slient:
Logger.SetLevel(Logger.SILENT)
else:
Logger.SetLevel(Logger.INFO)
## Main
#
# Main
#
def Main():
Logger.Initialize()
Parser = OptionParser(version=(MSG_VERSION + ' Build ' + gBUILD_VERSION), description=MSG_DESCRIPTION,
prog="UPT.exe", usage=MSG_USAGE)
Parser.add_option("-d", "--debug", action="store", type="int", dest="debug_level", help=ST.HLP_PRINT_DEBUG_INFO)
Parser.add_option("-v", "--verbose", action="store_true", dest="opt_verbose",
help=ST.HLP_PRINT_INFORMATIONAL_STATEMENT)
Parser.add_option("-s", "--silent", action="store_true", dest="opt_slient", help=ST.HLP_RETURN_NO_DISPLAY)
Parser.add_option("-q", "--quiet", action="store_true", dest="opt_quiet", help=ST.HLP_RETURN_AND_DISPLAY)
Parser.add_option("-i", "--install", action="append", type="string", dest="Install_Distribution_Package_File",
help=ST.HLP_SPECIFY_PACKAGE_NAME_INSTALL)
Parser.add_option("-c", "--create", action="store", type="string", dest="Create_Distribution_Package_File",
help=ST.HLP_SPECIFY_PACKAGE_NAME_CREATE)
Parser.add_option("-r", "--remove", action="store", type="string", dest="Remove_Distribution_Package_File",
help=ST.HLP_SPECIFY_PACKAGE_NAME_REMOVE)
Parser.add_option("-t", "--template", action="store", type="string", dest="Package_Information_Data_File",
help=ST.HLP_SPECIFY_TEMPLATE_NAME_CREATE)
Parser.add_option("-p", "--dec-filename", action="append", type="string", dest="EDK2_DEC_Filename",
help=ST.HLP_SPECIFY_DEC_NAME_CREATE)
Parser.add_option("-m", "--inf-filename", action="append", type="string", dest="EDK2_INF_Filename",
help=ST.HLP_SPECIFY_INF_NAME_CREATE)
Parser.add_option("-l", "--list", action="store_true", dest="List_Dist_Installed",
help=ST.HLP_LIST_DIST_INSTALLED)
Parser.add_option("-f", "--force", action="store_true", dest="Yes", help=ST.HLP_DISABLE_PROMPT)
Parser.add_option("-n", "--custom-path", action="store_true", dest="CustomPath", help=ST.HLP_CUSTOM_PATH_PROMPT)
Parser.add_option("-x", "--free-lock", action="store_true", dest="SkipLock", help=ST.HLP_SKIP_LOCK_CHECK)
Parser.add_option("-u", "--replace", action="store", type="string", dest="Replace_Distribution_Package_File",
help=ST.HLP_SPECIFY_PACKAGE_NAME_REPLACE)
Parser.add_option("-o", "--original", action="store", type="string", dest="Original_Distribution_Package_File",
help=ST.HLP_SPECIFY_PACKAGE_NAME_TO_BE_REPLACED)
Parser.add_option("--use-guided-paths", action="store_true", dest="Use_Guided_Paths", help=ST.HLP_USE_GUIDED_PATHS)
Parser.add_option("-j", "--test-install", action="append", type="string",
dest="Test_Install_Distribution_Package_Files", help=ST.HLP_TEST_INSTALL)
Opt = Parser.parse_args()[0]
Var2Var = [
("PackageInformationDataFile", Opt.Package_Information_Data_File),
("PackFileToInstall", Opt.Install_Distribution_Package_File),
("PackFileToCreate", Opt.Create_Distribution_Package_File),
("PackFileToRemove", Opt.Remove_Distribution_Package_File),
("PackageFileList", Opt.EDK2_DEC_Filename),
("ModuleFileList", Opt.EDK2_INF_Filename),
("InventoryWs", Opt.List_Dist_Installed),
("PackFileToReplace", Opt.Replace_Distribution_Package_File),
("PackFileToBeReplaced", Opt.Original_Distribution_Package_File),
("UseGuidedPkgPath", Opt.Use_Guided_Paths),
("TestDistFiles", Opt.Test_Install_Distribution_Package_Files)
]
for Var in Var2Var:
setattr(Opt, Var[0], Var[1])
try:
GlobalData.gWORKSPACE, GlobalData.gPACKAGE_PATH = GetWorkspace()
except FatalError as XExcept:
if Logger.GetLevel() <= Logger.DEBUG_9:
Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + format_exc())
return XExcept.args[0]
# Support WORKSPACE is a long path
# Only works for windows system
if pf.system() == 'Windows':
Vol = 'B:'
for Index in range(90, 65, -1):
Vol = chr(Index) + ':'
if not os.path.isdir(Vol):
os.system('subst %s "%s"' % (Vol, GlobalData.gWORKSPACE))
break
GlobalData.gWORKSPACE = '%s\\' % Vol
WorkspaceDir = GlobalData.gWORKSPACE
SetLogLevel(Opt)
Mgr = FileHook.RecoverMgr(WorkspaceDir)
FileHook.SetRecoverMgr(Mgr)
GlobalData.gDB = IpiDatabase(os.path.normpath(os.path.join(WorkspaceDir, \
"Conf/DistributionPackageDatabase.db")), WorkspaceDir)
GlobalData.gDB.InitDatabase(Opt.SkipLock)
#
# Make sure the Db will get closed correctly
#
try:
ReturnCode = 0
CheckConflictOption(Opt)
RunModule = None
if Opt.PackFileToCreate:
if Opt.PackageInformationDataFile:
if not os.path.exists(Opt.PackageInformationDataFile):
if not os.path.exists(os.path.join(WorkspaceDir, Opt.PackageInformationDataFile)):
Logger.Error("\nUPT", FILE_NOT_FOUND, ST.ERR_NO_TEMPLATE_FILE % Opt.PackageInformationDataFile)
else:
Opt.PackageInformationDataFile = os.path.join(WorkspaceDir, Opt.PackageInformationDataFile)
else:
Logger.Error("UPT", OPTION_MISSING, ExtraData=ST.ERR_REQUIRE_T_OPTION)
if not Opt.PackFileToCreate.endswith('.dist'):
Logger.Error("CreatePkg", FILE_TYPE_MISMATCH, ExtraData=ST.ERR_DIST_EXT_ERROR % Opt.PackFileToCreate)
RunModule = MkPkg.Main
elif Opt.PackFileToInstall:
AbsPath = []
for Item in Opt.PackFileToInstall:
if not Item.endswith('.dist'):
Logger.Error("InstallPkg", FILE_TYPE_MISMATCH, ExtraData=ST.ERR_DIST_EXT_ERROR % Item)
AbsPath.append(GetFullPathDist(Item, WorkspaceDir))
if not AbsPath:
Logger.Error("InstallPkg", FILE_NOT_FOUND, ST.ERR_INSTALL_DIST_NOT_FOUND % Item)
Opt.PackFileToInstall = AbsPath
setattr(Opt, 'PackageFile', Opt.PackFileToInstall)
RunModule = InstallPkg.Main
elif Opt.PackFileToRemove:
if not Opt.PackFileToRemove.endswith('.dist'):
Logger.Error("RemovePkg", FILE_TYPE_MISMATCH, ExtraData=ST.ERR_DIST_EXT_ERROR % Opt.PackFileToRemove)
head, tail = os.path.split(Opt.PackFileToRemove)
if head or not tail:
Logger.Error("RemovePkg",
FILE_TYPE_MISMATCH,
ExtraData=ST.ERR_DIST_FILENAME_ONLY_FOR_REMOVE % Opt.PackFileToRemove)
setattr(Opt, 'DistributionFile', Opt.PackFileToRemove)
RunModule = RmPkg.Main
elif Opt.InventoryWs:
RunModule = InventoryWs.Main
elif Opt.PackFileToBeReplaced and not Opt.PackFileToReplace:
Logger.Error("ReplacePkg", OPTION_MISSING, ExtraData=ST.ERR_REQUIRE_U_OPTION)
elif Opt.PackFileToReplace:
if not Opt.PackFileToReplace.endswith('.dist'):
Logger.Error("ReplacePkg", FILE_TYPE_MISMATCH, ExtraData=ST.ERR_DIST_EXT_ERROR % Opt.PackFileToReplace)
if not Opt.PackFileToBeReplaced:
Logger.Error("ReplacePkg", OPTION_MISSING, ExtraData=ST.ERR_REQUIRE_O_OPTION)
if not Opt.PackFileToBeReplaced.endswith('.dist'):
Logger.Error("ReplacePkg",
FILE_TYPE_MISMATCH,
ExtraData=ST.ERR_DIST_EXT_ERROR % Opt.PackFileToBeReplaced)
head, tail = os.path.split(Opt.PackFileToBeReplaced)
if head or not tail:
Logger.Error("ReplacePkg",
FILE_TYPE_MISMATCH,
ExtraData=ST.ERR_DIST_FILENAME_ONLY_FOR_REPLACE_ORIG % Opt.PackFileToBeReplaced)
AbsPath = GetFullPathDist(Opt.PackFileToReplace, WorkspaceDir)
if not AbsPath:
Logger.Error("ReplacePkg", FILE_NOT_FOUND, ST.ERR_REPLACE_DIST_NOT_FOUND % Opt.PackFileToReplace)
Opt.PackFileToReplace = AbsPath
RunModule = ReplacePkg.Main
elif Opt.Test_Install_Distribution_Package_Files:
for Dist in Opt.Test_Install_Distribution_Package_Files:
if not Dist.endswith('.dist'):
Logger.Error("TestInstall", FILE_TYPE_MISMATCH, ExtraData=ST.ERR_DIST_EXT_ERROR % Dist)
setattr(Opt, 'DistFiles', Opt.Test_Install_Distribution_Package_Files)
RunModule = TestInstall.Main
else:
Parser.print_usage()
return OPTION_MISSING
ReturnCode = RunModule(Opt)
except FatalError as XExcept:
ReturnCode = XExcept.args[0]
if Logger.GetLevel() <= Logger.DEBUG_9:
Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + \
format_exc())
finally:
try:
if ReturnCode != 0 and ReturnCode != UPT_ALREADY_INSTALLED_ERROR:
Logger.Quiet(ST.MSG_RECOVER_START)
GlobalData.gDB.RollBack()
Mgr.rollback()
Logger.Quiet(ST.MSG_RECOVER_DONE)
else:
GlobalData.gDB.Commit()
Mgr.commit()
except Exception:
Logger.Quiet(ST.MSG_RECOVER_FAIL)
GlobalData.gDB.CloseDb()
if pf.system() == 'Windows':
os.system('subst %s /D' % GlobalData.gWORKSPACE.replace('\\', ''))
return ReturnCode
## GetFullPathDist
#
# This function will check DistFile existence, if not absolute path, then try current working directory,
# then $(WORKSPACE),and return the AbsPath. If file doesn't find, then return None
#
# @param DistFile: The distribution file in either relative path or absolute path
# @param WorkspaceDir: Workspace Directory
# @return AbsPath: The Absolute path of the distribution file if existed, None else
#
def GetFullPathDist(DistFile, WorkspaceDir):
if os.path.isabs(DistFile):
if not (os.path.exists(DistFile) and os.path.isfile(DistFile)):
return None
else:
return DistFile
else:
AbsPath = os.path.normpath(os.path.join(os.getcwd(), DistFile))
if not (os.path.exists(AbsPath) and os.path.isfile(AbsPath)):
AbsPath = os.path.normpath(os.path.join(WorkspaceDir, DistFile))
if not (os.path.exists(AbsPath) and os.path.isfile(AbsPath)):
return None
return AbsPath
if __name__ == '__main__':
RETVAL = Main()
#
# 0-127 is a safe return range, and 1 is a standard default error
#
if RETVAL < 0 or RETVAL > 127:
RETVAL = 1
sys.exit(RETVAL)
| edk2-master | BaseTools/Source/Python/UPT/UPT.py |
## @file
# Inventory workspace's distribution package information.
#
# Copyright (c) 2014 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
"""
Inventory workspace's distribution package information.
"""
##
# Import Modules
#
from sys import platform
from traceback import format_exc
from platform import python_version
from Logger import StringTable as ST
from Logger.ToolError import FatalError
from Logger.ToolError import ABORT_ERROR
from Logger.ToolError import CODE_ERROR
import Logger.Log as Logger
from Library import GlobalData
## InventoryDistInstalled
#
# This method retrieves the installed distribution information from the internal UPT database
#
# @param DataBase: the UPT database
#
def InventoryDistInstalled(DataBase):
DistInstalled = DataBase.InventoryDistInstalled()
#
# find the max length for each item
#
DpNameStr = "DpName"
DpGuidStr = "DpGuid"
DpVerStr = "DpVer"
DpOriginalNameStr = "DpOriginalName"
MaxGuidlen = len(DpGuidStr)
MaxVerlen = len(DpVerStr)
MaxDpAliasFileNameLen = len(DpNameStr)
MaxDpOrigFileNamelen = len(DpOriginalNameStr)
for (DpGuid, DpVersion, DpOriginalName, DpAliasFileName) in DistInstalled:
MaxGuidlen = max(MaxGuidlen, len(DpGuid))
MaxVerlen = max(MaxVerlen, len(DpVersion))
MaxDpAliasFileNameLen = max(MaxDpAliasFileNameLen, len(DpAliasFileName))
MaxDpOrigFileNamelen = max(MaxDpOrigFileNamelen, len(DpOriginalName))
OutMsgFmt = "%-*s\t%-*s\t%-*s\t%-s"
OutMsg = OutMsgFmt % (MaxDpAliasFileNameLen,
DpNameStr,
MaxGuidlen,
DpGuidStr,
MaxVerlen,
DpVerStr,
DpOriginalNameStr)
Logger.Info(OutMsg)
for (DpGuid, DpVersion, DpFileName, DpAliasFileName) in DistInstalled:
OutMsg = OutMsgFmt % (MaxDpAliasFileNameLen,
DpAliasFileName,
MaxGuidlen,
DpGuid,
MaxVerlen,
DpVersion,
DpFileName)
Logger.Info(OutMsg)
## Tool entrance method
#
# This method mainly dispatch specific methods per the command line options.
# If no error found, return zero value so the caller of this tool can know
# if it's executed successfully or not.
#
# @param Options: command Options
#
def Main(Options = None):
if Options:
pass
try:
DataBase = GlobalData.gDB
InventoryDistInstalled(DataBase)
ReturnCode = 0
except FatalError as XExcept:
ReturnCode = XExcept.args[0]
if Logger.GetLevel() <= Logger.DEBUG_9:
Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + format_exc())
except KeyboardInterrupt:
ReturnCode = ABORT_ERROR
if Logger.GetLevel() <= Logger.DEBUG_9:
Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + format_exc())
except:
ReturnCode = CODE_ERROR
Logger.Error("\nInventoryWs",
CODE_ERROR,
ST.ERR_UNKNOWN_FATAL_INVENTORYWS_ERR,
ExtraData=ST.MSG_SEARCH_FOR_HELP % ST.MSG_EDKII_MAIL_ADDR,
RaiseError=False
)
Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(),
platform) + format_exc())
if ReturnCode == 0:
Logger.Quiet(ST.MSG_FINISH)
return ReturnCode
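# Hedged formatting sketch, not part of the original tool: the report printed
# by InventoryDistInstalled() relies on '%-*s' star-width formatting, where the
# column width is taken from the longest entry.  The rows below are sample data.
if __name__ == '__main__':
    Rows = [('AlphaPkg', '1.0'), ('MuchLongerPackageName', '2.1')]
    NameWidth = max([len('DpName')] + [len(Name) for Name, _ in Rows])
    print("%-*s\t%s" % (NameWidth, 'DpName', 'DpVer'))
    for Name, Ver in Rows:
        print("%-*s\t%s" % (NameWidth, Name, Ver))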
| edk2-master | BaseTools/Source/Python/UPT/InventoryWs.py |
## @file
# Test Install distribution package
#
# Copyright (c) 2016 - 2017, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
"""
Test Install multiple distribution package
"""
##
# Import Modules
#
from Library import GlobalData
import Logger.Log as Logger
from Logger import StringTable as ST
import Logger.ToolError as TE
from Core.DependencyRules import DependencyRules
from InstallPkg import UnZipDp
import shutil
from traceback import format_exc
from platform import python_version
from sys import platform
## Tool entrance method
#
# This method mainly dispatch specific methods per the command line options.
# If no error found, return zero value so the caller of this tool can know
# if it's executed successfully or not.
#
# @param Options: command Options
#
def Main(Options=None):
ContentZipFile, DistFile = None, None
ReturnCode = 0
try:
DataBase = GlobalData.gDB
WorkspaceDir = GlobalData.gWORKSPACE
if not Options.DistFiles:
Logger.Error("TestInstallPkg", TE.OPTION_MISSING, ExtraData=ST.ERR_SPECIFY_PACKAGE)
DistPkgList = []
for DistFile in Options.DistFiles:
DistPkg, ContentZipFile, __, DistFile = UnZipDp(WorkspaceDir, DistFile)
DistPkgList.append(DistPkg)
#
# check dependency
#
Dep = DependencyRules(DataBase)
Result = True
DpObj = None
try:
Result, DpObj = Dep.CheckTestInstallPdDepexSatisfied(DistPkgList)
except:
Result = False
if Result:
Logger.Quiet(ST.MSG_TEST_INSTALL_PASS)
else:
Logger.Quiet(ST.MSG_TEST_INSTALL_FAIL)
except TE.FatalError as XExcept:
ReturnCode = XExcept.args[0]
if Logger.GetLevel() <= Logger.DEBUG_9:
Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + format_exc())
    except Exception:
ReturnCode = TE.CODE_ERROR
Logger.Error(
"\nTestInstallPkg",
TE.CODE_ERROR,
ST.ERR_UNKNOWN_FATAL_INSTALL_ERR % Options.DistFiles,
ExtraData=ST.MSG_SEARCH_FOR_HELP % ST.MSG_EDKII_MAIL_ADDR,
RaiseError=False
)
Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + format_exc())
finally:
Logger.Quiet(ST.MSG_REMOVE_TEMP_FILE_STARTED)
if DistFile:
DistFile.Close()
if ContentZipFile:
ContentZipFile.Close()
for TempDir in GlobalData.gUNPACK_DIR:
shutil.rmtree(TempDir)
GlobalData.gUNPACK_DIR = []
Logger.Quiet(ST.MSG_REMOVE_TEMP_FILE_DONE)
if ReturnCode == 0:
Logger.Quiet(ST.MSG_FINISH)
return ReturnCode
| edk2-master | BaseTools/Source/Python/UPT/TestInstall.py |
## @file
# Install distribution package.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
RmPkg
'''
##
# Import Modules
#
import os.path
from stat import S_IWUSR
from traceback import format_exc
from platform import python_version
from hashlib import md5
from sys import stdin
from sys import platform
from Core.DependencyRules import DependencyRules
from Library import GlobalData
from Logger import StringTable as ST
import Logger.Log as Logger
from Logger.ToolError import OPTION_MISSING
from Logger.ToolError import UNKNOWN_ERROR
from Logger.ToolError import ABORT_ERROR
from Logger.ToolError import CODE_ERROR
from Logger.ToolError import FatalError
## CheckDpDepex
#
# Check if the Depex is satisfied
# @param Dep: Dep
# @param Guid: Guid of Dp
# @param Version: Version of Dp
# @param WorkspaceDir: Workspace Dir
#
def CheckDpDepex(Dep, Guid, Version, WorkspaceDir):
(Removable, DependModuleList) = Dep.CheckDpDepexForRemove(Guid, Version)
if not Removable:
Logger.Info(ST.MSG_CONFIRM_REMOVE)
Logger.Info(ST.MSG_USER_DELETE_OP)
Input = stdin.readline()
Input = Input.replace('\r', '').replace('\n', '')
if Input.upper() != 'Y':
Logger.Error("RmPkg", UNKNOWN_ERROR, ST.ERR_USER_INTERRUPT)
return 1
else:
#
# report list of modules that are not valid due to force
# remove,
# also generate a log file for reference
#
Logger.Info(ST.MSG_INVALID_MODULE_INTRODUCED)
LogFilePath = os.path.normpath(os.path.join(WorkspaceDir, GlobalData.gINVALID_MODULE_FILE))
Logger.Info(ST.MSG_CHECK_LOG_FILE % LogFilePath)
            LogFile = None
            try:
                LogFile = open(LogFilePath, 'w')
                try:
                    for ModulePath in DependModuleList:
                        LogFile.write("%s\n" % ModulePath)
                        Logger.Info(ModulePath)
                except IOError:
                    Logger.Warn("\nRmPkg", ST.ERR_FILE_WRITE_FAILURE,
                                File=LogFilePath)
            except IOError:
                Logger.Warn("\nRmPkg", ST.ERR_FILE_OPEN_FAILURE,
                            File=LogFilePath)
            finally:
                # Guard against the case where open() itself failed and
                # LogFile was never assigned.
                if LogFile:
                    LogFile.close()
## Remove Path
#
# removing readonly file on windows will get "Access is denied"
# error, so before removing, change the mode to be writeable
#
# @param Path: The Path to be removed
#
def RemovePath(Path):
Logger.Info(ST.MSG_REMOVE_FILE % Path)
if not os.access(Path, os.W_OK):
os.chmod(Path, S_IWUSR)
os.remove(Path)
try:
os.removedirs(os.path.split(Path)[0])
except OSError:
pass
## GetCurrentFileList
#
# @param DataBase: DataBase of UPT
# @param Guid: Guid of Dp
# @param Version: Version of Dp
# @param WorkspaceDir: Workspace Dir
#
def GetCurrentFileList(DataBase, Guid, Version, WorkspaceDir):
NewFileList = []
for Dir in DataBase.GetDpInstallDirList(Guid, Version):
RootDir = os.path.normpath(os.path.join(WorkspaceDir, Dir))
for Root, Dirs, Files in os.walk(RootDir):
Logger.Debug(0, Dirs)
for File in Files:
FilePath = os.path.join(Root, File)
if FilePath not in NewFileList:
NewFileList.append(FilePath)
return NewFileList
## Tool entrance method
#
# This method mainly dispatch specific methods per the command line options.
# If no error found, return zero value so the caller of this tool can know
# if it's executed successfully or not.
#
# @param Options: command option
#
def Main(Options = None):
try:
DataBase = GlobalData.gDB
if not Options.DistributionFile:
Logger.Error("RmPkg",
OPTION_MISSING,
ExtraData=ST.ERR_SPECIFY_PACKAGE)
WorkspaceDir = GlobalData.gWORKSPACE
#
# Prepare check dependency
#
Dep = DependencyRules(DataBase)
#
# Get the Dp information
#
StoredDistFile, Guid, Version = GetInstalledDpInfo(Options.DistributionFile, Dep, DataBase, WorkspaceDir)
#
# Check Dp depex
#
CheckDpDepex(Dep, Guid, Version, WorkspaceDir)
#
# remove distribution
#
RemoveDist(Guid, Version, StoredDistFile, DataBase, WorkspaceDir, Options.Yes)
Logger.Quiet(ST.MSG_FINISH)
ReturnCode = 0
except FatalError as XExcept:
ReturnCode = XExcept.args[0]
if Logger.GetLevel() <= Logger.DEBUG_9:
Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + \
format_exc())
except KeyboardInterrupt:
ReturnCode = ABORT_ERROR
if Logger.GetLevel() <= Logger.DEBUG_9:
Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + \
format_exc())
except:
Logger.Error(
"\nRmPkg",
CODE_ERROR,
ST.ERR_UNKNOWN_FATAL_REMOVING_ERR,
ExtraData=ST.MSG_SEARCH_FOR_HELP % ST.MSG_EDKII_MAIL_ADDR,
RaiseError=False
)
Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + \
format_exc())
ReturnCode = CODE_ERROR
return ReturnCode
## GetInstalledDpInfo method
#
# Get the installed distribution information
#
# @param DistributionFile: the name of the distribution
# @param Dep: the instance of DependencyRules
# @param DataBase: the internal database
# @param WorkspaceDir: work space directory
# @retval StoredDistFile: the distribution file that backed up
# @retval Guid: the Guid of the distribution
# @retval Version: the Version of distribution
#
def GetInstalledDpInfo(DistributionFile, Dep, DataBase, WorkspaceDir):
(Guid, Version, NewDpFileName) = DataBase.GetDpByName(os.path.split(DistributionFile)[1])
if not Guid:
Logger.Error("RmPkg", UNKNOWN_ERROR, ST.ERR_PACKAGE_NOT_INSTALLED % DistributionFile)
#
# Check Dp existing
#
if not Dep.CheckDpExists(Guid, Version):
Logger.Error("RmPkg", UNKNOWN_ERROR, ST.ERR_DISTRIBUTION_NOT_INSTALLED)
#
# Check for Distribution files existence in /conf/upt, if not exist,
# Warn user and go on.
#
StoredDistFile = os.path.normpath(os.path.join(WorkspaceDir, GlobalData.gUPT_DIR, NewDpFileName))
if not os.path.isfile(StoredDistFile):
Logger.Warn("RmPkg", ST.WRN_DIST_NOT_FOUND%StoredDistFile)
StoredDistFile = None
return StoredDistFile, Guid, Version
## RemoveDist method
#
# remove a distribution
#
# @param Guid: the Guid of the distribution
# @param Version: the Version of distribution
# @param StoredDistFile: the distribution file that backed up
# @param DataBase: the internal database
# @param WorkspaceDir: work space directory
# @param ForceRemove: whether user want to remove file even it is modified
#
def RemoveDist(Guid, Version, StoredDistFile, DataBase, WorkspaceDir, ForceRemove):
#
# Get Current File List
#
NewFileList = GetCurrentFileList(DataBase, Guid, Version, WorkspaceDir)
#
# Remove all files
#
MissingFileList = []
for (Path, Md5Sum) in DataBase.GetDpFileList(Guid, Version):
if os.path.isfile(Path):
if Path in NewFileList:
NewFileList.remove(Path)
if not ForceRemove:
#
# check whether modified by users
#
Md5Signature = md5(open(str(Path), 'rb').read())
if Md5Sum != Md5Signature.hexdigest():
Logger.Info(ST.MSG_CONFIRM_REMOVE2 % Path)
Input = stdin.readline()
Input = Input.replace('\r', '').replace('\n', '')
if Input.upper() != 'Y':
continue
RemovePath(Path)
else:
MissingFileList.append(Path)
for Path in NewFileList:
if os.path.isfile(Path):
if (not ForceRemove) and (not os.path.split(Path)[1].startswith('.')):
Logger.Info(ST.MSG_CONFIRM_REMOVE3 % Path)
Input = stdin.readline()
Input = Input.replace('\r', '').replace('\n', '')
if Input.upper() != 'Y':
continue
RemovePath(Path)
#
# Remove distribution files in /Conf/.upt
#
if StoredDistFile is not None:
os.remove(StoredDistFile)
#
# update database
#
Logger.Quiet(ST.MSG_UPDATE_PACKAGE_DATABASE)
DataBase.RemoveDpObj(Guid, Version)
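# Hedged sketch, not part of the original tool: the modification check used by
# RemoveDist() reduces to comparing a recorded md5 digest against the current
# file content.  The command-line arguments below are illustrative only.
if __name__ == '__main__':
    import sys
    if len(sys.argv) == 3:
        # sys.argv[1]: path of an installed file, sys.argv[2]: recorded md5 hex digest
        CurrentDigest = md5(open(sys.argv[1], 'rb').read()).hexdigest()
        print('modified' if CurrentDigest != sys.argv[2] else 'unchanged')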
| edk2-master | BaseTools/Source/Python/UPT/RmPkg.py |
## @file
# Install distribution package.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
"""
Install a distribution package
"""
##
# Import Modules
#
from Core.FileHook import __FileHookOpen__
import os.path
from os import chmod
from os import SEEK_SET
from os import SEEK_END
import stat
from hashlib import md5
import copy
from sys import stdin
from sys import platform
from shutil import rmtree
from shutil import copyfile
from traceback import format_exc
from platform import python_version
from Logger import StringTable as ST
from Logger.ToolError import UNKNOWN_ERROR
from Logger.ToolError import FILE_UNKNOWN_ERROR
from Logger.ToolError import OPTION_MISSING
from Logger.ToolError import UPT_ALREADY_INSTALLED_ERROR
from Logger.ToolError import FatalError
from Logger.ToolError import ABORT_ERROR
from Logger.ToolError import CODE_ERROR
from Logger.ToolError import FORMAT_INVALID
from Logger.ToolError import FILE_TYPE_MISMATCH
import Logger.Log as Logger
from Library.Misc import Sdict
from Library.Misc import ConvertPath
from Library.ParserValidate import IsValidInstallPath
from Xml.XmlParser import DistributionPackageXml
from GenMetaFile.GenDecFile import PackageToDec
from GenMetaFile.GenInfFile import ModuleToInf
from Core.PackageFile import PackageFile
from Core.PackageFile import FILE_NOT_FOUND
from Core.PackageFile import FILE_CHECKSUM_FAILURE
from Core.PackageFile import CreateDirectory
from Core.DependencyRules import DependencyRules
from Library import GlobalData
## InstallNewPackage
#
# @param WorkspaceDir: Workspace Directory
# @param Path: Package Path
# @param CustomPath: whether need to customize path at first
#
def InstallNewPackage(WorkspaceDir, Path, CustomPath = False):
if os.path.isabs(Path):
Logger.Info(ST.MSG_RELATIVE_PATH_ONLY%Path)
elif CustomPath:
Logger.Info(ST.MSG_NEW_PKG_PATH)
else:
Path = ConvertPath(Path)
Path = os.path.normpath(Path)
FullPath = os.path.normpath(os.path.join(WorkspaceDir, Path))
if os.path.exists(FullPath):
Logger.Info(ST.ERR_DIR_ALREADY_EXIST%FullPath)
else:
return Path
Input = stdin.readline()
Input = Input.replace('\r', '').replace('\n', '')
if Input == '':
Logger.Error("InstallPkg", UNKNOWN_ERROR, ST.ERR_USER_INTERRUPT)
Input = Input.replace('\r', '').replace('\n', '')
return InstallNewPackage(WorkspaceDir, Input, False)
## InstallNewModule
#
# @param WorkspaceDir: Workspace Directory
# @param Path: Standalone Module Path
# @param PathList: The already installed standalone module Path list
#
def InstallNewModule(WorkspaceDir, Path, PathList = None):
if PathList is None:
PathList = []
Path = ConvertPath(Path)
Path = os.path.normpath(Path)
FullPath = os.path.normpath(os.path.join(WorkspaceDir, Path))
if os.path.exists(FullPath) and FullPath not in PathList:
Logger.Info(ST.ERR_DIR_ALREADY_EXIST%Path)
elif Path == FullPath:
Logger.Info(ST.MSG_RELATIVE_PATH_ONLY%FullPath)
else:
return Path
Input = stdin.readline()
Input = Input.replace('\r', '').replace('\n', '')
if Input == '':
Logger.Error("InstallPkg", UNKNOWN_ERROR, ST.ERR_USER_INTERRUPT)
Input = Input.replace('\r', '').replace('\n', '')
return InstallNewModule(WorkspaceDir, Input, PathList)
## InstallNewFile
#
# @param WorkspaceDir: Workspace Direction
# @param File: File
#
def InstallNewFile(WorkspaceDir, File):
FullPath = os.path.normpath(os.path.join(WorkspaceDir, File))
if os.path.exists(FullPath):
Logger.Info(ST.ERR_FILE_ALREADY_EXIST %File)
Input = stdin.readline()
Input = Input.replace('\r', '').replace('\n', '')
if Input == '':
Logger.Error("InstallPkg", UNKNOWN_ERROR, ST.ERR_USER_INTERRUPT)
Input = Input.replace('\r', '').replace('\n', '')
return InstallNewFile(WorkspaceDir, Input)
else:
return File
## UnZipDp
#
# UnZipDp
#
def UnZipDp(WorkspaceDir, DpPkgFileName, Index=1):
ContentZipFile = None
Logger.Quiet(ST.MSG_UZIP_PARSE_XML)
DistFile = PackageFile(DpPkgFileName)
DpDescFileName, ContentFileName = GetDPFile(DistFile.GetZipFile())
TempDir = os.path.normpath(os.path.join(WorkspaceDir, "Conf/.tmp%s" % str(Index)))
GlobalData.gUNPACK_DIR.append(TempDir)
DistPkgFile = DistFile.UnpackFile(DpDescFileName, os.path.normpath(os.path.join(TempDir, DpDescFileName)))
if not DistPkgFile:
Logger.Error("InstallPkg", FILE_NOT_FOUND, ST.ERR_FILE_BROKEN %DpDescFileName)
#
# Generate distpkg
#
DistPkgObj = DistributionPackageXml()
DistPkg = DistPkgObj.FromXml(DistPkgFile)
if DistPkg.Header.RePackage == '':
DistPkg.Header.RePackage = False
if DistPkg.Header.ReadOnly == '':
DistPkg.Header.ReadOnly = False
#
# unzip contents.zip file
#
ContentFile = DistFile.UnpackFile(ContentFileName, os.path.normpath(os.path.join(TempDir, ContentFileName)))
if not ContentFile:
Logger.Error("InstallPkg", FILE_NOT_FOUND,
ST.ERR_FILE_BROKEN % ContentFileName)
#
# Get file size
#
FileSize = os.path.getsize(ContentFile)
if FileSize != 0:
ContentZipFile = PackageFile(ContentFile)
#
# verify MD5 signature when existed
#
if DistPkg.Header.Signature != '':
Md5Signature = md5(__FileHookOpen__(ContentFile, 'rb').read())
if DistPkg.Header.Signature != Md5Signature.hexdigest():
ContentZipFile.Close()
Logger.Error("InstallPkg", FILE_CHECKSUM_FAILURE,
ExtraData=ContentFile)
return DistPkg, ContentZipFile, DpPkgFileName, DistFile
## GetPackageList
#
# GetPackageList
#
def GetPackageList(DistPkg, Dep, WorkspaceDir, Options, ContentZipFile, ModuleList, PackageList):
NewDict = Sdict()
for Guid, Version, Path in DistPkg.PackageSurfaceArea:
PackagePath = Path
Package = DistPkg.PackageSurfaceArea[Guid, Version, Path]
Logger.Info(ST.MSG_INSTALL_PACKAGE % Package.GetName())
# if Dep.CheckPackageExists(Guid, Version):
# Logger.Info(ST.WRN_PACKAGE_EXISTED %(Guid, Version))
if Options.UseGuidedPkgPath:
GuidedPkgPath = "%s_%s_%s" % (Package.GetName(), Guid, Version)
NewPackagePath = InstallNewPackage(WorkspaceDir, GuidedPkgPath, Options.CustomPath)
else:
NewPackagePath = InstallNewPackage(WorkspaceDir, PackagePath, Options.CustomPath)
InstallPackageContent(PackagePath, NewPackagePath, Package, ContentZipFile, Dep, WorkspaceDir, ModuleList,
DistPkg.Header.ReadOnly)
PackageList.append(Package)
NewDict[Guid, Version, Package.GetPackagePath()] = Package
#
# Now generate meta-data files, first generate all dec for package
# dec should be generated before inf, and inf should be generated after
# all packages installed, else hard to resolve modules' package
# dependency (Hard to get the location of the newly installed package)
#
for Package in PackageList:
FilePath = PackageToDec(Package, DistPkg.Header)
Md5Signature = md5(__FileHookOpen__(str(FilePath), 'rb').read())
Md5Sum = Md5Signature.hexdigest()
if (FilePath, Md5Sum) not in Package.FileList:
Package.FileList.append((FilePath, Md5Sum))
return NewDict
## GetModuleList
#
# GetModuleList
#
def GetModuleList(DistPkg, Dep, WorkspaceDir, ContentZipFile, ModuleList):
#
# ModulePathList will keep track of the standalone module path that
# we just installed. If a new module's path in that list
# (only multiple INF in one directory will be so), we will
# install them directly. If not, we will try to create a new directory
# for it.
#
ModulePathList = []
#
# Check module exist and install
#
Module = None
NewDict = Sdict()
for Guid, Version, Name, Path in DistPkg.ModuleSurfaceArea:
ModulePath = Path
Module = DistPkg.ModuleSurfaceArea[Guid, Version, Name, Path]
Logger.Info(ST.MSG_INSTALL_MODULE % Module.GetName())
if Dep.CheckModuleExists(Guid, Version, Name, Path):
Logger.Quiet(ST.WRN_MODULE_EXISTED %Path)
#
# here check for the multiple inf share the same module path cases:
# they should be installed into the same directory
#
ModuleFullPath = \
os.path.normpath(os.path.join(WorkspaceDir, ModulePath))
if ModuleFullPath not in ModulePathList:
NewModulePath = InstallNewModule(WorkspaceDir, ModulePath, ModulePathList)
NewModuleFullPath = os.path.normpath(os.path.join(WorkspaceDir, NewModulePath))
ModulePathList.append(NewModuleFullPath)
else:
NewModulePath = ModulePath
InstallModuleContent(ModulePath, NewModulePath, '', Module, ContentZipFile, WorkspaceDir, ModuleList, None,
DistPkg.Header.ReadOnly)
#
# Update module
#
Module.SetModulePath(Module.GetModulePath().replace(Path, NewModulePath, 1))
NewDict[Guid, Version, Name, Module.GetModulePath()] = Module
#
# generate all inf for modules
#
for (Module, Package) in ModuleList:
CheckCNameInModuleRedefined(Module, DistPkg)
FilePath = ModuleToInf(Module, Package, DistPkg.Header)
Md5Signature = md5(__FileHookOpen__(str(FilePath), 'rb').read())
Md5Sum = Md5Signature.hexdigest()
if Package:
if (FilePath, Md5Sum) not in Package.FileList:
Package.FileList.append((FilePath, Md5Sum))
else:
if (FilePath, Md5Sum) not in Module.FileList:
Module.FileList.append((FilePath, Md5Sum))
#
# append the module unicode files to Package FileList
#
for (FilePath, Md5Sum) in Module.FileList:
if str(FilePath).endswith('.uni') and Package and (FilePath, Md5Sum) not in Package.FileList:
Package.FileList.append((FilePath, Md5Sum))
return NewDict
##
# Get all protocol/ppi/guid CNames and pcd name from all dependent DEC file
#
def GetDepProtocolPpiGuidPcdNames(DePackageObjList):
#
# [[Dec1Protocol1, Dec1Protocol2...], [Dec2Protocols...],...]
#
DependentProtocolCNames = []
DependentPpiCNames = []
DependentGuidCNames = []
DependentPcdNames = []
for PackageObj in DePackageObjList:
#
# Get protocol CName list from all dependent DEC file
#
ProtocolCNames = []
for Protocol in PackageObj.GetProtocolList():
if Protocol.GetCName() not in ProtocolCNames:
ProtocolCNames.append(Protocol.GetCName())
DependentProtocolCNames.append(ProtocolCNames)
#
# Get Ppi CName list from all dependent DEC file
#
PpiCNames = []
for Ppi in PackageObj.GetPpiList():
if Ppi.GetCName() not in PpiCNames:
PpiCNames.append(Ppi.GetCName())
DependentPpiCNames.append(PpiCNames)
#
# Get Guid CName list from all dependent DEC file
#
GuidCNames = []
for Guid in PackageObj.GetGuidList():
if Guid.GetCName() not in GuidCNames:
GuidCNames.append(Guid.GetCName())
DependentGuidCNames.append(GuidCNames)
#
# Get PcdName list from all dependent DEC file
#
PcdNames = []
for Pcd in PackageObj.GetPcdList():
PcdName = '.'.join([Pcd.GetTokenSpaceGuidCName(), Pcd.GetCName()])
if PcdName not in PcdNames:
PcdNames.append(PcdName)
DependentPcdNames.append(PcdNames)
return DependentProtocolCNames, DependentPpiCNames, DependentGuidCNames, DependentPcdNames
##
# Check if protocol CName is redefined
#
def CheckProtoclCNameRedefined(Module, DependentProtocolCNames):
for ProtocolInModule in Module.GetProtocolList():
IsCNameDefined = False
for PackageProtocolCNames in DependentProtocolCNames:
if ProtocolInModule.GetCName() in PackageProtocolCNames:
if IsCNameDefined:
Logger.Error("\nUPT", FORMAT_INVALID,
File = Module.GetFullPath(),
ExtraData = \
ST.ERR_INF_PARSER_ITEM_DUPLICATE_IN_DEC % ProtocolInModule.GetCName())
else:
IsCNameDefined = True
##
# Check if Ppi CName is redefined
#
def CheckPpiCNameRedefined(Module, DependentPpiCNames):
for PpiInModule in Module.GetPpiList():
IsCNameDefined = False
for PackagePpiCNames in DependentPpiCNames:
if PpiInModule.GetCName() in PackagePpiCNames:
if IsCNameDefined:
Logger.Error("\nUPT", FORMAT_INVALID,
File = Module.GetFullPath(),
ExtraData = ST.ERR_INF_PARSER_ITEM_DUPLICATE_IN_DEC % PpiInModule.GetCName())
else:
IsCNameDefined = True
##
# Check if Guid CName is redefined
#
def CheckGuidCNameRedefined(Module, DependentGuidCNames):
for GuidInModule in Module.GetGuidList():
IsCNameDefined = False
for PackageGuidCNames in DependentGuidCNames:
if GuidInModule.GetCName() in PackageGuidCNames:
if IsCNameDefined:
Logger.Error("\nUPT", FORMAT_INVALID,
File = Module.GetFullPath(),
ExtraData = \
ST.ERR_INF_PARSER_ITEM_DUPLICATE_IN_DEC % GuidInModule.GetCName())
else:
IsCNameDefined = True
##
# Check if PcdName is redefined
#
def CheckPcdNameRedefined(Module, DependentPcdNames):
PcdObjs = []
if not Module.GetBinaryFileList():
PcdObjs += Module.GetPcdList()
else:
Binary = Module.GetBinaryFileList()[0]
for AsBuild in Binary.GetAsBuiltList():
PcdObjs += AsBuild.GetPatchPcdList() + AsBuild.GetPcdExList()
for PcdObj in PcdObjs:
PcdName = '.'.join([PcdObj.GetTokenSpaceGuidCName(), PcdObj.GetCName()])
IsPcdNameDefined = False
for PcdNames in DependentPcdNames:
if PcdName in PcdNames:
if IsPcdNameDefined:
Logger.Error("\nUPT", FORMAT_INVALID,
File = Module.GetFullPath(),
ExtraData = ST.ERR_INF_PARSER_ITEM_DUPLICATE_IN_DEC % PcdName)
else:
IsPcdNameDefined = True
##
# Check if any Protocol/Ppi/Guid and Pcd name is redefined in its dependent DEC files
#
def CheckCNameInModuleRedefined(Module, DistPkg):
DePackageObjList = []
#
# Get all dependent package objects
#
for Obj in Module.GetPackageDependencyList():
Guid = Obj.GetGuid()
Version = Obj.GetVersion()
for Key in DistPkg.PackageSurfaceArea:
if Key[0] == Guid and Key[1] == Version:
if DistPkg.PackageSurfaceArea[Key] not in DePackageObjList:
DePackageObjList.append(DistPkg.PackageSurfaceArea[Key])
DependentProtocolCNames, DependentPpiCNames, DependentGuidCNames, DependentPcdNames = \
GetDepProtocolPpiGuidPcdNames(DePackageObjList)
CheckProtoclCNameRedefined(Module, DependentProtocolCNames)
CheckPpiCNameRedefined(Module, DependentPpiCNames)
CheckGuidCNameRedefined(Module, DependentGuidCNames)
CheckPcdNameRedefined(Module, DependentPcdNames)
## GenToolMisc
#
# GenToolMisc
#
#
def GenToolMisc(DistPkg, WorkspaceDir, ContentZipFile):
ToolObject = DistPkg.Tools
MiscObject = DistPkg.MiscellaneousFiles
DistPkg.FileList = []
FileList = []
ToolFileNum = 0
FileNum = 0
RootDir = WorkspaceDir
#
# FileList stores both tools files and misc files
# Misc file list must be appended to FileList *AFTER* Tools file list
#
if ToolObject:
FileList += ToolObject.GetFileList()
ToolFileNum = len(ToolObject.GetFileList())
if 'EDK_TOOLS_PATH' in os.environ:
RootDir = os.environ['EDK_TOOLS_PATH']
if MiscObject:
FileList += MiscObject.GetFileList()
for FileObject in FileList:
FileNum += 1
if FileNum > ToolFileNum:
#
# Misc files, root should be changed to WORKSPACE
#
RootDir = WorkspaceDir
File = ConvertPath(FileObject.GetURI())
ToFile = os.path.normpath(os.path.join(RootDir, File))
if os.path.exists(ToFile):
Logger.Info( ST.WRN_FILE_EXISTED % ToFile )
#
# ask for user input the new file name
#
Logger.Info( ST.MSG_NEW_FILE_NAME)
Input = stdin.readline()
Input = Input.replace('\r', '').replace('\n', '')
OrigPath = os.path.split(ToFile)[0]
ToFile = os.path.normpath(os.path.join(OrigPath, Input))
FromFile = os.path.join(FileObject.GetURI())
Md5Sum = InstallFile(ContentZipFile, FromFile, ToFile, DistPkg.Header.ReadOnly, FileObject.GetExecutable())
DistPkg.FileList.append((ToFile, Md5Sum))
## Tool entrance method
#
# This method mainly dispatch specific methods per the command line options.
# If no error found, return zero value so the caller of this tool can know
# if it's executed successfully or not.
#
# @param Options: command Options
#
def Main(Options = None):
    # Initialize before the try block so the finally clause can iterate it even
    # when an error occurs before any distribution is unpacked.
    DistInfoList = []
    try:
DataBase = GlobalData.gDB
WorkspaceDir = GlobalData.gWORKSPACE
if not Options.PackageFile:
Logger.Error("InstallPkg", OPTION_MISSING, ExtraData=ST.ERR_SPECIFY_PACKAGE)
# Get all Dist Info
DistInfoList = []
DistPkgList = []
Index = 1
for ToBeInstalledDist in Options.PackageFile:
#
# unzip dist.pkg file
#
DistInfoList.append(UnZipDp(WorkspaceDir, ToBeInstalledDist, Index))
DistPkgList.append(DistInfoList[-1][0])
Index += 1
#
# Add dist
#
GlobalData.gTO_BE_INSTALLED_DIST_LIST.append(DistInfoList[-1][0])
# Check for dependency
Dep = DependencyRules(DataBase, DistPkgList)
for ToBeInstalledDist in DistInfoList:
CheckInstallDpx(Dep, ToBeInstalledDist[0], ToBeInstalledDist[2])
#
# Install distribution
#
InstallDp(ToBeInstalledDist[0], ToBeInstalledDist[2], ToBeInstalledDist[1],
Options, Dep, WorkspaceDir, DataBase)
ReturnCode = 0
except FatalError as XExcept:
ReturnCode = XExcept.args[0]
if Logger.GetLevel() <= Logger.DEBUG_9:
Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + format_exc())
except KeyboardInterrupt:
ReturnCode = ABORT_ERROR
if Logger.GetLevel() <= Logger.DEBUG_9:
Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + format_exc())
except:
ReturnCode = CODE_ERROR
Logger.Error(
"\nInstallPkg",
CODE_ERROR,
ST.ERR_UNKNOWN_FATAL_INSTALL_ERR % Options.PackageFile,
ExtraData=ST.MSG_SEARCH_FOR_HELP % ST.MSG_EDKII_MAIL_ADDR,
RaiseError=False
)
Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(),
platform) + format_exc())
finally:
Logger.Quiet(ST.MSG_REMOVE_TEMP_FILE_STARTED)
for ToBeInstalledDist in DistInfoList:
if ToBeInstalledDist[3]:
ToBeInstalledDist[3].Close()
if ToBeInstalledDist[1]:
ToBeInstalledDist[1].Close()
for TempDir in GlobalData.gUNPACK_DIR:
rmtree(TempDir)
GlobalData.gUNPACK_DIR = []
Logger.Quiet(ST.MSG_REMOVE_TEMP_FILE_DONE)
if ReturnCode == 0:
Logger.Quiet(ST.MSG_FINISH)
return ReturnCode
# BackupDist method
#
# This method will backup the Distribution file into the $(WORKSPACE)/conf/upt, and rename it
# if there is already a same-named distribution existed.
#
# @param DpPkgFileName: The distribution path
# @param Guid: The distribution Guid
# @param Version: The distribution Version
# @param WorkspaceDir: The workspace directory
# @retval NewDpPkgFileName: The exact backup file name
#
def BackupDist(DpPkgFileName, Guid, Version, WorkspaceDir):
DistFileName = os.path.split(DpPkgFileName)[1]
DestDir = os.path.normpath(os.path.join(WorkspaceDir, GlobalData.gUPT_DIR))
CreateDirectory(DestDir)
DestFile = os.path.normpath(os.path.join(DestDir, DistFileName))
if os.path.exists(DestFile):
FileName, Ext = os.path.splitext(DistFileName)
NewFileName = FileName + '_' + Guid + '_' + Version + Ext
DestFile = os.path.normpath(os.path.join(DestDir, NewFileName))
if os.path.exists(DestFile):
#
# ask for user input the new file name
#
Logger.Info( ST.MSG_NEW_FILE_NAME_FOR_DIST)
Input = stdin.readline()
Input = Input.replace('\r', '').replace('\n', '')
DestFile = os.path.normpath(os.path.join(DestDir, Input))
copyfile(DpPkgFileName, DestFile)
NewDpPkgFileName = DestFile[DestFile.find(DestDir) + len(DestDir) + 1:]
return NewDpPkgFileName
## CheckInstallDpx method
#
# check whether distribution could be installed
#
# @param Dep: the DependencyRules instance that used to check dependency
# @param DistPkg: the distribution object
#
def CheckInstallDpx(Dep, DistPkg, DistPkgFileName):
#
# Check distribution package installed or not
#
if Dep.CheckDpExists(DistPkg.Header.GetGuid(),
DistPkg.Header.GetVersion()):
Logger.Error("InstallPkg",
UPT_ALREADY_INSTALLED_ERROR,
ST.WRN_DIST_PKG_INSTALLED % os.path.basename(DistPkgFileName))
#
# Check distribution dependency (all module dependency should be
# satisfied)
#
if not Dep.CheckInstallDpDepexSatisfied(DistPkg):
Logger.Error("InstallPkg", UNKNOWN_ERROR,
ST.ERR_PACKAGE_NOT_MATCH_DEPENDENCY,
ExtraData=DistPkg.Header.Name)
## InstallModuleContent method
#
# If this is a standalone module, Package should be None and
# ModulePath should be ''
# @param FromPath: FromPath
# @param NewPath: NewPath
# @param ModulePath: ModulePath
# @param Module: Module
# @param ContentZipFile: ContentZipFile
# @param WorkspaceDir: WorkspaceDir
# @param ModuleList: ModuleList
# @param Package: Package
#
def InstallModuleContent(FromPath, NewPath, ModulePath, Module, ContentZipFile,
WorkspaceDir, ModuleList, Package = None, ReadOnly = False):
if NewPath.startswith("\\") or NewPath.startswith("/"):
NewPath = NewPath[1:]
if not IsValidInstallPath(NewPath):
Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%NewPath)
NewModuleFullPath = os.path.normpath(os.path.join(WorkspaceDir, NewPath,
ConvertPath(ModulePath)))
Module.SetFullPath(os.path.normpath(os.path.join(NewModuleFullPath,
ConvertPath(Module.GetName()) + '.inf')))
Module.FileList = []
for MiscFile in Module.GetMiscFileList():
if not MiscFile:
continue
for Item in MiscFile.GetFileList():
File = Item.GetURI()
if File.startswith("\\") or File.startswith("/"):
File = File[1:]
if not IsValidInstallPath(File):
Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%File)
FromFile = os.path.join(FromPath, ModulePath, File)
Executable = Item.GetExecutable()
ToFile = os.path.normpath(os.path.join(NewModuleFullPath, ConvertPath(File)))
Md5Sum = InstallFile(ContentZipFile, FromFile, ToFile, ReadOnly, Executable)
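# Record the installed file and its MD5 in the owning package's file list, or in the
# module's own file list when the module is standalone (no Package given)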
if Package and ((ToFile, Md5Sum) not in Package.FileList):
Package.FileList.append((ToFile, Md5Sum))
elif Package:
continue
elif (ToFile, Md5Sum) not in Module.FileList:
Module.FileList.append((ToFile, Md5Sum))
for Item in Module.GetSourceFileList():
File = Item.GetSourceFile()
if File.startswith("\\") or File.startswith("/"):
File = File[1:]
if not IsValidInstallPath(File):
Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%File)
FromFile = os.path.join(FromPath, ModulePath, File)
ToFile = os.path.normpath(os.path.join(NewModuleFullPath, ConvertPath(File)))
Md5Sum = InstallFile(ContentZipFile, FromFile, ToFile, ReadOnly)
if Package and ((ToFile, Md5Sum) not in Package.FileList):
Package.FileList.append((ToFile, Md5Sum))
elif Package:
continue
elif (ToFile, Md5Sum) not in Module.FileList:
Module.FileList.append((ToFile, Md5Sum))
for Item in Module.GetBinaryFileList():
FileNameList = Item.GetFileNameList()
for FileName in FileNameList:
File = FileName.GetFilename()
if File.startswith("\\") or File.startswith("/"):
File = File[1:]
if not IsValidInstallPath(File):
Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%File)
FromFile = os.path.join(FromPath, ModulePath, File)
ToFile = os.path.normpath(os.path.join(NewModuleFullPath, ConvertPath(File)))
Md5Sum = InstallFile(ContentZipFile, FromFile, ToFile, ReadOnly)
if Package and ((ToFile, Md5Sum) not in Package.FileList):
Package.FileList.append((ToFile, Md5Sum))
elif Package:
continue
elif (ToFile, Md5Sum) not in Module.FileList:
Module.FileList.append((ToFile, Md5Sum))
InstallModuleContentZipFile(ContentZipFile, FromPath, ModulePath, WorkspaceDir, NewPath, Module, Package, ReadOnly,
ModuleList)
## InstallModuleContentZipFile
#
# InstallModuleContentZipFile
#
def InstallModuleContentZipFile(ContentZipFile, FromPath, ModulePath, WorkspaceDir, NewPath, Module, Package, ReadOnly,
ModuleList):
#
# Extract any other files under the current module path in the content zip file that are not listed in the description
#
if ContentZipFile:
for FileName in ContentZipFile.GetZipFile().namelist():
FileName = os.path.normpath(FileName)
CheckPath = os.path.normpath(os.path.join(FromPath, ModulePath))
if FileUnderPath(FileName, CheckPath):
if FileName.startswith("\\") or FileName.startswith("/"):
FileName = FileName[1:]
if not IsValidInstallPath(FileName):
Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%FileName)
FromFile = FileName
ToFile = os.path.normpath(os.path.join(WorkspaceDir,
ConvertPath(FileName.replace(FromPath, NewPath, 1))))
CheckList = copy.copy(Module.FileList)
if Package:
CheckList += Package.FileList
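# for/else: install the file only if no entry in CheckList already uses this destination path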
for Item in CheckList:
if Item[0] == ToFile:
break
else:
Md5Sum = InstallFile(ContentZipFile, FromFile, ToFile, ReadOnly)
if Package and ((ToFile, Md5Sum) not in Package.FileList):
Package.FileList.append((ToFile, Md5Sum))
elif Package:
continue
elif (ToFile, Md5Sum) not in Module.FileList:
Module.FileList.append((ToFile, Md5Sum))
ModuleList.append((Module, Package))
## FileUnderPath
# Check whether FileName starts with the directory specified by CheckPath
#
# @param FileName: the file name to be checked
# @param CheckPath: the path to check against
# @return: True or False
#
def FileUnderPath(FileName, CheckPath):
FileName = FileName.replace('\\', '/')
FileName = os.path.normpath(FileName)
CheckPath = CheckPath.replace('\\', '/')
CheckPath = os.path.normpath(CheckPath)
if FileName.startswith(CheckPath):
RemainingPath = os.path.normpath(FileName.replace(CheckPath, '', 1))
while RemainingPath.startswith('\\') or RemainingPath.startswith('/'):
RemainingPath = RemainingPath[1:]
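# Re-join and compare to rule out partial directory-name matches (e.g. CheckPath 'a/b' vs file 'a/bc')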
if FileName == os.path.normpath(os.path.join(CheckPath, RemainingPath)):
return True
return False
## InstallFile
# Extract a file from the zip file, set its file attributes, and return its Md5Sum
#
# @return: the Md5Sum of the installed file
#
def InstallFile(ContentZipFile, FromFile, ToFile, ReadOnly, Executable=False):
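# Extract the file only when it does not already exist at the destination; file
# attributes and the MD5 are (re)computed either way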
if os.path.exists(os.path.normpath(ToFile)):
pass
else:
if not ContentZipFile or not ContentZipFile.UnpackFile(FromFile, ToFile):
Logger.Error("UPT", FILE_NOT_FOUND, ST.ERR_INSTALL_FILE_FROM_EMPTY_CONTENT % FromFile)
if ReadOnly:
if not Executable:
chmod(ToFile, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)
else:
chmod(ToFile, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH | stat.S_IEXEC | stat.S_IXGRP | stat.S_IXOTH)
elif Executable:
chmod(ToFile, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH | stat.S_IWUSR | stat.S_IWGRP |
stat.S_IWOTH | stat.S_IEXEC | stat.S_IXGRP | stat.S_IXOTH)
else:
chmod(ToFile, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH | stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH)
Md5Signature = md5(__FileHookOpen__(str(ToFile), 'rb').read())
Md5Sum = Md5Signature.hexdigest()
return Md5Sum
## InstallPackageContent method
#
# @param FromPath: FromPath
# @param ToPath: ToPath
# @param Package: Package
# @param ContentZipFile: ContentZipFile
# @param Dep: Dep
# @param WorkspaceDir: WorkspaceDir
# @param ModuleList: ModuleList
#
def InstallPackageContent(FromPath, ToPath, Package, ContentZipFile, Dep,
WorkspaceDir, ModuleList, ReadOnly = False):
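# Dep is not used below; the no-op check only references the parameter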
if Dep:
pass
Package.FileList = []
if ToPath.startswith("\\") or ToPath.startswith("/"):
ToPath = ToPath[1:]
if not IsValidInstallPath(ToPath):
Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%ToPath)
if FromPath.startswith("\\") or FromPath.startswith("/"):
FromPath = FromPath[1:]
if not IsValidInstallPath(FromPath):
Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%FromPath)
PackageFullPath = os.path.normpath(os.path.join(WorkspaceDir, ToPath))
for MiscFile in Package.GetMiscFileList():
for Item in MiscFile.GetFileList():
FileName = Item.GetURI()
if FileName.startswith("\\") or FileName.startswith("/"):
FileName = FileName[1:]
if not IsValidInstallPath(FileName):
Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%FileName)
FromFile = os.path.join(FromPath, FileName)
Executable = Item.GetExecutable()
ToFile = (os.path.join(PackageFullPath, ConvertPath(FileName)))
Md5Sum = InstallFile(ContentZipFile, FromFile, ToFile, ReadOnly, Executable)
if (ToFile, Md5Sum) not in Package.FileList:
Package.FileList.append((ToFile, Md5Sum))
PackageIncludeArchList = []
for Item in Package.GetPackageIncludeFileList():
FileName = Item.GetFilePath()
if FileName.startswith("\\") or FileName.startswith("/"):
FileName = FileName[1:]
if not IsValidInstallPath(FileName):
Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%FileName)
FromFile = os.path.join(FromPath, FileName)
ToFile = os.path.normpath(os.path.join(PackageFullPath, ConvertPath(FileName)))
RetFile = ContentZipFile.UnpackFile(FromFile, ToFile)
if RetFile == '':
#
# a non-existent path in the zip file returns '', which in our case means an include directory;
# save the information for later DEC creation and also create the directory
#
PackageIncludeArchList.append([Item.GetFilePath(), Item.GetSupArchList()])
CreateDirectory(ToFile)
continue
if ReadOnly:
chmod(ToFile, stat.S_IRUSR|stat.S_IRGRP|stat.S_IROTH)
else:
chmod(ToFile, stat.S_IRUSR|stat.S_IRGRP|stat.S_IROTH|stat.S_IWUSR|stat.S_IWGRP|stat.S_IWOTH)
Md5Signature = md5(__FileHookOpen__(str(ToFile), 'rb').read())
Md5Sum = Md5Signature.hexdigest()
if (ToFile, Md5Sum) not in Package.FileList:
Package.FileList.append((ToFile, Md5Sum))
Package.SetIncludeArchList(PackageIncludeArchList)
for Item in Package.GetStandardIncludeFileList():
FileName = Item.GetFilePath()
if FileName.startswith("\\") or FileName.startswith("/"):
FileName = FileName[1:]
if not IsValidInstallPath(FileName):
Logger.Error("UPT", FORMAT_INVALID, ST.ERR_FILE_NAME_INVALIDE%FileName)
FromFile = os.path.join(FromPath, FileName)
ToFile = os.path.normpath(os.path.join(PackageFullPath, ConvertPath(FileName)))
Md5Sum = InstallFile(ContentZipFile, FromFile, ToFile, ReadOnly)
if (ToFile, Md5Sum) not in Package.FileList:
Package.FileList.append((ToFile, Md5Sum))
#
# Update package
#
Package.SetPackagePath(Package.GetPackagePath().replace(FromPath,
ToPath, 1))
Package.SetFullPath(os.path.normpath(os.path.join(PackageFullPath,
ConvertPath(Package.GetName()) + '.dec')))
#
# Install files in module
#
Module = None
ModuleDict = Package.GetModuleDict()
for ModuleGuid, ModuleVersion, ModuleName, ModulePath in ModuleDict:
Module = ModuleDict[ModuleGuid, ModuleVersion, ModuleName, ModulePath]
InstallModuleContent(FromPath, ToPath, ModulePath, Module,
ContentZipFile, WorkspaceDir, ModuleList, Package, ReadOnly)
## GetDPFile method
#
# Find the description (.pkg) and content (.content) files in the distribution zip file
#
# @param ZipFile: A ZipFile
# @retval: the description file name and the content file name
#
def GetDPFile(ZipFile):
ContentFile = ''
DescFile = ''
for FileName in ZipFile.namelist():
if FileName.endswith('.content'):
if not ContentFile:
ContentFile = FileName
continue
elif FileName.endswith('.pkg'):
if not DescFile:
DescFile = FileName
continue
else:
continue
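# Reaching this point means a second .content or .pkg file was found, which is not allowed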
Logger.Error("PackagingTool", FILE_TYPE_MISMATCH,
ExtraData=ST.ERR_DIST_FILE_TOOMANY)
if not DescFile or not ContentFile:
Logger.Error("PackagingTool", FILE_UNKNOWN_ERROR,
ExtraData=ST.ERR_DIST_FILE_TOOFEW)
return DescFile, ContentFile
## InstallDp method
#
# Install the distribution to current workspace
#
def InstallDp(DistPkg, DpPkgFileName, ContentZipFile, Options, Dep, WorkspaceDir, DataBase):
#
# PackageList, ModuleList record the information for the meta-data
# files that need to be generated later
#
PackageList = []
ModuleList = []
DistPkg.PackageSurfaceArea = GetPackageList(DistPkg, Dep, WorkspaceDir, Options,
ContentZipFile, ModuleList, PackageList)
DistPkg.ModuleSurfaceArea = GetModuleList(DistPkg, Dep, WorkspaceDir, ContentZipFile, ModuleList)
GenToolMisc(DistPkg, WorkspaceDir, ContentZipFile)
#
# copy "Distribution File" to directory $(WORKSPACE)/conf/upt
#
DistFileName = os.path.split(DpPkgFileName)[1]
NewDpPkgFileName = BackupDist(DpPkgFileName, DistPkg.Header.GetGuid(), DistPkg.Header.GetVersion(), WorkspaceDir)
#
# update database
#
Logger.Quiet(ST.MSG_UPDATE_PACKAGE_DATABASE)
DataBase.AddDPObject(DistPkg, NewDpPkgFileName, DistFileName,
DistPkg.Header.RePackage)
| edk2-master | BaseTools/Source/Python/UPT/InstallPkg.py |
## @file
# This file contain unit test for DecParser
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
import os
import unittest
from Logger.Log import FatalError
from Parser.DecParser import \
Dec, \
_DecDefine, \
_DecLibraryclass, \
_DecPcd, \
_DecGuid, \
FileContent, \
_DecBase, \
CleanString
from Object.Parser.DecObject import _DecComments
#
# Test CleanString
#
class CleanStringTestCase(unittest.TestCase):
def testCleanString(self):
Line, Comment = CleanString('')
self.assertEqual(Line, '')
self.assertEqual(Comment, '')
Line, Comment = CleanString('line without comment')
self.assertEqual(Line, 'line without comment')
self.assertEqual(Comment, '')
Line, Comment = CleanString('# pure comment')
self.assertEqual(Line, '')
self.assertEqual(Comment, '# pure comment')
Line, Comment = CleanString('line # and comment')
self.assertEqual(Line, 'line')
self.assertEqual(Comment, '# and comment')
def testCleanStringCpp(self):
Line, Comment = CleanString('line // and comment', AllowCppStyleComment = True)
self.assertEqual(Line, 'line')
self.assertEqual(Comment, '# and comment')
#
# Test _DecBase._MacroParser function
#
class MacroParserTestCase(unittest.TestCase):
def setUp(self):
self.dec = _DecBase(FileContent('dummy', []))
def testCorrectMacro(self):
self.dec._MacroParser('DEFINE MACRO1 = test1')
self.failIf('MACRO1' not in self.dec._LocalMacro)
self.assertEqual(self.dec._LocalMacro['MACRO1'], 'test1')
def testErrorMacro1(self):
# Raise fatal error, macro name must be upper case letter
self.assertRaises(FatalError, self.dec._MacroParser, 'DEFINE not_upper_case = test2')
def testErrorMacro2(self):
# No macro name given
self.assertRaises(FatalError, self.dec._MacroParser, 'DEFINE ')
#
# Test _DecBase._TryBackSlash function
#
class TryBackSlashTestCase(unittest.TestCase):
def setUp(self):
Content = [
# Right case
'test no backslash',
'test with backslash \\',
'continue second line',
# Do not precede with whitespace
'\\',
# Empty line after backslash is not allowed
'line with backslash \\',
''
]
self.dec = _DecBase(FileContent('dummy', Content))
def testBackSlash(self):
#
# Right case, assert return values
#
ConcatLine, CommentList = self.dec._TryBackSlash(self.dec._RawData.GetNextLine(), [])
self.assertEqual(ConcatLine, 'test no backslash')
self.assertEqual(CommentList, [])
ConcatLine, CommentList = self.dec._TryBackSlash(self.dec._RawData.GetNextLine(), [])
self.assertEqual(CommentList, [])
self.assertEqual(ConcatLine, 'test with backslash continue second line')
#
# Error cases, assert raise exception
#
self.assertRaises(FatalError, self.dec._TryBackSlash, self.dec._RawData.GetNextLine(), [])
self.assertRaises(FatalError, self.dec._TryBackSlash, self.dec._RawData.GetNextLine(), [])
#
# Test _DecBase.Parse function
#
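# DataItem/Data are minimal _DecComments containers, and TestInner/TestTop are simple
# parsers built on _DecBase to exercise the generic Parse logic
#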
class DataItem(_DecComments):
def __init__(self):
_DecComments.__init__(self)
self.String = ''
class Data(_DecComments):
def __init__(self):
_DecComments.__init__(self)
# List of DataItem
self.ItemList = []
class TestInner(_DecBase):
def __init__(self, RawData):
_DecBase.__init__(self, RawData)
self.ItemObject = Data()
def _StopCurrentParsing(self, Line):
return Line == '[TOP]'
def _ParseItem(self):
Item = DataItem()
Item.String = self._RawData.CurrentLine
self.ItemObject.ItemList.append(Item)
return Item
def _TailCommentStrategy(self, Comment):
return Comment.find('@comment') != -1
class TestTop(_DecBase):
def __init__(self, RawData):
_DecBase.__init__(self, RawData)
# List of Data
self.ItemObject = []
# Top parser
def _StopCurrentParsing(self, Line):
return False
def _ParseItem(self):
TestParser = TestInner(self._RawData)
TestParser.Parse()
self.ItemObject.append(TestParser.ItemObject)
return TestParser.ItemObject
class ParseTestCase(unittest.TestCase):
def setUp(self):
pass
def testParse(self):
Content = \
'''# Top comment
[TOP]
# sub1 head comment
(test item has both head and tail comment) # sub1 tail comment
# sub2 head comment
(test item has head and special tail comment)
# @comment test TailCommentStrategy branch
(test item has no comment)
# test NextLine branch
[TOP]
sub-item
'''
dec = TestTop(FileContent('dummy', Content.splitlines()))
dec.Parse()
# Two sections
self.assertEqual(len(dec.ItemObject), 2)
data = dec.ItemObject[0]
self.assertEqual(data._HeadComment[0][0], '# Top comment')
self.assertEqual(data._HeadComment[0][1], 1)
# 3 subitems
self.assertEqual(len(data.ItemList), 3)
dataitem = data.ItemList[0]
self.assertEqual(dataitem.String, '(test item has both head and tail comment)')
# Comment content
self.assertEqual(dataitem._HeadComment[0][0], '# sub1 head comment')
self.assertEqual(dataitem._TailComment[0][0], '# sub1 tail comment')
# Comment line number
self.assertEqual(dataitem._HeadComment[0][1], 3)
self.assertEqual(dataitem._TailComment[0][1], 4)
dataitem = data.ItemList[1]
self.assertEqual(dataitem.String, '(test item has head and special tail comment)')
# Comment content
self.assertEqual(dataitem._HeadComment[0][0], '# sub2 head comment')
self.assertEqual(dataitem._TailComment[0][0], '# @comment test TailCommentStrategy branch')
# Comment line number
self.assertEqual(dataitem._HeadComment[0][1], 5)
self.assertEqual(dataitem._TailComment[0][1], 7)
dataitem = data.ItemList[2]
self.assertEqual(dataitem.String, '(test item has no comment)')
# Comment content
self.assertEqual(dataitem._HeadComment, [])
self.assertEqual(dataitem._TailComment, [])
data = dec.ItemObject[1]
self.assertEqual(data._HeadComment[0][0], '# test NextLine branch')
self.assertEqual(data._HeadComment[0][1], 11)
# 1 subitems
self.assertEqual(len(data.ItemList), 1)
dataitem = data.ItemList[0]
self.assertEqual(dataitem.String, 'sub-item')
self.assertEqual(dataitem._HeadComment, [])
self.assertEqual(dataitem._TailComment, [])
#
# Test _DecDefine._ParseItem
#
class DecDefineTestCase(unittest.TestCase):
def GetObj(self, Content):
Obj = _DecDefine(FileContent('dummy', Content.splitlines()))
Obj._RawData.CurrentLine = Obj._RawData.GetNextLine()
return Obj
def testDecDefine(self):
item = self.GetObj('PACKAGE_NAME = MdePkg')._ParseItem()
self.assertEqual(item.Key, 'PACKAGE_NAME')
self.assertEqual(item.Value, 'MdePkg')
def testDecDefine1(self):
obj = self.GetObj('PACKAGE_NAME')
self.assertRaises(FatalError, obj._ParseItem)
def testDecDefine2(self):
obj = self.GetObj('unknown_key = ')
self.assertRaises(FatalError, obj._ParseItem)
def testDecDefine3(self):
obj = self.GetObj('PACKAGE_NAME = ')
self.assertRaises(FatalError, obj._ParseItem)
#
# Test _DecLibraryclass._ParseItem
#
class DecLibraryTestCase(unittest.TestCase):
def GetObj(self, Content):
Obj = _DecLibraryclass(FileContent('dummy', Content.splitlines()))
Obj._RawData.CurrentLine = Obj._RawData.GetNextLine()
return Obj
def testNoInc(self):
obj = self.GetObj('UefiRuntimeLib')
self.assertRaises(FatalError, obj._ParseItem)
def testEmpty(self):
obj = self.GetObj(' | ')
self.assertRaises(FatalError, obj._ParseItem)
def testLibclassNaming(self):
obj = self.GetObj('lowercase_efiRuntimeLib|Include/Library/UefiRuntimeLib.h')
self.assertRaises(FatalError, obj._ParseItem)
def testLibclassExt(self):
obj = self.GetObj('RuntimeLib|Include/Library/UefiRuntimeLib.no_h')
self.assertRaises(FatalError, obj._ParseItem)
def testLibclassRelative(self):
obj = self.GetObj('RuntimeLib|Include/../UefiRuntimeLib.h')
self.assertRaises(FatalError, obj._ParseItem)
#
# Test _DecPcd._ParseItem
#
class DecPcdTestCase(unittest.TestCase):
def GetObj(self, Content):
Obj = _DecPcd(FileContent('dummy', Content.splitlines()))
Obj._RawData.CurrentLine = Obj._RawData.GetNextLine()
Obj._RawData.CurrentScope = [('PcdsFeatureFlag'.upper(), 'COMMON')]
return Obj
def testOK(self):
item = self.GetObj('gEfiMdePkgTokenSpaceGuid.PcdComponentNameDisable|FALSE|BOOLEAN|0x0000000d')._ParseItem()
self.assertEqual(item.TokenSpaceGuidCName, 'gEfiMdePkgTokenSpaceGuid')
self.assertEqual(item.TokenCName, 'PcdComponentNameDisable')
self.assertEqual(item.DefaultValue, 'FALSE')
self.assertEqual(item.DatumType, 'BOOLEAN')
self.assertEqual(item.TokenValue, '0x0000000d')
def testNoCvar(self):
obj = self.GetObj('123ai.PcdComponentNameDisable|FALSE|BOOLEAN|0x0000000d')
self.assertRaises(FatalError, obj._ParseItem)
def testSplit(self):
obj = self.GetObj('gEfiMdePkgTokenSpaceGuid.PcdComponentNameDisable FALSE|BOOLEAN|0x0000000d')
self.assertRaises(FatalError, obj._ParseItem)
obj = self.GetObj('gEfiMdePkgTokenSpaceGuid.PcdComponentNameDisable|FALSE|BOOLEAN|0x0000000d | abc')
self.assertRaises(FatalError, obj._ParseItem)
def testUnknownType(self):
obj = self.GetObj('gEfiMdePkgTokenSpaceGuid.PcdComponentNameDisable|FALSE|unknown|0x0000000d')
self.assertRaises(FatalError, obj._ParseItem)
def testVoid(self):
obj = self.GetObj('gEfiMdePkgTokenSpaceGuid.PcdComponentNameDisable|abc|VOID*|0x0000000d')
self.assertRaises(FatalError, obj._ParseItem)
def testUINT(self):
obj = self.GetObj('gEfiMdePkgTokenSpaceGuid.PcdComponentNameDisable|0xabc|UINT8|0x0000000d')
self.assertRaises(FatalError, obj._ParseItem)
#
# Test _DecInclude._ParseItem
#
class DecIncludeTestCase(unittest.TestCase):
#
# Test code to be added
#
pass
#
# Test _DecGuid._ParseItem
#
class DecGuidTestCase(unittest.TestCase):
def GetObj(self, Content):
Obj = _DecGuid(FileContent('dummy', Content.splitlines()))
Obj._RawData.CurrentLine = Obj._RawData.GetNextLine()
Obj._RawData.CurrentScope = [('guids'.upper(), 'COMMON')]
return Obj
def testCValue(self):
item = self.GetObj('gEfiIpSecProtocolGuid={ 0xdfb386f7, 0xe100, 0x43ad,'
' {0x9c, 0x9a, 0xed, 0x90, 0xd0, 0x8a, 0x5e, 0x12 }}')._ParseItem()
self.assertEqual(item.GuidCName, 'gEfiIpSecProtocolGuid')
self.assertEqual(item.GuidCValue, '{ 0xdfb386f7, 0xe100, 0x43ad, {0x9c, 0x9a, 0xed, 0x90, 0xd0, 0x8a, 0x5e, 0x12 }}')
def testGuidString(self):
item = self.GetObj('gEfiIpSecProtocolGuid=1E73767F-8F52-4603-AEB4-F29B510B6766')._ParseItem()
self.assertEqual(item.GuidCName, 'gEfiIpSecProtocolGuid')
self.assertEqual(item.GuidCValue, '1E73767F-8F52-4603-AEB4-F29B510B6766')
def testNoValue1(self):
obj = self.GetObj('gEfiIpSecProtocolGuid')
self.assertRaises(FatalError, obj._ParseItem)
def testNoValue2(self):
obj = self.GetObj('gEfiIpSecProtocolGuid=')
self.assertRaises(FatalError, obj._ParseItem)
def testNoName(self):
obj = self.GetObj('=')
self.assertRaises(FatalError, obj._ParseItem)
#
# Test Dec.__init__
#
class DecDecInitTestCase(unittest.TestCase):
def testNoDecFile(self):
self.assertRaises(FatalError, Dec, 'No_Such_File')
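#
# Helper class that writes and removes a temporary file on disk for the parser tests below
#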
class TmpFile:
def __init__(self, File):
self.File = File
def Write(self, Content):
try:
FileObj = open(self.File, 'w')
FileObj.write(Content)
FileObj.close()
except:
pass
def Remove(self):
try:
os.remove(self.File)
except:
pass
#
# Test Dec._UserExtentionSectionParser
#
class DecUESectionTestCase(unittest.TestCase):
def setUp(self):
self.File = TmpFile('test.dec')
self.File.Write(
'''[userextensions.intel."myid"]
[userextensions.intel."myid".IA32]
[userextensions.intel."myid".IA32,]
[userextensions.intel."myid]
'''
)
def tearDown(self):
self.File.Remove()
def testUserExtentionHeader(self):
dec = Dec('test.dec', False)
# OK: [userextensions.intel."myid"]
dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
dec._UserExtentionSectionParser()
self.assertEqual(len(dec._RawData.CurrentScope), 1)
self.assertEqual(dec._RawData.CurrentScope[0][0], 'userextensions'.upper())
self.assertEqual(dec._RawData.CurrentScope[0][1], 'intel')
self.assertEqual(dec._RawData.CurrentScope[0][2], '"myid"')
self.assertEqual(dec._RawData.CurrentScope[0][3], 'COMMON')
# OK: [userextensions.intel."myid".IA32]
dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
dec._UserExtentionSectionParser()
self.assertEqual(len(dec._RawData.CurrentScope), 1)
self.assertEqual(dec._RawData.CurrentScope[0][0], 'userextensions'.upper())
self.assertEqual(dec._RawData.CurrentScope[0][1], 'intel')
self.assertEqual(dec._RawData.CurrentScope[0][2], '"myid"')
self.assertEqual(dec._RawData.CurrentScope[0][3], 'IA32')
# Fail: [userextensions.intel."myid".IA32,]
dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
self.assertRaises(FatalError, dec._UserExtentionSectionParser)
# Fail: [userextensions.intel."myid]
dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
self.assertRaises(FatalError, dec._UserExtentionSectionParser)
#
# Test Dec._SectionHeaderParser
#
class DecSectionTestCase(unittest.TestCase):
def setUp(self):
self.File = TmpFile('test.dec')
self.File.Write(
'''[no section start or end
[,] # empty sub-section
[unknow_section_name]
[Includes.IA32.other] # no third one
[PcdsFeatureFlag, PcdsFixedAtBuild] # feature flag PCD must not be in the same section of other types of PCD
[Includes.IA32, Includes.IA32]
[Includes, Includes.IA32] # common cannot be with other arch
[Includes.IA32, PcdsFeatureFlag] # different section name
''' )
def tearDown(self):
self.File.Remove()
def testSectionHeader(self):
dec = Dec('test.dec', False)
# [no section start or end
dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
self.assertRaises(FatalError, dec._SectionHeaderParser)
#[,] # empty sub-section
dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
self.assertRaises(FatalError, dec._SectionHeaderParser)
# [unknow_section_name]
dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
self.assertRaises(FatalError, dec._SectionHeaderParser)
# [Includes.IA32.other] # no third one
dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
self.assertRaises(FatalError, dec._SectionHeaderParser)
# [PcdsFeatureFlag, PcdsFixedAtBuild]
dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
self.assertRaises(FatalError, dec._SectionHeaderParser)
# [Includes.IA32, Includes.IA32]
dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
dec._SectionHeaderParser()
self.assertEqual(len(dec._RawData.CurrentScope), 1)
self.assertEqual(dec._RawData.CurrentScope[0][0], 'Includes'.upper())
self.assertEqual(dec._RawData.CurrentScope[0][1], 'IA32')
# [Includes, Includes.IA32] # common cannot be with other arch
dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
self.assertRaises(FatalError, dec._SectionHeaderParser)
# [Includes.IA32, PcdsFeatureFlag] # different section name not allowed
dec._RawData.CurrentLine = CleanString(dec._RawData.GetNextLine())[0]
self.assertRaises(FatalError, dec._SectionHeaderParser)
#
# Test Dec._ParseDecComment
#
class DecDecCommentTestCase(unittest.TestCase):
def testDecHeadComment(self):
File = TmpFile('test.dec')
File.Write(
'''# abc
##''')
dec = Dec('test.dec', False)
dec.ParseDecComment()
self.assertEqual(len(dec._HeadComment), 2)
self.assertEqual(dec._HeadComment[0][0], '# abc')
self.assertEqual(dec._HeadComment[0][1], 1)
self.assertEqual(dec._HeadComment[1][0], '##')
self.assertEqual(dec._HeadComment[1][1], 2)
File.Remove()
def testNoDoubleComment(self):
File = TmpFile('test.dec')
File.Write(
'''# abc
#
[section_start]''')
dec = Dec('test.dec', False)
dec.ParseDecComment()
self.assertEqual(len(dec._HeadComment), 2)
self.assertEqual(dec._HeadComment[0][0], '# abc')
self.assertEqual(dec._HeadComment[0][1], 1)
self.assertEqual(dec._HeadComment[1][0], '#')
self.assertEqual(dec._HeadComment[1][1], 2)
File.Remove()
if __name__ == '__main__':
import Logger.Logger
Logger.Logger.Initialize()
unittest.main()
| edk2-master | BaseTools/Source/Python/UPT/UnitTest/DecParserUnitTest.py |
## @file
# This file contain unit test for Test [Binary] section part of InfParser
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
from __future__ import print_function
import os
#import Object.Parser.InfObject as InfObject
from Object.Parser.InfCommonObject import CurrentLine
from Object.Parser.InfCommonObject import InfLineCommentObject
from Object.Parser.InfBinaryObject import InfBinariesObject
import Logger.Log as Logger
import Library.GlobalData as Global
##
# Test Common binary item
#
#-------------start of common binary item test input--------------------------#
#
# Only has 1 element, binary item Type
#
SectionStringsCommonItem1 = \
"""
GUID
"""
#
# Have 2 elements, binary item Type and FileName
#
SectionStringsCommonItem2 = \
"""
GUID | Test/Test.guid
"""
#
# Have 3 elements, Type | FileName | Target
#
SectionStringsCommonItem3 = \
"""
GUID | Test/Test.guid | DEBUG
"""
#
# Have 3 elements, Type | FileName | Target
# Target with MACRO defined in [Define] section
#
SectionStringsCommonItem4 = \
"""
GUID | Test/Test.guid | $(TARGET)
"""
#
# Have 3 elements, Type | FileName | Target
# FileName with MACRO defined in [Binary] section
#
SectionStringsCommonItem5 = \
"""
DEFINE BINARY_FILE_PATH = Test
GUID | $(BINARY_FILE_PATH)/Test.guid | $(TARGET)
"""
#
# Have 4 elements, Type | FileName | Target | Family
#
SectionStringsCommonItem6 = \
"""
GUID | Test/Test.guid | DEBUG | *
"""
#
# Have 4 elements, Type | FileName | Target | Family
#
SectionStringsCommonItem7 = \
"""
GUID | Test/Test.guid | DEBUG | MSFT
"""
#
# Have 5 elements, Type | FileName | Target | Family | TagName
#
SectionStringsCommonItem8 = \
"""
GUID | Test/Test.guid | DEBUG | MSFT | TEST
"""
#
# Have 6 elements, Type | FileName | Target | Family | TagName | FFE
#
SectionStringsCommonItem9 = \
"""
GUID | Test/Test.guid | DEBUG | MSFT | TEST | TRUE
"""
#
# Have 7 elements, Type | FileName | Target | Family | TagName | FFE | Overflow
# Test wrong format
#
SectionStringsCommonItem10 = \
"""
GUID | Test/Test.guid | DEBUG | MSFT | TEST | TRUE | OVERFLOW
"""
#-------------end of common binary item test input----------------------------#
#-------------start of VER type binary item test input------------------------#
#
# Has 1 element, error format
#
SectionStringsVerItem1 = \
"""
VER
"""
#
# Have 5 elements, error format (at most 4 elements are allowed)
#
SectionStringsVerItem2 = \
"""
VER | Test/Test.ver | * | TRUE | OverFlow
"""
#
# Have 2 elements, Type | FileName
#
SectionStringsVerItem3 = \
"""
VER | Test/Test.ver
"""
#
# Have 3 elements, Type | FileName | Target
#
SectionStringsVerItem4 = \
"""
VER | Test/Test.ver | DEBUG
"""
#
# Have 4 elements, Type | FileName | Target | FeatureFlagExp
#
SectionStringsVerItem5 = \
"""
VER | Test/Test.ver | DEBUG | TRUE
"""
#
# Exist 2 VER items, both opened.
#
SectionStringsVerItem6 = \
"""
VER | Test/Test.ver | * | TRUE
VER | Test/Test2.ver | * | TRUE
"""
#
# Exist 2 VER items, only 1 opened.
#
SectionStringsVerItem7 = \
"""
VER | Test/Test.ver | * | TRUE
VER | Test/Test2.ver | * | FALSE
"""
#-------------end of VER type binary item test input--------------------------#
#-------------start of UI type binary item test input-------------------------#
#
# Test only one UI section can exist
#
SectionStringsUiItem1 = \
"""
UI | Test/Test.ui | * | TRUE
UI | Test/Test2.ui | * | TRUE
"""
SectionStringsUiItem2 = \
"""
UI | Test/Test.ui | * | TRUE
SEC_UI | Test/Test2.ui | * | TRUE
"""
SectionStringsUiItem3 = \
"""
UI | Test/Test.ui | * | TRUE
UI | Test/Test2.ui | * | FALSE
"""
#
# Has 1 element, error format
#
SectionStringsUiItem4 = \
"""
UI
"""
#
# Have 5 elements, error format (at most 4 elements are allowed)
#
SectionStringsUiItem5 = \
"""
UI | Test/Test.ui | * | TRUE | OverFlow
"""
#
# Have 2 elements, Type | FileName
#
SectionStringsUiItem6 = \
"""
UI | Test/Test.ui
"""
#
# Have 3 elements, Type | FileName | Target
#
SectionStringsUiItem7 = \
"""
UI | Test/Test.ui | DEBUG
"""
#
# Have 4 elements, Type | FileName | Target | FeatureFlagExp
#
SectionStringsUiItem8 = \
"""
UI | Test/Test.ui | DEBUG | TRUE
"""
#---------------end of UI type binary item test input-------------------------#
gFileName = "BinarySectionTest.inf"
##
# Construct SectionString for call section parser usage.
#
def StringToSectionString(String):
Lines = String.split('\n')
LineNo = 0
SectionString = []
for Line in Lines:
if Line.strip() == '':
continue
SectionString.append((Line, LineNo, ''))
LineNo = LineNo + 1
return SectionString
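##
# Build the item list consumed by SetBinary: split each section line on '|', create any
# referenced file so existence checks can pass, and wrap the line information in
# CurrentLine/InfLineCommentObject instances.
#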
def PrepareTest(String):
SectionString = StringToSectionString(String)
ItemList = []
for Item in SectionString:
ValueList = Item[0].split('|')
for count in range(len(ValueList)):
ValueList[count] = ValueList[count].strip()
if len(ValueList) >= 2:
#
# Create a temp file for test.
#
FileName = os.path.normpath(os.path.realpath(ValueList[1].strip()))
try:
TempFile = open (FileName, "w")
TempFile.close()
except:
print("File Create Error")
# Use local names that do not shadow the imported CurrentLine/InfLineCommentObject classes
LineInfo = CurrentLine()
LineInfo.SetFileName("Test")
LineInfo.SetLineString(Item[0])
LineInfo.SetLineNo(Item[1])
LineComment = InfLineCommentObject()
ItemList.append((ValueList, LineComment, LineInfo))
return ItemList
if __name__ == '__main__':
Logger.Initialize()
InfBinariesInstance = InfBinariesObject()
ArchList = ['COMMON']
Global.gINF_MODULE_DIR = os.getcwd()
AllPassedFlag = True
#
# For All Ui test
#
UiStringList = [
SectionStringsUiItem1,
SectionStringsUiItem2,
SectionStringsUiItem3,
SectionStringsUiItem4,
SectionStringsUiItem5,
SectionStringsUiItem6,
SectionStringsUiItem7,
SectionStringsUiItem8
]
for Item in UiStringList:
Ui = PrepareTest(Item)
if Item == SectionStringsUiItem4 or Item == SectionStringsUiItem5:
try:
InfBinariesInstance.SetBinary(Ui = Ui, ArchList = ArchList)
except Logger.FatalError:
pass
else:
try:
InfBinariesInstance.SetBinary(Ui = Ui, ArchList = ArchList)
except:
AllPassedFlag = False
#
# For All Ver Test
#
VerStringList = [
SectionStringsVerItem1,
SectionStringsVerItem2,
SectionStringsVerItem3,
SectionStringsVerItem4,
SectionStringsVerItem5,
SectionStringsVerItem6,
SectionStringsVerItem7
]
for Item in VerStringList:
Ver = PrepareTest(Item)
if Item == SectionStringsVerItem1 or \
Item == SectionStringsVerItem2:
try:
InfBinariesInstance.SetBinary(Ver = Ver, ArchList = ArchList)
except:
pass
else:
try:
InfBinariesInstance.SetBinary(Ver = Ver, ArchList = ArchList)
except:
AllPassedFlag = False
#
# For All Common Test
#
CommonStringList = [
SectionStringsCommonItem1,
SectionStringsCommonItem2,
SectionStringsCommonItem3,
SectionStringsCommonItem4,
SectionStringsCommonItem5,
SectionStringsCommonItem6,
SectionStringsCommonItem7,
SectionStringsCommonItem8,
SectionStringsCommonItem9,
SectionStringsCommonItem10
]
for Item in CommonStringList:
CommonBin = PrepareTest(Item)
if Item == SectionStringsCommonItem10 or \
Item == SectionStringsCommonItem1:
try:
InfBinariesInstance.SetBinary(CommonBinary = CommonBin, ArchList = ArchList)
except:
pass
else:
try:
InfBinariesInstance.SetBinary(CommonBinary = CommonBin, ArchList = ArchList)
except:
print("Test Failed!")
AllPassedFlag = False
if AllPassedFlag :
print('All tests passed...')
else:
print('Some unit test failed!')
| edk2-master | BaseTools/Source/Python/UPT/UnitTest/InfBinarySectionTest.py |
## @file
# This file contain unit test for CommentParsing
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
import unittest
import Logger.Log as Logger
from Library.CommentParsing import ParseHeaderCommentSection, \
ParseGenericComment, \
ParseDecPcdGenericComment, \
ParseDecPcdTailComment
from Library.CommentParsing import _IsCopyrightLine
from Library.StringUtils import GetSplitValueList
from Library.DataType import TAB_SPACE_SPLIT
from Library.DataType import TAB_LANGUAGE_EN_US
#
# Test ParseHeaderCommentSection
#
class ParseHeaderCommentSectionTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
#
# Normal case1: have license/copyright/license above @file
#
def testNormalCase1(self):
TestCommentLines1 = \
'''# License1
# License2
#
## @file
# example abstract
#
# example description
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# License3
#'''
CommentList = GetSplitValueList(TestCommentLines1, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
Abstract, Description, Copyright, License = \
ParseHeaderCommentSection(TestCommentLinesList, "PhonyFile")
ExpectedAbstract = 'example abstract'
self.assertEqual(Abstract, ExpectedAbstract)
ExpectedDescription = 'example description'
self.assertEqual(Description, ExpectedDescription)
ExpectedCopyright = \
'Copyright (c) 2007 - 2018,'\
' Intel Corporation. All rights reserved.<BR>'
self.assertEqual(Copyright, ExpectedCopyright)
ExpectedLicense = 'License1\nLicense2\n\nLicense3'
self.assertEqual(License, ExpectedLicense)
#
# Normal case2: have license/copyright above @file, but no license after the copyright
#
def testNormalCase2(self):
TestCommentLines2 = \
''' # License1
# License2
#
## @file
# example abstract
#
# example description
#
#Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
##'''
CommentList = GetSplitValueList(TestCommentLines2, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
Abstract, Description, Copyright, License = \
ParseHeaderCommentSection(TestCommentLinesList, "PhonyFile")
ExpectedAbstract = 'example abstract'
self.assertEqual(Abstract, ExpectedAbstract)
ExpectedDescription = 'example description'
self.assertEqual(Description, ExpectedDescription)
ExpectedCopyright = \
'Copyright (c) 2007 - 2018, Intel Corporation.'\
' All rights reserved.<BR>'
self.assertEqual(Copyright, ExpectedCopyright)
ExpectedLicense = 'License1\nLicense2'
self.assertEqual(License, ExpectedLicense)
#
# Normal case3: have license/copyright/license above @file,
# but no abstract/description
#
def testNormalCase3(self):
TestCommentLines3 = \
''' # License1
# License2
#
## @file
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# License3 Line1
# License3 Line2
##'''
CommentList = GetSplitValueList(TestCommentLines3, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
Abstract, Description, Copyright, License = \
ParseHeaderCommentSection(TestCommentLinesList, "PhonyFile")
ExpectedAbstract = ''
self.assertEqual(Abstract, ExpectedAbstract)
ExpectedDescription = ''
self.assertEqual(Description, ExpectedDescription)
ExpectedCopyright = \
'Copyright (c) 2007 - 2018,'\
' Intel Corporation. All rights reserved.<BR>'
self.assertEqual(Copyright, ExpectedCopyright)
ExpectedLicense = \
'License1\n' \
'License2\n\n' \
'License3 Line1\n' \
'License3 Line2'
self.assertEqual(License, ExpectedLicense)
#
# Normal case4: format example in spec
#
def testNormalCase4(self):
TestCommentLines = \
'''
## @file
# Abstract
#
# Description
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# License
#
##'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
Abstract, Description, Copyright, License = \
ParseHeaderCommentSection(TestCommentLinesList, "PhonyFile")
ExpectedAbstract = 'Abstract'
self.assertEqual(Abstract, ExpectedAbstract)
ExpectedDescription = 'Description'
self.assertEqual(Description, ExpectedDescription)
ExpectedCopyright = \
'Copyright (c) 2007 - 2018, Intel Corporation.'\
' All rights reserved.<BR>'
self.assertEqual(Copyright, ExpectedCopyright)
ExpectedLicense = \
'License'
self.assertEqual(License, ExpectedLicense)
#
# Normal case5: other line between copyright
#
def testNormalCase5(self):
TestCommentLines = \
'''
## @file
# Abstract
#
# Description
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
# other line
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# License
#
##'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
Abstract, Description, Copyright, License = \
ParseHeaderCommentSection(TestCommentLinesList, "PhonyFile")
ExpectedAbstract = 'Abstract'
self.assertEqual(Abstract, ExpectedAbstract)
ExpectedDescription = 'Description'
self.assertEqual(Description, ExpectedDescription)
ExpectedCopyright = \
'Copyright (c) 2007 - 2018, Intel Corporation.'\
' All rights reserved.<BR>\n'\
'Copyright (c) 2007 - 2018, Intel Corporation.'\
' All rights reserved.<BR>'
self.assertEqual(Copyright, ExpectedCopyright)
ExpectedLicense = \
'License'
self.assertEqual(License, ExpectedLicense)
#
# Normal case6: multiple lines of copyright
#
def testNormalCase6(self):
TestCommentLines = \
'''
## @file
# Abstract
#
# Description
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2007 - 2010, FOO1 Corporation. All rights reserved.<BR>
# Copyright (c) 2007 - 2010, FOO2 Corporation. All rights reserved.<BR>
#
# License
#
##'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
Abstract, Description, Copyright, License = \
ParseHeaderCommentSection(TestCommentLinesList, "PhonyFile")
ExpectedAbstract = 'Abstract'
self.assertEqual(Abstract, ExpectedAbstract)
ExpectedDescription = 'Description'
self.assertEqual(Description, ExpectedDescription)
ExpectedCopyright = \
'Copyright (c) 2007 - 2018, Intel Corporation.'\
' All rights reserved.<BR>\n'\
'Copyright (c) 2007 - 2010, FOO1 Corporation.'\
' All rights reserved.<BR>\n'\
'Copyright (c) 2007 - 2010, FOO2 Corporation.'\
' All rights reserved.<BR>'
self.assertEqual(Copyright, ExpectedCopyright)
ExpectedLicense = \
'License'
self.assertEqual(License, ExpectedLicense)
#
# Normal case7: Abstract not present
#
def testNormalCase7(self):
TestCommentLines = \
'''
## @file
#
# Description
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2007 - 2010, FOO1 Corporation. All rights reserved.<BR>
# Copyright (c) 2007 - 2010, FOO2 Corporation. All rights reserved.<BR>
#
# License
#
##'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
Abstract, Description, Copyright, License = \
ParseHeaderCommentSection(TestCommentLinesList, "PhonyFile")
ExpectedAbstract = ''
self.assertEqual(Abstract, ExpectedAbstract)
ExpectedDescription = 'Description'
self.assertEqual(Description, ExpectedDescription)
ExpectedCopyright = \
'Copyright (c) 2007 - 2018, Intel Corporation.'\
' All rights reserved.<BR>\n'\
'Copyright (c) 2007 - 2010, FOO1 Corporation.'\
' All rights reserved.<BR>\n'\
'Copyright (c) 2007 - 2010, FOO2 Corporation.'\
' All rights reserved.<BR>'
self.assertEqual(Copyright, ExpectedCopyright)
ExpectedLicense = \
'License'
self.assertEqual(License, ExpectedLicense)
#
# Normal case8: Description not present
#
def testNormalCase8(self):
TestCommentLines = \
'''
## @file
# Abstract
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# License
#
##'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
Abstract, Description, Copyright, License = \
ParseHeaderCommentSection(TestCommentLinesList, "PhonyFile")
ExpectedAbstract = 'Abstract'
self.assertEqual(Abstract, ExpectedAbstract)
ExpectedDescription = ''
self.assertEqual(Description, ExpectedDescription)
ExpectedCopyright = \
'Copyright (c) 2007 - 2018, Intel Corporation.'\
' All rights reserved.<BR>'
self.assertEqual(Copyright, ExpectedCopyright)
ExpectedLicense = \
'License'
self.assertEqual(License, ExpectedLicense)
#
# Error case1: No copyright found
#
def testErrorCase1(self):
TestCommentLines = \
'''
## @file
# Abstract
#
# Description
#
# License
#
##'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
self.assertRaises(Logger.FatalError,
ParseHeaderCommentSection,
TestCommentLinesList,
"PhonyFile")
#
# Error case2: non-empty non-comment lines passed in
#
def testErrorCase2(self):
TestCommentLines = \
'''
## @file
# Abstract
#
this is invalid line
# Description
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
# License
#
##'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
self.assertRaises(Logger.FatalError,
ParseHeaderCommentSection,
TestCommentLinesList,
"PhonyFile")
#
# Test ParseGenericComment
#
class ParseGenericCommentTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
#
# Normal case1: one line of comment
#
def testNormalCase1(self):
TestCommentLines = \
'''# hello world'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
HelptxtObj = ParseGenericComment(TestCommentLinesList, 'testNormalCase1')
self.failIf(not HelptxtObj)
self.assertEqual(HelptxtObj.GetString(), 'hello world')
self.assertEqual(HelptxtObj.GetLang(), TAB_LANGUAGE_EN_US)
#
# Normal case2: multiple lines of comment
#
def testNormalCase2(self):
TestCommentLines = \
'''## hello world
# second line'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
HelptxtObj = ParseGenericComment(TestCommentLinesList, 'testNormalCase2')
self.failIf(not HelptxtObj)
self.assertEqual(HelptxtObj.GetString(),
'hello world\n' + 'second line')
self.assertEqual(HelptxtObj.GetLang(), TAB_LANGUAGE_EN_US)
#
# Normal case3: multiple lines of comment, non-comment lines will be skipped
#
def testNormalCase3(self):
TestCommentLines = \
'''## hello world
This is not comment line'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
HelptxtObj = ParseGenericComment(TestCommentLinesList, 'testNormalCase3')
self.failIf(not HelptxtObj)
self.assertEqual(HelptxtObj.GetString(),
'hello world\n\n')
self.assertEqual(HelptxtObj.GetLang(), TAB_LANGUAGE_EN_US)
#
# Test ParseDecPcdGenericComment
#
class ParseDecPcdGenericCommentTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
#
# Normal case1: comments with no special comment
#
def testNormalCase1(self):
TestCommentLines = \
'''## hello world
# second line'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
(HelpTxt, PcdErr) = \
ParseDecPcdGenericComment(TestCommentLinesList, 'testNormalCase1')
self.failIf(not HelpTxt)
self.failIf(PcdErr)
self.assertEqual(HelpTxt,
'hello world\n' + 'second line')
#
# Normal case2: comments with valid list
#
def testNormalCase2(self):
TestCommentLines = \
'''## hello world
# second line
# @ValidList 1, 2, 3
# other line'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
(HelpTxt, PcdErr) = \
ParseDecPcdGenericComment(TestCommentLinesList, 'UnitTest')
self.failIf(not HelpTxt)
self.failIf(not PcdErr)
self.assertEqual(HelpTxt,
'hello world\n' + 'second line\n' + 'other line')
ExpectedList = GetSplitValueList('1 2 3', TAB_SPACE_SPLIT)
ActualList = [item for item in \
GetSplitValueList(PcdErr.GetValidValue(), TAB_SPACE_SPLIT) if item]
self.assertEqual(ExpectedList, ActualList)
self.failIf(PcdErr.GetExpression())
self.failIf(PcdErr.GetValidValueRange())
#
# Normal case3: comments with valid range
#
def testNormalCase3(self):
TestCommentLines = \
'''## hello world
# second line
# @ValidRange LT 1 AND GT 2
# other line'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
(HelpTxt, PcdErr) = \
ParseDecPcdGenericComment(TestCommentLinesList, 'UnitTest')
self.failIf(not HelpTxt)
self.failIf(not PcdErr)
self.assertEqual(HelpTxt,
'hello world\n' + 'second line\n' + 'other line')
self.assertEqual(PcdErr.GetValidValueRange().strip(), 'LT 1 AND GT 2')
self.failIf(PcdErr.GetExpression())
self.failIf(PcdErr.GetValidValue())
#
# Normal case4: comments with valid expression
#
def testNormalCase4(self):
TestCommentLines = \
'''## hello world
# second line
# @Expression LT 1 AND GT 2
# other line'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
(HelpTxt, PcdErr) = \
ParseDecPcdGenericComment(TestCommentLinesList, 'UnitTest')
self.failIf(not HelpTxt)
self.failIf(not PcdErr)
self.assertEqual(HelpTxt,
'hello world\n' + 'second line\n' + 'other line')
self.assertEqual(PcdErr.GetExpression().strip(), 'LT 1 AND GT 2')
self.failIf(PcdErr.GetValidValueRange())
self.failIf(PcdErr.GetValidValue())
#
# Normal case5: comments with valid expression and no generic comment
#
def testNormalCase5(self):
TestCommentLines = \
'''# @Expression LT 1 AND GT 2'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
(HelpTxt, PcdErr) = \
ParseDecPcdGenericComment(TestCommentLinesList, 'UnitTest')
self.failIf(HelpTxt)
self.failIf(not PcdErr)
self.assertEqual(PcdErr.GetExpression().strip(), 'LT 1 AND GT 2')
self.failIf(PcdErr.GetValidValueRange())
self.failIf(PcdErr.GetValidValue())
#
# Normal case6: comments with only generic help text
#
def testNormalCase6(self):
TestCommentLines = \
'''#'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
(HelpTxt, PcdErr) = \
ParseDecPcdGenericComment(TestCommentLinesList, 'UnitTest')
self.assertEqual(HelpTxt, '\n')
self.failIf(PcdErr)
#
# Error case1: comments with both a valid list and an expression; the later one
# is used and the former is ignored with a warning message
#
def testErrorCase1(self):
TestCommentLines = \
'''## hello world
# second line
# @ValidList 1, 2, 3
# @Expression LT 1 AND GT 2
# other line'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
try:
ParseDecPcdGenericComment(TestCommentLinesList, 'UnitTest')
except Logger.FatalError:
pass
#
# Test ParseDecPcdTailComment
#
class ParseDecPcdTailCommentTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
#
# Normal case1: comments with no SupModeList
#
def testNormalCase1(self):
TestCommentLines = \
'''## #hello world'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
(SupModeList, HelpStr) = \
ParseDecPcdTailComment(TestCommentLinesList, 'UnitTest')
self.failIf(not HelpStr)
self.failIf(SupModeList)
self.assertEqual(HelpStr,
'hello world')
#
# Normal case2: comments with one SupMode
#
def testNormalCase2(self):
TestCommentLines = \
'''## BASE #hello world'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
(SupModeList, HelpStr) = \
ParseDecPcdTailComment(TestCommentLinesList, 'UnitTest')
self.failIf(not HelpStr)
self.failIf(not SupModeList)
self.assertEqual(HelpStr,
'hello world')
self.assertEqual(SupModeList,
['BASE'])
#
# Normal case3: comments with more than one SupMode
#
def testNormalCase3(self):
TestCommentLines = \
'''## BASE UEFI_APPLICATION #hello world'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
(SupModeList, HelpStr) = \
ParseDecPcdTailComment(TestCommentLinesList, 'UnitTest')
self.failIf(not HelpStr)
self.failIf(not SupModeList)
self.assertEqual(HelpStr,
'hello world')
self.assertEqual(SupModeList,
['BASE', 'UEFI_APPLICATION'])
#
# Normal case4: comments with more than one SupMode, no help text
#
def testNormalCase4(self):
TestCommentLines = \
'''## BASE UEFI_APPLICATION'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
(SupModeList, HelpStr) = \
ParseDecPcdTailComment(TestCommentLinesList, 'UnitTest')
self.failIf(HelpStr)
self.failIf(not SupModeList)
self.assertEqual(SupModeList,
['BASE', 'UEFI_APPLICATION'])
#
# Normal case5: general comments with no SupModeList, extracted from a real case
#
def testNormalCase5(self):
TestCommentLines = \
''' # 1 = 128MB, 2 = 256MB, 3 = MAX'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
(SupModeList, HelpStr) = \
ParseDecPcdTailComment(TestCommentLinesList, 'UnitTest')
self.failIf(not HelpStr)
self.assertEqual(HelpStr,
'1 = 128MB, 2 = 256MB, 3 = MAX')
self.failIf(SupModeList)
#
# Error case2: comments whose SupModeList contains both valid and invalid
# module types
#
def testErrorCase2(self):
TestCommentLines = \
'''## BASE INVALID_MODULE_TYPE #hello world'''
CommentList = GetSplitValueList(TestCommentLines, "\n")
LineNum = 0
TestCommentLinesList = []
for Comment in CommentList:
LineNum += 1
TestCommentLinesList.append((Comment, LineNum))
try:
ParseDecPcdTailComment(TestCommentLinesList, 'UnitTest')
except Logger.FatalError:
pass
#
# Test _IsCopyrightLine
#
class _IsCopyrightLineTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
#
# Normal case
#
def testCase1(self):
Line = 'this is a copyright ( line'
Result = _IsCopyrightLine(Line)
self.failIf(not Result)
#
# Normal case
#
def testCase2(self):
Line = 'this is a Copyright ( line'
Result = _IsCopyrightLine(Line)
self.failIf(not Result)
#
# Normal case
#
def testCase3(self):
Line = 'this is not aCopyright ( line'
Result = _IsCopyrightLine(Line)
self.failIf(Result)
#
# Normal case
#
def testCase4(self):
Line = 'this is Copyright( line'
Result = _IsCopyrightLine(Line)
self.failIf(not Result)
#
# Normal case
#
def testCase5(self):
Line = 'this is Copyright (line'
Result = _IsCopyrightLine(Line)
self.failIf(not Result)
#
# Normal case
#
def testCase6(self):
Line = 'this is not Copyright line'
Result = _IsCopyrightLine(Line)
self.failIf(Result)
#
# Normal case
#
def testCase7(self):
Line = 'Copyright (c) line'
Result = _IsCopyrightLine(Line)
self.failIf(not Result)
#
# Normal case
#
def testCase8(self):
Line = ' Copyright (c) line'
Result = _IsCopyrightLine(Line)
self.failIf(not Result)
#
# Normal case
#
def testCase9(self):
Line = 'not a Copyright '
Result = _IsCopyrightLine(Line)
self.failIf(Result)
if __name__ == '__main__':
Logger.Initialize()
unittest.main()
| edk2-master | BaseTools/Source/Python/UPT/UnitTest/CommentParsingUnitTest.py |
## @file
# This file contain unit test for CommentParsing
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
import os
import unittest
import Logger.Log as Logger
from GenMetaFile.GenInfFile import GenGuidSections
from GenMetaFile.GenInfFile import GenProtocolPPiSections
from GenMetaFile.GenInfFile import GenPcdSections
from GenMetaFile.GenInfFile import GenSpecialSections
from Library.CommentGenerating import GenGenericCommentF
from Library.CommentGenerating import _GetHelpStr
from Object.POM.CommonObject import TextObject
from Object.POM.CommonObject import GuidObject
from Object.POM.CommonObject import ProtocolObject
from Object.POM.CommonObject import PpiObject
from Object.POM.CommonObject import PcdObject
from Object.POM.ModuleObject import HobObject
from Library.StringUtils import GetSplitValueList
from Library.DataType import TAB_SPACE_SPLIT
from Library.DataType import TAB_LANGUAGE_EN_US
from Library.DataType import TAB_LANGUAGE_ENG
from Library.DataType import ITEM_UNDEFINED
from Library.DataType import TAB_INF_FEATURE_PCD
from Library import GlobalData
from Library.Misc import CreateDirectory
#
# Test _GetHelpStr
#
class _GetHelpStrTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
#
# Normal case1: have one help text object with Lang = 'en-US'
#
def testNormalCase1(self):
HelpStr = 'Hello world'
HelpTextObj = TextObject()
HelpTextObj.SetLang(TAB_LANGUAGE_EN_US)
HelpTextObj.SetString(HelpStr)
HelpTextList = [HelpTextObj]
Result = _GetHelpStr(HelpTextList)
self.assertEqual(Result, HelpStr)
#
# Normal case2: have two help text objects with Lang = 'eng' and 'en-US'
#
def testNormalCase2(self):
HelpStr = 'Hello world'
HelpTextObj = TextObject()
HelpTextObj.SetLang(TAB_LANGUAGE_ENG)
HelpTextObj.SetString(HelpStr)
HelpTextList = [HelpTextObj]
ExpectedStr = 'Hello world1'
HelpTextObj = TextObject()
HelpTextObj.SetLang(TAB_LANGUAGE_EN_US)
HelpTextObj.SetString(ExpectedStr)
HelpTextList.append(HelpTextObj)
Result = _GetHelpStr(HelpTextList)
self.assertEqual(Result, ExpectedStr)
#
# Normal case3: have two help text object with Lang = '' and 'eng'
#
def testNormalCase3(self):
HelpStr = 'Hello world'
HelpTextObj = TextObject()
HelpTextObj.SetLang('')
HelpTextObj.SetString(HelpStr)
HelpTextList = [HelpTextObj]
ExpectedStr = 'Hello world1'
HelpTextObj = TextObject()
HelpTextObj.SetLang(TAB_LANGUAGE_ENG)
HelpTextObj.SetString(ExpectedStr)
HelpTextList.append(HelpTextObj)
Result = _GetHelpStr(HelpTextList)
self.assertEqual(Result, ExpectedStr)
#
# Normal case4: have two help text objects with Lang = 'eng' and ''
#
def testNormalCase4(self):
ExpectedStr = 'Hello world1'
HelpTextObj = TextObject()
HelpTextObj.SetLang(TAB_LANGUAGE_ENG)
HelpTextObj.SetString(ExpectedStr)
HelpTextList = [HelpTextObj]
HelpStr = 'Hello world'
HelpTextObj = TextObject()
HelpTextObj.SetLang('')
HelpTextObj.SetString(HelpStr)
HelpTextList.append(HelpTextObj)
Result = _GetHelpStr(HelpTextList)
self.assertEqual(Result, ExpectedStr)
#
# Normal case5: have three help text objects with Lang = 'en-US', '' and ''
#
def testNormalCase5(self):
ExpectedStr = 'Hello world1'
HelpTextObj = TextObject()
HelpTextObj.SetLang(TAB_LANGUAGE_EN_US)
HelpTextObj.SetString(ExpectedStr)
HelpTextList = [HelpTextObj]
HelpStr = 'Hello unknown world'
HelpTextObj = TextObject()
HelpTextObj.SetLang('')
HelpTextObj.SetString(HelpStr)
HelpTextList.append(HelpTextObj)
HelpStr = 'Hello mysterious world'
HelpTextObj = TextObject()
HelpTextObj.SetLang('')
HelpTextObj.SetString(HelpStr)
HelpTextList.append(HelpTextObj)
Result = _GetHelpStr(HelpTextList)
self.assertEqual(Result, ExpectedStr)
HelpTextList.sort()
self.assertEqual(Result, ExpectedStr)
HelpTextList.sort(reverse=True)
self.assertEqual(Result, ExpectedStr)
#
# Test GenGuidSections
#
class GenGuidSectionsTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
#
# This is the API to generate Guid Object to help UnitTest
#
def GuidFactory(self, CName, FFE, Usage, GuidType, VariableName, HelpStr):
Guid = GuidObject()
Guid.SetCName(CName)
Guid.SetFeatureFlag(FFE)
Guid.SetGuidTypeList([GuidType])
Guid.SetUsage(Usage)
Guid.SetVariableName(VariableName)
HelpTextObj = TextObject()
HelpTextObj.SetLang('')
HelpTextObj.SetString(HelpStr)
Guid.SetHelpTextList([HelpTextObj])
return Guid
#
# Normal case: have two GuidObject
#
def testNormalCase1(self):
GuidList = []
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'PRODUCES'
GuidType = 'Event'
VariableName = ''
HelpStr = 'Usage comment line 1'
Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
GuidList.append(Guid1)
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'CONSUMES'
GuidType = 'Variable'
VariableName = ''
HelpStr = 'Usage comment line 2'
Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
GuidList.append(Guid1)
Result = GenGuidSections(GuidList)
Expected = '''[Guids]
## PRODUCES ## Event # Usage comment line 1
## CONSUMES ## Variable: # Usage comment line 2
Guid1|FFE1'''
self.assertEqual(Result.strip(), Expected)
#
# Normal case: have two GuidObject
#
def testNormalCase2(self):
GuidList = []
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'PRODUCES'
GuidType = 'Event'
VariableName = ''
HelpStr = 'Usage comment line 1'
Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
GuidList.append(Guid1)
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'UNDEFINED'
GuidType = 'UNDEFINED'
VariableName = ''
HelpStr = 'Generic comment line 1\n Generic comment line 2'
Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
GuidList.append(Guid1)
Result = GenGuidSections(GuidList)
Expected = '''[Guids]
## PRODUCES ## Event # Usage comment line 1
# Generic comment line 1
# Generic comment line 2
Guid1|FFE1'''
self.assertEqual(Result.strip(), Expected)
#
# Normal case: have two GuidObjects, one help goes to the generic help,
# the other goes into the usage comment
#
def testNormalCase3(self):
GuidList = []
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'UNDEFINED'
GuidType = 'UNDEFINED'
VariableName = ''
HelpStr = 'Generic comment'
Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
GuidList.append(Guid1)
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'PRODUCES'
GuidType = 'Event'
VariableName = ''
HelpStr = 'Usage comment line 1'
Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
GuidList.append(Guid1)
Result = GenGuidSections(GuidList)
Expected = '''[Guids]
# Generic comment
## PRODUCES ## Event # Usage comment line 1
Guid1|FFE1'''
self.assertEqual(Result.strip(), Expected)
#
# Normal case: have one GuidObject, generic comment multiple lines
#
def testNormalCase5(self):
GuidList = []
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'UNDEFINED'
GuidType = 'UNDEFINED'
VariableName = ''
HelpStr = 'Generic comment line1 \n generic comment line 2'
Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
GuidList.append(Guid1)
Result = GenGuidSections(GuidList)
Expected = '''[Guids]
# Generic comment line1
# generic comment line 2
Guid1|FFE1'''
self.assertEqual(Result.strip(), Expected)
#
# Normal case: have one GuidObject, usage comment multiple lines
#
def testNormalCase6(self):
GuidList = []
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'PRODUCES'
GuidType = 'Event'
VariableName = ''
HelpStr = 'Usage comment line 1\n Usage comment line 2'
Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
GuidList.append(Guid1)
Result = GenGuidSections(GuidList)
Expected = '''[Guids]
Guid1|FFE1 ## PRODUCES ## Event # Usage comment line 1 Usage comment line 2
'''
self.assertEqual(Result.strip(), Expected.strip())
#
# Normal case: have one GuidObject, usage comment one line
#
def testNormalCase7(self):
GuidList = []
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'UNDEFINED'
GuidType = 'UNDEFINED'
VariableName = ''
HelpStr = 'Usage comment line 1'
Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
GuidList.append(Guid1)
Result = GenGuidSections(GuidList)
Expected = '''[Guids]
Guid1|FFE1 # Usage comment line 1
'''
self.assertEqual(Result.strip(), Expected.strip())
#
# Normal case: have two GuidObject
#
def testNormalCase8(self):
GuidList = []
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'PRODUCES'
GuidType = 'Event'
VariableName = ''
HelpStr = 'Usage comment line 1\n Usage comment line 2'
Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
GuidList.append(Guid1)
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'PRODUCES'
GuidType = 'Event'
VariableName = ''
HelpStr = 'Usage comment line 3'
Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
GuidList.append(Guid1)
Result = GenGuidSections(GuidList)
Expected = '''[Guids]
## PRODUCES ## Event # Usage comment line 1 Usage comment line 2
## PRODUCES ## Event # Usage comment line 3
Guid1|FFE1
'''
self.assertEqual(Result.strip(), Expected.strip())
#
# Normal case: have no GuidObject
#
def testNormalCase9(self):
GuidList = []
Result = GenGuidSections(GuidList)
Expected = ''
self.assertEqual(Result.strip(), Expected.strip())
#
# Normal case: have one GuidObject with no comment generated
#
def testNormalCase10(self):
GuidList = []
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'UNDEFINED'
GuidType = 'UNDEFINED'
VariableName = ''
HelpStr = ''
Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
GuidList.append(Guid1)
Result = GenGuidSections(GuidList)
Expected = '''[Guids]
Guid1|FFE1
'''
self.assertEqual(Result.strip(), Expected.strip())
#
# Normal case: have three GuidObject
#
def testNormalCase11(self):
GuidList = []
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'UNDEFINED'
GuidType = 'UNDEFINED'
VariableName = ''
HelpStr = 'general comment line 1'
Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
GuidList.append(Guid1)
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'PRODUCES'
GuidType = 'Event'
VariableName = ''
HelpStr = 'Usage comment line 3'
Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
GuidList.append(Guid1)
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'UNDEFINED'
GuidType = 'UNDEFINED'
VariableName = ''
HelpStr = 'general comment line 2'
Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
GuidList.append(Guid1)
Result = GenGuidSections(GuidList)
Expected = '''[Guids]
# general comment line 1
## PRODUCES ## Event # Usage comment line 3
# general comment line 2
Guid1|FFE1
'''
self.assertEqual(Result.strip(), Expected.strip())
#
# Normal case: have three GuidObject, with Usage/Type and no help
#
def testNormalCase12(self):
GuidList = []
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'PRODUCES'
GuidType = 'GUID'
VariableName = ''
HelpStr = ''
Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
GuidList.append(Guid1)
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'PRODUCES'
GuidType = 'Event'
VariableName = ''
HelpStr = ''
Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
GuidList.append(Guid1)
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'CONSUMES'
GuidType = 'Event'
VariableName = ''
HelpStr = ''
Guid1 = self.GuidFactory(CName, FFE, Usage, GuidType,
VariableName, HelpStr)
GuidList.append(Guid1)
Result = GenGuidSections(GuidList)
Expected = '''[Guids]
## PRODUCES ## GUID
## PRODUCES ## Event
## CONSUMES ## Event
Guid1|FFE1
'''
self.assertEqual(Result.strip(), Expected.strip())
#
# Test GenProtocolPPiSections
#
class GenProtocolPPiSectionsTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
#
# This is the API to generate Protocol/Ppi Object to help UnitTest
#
def ObjectFactory(self, CName, FFE, Usage, Notify, HelpStr, IsProtocol):
if IsProtocol:
Object = ProtocolObject()
else:
Object = PpiObject()
Object.SetCName(CName)
Object.SetFeatureFlag(FFE)
Object.SetUsage(Usage)
Object.SetNotify(Notify)
HelpTextObj = TextObject()
HelpTextObj.SetLang('')
HelpTextObj.SetString(HelpStr)
Object.SetHelpTextList([HelpTextObj])
return Object
# Usage Notify Help INF Comment
#1 UNDEFINED true Present ## UNDEFINED ## NOTIFY # Help
#2 UNDEFINED true Not Present ## UNDEFINED ## NOTIFY
#3 UNDEFINED false Present ## UNDEFINED # Help
#4 UNDEFINED false Not Present ## UNDEFINED
#5 UNDEFINED Not Present Present # Help
#6 UNDEFINED Not Present Not Present <empty>
#7 Other true Present ## Other ## NOTIFY # Help
#8 Other true Not Present ## Other ## NOTIFY
#9 Other false Present ## Other # Help
#A Other false Not Present ## Other
#B Other Not Present Present ## Other # Help
#C Other Not Present Not Present ## Other
def testNormalCase1(self):
ObjectList = []
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'UNDEFINED'
Notify = True
HelpStr = 'Help'
IsProtocol = True
Object = self.ObjectFactory(CName, FFE, Usage, Notify,
HelpStr, IsProtocol)
ObjectList.append(Object)
Result = GenProtocolPPiSections(ObjectList, IsProtocol)
Expected = '''[Protocols]
Guid1|FFE1 ## UNDEFINED ## NOTIFY # Help'''
self.assertEqual(Result.strip(), Expected)
IsProtocol = False
ObjectList = []
Object = self.ObjectFactory(CName, FFE, Usage, Notify,
HelpStr, IsProtocol)
ObjectList.append(Object)
Result = GenProtocolPPiSections(ObjectList, IsProtocol)
Expected = '''[Ppis]
Guid1|FFE1 ## UNDEFINED ## NOTIFY # Help'''
self.assertEqual(Result.strip(), Expected)
def testNormalCase2(self):
ObjectList = []
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'UNDEFINED'
Notify = True
HelpStr = ''
IsProtocol = True
Object = self.ObjectFactory(CName, FFE, Usage, Notify,
HelpStr, IsProtocol)
ObjectList.append(Object)
Result = GenProtocolPPiSections(ObjectList, IsProtocol)
Expected = '''[Protocols]
Guid1|FFE1 ## UNDEFINED ## NOTIFY'''
self.assertEqual(Result.strip(), Expected)
def testNormalCase3(self):
ObjectList = []
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'UNDEFINED'
Notify = False
HelpStr = 'Help'
IsProtocol = True
Object = self.ObjectFactory(CName, FFE, Usage, Notify,
HelpStr, IsProtocol)
ObjectList.append(Object)
Result = GenProtocolPPiSections(ObjectList, IsProtocol)
Expected = '''[Protocols]
Guid1|FFE1 ## UNDEFINED # Help'''
self.assertEqual(Result.strip(), Expected)
def testNormalCase4(self):
ObjectList = []
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'UNDEFINED'
Notify = False
HelpStr = ''
IsProtocol = True
Object = self.ObjectFactory(CName, FFE, Usage, Notify,
HelpStr, IsProtocol)
ObjectList.append(Object)
Result = GenProtocolPPiSections(ObjectList, IsProtocol)
Expected = '''[Protocols]
Guid1|FFE1 ## UNDEFINED'''
self.assertEqual(Result.strip(), Expected)
def testNormalCase5(self):
ObjectList = []
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'UNDEFINED'
Notify = ''
HelpStr = 'Help'
IsProtocol = True
Object = self.ObjectFactory(CName, FFE, Usage, Notify,
HelpStr, IsProtocol)
ObjectList.append(Object)
Result = GenProtocolPPiSections(ObjectList, IsProtocol)
Expected = '''[Protocols]
Guid1|FFE1 # Help'''
self.assertEqual(Result.strip(), Expected)
def testNormalCase6(self):
ObjectList = []
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'UNDEFINED'
Notify = ''
HelpStr = ''
IsProtocol = True
Object = self.ObjectFactory(CName, FFE, Usage, Notify,
HelpStr, IsProtocol)
ObjectList.append(Object)
Result = GenProtocolPPiSections(ObjectList, IsProtocol)
Expected = '''[Protocols]
Guid1|FFE1'''
self.assertEqual(Result.strip(), Expected)
def testNormalCase7(self):
ObjectList = []
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'PRODUCES'
Notify = True
HelpStr = 'Help'
IsProtocol = True
Object = self.ObjectFactory(CName, FFE, Usage, Notify,
HelpStr, IsProtocol)
ObjectList.append(Object)
Result = GenProtocolPPiSections(ObjectList, IsProtocol)
Expected = '''[Protocols]
Guid1|FFE1 ## PRODUCES ## NOTIFY # Help'''
self.assertEqual(Result.strip(), Expected)
def testNormalCase8(self):
ObjectList = []
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'PRODUCES'
Notify = True
HelpStr = ''
IsProtocol = True
Object = self.ObjectFactory(CName, FFE, Usage, Notify,
HelpStr, IsProtocol)
ObjectList.append(Object)
Result = GenProtocolPPiSections(ObjectList, IsProtocol)
Expected = '''[Protocols]
Guid1|FFE1 ## PRODUCES ## NOTIFY'''
self.assertEqual(Result.strip(), Expected)
def testNormalCase9(self):
ObjectList = []
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'PRODUCES'
Notify = False
HelpStr = 'Help'
IsProtocol = True
Object = self.ObjectFactory(CName, FFE, Usage, Notify,
HelpStr, IsProtocol)
ObjectList.append(Object)
Result = GenProtocolPPiSections(ObjectList, IsProtocol)
Expected = '''[Protocols]
Guid1|FFE1 ## PRODUCES # Help'''
self.assertEqual(Result.strip(), Expected)
def testNormalCaseA(self):
ObjectList = []
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'PRODUCES'
Notify = False
HelpStr = ''
IsProtocol = True
Object = self.ObjectFactory(CName, FFE, Usage, Notify,
HelpStr, IsProtocol)
ObjectList.append(Object)
Result = GenProtocolPPiSections(ObjectList, IsProtocol)
Expected = '''[Protocols]
Guid1|FFE1 ## PRODUCES'''
self.assertEqual(Result.strip(), Expected)
def testNormalCaseB(self):
ObjectList = []
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'PRODUCES'
Notify = ''
HelpStr = 'Help'
IsProtocol = True
Object = self.ObjectFactory(CName, FFE, Usage, Notify,
HelpStr, IsProtocol)
ObjectList.append(Object)
Result = GenProtocolPPiSections(ObjectList, IsProtocol)
Expected = '''[Protocols]
Guid1|FFE1 ## PRODUCES # Help'''
self.assertEqual(Result.strip(), Expected)
def testNormalCaseC(self):
ObjectList = []
CName = 'Guid1'
FFE = 'FFE1'
Usage = 'PRODUCES'
Notify = ''
HelpStr = ''
IsProtocol = True
Object = self.ObjectFactory(CName, FFE, Usage, Notify,
HelpStr, IsProtocol)
ObjectList.append(Object)
Result = GenProtocolPPiSections(ObjectList, IsProtocol)
Expected = '''[Protocols]
Guid1|FFE1 ## PRODUCES'''
self.assertEqual(Result.strip(), Expected)
#
# Test GenPcdSections
#
class GenPcdSectionsTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
#
# This is the API to generate Pcd Object to help UnitTest
#
def ObjectFactory(self, ItemType, TSCName, CName, DValue, FFE, Usage, Str):
Object = PcdObject()
HelpStr = Str
Object.SetItemType(ItemType)
Object.SetTokenSpaceGuidCName(TSCName)
Object.SetCName(CName)
Object.SetDefaultValue(DValue)
Object.SetFeatureFlag(FFE)
Object.SetValidUsage(Usage)
HelpTextObj = TextObject()
HelpTextObj.SetLang('')
HelpTextObj.SetString(HelpStr)
Object.SetHelpTextList([HelpTextObj])
return Object
# Usage Help INF Comment
#1 UNDEFINED Present # Help
#2 UNDEFINED Not Present <empty>
#3 Other Present ## Other # Help
#4 Other Not Present ## Other
def testNormalCase1(self):
ObjectList = []
ItemType = 'Pcd'
TSCName = 'TSCName'
CName = 'CName'
DValue = 'DValue'
FFE = 'FFE'
Usage = 'UNDEFINED'
Str = 'Help'
Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
Usage, Str)
ObjectList.append(Object)
Result = GenPcdSections(ObjectList)
Expected = \
'[Pcd]\n' + \
'TSCName.CName|DValue|FFE # Help'
self.assertEqual(Result.strip(), Expected)
def testNormalCase2(self):
ObjectList = []
ItemType = 'Pcd'
TSCName = 'TSCName'
CName = 'CName'
DValue = 'DValue'
FFE = 'FFE'
Usage = 'UNDEFINED'
Str = ''
Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
Usage, Str)
ObjectList.append(Object)
Result = GenPcdSections(ObjectList)
Expected = '[Pcd]\nTSCName.CName|DValue|FFE'
self.assertEqual(Result.strip(), Expected)
def testNormalCase3(self):
ObjectList = []
ItemType = 'Pcd'
TSCName = 'TSCName'
CName = 'CName'
DValue = 'DValue'
FFE = 'FFE'
Usage = 'CONSUMES'
Str = 'Help'
Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
Usage, Str)
ObjectList.append(Object)
Result = GenPcdSections(ObjectList)
Expected = '[Pcd]\nTSCName.CName|DValue|FFE ## CONSUMES # Help'
self.assertEqual(Result.strip(), Expected)
def testNormalCase4(self):
ObjectList = []
ItemType = 'Pcd'
TSCName = 'TSCName'
CName = 'CName'
DValue = 'DValue'
FFE = 'FFE'
Usage = 'CONSUMES'
Str = ''
Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
Usage, Str)
ObjectList.append(Object)
Result = GenPcdSections(ObjectList)
Expected = '[Pcd]\nTSCName.CName|DValue|FFE ## CONSUMES'
self.assertEqual(Result.strip(), Expected)
#
# multiple lines for normal usage
#
def testNormalCase5(self):
ObjectList = []
ItemType = 'Pcd'
TSCName = 'TSCName'
CName = 'CName'
DValue = 'DValue'
FFE = 'FFE'
Usage = 'CONSUMES'
Str = 'commment line 1\ncomment line 2'
Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
Usage, Str)
ObjectList.append(Object)
Result = GenPcdSections(ObjectList)
Expected = '''[Pcd]
TSCName.CName|DValue|FFE ## CONSUMES # commment line 1 comment line 2'''
self.assertEqual(Result.strip(), Expected)
#
# multiple lines for UNDEFINED usage
#
def testNormalCase6(self):
ObjectList = []
ItemType = 'Pcd'
TSCName = 'TSCName'
CName = 'CName'
DValue = 'DValue'
FFE = 'FFE'
Usage = 'UNDEFINED'
Str = 'commment line 1\ncomment line 2'
Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
Usage, Str)
ObjectList.append(Object)
Usage = 'UNDEFINED'
Str = 'commment line 3'
Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
Usage, Str)
ObjectList.append(Object)
Result = GenPcdSections(ObjectList)
Expected = '''[Pcd]
# commment line 1
# comment line 2
# commment line 3
TSCName.CName|DValue|FFE'''
self.assertEqual(Result.strip(), Expected)
#
# multiple lines for UNDEFINED and normal usage
#
def testNormalCase7(self):
ObjectList = []
ItemType = 'Pcd'
TSCName = 'TSCName'
CName = 'CName'
DValue = 'DValue'
FFE = 'FFE'
Usage = 'UNDEFINED'
Str = 'commment line 1\ncomment line 2'
Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
Usage, Str)
ObjectList.append(Object)
Usage = 'CONSUMES'
Str = 'Foo'
Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
Usage, Str)
ObjectList.append(Object)
Usage = 'UNDEFINED'
Str = 'commment line 3'
Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
Usage, Str)
ObjectList.append(Object)
Result = GenPcdSections(ObjectList)
Expected = '''[Pcd]
# commment line 1
# comment line 2
## CONSUMES # Foo
# commment line 3
TSCName.CName|DValue|FFE'''
self.assertEqual(Result.strip(), Expected)
# Usage Help INF Comment
# CONSUMES Present # Help (keep <EOL> and insert '#' at beginning of each new line)
# CONSUMES Not Present <empty>
#
# TAB_INF_FEATURE_PCD
#
def testNormalCase8(self):
ObjectList = []
ItemType = TAB_INF_FEATURE_PCD
TSCName = 'TSCName'
CName = 'CName'
DValue = 'DValue'
FFE = 'FFE'
Usage = 'CONSUMES'
Str = 'commment line 1\ncomment line 2'
Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
Usage, Str)
ObjectList.append(Object)
Result = GenPcdSections(ObjectList)
Expected = '''[FeaturePcd]
# commment line 1
# comment line 2
TSCName.CName|DValue|FFE'''
self.assertEqual(Result.strip(), Expected)
#
# TAB_INF_FEATURE_PCD
#
def testNormalCase9(self):
ObjectList = []
ItemType = TAB_INF_FEATURE_PCD
TSCName = 'TSCName'
CName = 'CName'
DValue = 'DValue'
FFE = 'FFE'
Usage = 'CONSUMES'
Str = ''
Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
Usage, Str)
ObjectList.append(Object)
Result = GenPcdSections(ObjectList)
Expected = '''[FeaturePcd]
TSCName.CName|DValue|FFE'''
self.assertEqual(Result.strip(), Expected)
#
# TAB_INF_FEATURE_PCD
#
def testNormalCase10(self):
ObjectList = []
ItemType = TAB_INF_FEATURE_PCD
TSCName = 'TSCName'
CName = 'CName'
DValue = 'DValue'
FFE = 'FFE'
Usage = 'PRODUCES'
Str = 'commment line 1\ncomment line 2'
Object = self.ObjectFactory(ItemType, TSCName, CName, DValue, FFE,
Usage, Str)
ObjectList.append(Object)
Result = GenPcdSections(ObjectList)
Expected = '''
[FeaturePcd]
# commment line 1
# comment line 2
TSCName.CName|DValue|FFE
'''
self.assertEqual(Result, Expected)
#
# Test GenSpecialSections of Hob
#
class GenHobSectionsTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
#
# This is the API to generate a Hob Object to help UnitTest
#
def ObjectFactory(self, SupArchList, Type, Usage, Str):
Object = HobObject()
HelpStr = Str
Object.SetHobType(Type)
Object.SetUsage(Usage)
Object.SetSupArchList(SupArchList)
HelpTextObj = TextObject()
HelpTextObj.SetLang('')
HelpTextObj.SetString(HelpStr)
Object.SetHelpTextList([HelpTextObj])
return Object
def testNormalCase1(self):
ObjectList = []
SupArchList = ['X64']
Type = 'Foo'
Usage = 'UNDEFINED'
Str = 'Help'
Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
ObjectList.append(Object)
Result = GenSpecialSections(ObjectList, 'Hob')
Expected = '''# [Hob.X64]
# ##
# # Help
# #
# Foo ## UNDEFINED
#
#
'''
self.assertEqual(Result, Expected)
def testNormalCase2(self):
ObjectList = []
SupArchList = []
Type = 'Foo'
Usage = 'UNDEFINED'
Str = 'Help'
Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
ObjectList.append(Object)
Result = GenSpecialSections(ObjectList, 'Hob')
Expected = '''# [Hob]
# ##
# # Help
# #
# Foo ## UNDEFINED
#
#
'''
self.assertEqual(Result, Expected)
def testNormalCase3(self):
ObjectList = []
SupArchList = ['X64']
Type = 'Foo'
Usage = 'UNDEFINED'
Str = '\nComment Line 1\n\n'
Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
ObjectList.append(Object)
Result = GenSpecialSections(ObjectList, 'Hob')
Expected = '''# [Hob.X64]
# ##
# # Comment Line 1
# #
# Foo ## UNDEFINED
#
#
'''
self.assertEqual(Result, Expected)
def testNormalCase4(self):
ObjectList = []
SupArchList = ['X64']
Type = 'Foo'
Usage = 'UNDEFINED'
Str = '\nComment Line 1\n'
Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
ObjectList.append(Object)
Result = GenSpecialSections(ObjectList, 'Hob')
Expected = '''# [Hob.X64]
# ##
# # Comment Line 1
# #
# Foo ## UNDEFINED
#
#
'''
self.assertEqual(Result, Expected)
def testNormalCase5(self):
ObjectList = []
SupArchList = ['X64']
Type = 'Foo'
Usage = 'UNDEFINED'
Str = 'Comment Line 1\n\n'
Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
ObjectList.append(Object)
Result = GenSpecialSections(ObjectList, 'Hob')
Expected = '''# [Hob.X64]
# ##
# # Comment Line 1
# #
# Foo ## UNDEFINED
#
#
'''
self.assertEqual(Result, Expected)
def testNormalCase6(self):
ObjectList = []
SupArchList = ['X64']
Type = 'Foo'
Usage = 'UNDEFINED'
Str = ''
Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
ObjectList.append(Object)
Result = GenSpecialSections(ObjectList, 'Hob')
Expected = '''# [Hob.X64]
# Foo ## UNDEFINED
#
#
'''
self.assertEqual(Result, Expected)
def testNormalCase7(self):
ObjectList = []
SupArchList = ['X64']
Type = 'Foo'
Usage = 'UNDEFINED'
Str = '\nNew Stack HoB'
Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
ObjectList.append(Object)
Result = GenSpecialSections(ObjectList, 'Hob')
Expected = '''# [Hob.X64]
# ##
# # New Stack HoB
# #
# Foo ## UNDEFINED
#
#
'''
self.assertEqual(Result, Expected)
def testNormalCase8(self):
ObjectList = []
SupArchList = ['X64']
Type = 'Foo'
Usage = 'UNDEFINED'
Str = '\nNew Stack HoB\n\nTail Comment'
Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
ObjectList.append(Object)
Result = GenSpecialSections(ObjectList, 'Hob')
Expected = '''# [Hob.X64]
# ##
# # New Stack HoB
# #
# # Tail Comment
# #
# Foo ## UNDEFINED
#
#
'''
self.assertEqual(Result, Expected)
def testNormalCase9(self):
ObjectList = []
SupArchList = ['X64']
Type = 'Foo'
Usage = 'UNDEFINED'
Str = '\n\n'
Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
ObjectList.append(Object)
Result = GenSpecialSections(ObjectList, 'Hob')
Expected = '''# [Hob.X64]
# ##
# #
# #
# Foo ## UNDEFINED
#
#
'''
self.assertEqual(Result, Expected)
def testNormalCase10(self):
ObjectList = []
SupArchList = ['X64']
Type = 'Foo'
Usage = 'UNDEFINED'
Str = '\n'
Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
ObjectList.append(Object)
Result = GenSpecialSections(ObjectList, 'Hob')
Expected = '''# [Hob.X64]
# ##
# #
# #
# Foo ## UNDEFINED
#
#
'''
self.assertEqual(Result, Expected)
def testNormalCase11(self):
ObjectList = []
SupArchList = ['X64']
Type = 'Foo'
Usage = 'UNDEFINED'
Str = '\n\n\n'
Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
ObjectList.append(Object)
Result = GenSpecialSections(ObjectList, 'Hob')
Expected = '''# [Hob.X64]
# ##
# #
# #
# Foo ## UNDEFINED
#
#
'''
self.assertEqual(Result, Expected)
def testNormalCase12(self):
ObjectList = []
SupArchList = ['X64']
Type = 'Foo'
Usage = 'UNDEFINED'
Str = '\n\n\n\n'
Object = self.ObjectFactory(SupArchList, Type, Usage, Str)
ObjectList.append(Object)
Result = GenSpecialSections(ObjectList, 'Hob')
Expected = '''# [Hob.X64]
# ##
# #
# #
# #
# Foo ## UNDEFINED
#
#
'''
self.assertEqual(Result, Expected)
#
# Test GenGenericCommentF
#
class GenGenericCommentFTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def testNormalCase1(self):
CommentLines = 'Comment Line 1'
Result = GenGenericCommentF(CommentLines)
Expected = '# Comment Line 1\n'
self.assertEqual(Result, Expected)
def testNormalCase2(self):
CommentLines = '\n'
Result = GenGenericCommentF(CommentLines)
Expected = '#\n'
self.assertEqual(Result, Expected)
def testNormalCase3(self):
CommentLines = '\n\n\n'
Result = GenGenericCommentF(CommentLines)
Expected = '#\n#\n#\n'
self.assertEqual(Result, Expected)
def testNormalCase4(self):
CommentLines = 'coment line 1\n'
Result = GenGenericCommentF(CommentLines)
Expected = '# coment line 1\n'
self.assertEqual(Result, Expected)
def testNormalCase5(self):
CommentLines = 'coment line 1\n coment line 2\n'
Result = GenGenericCommentF(CommentLines)
Expected = '# coment line 1\n# coment line 2\n'
self.assertEqual(Result, Expected)
if __name__ == '__main__':
Logger.Initialize()
unittest.main()
| edk2-master | BaseTools/Source/Python/UPT/UnitTest/CommentGeneratingUnitTest.py |
## @file
# This file contains unit tests for DecParser
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
from __future__ import print_function
import os
import unittest
from Parser.DecParserMisc import \
IsValidCArray, \
IsValidPcdDatum
from Parser.DecParser import Dec
from Library.ParserValidate import IsValidCFormatGuid
#
# Test tool function
#
def TestToolFuncs():
assert IsValidCArray('{0x1, 0x23}')
# Empty after comma
assert not IsValidCArray('{0x1, 0x23, }')
# 0x2345 too long
assert not IsValidCArray('{0x1, 0x2345}')
# Must end with '}'
assert not IsValidCArray('{0x1, 0x23, ')
# Whitespace between numbers
assert not IsValidCArray('{0x1, 0x2 3, }')
assert IsValidPcdDatum('VOID*', '"test"')[0]
assert IsValidPcdDatum('VOID*', 'L"test"')[0]
assert IsValidPcdDatum('BOOLEAN', 'TRUE')[0]
assert IsValidPcdDatum('BOOLEAN', 'FALSE')[0]
assert IsValidPcdDatum('BOOLEAN', '0')[0]
assert IsValidPcdDatum('BOOLEAN', '1')[0]
assert IsValidPcdDatum('UINT8', '0xab')[0]
assert not IsValidPcdDatum('UNKNOWNTYPE', '0xabc')[0]
assert not IsValidPcdDatum('UINT8', 'not number')[0]
assert( IsValidCFormatGuid('{ 0xfa0b1735 , 0x87a0, 0x4193, {0xb2, 0x66 , 0x53, 0x8c , 0x38, 0xaf, 0x48, 0xce }}'))
assert( not IsValidCFormatGuid('{ 0xfa0b1735 , 0x87a0, 0x4193, {0xb2, 0x66 , 0x53, 0x8c , 0x38, 0xaf, 0x48, 0xce }} 0xaa'))
def TestTemplate(TestString, TestFunc):
Path = os.path.join(os.getcwd(), 'test.dec')
Path = os.path.normpath(Path)
try:
f = open(Path, 'w')
# Write test string to file
f.write(TestString)
# Close file
f.close()
except:
print('Can not create temporary file [%s]!' % Path)
exit(-1)
# Call test function to test
Ret = TestFunc(Path, TestString)
# Test done, remove temporary file
os.remove(Path)
return Ret
# To make the unit test work correctly, IsRaiseError must be set to True
# This function tests a DEC file with correct syntax
# @retval: parser object
#
def TestOK(Path, TestString):
try:
Parser = Dec(Path)
except:
raise Exception('Bug!!! Correct syntax in DEC file, but exception raised!\n' + TestString)
return Parser
# This function tests a DEC file with wrong syntax
# If the parser catches the wrong syntax, an exception is thrown, which is the expected result
def TestError(Path, TestString):
try:
Dec(Path)
except:
# Raise error, get expected result
return True
raise Exception('Bug!!! Wrong syntax in DEC file, but passed by DEC parser!!\n' + TestString)
def TestDecDefine():
TestString = '''
[Defines]
DEC_SPECIFICATION = 0x00010005
PACKAGE_NAME = MdePkg
PACKAGE_GUID = 1E73767F-8F52-4603-AEB4-F29B510B6766
PACKAGE_VERSION = 1.02
'''
Parser = TestTemplate(TestString, TestOK)
DefObj = Parser.GetDefineSectionObject()
assert DefObj.GetPackageSpecification() == '0x00010005'
assert DefObj.GetPackageName() == 'MdePkg'
assert DefObj.GetPackageGuid() == '1E73767F-8F52-4603-AEB4-F29B510B6766'
assert DefObj.GetPackageVersion() == '1.02'
TestString = '''
[Defines]
UNKNOW_KEY = 0x00010005 # An unknown key
'''
assert TestTemplate(TestString, TestError)
TestString = '''
[Defines]
PACKAGE_GUID = F-8F52-4603-AEB4-F29B510B6766 # Error GUID
'''
assert TestTemplate(TestString, TestError)
def TestDecInclude():
TestString = '''
[Defines]
DEC_SPECIFICATION = 0x00010005
PACKAGE_NAME = MdePkg
PACKAGE_GUID = 1E73767F-8F52-4603-AEB4-F29B510B6766
PACKAGE_VERSION = 1.02
[ \\
Includes]
Include
[Includes.IA32]
Include/Ia32
'''
# Create directory in current directory
try:
os.makedirs('Include/Ia32')
except:
pass
Parser = TestTemplate(TestString, TestOK)
IncObj = Parser.GetIncludeSectionObject()
Items = IncObj.GetIncludes()
assert len(Items) == 1
assert Items[0].File == 'Include'
Items = IncObj.GetIncludes('IA32')
assert len(Items) == 1
# normpath is called in DEC parser so '/' is converted to '\'
assert Items[0].File == 'Include\\Ia32'
TestString = '''
[Defines]
DEC_SPECIFICATION = 0x00010005
PACKAGE_NAME = MdePkg
PACKAGE_GUID = 1E73767F-8F52-4603-AEB4-F29B510B6766
PACKAGE_VERSION = 1.02
[Includes]
Include_not_exist # directory does not exist
'''
assert TestTemplate(TestString, TestError)
os.removedirs('Include/Ia32')
def TestDecGuidPpiProtocol():
TestString = '''
[Defines]
DEC_SPECIFICATION = 0x00010005
PACKAGE_NAME = MdePkg
PACKAGE_GUID = 1E73767F-8F52-4603-AEB4-F29B510B6766
PACKAGE_VERSION = 1.02
[Guids]
#
# GUID defined in UEFI2.1/UEFI2.0/EFI1.1
#
## Include/Guid/GlobalVariable.h
gEfiGlobalVariableGuid = { 0x8BE4DF61, 0x93CA, 0x11D2, { 0xAA, 0x0D, 0x00, 0xE0, 0x98, 0x03, 0x2B, 0x8C }}
[Protocols]
## Include/Protocol/Bds.h
gEfiBdsArchProtocolGuid = { 0x665E3FF6, 0x46CC, 0x11D4, { 0x9A, 0x38, 0x00, 0x90, 0x27, 0x3F, 0xC1, 0x4D }}
[Ppis]
## Include/Ppi/MasterBootMode.h
gEfiPeiMasterBootModePpiGuid = { 0x7408d748, 0xfc8c, 0x4ee6, {0x92, 0x88, 0xc4, 0xbe, 0xc0, 0x92, 0xa4, 0x10 } }
'''
Parser = TestTemplate(TestString, TestOK)
Obj = Parser.GetGuidSectionObject()
Items = Obj.GetGuids()
assert Obj.GetSectionName() == 'Guids'.upper()
assert len(Items) == 1
assert Items[0].GuidCName == 'gEfiGlobalVariableGuid'
assert Items[0].GuidCValue == '{ 0x8BE4DF61, 0x93CA, 0x11D2, { 0xAA, 0x0D, 0x00, 0xE0, 0x98, 0x03, 0x2B, 0x8C }}'
Obj = Parser.GetProtocolSectionObject()
Items = Obj.GetProtocols()
assert Obj.GetSectionName() == 'Protocols'.upper()
assert len(Items) == 1
assert Items[0].GuidCName == 'gEfiBdsArchProtocolGuid'
assert Items[0].GuidCValue == '{ 0x665E3FF6, 0x46CC, 0x11D4, { 0x9A, 0x38, 0x00, 0x90, 0x27, 0x3F, 0xC1, 0x4D }}'
Obj = Parser.GetPpiSectionObject()
Items = Obj.GetPpis()
assert Obj.GetSectionName() == 'Ppis'.upper()
assert len(Items) == 1
assert Items[0].GuidCName == 'gEfiPeiMasterBootModePpiGuid'
assert Items[0].GuidCValue == '{ 0x7408d748, 0xfc8c, 0x4ee6, {0x92, 0x88, 0xc4, 0xbe, 0xc0, 0x92, 0xa4, 0x10 } }'
def TestDecPcd():
TestString = '''
[Defines]
DEC_SPECIFICATION = 0x00010005
PACKAGE_NAME = MdePkg
PACKAGE_GUID = 1E73767F-8F52-4603-AEB4-F29B510B6766
PACKAGE_VERSION = 1.02
[PcdsFeatureFlag]
## If TRUE, the component name protocol will not be installed.
gEfiMdePkgTokenSpaceGuid.PcdComponentNameDisable|FALSE|BOOLEAN|0x0000000d
[PcdsFixedAtBuild]
## Indicates the maximum length of unicode string
gEfiMdePkgTokenSpaceGuid.PcdMaximumUnicodeStringLength|1000000|UINT32|0x00000001
[PcdsFixedAtBuild.IPF]
## The base address of IO port space for IA64 arch
gEfiMdePkgTokenSpaceGuid.PcdIoBlockBaseAddressForIpf|0x0ffffc000000|UINT64|0x0000000f
[PcdsFixedAtBuild,PcdsPatchableInModule]
## This flag is used to control the printout of DebugLib
gEfiMdePkgTokenSpaceGuid.PcdDebugPrintErrorLevel|0x80000000|UINT32|0x00000006
[PcdsFixedAtBuild,PcdsPatchableInModule,PcdsDynamic]
## This value is used to set the base address of pci express hierarchy
gEfiMdePkgTokenSpaceGuid.PcdPciExpressBaseAddress|0xE0000000|UINT64|0x0000000a
'''
Parser = TestTemplate(TestString, TestOK)
Obj = Parser.GetPcdSectionObject()
Items = Obj.GetPcds('PcdsFeatureFlag', 'COMMON')
assert len(Items) == 1
assert Items[0].TokenSpaceGuidCName == 'gEfiMdePkgTokenSpaceGuid'
assert Items[0].TokenCName == 'PcdComponentNameDisable'
assert Items[0].DefaultValue == 'FALSE'
assert Items[0].DatumType == 'BOOLEAN'
assert Items[0].TokenValue == '0x0000000d'
Items = Obj.GetPcdsByType('PcdsFixedAtBuild')
assert len(Items) == 4
assert len(Obj.GetPcdsByType('PcdsPatchableInModule')) == 2
def TestDecUserExtension():
TestString = '''
[Defines]
DEC_SPECIFICATION = 0x00010005
PACKAGE_NAME = MdePkg
PACKAGE_GUID = 1E73767F-8F52-4603-AEB4-F29B510B6766
PACKAGE_VERSION = 1.02
[UserExtensions.MyID."TestString".IA32]
Some Strings...
'''
Parser = TestTemplate(TestString, TestOK)
Obj = Parser.GetUserExtensionSectionObject()
Items = Obj.GetAllUserExtensions()
assert len(Items) == 1
assert Items[0].UserString == 'Some Strings...'
assert len(Items[0].ArchAndModuleType) == 1
assert ['MyID', '"TestString"', 'IA32'] in Items[0].ArchAndModuleType
if __name__ == '__main__':
import Logger.Logger
Logger.Logger.Initialize()
unittest.FunctionTestCase(TestToolFuncs).runTest()
unittest.FunctionTestCase(TestDecDefine).runTest()
unittest.FunctionTestCase(TestDecInclude).runTest()
unittest.FunctionTestCase(TestDecGuidPpiProtocol).runTest()
unittest.FunctionTestCase(TestDecPcd).runTest()
unittest.FunctionTestCase(TestDecUserExtension).runTest()
print('All tests passed...')
| edk2-master | BaseTools/Source/Python/UPT/UnitTest/DecParserTest.py |
## @file
# This file is for checking dependency rules of distribution packages and modules
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
Dependency
'''
##
# Import Modules
#
from os.path import dirname
import os
import Logger.Log as Logger
from Logger import StringTable as ST
from Library.Parsing import GetWorkspacePackage
from Library.Parsing import GetWorkspaceModule
from Library.Parsing import GetPkgInfoFromDec
from Library.Misc import GetRelativePath
from Library import GlobalData
from Logger.ToolError import FatalError
from Logger.ToolError import EDK1_INF_ERROR
from Logger.ToolError import UNKNOWN_ERROR
(DEPEX_CHECK_SUCCESS, DEPEX_CHECK_MODULE_NOT_FOUND, \
DEPEX_CHECK_PACKAGE_NOT_FOUND, DEPEX_CHECK_DP_NOT_FOUND) = (0, 1, 2, 3)
## DependencyRules
#
# This class represents the dependency rule check mechanism
#
# @param object: Inherited from object class
#
class DependencyRules(object):
def __init__(self, Datab, ToBeInstalledPkgList=None):
self.IpiDb = Datab
self.WsPkgList = GetWorkspacePackage()
self.WsModuleList = GetWorkspaceModule()
self.PkgsToBeDepend = [(PkgInfo[1], PkgInfo[2]) for PkgInfo in self.WsPkgList]
# Add package info from the DIST to be installed.
self.PkgsToBeDepend.extend(self.GenToBeInstalledPkgList(ToBeInstalledPkgList))
def GenToBeInstalledPkgList(self, ToBeInstalledPkgList):
if not ToBeInstalledPkgList:
return []
RtnList = []
for Dist in ToBeInstalledPkgList:
for Package in Dist.PackageSurfaceArea:
RtnList.append((Package[0], Package[1]))
return RtnList
## Check whether a module exists by checking the Guid+Version+Name+Path combination
#
# @param Guid: Guid of a module
# @param Version: Version of a module
# @param Name: Name of a module
# @param Path: Path of a module
# @return: True if module existed, else False
#
def CheckModuleExists(self, Guid, Version, Name, Path):
Logger.Verbose(ST.MSG_CHECK_MODULE_EXIST)
ModuleList = self.IpiDb.GetModInPackage(Guid, Version, Name, Path)
ModuleList.extend(self.IpiDb.GetStandaloneModule(Guid, Version, Name, Path))
Logger.Verbose(ST.MSG_CHECK_MODULE_EXIST_FINISH)
if len(ModuleList) > 0:
return True
else:
return False
## Check whether a module depex satisfied.
#
# @param ModuleObj: A module object
# @param DpObj: A distribution object
# @return: True if module depex satisfied
# False else
#
def CheckModuleDepexSatisfied(self, ModuleObj, DpObj=None):
Logger.Verbose(ST.MSG_CHECK_MODULE_DEPEX_START)
Result = True
Dep = None
if ModuleObj.GetPackageDependencyList():
Dep = ModuleObj.GetPackageDependencyList()[0]
for Dep in ModuleObj.GetPackageDependencyList():
#
# first check whether the dependency satisfied by current workspace
#
Exist = self.CheckPackageExists(Dep.GetGuid(), Dep.GetVersion())
#
# check whether satisfied by current distribution
#
if not Exist:
if DpObj is None:
Result = False
break
for GuidVerPair in DpObj.PackageSurfaceArea.keys():
if Dep.GetGuid() == GuidVerPair[0]:
if Dep.GetVersion() is None or \
len(Dep.GetVersion()) == 0:
Result = True
break
if Dep.GetVersion() == GuidVerPair[1]:
Result = True
break
else:
Result = False
break
if not Result:
Logger.Error("CheckModuleDepex", UNKNOWN_ERROR, \
ST.ERR_DEPENDENCY_NOT_MATCH % (ModuleObj.GetName(), \
Dep.GetPackageFilePath(), \
Dep.GetGuid(), \
Dep.GetVersion()))
return Result
## Check whether a package exists in a package list specified by PkgsToBeDepend.
#
# @param Guid: Guid of a package
# @param Version: Version of a package
# @return: True if package exist
# False else
#
def CheckPackageExists(self, Guid, Version):
Logger.Verbose(ST.MSG_CHECK_PACKAGE_START)
Found = False
for (PkgGuid, PkgVer) in self.PkgsToBeDepend:
if (PkgGuid == Guid):
#
# if version is not empty and not equal, then not match
#
if Version and (PkgVer != Version):
Found = False
break
else:
Found = True
break
else:
Found = False
Logger.Verbose(ST.MSG_CHECK_PACKAGE_FINISH)
return Found
## Check whether a package depex satisfied.
#
# @param PkgObj: A package object
# @param DpObj: A distribution object
# @return: True if package depex satisfied
# False else
#
def CheckPackageDepexSatisfied(self, PkgObj, DpObj=None):
ModuleDict = PkgObj.GetModuleDict()
for ModKey in ModuleDict.keys():
ModObj = ModuleDict[ModKey]
if self.CheckModuleDepexSatisfied(ModObj, DpObj):
continue
else:
return False
return True
## Check whether a DP exists.
#
# @param Guid: Guid of a Distribution
# @param Version: Version of a Distribution
# @return: True if Distribution exist
# False else
def CheckDpExists(self, Guid, Version):
Logger.Verbose(ST.MSG_CHECK_DP_START)
DpList = self.IpiDb.GetDp(Guid, Version)
if len(DpList) > 0:
Found = True
else:
Found = False
Logger.Verbose(ST.MSG_CHECK_DP_FINISH)
return Found
## Check whether a DP depex satisfied by current workspace for Install
#
# @param DpObj: A distribution object
# @return: True if distribution depex satisfied
# False else
#
def CheckInstallDpDepexSatisfied(self, DpObj):
return self.CheckDpDepexSatisfied(DpObj)
## Check whether multiple DP depex satisfied by current workspace for Install
#
# @param DpObjList: A distribution object list
# @return: True if distribution depex satisfied
# False else
#
def CheckTestInstallPdDepexSatisfied(self, DpObjList):
for DpObj in DpObjList:
if self.CheckDpDepexSatisfied(DpObj):
for PkgKey in DpObj.PackageSurfaceArea.keys():
PkgObj = DpObj.PackageSurfaceArea[PkgKey]
self.PkgsToBeDepend.append((PkgObj.Guid, PkgObj.Version))
else:
return False, DpObj
return True, DpObj
## Check whether a DP depex satisfied by current workspace
# (excluding the original distribution's packages to be replaced) for Replace
#
# @param DpObj: A distribution object
# @param OrigDpGuid: The original distribution's Guid
# @param OrigDpVersion: The original distribution's Version
#
def ReplaceCheckNewDpDepex(self, DpObj, OrigDpGuid, OrigDpVersion):
self.PkgsToBeDepend = [(PkgInfo[1], PkgInfo[2]) for PkgInfo in self.WsPkgList]
OrigDpPackageList = self.IpiDb.GetPackageListFromDp(OrigDpGuid, OrigDpVersion)
for OrigPkgInfo in OrigDpPackageList:
Guid, Version = OrigPkgInfo[0], OrigPkgInfo[1]
if (Guid, Version) in self.PkgsToBeDepend:
self.PkgsToBeDepend.remove((Guid, Version))
return self.CheckDpDepexSatisfied(DpObj)
## Check whether a DP depex satisfied by current workspace.
#
# @param DpObj: A distribution object
#
def CheckDpDepexSatisfied(self, DpObj):
for PkgKey in DpObj.PackageSurfaceArea.keys():
PkgObj = DpObj.PackageSurfaceArea[PkgKey]
if self.CheckPackageDepexSatisfied(PkgObj, DpObj):
continue
else:
return False
for ModKey in DpObj.ModuleSurfaceArea.keys():
ModObj = DpObj.ModuleSurfaceArea[ModKey]
if self.CheckModuleDepexSatisfied(ModObj, DpObj):
continue
else:
return False
return True
## Check whether a DP could be removed from current workspace.
#
# @param DpGuid: File's guid
# @param DpVersion: File's version
# @retval Removable: True if the distribution could be removed, False else
# @retval DependModuleList: the list of modules that prevent the distribution from being removed
#
def CheckDpDepexForRemove(self, DpGuid, DpVersion):
Removable = True
DependModuleList = []
WsModuleList = self.WsModuleList
#
# remove modules that included in current DP
# List of item (FilePath)
DpModuleList = self.IpiDb.GetDpModuleList(DpGuid, DpVersion)
for Module in DpModuleList:
if Module in WsModuleList:
WsModuleList.remove(Module)
else:
Logger.Warn("UPT\n",
ST.ERR_MODULE_NOT_INSTALLED % Module)
#
# get packages in current Dp and find the install path
# List of item (PkgGuid, PkgVersion, InstallPath)
DpPackageList = self.IpiDb.GetPackageListFromDp(DpGuid, DpVersion)
DpPackagePathList = []
WorkSP = GlobalData.gWORKSPACE
for (PkgName, PkgGuid, PkgVersion, DecFile) in self.WsPkgList:
if PkgName:
pass
DecPath = dirname(DecFile)
if DecPath.find(WorkSP) > -1:
InstallPath = GetRelativePath(DecPath, WorkSP)
DecFileRelaPath = GetRelativePath(DecFile, WorkSP)
else:
InstallPath = DecPath
DecFileRelaPath = DecFile
if (PkgGuid, PkgVersion, InstallPath) in DpPackageList:
DpPackagePathList.append(DecFileRelaPath)
DpPackageList.remove((PkgGuid, PkgVersion, InstallPath))
#
# the left items in DpPackageList are the packages that installed but not found anymore
#
for (PkgGuid, PkgVersion, InstallPath) in DpPackageList:
Logger.Warn("UPT",
ST.WARN_INSTALLED_PACKAGE_NOT_FOUND%(PkgGuid, PkgVersion, InstallPath))
#
# check modules to see if has dependency on package of current DP
#
for Module in WsModuleList:
if (not VerifyRemoveModuleDep(Module, DpPackagePathList)):
Removable = False
DependModuleList.append(Module)
return (Removable, DependModuleList)
## Check whether a DP could be replaced by a distribution containing NewDpPkgList
# from current workspace.
#
# @param OrigDpGuid: original Dp's Guid
# @param OrigDpVersion: original Dp's version
# @param NewDpPkgList: a list of package information (Guid, Version) in new Dp
# @retval Replaceable: True if the distribution could be replaced, False else
# @retval DependModuleList: the list of modules that prevent the distribution from being replaced
#
def CheckDpDepexForReplace(self, OrigDpGuid, OrigDpVersion, NewDpPkgList):
Replaceable = True
DependModuleList = []
WsModuleList = self.WsModuleList
#
# remove modules that included in current DP
# List of item (FilePath)
DpModuleList = self.IpiDb.GetDpModuleList(OrigDpGuid, OrigDpVersion)
for Module in DpModuleList:
if Module in WsModuleList:
WsModuleList.remove(Module)
else:
Logger.Warn("UPT\n",
ST.ERR_MODULE_NOT_INSTALLED % Module)
OtherPkgList = NewDpPkgList
#
# get packages in current Dp and find the install path
# List of item (PkgGuid, PkgVersion, InstallPath)
DpPackageList = self.IpiDb.GetPackageListFromDp(OrigDpGuid, OrigDpVersion)
DpPackagePathList = []
WorkSP = GlobalData.gWORKSPACE
for (PkgName, PkgGuid, PkgVersion, DecFile) in self.WsPkgList:
if PkgName:
pass
DecPath = dirname(DecFile)
if DecPath.find(WorkSP) > -1:
InstallPath = GetRelativePath(DecPath, WorkSP)
DecFileRelaPath = GetRelativePath(DecFile, WorkSP)
else:
InstallPath = DecPath
DecFileRelaPath = DecFile
if (PkgGuid, PkgVersion, InstallPath) in DpPackageList:
DpPackagePathList.append(DecFileRelaPath)
DpPackageList.remove((PkgGuid, PkgVersion, InstallPath))
else:
OtherPkgList.append((PkgGuid, PkgVersion))
#
# the left items in DpPackageList are the packages that installed but not found anymore
#
for (PkgGuid, PkgVersion, InstallPath) in DpPackageList:
Logger.Warn("UPT",
ST.WARN_INSTALLED_PACKAGE_NOT_FOUND%(PkgGuid, PkgVersion, InstallPath))
#
# check modules to see if it can be satisfied by package not belong to removed DP
#
for Module in WsModuleList:
if (not VerifyReplaceModuleDep(Module, DpPackagePathList, OtherPkgList)):
Replaceable = False
DependModuleList.append(Module)
return (Replaceable, DependModuleList)
## Check whether the module depends on any package in DpPackagePathList; return False
# if such a dependency is found, True otherwise
#
# @param Path: a module path
# @param DpPackagePathList: a list of Package Paths
# @retval: False: module depends on package in DpPackagePathList
# True: module doesn't depend on package in DpPackagePathList
#
def VerifyRemoveModuleDep(Path, DpPackagePathList):
try:
for Item in GetPackagePath(Path):
if Item in DpPackagePathList:
DecPath = os.path.normpath(os.path.join(GlobalData.gWORKSPACE, Item))
Logger.Info(ST.MSG_MODULE_DEPEND_ON % (Path, DecPath))
return False
else:
return True
except FatalError as ErrCode:
if ErrCode.args[0] == EDK1_INF_ERROR:
Logger.Warn("UPT",
ST.WRN_EDK1_INF_FOUND%Path)
return True
else:
return True
## GetPackagePath
#
# Get Dependency package path from an Inf file path
#
def GetPackagePath(InfPath):
PackagePath = []
if os.path.exists(InfPath):
FindSection = False
for Line in open(InfPath).readlines():
Line = Line.strip()
if not Line:
continue
if Line.startswith('#'):
continue
if Line.startswith('[Packages') and Line.endswith(']'):
FindSection = True
continue
if Line.startswith('[') and Line.endswith(']') and FindSection:
break
if FindSection:
PackagePath.append(os.path.normpath(Line))
return PackagePath
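#
# Illustrative sketch (not part of the original tool; the file name below is hypothetical):
# GetPackagePath only scans the [Packages*] section of an INF, so for a module INF containing
#
#   [Packages]
#   MdePkg/MdePkg.dec
#   MdeModulePkg/MdeModulePkg.dec
#
# a call such as GetPackagePath('MyPkg/MyModule.inf') is expected to return the normalized
# paths ['MdePkg/MdePkg.dec', 'MdeModulePkg/MdeModulePkg.dec'] (with '\\' separators on
# Windows, since os.path.normpath is applied to each entry).
#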
## check whether module depends on packages in DpPackagePathList and can not be satisfied by OtherPkgList
#
# @param Path: a module path
# @param DpPackagePathList: a list of Package Paths
# @param OtherPkgList: a list of Package Information (Guid, Version)
# @retval: False: module depends on package in DpPackagePathList and can not be satisfied by OtherPkgList
# True: either module doesn't depend on DpPackagePathList or module depends on DpPackagePathList
# but can be satisfied by OtherPkgList
#
def VerifyReplaceModuleDep(Path, DpPackagePathList, OtherPkgList):
try:
for Item in GetPackagePath(Path):
if Item in DpPackagePathList:
DecPath = os.path.normpath(os.path.join(GlobalData.gWORKSPACE, Item))
Name, Guid, Version = GetPkgInfoFromDec(DecPath)
if (Guid, Version) not in OtherPkgList:
Logger.Info(ST.MSG_MODULE_DEPEND_ON % (Path, DecPath))
return False
else:
return True
except FatalError as ErrCode:
if ErrCode.args[0] == EDK1_INF_ERROR:
Logger.Warn("UPT",
ST.WRN_EDK1_INF_FOUND%Path)
return True
else:
return True
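#
# Usage sketch (illustrative only; the object names below are hypothetical and the real
# callers live in the UPT install/remove/replace flows): DependencyRules is constructed
# with the installed-package database and queried before changing the workspace.
#
#   Dep = DependencyRules(IpiDatabase)
#   if not Dep.CheckInstallDpDepexSatisfied(DistPkgObj):
#       pass  # report unsatisfied package dependencies and abort the install
#   Removable, DependModuleList = Dep.CheckDpDepexForRemove(DpGuid, DpVersion)
#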
| edk2-master | BaseTools/Source/Python/UPT/Core/DependencyRules.py |
## @file
# This file hooks file and directory creation and removal
#
# Copyright (c) 2014 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
File hook
'''
import os
import stat
import time
import zipfile
from time import sleep
from Library import GlobalData
__built_in_remove__ = os.remove
__built_in_mkdir__ = os.mkdir
__built_in_rmdir__ = os.rmdir
__built_in_chmod__ = os.chmod
__built_in_open__ = open
_RMFILE = 0
_MKFILE = 1
_RMDIR = 2
_MKDIR = 3
_CHMOD = 4
gBACKUPFILE = 'file.backup'
gEXCEPTION_LIST = ['Conf'+os.sep+'DistributionPackageDatabase.db', '.tmp', gBACKUPFILE]
class _PathInfo:
def __init__(self, action, path, mode=-1):
self.action = action
self.path = path
self.mode = mode
class RecoverMgr:
def __init__(self, workspace):
self.rlist = []
self.zip = None
self.workspace = os.path.normpath(workspace)
self.backupfile = gBACKUPFILE
self.zipfile = os.path.join(self.workspace, gBACKUPFILE)
def _createzip(self):
if self.zip:
return
self.zip = zipfile.ZipFile(self.zipfile, 'w', zipfile.ZIP_DEFLATED)
def _save(self, tmp, path):
if not self._tryhook(path):
return
self.rlist.append(_PathInfo(tmp, path))
def bkrmfile(self, path):
arc = self._tryhook(path)
if arc and os.path.isfile(path):
self._createzip()
self.zip.write(path, arc.encode('utf_8'))
sta = os.stat(path)
oldmode = stat.S_IMODE(sta.st_mode)
self.rlist.append(_PathInfo(_CHMOD, path, oldmode))
self.rlist.append(_PathInfo(_RMFILE, path))
__built_in_remove__(path)
def bkmkfile(self, path, mode, bufsize):
if not os.path.exists(path):
self._save(_MKFILE, path)
return __built_in_open__(path, mode, bufsize)
def bkrmdir(self, path):
if os.path.exists(path):
sta = os.stat(path)
oldmode = stat.S_IMODE(sta.st_mode)
self.rlist.append(_PathInfo(_CHMOD, path, oldmode))
self._save(_RMDIR, path)
__built_in_rmdir__(path)
def bkmkdir(self, path, mode):
if not os.path.exists(path):
self._save(_MKDIR, path)
__built_in_mkdir__(path, mode)
def bkchmod(self, path, mode):
if self._tryhook(path) and os.path.exists(path):
sta = os.stat(path)
oldmode = stat.S_IMODE(sta.st_mode)
self.rlist.append(_PathInfo(_CHMOD, path, oldmode))
__built_in_chmod__(path, mode)
def rollback(self):
if self.zip:
self.zip.close()
self.zip = None
index = len(self.rlist) - 1
while index >= 0:
item = self.rlist[index]
exist = os.path.exists(item.path)
if item.action == _MKFILE and exist:
#if not os.access(item.path, os.W_OK):
# os.chmod(item.path, S_IWUSR)
__built_in_remove__(item.path)
elif item.action == _RMFILE and not exist:
if not self.zip:
self.zip = zipfile.ZipFile(self.zipfile, 'r', zipfile.ZIP_DEFLATED)
arcname = os.path.normpath(item.path)
arcname = arcname[len(self.workspace)+1:].encode('utf_8')
if os.sep != "/" and os.sep in arcname:
arcname = arcname.replace(os.sep, '/')
mtime = self.zip.getinfo(arcname).date_time
content = self.zip.read(arcname)
filep = __built_in_open__(item.path, "wb")
filep.write(content)
filep.close()
intime = time.mktime(mtime + (0, 0, 0))
os.utime(item.path, (intime, intime))
elif item.action == _MKDIR and exist:
while True:
try:
__built_in_rmdir__(item.path)
break
except IOError:
# Sleep a short time and try again
# The anti-virus software may delay the file removal in this directory
sleep(0.1)
elif item.action == _RMDIR and not exist:
__built_in_mkdir__(item.path)
elif item.action == _CHMOD and exist:
try:
__built_in_chmod__(item.path, item.mode)
except EnvironmentError:
pass
index -= 1
self.commit()
def commit(self):
if self.zip:
self.zip.close()
__built_in_remove__(self.zipfile)
# Check if path needs to be hooked
def _tryhook(self, path):
path = os.path.normpath(path)
works = self.workspace if str(self.workspace).endswith(os.sep) else (self.workspace + os.sep)
if not path.startswith(works):
return ''
for exceptdir in gEXCEPTION_LIST:
full = os.path.join(self.workspace, exceptdir)
if full == path or path.startswith(full + os.sep) or os.path.split(full)[0] == path:
return ''
return path[len(self.workspace)+1:]
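# Illustrative examples of _tryhook (paths are hypothetical, assuming the workspace is
# 'C:\\WS'): paths inside the workspace are returned relative to it; everything else,
# including the entries in gEXCEPTION_LIST, is not hooked and yields ''.
#   _tryhook('C:\\WS\\MdePkg\\MdePkg.dec')                      -> 'MdePkg\\MdePkg.dec'
#   _tryhook('C:\\Other\\File.txt')                             -> ''
#   _tryhook('C:\\WS\\Conf\\DistributionPackageDatabase.db')    -> ''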
def _hookrm(path):
if GlobalData.gRECOVERMGR:
GlobalData.gRECOVERMGR.bkrmfile(path)
else:
__built_in_remove__(path)
def _hookmkdir(path, mode=0o777):
if GlobalData.gRECOVERMGR:
GlobalData.gRECOVERMGR.bkmkdir(path, mode)
else:
__built_in_mkdir__(path, mode)
def _hookrmdir(path):
if GlobalData.gRECOVERMGR:
GlobalData.gRECOVERMGR.bkrmdir(path)
else:
__built_in_rmdir__(path)
def _hookmkfile(path, mode='r', bufsize=-1):
if GlobalData.gRECOVERMGR:
return GlobalData.gRECOVERMGR.bkmkfile(path, mode, bufsize)
return __built_in_open__(path, mode, bufsize)
def _hookchmod(path, mode):
if GlobalData.gRECOVERMGR:
GlobalData.gRECOVERMGR.bkchmod(path, mode)
else:
__built_in_chmod__(path, mode)
def SetRecoverMgr(mgr):
GlobalData.gRECOVERMGR = mgr
os.remove = _hookrm
os.mkdir = _hookmkdir
os.rmdir = _hookrmdir
os.chmod = _hookchmod
__FileHookOpen__ = _hookmkfile
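#
# Usage sketch (illustrative only; the actual driver is the UPT install flow, and
# 'WorkspaceDir' below is a hypothetical path): once SetRecoverMgr() is called, file and
# directory operations under the workspace are journaled so a failed install can be undone.
#
#   Mgr = RecoverMgr(WorkspaceDir)
#   SetRecoverMgr(Mgr)              # hooks os.remove/os.mkdir/os.rmdir/os.chmod
#   try:
#       pass                        # ... create and remove files under the workspace ...
#       Mgr.commit()                # success: discard the backup zip
#   except Exception:
#       Mgr.rollback()              # failure: restore the previous workspace state
#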
| edk2-master | BaseTools/Source/Python/UPT/Core/FileHook.py |
## @file
#
# PackageFile class represents the zip file of a distribution package.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
PackageFile
'''
##
# Import Modules
#
import os.path
import zipfile
import tempfile
import platform
from Logger.ToolError import FILE_OPEN_FAILURE
from Logger.ToolError import FILE_CHECKSUM_FAILURE
from Logger.ToolError import FILE_NOT_FOUND
from Logger.ToolError import FILE_DECOMPRESS_FAILURE
from Logger.ToolError import FILE_UNKNOWN_ERROR
from Logger.ToolError import FILE_WRITE_FAILURE
from Logger.ToolError import FILE_COMPRESS_FAILURE
import Logger.Log as Logger
from Logger import StringTable as ST
from Library.Misc import CreateDirectory
from Library.Misc import RemoveDirectory
from Core.FileHook import __FileHookOpen__
from Common.MultipleWorkspace import MultipleWorkspace as mws
class PackageFile:
def __init__(self, FileName, Mode="r"):
self._FileName = FileName
if Mode not in ["r", "w", "a"]:
Mode = "r"
try:
self._ZipFile = zipfile.ZipFile(FileName, Mode, \
zipfile.ZIP_DEFLATED)
self._Files = {}
for Filename in self._ZipFile.namelist():
self._Files[os.path.normpath(Filename)] = Filename
except BaseException as Xstr:
Logger.Error("PackagingTool", FILE_OPEN_FAILURE,
ExtraData="%s (%s)" % (FileName, str(Xstr)))
BadFile = self._ZipFile.testzip()
if BadFile is not None:
Logger.Error("PackagingTool", FILE_CHECKSUM_FAILURE,
ExtraData="[%s] in %s" % (BadFile, FileName))
def GetZipFile(self):
return self._ZipFile
## Get file name
#
def __str__(self):
return self._FileName
## Extract all files in the package to a destination directory
#
# @param ToDest: the destination directory
#
def Unpack(self, ToDest):
for FileN in self._ZipFile.namelist():
ToFile = os.path.normpath(os.path.join(ToDest, FileN))
Msg = "%s -> %s" % (FileN, ToFile)
Logger.Info(Msg)
self.Extract(FileN, ToFile)
## Extract the file
#
# @param File: the extracted file
# @param ToFile: the destination file
#
def UnpackFile(self, File, ToFile):
File = File.replace('\\', '/')
if File in self._ZipFile.namelist():
Msg = "%s -> %s" % (File, ToFile)
Logger.Info(Msg)
self.Extract(File, ToFile)
return ToFile
return ''
## Extract the file
#
# @param Which: the source path
# @param ToDest: the destination path
#
def Extract(self, Which, ToDest):
Which = os.path.normpath(Which)
if Which not in self._Files:
Logger.Error("PackagingTool", FILE_NOT_FOUND,
ExtraData="[%s] in %s" % (Which, self._FileName))
try:
FileContent = self._ZipFile.read(self._Files[Which])
except BaseException as Xstr:
Logger.Error("PackagingTool", FILE_DECOMPRESS_FAILURE,
ExtraData="[%s] in %s (%s)" % (Which, \
self._FileName, \
str(Xstr)))
try:
CreateDirectory(os.path.dirname(ToDest))
if os.path.exists(ToDest) and not os.access(ToDest, os.W_OK):
Logger.Warn("PackagingTool", \
ST.WRN_FILE_NOT_OVERWRITTEN % ToDest)
return
else:
ToFile = __FileHookOpen__(ToDest, 'wb')
except BaseException as Xstr:
Logger.Error("PackagingTool", FILE_OPEN_FAILURE,
ExtraData="%s (%s)" % (ToDest, str(Xstr)))
try:
ToFile.write(FileContent)
ToFile.close()
except BaseException as Xstr:
Logger.Error("PackagingTool", FILE_WRITE_FAILURE,
ExtraData="%s (%s)" % (ToDest, str(Xstr)))
## Remove the file
#
    # @param Files: the files to remove
#
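    # Note (added for clarity): zipfile cannot delete entries in place, so this
    # method unpacks the archive to a temporary directory, drops the requested
    # entries from the in-memory file map, and then rebuilds the zip from the
    # remaining files.
    #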
def Remove(self, Files):
TmpDir = os.path.join(tempfile.gettempdir(), ".packaging")
if os.path.exists(TmpDir):
RemoveDirectory(TmpDir, True)
os.mkdir(TmpDir)
self.Unpack(TmpDir)
for SinF in Files:
SinF = os.path.normpath(SinF)
if SinF not in self._Files:
Logger.Error("PackagingTool", FILE_NOT_FOUND,
ExtraData="%s is not in %s!" % \
(SinF, self._FileName))
self._Files.pop(SinF)
self._ZipFile.close()
self._ZipFile = zipfile.ZipFile(self._FileName, "w", \
zipfile.ZIP_DEFLATED)
Cwd = os.getcwd()
os.chdir(TmpDir)
self.PackFiles(self._Files)
os.chdir(Cwd)
RemoveDirectory(TmpDir, True)
    ## Pack the files under the Top directory; the paths recorded in the zip file
    # start from BaseDir, so BaseDir should be the parent directory of the Top
    # directory. For example, Pack(Workspace\Dir1, Workspace) packs the files under
    # Dir1, and the paths in the zip file start from Workspace.
#
# @param Top: the top directory
# @param BaseDir: the base directory
#
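    # Illustrative example (hypothetical paths): packing Top = 'Workspace\Dir1'
    # with BaseDir = 'Workspace' stores 'Workspace\Dir1\Include\Foo.h' in the
    # archive under the relative name 'Dir1\Include\Foo.h'.
    #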
def Pack(self, Top, BaseDir):
if not os.path.isdir(Top):
Logger.Error("PackagingTool", FILE_UNKNOWN_ERROR, \
"%s is not a directory!" %Top)
FilesToPack = []
Cwd = os.getcwd()
os.chdir(BaseDir)
        RelaDir = Top[Top.upper().find(BaseDir.upper()) + len(BaseDir) + 1:]
for Root, Dirs, Files in os.walk(RelaDir):
if 'CVS' in Dirs:
Dirs.remove('CVS')
if '.svn' in Dirs:
Dirs.remove('.svn')
for Dir in Dirs:
if Dir.startswith('.'):
Dirs.remove(Dir)
for File1 in Files:
if File1.startswith('.'):
continue
ExtName = os.path.splitext(File1)[1]
#
# skip '.dec', '.inf', '.dsc', '.fdf' files
#
if ExtName.lower() in ['.dec', '.inf', '.dsc', '.fdf']:
continue
FilesToPack.append(os.path.join(Root, File1))
self.PackFiles(FilesToPack)
os.chdir(Cwd)
    ## Pack a list of files
#
# @param Files: the files to pack
#
def PackFiles(self, Files):
for File in Files:
Cwd = os.getcwd()
os.chdir(mws.getWs(mws.WORKSPACE, File))
self.PackFile(File)
os.chdir(Cwd)
## Pack the file
#
    # @param File: the file to pack
    # @param ArcName: the archive name to store the file under
#
def PackFile(self, File, ArcName=None):
try:
#
# avoid packing same file multiple times
#
if platform.system() != 'Windows':
File = File.replace('\\', '/')
ZipedFilesNameList = self._ZipFile.namelist()
for ZipedFile in ZipedFilesNameList:
if File == os.path.normpath(ZipedFile):
return
Logger.Info("packing ..." + File)
self._ZipFile.write(File, ArcName)
except BaseException as Xstr:
Logger.Error("PackagingTool", FILE_COMPRESS_FAILURE,
ExtraData="%s (%s)" % (File, str(Xstr)))
## Write data to the packed file
#
# @param Data: data to write
# @param ArcName: the Arc Name
#
def PackData(self, Data, ArcName):
try:
if os.path.splitext(ArcName)[1].lower() == '.pkg':
Data = Data.encode('utf_8')
self._ZipFile.writestr(ArcName, Data)
except BaseException as Xstr:
Logger.Error("PackagingTool", FILE_COMPRESS_FAILURE,
ExtraData="%s (%s)" % (ArcName, str(Xstr)))
## Close file
#
#
def Close(self):
self._ZipFile.close()
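# --------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original module). It assumes the
# script is run from inside the UPT source tree so that the Logger/Library
# imports above resolve; the file and entry names below are hypothetical.
if __name__ == '__main__':
    import tempfile
    DemoDir = tempfile.mkdtemp()
    DemoZip = os.path.join(DemoDir, 'demo.dist.pkg')
    # Create a new archive and add an in-memory payload under a relative name.
    Writer = PackageFile(DemoZip, 'w')
    Writer.PackData(u'hello distribution', 'docs/readme.txt')
    Writer.Close()
    # Re-open the archive for reading and extract a single entry.
    Reader = PackageFile(DemoZip)
    Reader.UnpackFile('docs/readme.txt', os.path.join(DemoDir, 'readme.txt'))
    Reader.Close()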
| edk2-master | BaseTools/Source/Python/UPT/Core/PackageFile.py |
## @file
# Python 'Library' package initialization file.
#
# This file is required to make Python interpreter treat the directory
# as containing package.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
Core init file
'''
| edk2-master | BaseTools/Source/Python/UPT/Core/__init__.py |
## @file
# This file is used to define a class object to describe a distribution package
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
'''
DistributionPackageClass
'''
##
# Import Modules
#
import os.path
from Library.Misc import Sdict
from Library.Misc import GetNonMetaDataFiles
from PomAdapter.InfPomAlignment import InfPomAlignment
from PomAdapter.DecPomAlignment import DecPomAlignment
import Logger.Log as Logger
from Logger import StringTable as ST
from Logger.ToolError import OPTION_VALUE_INVALID
from Logger.ToolError import FatalError
from Logger.ToolError import EDK1_INF_ERROR
from Object.POM.CommonObject import IdentificationObject
from Object.POM.CommonObject import CommonHeaderObject
from Object.POM.CommonObject import MiscFileObject
from Common.MultipleWorkspace import MultipleWorkspace as mws
## DistributionPackageHeaderClass
#
# @param IdentificationObject: Identification Object
# @param CommonHeaderObject: Common Header Object
#
class DistributionPackageHeaderObject(IdentificationObject, \
CommonHeaderObject):
def __init__(self):
IdentificationObject.__init__(self)
CommonHeaderObject.__init__(self)
self.ReadOnly = ''
self.RePackage = ''
self.Vendor = ''
self.Date = ''
self.Signature = 'Md5Sum'
self.XmlSpecification = ''
def GetReadOnly(self):
return self.ReadOnly
def SetReadOnly(self, ReadOnly):
self.ReadOnly = ReadOnly
def GetRePackage(self):
return self.RePackage
def SetRePackage(self, RePackage):
self.RePackage = RePackage
def GetVendor(self):
return self.Vendor
def SetDate(self, Date):
self.Date = Date
def GetDate(self):
return self.Date
def SetSignature(self, Signature):
self.Signature = Signature
def GetSignature(self):
return self.Signature
def SetXmlSpecification(self, XmlSpecification):
self.XmlSpecification = XmlSpecification
def GetXmlSpecification(self):
return self.XmlSpecification
## DistributionPackageClass
#
# @param object: DistributionPackageClass
#
class DistributionPackageClass(object):
def __init__(self):
self.Header = DistributionPackageHeaderObject()
#
# {(Guid, Version, Path) : PackageObj}
#
self.PackageSurfaceArea = Sdict()
#
# {(Guid, Version, Name, Path) : ModuleObj}
#
self.ModuleSurfaceArea = Sdict()
self.Tools = MiscFileObject()
self.MiscellaneousFiles = MiscFileObject()
self.UserExtensions = []
self.FileList = []
## Get all included packages and modules for a distribution package
#
# @param WorkspaceDir: WorkspaceDir
# @param PackageList: A list of all packages
# @param ModuleList: A list of all modules
#
def GetDistributionPackage(self, WorkspaceDir, PackageList, ModuleList):
# Backup WorkspaceDir
Root = WorkspaceDir
#
# Get Packages
#
if PackageList:
for PackageFile in PackageList:
PackageFileFullPath = mws.join(Root, PackageFile)
WorkspaceDir = mws.getWs(Root, PackageFile)
DecObj = DecPomAlignment(PackageFileFullPath, WorkspaceDir, CheckMulDec=True)
PackageObj = DecObj
#
                # Parse INF files one by one
#
ModuleInfFileList = PackageObj.GetModuleFileList()
for File in ModuleInfFileList:
WsRelPath = os.path.join(PackageObj.GetPackagePath(), File)
WsRelPath = os.path.normpath(WsRelPath)
if ModuleList and WsRelPath in ModuleList:
Logger.Error("UPT",
OPTION_VALUE_INVALID,
ST.ERR_NOT_STANDALONE_MODULE_ERROR%\
(WsRelPath, PackageFile))
Filename = os.path.normpath\
(os.path.join(PackageObj.GetRelaPath(), File))
os.path.splitext(Filename)
#
# Call INF parser to generate Inf Object.
                    # Actually, this is not a direct call; it is wrapped by the
                    # Inf class in InfPomAlignment.
#
try:
ModuleObj = InfPomAlignment(Filename, WorkspaceDir, PackageObj.GetPackagePath())
#
# Add module to package
#
ModuleDict = PackageObj.GetModuleDict()
ModuleDict[(ModuleObj.GetGuid(), \
ModuleObj.GetVersion(), \
ModuleObj.GetName(), \
ModuleObj.GetCombinePath())] = ModuleObj
PackageObj.SetModuleDict(ModuleDict)
except FatalError as ErrCode:
if ErrCode.message == EDK1_INF_ERROR:
Logger.Warn("UPT",
ST.WRN_EDK1_INF_FOUND%Filename)
else:
raise
self.PackageSurfaceArea\
[(PackageObj.GetGuid(), PackageObj.GetVersion(), \
PackageObj.GetCombinePath())] = PackageObj
#
# Get Modules
#
if ModuleList:
for ModuleFile in ModuleList:
ModuleFileFullPath = mws.join(Root, ModuleFile)
WorkspaceDir = mws.getWs(Root, ModuleFile)
try:
ModuleObj = InfPomAlignment(ModuleFileFullPath, WorkspaceDir)
ModuleKey = (ModuleObj.GetGuid(),
ModuleObj.GetVersion(),
ModuleObj.GetName(),
ModuleObj.GetCombinePath())
self.ModuleSurfaceArea[ModuleKey] = ModuleObj
except FatalError as ErrCode:
if ErrCode.message == EDK1_INF_ERROR:
Logger.Error("UPT",
EDK1_INF_ERROR,
ST.WRN_EDK1_INF_FOUND%ModuleFileFullPath,
ExtraData=ST.ERR_NOT_SUPPORTED_SA_MODULE)
else:
raise
# Recover WorkspaceDir
WorkspaceDir = Root
## Get all files included for a distribution package, except tool/misc of
# distribution level
#
# @retval DistFileList A list of filepath for NonMetaDataFile, relative to workspace
# @retval MetaDataFileList A list of filepath for MetaDataFile, relative to workspace
#
def GetDistributionFileList(self):
MetaDataFileList = []
SkipModulesUniList = []
for Guid, Version, Path in self.PackageSurfaceArea:
Package = self.PackageSurfaceArea[Guid, Version, Path]
PackagePath = Package.GetPackagePath()
FullPath = Package.GetFullPath()
MetaDataFileList.append(Path)
IncludePathList = Package.GetIncludePathList()
for IncludePath in IncludePathList:
SearchPath = os.path.normpath(os.path.join(os.path.dirname(FullPath), IncludePath))
AddPath = os.path.normpath(os.path.join(PackagePath, IncludePath))
self.FileList += GetNonMetaDataFiles(SearchPath, ['CVS', '.svn'], False, AddPath)
#
# Add the miscellaneous files on DEC file
#
for MiscFileObj in Package.GetMiscFileList():
for FileObj in MiscFileObj.GetFileList():
MiscFileFullPath = os.path.normpath(os.path.join(PackagePath, FileObj.GetURI()))
if MiscFileFullPath not in self.FileList:
self.FileList.append(MiscFileFullPath)
Module = None
ModuleDict = Package.GetModuleDict()
for Guid, Version, Name, Path in ModuleDict:
Module = ModuleDict[Guid, Version, Name, Path]
ModulePath = Module.GetModulePath()
FullPath = Module.GetFullPath()
PkgRelPath = os.path.normpath(os.path.join(PackagePath, ModulePath))
MetaDataFileList.append(Path)
SkipList = ['CVS', '.svn']
NonMetaDataFileList = []
if Module.UniFileClassObject:
for UniFile in Module.UniFileClassObject.IncFileList:
OriPath = os.path.normpath(os.path.dirname(FullPath))
UniFilePath = os.path.normpath(os.path.join(PkgRelPath, UniFile.Path[len(OriPath) + 1:]))
if UniFilePath not in SkipModulesUniList:
SkipModulesUniList.append(UniFilePath)
for IncludeFile in Module.UniFileClassObject.IncludePathList:
if IncludeFile not in SkipModulesUniList:
SkipModulesUniList.append(IncludeFile)
NonMetaDataFileList = GetNonMetaDataFiles(os.path.dirname(FullPath), SkipList, False, PkgRelPath)
for NonMetaDataFile in NonMetaDataFileList:
if NonMetaDataFile not in self.FileList:
self.FileList.append(NonMetaDataFile)
for Guid, Version, Name, Path in self.ModuleSurfaceArea:
Module = self.ModuleSurfaceArea[Guid, Version, Name, Path]
ModulePath = Module.GetModulePath()
FullPath = Module.GetFullPath()
MetaDataFileList.append(Path)
SkipList = ['CVS', '.svn']
NonMetaDataFileList = []
if Module.UniFileClassObject:
for UniFile in Module.UniFileClassObject.IncFileList:
OriPath = os.path.normpath(os.path.dirname(FullPath))
UniFilePath = os.path.normpath(os.path.join(ModulePath, UniFile.Path[len(OriPath) + 1:]))
if UniFilePath not in SkipModulesUniList:
SkipModulesUniList.append(UniFilePath)
NonMetaDataFileList = GetNonMetaDataFiles(os.path.dirname(FullPath), SkipList, False, ModulePath)
for NonMetaDataFile in NonMetaDataFileList:
if NonMetaDataFile not in self.FileList:
self.FileList.append(NonMetaDataFile)
for SkipModuleUni in SkipModulesUniList:
if SkipModuleUni in self.FileList:
self.FileList.remove(SkipModuleUni)
return self.FileList, MetaDataFileList
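# --------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): how the two surface
# area dictionaries are keyed. The GUIDs and paths below are hypothetical, and
# the values would normally be DecPomAlignment/InfPomAlignment objects rather
# than None.
if __name__ == '__main__':
    Dist = DistributionPackageClass()
    PkgKey = ('11111111-2222-3333-4444-555555555555', '1.0',
              'DemoPkg/DemoPkg.dec')
    ModKey = ('66666666-7777-8888-9999-000000000000', '1.0', 'DemoDriver',
              'DemoPkg/DemoDriver/DemoDriver.inf')
    Dist.PackageSurfaceArea[PkgKey] = None   # (Guid, Version, Path) -> PackageObj
    Dist.ModuleSurfaceArea[ModKey] = None    # (Guid, Version, Name, Path) -> ModuleObj
    for Guid, Version, Path in Dist.PackageSurfaceArea:
        print('Package', Guid, Version, Path)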
| edk2-master | BaseTools/Source/Python/UPT/Core/DistributionPackageClass.py |
## @file
# This file is for installed package information database operations
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
IpiDb
'''
##
# Import Modules
#
import sqlite3
import os.path
import time
import Logger.Log as Logger
from Logger import StringTable as ST
from Logger.ToolError import UPT_ALREADY_RUNNING_ERROR
from Logger.ToolError import UPT_DB_UPDATE_ERROR
import platform as pf
## IpiDb
#
# This class represents the installed package information database
# Add/Remove/Get installed distribution package information here.
#
#
# @param object: Inherited from object class
# @param DbPath: A string for the path of the database
#
#
class IpiDatabase(object):
def __init__(self, DbPath, Workspace):
Dir = os.path.dirname(DbPath)
if not os.path.isdir(Dir):
os.mkdir(Dir)
self.Conn = sqlite3.connect(u''.join(DbPath), isolation_level='DEFERRED')
self.Conn.execute("PRAGMA page_size=4096")
self.Conn.execute("PRAGMA synchronous=OFF")
self.Cur = self.Conn.cursor()
self.DpTable = 'DpInfo'
self.PkgTable = 'PkgInfo'
self.ModInPkgTable = 'ModInPkgInfo'
self.StandaloneModTable = 'StandaloneModInfo'
self.ModDepexTable = 'ModDepexInfo'
self.DpFileListTable = 'DpFileListInfo'
self.DummyTable = 'Dummy'
self.Workspace = os.path.normpath(Workspace)
## Initialize build database
#
#
def InitDatabase(self, SkipLock = False):
Logger.Verbose(ST.MSG_INIT_IPI_START)
if not SkipLock:
try:
#
# Create a dummy table, if already existed,
# then UPT is already running
#
SqlCommand = """
create table %s (
Dummy TEXT NOT NULL,
PRIMARY KEY (Dummy)
)""" % self.DummyTable
self.Cur.execute(SqlCommand)
self.Conn.commit()
except sqlite3.OperationalError:
Logger.Error("UPT",
UPT_ALREADY_RUNNING_ERROR,
ST.ERR_UPT_ALREADY_RUNNING_ERROR
)
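        #
        # Note (added for clarity): the dummy table acts as a coarse
        # inter-process lock; it is dropped again in CloseDb() once this
        # instance has finished.
        #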
#
# Create new table
#
SqlCommand = """
create table IF NOT EXISTS %s (
DpGuid TEXT NOT NULL,DpVersion TEXT NOT NULL,
InstallTime REAL NOT NULL,
NewPkgFileName TEXT NOT NULL,
PkgFileName TEXT NOT NULL,
RePackage TEXT NOT NULL,
PRIMARY KEY (DpGuid, DpVersion)
)""" % self.DpTable
self.Cur.execute(SqlCommand)
SqlCommand = """
create table IF NOT EXISTS %s (
FilePath TEXT NOT NULL,
DpGuid TEXT,
DpVersion TEXT,
Md5Sum TEXT,
PRIMARY KEY (FilePath)
)""" % self.DpFileListTable
self.Cur.execute(SqlCommand)
SqlCommand = """
create table IF NOT EXISTS %s (
PackageGuid TEXT NOT NULL,
PackageVersion TEXT NOT NULL,
InstallTime REAL NOT NULL,
DpGuid TEXT,
DpVersion TEXT,
InstallPath TEXT NOT NULL,
PRIMARY KEY (PackageGuid, PackageVersion, InstallPath)
)""" % self.PkgTable
self.Cur.execute(SqlCommand)
SqlCommand = """
create table IF NOT EXISTS %s (
ModuleGuid TEXT NOT NULL,
ModuleVersion TEXT NOT NULL,
ModuleName TEXT NOT NULL,
InstallTime REAL NOT NULL,
PackageGuid TEXT,
PackageVersion TEXT,
InstallPath TEXT NOT NULL,
PRIMARY KEY (ModuleGuid, ModuleVersion, ModuleName, InstallPath)
)""" % self.ModInPkgTable
self.Cur.execute(SqlCommand)
SqlCommand = """
create table IF NOT EXISTS %s (
ModuleGuid TEXT NOT NULL,
ModuleVersion TEXT NOT NULL,
ModuleName TEXT NOT NULL,
InstallTime REAL NOT NULL,
DpGuid TEXT,
DpVersion TEXT,
InstallPath TEXT NOT NULL,
PRIMARY KEY (ModuleGuid, ModuleVersion, ModuleName, InstallPath)
)""" % self.StandaloneModTable
self.Cur.execute(SqlCommand)
SqlCommand = """
create table IF NOT EXISTS %s (
ModuleGuid TEXT NOT NULL,
ModuleVersion TEXT NOT NULL,
ModuleName TEXT NOT NULL,
InstallPath TEXT NOT NULL,
DepexGuid TEXT,
DepexVersion TEXT
)""" % self.ModDepexTable
self.Cur.execute(SqlCommand)
self.Conn.commit()
Logger.Verbose(ST.MSG_INIT_IPI_FINISH)
def RollBack(self):
self.Conn.rollback()
def Commit(self):
self.Conn.commit()
## Add a distribution install information from DpObj
#
    # @param DpObj: the distribution package object
    # @param NewDpPkgFileName: New DpPkg File Name
    # @param DpPkgFileName: DpPkg File Name
    # @param RePackage: whether the distribution allows repackaging
#
def AddDPObject(self, DpObj, NewDpPkgFileName, DpPkgFileName, RePackage):
try:
for PkgKey in DpObj.PackageSurfaceArea.keys():
PkgGuid = PkgKey[0]
PkgVersion = PkgKey[1]
PkgInstallPath = PkgKey[2]
self._AddPackage(PkgGuid, PkgVersion, DpObj.Header.GetGuid(), \
DpObj.Header.GetVersion(), PkgInstallPath)
PkgObj = DpObj.PackageSurfaceArea[PkgKey]
for ModKey in PkgObj.GetModuleDict().keys():
ModGuid = ModKey[0]
ModVersion = ModKey[1]
ModName = ModKey[2]
ModInstallPath = ModKey[3]
ModInstallPath = \
os.path.normpath(os.path.join(PkgInstallPath, ModInstallPath))
self._AddModuleInPackage(ModGuid, ModVersion, ModName, PkgGuid, \
PkgVersion, ModInstallPath)
ModObj = PkgObj.GetModuleDict()[ModKey]
for Dep in ModObj.GetPackageDependencyList():
DepexGuid = Dep.GetGuid()
DepexVersion = Dep.GetVersion()
self._AddModuleDepex(ModGuid, ModVersion, ModName, ModInstallPath, \
DepexGuid, DepexVersion)
for (FilePath, Md5Sum) in PkgObj.FileList:
self._AddDpFilePathList(DpObj.Header.GetGuid(), \
DpObj.Header.GetVersion(), FilePath, \
Md5Sum)
for ModKey in DpObj.ModuleSurfaceArea.keys():
ModGuid = ModKey[0]
ModVersion = ModKey[1]
ModName = ModKey[2]
ModInstallPath = ModKey[3]
self._AddStandaloneModule(ModGuid, ModVersion, ModName, \
DpObj.Header.GetGuid(), \
DpObj.Header.GetVersion(), \
ModInstallPath)
ModObj = DpObj.ModuleSurfaceArea[ModKey]
for Dep in ModObj.GetPackageDependencyList():
DepexGuid = Dep.GetGuid()
DepexVersion = Dep.GetVersion()
self._AddModuleDepex(ModGuid, ModVersion, ModName, ModInstallPath, \
DepexGuid, DepexVersion)
for (Path, Md5Sum) in ModObj.FileList:
self._AddDpFilePathList(DpObj.Header.GetGuid(), \
DpObj.Header.GetVersion(), \
Path, Md5Sum)
#
# add tool/misc files
#
for (Path, Md5Sum) in DpObj.FileList:
self._AddDpFilePathList(DpObj.Header.GetGuid(), \
DpObj.Header.GetVersion(), Path, Md5Sum)
self._AddDp(DpObj.Header.GetGuid(), DpObj.Header.GetVersion(), \
NewDpPkgFileName, DpPkgFileName, RePackage)
except sqlite3.IntegrityError as DetailMsg:
Logger.Error("UPT",
UPT_DB_UPDATE_ERROR,
ST.ERR_UPT_DB_UPDATE_ERROR,
ExtraData = DetailMsg
)
## Add a distribution install information
#
# @param Guid Guid of the distribution package
# @param Version Version of the distribution package
# @param NewDpFileName the saved filename of distribution package file
# @param DistributionFileName the filename of distribution package file
#
def _AddDp(self, Guid, Version, NewDpFileName, DistributionFileName, \
RePackage):
if Version is None or len(Version.strip()) == 0:
Version = 'N/A'
#
# Add newly installed DP information to DB.
#
if NewDpFileName is None or len(NewDpFileName.strip()) == 0:
PkgFileName = 'N/A'
else:
PkgFileName = NewDpFileName
CurrentTime = time.time()
SqlCommand = \
"""insert into %s values('%s', '%s', %s, '%s', '%s', '%s')""" % \
(self.DpTable, Guid, Version, CurrentTime, PkgFileName, \
DistributionFileName, str(RePackage).upper())
self.Cur.execute(SqlCommand)
## Add a file list from DP
#
# @param DpGuid: A DpGuid
# @param DpVersion: A DpVersion
# @param Path: A Path
    # @param Md5Sum: A Md5Sum
#
def _AddDpFilePathList(self, DpGuid, DpVersion, Path, Md5Sum):
Path = os.path.normpath(Path)
if pf.system() == 'Windows':
if Path.startswith(self.Workspace):
Path = Path[len(self.Workspace):]
else:
if Path.startswith(self.Workspace + os.sep):
Path = Path[len(self.Workspace)+1:]
SqlCommand = """insert into %s values('%s', '%s', '%s', '%s')""" % \
(self.DpFileListTable, Path, DpGuid, DpVersion, Md5Sum)
self.Cur.execute(SqlCommand)
## Add a package install information
#
# @param Guid: A package guid
# @param Version: A package version
# @param DpGuid: A DpGuid
# @param DpVersion: A DpVersion
# @param Path: A Path
#
def _AddPackage(self, Guid, Version, DpGuid=None, DpVersion=None, Path=''):
if Version is None or len(Version.strip()) == 0:
Version = 'N/A'
if DpGuid is None or len(DpGuid.strip()) == 0:
DpGuid = 'N/A'
if DpVersion is None or len(DpVersion.strip()) == 0:
DpVersion = 'N/A'
#
# Add newly installed package information to DB.
#
CurrentTime = time.time()
SqlCommand = \
"""insert into %s values('%s', '%s', %s, '%s', '%s', '%s')""" % \
(self.PkgTable, Guid, Version, CurrentTime, DpGuid, DpVersion, Path)
self.Cur.execute(SqlCommand)
## Add a module that from a package install information
#
# @param Guid: Module Guid
# @param Version: Module version
# @param Name: Module Name
# @param PkgGuid: Package Guid
# @param PkgVersion: Package version
    # @param Path: Package-relative path where the module is installed
#
def _AddModuleInPackage(self, Guid, Version, Name, PkgGuid=None, \
PkgVersion=None, Path=''):
if Version is None or len(Version.strip()) == 0:
Version = 'N/A'
if PkgGuid is None or len(PkgGuid.strip()) == 0:
PkgGuid = 'N/A'
if PkgVersion is None or len(PkgVersion.strip()) == 0:
PkgVersion = 'N/A'
if os.name == 'posix':
Path = Path.replace('\\', os.sep)
else:
Path = Path.replace('/', os.sep)
#
# Add module from package information to DB.
#
CurrentTime = time.time()
SqlCommand = \
"""insert into %s values('%s', '%s', '%s', %s, '%s', '%s', '%s')""" % \
(self.ModInPkgTable, Guid, Version, Name, CurrentTime, PkgGuid, PkgVersion, \
Path)
self.Cur.execute(SqlCommand)
## Add a module that is standalone install information
#
# @param Guid: a module Guid
# @param Version: a module Version
# @param Name: a module name
# @param DpGuid: a DpGuid
# @param DpVersion: a DpVersion
# @param Path: path
#
def _AddStandaloneModule(self, Guid, Version, Name, DpGuid=None, \
DpVersion=None, Path=''):
if Version is None or len(Version.strip()) == 0:
Version = 'N/A'
if DpGuid is None or len(DpGuid.strip()) == 0:
DpGuid = 'N/A'
if DpVersion is None or len(DpVersion.strip()) == 0:
DpVersion = 'N/A'
#
# Add module standalone information to DB.
#
CurrentTime = time.time()
SqlCommand = \
"""insert into %s values('%s', '%s', '%s', %s, '%s', '%s', '%s')""" % \
(self.StandaloneModTable, Guid, Version, Name, CurrentTime, DpGuid, \
DpVersion, Path)
self.Cur.execute(SqlCommand)
## Add a module depex
#
# @param Guid: a module Guid
# @param Version: a module Version
# @param Name: a module name
# @param DepexGuid: a module DepexGuid
# @param DepexVersion: a module DepexVersion
#
def _AddModuleDepex(self, Guid, Version, Name, Path, DepexGuid=None, \
DepexVersion=None):
if DepexGuid is None or len(DepexGuid.strip()) == 0:
DepexGuid = 'N/A'
if DepexVersion is None or len(DepexVersion.strip()) == 0:
DepexVersion = 'N/A'
if os.name == 'posix':
Path = Path.replace('\\', os.sep)
else:
Path = Path.replace('/', os.sep)
#
# Add module depex information to DB.
#
SqlCommand = """insert into %s values('%s', '%s', '%s', '%s', '%s', '%s')"""\
% (self.ModDepexTable, Guid, Version, Name, Path, DepexGuid, DepexVersion)
self.Cur.execute(SqlCommand)
## Remove a distribution install information, if no version specified,
# remove all DPs with this Guid.
#
    # @param DpGuid: guid of the distribution package
    # @param DpVersion: version of the distribution package
#
def RemoveDpObj(self, DpGuid, DpVersion):
PkgList = self.GetPackageListFromDp(DpGuid, DpVersion)
#
# delete from ModDepex the standalone module's dependency
#
SqlCommand = \
"""delete from ModDepexInfo where ModDepexInfo.ModuleGuid in
(select ModuleGuid from StandaloneModInfo as B where B.DpGuid = '%s'
and B.DpVersion = '%s')
and ModDepexInfo.ModuleVersion in
(select ModuleVersion from StandaloneModInfo as B
where B.DpGuid = '%s' and B.DpVersion = '%s')
and ModDepexInfo.ModuleName in
(select ModuleName from StandaloneModInfo as B
where B.DpGuid = '%s' and B.DpVersion = '%s')
and ModDepexInfo.InstallPath in
(select InstallPath from StandaloneModInfo as B
where B.DpGuid = '%s' and B.DpVersion = '%s') """ % \
(DpGuid, DpVersion, DpGuid, DpVersion, DpGuid, DpVersion, DpGuid, DpVersion)
self.Cur.execute(SqlCommand)
#
# delete from ModDepex the from pkg module's dependency
#
for Pkg in PkgList:
SqlCommand = \
"""delete from ModDepexInfo where ModDepexInfo.ModuleGuid in
(select ModuleGuid from ModInPkgInfo
where ModInPkgInfo.PackageGuid ='%s' and
ModInPkgInfo.PackageVersion = '%s')
and ModDepexInfo.ModuleVersion in
(select ModuleVersion from ModInPkgInfo
where ModInPkgInfo.PackageGuid ='%s' and
ModInPkgInfo.PackageVersion = '%s')
and ModDepexInfo.ModuleName in
(select ModuleName from ModInPkgInfo
where ModInPkgInfo.PackageGuid ='%s' and
ModInPkgInfo.PackageVersion = '%s')
and ModDepexInfo.InstallPath in
(select InstallPath from ModInPkgInfo where
ModInPkgInfo.PackageGuid ='%s'
and ModInPkgInfo.PackageVersion = '%s')""" \
% (Pkg[0], Pkg[1], Pkg[0], Pkg[1], Pkg[0], Pkg[1], Pkg[0], Pkg[1])
self.Cur.execute(SqlCommand)
#
# delete the standalone module
#
SqlCommand = \
"""delete from %s where DpGuid ='%s' and DpVersion = '%s'""" % \
(self.StandaloneModTable, DpGuid, DpVersion)
self.Cur.execute(SqlCommand)
#
# delete the from pkg module
#
for Pkg in PkgList:
SqlCommand = \
"""delete from %s where %s.PackageGuid ='%s'
and %s.PackageVersion = '%s'""" % \
(self.ModInPkgTable, self.ModInPkgTable, Pkg[0], \
self.ModInPkgTable, Pkg[1])
self.Cur.execute(SqlCommand)
#
# delete packages
#
SqlCommand = \
"""delete from %s where DpGuid ='%s' and DpVersion = '%s'""" % \
(self.PkgTable, DpGuid, DpVersion)
self.Cur.execute(SqlCommand)
#
# delete file list from DP
#
SqlCommand = \
"""delete from %s where DpGuid ='%s' and DpVersion = '%s'""" % \
(self.DpFileListTable, DpGuid, DpVersion)
self.Cur.execute(SqlCommand)
#
# delete DP
#
SqlCommand = \
"""delete from %s where DpGuid ='%s' and DpVersion = '%s'""" % \
(self.DpTable, DpGuid, DpVersion)
self.Cur.execute(SqlCommand)
#self.Conn.commit()
## Get a list of distribution install information.
#
# @param Guid: distribution package guid
# @param Version: distribution package version
#
def GetDp(self, Guid, Version):
if Version is None or len(Version.strip()) == 0:
Version = 'N/A'
Logger.Verbose(ST.MSG_GET_DP_INSTALL_LIST)
(DpGuid, DpVersion) = (Guid, Version)
SqlCommand = """select * from %s where DpGuid ='%s'""" % \
(self.DpTable, DpGuid)
self.Cur.execute(SqlCommand)
else:
Logger.Verbose(ST.MSG_GET_DP_INSTALL_INFO_START)
(DpGuid, DpVersion) = (Guid, Version)
SqlCommand = \
"""select * from %s where DpGuid ='%s' and DpVersion = '%s'""" % \
(self.DpTable, DpGuid, DpVersion)
self.Cur.execute(SqlCommand)
DpList = []
for DpInfo in self.Cur:
DpGuid = DpInfo[0]
DpVersion = DpInfo[1]
InstallTime = DpInfo[2]
PkgFileName = DpInfo[3]
DpList.append((DpGuid, DpVersion, InstallTime, PkgFileName))
Logger.Verbose(ST.MSG_GET_DP_INSTALL_INFO_FINISH)
return DpList
## Get a list of distribution install dirs
#
# @param Guid: distribution package guid
# @param Version: distribution package version
#
def GetDpInstallDirList(self, Guid, Version):
SqlCommand = """select InstallPath from PkgInfo where DpGuid = '%s' and DpVersion = '%s'""" % (Guid, Version)
self.Cur.execute(SqlCommand)
DirList = []
for Result in self.Cur:
if Result[0] not in DirList:
DirList.append(Result[0])
SqlCommand = """select InstallPath from StandaloneModInfo where DpGuid = '%s' and DpVersion = '%s'""" % \
(Guid, Version)
self.Cur.execute(SqlCommand)
for Result in self.Cur:
if Result[0] not in DirList:
DirList.append(Result[0])
return DirList
## Get a list of distribution install file path information.
#
# @param Guid: distribution package guid
# @param Version: distribution package version
#
def GetDpFileList(self, Guid, Version):
(DpGuid, DpVersion) = (Guid, Version)
SqlCommand = \
"""select * from %s where DpGuid ='%s' and DpVersion = '%s'""" % \
(self.DpFileListTable, DpGuid, DpVersion)
self.Cur.execute(SqlCommand)
PathList = []
for Result in self.Cur:
Path = Result[0]
Md5Sum = Result[3]
PathList.append((os.path.join(self.Workspace, Path), Md5Sum))
return PathList
    ## Get the repackage attribute, if present, of files installed into the current workspace
#
# @retval FileDict: a Dict of file, key is file path, value is (DpGuid, DpVersion, NewDpFileName, RePackage)
#
def GetRePkgDict(self):
SqlCommand = """select * from %s """ % (self.DpTable)
self.Cur.execute(SqlCommand)
DpInfoList = []
for Result in self.Cur:
DpInfoList.append(Result)
FileDict = {}
for Result in DpInfoList:
DpGuid = Result[0]
DpVersion = Result[1]
NewDpFileName = Result[3]
RePackage = Result[5]
if RePackage == 'TRUE':
RePackage = True
else:
RePackage = False
for FileInfo in self.GetDpFileList(DpGuid, DpVersion):
PathInfo = FileInfo[0]
FileDict[PathInfo] = DpGuid, DpVersion, NewDpFileName, RePackage
return FileDict
## Get (Guid, Version) from distribution file name information.
#
# @param DistributionFile: Distribution File
#
def GetDpByName(self, DistributionFile):
SqlCommand = """select * from %s where NewPkgFileName = '%s'""" % \
(self.DpTable, DistributionFile)
self.Cur.execute(SqlCommand)
for Result in self.Cur:
DpGuid = Result[0]
DpVersion = Result[1]
NewDpFileName = Result[3]
return (DpGuid, DpVersion, NewDpFileName)
else:
return (None, None, None)
## Get a list of package information.
#
# @param Guid: package guid
# @param Version: package version
#
def GetPackage(self, Guid, Version, DpGuid='', DpVersion=''):
if DpVersion == '' or DpGuid == '':
(PackageGuid, PackageVersion) = (Guid, Version)
SqlCommand = """select * from %s where PackageGuid ='%s'
and PackageVersion = '%s'""" % (self.PkgTable, PackageGuid, \
PackageVersion)
self.Cur.execute(SqlCommand)
elif Version is None or len(Version.strip()) == 0:
SqlCommand = """select * from %s where PackageGuid ='%s'""" % \
(self.PkgTable, Guid)
self.Cur.execute(SqlCommand)
else:
(PackageGuid, PackageVersion) = (Guid, Version)
SqlCommand = """select * from %s where PackageGuid ='%s' and
PackageVersion = '%s'
and DpGuid = '%s' and DpVersion = '%s'""" % \
(self.PkgTable, PackageGuid, PackageVersion, \
DpGuid, DpVersion)
self.Cur.execute(SqlCommand)
PkgList = []
for PkgInfo in self.Cur:
PkgGuid = PkgInfo[0]
PkgVersion = PkgInfo[1]
InstallTime = PkgInfo[2]
InstallPath = PkgInfo[5]
PkgList.append((PkgGuid, PkgVersion, InstallTime, DpGuid, \
DpVersion, InstallPath))
return PkgList
## Get a list of module in package information.
#
# @param Guid: A module guid
# @param Version: A module version
#
def GetModInPackage(self, Guid, Version, Name, Path, PkgGuid='', PkgVersion=''):
(ModuleGuid, ModuleVersion, ModuleName, InstallPath) = (Guid, Version, Name, Path)
if PkgVersion == '' or PkgGuid == '':
SqlCommand = """select * from %s where ModuleGuid ='%s' and
ModuleVersion = '%s' and InstallPath = '%s'
and ModuleName = '%s'""" % (self.ModInPkgTable, ModuleGuid, \
ModuleVersion, InstallPath, ModuleName)
self.Cur.execute(SqlCommand)
else:
SqlCommand = """select * from %s where ModuleGuid ='%s' and
ModuleVersion = '%s' and InstallPath = '%s'
and ModuleName = '%s' and PackageGuid ='%s'
and PackageVersion = '%s'
""" % (self.ModInPkgTable, ModuleGuid, \
ModuleVersion, InstallPath, ModuleName, PkgGuid, PkgVersion)
self.Cur.execute(SqlCommand)
ModList = []
for ModInfo in self.Cur:
ModGuid = ModInfo[0]
ModVersion = ModInfo[1]
InstallTime = ModInfo[2]
InstallPath = ModInfo[5]
ModList.append((ModGuid, ModVersion, InstallTime, PkgGuid, \
PkgVersion, InstallPath))
return ModList
## Get a list of module standalone.
#
# @param Guid: A module guid
# @param Version: A module version
#
def GetStandaloneModule(self, Guid, Version, Name, Path, DpGuid='', DpVersion=''):
(ModuleGuid, ModuleVersion, ModuleName, InstallPath) = (Guid, Version, Name, Path)
if DpGuid == '':
SqlCommand = """select * from %s where ModuleGuid ='%s' and
ModuleVersion = '%s' and InstallPath = '%s'
and ModuleName = '%s'""" % (self.StandaloneModTable, ModuleGuid, \
ModuleVersion, InstallPath, ModuleName)
self.Cur.execute(SqlCommand)
else:
SqlCommand = """select * from %s where ModuleGuid ='%s' and
ModuleVersion = '%s' and InstallPath = '%s' and ModuleName = '%s' and DpGuid ='%s' and DpVersion = '%s'
""" % (self.StandaloneModTable, ModuleGuid, \
                               ModuleVersion, InstallPath, ModuleName, DpGuid, DpVersion)
self.Cur.execute(SqlCommand)
ModList = []
for ModInfo in self.Cur:
ModGuid = ModInfo[0]
ModVersion = ModInfo[1]
InstallTime = ModInfo[2]
InstallPath = ModInfo[5]
ModList.append((ModGuid, ModVersion, InstallTime, DpGuid, \
DpVersion, InstallPath))
return ModList
## Get a list of module information that comes from DP.
#
# @param DpGuid: A Distribution Guid
# @param DpVersion: A Distribution version
#
def GetSModInsPathListFromDp(self, DpGuid, DpVersion):
PathList = []
SqlCommand = """select InstallPath from %s where DpGuid ='%s'
and DpVersion = '%s'
""" % (self.StandaloneModTable, DpGuid, DpVersion)
self.Cur.execute(SqlCommand)
for Result in self.Cur:
InstallPath = Result[0]
PathList.append(InstallPath)
return PathList
## Get a list of package information.
#
# @param DpGuid: A Distribution Guid
# @param DpVersion: A Distribution version
#
def GetPackageListFromDp(self, DpGuid, DpVersion):
SqlCommand = """select * from %s where DpGuid ='%s' and
DpVersion = '%s' """ % (self.PkgTable, DpGuid, DpVersion)
self.Cur.execute(SqlCommand)
PkgList = []
for PkgInfo in self.Cur:
PkgGuid = PkgInfo[0]
PkgVersion = PkgInfo[1]
InstallPath = PkgInfo[5]
PkgList.append((PkgGuid, PkgVersion, InstallPath))
return PkgList
## Get a list of modules that depends on package information from a DP.
#
# @param DpGuid: A Distribution Guid
# @param DpVersion: A Distribution version
#
def GetDpDependentModuleList(self, DpGuid, DpVersion):
ModList = []
PkgList = self.GetPackageListFromDp(DpGuid, DpVersion)
if len(PkgList) > 0:
return ModList
for Pkg in PkgList:
#
            # get all in-package modules that depend on the current
            # Pkg (Guid match, Version match or N/A) but do not belong to
            # the current Pkg
#
SqlCommand = """select t1.ModuleGuid, t1.ModuleVersion,
t1.InstallPath from %s as t1, %s as t2 where
t1.ModuleGuid = t2.ModuleGuid and
t1.ModuleVersion = t2.ModuleVersion and t2.DepexGuid ='%s'
and (t2.DepexVersion = '%s' or t2.DepexVersion = 'N/A') and
t1.PackageGuid != '%s' and t1.PackageVersion != '%s'
""" % (self.ModInPkgTable, \
self.ModDepexTable, Pkg[0], Pkg[1], Pkg[0], \
Pkg[1])
self.Cur.execute(SqlCommand)
for ModInfo in self.Cur:
ModGuid = ModInfo[0]
ModVersion = ModInfo[1]
InstallPath = ModInfo[2]
ModList.append((ModGuid, ModVersion, InstallPath))
#
            # get all standalone modules that depend on the current
            # Pkg (Guid match, Version match or N/A) but are not in the current DP
#
SqlCommand = \
"""select t1.ModuleGuid, t1.ModuleVersion, t1.InstallPath
from %s as t1, %s as t2 where t1.ModuleGuid = t2.ModuleGuid and
t1.ModuleVersion = t2.ModuleVersion and t2.DepexGuid ='%s'
and (t2.DepexVersion = '%s' or t2.DepexVersion = 'N/A') and
t1.DpGuid != '%s' and t1.DpVersion != '%s'
""" % \
(self.StandaloneModTable, self.ModDepexTable, Pkg[0], \
Pkg[1], DpGuid, DpVersion)
self.Cur.execute(SqlCommand)
for ModInfo in self.Cur:
ModGuid = ModInfo[0]
ModVersion = ModInfo[1]
InstallPath = ModInfo[2]
ModList.append((ModGuid, ModVersion, InstallPath))
return ModList
## Get Dp's list of modules.
#
# @param DpGuid: A Distribution Guid
# @param DpVersion: A Distribution version
#
def GetDpModuleList(self, DpGuid, DpVersion):
ModList = []
#
# get Dp module list from the DpFileList table
#
SqlCommand = """select FilePath
from %s
where DpGuid = '%s' and DpVersion = '%s' and
FilePath like '%%.inf'
""" % (self.DpFileListTable, DpGuid, DpVersion)
self.Cur.execute(SqlCommand)
for ModuleInfo in self.Cur:
FilePath = ModuleInfo[0]
ModList.append(os.path.join(self.Workspace, FilePath))
return ModList
## Get a module depex
#
    # @param Guid: A module Guid
    # @param Version: A module version
    # @param Path: the module install path
#
def GetModuleDepex(self, Guid, Version, Path):
#
        # Get module depex information from the DB.
#
SqlCommand = """select * from %s where ModuleGuid ='%s' and
ModuleVersion = '%s' and InstallPath ='%s'
""" % (self.ModDepexTable, Guid, Version, Path)
self.Cur.execute(SqlCommand)
DepexList = []
for DepInfo in self.Cur:
DepexGuid = DepInfo[3]
DepexVersion = DepInfo[4]
DepexList.append((DepexGuid, DepexVersion))
return DepexList
## Inventory the distribution installed to current workspace
#
# Inventory the distribution installed to current workspace
#
def InventoryDistInstalled(self):
SqlCommand = """select * from %s """ % (self.DpTable)
self.Cur.execute(SqlCommand)
DpInfoList = []
for Result in self.Cur:
DpGuid = Result[0]
DpVersion = Result[1]
DpAliasName = Result[3]
DpFileName = Result[4]
DpInfoList.append((DpGuid, DpVersion, DpFileName, DpAliasName))
return DpInfoList
## Close entire database
#
# Close the connection and cursor
#
def CloseDb(self):
#
# drop the dummy table
#
SqlCommand = """
drop table IF EXISTS %s
""" % self.DummyTable
self.Cur.execute(SqlCommand)
self.Conn.commit()
self.Cur.close()
self.Conn.close()
## Convert To Sql String
#
# 1. Replace "'" with "''" in each item of StringList
#
# @param StringList: A list for strings to be converted
#
def __ConvertToSqlString(self, StringList):
if self.DpTable:
pass
return list(map(lambda s: s.replace("'", "''"), StringList))
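# --------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): the DpInfo insert
# shown above, reproduced against an in-memory SQLite database. The class
# builds its SQL with '%' string interpolation; this sketch uses '?' parameter
# binding, the usual sqlite3 idiom, purely for comparison. All values are
# hypothetical, and running it assumes the UPT imports above resolve.
if __name__ == '__main__':
    DemoConn = sqlite3.connect(':memory:')
    DemoCur = DemoConn.cursor()
    DemoCur.execute("""create table DpInfo (
                       DpGuid TEXT NOT NULL, DpVersion TEXT NOT NULL,
                       InstallTime REAL NOT NULL, NewPkgFileName TEXT NOT NULL,
                       PkgFileName TEXT NOT NULL, RePackage TEXT NOT NULL,
                       PRIMARY KEY (DpGuid, DpVersion))""")
    DemoCur.execute("insert into DpInfo values(?, ?, ?, ?, ?, ?)",
                    ('11111111-2222-3333-4444-555555555555', '1.0',
                     time.time(), 'Demo.dist.pkg', 'Demo.dist.pkg', 'TRUE'))
    DemoConn.commit()
    print(DemoCur.execute("select DpGuid, DpVersion from DpInfo").fetchall())
    DemoConn.close()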
| edk2-master | BaseTools/Source/Python/UPT/Core/IpiDb.py |
## @file
# This file is used to define strings used in the UPT tool
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
"""
This file contains user visible strings in a format that can be used for
localization
"""
import gettext
#
# string table starts here...
#
## strings are classified as following types
# MSG_...: it is a message string
# ERR_...: it is a error string
# WRN_...: it is a warning string
# HLP_...: it is a help string
#
_ = gettext.gettext
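#
# Note (illustrative, not part of the original file): gettext.gettext is the
# identity function until a translation is installed, so _("...") here simply
# marks a string for extraction. A locale could later be bound, for example:
#   Translation = gettext.translation('upt', 'locale_dir', fallback=True)
#   _ = Translation.gettext
# The 'upt' domain and 'locale_dir' directory above are hypothetical.
#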
MSG_USAGE_STRING = _("\n"
"UEFI Packaging Tool (UEFIPT)\n"
"%prog [options]"
)
##
# Version and Copyright
#
MSG_VERSION_NUMBER = _("1.1")
MSG_VERSION = _("UEFI Packaging Tool (UEFIPT) - Revision " + \
MSG_VERSION_NUMBER)
MSG_COPYRIGHT = _("Copyright (c) 2011 - 2018 Intel Corporation All Rights Reserved.")
MSG_VERSION_COPYRIGHT = _("\n %s\n %s" % (MSG_VERSION, MSG_COPYRIGHT))
MSG_USAGE = _("%s [options]\n%s" % ("UPT", MSG_VERSION_COPYRIGHT))
MSG_DESCRIPTION = _("The UEFIPT is used to create, " + \
"install or remove a UEFI Distribution Package. " + \
"If WORKSPACE environment variable is present, " + \
"then UPT will install packages to the location specified by WORKSPACE, " + \
"otherwise UPT will install packages to the current directory. " + \
"Option -n will override this default installation location")
#
# INF Parser related strings.
#
ERR_INF_PARSER_HEADER_FILE = _(
"The Header comment section should start with an @file at the top.")
ERR_INF_PARSER_HEADER_MISSGING = _(
"The Header comment is missing. It must be corrected before continuing.")
ERR_INF_PARSER_UNKNOWN_SECTION = _("An unknown section was found. "
"It must be corrected before continuing. ")
ERR_INF_PARSER_NO_SECTION_ERROR = _("No section was found. "
"A section must be included before continuing.")
ERR_INF_PARSER_BUILD_OPTION_FORMAT_INVALID = \
_("Build Option format incorrect.")
ERR_INF_PARSER_BINARY_ITEM_FORMAT_INVALID = _(
"The format of binary %s item is incorrect. "
"It should contain at least %d elements.")
ERR_INF_PARSER_BINARY_ITEM_FORMAT_INVALID_MAX = _(
"The format of binary %s item is invalid, "
"it should contain not more than %d elements.")
ERR_INF_PARSER_BINARY_ITEM_INVALID_FILETYPE = _(
"The Binary FileType is incorrect. It should in %s")
ERR_INF_PARSER_BINARY_ITEM_FILE_NOT_EXIST = _(
"The Binary File: %s not exist.")
ERR_INF_PARSER_BINARY_ITEM_FILENAME_NOT_EXIST = _(
"The Binary File Name item not exist")
ERR_INF_PARSER_BINARY_VER_TYPE = _(
"Only this type is allowed: \"%s\".")
ERR_INF_PARSER_MULTI_DEFINE_SECTION = \
_("Multiple define sections found. "
"It must be corrected before continuing.")
ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND = \
_("More than 1 %s is defined in DEFINES section. "
"It must be corrected before continuing.")
ERR_INF_PARSER_DEFINE_NAME_INVALID = \
_("Incorrect name format for : %s")
ERR_INF_PARSER_DEFINE_GUID_INVALID = \
_("The format of this GUID is incorrect: %s")
ERR_INF_PARSER_DEFINE_MODULETYPE_INVALID = _("Incorrect MODULE_TYPE: %s")
ERR_INF_PARSER_DEFINE_FROMAT_INVALID = _("Incorrect format: %s")
ERR_INF_PARSER_FILE_NOT_EXIST = _("This file does not exist: %s")
ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID = \
_("The file does not exist or not in sub-directories "
"or has an incorrect file name of the directory containing the INF or DEC file: %s. "
"It must be corrected before continuing")
ERR_INF_PARSER_DEFINE_SHADOW_INVALID = \
_("The SHADOW keyword is only valid for"
" SEC, PEI_CORE and PEIM module types.")
ERR_INF_PARSER_DEFINE_SECTION_HEADER_INVALID = \
_("The format of the section header is incorrect")
ERR_INF_PARSER_DEPEX_SECTION_INVALID = \
_("A module can't have a Depex section when its module type is %s")
ERR_INF_PARSER_DEPEX_SECTION_INVALID_FOR_BASE_LIBRARY_CLASS = \
_("A base type library class can't have a Depex section with module type not defined.")
ERR_INF_PARSER_DEPEX_SECTION_INVALID_FOR_LIBRARY_CLASS = \
_("A library class can't have a Depex section when its supported module type list is not defined.")
ERR_INF_PARSER_DEPEX_SECTION_INVALID_FOR_DRIVER = \
_("A driver can't have a Depex section when its module type is UEFI_DRIVER.")
ERR_INF_PARSER_DEPEX_SECTION_NOT_DETERMINED = \
_("Cannot determine the module's Depex type. The Depex's module types are conflict")
ERR_INF_PARSER_DEFINE_SECTION_MUST_ITEM_NOT_EXIST = _(
"No %s found in INF file, please check it.")
ERR_INF_PARSER_DEPEX_SECTION_MODULE_TYPE_ERROR = \
_("The module type of [Depex] section is invalid, not support type of %s")
ERR_INF_PARSER_DEPEX_SECTION_CONTENT_MISSING = \
_("Missing content in: %s")
ERR_INF_PARSER_DEPEX_SECTION_CONTENT_ERROR = \
_("The [Depex] section contains invalid content: %s")
ERR_INF_PARSER_DEPEX_SECTION_SEC_TYPE_ERROR = \
_("The format is incorrect. The section type keyword of the content in the"
" [Depex] section is only for 'PEI_DEPEX', 'DXE_DEPEX', 'SMM_DEPEX', "
"it does not support type: %s")
ERR_INF_PARSER_UE_SECTION_USER_ID_ERROR = \
_("This format is incorrect. "
"The UserID: %s in [UserExtension] section is incorrect.")
ERR_INF_PARSER_UE_SECTION_ID_STRING_ERROR = \
_("This format is incorrect. "
"IdString: %s in [UserExtension] section is incorrect.")
ERR_INF_PARSER_LIBRARY_SECTION_CONTENT_ERROR = \
_("The format is incorrect. "
"You can only have a Library name and a Feature flag in one line.")
ERR_INF_PARSER_LIBRARY_SECTION_LIBNAME_MISSING = \
_("Format invalid. Please specify a library name.")
ERR_INF_PARSER_SOURCES_SECTION_CONTENT_ERROR = \
_("The format is incorrect. It should be formatted as follows: "
"FileName, Family | TagName | ToolCode | FeatureFlagExpr.")
ERR_INF_PARSER_PCD_SECTION_TYPE_ERROR = \
_("The PCD section type is incorrect. The value should be this list: %s")
ERR_INF_PARSER_PCD_SECTION_CONTENT_ERROR = \
_("PcdName format invalid."
"Should like following: PcdName | Value | FeatureFlag.")
ERR_INF_PARSER_PCD_NAME_FORMAT_ERROR = \
_("Format invalid."
"Should like following: <TokenSpaceGuidCName>.<PcdCName> ")
ERR_INF_PARSER_GUID_PPI_PROTOCOL_SECTION_CONTENT_ERROR = \
_("The format is incorrect. "
"It should be formatted as follows: CName | FeatureFlag.")
ERR_INF_PARSER_PACKAGE_SECTION_CONTENT_ERROR = \
_("The format is incorrect. "
"It should be formatted as follows: <TokenSpaceGuidCName>.<PcdCName>")
ERR_INF_PARSER_PCD_TAIL_COMMENTS_INVALID = \
_("The format is incorrect. "
"Multiple usage descriptions must be described on subsequent lines.")
ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR = \
_("This section format is incorrect: %s.")
ERR_INF_PARSER_SECTION_NAME_DUPLICATE = \
_("This section has multiple section names, "
"only one section name is permitted.")
ERR_INF_PARSER_SECTION_ARCH_CONFLICT = \
_("The 'common' ARCH must not be used with the specified ARCHs.")
ERR_INF_PARSER_SOURCE_SECTION_TAGNAME_INVALID = \
_("This TagName is incorrect: %s. "
"It must be corrected before continuing.")
ERR_INF_PARSER_TAGNAME_NOT_PERMITTED = \
_("TagName is not permitted: %s. "
"It must be corrected before continuing.")
ERR_INF_PARSER_TOOLCODE_NOT_PERMITTED = \
_("ToolCode is not permitted: %s. "
"It must be corrected before continuing.")
ERR_INF_PARSER_SOURCE_SECTION_FAMILY_INVALID = \
_("This family is incorrect: %s. "
"It must be corrected before continuing. ")
ERR_INF_PARSER_SOURCE_SECTION_SECTIONNAME_INVALID = \
_("This SectionName is incorrect: %s. "
"It must be corrected before continuing.")
ERR_INF_PARSER_PCD_CVAR_GUID = \
_("TokenSpaceGuidCName must be valid C variable format.")
ERR_INF_PARSER_PCD_CVAR_PCDCNAME = \
_("PcdCName must be valid C variable format.")
ERR_INF_PARSER_PCD_VALUE_INVALID = \
_("The PCD value is incorrect. It must be corrected before continuing.")
ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID = \
_("Incorrect feature flag expression: %s")
ERR_INF_PARSER_FEATURE_FLAG_EXP_MISSING = \
_("The feature flag expression is missing. Please specify a feature flag.")
ERR_INF_PARSER_INVALID_CNAME = \
_("Incorrect CName: %s. You must specify a valid C variable name.")
ERR_INF_PARSER_CNAME_MISSING = \
_("Missing CName. Specify a valid C variable name.")
ERR_INF_PARSER_DEFINE_SECTION_KEYWORD_INVALID = \
_("The Define section contains an invalid keyword: \"%s\"."
"It must be corrected before continuing.")
ERR_INF_PARSER_FILE_MISS_DEFINE = \
_("The following file listed in the module "
"directory is not listed in the INF: %s")
ERR_INF_PARSER_VERSION_NUMBER_DEPRICATED = \
_("VERSION_NUMBER deprecated. "
"The INF file %s should be modified to use the VERSION_STRING instead.")
ERR_INF_PARSER_VER_EXIST_BOTH_NUM_STR = \
_("The INF file %s defines both VERSION_NUMBER and VERSION_STRING, "
"using VERSION_STRING")
ERR_INF_PARSER_NOT_SUPPORT_EDKI_INF = _("EDKI INF is not supported")
ERR_INF_PARSER_EDKI_COMMENT_IN_EDKII = _("The EDKI style comment is not supported in EDKII modules")
ERR_INF_PARSER_FEATUREPCD_USAGE_INVALID = _("The usage for FeaturePcd can only"
" be type of \"CONSUMES\".")
ERR_INF_PARSER_DEFINE_ITEM_NO_NAME = _("No name specified")
ERR_INF_PARSER_DEFINE_ITEM_NO_VALUE = _("No value specified")
ERR_INF_PARSER_MODULETYPE_INVALID = _("Drivers and applications are not allowed to have a MODULE_TYPE of \"BASE\". "
"Only libraries are permitted to a have a MODULE_TYPE of \"BASE\".")
ERR_INF_GET_PKG_DEPENDENCY_FAIL = _("Failed to get PackageDependencies information from file %s")
ERR_INF_NO_PKG_DEPENDENCY_INFO = _("There are no packages defined that use the AsBuilt PCD information.")
#
# Item duplicate
#
ERR_INF_PARSER_ITEM_DUPLICATE_IN_DEC = \
_('"%s" is redefined in its dependent DEC files')
ERR_INF_PARSER_ITEM_DUPLICATE = _("%s define duplicated! "
"It must be corrected before continuing.")
ERR_INF_PARSER_ITEM_DUPLICATE_COMMON = _("%s define duplicated! An item listed "
    "in an architectural section must not be listed in the common architectural "
    "section. It must be corrected before continuing.")
ERR_INF_PARSER_UE_SECTION_DUPLICATE_ERROR = \
_("%s define duplicated! Each UserExtensions section header must have a "
"unique set of UserId, IdString and Arch values. "
"It must be corrected before continuing.")
ERR_INF_PARSER_DEFINE_LIB_NAME_INVALID = \
_("The name 'NULL' for LibraryClass is a reserved word."
"Please don't use it.")
ERR_GLOBAL_MARCO_INVALID = \
_("Using global MACRO in INF/DEC is not permitted: %s . "
"It must be corrected before continuing.")
ERR_MARCO_DEFINITION_MISS_ERROR = \
_("MACRO expand incorrectly, can not find the MACRO definition. "
"It must be corrected before continuing.")
#
# AsBuilt related
#
ERR_LIB_CONTATIN_ASBUILD_AND_COMMON = _("A binary INF file should not contain both AsBuilt LIB_INSTANCES information "
"and a common library entry.")
ERR_LIB_INSTANCE_MISS_GUID = _("Could not get FILE_GUID definition from instance INF file.")
ERR_BO_CONTATIN_ASBUILD_AND_COMMON = _("A binary INF file should contain either AsBuilt information "
"or a common build option entry, not both.")
ERR_ASBUILD_PCD_SECTION_TYPE = _("The AsBuilt INF file contains a PCD section type that is not permitted: %s.")
ERR_ASBUILD_PATCHPCD_FORMAT_INVALID = _("The AsBuilt PatchPcd entry must contain 3 elements: PcdName|Value|Offset")
ERR_ASBUILD_PCDEX_FORMAT_INVALID = _("The AsBuilt PcdEx entry must contain one element: PcdName")
ERR_ASBUILD_PCD_VALUE_INVALID = \
_("The AsBuilt PCD value %s is incorrect or not align with its datum type %s. "
"It must be corrected before continuing.")
ERR_ASBUILD_PCD_TOKENSPACE_GUID_VALUE_MISS = _("Package file value could not be retrieved for %s.")
ERR_ASBUILD_PCD_DECLARITION_MISS = _("PCD Declaration in DEC files could not be found for: %s.")
ERR_ASBUILD_PCD_OFFSET_FORMAT_INVALID = _("PCD offset format invalid, a decimal number (0-4294967295) or "
                                          "a hex number in UINT32 range is allowed: %s.")
#
# XML parser related strings
#
ERR_XML_PARSER_REQUIRED_ITEM_MISSING = \
_("The XML section/attribute '%s' is required under %s, it can't be missing or empty")
ERR_XML_INVALID_VARIABLENAME = \
_("The VariableName of the GUID in the XML tree does not conform to the packaging specification. "
"Only a Hex Byte Array of UCS-2 format or L\"string\" is allowed): %s %s %s")
ERR_XML_INVALID_LIB_SUPMODLIST = _("The LIBRARY_CLASS entry %s must have the list appended using the format as: \n"
"BASE SEC PEI_CORE PEIM DXE_CORE DXE_DRIVER SMM_CORE DXE_SMM_DRIVER DXE_RUNTIME_DRIVER "
"DXE_SAL_DRIVER UEFI_DRIVER UEFI_APPLICATION USER_DEFINED\n Current is %s.")
ERR_XML_INVALID_EXTERN_SUPARCHLIST = \
_("There is a mismatch of SupArchList %s between the EntryPoint, UnloadImage, Constructor, "
"and Destructor elements in the ModuleSurfaceArea.ModuleProperties: SupArchList: %s. ")
ERR_XML_INVALID_EXTERN_SUPMODLIST = _("The SupModList attribute of the CONSTRUCTOR or DESTRUCTOR element: %s does not "
"match the Supported Module Types listed after LIBRARY_CLASS = <Keyword> | %s")
ERR_XML_INVALID_EXTERN_SUPMODLIST_NOT_LIB = _("The module is not a library module. "
"The MODULE_TYPE : %s listed in the ModuleSurfaceArea.Header "
"must match the SupModList attribute %s")
ERR_XML_INVALID_BINARY_FILE_TYPE = _("Invalid binary file type %s.")
#
# Verbosity related strings.
#
MSG_DISTRIBUTION_PACKAGE_FILE_EXISTS = _(
"The distribution package file %s already exists.\nPress Y to override it."
" To exit the application, press any other key.")
MSG_CHECK_MODULE_EXIST = _(
"\nChecking to see if module exists in workspace started ...")
MSG_CHECK_MODULE_EXIST_FINISH = \
_("Checking to see if module exists in workspace ... Done.")
MSG_CHECK_MODULE_DEPEX_START = _(
"\nChecking to see if module depex met by workspace started ...")
MSG_CHECK_MODULE_DEPEX_FINISH = _(
"Checking to see if module depex met by workspace ... Done.")
MSG_CHECK_PACKAGE_START = _(
"\nChecking to see if package exists in workspace started ...")
MSG_CHECK_PACKAGE_FINISH = _(
"Checking to see if package exists in workspace ... Done.")
MSG_CHECK_DP_START = \
_("\nChecking to see if DP exists in workspace ... Done.")
MSG_CHECK_DP_FINISH = _("Check DP exists in workspace ... Done.")
MSG_MODULE_DEPEND_ON = _("Module %s depends on Package %s")
MSG_INIT_IPI_START = _("\nInitialize IPI database started ...")
MSG_INIT_IPI_FINISH = _("Initialize IPI database ... Done.")
MSG_GET_DP_INSTALL_LIST = _(
"\nGetting list of DP install information started ...")
MSG_GET_DP_INSTALL_INFO_START = _(
"\nGetting list of DP install information started ...")
MSG_GET_DP_INSTALL_INFO_FINISH = _("Getting DP install information ... Done.")
MSG_UZIP_PARSE_XML = _(
"Unzipping and parsing distribution package XML file ... ")
MSG_INSTALL_PACKAGE = _("Installing package ... %s")
MSG_INSTALL_MODULE = _("Installing module ... %s")
MSG_NEW_FILE_NAME_FOR_DIST = _(
"Provide new filename for distribution file to be saved:\n")
MSG_UPDATE_PACKAGE_DATABASE = _("Update Distribution Package Database ...")
MSG_PYTHON_ON = _("(Python %s on %s) ")
MSG_EDKII_MAIL_ADDR = '[email protected]'
MSG_SEARCH_FOR_HELP = _(
"\n(Please send email to %s for\n"
" help, attach the following call stack trace.)\n")
MSG_REMOVE_TEMP_FILE_STARTED = _("Removing temp files started ... ")
MSG_REMOVE_TEMP_FILE_DONE = _("Removing temp files ... Done.")
MSG_FINISH = _("Successfully Done.")
MSG_COMPRESS_DISTRIBUTION_PKG = _("Compressing Distribution Package File ...")
MSG_CONFIRM_REMOVE = _(
"Some packages or modules depend on this distribution package.\n"
"Do you really want to remove it?")
MSG_CONFIRM_REMOVE2 = _(
"This file has been modified: %s. Do you want to remove it?"
"Press Y to remove or other key to keep it")
MSG_CONFIRM_REMOVE3 = _(
"This is a newly created file: %s. Are you sure you want to remove it? "
"Press Y to remove or any other key to keep it")
MSG_USER_DELETE_OP = _(
"Press Y to delete all files or press any other key to quit:")
MSG_REMOVE_FILE = _("Removing file: %s ...")
MSG_INITIALIZE_ECC_STARTED = _("\nInitialize ECC database started ...")
MSG_INITIALIZE_ECC_DONE = _("Initialize ECC database ... Done.")
MSG_DEFINE_STATEMENT_FOUND = _("DEFINE statement '%s' found in section %s")
MSG_PARSING = _("Parsing %s ...")
MSG_REPKG_CONFLICT = \
_("Repackaging is not allowed on this file: %s. "
"It was installed from distribution %s(Guid %s Version %s).")
MSG_INVALID_MODULE_INTRODUCED = _("Some modules are not valid after removal.")
MSG_CHECK_LOG_FILE = _("Please check log file %s for full list")
MSG_NEW_FILE_NAME = _(
"Provide new filename:\n")
MSG_RELATIVE_PATH_ONLY = _("Please specify a relative path, full path is not allowed: %s")
MSG_NEW_PKG_PATH = _(
"Select package location. To quit with no input, press [Enter].")
MSG_CHECK_DP_FOR_REPLACE = _("Verifying the dependency rule for replacement of distributions:\n %s replaces %s")
MSG_CHECK_DP_FOR_INSTALL = _("Verifying the dependency rule for installation of distribution:\n %s")
MSG_REPLACE_ALREADY_INSTALLED_DP = _("Distribution with the same GUID/Version is already installed, "
"replace would result in two instances, which is not allowed")
MSG_RECOVER_START = _('An error was detected, recovery started ...')
MSG_RECOVER_DONE = _('Recovery completed.')
MSG_RECOVER_FAIL = _('Recovery failed.')
#
# Error related strings.
#
ERR_DEPENDENCY_NOT_MATCH = _(
"Module %s's dependency on package %s (GUID %s Version %s) "
"cannot be satisfied")
ERR_MODULE_NOT_INSTALLED = _(
"This module is not installed in the workspace: %s\n")
ERR_DIR_ALREADY_EXIST = _(
"This directory already exists: %s.\n"
"Select another location. Press [Enter] with no input to quit:")
ERR_USER_INTERRUPT = _("The user has paused the application")
ERR_DIST_FILE_TOOMANY = _(
"Only one .content and one .pkg file in ZIP file are allowed.")
ERR_DIST_FILE_TOOFEW = _(
"Must have one .content and one .pkg file in the ZIP file.")
ERR_FILE_ALREADY_EXIST = _(
"This file already exists: %s.\n"
"Select another path to continue. To quit with no input press [Enter]:")
ERR_SPECIFY_PACKAGE = _(
"One distribution package must be specified")
ERR_FILE_BROKEN = _(
"This file is invalid in the distribution package: %s")
ERR_PACKAGE_NOT_MATCH_DEPENDENCY = _(
"This distribution package does not meet the dependency requirements")
ERR_UNKNOWN_FATAL_INSTALL_ERR = \
_("Unknown unrecoverable error when installing: %s")
ERR_UNKNOWN_FATAL_REPLACE_ERR = \
_("Unknown unrecoverable error during replacement of distributions: %s replaces %s")
ERR_OPTION_NOT_FOUND = _("Options not found")
ERR_INVALID_PACKAGE_NAME = _("Incorrect package name: %s. ")
ERR_INVALID_PACKAGE_PATH = \
_("Incorrect package path: %s. The path must be a relative path.")
ERR_NOT_FOUND = _("This was not found: %s")
ERR_INVALID_MODULE_NAME = _("This is not a valid module name: %s")
ERR_INVALID_METAFILE_PATH = _('This file must be in sub-directory of WORKSPACE: %s.')
ERR_INVALID_MODULE_PATH = \
_("Incorrect module path: %s. The path must be a relative path.")
ERR_UNKNOWN_FATAL_CREATING_ERR = _("Unknown error when creating: %s")
ERR_PACKAGE_NOT_INSTALLED = _(
"This distribution package not installed: %s")
ERR_DISTRIBUTION_NOT_INSTALLED = _(
"The distribution package is not installed.")
ERR_UNKNOWN_FATAL_REMOVING_ERR = _("Unknown error when removing package")
ERR_UNKNOWN_FATAL_INVENTORYWS_ERR = _("Unknown error when inventorying WORKSPACE")
ERR_NOT_CONFIGURE_WORKSPACE_ENV = _(
"The WORKSPACE environment variable must be configured.")
ERR_NO_TEMPLATE_FILE = _("This package information data file is not found: %s")
ERR_DEBUG_LEVEL = _(
"Not supported debug level. Use default level instead.")
ERR_REQUIRE_T_OPTION = _(
"Option -t is required during distribution creation.")
ERR_REQUIRE_O_OPTION = _(
"Option -o is required during distribution replacement.")
ERR_REQUIRE_U_OPTION = _(
"Option -u is required during distribution replacement.")
ERR_REQUIRE_I_C_R_OPTION = _(
"Options -i, -c and -r are mutually exclusive.")
ERR_I_C_EXCLUSIVE = \
_("Option -c and -i are mutually exclusive.")
ERR_I_R_EXCLUSIVE = \
_("Option -i and -r are mutually exclusive.")
ERR_C_R_EXCLUSIVE = \
_("Option -c and -r are mutually exclusive.")
ERR_U_ICR_EXCLUSIVE = \
_("Option -u and -c/-i/-r are mutually exclusive.")
ERR_L_OA_EXCLUSIVE = \
_("Option -l and -c/-i/-r/-u are mutually exclusive.")
ERR_FAILED_LOAD = _("Failed to load %s\n\t%s")
ERR_PLACEHOLDER_DIFFERENT_REPEAT = _(
"${%s} has different repeat time from others.")
ERR_KEY_NOTALLOWED = _("This keyword is not allowed: %s")
ERR_NOT_FOUND_ENVIRONMENT = _("Environment variable not found")
ERR_WORKSPACE_NOTEXIST = _("WORKSPACE doesn't exist")
ERR_SPACE_NOTALLOWED = _(
"Whitespace characters are not allowed in the WORKSPACE path. ")
ERR_MACRONAME_NOGIVEN = _("No MACRO name given")
ERR_MACROVALUE_NOGIVEN = _("No MACRO value given")
ERR_MACRONAME_INVALID = _("Incorrect MACRO name: %s")
ERR_MACROVALUE_INVALID = _("Incorrect MACRO value: %s")
ERR_NAME_ONLY_DEFINE = _(
"This variable can only be defined via environment variable: %s")
ERR_EDK_GLOBAL_SAMENAME = _(
"EDK_GLOBAL defined a macro with the same name as one defined by 'DEFINE'")
ERR_SECTIONNAME_INVALID = _(
"An incorrect section name was found: %s. 'The correct file is '%s' .")
ERR_CHECKFILE_NOTFOUND = _(
"Can't find file '%s' defined in section '%s'")
ERR_INVALID_NOTFOUND = _(
"Incorrect statement '%s' was found in section '%s'")
ERR_TEMPLATE_NOTFOUND = _("This package information data file is not found: %s")
ERR_SECTION_NAME_INVALID = _('Incorrect section name: %s')
ERR_SECTION_REDEFINE = _(
"This section already defined: %s.")
ERR_SECTION_NAME_NONE = \
_('The section needs to be specified first.')
ERR_KEYWORD_INVALID = _('Invalid keyword: %s')
ERR_VALUE_INVALID = _("Invalid \"%s\" value in section [%s].")
ERR_FILELIST_LOCATION = _(
'The directory "%s" must contain this file: "%s".')
ERR_KEYWORD_REDEFINE = _(
"Keyword in this section can only be used once: %s.")
ERR_FILELIST_EXIST = _(
'This file does not exist: %s.')
ERR_COPYRIGHT_CONTENT = _(
"The copyright content must contain the word \"Copyright\" (case insensitive).")
ERR_WRONG_FILELIST_FORMAT = \
    _('File list format is incorrect. '
'The correct format is: filename|key=value[|key=value]')
ERR_FILELIST_ATTR = _(
"The value of attribute \"%s\" includes illegal character.")
ERR_UNKNOWN_FILELIST_ATTR = _(
'Unknown attribute name: %s.')
ERR_EMPTY_VALUE = _("Empty value is not allowed")
ERR_KEYWORD_MANDATORY = _('This keyword is mandatory: %s')
ERR_BOOLEAN_VALUE = _(
'Value of key [%s] must be true or false, current: [%s]')
ERR_GUID_VALUE = _(
'GUID must have the format of 8-4-4-4-12 with HEX value. '
'Current value: [%s]')
ERR_VERSION_VALUE = _(
'The value of key [%s] must be a decimal number. Found: [%s]')
ERR_VERSION_XMLSPEC = _(
'XmlSpecification value must be 1.1, current: %s.')
ERR_INVALID_GUID = _("Incorrect GUID value string: %s")
ERR_FILE_NOT_FOUND = \
_("File or directory not found in workspace")
ERR_FILE_OPEN_FAILURE = _("Could not open file")
ERR_FILE_WRITE_FAILURE = _("Could not write file.")
ERR_FILE_PARSE_FAILURE = _("Could not parse file")
ERR_FILE_READ_FAILURE = _("Could not read file")
ERR_FILE_CREATE_FAILURE = _("Could not create file")
ERR_FILE_CHECKSUM_FAILURE = _("Checksum of file is incorrect")
ERR_FILE_COMPRESS_FAILURE = _("File compression did not complete correctly")
ERR_FILE_DECOMPRESS_FAILURE = \
_("File decompression did not complete correctly")
ERR_FILE_MOVE_FAILURE = _("Move file did not complete successfully")
ERR_FILE_DELETE_FAILURE = _("File could not be deleted")
ERR_FILE_COPY_FAILURE = _("File did not copy correctly")
ERR_FILE_POSITIONING_FAILURE = _("Could not find file seek position")
ERR_FILE_TYPE_MISMATCH = _("Incorrect file type")
ERR_FILE_CASE_MISMATCH = _("File name case mismatch")
ERR_FILE_DUPLICATED = _("Duplicate file found")
ERR_FILE_UNKNOWN_ERROR = _("Unknown error encountered on file")
ERR_FILE_NAME_INVALIDE = _("This file name is invalid, it must not be an absolute path or "
"contain a period \".\" or \"..\": %s.")
ERR_OPTION_UNKNOWN = _("Unknown option")
ERR_OPTION_MISSING = _("Missing option")
ERR_OPTION_CONFLICT = _("Options conflict")
ERR_OPTION_VALUE_INVALID = _("Invalid option value")
ERR_OPTION_DEPRECATED = _("Deprecated option")
ERR_OPTION_NOT_SUPPORTED = _("Unsupported option")
ERR_OPTION_UNKNOWN_ERROR = _("Unknown error when processing options")
ERR_PARAMETER_INVALID = _("Invalid parameter")
ERR_PARAMETER_MISSING = _("Missing parameter")
ERR_PARAMETER_UNKNOWN_ERROR = _("Unknown error in parameters")
ERR_FORMAT_INVALID = _("Invalid syntax/format")
ERR_FORMAT_NOT_SUPPORTED = _("Syntax/format not supported")
ERR_FORMAT_UNKNOWN = _("Unknown format")
ERR_FORMAT_UNKNOWN_ERROR = _("Unknown error in syntax/format ")
ERR_RESOURCE_NOT_AVAILABLE = _("Not available")
ERR_RESOURCE_ALLOCATE_FAILURE = _("A resource allocation has failed")
ERR_RESOURCE_FULL = _("Full")
ERR_RESOURCE_OVERFLOW = _("Overflow")
ERR_RESOURCE_UNDERRUN = _("Underrun")
ERR_RESOURCE_UNKNOWN_ERROR = _("Unknown error")
ERR_ATTRIBUTE_NOT_AVAILABLE = _("Not available")
ERR_ATTRIBUTE_RETRIEVE_FAILURE = _("Unable to retrieve")
ERR_ATTRIBUTE_SET_FAILURE = _("Unable to set")
ERR_ATTRIBUTE_UPDATE_FAILURE = _("Unable to update")
ERR_ATTRIBUTE_ACCESS_DENIED = _("Access denied")
ERR_ATTRIBUTE_UNKNOWN_ERROR = _("Unknown error when accessing")
ERR_COMMAND_FAILURE = _("Unable to execute command")
ERR_IO_NOT_READY = _("Not ready")
ERR_IO_BUSY = _("Busy")
ERR_IO_TIMEOUT = _("Timeout")
ERR_IO_UNKNOWN_ERROR = _("Unknown error in IO operation")
ERR_UNKNOWN_ERROR = _("Unknown error")
ERR_UPT_ALREADY_INSTALLED_ERROR = _("Already installed")
ERR_UPT_ENVIRON_MISSING_ERROR = _("Environ missing")
ERR_UPT_REPKG_ERROR = _("File not allowed for RePackage")
ERR_UPT_DB_UPDATE_ERROR = _("Update database did not complete successfully")
ERR_UPT_INI_PARSE_ERROR = _("INI file parse error")
ERR_COPYRIGHT_MISSING = \
_("Header comment section must have copyright information")
ERR_LICENSE_MISSING = \
_("Header comment section must have license information")
ERR_INVALID_BINARYHEADER_FORMAT = \
_("Binary Header comment section must have abstract,description,copyright,license information")
ERR_MULTIPLE_BINARYHEADER_EXIST = \
_("the inf file at most support one BinaryHeader at the fileheader section.")
ERR_INVALID_COMMENT_FORMAT = _("Comment must start with #")
ERR_USER_ABORT = _("User has stopped the application")
ERR_DIST_EXT_ERROR = \
_("Distribution file extension should be '.dist'. Current given: '%s'.")
ERR_DIST_FILENAME_ONLY_FOR_REMOVE = \
_("Only distribution filename without path allowed during remove. Current given: '%s'.")
ERR_NOT_STANDALONE_MODULE_ERROR = \
_("Module %s is not a standalone module (found in Package %s)")
ERR_UPT_ALREADY_RUNNING_ERROR = \
_("UPT is already running, only one instance is allowed")
ERR_MUL_DEC_ERROR = _("Multiple DEC files found within one package directory tree %s: %s, %s")
ERR_INSTALL_FILE_FROM_EMPTY_CONTENT = _("Error: the file to be installed is not found in the content file: %s")
ERR_INSTALL_FILE_DEC_FILE_ERROR = _("Could not obtain the TokenSpaceGuidCName and the PcdCName from the DEC files "
"that the package depends on for this pcd entry: TokenValue: %s Token: %s")
ERR_NOT_SUPPORTED_SA_MODULE = _("Stand-alone module distribution does not allow EDK 1 INF")
ERR_INSTALL_DIST_NOT_FOUND = \
_("Distribution file to be installed is not found in current working directory or workspace: %s")
ERR_REPLACE_DIST_NOT_FOUND = \
_("Distribution file for replace function was not found in the current working directory or workspace: %s")
ERR_DIST_FILENAME_ONLY_FOR_REPLACE_ORIG = \
_("Only a distribution file name without a path is allowed for "
"the distribution to be replaced during replace. Current given: '%s'.")
ERR_UNIPARSE_DBLQUOTE_UNMATCHED = \
_("Only Language entry can contain a couple of matched quote in one line")
ERR_UNIPARSE_NO_SECTION_EXIST = _("No PackageDef or ModuleDef section exists in the UNI file.")
ERR_UNIPARSE_STRNAME_FORMAT_ERROR = _("The String Token Name %s must start with \"STR_\"")
ERR_UNIPARSE_SEP_LANGENTRY_LINE = _("Each <LangEntry> should be on a separate line: %s.")
ERR_UNIPARSE_MULTI_ENTRY_EXIST = \
_("There are same entries : %s in the UNI file, every kind of entry should be only one.")
ERR_UNIPARSE_ENTRY_ORDER_WRONG = \
_("The string entry order in UNI file should be <AbstractStrings>, <DescriptionStrings>, \
<BinaryAbstractStrings>, <BinaryDescriptionStrings>.")
ERR_UNIPARSE_STRTOKEN_FORMAT_ERROR = _("The String Token Type %s must be one of '_PROMPT', '_HELP' or '_ERR_'.")
ERR_UNIPARSE_LINEFEED_UNDER_EXIST = _("Line feed should not exist below this line: %s.")
ERR_UNIPARSE_LINEFEED_UP_EXIST = _("Line feed should not exist above this line: %s.")
ERR_UNI_MISS_STRING_ENTRY = _("A string entry is missing in this entry: %s.")
ERR_UNI_MISS_LANGENTRY = _("A language entry is missing in this entry: %s.")
ERR_BINARY_HEADER_ORDER = _("Binary header must follow the file header.")
ERR_NO_SOURCE_HEADER = _("File header statement \"## @file\" must be the first statement in the file.")
ERR_UNI_FILE_SUFFIX_WRONG = _("The UNI file must have an extension of '.uni', '.UNI' or '.Uni'")
ERR_UNI_FILE_NAME_INVALID = _("The use of '..', '../' and './' in the UNI file is prohibited.")
ERR_UNI_SUBGUID_VALUE_DEFINE_DEC_NOT_FOUND = _("There is no DEC file that defines the GUID value for \
this GUID CName: '%s'.")
#
# Expression error message
#
ERR_EXPR_RIGHT_PAREN = \
_('Missing ")" in expression "%s".')
ERR_EXPR_FACTOR = \
_('"%s" is expected to be HEX, integer, macro, quoted string or PcdName in '
'expression "%s".')
ERR_EXPR_STRING_ITEM = \
_('"%s" is expected to be HEX, integer, macro, quoted string or PcdName in '
'expression [%s].')
ERR_EXPR_EQUALITY = \
_('"%s" is expected to be ==, EQ, != or NE in expression "%s".')
ERR_EXPR_BOOLEAN = \
_('The string "%s" in expression "%s" can not be recognized as a part of the logical expression.')
ERR_EXPR_EMPTY = _('Boolean value cannot be empty.')
ERR_EXPRESS_EMPTY = _('Expression can not be empty.')
ERR_EXPR_LOGICAL = \
_('The following is not a valid logical expression: "%s".')
ERR_EXPR_OR = _('The expression: "%s" must be encapsulated in open "(" and close ")" '
'parenthesis when using | or ||.')
ERR_EXPR_RANGE = \
_('The following is not a valid range expression: "%s".')
ERR_EXPR_RANGE_FACTOR = \
_('"%s" is expected to be HEX, integer in valid range expression "%s".')
ERR_EXPR_RANGE_DOUBLE_PAREN_NESTED = \
    _('Nested double parentheses are not allowed in a valid range expression: "%s".')
ERR_EXPR_RANGE_EMPTY = _('Valid range can not be empty.')
ERR_EXPR_LIST_EMPTY = _('Valid list can not be empty.')
ERR_PAREN_NOT_USED = _('Parenthesis must be used on both sides of "OR", "AND" in valid range : %s.')
ERR_EXPR_LIST = \
_('The following is not a valid list expression: "%s".')
# DEC parser error message
#
ERR_DECPARSE_STATEMENT_EMPTY = \
_('Must have at least one statement in section %s.')
ERR_DECPARSE_DEFINE_DEFINED = \
_('%s already defined in define section.')
ERR_DECPARSE_DEFINE_SECNAME = \
    _('No arch or other qualifiers may follow a define section name.')
ERR_DECPARSE_DEFINE_MULTISEC = \
_('The DEC file does not allow multiple define sections.')
ERR_DECPARSE_DEFINE_REQUIRED = \
_("Field [%s] is required in define section.")
ERR_DECPARSE_DEFINE_FORMAT = \
_("Wrong define section format, must be KEY = Value.")
ERR_DECPARSE_DEFINE_UNKNOWKEY = \
_("Unknown key [%s] in define section.")
ERR_DECPARSE_DEFINE_SPEC = \
_("Specification value must be HEX numbers or decimal numbers.")
ERR_DECPARSE_DEFINE_PKGNAME = \
_("Package name must be AlphaNumeric characters.")
ERR_DECPARSE_DEFINE_PKGGUID = \
_("GUID format error, must be HEX value with form 8-4-4-4-12.")
ERR_DECPARSE_DEFINE_PKGVERSION = \
_("Version number must be decimal number.")
ERR_DECPARSE_DEFINE_PKGVUNI = \
_("UNI file name format error or file does not exist.")
ERR_DECPARSE_INCLUDE = \
_("Incorrect path: [%s].")
ERR_DECPARSE_LIBCLASS_SPLIT = \
_("Library class format error, must be Libraryclass|Headerpath.")
ERR_DECPARSE_LIBCLASS_EMPTY = \
_("Class name or file name must not be empty.")
ERR_DECPARSE_LIBCLASS_LIB = \
_("Class name format error, must start with upper case letter followed with "
"zero or more alphanumeric characters.")
ERR_DECPARSE_LIBCLASS_PATH_EXT = _("File name must end with .h.")
ERR_DECPARSE_LIBCLASS_PATH_DOT = _("Path must not include '..'.")
ERR_DECPARSE_LIBCLASS_PATH_EXIST = _("File name [%s] does not exist.")
ERR_DECPARSE_PCD_CVAR_GUID = \
_("TokenSpaceGuidCName must be valid C variable format.")
ERR_DECPARSE_PCD_SPLIT = \
_("Incorrect PcdName. The format must be TokenSpaceGuidCName.PcdCName"
"|PcdData|PcdType|Token.")
ERR_DECPARSE_PCD_NAME = \
_("Incorrect PCD name. The correct format must be "
"<TokenSpaceGuidCName>.<PcdCName>.")
ERR_DECPARSE_PCD_CVAR_PCDCNAME = \
_("PcdCName must be valid C variable format.")
ERR_DECPARSE_PCD_TYPE = \
_('Incorrect PCD data type. A PCD data type must be one of '
'"UINT8", "UINT16", "UINT32", "UINT64", "VOID*", "BOOLEAN".')
ERR_DECPARSE_PCD_VOID = \
_("Incorrect value [%s] of type [%s]. Value must be printable and in the "
"form of{...} for array, or ""..."" for string, or L""..."""
"for unicode string.")
ERR_DECPARSE_PCD_VALUE_EMPTY = \
_("Pcd value can not be empty.")
ERR_DECPARSE_PCD_BOOL = \
_("Invalid value [%s] of type [%s]; must be expression, TRUE, FALSE, 0 or 1.")
ERR_DECPARSE_PCD_INT = _("Incorrect value [%s] of type [%s]."\
" Value must be a hexadecimal, decimal or octal in C language format.")
ERR_DECPARSE_PCD_INT_NEGTIVE = _("Incorrect value [%s] of type [%s];"
" must not be signed number.")
ERR_DECPARSE_PCD_INT_EXCEED = _("Incorrect value [%s] of type [%s]; "
"the number is too long for this type.")
ERR_DECPARSE_PCD_FEATUREFLAG = \
_("PcdFeatureFlag only allow BOOLEAN type.")
ERR_DECPARSE_PCD_TOKEN = \
_("An incorrect PCD token found: [%s]. "
"It must start with 0x followed by 1 - 8 hexadecimal. ")
ERR_DECPARSE_PCD_TOKEN_INT = _("Incorrect token number [%s]. "
"This token number exceeds the maximal value of unsigned 32.")
ERR_DECPARSE_PCD_TOKEN_UNIQUE = _("Token number must be unique to the token space: %s.")
ERR_DECPARSE_CGUID = \
_("No GUID name or value specified, must be <CName> = <GuidValueInCFormat>.")
ERR_DECPARSE_CGUID_NAME = \
_("No GUID name specified, must be <CName> = <GuidValueInCFormat>.")
ERR_DECPARSE_CGUID_GUID = \
_("No GUID value specified, must be <CName> = <GuidValueInCFormat>.")
ERR_DECPARSE_CGUID_GUIDFORMAT = \
_("Incorrect GUID value format, must be <GuidValueInCFormat:"
"{8,4,4,{2,2,2,2,2,2,2,2}}>.")
ERR_DECPARSE_CGUID_NOT_FOUND = _("Unable to find the GUID value of this GUID CName : '%s'.")
ERR_DECPARSE_FILEOPEN = _("Unable to open: [%s].")
ERR_DECPARSE_SECTION_EMPTY = _("Empty sections are not allowed.")
ERR_DECPARSE_SECTION_UE = _("Incorrect UserExtensions format. "
"Must be UserExtenxions.UserId.IdString[.Arch]+.")
ERR_DECPARSE_SECTION_UE_USERID = _("Invalid UserId, must be underscore "
"or alphanumeric characters.")
ERR_DECPARSE_SECTION_UE_IDSTRING = \
_("Incorrect IdString, must be \" ... \".")
ERR_DECPARSE_ARCH = \
_("Unknown arch, must be 'common' or start with upper case letter followed by"
" zero or more upper case letters and numbers.")
ERR_DECPARSE_SECTION_COMMA = _("Section cannot end with comma.")
ERR_DECPARSE_SECTION_COMMON = \
_("'COMMON' must not be used with specific ARCHs in the same section.")
ERR_DECPARSE_SECTION_IDENTIFY = \
_("Section header must start with and end with brackets[].")
ERR_DECPARSE_SECTION_SUBEMPTY = \
_("Missing a sub-section name in section: [%s]. "
"All sub-sections need to have names. ")
ERR_DECPARSE_SECTION_SUBTOOMANY = _("Too many DOT splits in [%s].")
ERR_DECPARSE_SECTION_UNKNOW = _("Section name [%s] unknown.")
ERR_DECPARSE_SECTION_FEATUREFLAG = \
_("[%s] must not be in the same section as other types of PCD.")
ERR_DECPARSE_MACRO_PAIR = _("No macro name/value given.")
ERR_DECPARSE_MACRO_NAME = _("No macro name given.")
ERR_DECPARSE_MACRO_NAME_UPPER = \
_("Macro name must start with upper case letter followed "
"by zero or more upper case letters or numbers. Current macro name is: [%s].")
ERR_DECPARSE_SECTION_NAME = \
_('Cannot mix different section names %s.')
ERR_DECPARSE_BACKSLASH = \
_('Backslash must be the last character on a line and '
'preceded by a space character.')
ERR_DECPARSE_BACKSLASH_EMPTY = \
    _('An empty line after a line ending with a backslash is not allowed.')
ERR_DECPARSE_REDEFINE = _(
"\"%s\" already defined in line %d.")
ERR_DECPARSE_MACRO_RESOLVE = _("Macro %s in %s cannot be resolved.")
ERR_DECPARSE_UE_DUPLICATE = \
_("Duplicated UserExtensions header found.")
ERR_DECPARSE_PCDERRORMSG_MISS_VALUE_SPLIT = \
_("Missing '|' between Pcd's error code and Pcd's error message.")
ERR_DECPARSE_PCD_MISS_ERRORMSG = \
_("Missing Pcd's error message.")
ERR_DECPARSE_PCD_UNMATCHED_ERRORCODE = \
_("There is no error message matched with this Pcd error code : %s in both DEC and UNI file.")
ERR_DECPARSE_PCD_NODEFINED = _("The PCD : %s used in the Expression is undefined.")
#
# Used to print the current line content which cause error raise.
# Be attached to the end of every error message above.
#
ERR_DECPARSE_LINE = _(" Parsing line: \"%s\".")
#
# Warning related strings.
#
WRN_PACKAGE_EXISTED = _(
"A package with this GUID and Version already exists: "
"GUID %s, Version %s.")
WRN_MODULE_EXISTED = _("This module already exists: %s")
WRN_FILE_EXISTED = _("This file already exists: %s")
WRN_FILE_NOT_OVERWRITTEN = \
_("This file already exist and cannot be overwritten: %s")
WRN_DIST_PKG_INSTALLED = _("This distribution package %s has previously been installed.")
WRN_DIST_NOT_FOUND = _(
"Distribution is not found at location %s")
WRN_MULTI_PCD_RANGES = _(
"A PCD can only have one type of @ValidRange, @ValidList, and @Expression comment")
WRN_MULTI_PCD_VALIDVALUE = _(
"A PCD can only have one of @ValidList comment")
WRN_MULTI_PCD_PROMPT = _(
"A PCD can only have one of @Prompt comment")
WRN_MISSING_USAGE = _("Missing usage")
WRN_INVALID_GUID_TYPE = _("This is an incorrect Guid type: %s")
WRN_MISSING_GUID_TYPE = _("Missing Guid Type")
WRN_INVALID_USAGE = _("This is an incorrect Usage: %s")
WRN_INF_PARSER_MODULE_INVALID_HOB_TYPE = \
_("This is an incorrect HOB type: %s")
WRN_INF_PARSER_MODULE_INVALID_EVENT_TYPE = \
_("This is an incorrect EVENT type: %s")
WRN_INF_PARSER_MODULE_INVALID_BOOTMODE_TYPE = \
_("This is an incorrect BOOTMODE type: %s")
WRN_INVALID_MODULE_TYPE = \
_("This is an incorrect Module type: %s")
WRN_MODULE_PARSE_FAILED = \
_("Parsing of this module did not complete correctly: %s.")
WRN_EDK1_INF_FOUND = \
_("EDK 1 module file found: %s")
WRN_INVALID_COPYRIGHT = \
_("Copyright information is not right")
WARN_SPECIAL_SECTION_LOCATION_WRONG = _("Warning. A special section should be "
"at the end of a file or at the end of a section.")
WARN_INSTALLED_PACKAGE_NOT_FOUND = \
_("File not found. The DEC file for a package cannot be found in GUID/Version/Install path: %s %s %s")
WARN_CUSTOMPATH_OVERRIDE_USEGUIDEDPATH = \
_("option selection of --custom-path will override the option --use-guided-paths")
#
# Help related strings.
#
HLP_PRINT_DEBUG_INFO = _(
"Print DEBUG statements, where DEBUG_LEVEL is 0-9")
HLP_PRINT_INFORMATIONAL_STATEMENT = _("Print informational statements")
HLP_RETURN_NO_DISPLAY = _(
"Returns only the exit code, informational and error messages are"
" not displayed")
HLP_RETURN_AND_DISPLAY = _(
"Returns the exit code and displays error messages only")
HLP_SPECIFY_PACKAGE_NAME_INSTALL = _(
"Specify the UEFI Distribution Package filename to install")
HLP_SPECIFY_PACKAGE_NAME_CREATE = _(
"Specify the UEFI Distribution Package filename to create")
HLP_SPECIFY_PACKAGE_NAME_REMOVE = _(
"Specify the UEFI Distribution Package filename to remove")
HLP_SPECIFY_TEMPLATE_NAME_CREATE = _(
"Specify Package Information Data filename to create package")
HLP_SPECIFY_DEC_NAME_CREATE = _(
"Specify dec file names to create package")
HLP_SPECIFY_INF_NAME_CREATE = _(
"Specify inf file names to create package")
HLP_LIST_DIST_INSTALLED = _(
"List the UEFI Distribution Packages that have been installed")
HLP_NO_SUPPORT_GUI = _(
"Starting the tool in graphical mode is not supported in this version")
HLP_DISABLE_PROMPT = _(
"Disable user prompts for removing modified files. Valid only when -r is present")
HLP_CUSTOM_PATH_PROMPT = _(
"Enable user prompting for alternate installation directories")
HLP_SKIP_LOCK_CHECK = _(
"Skip the check for multiple instances")
HLP_SPECIFY_PACKAGE_NAME_REPLACE = _(
"Specify the UEFI Distribution Package file name to replace the existing file name")
HLP_SPECIFY_PACKAGE_NAME_TO_BE_REPLACED = _(
"Specify the UEFI Distribution Package file name to be replaced")
HLP_USE_GUIDED_PATHS = _(
"Install packages to the following directory path by default: <PackageName>_<PACKAGE_GUID>_<PACKAGE_VERSION>")
HLP_TEST_INSTALL = _(
"Specify the UEFI Distribution Package filenames to install")
MSG_TEST_INSTALL_PASS = _("All distribution package files satisfy the dependency check.")
MSG_TEST_INSTALL_FAIL = _("NOT all distribution package files satisfy the dependency check.")
| edk2-master | BaseTools/Source/Python/UPT/Logger/StringTable.py |
## @file
# This file implements the log mechanism for Python tools.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
Logger
'''
## Import modules
from sys import argv
from sys import stdout
from sys import stderr
import os.path
from os import remove
from logging import getLogger
from logging import Formatter
from logging import StreamHandler
from logging import FileHandler
from traceback import extract_stack
from Logger.ToolError import FatalError
from Logger.ToolError import WARNING_AS_ERROR
from Logger.ToolError import gERROR_MESSAGE
from Logger.ToolError import UNKNOWN_ERROR
from Library import GlobalData
#
# Log level constants
#
DEBUG_0 = 1
DEBUG_1 = 2
DEBUG_2 = 3
DEBUG_3 = 4
DEBUG_4 = 5
DEBUG_5 = 6
DEBUG_6 = 7
DEBUG_7 = 8
DEBUG_8 = 9
DEBUG_9 = 10
VERBOSE = 15
INFO = 20
WARN = 30
QUIET = 40
QUIET_1 = 41
ERROR = 50
SILENT = 60
IS_RAISE_ERROR = True
SUPRESS_ERROR = False
#
# Tool name
#
_TOOL_NAME = os.path.basename(argv[0])
#
# For validation purpose
#
_LOG_LEVELS = [DEBUG_0, DEBUG_1, DEBUG_2, DEBUG_3, DEBUG_4, DEBUG_5, DEBUG_6, \
DEBUG_7, DEBUG_8, DEBUG_9, VERBOSE, WARN, INFO, ERROR, QUIET, \
QUIET_1, SILENT]
#
# For DEBUG level (All DEBUG_0~9 are applicable)
#
_DEBUG_LOGGER = getLogger("tool_debug")
_DEBUG_FORMATTER = Formatter("[%(asctime)s.%(msecs)d]: %(message)s", \
datefmt="%H:%M:%S")
#
# For VERBOSE, INFO, WARN level
#
_INFO_LOGGER = getLogger("tool_info")
_INFO_FORMATTER = Formatter("%(message)s")
#
# For ERROR level
#
_ERROR_LOGGER = getLogger("tool_error")
_ERROR_FORMATTER = Formatter("%(message)s")
#
# String templates for ERROR/WARN/DEBUG log message
#
_ERROR_MESSAGE_TEMPLATE = \
('\n\n%(tool)s...\n%(file)s(%(line)s): error %(errorcode)04X: %(msg)s\n\t%(extra)s')
__ERROR_MESSAGE_TEMPLATE_WITHOUT_FILE = \
'\n\n%(tool)s...\n : error %(errorcode)04X: %(msg)s\n\t%(extra)s'
_WARNING_MESSAGE_TEMPLATE = '%(tool)s...\n%(file)s(%(line)s): warning: %(msg)s'
_WARNING_MESSAGE_TEMPLATE_WITHOUT_FILE = '%(tool)s: : warning: %(msg)s'
_DEBUG_MESSAGE_TEMPLATE = '%(file)s(%(line)s): debug: \n %(msg)s'
#
# Log INFO message
#
#Info = _INFO_LOGGER.info
def Info(msg, *args, **kwargs):
_INFO_LOGGER.info(msg, *args, **kwargs)
#
# Log information which should be always put out
#
def Quiet(msg, *args, **kwargs):
_ERROR_LOGGER.error(msg, *args, **kwargs)
## Log debug message
#
# @param Level DEBUG level (DEBUG0~9)
# @param Message Debug information
# @param ExtraData More information associated with "Message"
#
def Debug(Level, Message, ExtraData=None):
if _DEBUG_LOGGER.level > Level:
return
if Level > DEBUG_9:
return
#
# Find out the caller method information
#
CallerStack = extract_stack()[-2]
TemplateDict = {
"file" : CallerStack[0],
"line" : CallerStack[1],
"msg" : Message,
}
if ExtraData is not None:
LogText = _DEBUG_MESSAGE_TEMPLATE % TemplateDict + "\n %s" % ExtraData
else:
LogText = _DEBUG_MESSAGE_TEMPLATE % TemplateDict
_DEBUG_LOGGER.log(Level, LogText)
## Log verbose message
#
# @param Message Verbose information
#
def Verbose(Message):
return _INFO_LOGGER.log(VERBOSE, Message)
## Log warning message
#
# Warning messages report conditions that might be wrong but will not stop the tool.
#
# @param ToolName The name of the tool. If not given, the name of caller
# method will be used.
# @param Message Warning information
# @param File The name of file which caused the warning.
# @param Line The line number in the "File" which caused the warning.
# @param ExtraData More information associated with "Message"
#
def Warn(ToolName, Message, File=None, Line=None, ExtraData=None):
if _INFO_LOGGER.level > WARN:
return
#
# if no tool name given, use caller's source file name as tool name
#
if ToolName is None or ToolName == "":
ToolName = os.path.basename(extract_stack()[-2][0])
if Line is None:
Line = "..."
else:
Line = "%d" % Line
TemplateDict = {
"tool" : ToolName,
"file" : File,
"line" : Line,
"msg" : Message,
}
if File is not None:
LogText = _WARNING_MESSAGE_TEMPLATE % TemplateDict
else:
LogText = _WARNING_MESSAGE_TEMPLATE_WITHOUT_FILE % TemplateDict
if ExtraData is not None:
LogText += "\n %s" % ExtraData
_INFO_LOGGER.log(WARN, LogText)
#
# Raise an exception if indicated
#
if GlobalData.gWARNING_AS_ERROR == True:
raise FatalError(WARNING_AS_ERROR)
## Log ERROR message
#
# Once an error message is logged, the tool's execution will be broken by
# raising an exception. If you do not want to break the execution, you
# can pass "RaiseError" with a "False" value.
#
# @param ToolName The name of the tool. If not given, the name of caller
# method will be used.
# @param ErrorCode The error code
# @param Message Error information
# @param File The name of file which caused the error.
# @param Line The line number in the "File" which caused the error.
# @param ExtraData More information associated with "Message"
# @param RaiseError Raise an exception to break the tool's execution if
# it's True. This is the default behavior.
#
def Error(ToolName, ErrorCode, Message=None, File=None, Line=None, \
ExtraData=None, RaiseError=IS_RAISE_ERROR):
if ToolName:
pass
if Line is None:
Line = "..."
else:
Line = "%d" % Line
if Message is None:
if ErrorCode in gERROR_MESSAGE:
Message = gERROR_MESSAGE[ErrorCode]
else:
Message = gERROR_MESSAGE[UNKNOWN_ERROR]
if ExtraData is None:
ExtraData = ""
TemplateDict = {
"tool" : _TOOL_NAME,
"file" : File,
"line" : Line,
"errorcode" : ErrorCode,
"msg" : Message,
"extra" : ExtraData
}
if File is not None:
LogText = _ERROR_MESSAGE_TEMPLATE % TemplateDict
else:
LogText = __ERROR_MESSAGE_TEMPLATE_WITHOUT_FILE % TemplateDict
if not SUPRESS_ERROR:
_ERROR_LOGGER.log(ERROR, LogText)
if RaiseError:
raise FatalError(ErrorCode)
## Initialize log system
#
def Initialize():
#
# Since we use different format to log different levels of message into
# different place (stdout or stderr), we have to use different "Logger"
# objects to do this.
#
# For DEBUG level (All DEBUG_0~9 are applicable)
_DEBUG_LOGGER.setLevel(INFO)
_DebugChannel = StreamHandler(stdout)
_DebugChannel.setFormatter(_DEBUG_FORMATTER)
_DEBUG_LOGGER.addHandler(_DebugChannel)
#
# For VERBOSE, INFO, WARN level
#
_INFO_LOGGER.setLevel(INFO)
_InfoChannel = StreamHandler(stdout)
_InfoChannel.setFormatter(_INFO_FORMATTER)
_INFO_LOGGER.addHandler(_InfoChannel)
#
# For ERROR level
#
_ERROR_LOGGER.setLevel(INFO)
_ErrorCh = StreamHandler(stderr)
_ErrorCh.setFormatter(_ERROR_FORMATTER)
_ERROR_LOGGER.addHandler(_ErrorCh)
## Set log level
#
# @param Level One of log level in _LogLevel
#
def SetLevel(Level):
if Level not in _LOG_LEVELS:
Info("Not supported log level (%d). Use default level instead." % \
Level)
Level = INFO
_DEBUG_LOGGER.setLevel(Level)
_INFO_LOGGER.setLevel(Level)
_ERROR_LOGGER.setLevel(Level)
## Get current log level
#
def GetLevel():
return _INFO_LOGGER.getEffectiveLevel()
## Raise up warning as error
#
def SetWarningAsError():
GlobalData.gWARNING_AS_ERROR = True
## Specify a file to store the log message as well as put on console
#
# @param LogFile The file path used to store the log message
#
def SetLogFile(LogFile):
if os.path.exists(LogFile):
remove(LogFile)
_Ch = FileHandler(LogFile)
_Ch.setFormatter(_DEBUG_FORMATTER)
_DEBUG_LOGGER.addHandler(_Ch)
_Ch = FileHandler(LogFile)
_Ch.setFormatter(_INFO_FORMATTER)
_INFO_LOGGER.addHandler(_Ch)
_Ch = FileHandler(LogFile)
_Ch.setFormatter(_ERROR_FORMATTER)
_ERROR_LOGGER.addHandler(_Ch)
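#
# A minimal usage sketch of this logging module; the tool name, messages and
# level below are arbitrary examples, not values required by the module.
#
#   import Logger.Log as Logger
#   Logger.Initialize()
#   Logger.SetLevel(Logger.VERBOSE)
#   Logger.Info("Processing started")
#   Logger.Warn("MyTool", "Suspicious value", File="Pkg.dec", Line=12)
#   # Logger.Error(...) logs the message and raises FatalError unless
#   # RaiseError=False is passed.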
| edk2-master | BaseTools/Source/Python/UPT/Logger/Log.py |
## @file
# Python 'Logger' package initialization file.
#
# This file is required to make Python interpreter treat the directory
# as containing package.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
Logger
'''
| edk2-master | BaseTools/Source/Python/UPT/Logger/__init__.py |
## @file
# Standardized Error Handling infrastructures.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
ToolError
'''
import Logger.StringTable as ST
FILE_OPEN_FAILURE = 1
FILE_WRITE_FAILURE = 2
FILE_PARSE_FAILURE = 3
FILE_READ_FAILURE = 4
FILE_CREATE_FAILURE = 5
FILE_CHECKSUM_FAILURE = 6
FILE_COMPRESS_FAILURE = 7
FILE_DECOMPRESS_FAILURE = 8
FILE_MOVE_FAILURE = 9
FILE_DELETE_FAILURE = 10
FILE_COPY_FAILURE = 11
FILE_POSITIONING_FAILURE = 12
FILE_ALREADY_EXIST = 13
FILE_NOT_FOUND = 14
FILE_TYPE_MISMATCH = 15
FILE_CASE_MISMATCH = 16
FILE_DUPLICATED = 17
FILE_UNKNOWN_ERROR = 0x0FFF
OPTION_UNKNOWN = 0x1000
OPTION_MISSING = 0x1001
OPTION_CONFLICT = 0x1002
OPTION_VALUE_INVALID = 0x1003
OPTION_DEPRECATED = 0x1004
OPTION_NOT_SUPPORTED = 0x1005
OPTION_UNKNOWN_ERROR = 0x1FFF
PARAMETER_INVALID = 0x2000
PARAMETER_MISSING = 0x2001
PARAMETER_UNKNOWN_ERROR = 0x2FFF
FORMAT_INVALID = 0x3000
FORMAT_NOT_SUPPORTED = 0x3001
FORMAT_UNKNOWN = 0x3002
FORMAT_UNKNOWN_ERROR = 0x3FFF
RESOURCE_NOT_AVAILABLE = 0x4000
RESOURCE_ALLOCATE_FAILURE = 0x4001
RESOURCE_FULL = 0x4002
RESOURCE_OVERFLOW = 0x4003
RESOURCE_UNDERRUN = 0x4004
RESOURCE_UNKNOWN_ERROR = 0x4FFF
ATTRIBUTE_NOT_AVAILABLE = 0x5000
ATTRIBUTE_GET_FAILURE = 0x5001
ATTRIBUTE_SET_FAILURE = 0x5002
ATTRIBUTE_UPDATE_FAILURE = 0x5003
ATTRIBUTE_ACCESS_DENIED = 0x5004
ATTRIBUTE_RETRIEVE_FAILURE = 0x5005
ATTRIBUTE_UNKNOWN_ERROR = 0x5FFF
ATTRIBUTE_RETRIEVE_FAILURE = 0x5F00
IO_NOT_READY = 0x6000
IO_BUSY = 0x6001
IO_TIMEOUT = 0x6002
IO_UNKNOWN_ERROR = 0x6FFF
COMMAND_FAILURE = 0x7000
CODE_ERROR = 0xC0DE
AUTOGEN_ERROR = 0xF000
PARSER_ERROR = 0xF001
BUILD_ERROR = 0xF002
GENFDS_ERROR = 0xF003
ECC_ERROR = 0xF004
EOT_ERROR = 0xF005
DDC_ERROR = 0xF009
WARNING_AS_ERROR = 0xF006
MIGRATION_ERROR = 0xF010
EDK1_INF_ERROR = 0xF011
ABORT_ERROR = 0xFFFE
UNKNOWN_ERROR = 0xFFFF
UPT_ALREADY_INSTALLED_ERROR = 0xD000
UPT_ENVIRON_MISSING_ERROR = 0xD001
UPT_REPKG_ERROR = 0xD002
UPT_ALREADY_RUNNING_ERROR = 0xD003
UPT_MUL_DEC_ERROR = 0xD004
UPT_DB_UPDATE_ERROR = 0xD005
UPT_INI_PARSE_ERROR = 0xE000
## Error message of each error code
#
gERROR_MESSAGE = {
FILE_NOT_FOUND : ST.ERR_FILE_NOT_FOUND,
FILE_OPEN_FAILURE : ST.ERR_FILE_OPEN_FAILURE,
FILE_WRITE_FAILURE : ST.ERR_FILE_WRITE_FAILURE,
FILE_PARSE_FAILURE : ST.ERR_FILE_PARSE_FAILURE,
FILE_READ_FAILURE : ST.ERR_FILE_READ_FAILURE,
FILE_CREATE_FAILURE : ST.ERR_FILE_CREATE_FAILURE,
FILE_CHECKSUM_FAILURE : ST.ERR_FILE_CHECKSUM_FAILURE,
FILE_COMPRESS_FAILURE : ST.ERR_FILE_COMPRESS_FAILURE,
FILE_DECOMPRESS_FAILURE : ST.ERR_FILE_DECOMPRESS_FAILURE,
FILE_MOVE_FAILURE : ST.ERR_FILE_MOVE_FAILURE,
FILE_DELETE_FAILURE : ST.ERR_FILE_DELETE_FAILURE,
FILE_COPY_FAILURE : ST.ERR_FILE_COPY_FAILURE,
FILE_POSITIONING_FAILURE: ST.ERR_FILE_POSITIONING_FAILURE,
FILE_ALREADY_EXIST : ST.ERR_FILE_ALREADY_EXIST,
FILE_TYPE_MISMATCH : ST.ERR_FILE_TYPE_MISMATCH ,
FILE_CASE_MISMATCH : ST.ERR_FILE_CASE_MISMATCH,
FILE_DUPLICATED : ST.ERR_FILE_DUPLICATED,
FILE_UNKNOWN_ERROR : ST.ERR_FILE_UNKNOWN_ERROR,
OPTION_UNKNOWN : ST.ERR_OPTION_UNKNOWN,
OPTION_MISSING : ST.ERR_OPTION_MISSING,
OPTION_CONFLICT : ST.ERR_OPTION_CONFLICT,
OPTION_VALUE_INVALID : ST.ERR_OPTION_VALUE_INVALID,
OPTION_DEPRECATED : ST.ERR_OPTION_DEPRECATED,
OPTION_NOT_SUPPORTED : ST.ERR_OPTION_NOT_SUPPORTED,
OPTION_UNKNOWN_ERROR : ST.ERR_OPTION_UNKNOWN_ERROR,
PARAMETER_INVALID : ST.ERR_PARAMETER_INVALID,
PARAMETER_MISSING : ST.ERR_PARAMETER_MISSING,
PARAMETER_UNKNOWN_ERROR : ST.ERR_PARAMETER_UNKNOWN_ERROR,
FORMAT_INVALID : ST.ERR_FORMAT_INVALID,
FORMAT_NOT_SUPPORTED : ST.ERR_FORMAT_NOT_SUPPORTED,
FORMAT_UNKNOWN : ST.ERR_FORMAT_UNKNOWN,
FORMAT_UNKNOWN_ERROR : ST.ERR_FORMAT_UNKNOWN_ERROR,
RESOURCE_NOT_AVAILABLE : ST.ERR_RESOURCE_NOT_AVAILABLE,
RESOURCE_ALLOCATE_FAILURE : ST.ERR_RESOURCE_ALLOCATE_FAILURE,
RESOURCE_FULL : ST.ERR_RESOURCE_FULL,
RESOURCE_OVERFLOW : ST.ERR_RESOURCE_OVERFLOW,
RESOURCE_UNDERRUN : ST.ERR_RESOURCE_UNDERRUN,
RESOURCE_UNKNOWN_ERROR : ST.ERR_RESOURCE_UNKNOWN_ERROR,
ATTRIBUTE_NOT_AVAILABLE : ST.ERR_ATTRIBUTE_NOT_AVAILABLE,
ATTRIBUTE_RETRIEVE_FAILURE : ST.ERR_ATTRIBUTE_RETRIEVE_FAILURE,
ATTRIBUTE_SET_FAILURE : ST.ERR_ATTRIBUTE_SET_FAILURE,
ATTRIBUTE_UPDATE_FAILURE: ST.ERR_ATTRIBUTE_UPDATE_FAILURE,
ATTRIBUTE_ACCESS_DENIED : ST.ERR_ATTRIBUTE_ACCESS_DENIED,
ATTRIBUTE_UNKNOWN_ERROR : ST.ERR_ATTRIBUTE_UNKNOWN_ERROR,
COMMAND_FAILURE : ST.ERR_COMMAND_FAILURE,
IO_NOT_READY : ST.ERR_IO_NOT_READY,
IO_BUSY : ST.ERR_IO_BUSY,
IO_TIMEOUT : ST.ERR_IO_TIMEOUT,
IO_UNKNOWN_ERROR : ST.ERR_IO_UNKNOWN_ERROR,
UNKNOWN_ERROR : ST.ERR_UNKNOWN_ERROR,
UPT_ALREADY_INSTALLED_ERROR : ST.ERR_UPT_ALREADY_INSTALLED_ERROR,
UPT_ENVIRON_MISSING_ERROR : ST.ERR_UPT_ENVIRON_MISSING_ERROR,
UPT_REPKG_ERROR : ST.ERR_UPT_REPKG_ERROR,
UPT_ALREADY_RUNNING_ERROR : ST.ERR_UPT_ALREADY_RUNNING_ERROR,
UPT_MUL_DEC_ERROR : ST.ERR_MUL_DEC_ERROR,
UPT_INI_PARSE_ERROR : ST.ERR_UPT_INI_PARSE_ERROR,
}
## Exception indicating a fatal error
#
class FatalError(Exception):
pass
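#
# A minimal usage sketch: callers typically look up the message for an error
# code in gERROR_MESSAGE and raise FatalError with that code. The value shown
# assumes the default (untranslated) string table.
#
#   import Logger.ToolError as ToolError
#   Message = ToolError.gERROR_MESSAGE.get(ToolError.FILE_NOT_FOUND)
#   # Message -> "File or directory not found in workspace"
#   # raise ToolError.FatalError(ToolError.FILE_NOT_FOUND)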
| edk2-master | BaseTools/Source/Python/UPT/Logger/ToolError.py |
## @file
# Common routines used by all tools
#
# Copyright (c) 2011 - 2019, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
Misc
'''
##
# Import Modules
#
import os.path
from os import access
from os import F_OK
from os import makedirs
from os import getcwd
from os import chdir
from os import listdir
from os import remove
from os import rmdir
from os import linesep
from os import walk
from os import environ
import re
from collections import OrderedDict as Sdict
import Logger.Log as Logger
from Logger import StringTable as ST
from Logger import ToolError
from Library import GlobalData
from Library.DataType import SUP_MODULE_LIST
from Library.DataType import END_OF_LINE
from Library.DataType import TAB_SPLIT
from Library.DataType import TAB_LANGUAGE_EN_US
from Library.DataType import TAB_LANGUAGE_EN
from Library.DataType import TAB_LANGUAGE_EN_X
from Library.DataType import TAB_UNI_FILE_SUFFIXS
from Library.StringUtils import GetSplitValueList
from Library.ParserValidate import IsValidHexVersion
from Library.ParserValidate import IsValidPath
from Object.POM.CommonObject import TextObject
from Core.FileHook import __FileHookOpen__
from Common.MultipleWorkspace import MultipleWorkspace as mws
## Convert GUID string in xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx style to C
# structure style
#
# @param Guid: The GUID string
#
def GuidStringToGuidStructureString(Guid):
GuidList = Guid.split('-')
Result = '{'
for Index in range(0, 3, 1):
Result = Result + '0x' + GuidList[Index] + ', '
Result = Result + '{0x' + GuidList[3][0:2] + ', 0x' + GuidList[3][2:4]
for Index in range(0, 12, 2):
Result = Result + ', 0x' + GuidList[4][Index:Index + 2]
Result += '}}'
return Result
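# A minimal doctest-style sketch of the conversion above; the GUID value is an
# arbitrary example.
#
#   >>> GuidStringToGuidStructureString('12345678-1234-1234-1234-123456789abc')
#   '{0x12345678, 0x1234, 0x1234, {0x12, 0x34, 0x12, 0x34, 0x56, 0x78, 0x9a, 0xbc}}'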
## Check whether GUID string is of format xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
#
# @param GuidValue: The GUID value
#
def CheckGuidRegFormat(GuidValue):
## Regular expression used to find out register format of GUID
#
RegFormatGuidPattern = re.compile("^\s*([0-9a-fA-F]){8}-"
"([0-9a-fA-F]){4}-"
"([0-9a-fA-F]){4}-"
"([0-9a-fA-F]){4}-"
"([0-9a-fA-F]){12}\s*$")
if RegFormatGuidPattern.match(GuidValue):
return True
else:
return False
## Convert GUID string in C structure style to
# xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
#
# @param GuidValue: The GUID value in C structure format
#
def GuidStructureStringToGuidString(GuidValue):
GuidValueString = GuidValue.lower().replace("{", "").replace("}", "").\
replace(" ", "").replace(";", "")
GuidValueList = GuidValueString.split(",")
if len(GuidValueList) != 11:
return ''
try:
return "%08x-%04x-%04x-%02x%02x-%02x%02x%02x%02x%02x%02x" % (
int(GuidValueList[0], 16),
int(GuidValueList[1], 16),
int(GuidValueList[2], 16),
int(GuidValueList[3], 16),
int(GuidValueList[4], 16),
int(GuidValueList[5], 16),
int(GuidValueList[6], 16),
int(GuidValueList[7], 16),
int(GuidValueList[8], 16),
int(GuidValueList[9], 16),
int(GuidValueList[10], 16)
)
except BaseException:
return ''
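# A minimal doctest-style sketch of the reverse conversion; the structure value
# is an arbitrary example and round-trips with the function above.
#
#   >>> GuidStructureStringToGuidString(
#   ...     '{0x12345678, 0x1234, 0x1234, {0x12, 0x34, 0x12, 0x34, 0x56, 0x78, 0x9a, 0xbc}}')
#   '12345678-1234-1234-1234-123456789abc'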
## Create directories
#
# @param Directory: The directory name
#
def CreateDirectory(Directory):
if Directory is None or Directory.strip() == "":
return True
try:
if not access(Directory, F_OK):
makedirs(Directory)
except BaseException:
return False
return True
## Remove directories, including files and sub-directories in it
#
# @param Directory: The directory name
#
def RemoveDirectory(Directory, Recursively=False):
if Directory is None or Directory.strip() == "" or not \
os.path.exists(Directory):
return
if Recursively:
CurrentDirectory = getcwd()
chdir(Directory)
for File in listdir("."):
if os.path.isdir(File):
RemoveDirectory(File, Recursively)
else:
remove(File)
chdir(CurrentDirectory)
rmdir(Directory)
## Store content in file
#
# This method is used to save file only when its content is changed. This is
# quite useful for "make" system to decide what will be re-built and what
# won't.
#
# @param File: The path of file
# @param Content: The new content of the file
# @param IsBinaryFile: The flag indicating if the file is binary file
# or not
#
def SaveFileOnChange(File, Content, IsBinaryFile=True):
if os.path.exists(File):
if IsBinaryFile:
try:
if Content == __FileHookOpen__(File, "rb").read():
return False
except BaseException:
Logger.Error(None, ToolError.FILE_OPEN_FAILURE, ExtraData=File)
else:
try:
if Content == __FileHookOpen__(File, "r").read():
return False
except BaseException:
Logger.Error(None, ToolError.FILE_OPEN_FAILURE, ExtraData=File)
CreateDirectory(os.path.dirname(File))
if IsBinaryFile:
try:
FileFd = __FileHookOpen__(File, "wb")
FileFd.write(Content)
FileFd.close()
except BaseException:
Logger.Error(None, ToolError.FILE_CREATE_FAILURE, ExtraData=File)
else:
try:
FileFd = __FileHookOpen__(File, "w")
FileFd.write(Content)
FileFd.close()
except BaseException:
Logger.Error(None, ToolError.FILE_CREATE_FAILURE, ExtraData=File)
return True
## Get all files of a directory
#
# @param Root: Root dir
# @param SkipList : The files need be skipped
#
def GetFiles(Root, SkipList=None, FullPath=True):
OriPath = os.path.normpath(Root)
FileList = []
for Root, Dirs, Files in walk(Root):
if SkipList:
for Item in SkipList:
if Item in Dirs:
Dirs.remove(Item)
if Item in Files:
Files.remove(Item)
for Dir in Dirs:
if Dir.startswith('.'):
Dirs.remove(Dir)
for File in Files:
if File.startswith('.'):
continue
File = os.path.normpath(os.path.join(Root, File))
if not FullPath:
File = File[len(OriPath) + 1:]
FileList.append(File)
return FileList
## Get all non-metadata files of a directory
#
# @param Root: Root Dir
# @param SkipList : List of path need be skipped
# @param FullPath: True if the returned file should be full path
# @param PrefixPath: the path that need to be added to the files found
# @return: the list of files found
#
def GetNonMetaDataFiles(Root, SkipList, FullPath, PrefixPath):
FileList = GetFiles(Root, SkipList, FullPath)
NewFileList = []
for File in FileList:
ExtName = os.path.splitext(File)[1]
#
# skip '.dec', '.inf', '.dsc', '.fdf' files
#
if ExtName.lower() not in ['.dec', '.inf', '.dsc', '.fdf']:
NewFileList.append(os.path.normpath(os.path.join(PrefixPath, File)))
return NewFileList
## Check if given file exists or not
#
# @param File: File name or path to be checked
# @param Ext: The expected file extension; None means no extension check
#
def ValidFile(File, Ext=None):
File = File.replace('\\', '/')
if Ext is not None:
FileExt = os.path.splitext(File)[1]
if FileExt.lower() != Ext.lower():
return False
if not os.path.exists(File):
return False
return True
## RealPath
#
# @param File: File name or path to be checked
# @param Dir: The directory the file is relative to
# @param OverrideDir: The override directory
#
def RealPath(File, Dir='', OverrideDir=''):
NewFile = os.path.normpath(os.path.join(Dir, File))
NewFile = GlobalData.gALL_FILES[NewFile]
if not NewFile and OverrideDir:
NewFile = os.path.normpath(os.path.join(OverrideDir, File))
NewFile = GlobalData.gALL_FILES[NewFile]
return NewFile
## RealPath2
#
# @param File: File name or path to be checked
# @param Dir: The directory the file is relative to
# @param OverrideDir: The override directory
#
def RealPath2(File, Dir='', OverrideDir=''):
if OverrideDir:
NewFile = GlobalData.gALL_FILES[os.path.normpath(os.path.join\
(OverrideDir, File))]
if NewFile:
if OverrideDir[-1] == os.path.sep:
return NewFile[len(OverrideDir):], NewFile[0:len(OverrideDir)]
else:
return NewFile[len(OverrideDir) + 1:], \
NewFile[0:len(OverrideDir)]
NewFile = GlobalData.gALL_FILES[os.path.normpath(os.path.join(Dir, File))]
if NewFile:
if Dir:
if Dir[-1] == os.path.sep:
return NewFile[len(Dir):], NewFile[0:len(Dir)]
else:
return NewFile[len(Dir) + 1:], NewFile[0:len(Dir)]
else:
return NewFile, ''
return None, None
## CommonPath
#
# @param PathList: PathList
#
def CommonPath(PathList):
Path1 = min(PathList).split(os.path.sep)
Path2 = max(PathList).split(os.path.sep)
for Index in range(min(len(Path1), len(Path2))):
if Path1[Index] != Path2[Index]:
return os.path.sep.join(Path1[:Index])
return os.path.sep.join(Path1)
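# A minimal doctest-style sketch; the paths are arbitrary examples and the
# result depends on os.path.sep (a POSIX separator is assumed here).
#
#   >>> CommonPath(['/ws/PkgA/Include', '/ws/PkgA/Library'])
#   '/ws/PkgA'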
## PathClass
#
class PathClass(object):
def __init__(self, File='', Root='', AlterRoot='', Type='', IsBinary=False,
Arch='COMMON', ToolChainFamily='', Target='', TagName='', \
ToolCode=''):
self.Arch = Arch
self.File = str(File)
if os.path.isabs(self.File):
self.Root = ''
self.AlterRoot = ''
else:
self.Root = str(Root)
self.AlterRoot = str(AlterRoot)
#
# Remove any '.' and '..' in path
#
if self.Root:
self.Path = os.path.normpath(os.path.join(self.Root, self.File))
self.Root = os.path.normpath(CommonPath([self.Root, self.Path]))
#
# eliminate the side-effect of 'C:'
#
if self.Root[-1] == ':':
self.Root += os.path.sep
#
# file path should not start with path separator
#
if self.Root[-1] == os.path.sep:
self.File = self.Path[len(self.Root):]
else:
self.File = self.Path[len(self.Root) + 1:]
else:
self.Path = os.path.normpath(self.File)
self.SubDir, self.Name = os.path.split(self.File)
self.BaseName, self.Ext = os.path.splitext(self.Name)
if self.Root:
if self.SubDir:
self.Dir = os.path.join(self.Root, self.SubDir)
else:
self.Dir = self.Root
else:
self.Dir = self.SubDir
if IsBinary:
self.Type = Type
else:
self.Type = self.Ext.lower()
self.IsBinary = IsBinary
self.Target = Target
self.TagName = TagName
self.ToolCode = ToolCode
self.ToolChainFamily = ToolChainFamily
self._Key = None
## Convert the object of this class to a string
#
# Convert member Path of the class to a string
#
def __str__(self):
return self.Path
## Override __eq__ function
#
# Check whether PathClass are the same
#
def __eq__(self, Other):
if isinstance(Other, type(self)):
return self.Path == Other.Path
else:
return self.Path == str(Other)
## Override __hash__ function
#
# Use Path as key in hash table
#
def __hash__(self):
return hash(self.Path)
## _GetFileKey
#
def _GetFileKey(self):
if self._Key is None:
self._Key = self.Path.upper()
return self._Key
## Validate
#
def Validate(self, Type='', CaseSensitive=True):
if GlobalData.gCASE_INSENSITIVE:
CaseSensitive = False
if Type and Type.lower() != self.Type:
return ToolError.FILE_TYPE_MISMATCH, '%s (expect %s but got %s)' % \
(self.File, Type, self.Type)
RealFile, RealRoot = RealPath2(self.File, self.Root, self.AlterRoot)
if not RealRoot and not RealFile:
RealFile = self.File
if self.AlterRoot:
RealFile = os.path.join(self.AlterRoot, self.File)
elif self.Root:
RealFile = os.path.join(self.Root, self.File)
return ToolError.FILE_NOT_FOUND, os.path.join(self.AlterRoot, RealFile)
ErrorCode = 0
ErrorInfo = ''
if RealRoot != self.Root or RealFile != self.File:
if CaseSensitive and (RealFile != self.File or \
(RealRoot != self.Root and RealRoot != \
self.AlterRoot)):
ErrorCode = ToolError.FILE_CASE_MISMATCH
ErrorInfo = self.File + '\n\t' + RealFile + \
" [in file system]"
self.SubDir, self.Name = os.path.split(RealFile)
self.BaseName, self.Ext = os.path.splitext(self.Name)
if self.SubDir:
self.Dir = os.path.join(RealRoot, self.SubDir)
else:
self.Dir = RealRoot
self.File = RealFile
self.Root = RealRoot
self.Path = os.path.join(RealRoot, RealFile)
return ErrorCode, ErrorInfo
Key = property(_GetFileKey)
## Get current workspace
#
# Get WORKSPACE from the environment variable if present; if not, use the current working directory as WORKSPACE.
#
def GetWorkspace():
#
# check WORKSPACE
#
if "WORKSPACE" in environ:
WorkspaceDir = os.path.normpath(environ["WORKSPACE"])
if not os.path.exists(WorkspaceDir):
Logger.Error("UPT",
ToolError.UPT_ENVIRON_MISSING_ERROR,
ST.ERR_WORKSPACE_NOTEXIST,
ExtraData="%s" % WorkspaceDir)
else:
WorkspaceDir = os.getcwd()
if WorkspaceDir[-1] == ':':
WorkspaceDir += os.sep
PackagesPath = os.environ.get("PACKAGES_PATH")
mws.setWs(WorkspaceDir, PackagesPath)
return WorkspaceDir, mws.PACKAGES_PATH
## Get relative path
#
# Use the full path and the workspace to get the relative path.
# The main purpose of this function is to resolve the root path issue (like c: or c:\).
#
# @param Fullpath: a string of fullpath
# @param Workspace: a string of workspace
#
def GetRelativePath(Fullpath, Workspace):
RelativePath = ''
if Workspace.endswith(os.sep):
RelativePath = Fullpath[Fullpath.upper().find(Workspace.upper())+len(Workspace):]
else:
RelativePath = Fullpath[Fullpath.upper().find(Workspace.upper())+len(Workspace)+1:]
return RelativePath
## Check whether all module types are in list
#
# check whether all module types (SUP_MODULE_LIST) are in list
#
# @param ModuleList: a list of ModuleType
#
def IsAllModuleList(ModuleList):
NewModuleList = [Module.upper() for Module in ModuleList]
for Module in SUP_MODULE_LIST:
if Module not in NewModuleList:
return False
else:
return True
## Dictionary that uses comment (GenericComment, TailComment) as value.
# If a new comment whose key is already in the dictionary is inserted, the
# comments will be merged.
# Key is (Statement, SupArch); when a TailComment is added, it is indented
# according to the length of Statement.
#
class MergeCommentDict(dict):
## []= operator
#
def __setitem__(self, Key, CommentVal):
GenericComment, TailComment = CommentVal
if Key in self:
OrigVal1, OrigVal2 = dict.__getitem__(self, Key)
Statement = Key[0]
dict.__setitem__(self, Key, (OrigVal1 + GenericComment, OrigVal2 \
+ len(Statement) * ' ' + TailComment))
else:
dict.__setitem__(self, Key, (GenericComment, TailComment))
## =[] operator
#
def __getitem__(self, Key):
return dict.__getitem__(self, Key)
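# A minimal doctest-style sketch of the merge behavior; the key and comment
# strings are arbitrary examples.
#
#   >>> Comments = MergeCommentDict()
#   >>> Comments[('Pcd1', 'COMMON')] = ('#g1\n', '#t1')
#   >>> Comments[('Pcd1', 'COMMON')] = ('#g2\n', '#t2')
#   >>> Comments[('Pcd1', 'COMMON')]
#   ('#g1\n#g2\n', '#t1    #t2')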
## GenDummyHelpTextObj
#
# @retval HelpTxt: Generated dummy help text object
#
def GenDummyHelpTextObj():
HelpTxt = TextObject()
HelpTxt.SetLang(TAB_LANGUAGE_EN_US)
HelpTxt.SetString(' ')
return HelpTxt
## ConvertVersionToDecimal, the minor version should be within 0 - 99
# <HexVersion> ::= "0x" <Major> <Minor>
# <Major> ::= (a-fA-F0-9){4}
# <Minor> ::= (a-fA-F0-9){4}
# <DecVersion> ::= (0-65535) ["." (0-99)]
#
# @param StringIn: The string contains version defined in INF file.
# It can be Decimal or Hex
#
def ConvertVersionToDecimal(StringIn):
if IsValidHexVersion(StringIn):
Value = int(StringIn, 16)
Major = Value >> 16
Minor = Value & 0xFFFF
MinorStr = str(Minor)
if len(MinorStr) == 1:
MinorStr = '0' + MinorStr
return str(Major) + '.' + MinorStr
else:
if StringIn.find(TAB_SPLIT) != -1:
return StringIn
elif StringIn:
return StringIn + '.0'
else:
#
# when StringIn is '', return it directly
#
return StringIn
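# A minimal doctest-style sketch, assuming the hex form passes IsValidHexVersion;
# the version values are arbitrary examples.
#
#   >>> ConvertVersionToDecimal('0x00010005')
#   '1.05'
#   >>> ConvertVersionToDecimal('2')
#   '2.0'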
## GetHelpStringByRemoveHashKey
#
# Remove hash key at the header of string and return the remain.
#
# @param String: The string need to be processed.
#
def GetHelpStringByRemoveHashKey(String):
ReturnString = ''
PattenRemoveHashKey = re.compile(r"^[#+\s]+", re.DOTALL)
String = String.strip()
if String == '':
return String
LineList = GetSplitValueList(String, END_OF_LINE)
for Line in LineList:
ValueList = PattenRemoveHashKey.split(Line)
if len(ValueList) == 1:
ReturnString += ValueList[0] + END_OF_LINE
else:
ReturnString += ValueList[1] + END_OF_LINE
if ReturnString.endswith('\n') and not ReturnString.endswith('\n\n') and ReturnString != '\n':
ReturnString = ReturnString[:-1]
return ReturnString
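# A minimal doctest-style sketch, assuming END_OF_LINE is '\n'; the comment
# text is an arbitrary example.
#
#   >>> GetHelpStringByRemoveHashKey('# line one\n# line two')
#   'line one\nline two'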
## ConvPathFromAbsToRel
#
# Get relative file path from absolute path.
#
# @param Path: The string contain file absolute path.
# @param Root: The string contain the parent path of Path in.
#
#
def ConvPathFromAbsToRel(Path, Root):
Path = os.path.normpath(Path)
Root = os.path.normpath(Root)
FullPath = os.path.normpath(os.path.join(Root, Path))
#
# If Path is absolute path.
# It should be in Root.
#
if os.path.isabs(Path):
return FullPath[FullPath.find(Root) + len(Root) + 1:]
else:
return Path
## ConvertPath
#
# Convert special characters to '_', '\' to '/'
# return converted path: Test!1.inf -> Test_1.inf
#
# @param Path: Path to be converted
#
def ConvertPath(Path):
RetPath = ''
for Char in Path.strip():
if Char.isalnum() or Char in '.-_/':
RetPath = RetPath + Char
elif Char == '\\':
RetPath = RetPath + '/'
else:
RetPath = RetPath + '_'
return RetPath
## ConvertSpec
#
# During install, convert the Spec string extracted from the UPD into a definition allowed in an INF.
# The difference is that a period is allowed in the former (except as the first character) but not in the latter.
# return converted Spec string
#
# @param SpecStr: SpecStr to be converted
#
def ConvertSpec(SpecStr):
RetStr = ''
for Char in SpecStr:
if Char.isalnum() or Char == '_':
RetStr = RetStr + Char
else:
RetStr = RetStr + '_'
return RetStr
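# A minimal doctest-style sketch; the Spec string is an arbitrary example.
#
#   >>> ConvertSpec('UEFI_SPEC.2.7')
#   'UEFI_SPEC_2_7'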
## IsEqualList
#
# Judge whether two lists are identical (contain the same items).
# The rule is elements in List A are in List B and elements in List B are in List A.
#
# @param ListA, ListB Lists need to be judged.
#
# @return True ListA and ListB are identical
# @return False ListA and ListB are different with each other
#
def IsEqualList(ListA, ListB):
if ListA == ListB:
return True
for ItemA in ListA:
if not ItemA in ListB:
return False
for ItemB in ListB:
if not ItemB in ListA:
return False
return True
## ConvertArchList
#
# Convert each item in ArchList to upper case if its first character is lower case.
# In UDP spec, Arch is only allowed as: [A-Z]([a-zA-Z0-9])*
#
# @param ArchList The ArchList need to be converted.
#
# @return NewList The ArchList been converted.
#
def ConvertArchList(ArchList):
NewArchList = []
if not ArchList:
return NewArchList
if isinstance(ArchList, list):
for Arch in ArchList:
Arch = Arch.upper()
NewArchList.append(Arch)
elif isinstance(ArchList, str):
ArchList = ArchList.upper()
NewArchList.append(ArchList)
return NewArchList
## ProcessLineExtender
#
# Process the LineExtender of Line in LineList.
# If a line ends with a line extender, it is combined with the next line.
#
# @param LineList The LineList need to be processed.
#
# @return NewList The LineList after processing.
#
def ProcessLineExtender(LineList):
NewList = []
Count = 0
while Count < len(LineList):
if LineList[Count].strip().endswith("\\") and Count + 1 < len(LineList):
NewList.append(LineList[Count].strip()[:-2] + LineList[Count + 1])
Count = Count + 1
else:
NewList.append(LineList[Count])
Count = Count + 1
return NewList
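# A minimal doctest-style sketch; the input lines are arbitrary examples.
# Note that the trailing backslash and the character immediately before it are
# dropped when two lines are joined.
#
#   >>> ProcessLineExtender(['NAME = A \\\n', 'B\n', 'NEXT = C\n'])
#   ['NAME = AB\n', 'NEXT = C\n']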
## ProcessEdkComment
#
# Process EDK style comment in LineList: c style /* */ comment or cpp style // comment
#
#
# @param LineList The LineList need to be processed.
#
# @return LineList The LineList been processed.
# @return FirstPos Where Edk comment is first found, -1 if not found
#
def ProcessEdkComment(LineList):
FindEdkBlockComment = False
Count = 0
StartPos = -1
EndPos = -1
FirstPos = -1
while(Count < len(LineList)):
Line = LineList[Count].strip()
if Line.startswith("/*"):
#
# handling c style comment
#
StartPos = Count
while Count < len(LineList):
Line = LineList[Count].strip()
if Line.endswith("*/"):
if (Count == StartPos) and Line.strip() == '/*/':
Count = Count + 1
continue
EndPos = Count
FindEdkBlockComment = True
break
Count = Count + 1
if FindEdkBlockComment:
if FirstPos == -1:
FirstPos = StartPos
for Index in range(StartPos, EndPos+1):
LineList[Index] = ''
FindEdkBlockComment = False
elif Line.find("//") != -1 and not Line.startswith("#"):
#
# handling cpp style comment
#
LineList[Count] = Line.replace("//", '#')
if FirstPos == -1:
FirstPos = Count
Count = Count + 1
return LineList, FirstPos
## GetLibInstanceInfo
#
# Get the information from Library Instance INF file.
#
# @param String A string starting with # and followed by an INF file path
# @param WorkSpace The WorkSpace directory used to combine with the INF file path.
# @param LineNo The line number used for error reporting.
#
# @return GUID, Version
def GetLibInstanceInfo(String, WorkSpace, LineNo):
FileGuidString = ""
VerString = ""
OriginalString = String
String = String.strip()
if not String:
return None, None
#
# Remove "#" characters at the beginning
#
String = GetHelpStringByRemoveHashKey(String)
String = String.strip()
#
# Validate file name exist.
#
FullFileName = os.path.normpath(os.path.realpath(os.path.join(WorkSpace, String)))
if not (ValidFile(FullFileName)):
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_FILELIST_EXIST % (String),
File=GlobalData.gINF_MODULE_NAME,
Line=LineNo,
ExtraData=OriginalString)
#
# Validate file exist/format.
#
if IsValidPath(String, WorkSpace):
IsValidFileFlag = True
else:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID % (String),
File=GlobalData.gINF_MODULE_NAME,
Line=LineNo,
ExtraData=OriginalString)
return False
if IsValidFileFlag:
FileLinesList = []
try:
FInputfile = open(FullFileName, "r")
try:
FileLinesList = FInputfile.readlines()
except BaseException:
Logger.Error("InfParser",
ToolError.FILE_READ_FAILURE,
ST.ERR_FILE_OPEN_FAILURE,
File=FullFileName)
finally:
FInputfile.close()
except BaseException:
Logger.Error("InfParser",
ToolError.FILE_READ_FAILURE,
ST.ERR_FILE_OPEN_FAILURE,
File=FullFileName)
ReFileGuidPattern = re.compile("^\s*FILE_GUID\s*=.*$")
ReVerStringPattern = re.compile("^\s*VERSION_STRING\s*=.*$")
FileLinesList = ProcessLineExtender(FileLinesList)
for Line in FileLinesList:
if ReFileGuidPattern.match(Line):
FileGuidString = Line
if ReVerStringPattern.match(Line):
VerString = Line
if FileGuidString:
FileGuidString = GetSplitValueList(FileGuidString, '=', 1)[1]
if VerString:
VerString = GetSplitValueList(VerString, '=', 1)[1]
return FileGuidString, VerString
## GetLocalValue
#
# Generate the local value for an INF or DEC file.
# The value is chosen by language code in this order of preference:
# "en-x-tianocore", then "en-US", then "en", then any code starting with 'en',
# then an entry without a language code.
# If multiple entries of a tag exist with the same language code, the last
# entry is used (or the first one when UseFirstValue is True).
#
# @param ValueList A list need to be processed.
# @param UseFirstValue: True to use the first value, False to use the last value
#
# @return LocalValue
def GetLocalValue(ValueList, UseFirstValue=False):
Value1 = ''
Value2 = ''
Value3 = ''
Value4 = ''
Value5 = ''
for (Key, Value) in ValueList:
if Key == TAB_LANGUAGE_EN_X:
if UseFirstValue:
if not Value1:
Value1 = Value
else:
Value1 = Value
if Key == TAB_LANGUAGE_EN_US:
if UseFirstValue:
if not Value2:
Value2 = Value
else:
Value2 = Value
if Key == TAB_LANGUAGE_EN:
if UseFirstValue:
if not Value3:
Value3 = Value
else:
Value3 = Value
if Key.startswith(TAB_LANGUAGE_EN):
if UseFirstValue:
if not Value4:
Value4 = Value
else:
Value4 = Value
if Key == '':
if UseFirstValue:
if not Value5:
Value5 = Value
else:
Value5 = Value
if Value1:
return Value1
if Value2:
return Value2
if Value3:
return Value3
if Value4:
return Value4
if Value5:
return Value5
return ''
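# A minimal doctest-style sketch, assuming the usual values of the
# TAB_LANGUAGE_* constants ('en-x-tianocore', 'en-US', 'en'); the abstract
# strings are arbitrary examples.
#
#   >>> GetLocalValue([('en-US', 'US abstract'), ('en-x-tianocore', 'Tianocore abstract')])
#   'Tianocore abstract'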
## GetCharIndexOutStr
#
# Get comment character index outside a string
#
# @param Line: The string to be checked
# @param CommentCharacter: Comment char, used to ignore comment content
#
# @retval Index
#
def GetCharIndexOutStr(CommentCharacter, Line):
#
# remove whitespace
#
Line = Line.strip()
#
# Check whether comment character is in a string
#
InString = False
for Index in range(0, len(Line)):
if Line[Index] == '"':
InString = not InString
elif Line[Index] == CommentCharacter and InString :
pass
elif Line[Index] == CommentCharacter and (Index +1) < len(Line) and Line[Index+1] == CommentCharacter \
and not InString :
return Index
return -1
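# A minimal doctest-style sketch; the line content is an arbitrary example.
# The doubled '#' outside the quoted string is reported, while the single '#'
# inside the quotes is ignored.
#
#   >>> GetCharIndexOutStr('#', 'Value = "A#B" ## tail comment')
#   14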
## ValidateUNIFilePath
#
# Check the UNI file path
#
# @param Path: The UNI file path
#
def ValidateUNIFilePath(Path):
Suffix = Path[Path.rfind(TAB_SPLIT):]
#
# Check if the suffix is one of the '.uni', '.UNI', '.Uni'
#
if Suffix not in TAB_UNI_FILE_SUFFIXS:
Logger.Error("Unicode File Parser",
ToolError.FORMAT_INVALID,
Message=ST.ERR_UNI_FILE_SUFFIX_WRONG,
ExtraData=Path)
#
# Check if '..' in the file name(without suffix)
#
if (TAB_SPLIT + TAB_SPLIT) in Path:
Logger.Error("Unicode File Parser",
ToolError.FORMAT_INVALID,
Message=ST.ERR_UNI_FILE_NAME_INVALID,
ExtraData=Path)
#
# Check if the file name is valid according to the DEC and INF specification
#
Pattern = '[a-zA-Z0-9_][a-zA-Z0-9_\-\.]*'
FileName = Path.replace(Suffix, '')
InvalidCh = re.sub(Pattern, '', FileName)
if InvalidCh:
Logger.Error("Unicode File Parser",
ToolError.FORMAT_INVALID,
Message=ST.ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID,
ExtraData=Path)
| edk2-master | BaseTools/Source/Python/UPT/Library/Misc.py |
## @file
# Collect all defined strings in multiple uni files.
#
# Copyright (c) 2014 - 2019, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
"""
Collect all defined strings in multiple uni files
"""
from __future__ import print_function
##
# Import Modules
#
import os, codecs, re
import shlex
from Logger import ToolError
from Logger import Log as EdkLogger
from Logger import StringTable as ST
from Library.StringUtils import GetLineNo
from Library.Misc import PathClass
from Library.Misc import GetCharIndexOutStr
from Library import DataType as DT
from Library.ParserValidate import CheckUTF16FileHeader
##
# Static definitions
#
UNICODE_WIDE_CHAR = u'\\wide'
UNICODE_NARROW_CHAR = u'\\narrow'
UNICODE_NON_BREAKING_CHAR = u'\\nbr'
UNICODE_UNICODE_CR = '\r'
UNICODE_UNICODE_LF = '\n'
NARROW_CHAR = u'\uFFF0'
WIDE_CHAR = u'\uFFF1'
NON_BREAKING_CHAR = u'\uFFF2'
CR = u'\u000D'
LF = u'\u000A'
NULL = u'\u0000'
TAB = u'\t'
BACK_SPLASH = u'\\'
gLANG_CONV_TABLE = {'eng':'en', 'fra':'fr', \
'aar':'aa', 'abk':'ab', 'ave':'ae', 'afr':'af', 'aka':'ak', 'amh':'am', \
'arg':'an', 'ara':'ar', 'asm':'as', 'ava':'av', 'aym':'ay', 'aze':'az', \
'bak':'ba', 'bel':'be', 'bul':'bg', 'bih':'bh', 'bis':'bi', 'bam':'bm', \
'ben':'bn', 'bod':'bo', 'bre':'br', 'bos':'bs', 'cat':'ca', 'che':'ce', \
'cha':'ch', 'cos':'co', 'cre':'cr', 'ces':'cs', 'chu':'cu', 'chv':'cv', \
'cym':'cy', 'dan':'da', 'deu':'de', 'div':'dv', 'dzo':'dz', 'ewe':'ee', \
'ell':'el', 'epo':'eo', 'spa':'es', 'est':'et', 'eus':'eu', 'fas':'fa', \
'ful':'ff', 'fin':'fi', 'fij':'fj', 'fao':'fo', 'fry':'fy', 'gle':'ga', \
'gla':'gd', 'glg':'gl', 'grn':'gn', 'guj':'gu', 'glv':'gv', 'hau':'ha', \
'heb':'he', 'hin':'hi', 'hmo':'ho', 'hrv':'hr', 'hat':'ht', 'hun':'hu', \
'hye':'hy', 'her':'hz', 'ina':'ia', 'ind':'id', 'ile':'ie', 'ibo':'ig', \
'iii':'ii', 'ipk':'ik', 'ido':'io', 'isl':'is', 'ita':'it', 'iku':'iu', \
'jpn':'ja', 'jav':'jv', 'kat':'ka', 'kon':'kg', 'kik':'ki', 'kua':'kj', \
'kaz':'kk', 'kal':'kl', 'khm':'km', 'kan':'kn', 'kor':'ko', 'kau':'kr', \
'kas':'ks', 'kur':'ku', 'kom':'kv', 'cor':'kw', 'kir':'ky', 'lat':'la', \
'ltz':'lb', 'lug':'lg', 'lim':'li', 'lin':'ln', 'lao':'lo', 'lit':'lt', \
'lub':'lu', 'lav':'lv', 'mlg':'mg', 'mah':'mh', 'mri':'mi', 'mkd':'mk', \
'mal':'ml', 'mon':'mn', 'mar':'mr', 'msa':'ms', 'mlt':'mt', 'mya':'my', \
'nau':'na', 'nob':'nb', 'nde':'nd', 'nep':'ne', 'ndo':'ng', 'nld':'nl', \
'nno':'nn', 'nor':'no', 'nbl':'nr', 'nav':'nv', 'nya':'ny', 'oci':'oc', \
'oji':'oj', 'orm':'om', 'ori':'or', 'oss':'os', 'pan':'pa', 'pli':'pi', \
'pol':'pl', 'pus':'ps', 'por':'pt', 'que':'qu', 'roh':'rm', 'run':'rn', \
'ron':'ro', 'rus':'ru', 'kin':'rw', 'san':'sa', 'srd':'sc', 'snd':'sd', \
'sme':'se', 'sag':'sg', 'sin':'si', 'slk':'sk', 'slv':'sl', 'smo':'sm', \
'sna':'sn', 'som':'so', 'sqi':'sq', 'srp':'sr', 'ssw':'ss', 'sot':'st', \
'sun':'su', 'swe':'sv', 'swa':'sw', 'tam':'ta', 'tel':'te', 'tgk':'tg', \
'tha':'th', 'tir':'ti', 'tuk':'tk', 'tgl':'tl', 'tsn':'tn', 'ton':'to', \
'tur':'tr', 'tso':'ts', 'tat':'tt', 'twi':'tw', 'tah':'ty', 'uig':'ug', \
'ukr':'uk', 'urd':'ur', 'uzb':'uz', 'ven':'ve', 'vie':'vi', 'vol':'vo', \
'wln':'wa', 'wol':'wo', 'xho':'xh', 'yid':'yi', 'yor':'yo', 'zha':'za', \
'zho':'zh', 'zul':'zu'}
## Convert a python unicode string to a normal string
#
# Convert a python unicode string to a normal string
# UniToStr(u'I am a string') is 'I am a string'
#
# @param Uni: The python unicode string
#
# @retval: The formatted normal string
#
def UniToStr(Uni):
return repr(Uni)[2:-1]
## Convert a unicode string to a Hex list
#
# Convert a unicode string to a Hex list
# UniToHexList('ABC') is ['0x41', '0x00', '0x42', '0x00', '0x43', '0x00']
#
# @param Uni: The python unicode string
#
# @retval List: The formatted hex list
#
def UniToHexList(Uni):
List = []
for Item in Uni:
Temp = '%04X' % ord(Item)
List.append('0x' + Temp[2:4])
List.append('0x' + Temp[0:2])
return List
## Convert special unicode characters
#
# Convert special characters to (c), (r) and (tm).
#
# @param Uni: The python unicode string
#
# @retval NewUni: The converted unicode string
#
def ConvertSpecialUnicodes(Uni):
OldUni = NewUni = Uni
NewUni = NewUni.replace(u'\u00A9', '(c)')
NewUni = NewUni.replace(u'\u00AE', '(r)')
NewUni = NewUni.replace(u'\u2122', '(tm)')
if OldUni == NewUni:
NewUni = OldUni
return NewUni
## GetLanguageCode1766
#
# Check the language code read from .UNI file and convert RFC 4646 codes to RFC 1766 codes
# RFC 1766 language codes supported in compatibility mode
# RFC 4646 language codes supported in native mode
#
# @param LangName: Language codes read from .UNI file
#
# @retval LangName: Valid language code in RFC 1766 format or None
#
def GetLanguageCode1766(LangName, File=None):
return LangName
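# Note: because of the unconditional return above, the RFC 1766 conversion logic that
# follows is currently not reached; language codes are passed through unchanged.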
length = len(LangName)
if length == 2:
if LangName.isalpha():
for Key in gLANG_CONV_TABLE.keys():
if gLANG_CONV_TABLE.get(Key) == LangName.lower():
return Key
elif length == 3:
if LangName.isalpha() and gLANG_CONV_TABLE.get(LangName.lower()):
return LangName
else:
EdkLogger.Error("Unicode File Parser",
ToolError.FORMAT_INVALID,
"Invalid RFC 1766 language code : %s" % LangName,
File)
elif length == 5:
if LangName[0:2].isalpha() and LangName[2] == '-':
for Key in gLANG_CONV_TABLE.keys():
if gLANG_CONV_TABLE.get(Key) == LangName[0:2].lower():
return Key
elif length >= 6:
if LangName[0:2].isalpha() and LangName[2] == '-':
for Key in gLANG_CONV_TABLE.keys():
if gLANG_CONV_TABLE.get(Key) == LangName[0:2].lower():
return Key
if LangName[0:3].isalpha() and gLANG_CONV_TABLE.get(LangName.lower()) is None and LangName[3] == '-':
for Key in gLANG_CONV_TABLE.keys():
if Key == LangName[0:3].lower():
return Key
EdkLogger.Error("Unicode File Parser",
ToolError.FORMAT_INVALID,
"Invalid RFC 4646 language code : %s" % LangName,
File)
## GetLanguageCode
#
# Check the language code read from .UNI file and convert RFC 1766 codes to RFC 4646 codes if appropriate
# RFC 1766 language codes supported in compatibility mode
# RFC 4646 language codes supported in native mode
#
# @param LangName: Language codes read from .UNI file
#
# @retval LangName: Valid language code in RFC 4646 format or None
#
def GetLanguageCode(LangName, IsCompatibleMode, File):
length = len(LangName)
if IsCompatibleMode:
if length == 3 and LangName.isalpha():
TempLangName = gLANG_CONV_TABLE.get(LangName.lower())
if TempLangName is not None:
return TempLangName
return LangName
else:
EdkLogger.Error("Unicode File Parser",
ToolError.FORMAT_INVALID,
"Invalid RFC 1766 language code : %s" % LangName,
File)
if (LangName[0] == 'X' or LangName[0] == 'x') and LangName[1] == '-':
return LangName
if length == 2:
if LangName.isalpha():
return LangName
elif length == 3:
if LangName.isalpha() and gLANG_CONV_TABLE.get(LangName.lower()) is None:
return LangName
elif length == 5:
if LangName[0:2].isalpha() and LangName[2] == '-':
return LangName
elif length >= 6:
if LangName[0:2].isalpha() and LangName[2] == '-':
return LangName
if LangName[0:3].isalpha() and gLANG_CONV_TABLE.get(LangName.lower()) is None and LangName[3] == '-':
return LangName
EdkLogger.Error("Unicode File Parser",
ToolError.FORMAT_INVALID,
"Invalid RFC 4646 language code : %s" % LangName,
File)
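# Illustrative sketch of the two modes (the inputs are example values only):
#
#   GetLanguageCode('eng', True, File)    -> 'en'     (RFC 1766 code mapped via gLANG_CONV_TABLE)
#   GetLanguageCode('en-US', False, File) -> 'en-US'  (already a valid RFC 4646 code)
#   GetLanguageCode('x-abc', False, File) -> 'x-abc'  (private-use codes pass through)
#
# Anything matching neither form is reported through EdkLogger.Error as FORMAT_INVALID.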
## FormatUniEntry
#
# Formatted the entry in Uni file.
#
# @param StrTokenName StrTokenName.
# @param TokenValueList A list need to be processed.
# @param ContainerFile ContainerFile.
#
# @return formatted entry
def FormatUniEntry(StrTokenName, TokenValueList, ContainerFile):
SubContent = ''
PreFormatLength = 40
if len(StrTokenName) > PreFormatLength:
PreFormatLength = len(StrTokenName) + 1
for (Lang, Value) in TokenValueList:
if not Value or Lang == DT.TAB_LANGUAGE_EN_X:
continue
if Lang == '':
Lang = DT.TAB_LANGUAGE_EN_US
if Lang == 'eng':
Lang = DT.TAB_LANGUAGE_EN_US
elif len(Lang.split('-')[0]) == 3:
Lang = GetLanguageCode(Lang.split('-')[0], True, ContainerFile)
else:
Lang = GetLanguageCode(Lang, False, ContainerFile)
ValueList = Value.split('\n')
SubValueContent = ''
for SubValue in ValueList:
if SubValue.strip():
SubValueContent += \
' ' * (PreFormatLength + len('#language en-US ')) + '\"%s\\n\"' % SubValue.strip() + '\r\n'
SubValueContent = SubValueContent[(PreFormatLength + len('#language en-US ')):SubValueContent.rfind('\\n')] \
+ '\"' + '\r\n'
SubContent += ' '*PreFormatLength + '#language %-5s ' % Lang + SubValueContent
if SubContent:
SubContent = StrTokenName + ' '*(PreFormatLength - len(StrTokenName)) + SubContent[PreFormatLength:]
return SubContent
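# Illustrative sketch of the output shape (the token name and value are assumed examples,
# and spacing is approximate; the real column width is PreFormatLength):
#
#   FormatUniEntry('#string STR_MODULE_ABSTRACT', [('en-US', 'Sample module')], File)
#
# yields a block resembling:
#
#   #string STR_MODULE_ABSTRACT             #language en-US "Sample module"
#
# Multi-line values produce additional quoted lines indented to the same column, with each
# intermediate line keeping a literal \n escape.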
## StringDefClassObject
#
# A structure for language definition
#
class StringDefClassObject(object):
def __init__(self, Name = None, Value = None, Referenced = False, Token = None, UseOtherLangDef = ''):
self.StringName = ''
self.StringNameByteList = []
self.StringValue = ''
self.StringValueByteList = ''
self.Token = 0
self.Referenced = Referenced
self.UseOtherLangDef = UseOtherLangDef
self.Length = 0
if Name is not None:
self.StringName = Name
self.StringNameByteList = UniToHexList(Name)
if Value is not None:
self.StringValue = Value
self.StringValueByteList = UniToHexList(self.StringValue)
self.Length = len(self.StringValueByteList)
if Token is not None:
self.Token = Token
def __str__(self):
return repr(self.StringName) + ' ' + \
repr(self.Token) + ' ' + \
repr(self.Referenced) + ' ' + \
repr(self.StringValue) + ' ' + \
repr(self.UseOtherLangDef)
def UpdateValue(self, Value = None):
if Value is not None:
if self.StringValue:
self.StringValue = self.StringValue + '\r\n' + Value
else:
self.StringValue = Value
self.StringValueByteList = UniToHexList(self.StringValue)
self.Length = len(self.StringValueByteList)
## UniFileClassObject
#
# A structure for .uni file definition
#
class UniFileClassObject(object):
def __init__(self, FileList = None, IsCompatibleMode = False, IncludePathList = None):
self.FileList = FileList
self.File = None
self.IncFileList = FileList
self.UniFileHeader = ''
self.Token = 2
self.LanguageDef = [] #[ [u'LanguageIdentifier', u'PrintableName'], ... ]
self.OrderedStringList = {} #{ u'LanguageIdentifier' : [StringDefClassObject] }
self.OrderedStringDict = {} #{ u'LanguageIdentifier' : {StringName:(IndexInList)} }
self.OrderedStringListByToken = {} #{ u'LanguageIdentifier' : {Token: StringDefClassObject} }
self.IsCompatibleMode = IsCompatibleMode
if not IncludePathList:
self.IncludePathList = []
else:
self.IncludePathList = IncludePathList
if len(self.FileList) > 0:
self.LoadUniFiles(FileList)
#
# Get Language definition
#
def GetLangDef(self, File, Line):
Lang = shlex.split(Line.split(u"//")[0])
if len(Lang) != 3:
try:
FileIn = codecs.open(File.Path, mode='rb', encoding='utf_8').readlines()
except UnicodeError as Xstr:
FileIn = codecs.open(File.Path, mode='rb', encoding='utf_16').readlines()
except UnicodeError as Xstr:
FileIn = codecs.open(File.Path, mode='rb', encoding='utf_16_le').readlines()
except:
EdkLogger.Error("Unicode File Parser",
ToolError.FILE_OPEN_FAILURE,
"File read failure: %s" % str(Xstr),
ExtraData=File)
LineNo = GetLineNo(FileIn, Line, False)
EdkLogger.Error("Unicode File Parser",
ToolError.PARSER_ERROR,
"Wrong language definition",
ExtraData="""%s\n\t*Correct format is like '#langdef en-US "English"'""" % Line,
File = File, Line = LineNo)
else:
LangName = GetLanguageCode(Lang[1], self.IsCompatibleMode, self.File)
LangPrintName = Lang[2]
IsLangInDef = False
for Item in self.LanguageDef:
if Item[0] == LangName:
IsLangInDef = True
break
if not IsLangInDef:
self.LanguageDef.append([LangName, LangPrintName])
#
# Add language string
#
self.AddStringToList(u'$LANGUAGE_NAME', LangName, LangName, 0, True, Index=0)
self.AddStringToList(u'$PRINTABLE_LANGUAGE_NAME', LangName, LangPrintName, 1, True, Index=1)
if not IsLangInDef:
#
# The found STRING tokens will be added into new language string list
# so that the unique STRING identifier is reserved for all languages in the package list.
#
FirstLangName = self.LanguageDef[0][0]
if LangName != FirstLangName:
for Index in range (2, len (self.OrderedStringList[FirstLangName])):
Item = self.OrderedStringList[FirstLangName][Index]
if Item.UseOtherLangDef != '':
OtherLang = Item.UseOtherLangDef
else:
OtherLang = FirstLangName
self.OrderedStringList[LangName].append (StringDefClassObject(Item.StringName,
'',
Item.Referenced,
Item.Token,
OtherLang))
self.OrderedStringDict[LangName][Item.StringName] = len(self.OrderedStringList[LangName]) - 1
return True
#
# Get String name and value
#
def GetStringObject(self, Item):
Language = ''
Value = ''
Name = Item.split()[1]
# Check that the string token name contains only upper-case characters
if Name != '':
MatchString = re.match('[A-Z0-9_]+', Name, re.UNICODE)
if MatchString is None or MatchString.end(0) != len(Name):
EdkLogger.Error("Unicode File Parser",
ToolError.FORMAT_INVALID,
'The string token name %s in UNI file %s must be upper case character.' %(Name, self.File))
LanguageList = Item.split(u'#language ')
for IndexI in range(len(LanguageList)):
if IndexI == 0:
continue
else:
Language = LanguageList[IndexI].split()[0]
#.replace(u'\r\n', u'')
Value = \
LanguageList[IndexI][LanguageList[IndexI].find(u'\"') + len(u'\"') : LanguageList[IndexI].rfind(u'\"')]
Language = GetLanguageCode(Language, self.IsCompatibleMode, self.File)
self.AddStringToList(Name, Language, Value)
#
# Get include file list and load them
#
def GetIncludeFile(self, Item, Dir = None):
if Dir:
pass
FileName = Item[Item.find(u'!include ') + len(u'!include ') :Item.find(u' ', len(u'!include '))][1:-1]
self.LoadUniFile(FileName)
#
# Pre-process before parse .uni file
#
def PreProcess(self, File, IsIncludeFile=False):
if not os.path.exists(File.Path) or not os.path.isfile(File.Path):
EdkLogger.Error("Unicode File Parser",
ToolError.FILE_NOT_FOUND,
ExtraData=File.Path)
#
# Check file header of the Uni file
#
# if not CheckUTF16FileHeader(File.Path):
# EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID,
# ExtraData='The file %s is either invalid UTF-16LE or it is missing the BOM.' % File.Path)
try:
FileIn = codecs.open(File.Path, mode='rb', encoding='utf_8').readlines()
except UnicodeError as Xstr:
FileIn = codecs.open(File.Path, mode='rb', encoding='utf_16').readlines()
except UnicodeError:
FileIn = codecs.open(File.Path, mode='rb', encoding='utf_16_le').readlines()
except:
EdkLogger.Error("Unicode File Parser", ToolError.FILE_OPEN_FAILURE, ExtraData=File.Path)
#
# get the file header
#
Lines = []
HeaderStart = False
HeaderEnd = False
if not self.UniFileHeader:
FirstGenHeader = True
else:
FirstGenHeader = False
for Line in FileIn:
Line = Line.strip()
if Line == u'':
continue
if Line.startswith(DT.TAB_COMMENT_EDK1_SPLIT) and (Line.find(DT.TAB_HEADER_COMMENT) > -1) \
and not HeaderEnd and not HeaderStart:
HeaderStart = True
if not Line.startswith(DT.TAB_COMMENT_EDK1_SPLIT) and HeaderStart and not HeaderEnd:
HeaderEnd = True
if Line.startswith(DT.TAB_COMMENT_EDK1_SPLIT) and HeaderStart and not HeaderEnd and FirstGenHeader:
self.UniFileHeader += Line + '\r\n'
continue
#
# Use unique identifier
#
FindFlag = -1
LineCount = 0
MultiLineFeedExits = False
#
# 0: initial value
# 1: a single String entry exists
# 2: a line feed exists under some single String entry
#
StringEntryExistsFlag = 0
for Line in FileIn:
Line = FileIn[LineCount]
LineCount += 1
Line = Line.strip()
#
# Ignore comment line and empty line
#
if Line == u'' or Line.startswith(u'//'):
#
# Change the single line String entry flag status
#
if StringEntryExistsFlag == 1:
StringEntryExistsFlag = 2
#
# If the '#string' line and the '#language' line are not in the same line,
# there should be only one line feed character between them
#
if MultiLineFeedExits:
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, ExtraData=File.Path)
continue
MultiLineFeedExits = False
#
# Process comment embedded in string define lines
#
FindFlag = Line.find(u'//')
if FindFlag != -1 and Line.find(u'//') < Line.find(u'"'):
Line = Line.replace(Line[FindFlag:], u' ')
if FileIn[LineCount].strip().startswith('#language'):
Line = Line + FileIn[LineCount]
FileIn[LineCount-1] = Line
FileIn[LineCount] = '\r\n'
LineCount -= 1
for Index in range (LineCount + 1, len (FileIn) - 1):
if (Index == len(FileIn) -1):
FileIn[Index] = '\r\n'
else:
FileIn[Index] = FileIn[Index + 1]
continue
CommIndex = GetCharIndexOutStr(u'/', Line)
if CommIndex > -1:
if (len(Line) - 1) > CommIndex:
if Line[CommIndex+1] == u'/':
Line = Line[:CommIndex].strip()
else:
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, ExtraData=File.Path)
else:
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, ExtraData=File.Path)
Line = Line.replace(UNICODE_WIDE_CHAR, WIDE_CHAR)
Line = Line.replace(UNICODE_NARROW_CHAR, NARROW_CHAR)
Line = Line.replace(UNICODE_NON_BREAKING_CHAR, NON_BREAKING_CHAR)
Line = Line.replace(u'\\\\', u'\u0006')
Line = Line.replace(u'\\r\\n', CR + LF)
Line = Line.replace(u'\\n', CR + LF)
Line = Line.replace(u'\\r', CR)
Line = Line.replace(u'\\t', u'\t')
Line = Line.replace(u'''\"''', u'''"''')
Line = Line.replace(u'\t', u' ')
Line = Line.replace(u'\u0006', u'\\')
#
# Check if single line has correct '"'
#
if Line.startswith(u'#string') and Line.find(u'#language') > -1 and Line.find('"') > Line.find(u'#language'):
if not Line.endswith('"'):
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID,
ExtraData='''The line %s misses '"' at the end of it in file %s'''
% (LineCount, File.Path))
#
# Between Name entry and Language entry can not contain line feed
#
if Line.startswith(u'#string') and Line.find(u'#language') == -1:
MultiLineFeedExits = True
if Line.startswith(u'#string') and Line.find(u'#language') > 0 and Line.find(u'"') < 0:
MultiLineFeedExits = True
#
# Between Language entry and String entry can not contain line feed
#
if Line.startswith(u'#language') and len(Line.split()) == 2:
MultiLineFeedExits = True
#
# Check the situation that there only has one '"' for the language entry
#
if Line.startswith(u'#string') and Line.find(u'#language') > 0 and Line.count(u'"') == 1:
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID,
ExtraData='''The line %s misses '"' at the end of it in file %s'''
% (LineCount, File.Path))
#
# Check the situation that there has more than 2 '"' for the language entry
#
if Line.startswith(u'#string') and Line.find(u'#language') > 0 and Line.replace(u'\\"', '').count(u'"') > 2:
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID,
ExtraData='''The line %s has more than 2 '"' for language entry in file %s'''
% (LineCount, File.Path))
#
# Between two String entry, can not contain line feed
#
if Line.startswith(u'"'):
if StringEntryExistsFlag == 2:
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID,
Message=ST.ERR_UNIPARSE_LINEFEED_UP_EXIST % Line, ExtraData=File.Path)
StringEntryExistsFlag = 1
if not Line.endswith('"'):
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID,
ExtraData='''The line %s misses '"' at the end of it in file %s'''
% (LineCount, File.Path))
#
# Check the situation that there has more than 2 '"' for the language entry
#
if Line.strip() and Line.replace(u'\\"', '').count(u'"') > 2:
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID,
ExtraData='''The line %s has more than 2 '"' for language entry in file %s'''
% (LineCount, File.Path))
elif Line.startswith(u'#language'):
if StringEntryExistsFlag == 2:
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID,
Message=ST.ERR_UNI_MISS_STRING_ENTRY % Line, ExtraData=File.Path)
StringEntryExistsFlag = 0
else:
StringEntryExistsFlag = 0
Lines.append(Line)
#
# Convert string def format as below
#
# #string MY_STRING_1
# #language eng
# "My first English string line 1"
# "My first English string line 2"
# #string MY_STRING_1
# #language spa
# "Mi segunda secuencia 1"
# "Mi segunda secuencia 2"
#
if not IsIncludeFile and not Lines:
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, \
Message=ST.ERR_UNIPARSE_NO_SECTION_EXIST, \
ExtraData=File.Path)
NewLines = []
StrName = u''
ExistStrNameList = []
for Line in Lines:
if StrName and not StrName.split()[1].startswith(DT.TAB_STR_TOKENCNAME + DT.TAB_UNDERLINE_SPLIT):
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, \
Message=ST.ERR_UNIPARSE_STRNAME_FORMAT_ERROR % StrName.split()[1], \
ExtraData=File.Path)
if StrName and len(StrName.split()[1].split(DT.TAB_UNDERLINE_SPLIT)) == 4:
StringTokenList = StrName.split()[1].split(DT.TAB_UNDERLINE_SPLIT)
if (StringTokenList[3].upper() in [DT.TAB_STR_TOKENPROMPT, DT.TAB_STR_TOKENHELP] and \
StringTokenList[3] not in [DT.TAB_STR_TOKENPROMPT, DT.TAB_STR_TOKENHELP]) or \
(StringTokenList[2].upper() == DT.TAB_STR_TOKENERR and StringTokenList[2] != DT.TAB_STR_TOKENERR):
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, \
Message=ST.ERR_UNIPARSE_STRTOKEN_FORMAT_ERROR % StrName.split()[1], \
ExtraData=File.Path)
if Line.count(u'#language') > 1:
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, \
Message=ST.ERR_UNIPARSE_SEP_LANGENTRY_LINE % Line, \
ExtraData=File.Path)
if Line.startswith(u'//'):
continue
elif Line.startswith(u'#langdef'):
if len(Line.split()) == 2:
NewLines.append(Line)
continue
elif len(Line.split()) > 2 and Line.find(u'"') > 0:
NewLines.append(Line[:Line.find(u'"')].strip())
NewLines.append(Line[Line.find(u'"'):])
else:
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, ExtraData=File.Path)
elif Line.startswith(u'#string'):
if len(Line.split()) == 2:
StrName = Line
if StrName:
if StrName.split()[1] not in ExistStrNameList:
ExistStrNameList.append(StrName.split()[1].strip())
elif StrName.split()[1] in [DT.TAB_INF_ABSTRACT, DT.TAB_INF_DESCRIPTION, \
DT.TAB_INF_BINARY_ABSTRACT, DT.TAB_INF_BINARY_DESCRIPTION, \
DT.TAB_DEC_PACKAGE_ABSTRACT, DT.TAB_DEC_PACKAGE_DESCRIPTION, \
DT.TAB_DEC_BINARY_ABSTRACT, DT.TAB_DEC_BINARY_DESCRIPTION]:
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, \
Message=ST.ERR_UNIPARSE_MULTI_ENTRY_EXIST % StrName.split()[1], \
ExtraData=File.Path)
continue
elif len(Line.split()) == 4 and Line.find(u'#language') > 0:
if Line[Line.find(u'#language')-1] != ' ' or \
Line[Line.find(u'#language')+len(u'#language')] != u' ':
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, ExtraData=File.Path)
if Line.find(u'"') > 0:
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, ExtraData=File.Path)
StrName = Line.split()[0] + u' ' + Line.split()[1]
if StrName:
if StrName.split()[1] not in ExistStrNameList:
ExistStrNameList.append(StrName.split()[1].strip())
elif StrName.split()[1] in [DT.TAB_INF_ABSTRACT, DT.TAB_INF_DESCRIPTION, \
DT.TAB_INF_BINARY_ABSTRACT, DT.TAB_INF_BINARY_DESCRIPTION, \
DT.TAB_DEC_PACKAGE_ABSTRACT, DT.TAB_DEC_PACKAGE_DESCRIPTION, \
DT.TAB_DEC_BINARY_ABSTRACT, DT.TAB_DEC_BINARY_DESCRIPTION]:
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, \
Message=ST.ERR_UNIPARSE_MULTI_ENTRY_EXIST % StrName.split()[1], \
ExtraData=File.Path)
if IsIncludeFile:
if StrName not in NewLines:
NewLines.append((Line[:Line.find(u'#language')]).strip())
else:
NewLines.append((Line[:Line.find(u'#language')]).strip())
NewLines.append((Line[Line.find(u'#language'):]).strip())
elif len(Line.split()) > 4 and Line.find(u'#language') > 0 and Line.find(u'"') > 0:
if Line[Line.find(u'#language')-1] != u' ' or \
Line[Line.find(u'#language')+len(u'#language')] != u' ':
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, ExtraData=File.Path)
if Line[Line.find(u'"')-1] != u' ':
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, ExtraData=File.Path)
StrName = Line.split()[0] + u' ' + Line.split()[1]
if StrName:
if StrName.split()[1] not in ExistStrNameList:
ExistStrNameList.append(StrName.split()[1].strip())
elif StrName.split()[1] in [DT.TAB_INF_ABSTRACT, DT.TAB_INF_DESCRIPTION, \
DT.TAB_INF_BINARY_ABSTRACT, DT.TAB_INF_BINARY_DESCRIPTION, \
DT.TAB_DEC_PACKAGE_ABSTRACT, DT.TAB_DEC_PACKAGE_DESCRIPTION, \
DT.TAB_DEC_BINARY_ABSTRACT, DT.TAB_DEC_BINARY_DESCRIPTION]:
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, \
Message=ST.ERR_UNIPARSE_MULTI_ENTRY_EXIST % StrName.split()[1], \
ExtraData=File.Path)
if IsIncludeFile:
if StrName not in NewLines:
NewLines.append((Line[:Line.find(u'#language')]).strip())
else:
NewLines.append((Line[:Line.find(u'#language')]).strip())
NewLines.append((Line[Line.find(u'#language'):Line.find(u'"')]).strip())
NewLines.append((Line[Line.find(u'"'):]).strip())
else:
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, ExtraData=File.Path)
elif Line.startswith(u'#language'):
if len(Line.split()) == 2:
if IsIncludeFile:
if StrName not in NewLines:
NewLines.append(StrName)
else:
NewLines.append(StrName)
NewLines.append(Line)
elif len(Line.split()) > 2 and Line.find(u'"') > 0:
if IsIncludeFile:
if StrName not in NewLines:
NewLines.append(StrName)
else:
NewLines.append(StrName)
NewLines.append((Line[:Line.find(u'"')]).strip())
NewLines.append((Line[Line.find(u'"'):]).strip())
else:
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, ExtraData=File.Path)
elif Line.startswith(u'"'):
if u'#string' in Line or u'#language' in Line:
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, ExtraData=File.Path)
NewLines.append(Line)
else:
print(Line)
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, ExtraData=File.Path)
if StrName and not StrName.split()[1].startswith(u'STR_'):
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, \
Message=ST.ERR_UNIPARSE_STRNAME_FORMAT_ERROR % StrName.split()[1], \
ExtraData=File.Path)
if StrName and not NewLines:
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, \
Message=ST.ERR_UNI_MISS_LANGENTRY % StrName, \
ExtraData=File.Path)
#
# Check Abstract, Description, BinaryAbstract and BinaryDescription order,
# should be Abstract, Description, BinaryAbstract, BinaryDescription
AbstractPosition = -1
DescriptionPosition = -1
BinaryAbstractPosition = -1
BinaryDescriptionPosition = -1
for StrName in ExistStrNameList:
if DT.TAB_HEADER_ABSTRACT.upper() in StrName:
if 'BINARY' in StrName:
BinaryAbstractPosition = ExistStrNameList.index(StrName)
else:
AbstractPosition = ExistStrNameList.index(StrName)
if DT.TAB_HEADER_DESCRIPTION.upper() in StrName:
if 'BINARY' in StrName:
BinaryDescriptionPosition = ExistStrNameList.index(StrName)
else:
DescriptionPosition = ExistStrNameList.index(StrName)
OrderList = sorted([AbstractPosition, DescriptionPosition])
BinaryOrderList = sorted([BinaryAbstractPosition, BinaryDescriptionPosition])
Min = OrderList[0]
Max = OrderList[1]
BinaryMin = BinaryOrderList[0]
BinaryMax = BinaryOrderList[1]
if BinaryDescriptionPosition > -1:
if not(BinaryDescriptionPosition == BinaryMax and BinaryAbstractPosition == BinaryMin and \
BinaryMax > Max):
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, \
Message=ST.ERR_UNIPARSE_ENTRY_ORDER_WRONG, \
ExtraData=File.Path)
elif BinaryAbstractPosition > -1:
if not(BinaryAbstractPosition > Max):
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, \
Message=ST.ERR_UNIPARSE_ENTRY_ORDER_WRONG, \
ExtraData=File.Path)
if DescriptionPosition > -1:
if not(DescriptionPosition == Max and AbstractPosition == Min and \
DescriptionPosition > AbstractPosition):
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID, \
Message=ST.ERR_UNIPARSE_ENTRY_ORDER_WRONG, \
ExtraData=File.Path)
if not self.UniFileHeader:
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID,
Message = ST.ERR_NO_SOURCE_HEADER,
ExtraData=File.Path)
return NewLines
#
# Load a .uni file
#
def LoadUniFile(self, File = None):
if File is None:
EdkLogger.Error("Unicode File Parser",
ToolError.PARSER_ERROR,
Message='No unicode file is given',
ExtraData=File.Path)
self.File = File
#
# Process special char in file
#
Lines = self.PreProcess(File)
#
# Get Unicode Information
#
for IndexI in range(len(Lines)):
Line = Lines[IndexI]
if (IndexI + 1) < len(Lines):
SecondLine = Lines[IndexI + 1]
if (IndexI + 2) < len(Lines):
ThirdLine = Lines[IndexI + 2]
#
# Get Language def information
#
if Line.find(u'#langdef ') >= 0:
self.GetLangDef(File, Line + u' ' + SecondLine)
continue
Name = ''
Language = ''
Value = ''
CombineToken = False
#
# Get string def information format as below
#
# #string MY_STRING_1
# #language eng
# "My first English string line 1"
# "My first English string line 2"
# #string MY_STRING_1
# #language spa
# "Mi segunda secuencia 1"
# "Mi segunda secuencia 2"
#
if Line.find(u'#string ') >= 0 and Line.find(u'#language ') < 0 and \
SecondLine.find(u'#string ') < 0 and SecondLine.find(u'#language ') >= 0 and \
ThirdLine.find(u'#string ') < 0 and ThirdLine.find(u'#language ') < 0:
if Line.find('"') > 0 or SecondLine.find('"') > 0:
EdkLogger.Error("Unicode File Parser", ToolError.FORMAT_INVALID,
Message=ST.ERR_UNIPARSE_DBLQUOTE_UNMATCHED,
ExtraData=File.Path)
Name = Line[Line.find(u'#string ') + len(u'#string ') : ].strip(' ')
Language = SecondLine[SecondLine.find(u'#language ') + len(u'#language ') : ].strip(' ')
for IndexJ in range(IndexI + 2, len(Lines)):
if Lines[IndexJ].find(u'#string ') < 0 and Lines[IndexJ].find(u'#language ') < 0 and \
Lines[IndexJ].strip().startswith(u'"') and Lines[IndexJ].strip().endswith(u'"'):
if Lines[IndexJ][-2] == ' ':
CombineToken = True
if CombineToken:
if Lines[IndexJ].strip()[1:-1].strip():
Value = Value + Lines[IndexJ].strip()[1:-1].rstrip() + ' '
else:
Value = Value + Lines[IndexJ].strip()[1:-1]
CombineToken = False
else:
Value = Value + Lines[IndexJ].strip()[1:-1] + '\r\n'
else:
IndexI = IndexJ
break
if Value.endswith('\r\n'):
Value = Value[: Value.rfind('\r\n')]
Language = GetLanguageCode(Language, self.IsCompatibleMode, self.File)
self.AddStringToList(Name, Language, Value)
continue
#
# Load multiple .uni files
#
def LoadUniFiles(self, FileList):
if len(FileList) > 0:
for File in FileList:
FilePath = File.Path.strip()
if FilePath.endswith('.uni') or FilePath.endswith('.UNI') or FilePath.endswith('.Uni'):
self.LoadUniFile(File)
#
# Add a string to list
#
def AddStringToList(self, Name, Language, Value, Token = 0, Referenced = False, UseOtherLangDef = '', Index = -1):
for LangNameItem in self.LanguageDef:
if Language == LangNameItem[0]:
break
if Language not in self.OrderedStringList:
self.OrderedStringList[Language] = []
self.OrderedStringDict[Language] = {}
IsAdded = True
if Name in self.OrderedStringDict[Language]:
IsAdded = False
if Value is not None:
ItemIndexInList = self.OrderedStringDict[Language][Name]
Item = self.OrderedStringList[Language][ItemIndexInList]
Item.UpdateValue(Value)
Item.UseOtherLangDef = ''
if IsAdded:
Token = len(self.OrderedStringList[Language])
if Index == -1:
self.OrderedStringList[Language].append(StringDefClassObject(Name,
Value,
Referenced,
Token,
UseOtherLangDef))
self.OrderedStringDict[Language][Name] = Token
for LangName in self.LanguageDef:
#
# New STRING token will be added into all language string lists.
# so that the unique STRING identifier is reserved for all languages in the package list.
#
if LangName[0] != Language:
if UseOtherLangDef != '':
OtherLangDef = UseOtherLangDef
else:
OtherLangDef = Language
self.OrderedStringList[LangName[0]].append(StringDefClassObject(Name,
'',
Referenced,
Token,
OtherLangDef))
self.OrderedStringDict[LangName[0]][Name] = len(self.OrderedStringList[LangName[0]]) - 1
else:
self.OrderedStringList[Language].insert(Index, StringDefClassObject(Name,
Value,
Referenced,
Token,
UseOtherLangDef))
self.OrderedStringDict[Language][Name] = Index
#
# Set the string as referenced
#
def SetStringReferenced(self, Name):
#
# String tokens are added in the same order in all language string lists.
# So, only update the status of the string token in the first language string list.
#
Lang = self.LanguageDef[0][0]
if Name in self.OrderedStringDict[Lang]:
ItemIndexInList = self.OrderedStringDict[Lang][Name]
Item = self.OrderedStringList[Lang][ItemIndexInList]
Item.Referenced = True
#
# Search the string in language definition by Name
#
def FindStringValue(self, Name, Lang):
if Name in self.OrderedStringDict[Lang]:
ItemIndexInList = self.OrderedStringDict[Lang][Name]
return self.OrderedStringList[Lang][ItemIndexInList]
return None
#
# Search the string in language definition by Token
#
def FindByToken(self, Token, Lang):
for Item in self.OrderedStringList[Lang]:
if Item.Token == Token:
return Item
return None
#
# Re-order strings and re-generate tokens
#
def ReToken(self):
if len(self.LanguageDef) == 0:
return None
#
# Re-token all language strings according to the status of the string tokens in the first language string list.
#
FirstLangName = self.LanguageDef[0][0]
# Convert the OrderedStringList to OrderedStringListByToken in order to facilitate future searches by token
for LangNameItem in self.LanguageDef:
self.OrderedStringListByToken[LangNameItem[0]] = {}
#
# Use small token values for all referenced string tokens.
#
RefToken = 0
for Index in range (0, len (self.OrderedStringList[FirstLangName])):
FirstLangItem = self.OrderedStringList[FirstLangName][Index]
if FirstLangItem.Referenced == True:
for LangNameItem in self.LanguageDef:
LangName = LangNameItem[0]
OtherLangItem = self.OrderedStringList[LangName][Index]
OtherLangItem.Referenced = True
OtherLangItem.Token = RefToken
self.OrderedStringListByToken[LangName][OtherLangItem.Token] = OtherLangItem
RefToken = RefToken + 1
#
# Use large token values for all unreferenced string tokens.
#
UnRefToken = 0
for Index in range (0, len (self.OrderedStringList[FirstLangName])):
FirstLangItem = self.OrderedStringList[FirstLangName][Index]
if FirstLangItem.Referenced == False:
for LangNameItem in self.LanguageDef:
LangName = LangNameItem[0]
OtherLangItem = self.OrderedStringList[LangName][Index]
OtherLangItem.Token = RefToken + UnRefToken
self.OrderedStringListByToken[LangName][OtherLangItem.Token] = OtherLangItem
UnRefToken = UnRefToken + 1
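# Illustrative sketch of the re-tokenizing pass (the string names are example values only):
# if the first language defines STR_A (referenced), STR_B (unreferenced) and STR_C
# (referenced), the referenced entries are numbered first in every language list, so
# STR_A -> 0 and STR_C -> 1 come before the unreferenced STR_B -> 2.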
#
# Show the instance itself
#
def ShowMe(self):
print(self.LanguageDef)
#print self.OrderedStringList
for Item in self.OrderedStringList:
print(Item)
for Member in self.OrderedStringList[Item]:
print(str(Member))
#
# Read content from '!include' UNI file
#
def ReadIncludeUNIfile(self, FilaPath):
if self.File:
pass
if not os.path.exists(FilaPath) or not os.path.isfile(FilaPath):
EdkLogger.Error("Unicode File Parser",
ToolError.FILE_NOT_FOUND,
ExtraData=FilaPath)
try:
FileIn = codecs.open(FilaPath, mode='rb', encoding='utf_8').readlines()
except UnicodeError as Xstr:
FileIn = codecs.open(FilaPath, mode='rb', encoding='utf_16').readlines()
except UnicodeError:
FileIn = codecs.open(FilaPath, mode='rb', encoding='utf_16_le').readlines()
except:
EdkLogger.Error("Unicode File Parser", ToolError.FILE_OPEN_FAILURE, ExtraData=FilaPath)
return FileIn
| edk2-master | BaseTools/Source/Python/UPT/Library/UniClassObject.py |
## @file
# This file is used to define common static strings and global data used by UPT
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
'''
GlobalData
'''
#
# The workspace directory
#
gWORKSPACE = '.'
gPACKAGE_PATH = None
#
# INF module directory
#
gINF_MODULE_DIR = "."
gINF_MODULE_NAME = ''
#
# The directory that holds UPT related files
#
gUPT_DIR = r"Conf/upt/"
#
# Log file for invalid meta-data files during force removing
#
gINVALID_MODULE_FILE = gUPT_DIR + r"Invalid_Modules.log"
#
# File name for content zip file in the distribution
#
gCONTENT_FILE = "dist.content"
#
# File name for XML file in the distribution
#
gDESC_FILE = 'dist.pkg'
#
# Case Insensitive flag
#
gCASE_INSENSITIVE = ''
#
# All Files dictionary
#
gALL_FILES = {}
#
# Database instance
#
gDB = None
#
# list for files that are found in module level but not in INF files,
# items are (File, ModulePath), all these should be relative to $(WORKSPACE)
#
gMISS_FILE_IN_MODLIST = []
#
# Global Current Line
#
gINF_CURRENT_LINE = None
#
# Global pkg list
#
gWSPKG_LIST = []
#
# Flag used to take WARN as ERROR.
# By default, only ERROR messages will break the tool's execution.
#
gWARNING_AS_ERROR = False
#
# Used to specify the temp directory to hold the unpacked distribution files
#
gUNPACK_DIR = []
#
# Flag used to mark whether the INF file is Binary INF or not.
#
gIS_BINARY_INF = False
#
# Used by FileHook module.
#
gRECOVERMGR = None
#
# Used by PCD parser
#
gPackageDict = {}
#
# Used by Library instance parser
# {FilePath: FileObj}
#
gLIBINSTANCEDICT = {}
#
# Store the list of DIST
#
gTO_BE_INSTALLED_DIST_LIST = []
| edk2-master | BaseTools/Source/Python/UPT/Library/GlobalData.py |
## @file
# This file is used to define common parsing related functions used in parsing
# INF/DEC/DSC process
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
Parsing
'''
from __future__ import absolute_import
##
# Import Modules
#
import os.path
import re
from Library.StringUtils import RaiseParserError
from Library.StringUtils import GetSplitValueList
from Library.StringUtils import CheckFileType
from Library.StringUtils import CheckFileExist
from Library.StringUtils import CleanString
from Library.StringUtils import NormPath
from Logger.ToolError import FILE_NOT_FOUND
from Logger.ToolError import FatalError
from Logger.ToolError import FORMAT_INVALID
from Library import DataType
from Library.Misc import GuidStructureStringToGuidString
from Library.Misc import CheckGuidRegFormat
from Logger import StringTable as ST
import Logger.Log as Logger
from Parser.DecParser import Dec
from . import GlobalData
gPKG_INFO_DICT = {}
## GetBuildOption
#
# Parse a string with format "[<Family>:]<ToolFlag>=Flag"
# Return (Family, ToolFlag, Flag)
#
# @param String: String with BuildOption statement
# @param File: The file which defines build option, used in error report
#
def GetBuildOption(String, File, LineNo= -1):
(Family, ToolChain, Flag) = ('', '', '')
if String.find(DataType.TAB_EQUAL_SPLIT) < 0:
RaiseParserError(String, 'BuildOptions', File, \
'[<Family>:]<ToolFlag>=Flag', LineNo)
else:
List = GetSplitValueList(String, DataType.TAB_EQUAL_SPLIT, MaxSplit=1)
if List[0].find(':') > -1:
Family = List[0][ : List[0].find(':')].strip()
ToolChain = List[0][List[0].find(':') + 1 : ].strip()
else:
ToolChain = List[0].strip()
Flag = List[1].strip()
return (Family, ToolChain, Flag)
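# Illustrative sketch (the flag text is an assumed example, not taken from a real platform):
#
#   GetBuildOption('GCC:*_*_*_CC_FLAGS = -DMDEPKG_NDEBUG', File)
#       -> ('GCC', '*_*_*_CC_FLAGS', '-DMDEPKG_NDEBUG')
#   GetBuildOption('*_*_*_CC_FLAGS = /W4', File)
#       -> ('', '*_*_*_CC_FLAGS', '/W4')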
## Get Library Class
#
# Get Library of Dsc as <LibraryClassKeyWord>|<LibraryInstance>
#
# @param Item: String as <LibraryClassKeyWord>|<LibraryInstance>
# @param ContainerFile: The file which describes the library class, used for
# error report
#
def GetLibraryClass(Item, ContainerFile, WorkspaceDir, LineNo= -1):
List = GetSplitValueList(Item[0])
SupMod = DataType.SUP_MODULE_LIST_STRING
if len(List) != 2:
RaiseParserError(Item[0], 'LibraryClasses', ContainerFile, \
'<LibraryClassKeyWord>|<LibraryInstance>')
else:
CheckFileType(List[1], '.Inf', ContainerFile, \
'library class instance', Item[0], LineNo)
CheckFileExist(WorkspaceDir, List[1], ContainerFile, \
'LibraryClasses', Item[0], LineNo)
if Item[1] != '':
SupMod = Item[1]
return (List[0], List[1], SupMod)
## Get Library Class of Inf
#
# Get Library of Dsc as <LibraryClassKeyWord>[|<LibraryInstance>]
# [|<TokenSpaceGuidCName>.<PcdCName>]
#
# @param Item: String as <LibraryClassKeyWord>|<LibraryInstance>
# @param ContainerFile: The file which describes the library class, used for
# error report
#
def GetLibraryClassOfInf(Item, ContainerFile, WorkspaceDir, LineNo= -1):
ItemList = GetSplitValueList((Item[0] + DataType.TAB_VALUE_SPLIT * 2))
SupMod = DataType.SUP_MODULE_LIST_STRING
if len(ItemList) > 5:
RaiseParserError\
(Item[0], 'LibraryClasses', ContainerFile, \
'<LibraryClassKeyWord>[|<LibraryInstance>]\
[|<TokenSpaceGuidCName>.<PcdCName>]')
else:
CheckFileType(ItemList[1], '.Inf', ContainerFile, 'LibraryClasses', \
Item[0], LineNo)
CheckFileExist(WorkspaceDir, ItemList[1], ContainerFile, \
'LibraryClasses', Item[0], LineNo)
if ItemList[2] != '':
CheckPcdTokenInfo(ItemList[2], 'LibraryClasses', \
ContainerFile, LineNo)
if Item[1] != '':
SupMod = Item[1]
return (ItemList[0], ItemList[1], ItemList[2], SupMod)
## CheckPcdTokenInfo
#
# Check if PcdTokenInfo is following <TokenSpaceGuidCName>.<PcdCName>
#
# @param TokenInfoString: String to be checked
# @param Section: Used for error report
# @param File: Used for error report
#
def CheckPcdTokenInfo(TokenInfoString, Section, File, LineNo= -1):
Format = '<TokenSpaceGuidCName>.<PcdCName>'
if TokenInfoString != '' and TokenInfoString is not None:
TokenInfoList = GetSplitValueList(TokenInfoString, DataType.TAB_SPLIT)
if len(TokenInfoList) == 2:
return True
RaiseParserError(TokenInfoString, Section, File, Format, LineNo)
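# Illustrative sketch (the token names are examples only):
#
#   CheckPcdTokenInfo('gEfiMdePkgTokenSpaceGuid.PcdDebugPropertyMask', 'PcdsFixedAtBuild', File)
#       -> True
#   CheckPcdTokenInfo('PcdDebugPropertyMask', 'PcdsFixedAtBuild', File)
#       reports a parser error because the <TokenSpaceGuidCName>. prefix is missing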
## Get Pcd
#
# Get Pcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|<Value>
# [|<Type>|<MaximumDatumSize>]
#
# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>|
# <Value>[|<Type>|<MaximumDatumSize>]
# @param ContainerFile: The file which describes the pcd, used for error
# report
#
def GetPcd(Item, Type, ContainerFile, LineNo= -1):
TokenGuid, TokenName, Value, MaximumDatumSize, Token = '', '', '', '', ''
List = GetSplitValueList(Item + DataType.TAB_VALUE_SPLIT * 2)
if len(List) < 4 or len(List) > 6:
RaiseParserError(Item, 'Pcds' + Type, ContainerFile, \
'<PcdTokenSpaceGuidCName>.<TokenCName>|<Value>\
[|<Type>|<MaximumDatumSize>]', LineNo)
else:
Value = List[1]
MaximumDatumSize = List[2]
Token = List[3]
if CheckPcdTokenInfo(List[0], 'Pcds' + Type, ContainerFile, LineNo):
(TokenGuid, TokenName) = GetSplitValueList(List[0], DataType.TAB_SPLIT)
return (TokenName, TokenGuid, Value, MaximumDatumSize, Token, Type)
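# Illustrative sketch (the PCD entry is an assumed example; the tuple simply mirrors the
# return order used by the code above):
#
#   GetPcd('gTokenSpaceGuid.PcdSample|0x1000|UINT32|0', 'FixedAtBuild', File)
#       -> ('PcdSample', 'gTokenSpaceGuid', '0x1000', 'UINT32', '0', 'FixedAtBuild')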
## Get FeatureFlagPcd
#
# Get FeatureFlagPcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|TRUE/FALSE
#
# @param Item: String as <PcdTokenSpaceGuidCName>
# .<TokenCName>|TRUE/FALSE
# @param ContainerFile: The file which describes the pcd, used for error
# report
#
def GetFeatureFlagPcd(Item, Type, ContainerFile, LineNo= -1):
TokenGuid, TokenName, Value = '', '', ''
List = GetSplitValueList(Item)
if len(List) != 2:
RaiseParserError(Item, 'Pcds' + Type, ContainerFile, \
'<PcdTokenSpaceGuidCName>.<TokenCName>|TRUE/FALSE', \
LineNo)
else:
Value = List[1]
if CheckPcdTokenInfo(List[0], 'Pcds' + Type, ContainerFile, LineNo):
(TokenGuid, TokenName) = GetSplitValueList(List[0], DataType.TAB_SPLIT)
return (TokenName, TokenGuid, Value, Type)
## Get DynamicDefaultPcd
#
# Get DynamicDefaultPcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>
# |<Value>[|<DatumType>[|<MaxDatumSize>]]
#
# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>|<Value>
# [|<DatumType>[|<MaxDatumSize>]]
# @param ContainerFile: The file which describes the pcd, used for error
# report
#
def GetDynamicDefaultPcd(Item, Type, ContainerFile, LineNo= -1):
TokenGuid, TokenName, Value, DatumTyp, MaxDatumSize = '', '', '', '', ''
List = GetSplitValueList(Item + DataType.TAB_VALUE_SPLIT * 2)
if len(List) < 4 or len(List) > 8:
RaiseParserError(Item, 'Pcds' + Type, ContainerFile, \
'<PcdTokenSpaceGuidCName>.<TokenCName>|<Value>\
[|<DatumTyp>[|<MaxDatumSize>]]', LineNo)
else:
Value = List[1]
DatumTyp = List[2]
MaxDatumSize = List[3]
if CheckPcdTokenInfo(List[0], 'Pcds' + Type, ContainerFile, LineNo):
(TokenGuid, TokenName) = GetSplitValueList(List[0], DataType.TAB_SPLIT)
return (TokenName, TokenGuid, Value, DatumTyp, MaxDatumSize, Type)
## Get DynamicHiiPcd
#
# Get DynamicHiiPcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|<String>|
# <VariableGuidCName>|<VariableOffset>[|<DefaultValue>[|<MaximumDatumSize>]]
#
# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>|<String>|
# <VariableGuidCName>|<VariableOffset>[|<DefaultValue>[|<MaximumDatumSize>]]
# @param ContainerFile: The file which describes the pcd, used for error
# report
#
def GetDynamicHiiPcd(Item, Type, ContainerFile, LineNo= -1):
TokenGuid, TokenName, List1, List2, List3, List4, List5 = \
'', '', '', '', '', '', ''
List = GetSplitValueList(Item + DataType.TAB_VALUE_SPLIT * 2)
if len(List) < 6 or len(List) > 8:
RaiseParserError(Item, 'Pcds' + Type, ContainerFile, \
'<PcdTokenSpaceGuidCName>.<TokenCName>|<String>|\
<VariableGuidCName>|<VariableOffset>[|<DefaultValue>\
[|<MaximumDatumSize>]]', LineNo)
else:
List1, List2, List3, List4, List5 = \
List[1], List[2], List[3], List[4], List[5]
if CheckPcdTokenInfo(List[0], 'Pcds' + Type, ContainerFile, LineNo):
(TokenGuid, TokenName) = GetSplitValueList(List[0], DataType.TAB_SPLIT)
return (TokenName, TokenGuid, List1, List2, List3, List4, List5, Type)
## Get DynamicVpdPcd
#
# Get DynamicVpdPcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|
# <VpdOffset>[|<MaximumDatumSize>]
#
# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>
# |<VpdOffset>[|<MaximumDatumSize>]
# @param ContainerFile: The file which describes the pcd, used for error
# report
#
def GetDynamicVpdPcd(Item, Type, ContainerFile, LineNo= -1):
TokenGuid, TokenName, List1, List2 = '', '', '', ''
List = GetSplitValueList(Item + DataType.TAB_VALUE_SPLIT)
if len(List) < 3 or len(List) > 4:
RaiseParserError(Item, 'Pcds' + Type, ContainerFile, \
'<PcdTokenSpaceGuidCName>.<TokenCName>|<VpdOffset>\
[|<MaximumDatumSize>]', LineNo)
else:
List1, List2 = List[1], List[2]
if CheckPcdTokenInfo(List[0], 'Pcds' + Type, ContainerFile, LineNo):
(TokenGuid, TokenName) = GetSplitValueList(List[0], DataType.TAB_SPLIT)
return (TokenName, TokenGuid, List1, List2, Type)
## GetComponent
#
# Parse block of the components defined in dsc file
# Set KeyValues as [ ['component name', [lib1, lib2, lib3],
# [bo1, bo2, bo3], [pcd1, pcd2, pcd3]], ...]
#
# @param Lines: The content to be parsed
# @param KeyValues: To store data after parsing
#
def GetComponent(Lines, KeyValues):
(FindBlock, FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
FindPcdsPatchableInModule, FindPcdsFixedAtBuild, FindPcdsDynamic, \
FindPcdsDynamicEx) = (False, False, False, False, False, False, False, \
False)
ListItem = None
LibraryClassItem = []
BuildOption = []
Pcd = []
for Line in Lines:
Line = Line[0]
#
# Ignore !include statement
#
if Line.upper().find(DataType.TAB_INCLUDE.upper() + ' ') > -1 or \
Line.upper().find(DataType.TAB_DEFINE + ' ') > -1:
continue
if FindBlock == False:
ListItem = Line
#
# find '{' at line tail
#
if Line.endswith('{'):
FindBlock = True
ListItem = CleanString(Line.rsplit('{', 1)[0], \
DataType.TAB_COMMENT_SPLIT)
#
# Parse a block content
#
if FindBlock:
if Line.find('<LibraryClasses>') != -1:
(FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
FindPcdsDynamic, FindPcdsDynamicEx) = \
(True, False, False, False, False, False, False)
continue
if Line.find('<BuildOptions>') != -1:
(FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
FindPcdsDynamic, FindPcdsDynamicEx) = \
(False, True, False, False, False, False, False)
continue
if Line.find('<PcdsFeatureFlag>') != -1:
(FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
FindPcdsDynamic, FindPcdsDynamicEx) = \
(False, False, True, False, False, False, False)
continue
if Line.find('<PcdsPatchableInModule>') != -1:
(FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
FindPcdsDynamic, FindPcdsDynamicEx) = \
(False, False, False, True, False, False, False)
continue
if Line.find('<PcdsFixedAtBuild>') != -1:
(FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
FindPcdsDynamic, FindPcdsDynamicEx) = \
(False, False, False, False, True, False, False)
continue
if Line.find('<PcdsDynamic>') != -1:
(FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
FindPcdsDynamic, FindPcdsDynamicEx) = \
(False, False, False, False, False, True, False)
continue
if Line.find('<PcdsDynamicEx>') != -1:
(FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
FindPcdsDynamic, FindPcdsDynamicEx) = \
(False, False, False, False, False, False, True)
continue
if Line.endswith('}'):
#
# find '}' at line tail
#
KeyValues.append([ListItem, LibraryClassItem, \
BuildOption, Pcd])
(FindBlock, FindLibraryClass, FindBuildOption, \
FindPcdsFeatureFlag, FindPcdsPatchableInModule, \
FindPcdsFixedAtBuild, FindPcdsDynamic, FindPcdsDynamicEx) = \
(False, False, False, False, False, False, False, False)
LibraryClassItem, BuildOption, Pcd = [], [], []
continue
if FindBlock:
if FindLibraryClass:
LibraryClassItem.append(Line)
elif FindBuildOption:
BuildOption.append(Line)
elif FindPcdsFeatureFlag:
Pcd.append((DataType.TAB_PCDS_FEATURE_FLAG_NULL, Line))
elif FindPcdsPatchableInModule:
Pcd.append((DataType.TAB_PCDS_PATCHABLE_IN_MODULE_NULL, Line))
elif FindPcdsFixedAtBuild:
Pcd.append((DataType.TAB_PCDS_FIXED_AT_BUILD_NULL, Line))
elif FindPcdsDynamic:
Pcd.append((DataType.TAB_PCDS_DYNAMIC_DEFAULT_NULL, Line))
elif FindPcdsDynamicEx:
Pcd.append((DataType.TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL, Line))
else:
KeyValues.append([ListItem, [], [], []])
return True
## GetExec
#
# Parse a string with format "InfFilename [EXEC = ExecFilename]"
# Return (InfFilename, ExecFilename)
#
# @param String: String with EXEC statement
#
def GetExec(String):
InfFilename = ''
ExecFilename = ''
if String.find('EXEC') > -1:
InfFilename = String[ : String.find('EXEC')].strip()
ExecFilename = String[String.find('EXEC') + len('EXEC') : ].strip()
else:
InfFilename = String.strip()
return (InfFilename, ExecFilename)
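# Illustrative sketch (the file names are assumed examples). Note that everything after
# the EXEC keyword, including a leading '=', is returned verbatim:
#
#   GetExec('Sample.inf EXEC = Setup.exe')  -> ('Sample.inf', '= Setup.exe')
#   GetExec('Sample.inf')                   -> ('Sample.inf', '')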
## GetComponents
#
# Parse block of the components defined in dsc file
# Set KeyValues as [ ['component name', [lib1, lib2, lib3], [bo1, bo2, bo3],
# [pcd1, pcd2, pcd3]], ...]
#
# @param Lines: The content to be parsed
# @param Key: Reserved
# @param KeyValues: To store data after parsing
# @param CommentCharacter: Comment char, used to ignore comment content
#
# @retval True Get component successfully
#
def GetComponents(Lines, KeyValues, CommentCharacter):
if Lines.find(DataType.TAB_SECTION_END) > -1:
Lines = Lines.split(DataType.TAB_SECTION_END, 1)[1]
(FindBlock, FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
FindPcdsPatchableInModule, FindPcdsFixedAtBuild, FindPcdsDynamic, \
FindPcdsDynamicEx) = \
(False, False, False, False, False, False, False, False)
ListItem = None
LibraryClassItem = []
BuildOption = []
Pcd = []
LineList = Lines.split('\n')
for Line in LineList:
Line = CleanString(Line, CommentCharacter)
if Line is None or Line == '':
continue
if FindBlock == False:
ListItem = Line
#
# find '{' at line tail
#
if Line.endswith('{'):
FindBlock = True
ListItem = CleanString(Line.rsplit('{', 1)[0], CommentCharacter)
#
# Parse a block content
#
if FindBlock:
if Line.find('<LibraryClasses>') != -1:
(FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
FindPcdsDynamic, FindPcdsDynamicEx) = \
(True, False, False, False, False, False, False)
continue
if Line.find('<BuildOptions>') != -1:
(FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
FindPcdsDynamic, FindPcdsDynamicEx) = \
(False, True, False, False, False, False, False)
continue
if Line.find('<PcdsFeatureFlag>') != -1:
(FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
FindPcdsDynamic, FindPcdsDynamicEx) = \
(False, False, True, False, False, False, False)
continue
if Line.find('<PcdsPatchableInModule>') != -1:
(FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
FindPcdsDynamic, FindPcdsDynamicEx) = \
(False, False, False, True, False, False, False)
continue
if Line.find('<PcdsFixedAtBuild>') != -1:
(FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
FindPcdsDynamic, FindPcdsDynamicEx) = \
(False, False, False, False, True, False, False)
continue
if Line.find('<PcdsDynamic>') != -1:
(FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
FindPcdsDynamic, FindPcdsDynamicEx) = \
(False, False, False, False, False, True, False)
continue
if Line.find('<PcdsDynamicEx>') != -1:
(FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
FindPcdsDynamic, FindPcdsDynamicEx) = \
(False, False, False, False, False, False, True)
continue
if Line.endswith('}'):
#
# find '}' at line tail
#
KeyValues.append([ListItem, LibraryClassItem, BuildOption, \
Pcd])
(FindBlock, FindLibraryClass, FindBuildOption, \
FindPcdsFeatureFlag, FindPcdsPatchableInModule, \
FindPcdsFixedAtBuild, FindPcdsDynamic, FindPcdsDynamicEx) = \
(False, False, False, False, False, False, False, False)
LibraryClassItem, BuildOption, Pcd = [], [], []
continue
if FindBlock:
if FindLibraryClass:
LibraryClassItem.append(Line)
elif FindBuildOption:
BuildOption.append(Line)
elif FindPcdsFeatureFlag:
Pcd.append((DataType.TAB_PCDS_FEATURE_FLAG, Line))
elif FindPcdsPatchableInModule:
Pcd.append((DataType.TAB_PCDS_PATCHABLE_IN_MODULE, Line))
elif FindPcdsFixedAtBuild:
Pcd.append((DataType.TAB_PCDS_FIXED_AT_BUILD, Line))
elif FindPcdsDynamic:
Pcd.append((DataType.TAB_PCDS_DYNAMIC, Line))
elif FindPcdsDynamicEx:
Pcd.append((DataType.TAB_PCDS_DYNAMIC_EX, Line))
else:
KeyValues.append([ListItem, [], [], []])
return True
## Get Source
#
# Get Source of Inf as <Filename>[|<Family>[|<TagName>[|<ToolCode>
# [|<PcdFeatureFlag>]]]]
#
# @param Item: String as <Filename>[|<Family>[|<TagName>[|<ToolCode>
# [|<PcdFeatureFlag>]]]]
# @param ContainerFile: The file which describes the library class, used
# for error report
#
def GetSource(Item, ContainerFile, FileRelativePath, LineNo= -1):
ItemNew = Item + DataType.TAB_VALUE_SPLIT * 4
List = GetSplitValueList(ItemNew)
if len(List) < 5 or len(List) > 9:
RaiseParserError(Item, 'Sources', ContainerFile, \
'<Filename>[|<Family>[|<TagName>[|<ToolCode>\
[|<PcdFeatureFlag>]]]]', LineNo)
List[0] = NormPath(List[0])
CheckFileExist(FileRelativePath, List[0], ContainerFile, 'Sources', \
Item, LineNo)
if List[4] != '':
CheckPcdTokenInfo(List[4], 'Sources', ContainerFile, LineNo)
return (List[0], List[1], List[2], List[3], List[4])
## Get Binary
#
# Get Binary of Inf as <FileType>|<Filename>[|<Target>
# [|<TokenSpaceGuidCName>.<PcdCName>]]
#
# @param Item: String as <FileType>|<Filename>[|<Target>
# [|<TokenSpaceGuidCName>.<PcdCName>]]
# @param ContainerFile: The file which describes the library class,
# used for error report
#
def GetBinary(Item, ContainerFile, LineNo= -1):
ItemNew = Item + DataType.TAB_VALUE_SPLIT
List = GetSplitValueList(ItemNew)
if len(List) < 3 or len(List) > 5:
RaiseParserError(Item, 'Binaries', ContainerFile, \
"<FileType>|<Filename>[|<Target>\
[|<TokenSpaceGuidCName>.<PcdCName>]]", LineNo)
if len(List) >= 4:
if List[3] != '':
CheckPcdTokenInfo(List[3], 'Binaries', ContainerFile, LineNo)
return (List[0], List[1], List[2], List[3])
elif len(List) == 3:
return (List[0], List[1], List[2], '')
## Get Guids/Protocols/Ppis
#
# Get Guids/Protocols/Ppis of Inf as <GuidCName>[|<PcdFeatureFlag>]
#
# @param Item: String as <GuidCName>[|<PcdFeatureFlag>]
#
def GetGuidsProtocolsPpisOfInf(Item):
ItemNew = Item + DataType.TAB_VALUE_SPLIT
List = GetSplitValueList(ItemNew)
return (List[0], List[1])
## Get Guids/Protocols/Ppis
#
# Get Guids/Protocols/Ppis of Dec as <GuidCName>=<GuidValue>
#
# @param Item: String as <GuidCName>=<GuidValue>
# @param Type: Type of parsing string
# @param ContainerFile: The file which describes the library class,
# used for error report
#
def GetGuidsProtocolsPpisOfDec(Item, Type, ContainerFile, LineNo= -1):
List = GetSplitValueList(Item, DataType.TAB_EQUAL_SPLIT)
if len(List) != 2:
RaiseParserError(Item, Type, ContainerFile, '<CName>=<GuidValue>', \
LineNo)
#
#convert C-Format Guid to Register Format
#
if List[1][0] == '{' and List[1][-1] == '}':
RegisterFormatGuid = GuidStructureStringToGuidString(List[1])
if RegisterFormatGuid == '':
RaiseParserError(Item, Type, ContainerFile, \
'CFormat or RegisterFormat', LineNo)
else:
if CheckGuidRegFormat(List[1]):
RegisterFormatGuid = List[1]
else:
RaiseParserError(Item, Type, ContainerFile, \
'CFormat or RegisterFormat', LineNo)
return (List[0], RegisterFormatGuid)
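# Illustrative sketch (the GUID value is an arbitrary example): both forms below resolve
# to the same registry-format string.
#
#   GetGuidsProtocolsPpisOfDec(
#       'gExampleGuid = {0x12345678, 0x1234, 0x5678, {0x90, 0xab, 0xcd, 0xef, 0x12, 0x34, 0x56, 0x78}}',
#       'Guids', File)
#       -> ('gExampleGuid', '12345678-1234-5678-90ab-cdef12345678')
#   GetGuidsProtocolsPpisOfDec('gExampleGuid = 12345678-1234-5678-90ab-cdef12345678', 'Guids', File)
#       -> ('gExampleGuid', '12345678-1234-5678-90ab-cdef12345678')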
## GetPackage
#
# Get Package of Inf as <PackagePath>[|<PcdFeatureFlag>]
#
# @param Item: String as <PackagePath>[|<PcdFeatureFlag>]
# @param Type: Type of parsing string
# @param ContainerFile: The file which describes the library class,
# used for error report
#
def GetPackage(Item, ContainerFile, FileRelativePath, LineNo= -1):
ItemNew = Item + DataType.TAB_VALUE_SPLIT
List = GetSplitValueList(ItemNew)
CheckFileType(List[0], '.Dec', ContainerFile, 'package', List[0], LineNo)
CheckFileExist(FileRelativePath, List[0], ContainerFile, 'Packages', \
List[0], LineNo)
if List[1] != '':
CheckPcdTokenInfo(List[1], 'Packages', ContainerFile, LineNo)
return (List[0], List[1])
## Get Pcd Values of Inf
#
# Get Pcd of Inf as <TokenSpaceGuidCName>.<PcdCName>[|<Value>]
#
# @param Item: The string describes pcd
# @param Type: The type of Pcd
# @param File: The file which describes the pcd, used for error report
#
def GetPcdOfInf(Item, Type, File, LineNo):
Format = '<TokenSpaceGuidCName>.<PcdCName>[|<Value>]'
TokenGuid, TokenName, Value, InfType = '', '', '', ''
if Type == DataType.TAB_PCDS_FIXED_AT_BUILD:
InfType = DataType.TAB_INF_FIXED_PCD
elif Type == DataType.TAB_PCDS_PATCHABLE_IN_MODULE:
InfType = DataType.TAB_INF_PATCH_PCD
elif Type == DataType.TAB_PCDS_FEATURE_FLAG:
InfType = DataType.TAB_INF_FEATURE_PCD
elif Type == DataType.TAB_PCDS_DYNAMIC_EX:
InfType = DataType.TAB_INF_PCD_EX
elif Type == DataType.TAB_PCDS_DYNAMIC:
InfType = DataType.TAB_INF_PCD
List = GetSplitValueList(Item, DataType.TAB_VALUE_SPLIT, 1)
TokenInfo = GetSplitValueList(List[0], DataType.TAB_SPLIT)
if len(TokenInfo) != 2:
RaiseParserError(Item, InfType, File, Format, LineNo)
else:
TokenGuid = TokenInfo[0]
TokenName = TokenInfo[1]
if len(List) > 1:
Value = List[1]
else:
Value = None
return (TokenGuid, TokenName, Value, InfType)
## Get Pcd Values of Dec
#
# Get Pcd of Dec as <TokenSpcCName>.<TokenCName>|<Value>|<DatumType>|<Token>
# @param Item: Pcd item
# @param Type: Pcd type
# @param File: Dec file
# @param LineNo: Line number
#
def GetPcdOfDec(Item, Type, File, LineNo= -1):
Format = '<TokenSpaceGuidCName>.<PcdCName>|<Value>|<DatumType>|<Token>'
TokenGuid, TokenName, Value, DatumType, Token = '', '', '', '', ''
List = GetSplitValueList(Item)
if len(List) != 4:
RaiseParserError(Item, 'Pcds' + Type, File, Format, LineNo)
else:
Value = List[1]
DatumType = List[2]
Token = List[3]
TokenInfo = GetSplitValueList(List[0], DataType.TAB_SPLIT)
if len(TokenInfo) != 2:
RaiseParserError(Item, 'Pcds' + Type, File, Format, LineNo)
else:
TokenGuid = TokenInfo[0]
TokenName = TokenInfo[1]
return (TokenGuid, TokenName, Value, DatumType, Token, Type)
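## _GetPcdOfDecExample
#
# Editor's illustrative sketch, not part of the original UPT sources. It shows
# the assumed behavior of GetPcdOfDec above for a well-formed DEC PCD entry;
# the token space GUID and PCD names used here are hypothetical, and the
# example assumes the usual '|' value split and '.' token split characters.
#
def _GetPcdOfDecExample():
    # a 4-field DEC PCD entry is split into GUID, name, value, datum type and token
    assert GetPcdOfDec('gExampleTokenSpaceGuid.PcdSample|0x0|UINT32|0x00000001',
                       'FixedAtBuild', 'Example.dec') == \
        ('gExampleTokenSpaceGuid', 'PcdSample', '0x0', 'UINT32',
         '0x00000001', 'FixedAtBuild')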
## Parse DEFINE statement
#
# Get DEFINE macros
#
# @param LineValue: A DEFINE line value
# @param StartLine: A DEFINE start line
# @param Table: A table
# @param FileID: File ID
# @param Filename: File name
# @param SectionName: DEFINE section name
# @param SectionModel: DEFINE section model
# @param Arch: DEFINE arch
#
def ParseDefine(LineValue, StartLine, Table, FileID, SectionName, \
SectionModel, Arch):
Logger.Debug(Logger.DEBUG_2, ST.MSG_DEFINE_STATEMENT_FOUND % (LineValue, \
SectionName))
Define = \
GetSplitValueList(CleanString\
(LineValue[LineValue.upper().\
find(DataType.TAB_DEFINE.upper() + ' ') + \
len(DataType.TAB_DEFINE + ' ') : ]), \
DataType.TAB_EQUAL_SPLIT, 1)
Table.Insert(DataType.MODEL_META_DATA_DEFINE, Define[0], Define[1], '', \
'', '', Arch, SectionModel, FileID, StartLine, -1, \
StartLine, -1, 0)
## InsertSectionItems
#
# Insert item data of a section to a dict
#
# @param Model: A model
# @param CurrentSection: Current section
# @param SectionItemList: Section item list
# @param ArchList: Arch list
# @param ThirdList: Third list
# @param RecordSet: Record set
#
def InsertSectionItems(Model, SectionItemList, ArchList, \
ThirdList, RecordSet):
#
# Insert each item data of a section
#
for Index in range(0, len(ArchList)):
Arch = ArchList[Index]
Third = ThirdList[Index]
if Arch == '':
Arch = DataType.TAB_ARCH_COMMON
Records = RecordSet[Model]
for SectionItem in SectionItemList:
LineValue, StartLine, Comment = SectionItem[0], \
SectionItem[1], SectionItem[2]
Logger.Debug(4, ST.MSG_PARSING % LineValue)
#
# And then parse DEFINE statement
#
if LineValue.upper().find(DataType.TAB_DEFINE.upper() + ' ') > -1:
continue
#
# At last parse other sections
#
IdNum = -1
Records.append([LineValue, Arch, StartLine, IdNum, Third, Comment])
if RecordSet != {}:
RecordSet[Model] = Records
## GenMetaDatSectionItem
#
# @param Key: A key
# @param Value: A value
# @param List: A list
#
def GenMetaDatSectionItem(Key, Value, List):
if Key not in List:
List[Key] = [Value]
else:
List[Key].append(Value)
## GetPkgInfoFromDec
#
# get package name, guid, version info from dec files
#
# @param Path: File path
#
def GetPkgInfoFromDec(Path):
PkgName = None
PkgGuid = None
PkgVersion = None
Path = Path.replace('\\', '/')
if not os.path.exists(Path):
Logger.Error("\nUPT", FILE_NOT_FOUND, File=Path)
if Path in gPKG_INFO_DICT:
return gPKG_INFO_DICT[Path]
try:
DecParser = None
if Path not in GlobalData.gPackageDict:
DecParser = Dec(Path)
GlobalData.gPackageDict[Path] = DecParser
else:
DecParser = GlobalData.gPackageDict[Path]
PkgName = DecParser.GetPackageName()
PkgGuid = DecParser.GetPackageGuid()
PkgVersion = DecParser.GetPackageVersion()
gPKG_INFO_DICT[Path] = (PkgName, PkgGuid, PkgVersion)
return PkgName, PkgGuid, PkgVersion
except FatalError:
return None, None, None
## GetWorkspacePackage
#
# Get a list of workspace package information.
#
def GetWorkspacePackage():
DecFileList = []
WorkspaceDir = GlobalData.gWORKSPACE
PackageDir = GlobalData.gPACKAGE_PATH
for PkgRoot in [WorkspaceDir] + PackageDir:
for Root, Dirs, Files in os.walk(PkgRoot):
if 'CVS' in Dirs:
Dirs.remove('CVS')
if '.svn' in Dirs:
Dirs.remove('.svn')
for Dir in Dirs:
if Dir.startswith('.'):
Dirs.remove(Dir)
for FileSp in Files:
if FileSp.startswith('.'):
continue
Ext = os.path.splitext(FileSp)[1]
if Ext.lower() in ['.dec']:
DecFileList.append\
(os.path.normpath(os.path.join(Root, FileSp)))
#
    # extract package name, guid and version info from the DEC file list
#
PkgList = []
for DecFile in DecFileList:
(PkgName, PkgGuid, PkgVersion) = GetPkgInfoFromDec(DecFile)
if PkgName and PkgGuid and PkgVersion:
PkgList.append((PkgName, PkgGuid, PkgVersion, DecFile))
return PkgList
## GetWorkspaceModule
#
# Get a list of workspace modules.
#
def GetWorkspaceModule():
InfFileList = []
WorkspaceDir = GlobalData.gWORKSPACE
for Root, Dirs, Files in os.walk(WorkspaceDir):
if 'CVS' in Dirs:
Dirs.remove('CVS')
if '.svn' in Dirs:
Dirs.remove('.svn')
if 'Build' in Dirs:
Dirs.remove('Build')
for Dir in Dirs:
if Dir.startswith('.'):
Dirs.remove(Dir)
for FileSp in Files:
if FileSp.startswith('.'):
continue
Ext = os.path.splitext(FileSp)[1]
if Ext.lower() in ['.inf']:
InfFileList.append\
(os.path.normpath(os.path.join(Root, FileSp)))
return InfFileList
## MacroParser used to parse macro definition
#
# @param Line: The content contain linestring and line number
# @param FileName: The meta-file file name
# @param SectionType: Section for the Line belong to
# @param FileLocalMacros: A dict containing macros defined in the [Defines] section.
#
def MacroParser(Line, FileName, SectionType, FileLocalMacros):
MacroDefPattern = re.compile("^(DEFINE)[ \t]+")
LineContent = Line[0]
LineNo = Line[1]
Match = MacroDefPattern.match(LineContent)
if not Match:
#
        # Not a 'DEFINE' statement, no macro to parse
#
return None, None
TokenList = GetSplitValueList(LineContent[Match.end(1):], \
DataType.TAB_EQUAL_SPLIT, 1)
#
# Syntax check
#
if not TokenList[0]:
Logger.Error('Parser', FORMAT_INVALID, ST.ERR_MACRONAME_NOGIVEN,
ExtraData=LineContent, File=FileName, Line=LineNo)
if len(TokenList) < 2:
Logger.Error('Parser', FORMAT_INVALID, ST.ERR_MACROVALUE_NOGIVEN,
ExtraData=LineContent, File=FileName, Line=LineNo)
Name, Value = TokenList
#
# DEFINE defined macros
#
if SectionType == DataType.MODEL_META_DATA_HEADER:
FileLocalMacros[Name] = Value
ReIsValidMacroName = re.compile(r"^[A-Z][A-Z0-9_]*$", re.DOTALL)
if ReIsValidMacroName.match(Name) is None:
Logger.Error('Parser',
FORMAT_INVALID,
ST.ERR_MACRONAME_INVALID % (Name),
ExtraData=LineContent,
File=FileName,
Line=LineNo)
# Validate MACRO Value
#
# <MacroDefinition> ::= [<Comments>]{0,}
# "DEFINE" <MACRO> "=" [{<PATH>} {<VALUE>}] <EOL>
# <Value> ::= {<NumVal>} {<Boolean>} {<AsciiString>} {<GUID>}
# {<CString>} {<UnicodeString>} {<CArray>}
#
# The definition of <NumVal>, <PATH>, <Boolean>, <GUID>, <CString>,
# <UnicodeString>, <CArray> are subset of <AsciiString>.
#
ReIsValidMacroValue = re.compile(r"^[\x20-\x7e]*$", re.DOTALL)
if ReIsValidMacroValue.match(Value) is None:
Logger.Error('Parser',
FORMAT_INVALID,
ST.ERR_MACROVALUE_INVALID % (Value),
ExtraData=LineContent,
File=FileName,
Line=LineNo)
return Name, Value
## GenSection
#
# generate section contents
#
# @param SectionName: the name of the section; for details, refer to the
#                     INF and DEC specs
# @param SectionDict: section statement dict; the key is the SectionAttrs
#                     (arch, moduletype or platform may exist as needed)
#                     list separated by spaces, and the value is the
#                     statement list
#
def GenSection(SectionName, SectionDict, SplitArch=True, NeedBlankLine=False):
Content = ''
for SectionAttrs in SectionDict:
StatementList = SectionDict[SectionAttrs]
if SectionAttrs and SectionName != 'Defines' and SectionAttrs.strip().upper() != DataType.TAB_ARCH_COMMON:
if SplitArch:
ArchList = GetSplitValueList(SectionAttrs, DataType.TAB_SPACE_SPLIT)
else:
if SectionName != 'UserExtensions':
ArchList = GetSplitValueList(SectionAttrs, DataType.TAB_COMMENT_SPLIT)
else:
ArchList = [SectionAttrs]
for Index in range(0, len(ArchList)):
ArchList[Index] = ConvertArchForInstall(ArchList[Index])
Section = '[' + SectionName + '.' + (', ' + SectionName + '.').join(ArchList) + ']'
else:
Section = '[' + SectionName + ']'
Content += '\n' + Section + '\n'
if StatementList is not None:
for Statement in StatementList:
LineList = Statement.split('\n')
NewStatement = ""
for Line in LineList:
# ignore blank comment
if not Line.replace("#", '').strip() and SectionName not in ('Defines', 'Hob', 'Event', 'BootMode'):
continue
# add two space before non-comments line except the comments in Defines section
if Line.strip().startswith('#') and SectionName == 'Defines':
NewStatement += "%s\n" % Line
continue
NewStatement += " %s\n" % Line
if NeedBlankLine:
Content += NewStatement + '\n'
else:
Content += NewStatement
if NeedBlankLine:
Content = Content[:-1]
if not Content.replace('\\n', '').strip():
return ''
return Content
## ConvertArchForInstall
# If Arch.upper() is one of "IA32", "X64", "IPF" or "EBC", it must be upper case; "common" must be lower case.
# For anything else, the case must be preserved.
#
# @param Arch: the arch string that needs to be converted; it should be stripped before being passed in
# @return: the converted arch string
#
def ConvertArchForInstall(Arch):
if Arch.upper() in [DataType.TAB_ARCH_IA32, DataType.TAB_ARCH_X64,
DataType.TAB_ARCH_IPF, DataType.TAB_ARCH_EBC]:
Arch = Arch.upper()
elif Arch.upper() == DataType.TAB_ARCH_COMMON:
Arch = Arch.lower()
return Arch
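## _ConvertArchForInstallExample
#
# Editor's illustrative sketch, not part of the original UPT sources. It shows
# the assumed behavior of ConvertArchForInstall above, assuming the DataType
# arch constants hold their usual values ('IA32', 'X64', 'IPF', 'EBC',
# 'COMMON').
#
def _ConvertArchForInstallExample():
    assert ConvertArchForInstall('ia32') == 'IA32'        # known arches go upper case
    assert ConvertArchForInstall('Common') == 'common'    # 'common' goes lower case
    assert ConvertArchForInstall('RISCV64') == 'RISCV64'  # anything else keeps its case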
| edk2-master | BaseTools/Source/Python/UPT/Library/Parsing.py |
## @file
# This file is used to define common string related functions used in parsing
# process
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
StringUtils
'''
##
# Import Modules
#
import re
import os.path
import Logger.Log as Logger
import Library.DataType as DataType
from Logger.ToolError import FORMAT_INVALID
from Logger.ToolError import PARSER_ERROR
from Logger import StringTable as ST
#
# Regular expression for matching macro used in DSC/DEC/INF file inclusion
#
gMACRO_PATTERN = re.compile("\$\(([_A-Z][_A-Z0-9]*)\)", re.UNICODE)
## GetSplitValueList
#
# Get a value list from a string with multiple values split with SplitTag
# The default SplitTag is DataType.TAB_VALUE_SPLIT
# 'AAA|BBB|CCC' -> ['AAA', 'BBB', 'CCC']
#
# @param String: The input string to be split
# @param SplitTag: The split key, default is DataType.TAB_VALUE_SPLIT
# @param MaxSplit: The max number of split values, default is -1
#
#
def GetSplitValueList(String, SplitTag=DataType.TAB_VALUE_SPLIT, MaxSplit= -1):
return list(map(lambda l: l.strip(), String.split(SplitTag, MaxSplit)))
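## _GetSplitValueListExample
#
# Editor's illustrative sketch, not part of the original UPT sources. It shows
# the assumed behavior of GetSplitValueList above, assuming the default
# DataType.TAB_VALUE_SPLIT tag is '|' as the doc comment indicates.
#
def _GetSplitValueListExample():
    # each piece is stripped of surrounding whitespace
    assert GetSplitValueList('AAA | BBB|CCC') == ['AAA', 'BBB', 'CCC']
    # MaxSplit limits how many splits are performed
    assert GetSplitValueList('AAA|BBB|CCC', '|', 1) == ['AAA', 'BBB|CCC']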
## MergeArches
#
# Find all arches of a key in the dict and add the new arch to the list
# If no arch exists yet, set the arch directly
#
# @param Dict: The input value for Dict
# @param Key: The input value for Key
# @param Arch: The Arch to be added or merged
#
def MergeArches(Dict, Key, Arch):
if Key in Dict.keys():
Dict[Key].append(Arch)
else:
Dict[Key] = Arch.split()
## GenDefines
#
# Parse a string with format "DEFINE <VarName> = <PATH>"
# Generate a map Defines[VarName] = PATH
# Return 0 on success, -1 if the format is invalid, and 1 if no DEFINE statement is present
#
# @param String: String with DEFINE statement
# @param Arch: Supported Arch
# @param Defines: DEFINE statement to be parsed
#
def GenDefines(String, Arch, Defines):
if String.find(DataType.TAB_DEFINE + ' ') > -1:
List = String.replace(DataType.TAB_DEFINE + ' ', '').\
split(DataType.TAB_EQUAL_SPLIT)
if len(List) == 2:
Defines[(CleanString(List[0]), Arch)] = CleanString(List[1])
return 0
else:
return -1
return 1
## GetLibraryClassesWithModuleType
#
# Get Library Class definition when no module type defined
#
# @param Lines: The content to be parsed
# @param Key: Reserved
# @param KeyValues: To store data after parsing
# @param CommentCharacter: Comment char, used to ignore comment content
#
def GetLibraryClassesWithModuleType(Lines, Key, KeyValues, CommentCharacter):
NewKey = SplitModuleType(Key)
Lines = Lines.split(DataType.TAB_SECTION_END, 1)[1]
LineList = Lines.splitlines()
for Line in LineList:
Line = CleanString(Line, CommentCharacter)
if Line != '' and Line[0] != CommentCharacter:
KeyValues.append([CleanString(Line, CommentCharacter), NewKey[1]])
return True
## GetDynamics
#
# Get Dynamic Pcds
#
# @param Lines: The content to be parsed
# @param Key: Reserved
# @param KeyValues: To store data after parsing
# @param CommentCharacter: Comment char, used to ignore comment content
#
def GetDynamics(Lines, Key, KeyValues, CommentCharacter):
#
# Get SkuId Name List
#
SkuIdNameList = SplitModuleType(Key)
Lines = Lines.split(DataType.TAB_SECTION_END, 1)[1]
LineList = Lines.splitlines()
for Line in LineList:
Line = CleanString(Line, CommentCharacter)
if Line != '' and Line[0] != CommentCharacter:
KeyValues.append([CleanString(Line, CommentCharacter), SkuIdNameList[1]])
return True
## SplitModuleType
#
# Split ModuleType out of the section define to get the key
# [LibraryClass.Arch.ModuleType|ModuleType|ModuleType] -> [
# 'LibraryClass.Arch', ['ModuleType', 'ModuleType', 'ModuleType'] ]
#
# @param Key: String to be parsed
#
def SplitModuleType(Key):
KeyList = Key.split(DataType.TAB_SPLIT)
#
# Fill in for arch
#
KeyList.append('')
#
# Fill in for moduletype
#
KeyList.append('')
ReturnValue = []
KeyValue = KeyList[0]
if KeyList[1] != '':
KeyValue = KeyValue + DataType.TAB_SPLIT + KeyList[1]
ReturnValue.append(KeyValue)
ReturnValue.append(GetSplitValueList(KeyList[2]))
return ReturnValue
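## _SplitModuleTypeExample
#
# Editor's illustrative sketch, not part of the original UPT sources. It shows
# the assumed behavior of SplitModuleType above, following the
# [LibraryClass.Arch.ModuleType|ModuleType] format described in its comment.
#
def _SplitModuleTypeExample():
    # arch plus multiple module types
    assert SplitModuleType('LibraryClasses.IA32.PEIM|DXE_DRIVER') == \
        ['LibraryClasses.IA32', ['PEIM', 'DXE_DRIVER']]
    # a bare section name yields an empty module type list entry
    assert SplitModuleType('LibraryClasses') == ['LibraryClasses', ['']]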
## Replace macro in string
#
# This method replaces macros used in the given string. The macros are given
# in a dictionary.
#
# @param String String to be processed
# @param MacroDefinitions The macro definitions in the form of dictionary
# @param SelfReplacement To decide whether replace un-defined macro to ''
# @param Line: The content contain line string and line number
# @param FileName: The meta-file file name
#
def ReplaceMacro(String, MacroDefinitions=None, SelfReplacement=False, Line=None, FileName=None, Flag=False):
LastString = String
if MacroDefinitions is None:
MacroDefinitions = {}
while MacroDefinitions:
QuotedStringList = []
HaveQuotedMacroFlag = False
if not Flag:
MacroUsed = gMACRO_PATTERN.findall(String)
else:
ReQuotedString = re.compile('\"')
QuotedStringList = ReQuotedString.split(String)
if len(QuotedStringList) >= 3:
HaveQuotedMacroFlag = True
Count = 0
MacroString = ""
for QuotedStringItem in QuotedStringList:
Count += 1
if Count % 2 != 0:
MacroString += QuotedStringItem
if Count == len(QuotedStringList) and Count % 2 == 0:
MacroString += QuotedStringItem
MacroUsed = gMACRO_PATTERN.findall(MacroString)
#
# no macro found in String, stop replacing
#
if len(MacroUsed) == 0:
break
for Macro in MacroUsed:
if Macro not in MacroDefinitions:
if SelfReplacement:
String = String.replace("$(%s)" % Macro, '')
Logger.Debug(5, "Delete undefined MACROs in file %s line %d: %s!" % (FileName, Line[1], Line[0]))
continue
if not HaveQuotedMacroFlag:
String = String.replace("$(%s)" % Macro, MacroDefinitions[Macro])
else:
Count = 0
for QuotedStringItem in QuotedStringList:
Count += 1
if Count % 2 != 0:
QuotedStringList[Count - 1] = QuotedStringList[Count - 1].replace("$(%s)" % Macro,
MacroDefinitions[Macro])
elif Count == len(QuotedStringList) and Count % 2 == 0:
QuotedStringList[Count - 1] = QuotedStringList[Count - 1].replace("$(%s)" % Macro,
MacroDefinitions[Macro])
RetString = ''
if HaveQuotedMacroFlag:
Count = 0
for QuotedStringItem in QuotedStringList:
Count += 1
if Count != len(QuotedStringList):
RetString += QuotedStringList[Count - 1] + "\""
else:
RetString += QuotedStringList[Count - 1]
String = RetString
#
# in case there's macro not defined
#
if String == LastString:
break
LastString = String
return String
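## _ReplaceMacroExample
#
# Editor's illustrative sketch, not part of the original UPT sources. It shows
# the assumed behavior of ReplaceMacro above; the macro names, paths and file
# name used here are hypothetical.
#
def _ReplaceMacroExample():
    # a defined macro is substituted with its value
    assert ReplaceMacro('$(WORKSPACE)/MdePkg', {'WORKSPACE': 'C:/Edk2'}) == \
        'C:/Edk2/MdePkg'
    # with SelfReplacement an undefined macro is replaced with ''
    assert ReplaceMacro('$(NOT_DEFINED)/File.inf', {'WORKSPACE': 'C:/Edk2'},
                        SelfReplacement=True,
                        Line=('$(NOT_DEFINED)/File.inf', 1),
                        FileName='Example.inf') == '/File.inf'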
## NormPath
#
# Create a normalized path
# and replace DEFINE macros in the path
#
# @param Path: The input value for Path to be converted
# @param Defines: A set for DEFINE statement
#
def NormPath(Path, Defines=None):
IsRelativePath = False
if Defines is None:
Defines = {}
if Path:
if Path[0] == '.':
IsRelativePath = True
#
# Replace with Define
#
if Defines:
Path = ReplaceMacro(Path, Defines)
#
# To local path format
#
Path = os.path.normpath(Path)
if IsRelativePath and Path[0] != '.':
Path = os.path.join('.', Path)
return Path
## CleanString
#
# Remove comments in a string
# Remove spaces
#
# @param Line: The string to be cleaned
# @param CommentCharacter: Comment char, used to ignore comment content,
# default is DataType.TAB_COMMENT_SPLIT
#
def CleanString(Line, CommentCharacter=DataType.TAB_COMMENT_SPLIT, AllowCppStyleComment=False):
#
# remove whitespace
#
Line = Line.strip()
#
# Replace EDK1's comment character
#
if AllowCppStyleComment:
Line = Line.replace(DataType.TAB_COMMENT_EDK1_SPLIT, CommentCharacter)
#
# remove comments, but we should escape comment character in string
#
InString = False
for Index in range(0, len(Line)):
if Line[Index] == '"':
InString = not InString
elif Line[Index] == CommentCharacter and not InString:
Line = Line[0: Index]
break
#
# remove whitespace again
#
Line = Line.strip()
return Line
## CleanString2
#
# Split a string into its statement and comment parts
# Remove spaces
#
# @param Line: The string to be cleaned
# @param CommentCharacter: Comment char, used to ignore comment content,
# default is DataType.TAB_COMMENT_SPLIT
#
def CleanString2(Line, CommentCharacter=DataType.TAB_COMMENT_SPLIT, AllowCppStyleComment=False):
#
# remove whitespace
#
Line = Line.strip()
#
# Replace EDK1's comment character
#
if AllowCppStyleComment:
Line = Line.replace(DataType.TAB_COMMENT_EDK1_SPLIT, CommentCharacter)
#
# separate comments and statements
#
LineParts = Line.split(CommentCharacter, 1)
#
# remove whitespace again
#
Line = LineParts[0].strip()
if len(LineParts) > 1:
Comment = LineParts[1].strip()
#
# Remove prefixed and trailing comment characters
#
Start = 0
End = len(Comment)
while Start < End and Comment.startswith(CommentCharacter, Start, End):
Start += 1
while End >= 0 and Comment.endswith(CommentCharacter, Start, End):
End -= 1
Comment = Comment[Start:End]
Comment = Comment.strip()
else:
Comment = ''
return Line, Comment
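## _CleanString2Example
#
# Editor's illustrative sketch, not part of the original UPT sources. It shows
# the assumed behavior of CleanString2 above, assuming the default comment
# character is '#'; the GUID name used here is hypothetical.
#
def _CleanString2Example():
    # statement and tail comment are returned separately, with leading and
    # trailing comment characters stripped from the comment
    assert CleanString2('gEfiExampleGuid  ## SOMETIMES_CONSUMES ##') == \
        ('gEfiExampleGuid', 'SOMETIMES_CONSUMES')
    # a line without a comment returns an empty comment string
    assert CleanString2('FILE_GUID = 12345678') == ('FILE_GUID = 12345678', '')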
## GetMultipleValuesOfKeyFromLines
#
# Parse multiple strings to clean comment and spaces
# The result is saved to KeyValues
#
# @param Lines: The content to be parsed
# @param Key: Reserved
# @param KeyValues: To store data after parsing
# @param CommentCharacter: Comment char, used to ignore comment content
#
def GetMultipleValuesOfKeyFromLines(Lines, Key, KeyValues, CommentCharacter):
if Key:
pass
if KeyValues:
pass
Lines = Lines.split(DataType.TAB_SECTION_END, 1)[1]
LineList = Lines.split('\n')
for Line in LineList:
Line = CleanString(Line, CommentCharacter)
if Line != '' and Line[0] != CommentCharacter:
KeyValues += [Line]
return True
## GetDefineValue
#
# Parse a DEFINE statement to get defined value
# DEFINE Key Value
#
# @param String: The content to be parsed
# @param Key: The key of DEFINE statement
# @param CommentCharacter: Comment char, used to ignore comment content
#
def GetDefineValue(String, Key, CommentCharacter):
if CommentCharacter:
pass
String = CleanString(String)
return String[String.find(Key + ' ') + len(Key + ' ') : ]
## GetSingleValueOfKeyFromLines
#
# Parse multiple strings as below to get value of each definition line
# Key1 = Value1
# Key2 = Value2
# The result is saved to Dictionary
#
# @param Lines: The content to be parsed
# @param Dictionary: To store data after parsing
# @param CommentCharacter: Comment char, used to ignore comment content
# @param KeySplitCharacter: Key split char, between key name and key value.
#                           Key1 = Value1, '=' is the key split char
# @param ValueSplitFlag: Value split flag, used to decide if there are
#                        multiple values
# @param ValueSplitCharacter: Value split char, used to split multiple
#                             values. Key1 = Value1|Value2, '|' is the value
#                             split char
#
def GetSingleValueOfKeyFromLines(Lines, Dictionary, CommentCharacter, KeySplitCharacter, \
ValueSplitFlag, ValueSplitCharacter):
Lines = Lines.split('\n')
Keys = []
Value = ''
DefineValues = ['']
SpecValues = ['']
for Line in Lines:
#
# Handle DEFINE and SPEC
#
if Line.find(DataType.TAB_INF_DEFINES_DEFINE + ' ') > -1:
if '' in DefineValues:
DefineValues.remove('')
DefineValues.append(GetDefineValue(Line, DataType.TAB_INF_DEFINES_DEFINE, CommentCharacter))
continue
if Line.find(DataType.TAB_INF_DEFINES_SPEC + ' ') > -1:
if '' in SpecValues:
SpecValues.remove('')
SpecValues.append(GetDefineValue(Line, DataType.TAB_INF_DEFINES_SPEC, CommentCharacter))
continue
#
# Handle Others
#
LineList = Line.split(KeySplitCharacter, 1)
if len(LineList) >= 2:
Key = LineList[0].split()
if len(Key) == 1 and Key[0][0] != CommentCharacter:
#
# Remove comments and white spaces
#
LineList[1] = CleanString(LineList[1], CommentCharacter)
if ValueSplitFlag:
Value = list(map(lambda x: x.strip(), LineList[1].split(ValueSplitCharacter)))
else:
Value = CleanString(LineList[1], CommentCharacter).splitlines()
if Key[0] in Dictionary:
if Key[0] not in Keys:
Dictionary[Key[0]] = Value
Keys.append(Key[0])
else:
Dictionary[Key[0]].extend(Value)
else:
Dictionary[DataType.TAB_INF_DEFINES_MACRO][Key[0]] = Value[0]
if DefineValues == []:
DefineValues = ['']
if SpecValues == []:
SpecValues = ['']
Dictionary[DataType.TAB_INF_DEFINES_DEFINE] = DefineValues
Dictionary[DataType.TAB_INF_DEFINES_SPEC] = SpecValues
return True
## PreCheck
#
# Do pre-check for a file before it is parsed
# Check $()
# Check []
#
# @param FileName: Used for error report
# @param FileContent: File content to be parsed
# @param SupSectionTag: Used for error report
#
def PreCheck(FileName, FileContent, SupSectionTag):
if SupSectionTag:
pass
LineNo = 0
IsFailed = False
NewFileContent = ''
for Line in FileContent.splitlines():
LineNo = LineNo + 1
#
# Clean current line
#
Line = CleanString(Line)
#
# Remove commented line
#
        if Line.find(DataType.TAB_COMMENT_SPLIT) == 0:
Line = ''
#
# Check $()
#
if Line.find('$') > -1:
if Line.find('$(') < 0 or Line.find(')') < 0:
Logger.Error("Parser", FORMAT_INVALID, Line=LineNo, File=FileName, RaiseError=Logger.IS_RAISE_ERROR)
#
# Check []
#
if Line.find('[') > -1 or Line.find(']') > -1:
#
# Only get one '[' or one ']'
#
if not (Line.find('[') > -1 and Line.find(']') > -1):
Logger.Error("Parser", FORMAT_INVALID, Line=LineNo, File=FileName, RaiseError=Logger.IS_RAISE_ERROR)
#
# Regenerate FileContent
#
NewFileContent = NewFileContent + Line + '\r\n'
if IsFailed:
Logger.Error("Parser", FORMAT_INVALID, Line=LineNo, File=FileName, RaiseError=Logger.IS_RAISE_ERROR)
return NewFileContent
## CheckFileType
#
# Check if the Filename has the extension ExtName
# Return True if it does
# Raise an error message if it does not
#
# @param CheckFilename: Name of the file to be checked
# @param ExtName: Ext name of the file to be checked
# @param ContainerFilename: The container file which describes the file to be
# checked, used for error report
# @param SectionName: Used for error report
# @param Line: The line in container file which defines the file
# to be checked
#
def CheckFileType(CheckFilename, ExtName, ContainerFilename, SectionName, Line, LineNo= -1):
if CheckFilename != '' and CheckFilename is not None:
(Root, Ext) = os.path.splitext(CheckFilename)
if Ext.upper() != ExtName.upper() and Root:
ContainerFile = open(ContainerFilename, 'r').read()
if LineNo == -1:
LineNo = GetLineNo(ContainerFile, Line)
ErrorMsg = ST.ERR_SECTIONNAME_INVALID % (SectionName, CheckFilename, ExtName)
Logger.Error("Parser", PARSER_ERROR, ErrorMsg, Line=LineNo, \
File=ContainerFilename, RaiseError=Logger.IS_RAISE_ERROR)
return True
## CheckFileExist
#
# Check if the file exists
# Return the checked file path if it exists
# Raise an error message if it does not exist
#
# @param CheckFilename: Name of the file to be checked
# @param WorkspaceDir: Current workspace dir
# @param ContainerFilename: The container file which describes the file to
# be checked, used for error report
# @param SectionName: Used for error report
# @param Line: The line in container file which defines the
# file to be checked
#
def CheckFileExist(WorkspaceDir, CheckFilename, ContainerFilename, SectionName, Line, LineNo= -1):
CheckFile = ''
if CheckFilename != '' and CheckFilename is not None:
CheckFile = WorkspaceFile(WorkspaceDir, CheckFilename)
if not os.path.isfile(CheckFile):
ContainerFile = open(ContainerFilename, 'r').read()
if LineNo == -1:
LineNo = GetLineNo(ContainerFile, Line)
ErrorMsg = ST.ERR_CHECKFILE_NOTFOUND % (CheckFile, SectionName)
Logger.Error("Parser", PARSER_ERROR, ErrorMsg,
File=ContainerFilename, Line=LineNo, RaiseError=Logger.IS_RAISE_ERROR)
return CheckFile
## GetLineNo
#
# Find the index of a line in a file
#
# @param FileContent: Search scope
# @param Line: Search key
#
def GetLineNo(FileContent, Line, IsIgnoreComment=True):
LineList = FileContent.splitlines()
for Index in range(len(LineList)):
if LineList[Index].find(Line) > -1:
#
# Ignore statement in comment
#
if IsIgnoreComment:
if LineList[Index].strip()[0] == DataType.TAB_COMMENT_SPLIT:
continue
return Index + 1
return -1
## RaiseParserError
#
# Raise a parser error
#
# @param Line: String which has error
# @param Section: Used for error report
# @param File: File which has the string
# @param Format: Correct format
#
def RaiseParserError(Line, Section, File, Format='', LineNo= -1):
if LineNo == -1:
LineNo = GetLineNo(open(os.path.normpath(File), 'r').read(), Line)
ErrorMsg = ST.ERR_INVALID_NOTFOUND % (Line, Section)
if Format != '':
Format = "Correct format is " + Format
Logger.Error("Parser", PARSER_ERROR, ErrorMsg, File=File, Line=LineNo, \
ExtraData=Format, RaiseError=Logger.IS_RAISE_ERROR)
## WorkspaceFile
#
# Return a full path with workspace dir
#
# @param WorkspaceDir: Workspace dir
# @param Filename: Relative file name
#
def WorkspaceFile(WorkspaceDir, Filename):
return os.path.join(NormPath(WorkspaceDir), NormPath(Filename))
## Split string
#
# Remove the '"' that the string starts and ends with
#
# @param String: The string need to be split
#
def SplitString(String):
if String.startswith('\"'):
String = String[1:]
if String.endswith('\"'):
String = String[:-1]
return String
## Convert To Sql String
#
# Replace "'" with "''" in each item of StringList
#
# @param StringList: A list for strings to be converted
#
def ConvertToSqlString(StringList):
return list(map(lambda s: s.replace("'", "''"), StringList))
## Convert To Sql String
#
# Replace "'" with "''" in the String
#
# @param String: A String to be converted
#
def ConvertToSqlString2(String):
return String.replace("'", "''")
## GetStringOfList
#
# Get String of a List
#
# @param List: string list
# @param Split: split character
#
def GetStringOfList(List, Split=' '):
if not isinstance(List, type([])):
return List
Str = ''
for Item in List:
Str = Str + Item + Split
return Str.strip()
## Get HelpTextList
#
# Get HelpTextList from HelpTextClassList
#
# @param HelpTextClassList: Help Text Class List
#
def GetHelpTextList(HelpTextClassList):
List = []
if HelpTextClassList:
for HelpText in HelpTextClassList:
if HelpText.String.endswith('\n'):
HelpText.String = HelpText.String[0: len(HelpText.String) - len('\n')]
List.extend(HelpText.String.split('\n'))
return List
## Get String Array Length
#
# Get the array length (number of elements) needed to store the string
#
# @param String: the source string
#
def StringArrayLength(String):
if String.startswith('L"'):
return (len(String) - 3 + 1) * 2
elif String.startswith('"'):
return (len(String) - 2 + 1)
else:
return len(String.split()) + 1
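## _StringArrayLengthExample
#
# Editor's illustrative sketch, not part of the original UPT sources. It shows
# the assumed behavior of StringArrayLength above for quoted string values.
#
def _StringArrayLengthExample():
    # Unicode string: (characters + terminating NUL) * 2 bytes
    assert StringArrayLength('L"ABC"') == 8
    # ASCII string: characters + terminating NUL
    assert StringArrayLength('"ABC"') == 4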
## RemoveDupOption
#
# Remove Dup Option
#
# @param OptionString: the option string
# @param Which: Which flag
# @param Against: Against flag
#
def RemoveDupOption(OptionString, Which="/I", Against=None):
OptionList = OptionString.split()
ValueList = []
if Against:
ValueList += Against
for Index in range(len(OptionList)):
Opt = OptionList[Index]
if not Opt.startswith(Which):
continue
if len(Opt) > len(Which):
Val = Opt[len(Which):]
else:
Val = ""
if Val in ValueList:
OptionList[Index] = ""
else:
ValueList.append(Val)
return " ".join(OptionList)
## Check if the string is a hex digit string
#
# Return true if all characters in the string are digits and there is at
# least one character, or if the string is a valid hex number (starting
# with 0x, followed by hex digits); false otherwise.
# @param string: input string
#
def IsHexDigit(Str):
try:
int(Str, 10)
return True
except ValueError:
if len(Str) > 2 and Str.upper().startswith('0X'):
try:
int(Str, 16)
return True
except ValueError:
return False
return False
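## _IsHexDigitExample
#
# Editor's illustrative sketch, not part of the original UPT sources. It shows
# the assumed behavior of IsHexDigit above.
#
def _IsHexDigitExample():
    assert IsHexDigit('123')        # decimal digits
    assert IsHexDigit('0x1A')       # hex number prefixed with 0x
    assert not IsHexDigit('0x')     # the prefix alone is not a valid hex number
    assert not IsHexDigit('12AB')   # hex digits without the 0x prefix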
## Check if the string is a hex digit string and its integer value is within the limit of UINT32
#
# Return true if all characters in the string are digits and there is at
# least one character, or if the string is a valid hex number (starting
# with 0x, followed by hex digits), and the value fits in UINT32;
# false otherwise.
# @param string: input string
#
def IsHexDigitUINT32(Str):
try:
Value = int(Str, 10)
if (Value <= 0xFFFFFFFF) and (Value >= 0):
return True
except ValueError:
if len(Str) > 2 and Str.upper().startswith('0X'):
try:
Value = int(Str, 16)
if (Value <= 0xFFFFFFFF) and (Value >= 0):
return True
except ValueError:
return False
return False
## CleanSpecialChar
#
# The ASCII text files of type INF, DEC, INI are edited by developers,
# and may contain characters that cannot be directly translated to strings that
# are conformant with the UDP XML Schema. Any characters in this category
# (0x00-0x08, TAB [0x09], 0x0B, 0x0C, 0x0E-0x1F, 0x80-0xFF)
# must be converted to a space character[0x20] as part of the parsing process.
#
def ConvertSpecialChar(Lines):
RetLines = []
for line in Lines:
ReMatchSpecialChar = re.compile(r"[\x00-\x08]|\x09|\x0b|\x0c|[\x0e-\x1f]|[\x7f-\xff]")
RetLines.append(ReMatchSpecialChar.sub(' ', line))
return RetLines
## __GetTokenList
#
# Assume Str is a valid feature flag expression.
# Return a list which contains tokens: alphanumeric tokens and other tokens
# Whitespace is not stripped
#
def __GetTokenList(Str):
InQuote = False
Token = ''
TokenOP = ''
PreChar = ''
List = []
for Char in Str:
if InQuote:
Token += Char
if Char == '"' and PreChar != '\\':
InQuote = not InQuote
List.append(Token)
Token = ''
continue
if Char == '"':
if Token and Token != 'L':
List.append(Token)
Token = ''
if TokenOP:
List.append(TokenOP)
TokenOP = ''
InQuote = not InQuote
Token += Char
continue
if not (Char.isalnum() or Char in '_'):
TokenOP += Char
if Token:
List.append(Token)
Token = ''
else:
Token += Char
if TokenOP:
List.append(TokenOP)
TokenOP = ''
if PreChar == '\\' and Char == '\\':
PreChar = ''
else:
PreChar = Char
if Token:
List.append(Token)
if TokenOP:
List.append(TokenOP)
return List
## ConvertNEToNOTEQ
#
# Convert NE operator to NOT EQ
# For example: 1 NE 2 -> 1 NOT EQ 2
#
# @param Expr: Feature flag expression to be converted
#
def ConvertNEToNOTEQ(Expr):
List = __GetTokenList(Expr)
for Index in range(len(List)):
if List[Index] == 'NE':
List[Index] = 'NOT EQ'
return ''.join(List)
## ConvertNOTEQToNE
#
# Convert NOT EQ operator to NE
# For example: 1 NOT EQ 2 -> 1 NE 2
#
# @param Expr: Feature flag expression to be converted
#
def ConvertNOTEQToNE(Expr):
List = __GetTokenList(Expr)
HasNOT = False
RetList = []
for Token in List:
if HasNOT and Token == 'EQ':
# At least, 'NOT' is in the list
while not RetList[-1].strip():
RetList.pop()
RetList[-1] = 'NE'
HasNOT = False
continue
if Token == 'NOT':
HasNOT = True
elif Token.strip():
HasNOT = False
RetList.append(Token)
return ''.join(RetList)
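## _ConvertNeExample
#
# Editor's illustrative sketch, not part of the original UPT sources. It shows
# the assumed round-trip behavior of ConvertNEToNOTEQ and ConvertNOTEQToNE
# above on a simple feature flag expression.
#
def _ConvertNeExample():
    assert ConvertNEToNOTEQ('1 NE 2') == '1 NOT EQ 2'
    assert ConvertNOTEQToNE('1 NOT EQ 2') == '1 NE 2'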
## SplitPcdEntry
#
# Split a PCD entry string into TokenSpace.CName, PCD value and FFE.
# NOTE: the PCD value and FFE can contain "|" in their expressions, and the INF specification has the rule below:
# when using the characters "|" or "||" in an expression, the expression must be encapsulated in
# open "(" and close ")" parentheses.
#
# @param String A PCD entry string that needs to be split.
#
# @return List [PcdTokenCName, Value, FFE]
#
def SplitPcdEntry(String):
if not String:
return ['', '', ''], False
PcdTokenCName = ''
PcdValue = ''
PcdFeatureFlagExp = ''
ValueList = GetSplitValueList(String, "|", 1)
#
# Only contain TokenCName
#
if len(ValueList) == 1:
return [ValueList[0]], True
NewValueList = []
if len(ValueList) == 2:
PcdTokenCName = ValueList[0]
InQuote = False
InParenthesis = False
StrItem = ''
for StrCh in ValueList[1]:
if StrCh == '"':
InQuote = not InQuote
elif StrCh == '(' or StrCh == ')':
InParenthesis = not InParenthesis
if StrCh == '|':
if not InQuote or not InParenthesis:
NewValueList.append(StrItem.strip())
StrItem = ' '
continue
StrItem += StrCh
NewValueList.append(StrItem.strip())
if len(NewValueList) == 1:
PcdValue = NewValueList[0]
return [PcdTokenCName, PcdValue], True
elif len(NewValueList) == 2:
PcdValue = NewValueList[0]
PcdFeatureFlagExp = NewValueList[1]
return [PcdTokenCName, PcdValue, PcdFeatureFlagExp], True
else:
return ['', '', ''], False
return ['', '', ''], False
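## _SplitPcdEntryExample
#
# Editor's illustrative sketch, not part of the original UPT sources. It shows
# the assumed behavior of SplitPcdEntry above; the token space GUID and PCD
# names used here are hypothetical.
#
def _SplitPcdEntryExample():
    # TokenSpace.CName only
    assert SplitPcdEntry('gExampleTokenSpaceGuid.PcdFoo') == \
        (['gExampleTokenSpaceGuid.PcdFoo'], True)
    # TokenSpace.CName with a value and a feature flag expression
    assert SplitPcdEntry('gExampleTokenSpaceGuid.PcdFoo|TRUE|gOtherGuid.PcdFlag') == \
        (['gExampleTokenSpaceGuid.PcdFoo', 'TRUE', 'gOtherGuid.PcdFlag'], True)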
## Check if two arches match
#
# @param Arch1
# @param Arch2
#
def IsMatchArch(Arch1, Arch2):
if 'COMMON' in Arch1 or 'COMMON' in Arch2:
return True
try:
if isinstance(Arch1, list) and isinstance(Arch2, list):
for Item1 in Arch1:
for Item2 in Arch2:
if Item1 == Item2:
return True
elif isinstance(Arch1, list):
return Arch2 in Arch1
elif isinstance(Arch2, list):
return Arch1 in Arch2
else:
if Arch1 == Arch2:
return True
except:
return False
## GetUniFileName
#
# Search all files in FilePath to find the FileName with the largest index
# Return the FileName with index + 1 under the FilePath
#
def GetUniFileName(FilePath, FileName):
Files = []
try:
Files = os.listdir(FilePath)
except:
pass
LargestIndex = -1
IndexNotFound = True
for File in Files:
if File.upper().startswith(FileName.upper()) and File.upper().endswith('.UNI'):
Index = File.upper().replace(FileName.upper(), '').replace('.UNI', '')
if Index:
try:
Index = int(Index)
except Exception:
Index = -1
else:
IndexNotFound = False
Index = 0
if Index > LargestIndex:
LargestIndex = Index + 1
if LargestIndex > -1 and not IndexNotFound:
return os.path.normpath(os.path.join(FilePath, FileName + str(LargestIndex) + '.uni'))
else:
return os.path.normpath(os.path.join(FilePath, FileName + '.uni'))
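## _GetUniFileNameExample
#
# Editor's illustrative sketch, not part of the original UPT sources. It shows
# the assumed fallback behavior of GetUniFileName above when the directory has
# no matching .uni files (here a hypothetical, non-existent directory is used).
#
def _GetUniFileNameExample():
    # with no existing FileName*.uni files the plain FileName.uni path is returned
    Result = GetUniFileName('NonExistentDirForExample', 'PackageStrings')
    assert Result.endswith('PackageStrings.uni')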
| edk2-master | BaseTools/Source/Python/UPT/Library/StringUtils.py |
## @file
# Python 'Library' package initialization file.
#
# This file is required to make Python interpreter treat the directory
# as containing package.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
Library
'''
| edk2-master | BaseTools/Source/Python/UPT/Library/__init__.py |
## @file
# This file is used to check PCD logical expression
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
'''
ExpressionValidate
'''
from __future__ import print_function
##
# Import Modules
#
import re
from Logger import StringTable as ST
## IsValidBareCString
#
# Check if String is comprised of whitespace (0x20), ! (0x21), 0x23 - 0x7E
# or '\n', '\t', '\f', '\r', '\b', '\0', '\\'
#
# @param String: string to be checked
#
def IsValidBareCString(String):
EscapeList = ['n', 't', 'f', 'r', 'b', '0', '\\', '"']
PreChar = ''
LastChar = ''
for Char in String:
LastChar = Char
if PreChar == '\\':
if Char not in EscapeList:
return False
if Char == '\\':
PreChar = ''
continue
else:
IntChar = ord(Char)
if IntChar != 0x20 and IntChar != 0x09 and IntChar != 0x21 \
and (IntChar < 0x23 or IntChar > 0x7e):
return False
PreChar = Char
# Last char cannot be \ if PreChar is not \
if LastChar == '\\' and PreChar == LastChar:
return False
return True
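## _IsValidBareCStringExample
#
# Editor's illustrative sketch, not part of the original UPT sources. It shows
# the assumed behavior of IsValidBareCString above; note that the backslashes
# below are escaped so the checked strings contain literal '\' characters.
#
def _IsValidBareCStringExample():
    # printable characters and recognized escapes such as \n are accepted
    assert IsValidBareCString('Hello \\n world')
    # an unrecognized escape sequence is rejected
    assert not IsValidBareCString('bad \\q escape')
    # a trailing unpaired backslash is rejected
    assert not IsValidBareCString('trailing\\')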
def _ValidateToken(Token):
Token = Token.strip()
Index = Token.find("\"")
if Index != -1:
return IsValidBareCString(Token[Index+1:-1])
return True
## _ExprError
#
# @param Exception: Exception
#
class _ExprError(Exception):
def __init__(self, Error = ''):
Exception.__init__(self)
self.Error = Error
## _ExprBase
#
class _ExprBase:
HEX_PATTERN = '[\t\s]*0[xX][a-fA-F0-9]+'
INT_PATTERN = '[\t\s]*[0-9]+'
MACRO_PATTERN = '[\t\s]*\$\(([A-Z][_A-Z0-9]*)\)'
PCD_PATTERN = \
'[\t\s]*[_a-zA-Z][a-zA-Z0-9_]*[\t\s]*\.[\t\s]*[_a-zA-Z][a-zA-Z0-9_]*'
QUOTED_PATTERN = '[\t\s]*L?"[^"]*"'
BOOL_PATTERN = '[\t\s]*(true|True|TRUE|false|False|FALSE)'
def __init__(self, Token):
self.Token = Token
self.Index = 0
self.Len = len(Token)
## SkipWhitespace
#
def SkipWhitespace(self):
for Char in self.Token[self.Index:]:
if Char not in ' \t':
break
self.Index += 1
## IsCurrentOp
#
# @param OpList: option list
#
def IsCurrentOp(self, OpList):
self.SkipWhitespace()
LetterOp = ["EQ", "NE", "GE", "LE", "GT", "LT", "NOT", "and", "AND",
"or", "OR", "XOR"]
OpMap = {
'|' : '|',
'&' : '&',
'!' : '=',
'>' : '=',
'<' : '='
}
for Operator in OpList:
if not self.Token[self.Index:].startswith(Operator):
continue
self.Index += len(Operator)
Char = self.Token[self.Index : self.Index + 1]
if (Operator in LetterOp and (Char == '_' or Char.isalnum())) \
or (Operator in OpMap and OpMap[Operator] == Char):
self.Index -= len(Operator)
break
return True
return False
## _LogicalExpressionParser
#
# @param _ExprBase: _ExprBase object
#
class _LogicalExpressionParser(_ExprBase):
#
# STRINGITEM can only be logical field according to spec
#
STRINGITEM = -1
#
# Evaluate to True or False
#
LOGICAL = 0
REALLOGICAL = 2
#
# Just arithmetic expression
#
ARITH = 1
def __init__(self, Token):
_ExprBase.__init__(self, Token)
self.Parens = 0
def _CheckToken(self, MatchList):
for Match in MatchList:
if Match and Match.start() == 0:
if not _ValidateToken(
self.Token[self.Index:self.Index+Match.end()]
):
return False
self.Index += Match.end()
if self.Token[self.Index - 1] == '"':
return True
if self.Token[self.Index:self.Index+1] == '_' or \
self.Token[self.Index:self.Index+1].isalnum():
self.Index -= Match.end()
return False
Token = self.Token[self.Index - Match.end():self.Index]
if Token.strip() in ["EQ", "NE", "GE", "LE", "GT", "LT",
"NOT", "and", "AND", "or", "OR", "XOR"]:
self.Index -= Match.end()
return False
return True
return False
def IsAtomicNumVal(self):
#
# Hex number
#
Match1 = re.compile(self.HEX_PATTERN).match(self.Token[self.Index:])
#
# Number
#
Match2 = re.compile(self.INT_PATTERN).match(self.Token[self.Index:])
#
# Macro
#
Match3 = re.compile(self.MACRO_PATTERN).match(self.Token[self.Index:])
#
# PcdName
#
Match4 = re.compile(self.PCD_PATTERN).match(self.Token[self.Index:])
return self._CheckToken([Match1, Match2, Match3, Match4])
def IsAtomicItem(self):
#
# Macro
#
Match1 = re.compile(self.MACRO_PATTERN).match(self.Token[self.Index:])
#
# PcdName
#
Match2 = re.compile(self.PCD_PATTERN).match(self.Token[self.Index:])
#
# Quoted string
#
Match3 = re.compile(self.QUOTED_PATTERN).\
match(self.Token[self.Index:].replace('\\\\', '//').\
replace('\\\"', '\\\''))
return self._CheckToken([Match1, Match2, Match3])
## A || B
#
def LogicalExpression(self):
Ret = self.SpecNot()
while self.IsCurrentOp(['||', 'OR', 'or', '&&', 'AND', 'and', 'XOR', 'xor', '^']):
if self.Token[self.Index-1] == '|' and self.Parens <= 0:
raise _ExprError(ST.ERR_EXPR_OR % self.Token)
if Ret not in [self.ARITH, self.LOGICAL, self.REALLOGICAL, self.STRINGITEM]:
raise _ExprError(ST.ERR_EXPR_LOGICAL % self.Token)
Ret = self.SpecNot()
if Ret not in [self.ARITH, self.LOGICAL, self.REALLOGICAL, self.STRINGITEM]:
raise _ExprError(ST.ERR_EXPR_LOGICAL % self.Token)
Ret = self.REALLOGICAL
return Ret
def SpecNot(self):
if self.IsCurrentOp(["NOT", "!", "not"]):
return self.SpecNot()
return self.Rel()
## A < B, A > B, A <= B, A >= B
#
def Rel(self):
Ret = self.Expr()
if self.IsCurrentOp(["<=", ">=", ">", "<", "GT", "LT", "GE", "LE",
"==", "EQ", "!=", "NE"]):
if Ret == self.STRINGITEM:
raise _ExprError(ST.ERR_EXPR_LOGICAL % self.Token)
Ret = self.Expr()
if Ret == self.REALLOGICAL:
raise _ExprError(ST.ERR_EXPR_LOGICAL % self.Token)
Ret = self.REALLOGICAL
return Ret
## A + B, A - B
#
def Expr(self):
Ret = self.Factor()
while self.IsCurrentOp(["+", "-", "&", "|", "^", "XOR", "xor"]):
if self.Token[self.Index-1] == '|' and self.Parens <= 0:
raise _ExprError(ST.ERR_EXPR_OR)
if Ret == self.STRINGITEM or Ret == self.REALLOGICAL:
raise _ExprError(ST.ERR_EXPR_LOGICAL % self.Token)
Ret = self.Factor()
if Ret == self.STRINGITEM or Ret == self.REALLOGICAL:
raise _ExprError(ST.ERR_EXPR_LOGICAL % self.Token)
Ret = self.ARITH
return Ret
## Factor
#
def Factor(self):
if self.IsCurrentOp(["("]):
self.Parens += 1
Ret = self.LogicalExpression()
if not self.IsCurrentOp([")"]):
raise _ExprError(ST.ERR_EXPR_RIGHT_PAREN % \
(self.Token, self.Token[self.Index:]))
self.Parens -= 1
return Ret
if self.IsAtomicItem():
if self.Token[self.Index - 1] == '"':
return self.STRINGITEM
return self.LOGICAL
elif self.IsAtomicNumVal():
return self.ARITH
else:
raise _ExprError(ST.ERR_EXPR_FACTOR % \
(self.Token[self.Index:], self.Token))
## IsValidLogicalExpression
#
def IsValidLogicalExpression(self):
if self.Len == 0:
return False, ST.ERR_EXPRESS_EMPTY
try:
if self.LogicalExpression() not in [self.ARITH, self.LOGICAL, self.REALLOGICAL, self.STRINGITEM]:
return False, ST.ERR_EXPR_LOGICAL % self.Token
except _ExprError as XExcept:
return False, XExcept.Error
self.SkipWhitespace()
if self.Index != self.Len:
return False, (ST.ERR_EXPR_BOOLEAN % \
(self.Token[self.Index:], self.Token))
return True, ''
## _ValidRangeExpressionParser
#
class _ValidRangeExpressionParser(_ExprBase):
INT_RANGE_PATTERN = '[\t\s]*[0-9]+[\t\s]*-[\t\s]*[0-9]+'
HEX_RANGE_PATTERN = \
'[\t\s]*0[xX][a-fA-F0-9]+[\t\s]*-[\t\s]*0[xX][a-fA-F0-9]+'
def __init__(self, Token):
_ExprBase.__init__(self, Token)
self.Parens = 0
self.HEX = 1
self.INT = 2
self.IsParenHappen = False
self.IsLogicalOpHappen = False
## IsValidRangeExpression
#
def IsValidRangeExpression(self):
if self.Len == 0:
return False, ST.ERR_EXPR_RANGE_EMPTY
try:
if self.RangeExpression() not in [self.HEX, self.INT]:
return False, ST.ERR_EXPR_RANGE % self.Token
except _ExprError as XExcept:
return False, XExcept.Error
self.SkipWhitespace()
if self.Index != self.Len:
return False, (ST.ERR_EXPR_RANGE % self.Token)
return True, ''
## RangeExpression
#
def RangeExpression(self):
Ret = self.Unary()
while self.IsCurrentOp(['OR', 'AND', 'and', 'or']):
self.IsLogicalOpHappen = True
if not self.IsParenHappen:
raise _ExprError(ST.ERR_PAREN_NOT_USED % self.Token)
self.IsParenHappen = False
Ret = self.Unary()
if self.IsCurrentOp(['XOR']):
Ret = self.Unary()
return Ret
## Unary
#
def Unary(self):
if self.IsCurrentOp(["NOT"]):
return self.Unary()
return self.ValidRange()
## ValidRange
#
def ValidRange(self):
Ret = -1
if self.IsCurrentOp(["("]):
self.IsLogicalOpHappen = False
self.IsParenHappen = True
self.Parens += 1
if self.Parens > 1:
raise _ExprError(ST.ERR_EXPR_RANGE_DOUBLE_PAREN_NESTED % self.Token)
Ret = self.RangeExpression()
if not self.IsCurrentOp([")"]):
raise _ExprError(ST.ERR_EXPR_RIGHT_PAREN % self.Token)
self.Parens -= 1
return Ret
if self.IsLogicalOpHappen:
raise _ExprError(ST.ERR_PAREN_NOT_USED % self.Token)
if self.IsCurrentOp(["LT", "GT", "LE", "GE", "EQ", "XOR"]):
IntMatch = \
re.compile(self.INT_PATTERN).match(self.Token[self.Index:])
HexMatch = \
re.compile(self.HEX_PATTERN).match(self.Token[self.Index:])
if HexMatch and HexMatch.start() == 0:
self.Index += HexMatch.end()
Ret = self.HEX
elif IntMatch and IntMatch.start() == 0:
self.Index += IntMatch.end()
Ret = self.INT
else:
raise _ExprError(ST.ERR_EXPR_RANGE_FACTOR % (self.Token[self.Index:], self.Token))
else:
IntRangeMatch = re.compile(
self.INT_RANGE_PATTERN).match(self.Token[self.Index:]
)
HexRangeMatch = re.compile(
self.HEX_RANGE_PATTERN).match(self.Token[self.Index:]
)
if HexRangeMatch and HexRangeMatch.start() == 0:
self.Index += HexRangeMatch.end()
Ret = self.HEX
elif IntRangeMatch and IntRangeMatch.start() == 0:
self.Index += IntRangeMatch.end()
Ret = self.INT
else:
raise _ExprError(ST.ERR_EXPR_RANGE % self.Token)
return Ret
## _ValidListExpressionParser
#
class _ValidListExpressionParser(_ExprBase):
VALID_LIST_PATTERN = '(0[xX][0-9a-fA-F]+|[0-9]+)([\t\s]*,[\t\s]*(0[xX][0-9a-fA-F]+|[0-9]+))*'
def __init__(self, Token):
_ExprBase.__init__(self, Token)
self.NUM = 1
def IsValidListExpression(self):
if self.Len == 0:
return False, ST.ERR_EXPR_LIST_EMPTY
try:
if self.ListExpression() not in [self.NUM]:
return False, ST.ERR_EXPR_LIST % self.Token
except _ExprError as XExcept:
return False, XExcept.Error
self.SkipWhitespace()
if self.Index != self.Len:
return False, (ST.ERR_EXPR_LIST % self.Token)
return True, ''
def ListExpression(self):
Ret = -1
self.SkipWhitespace()
ListMatch = re.compile(self.VALID_LIST_PATTERN).match(self.Token[self.Index:])
if ListMatch and ListMatch.start() == 0:
self.Index += ListMatch.end()
Ret = self.NUM
else:
raise _ExprError(ST.ERR_EXPR_LIST % self.Token)
return Ret
## _StringTestParser
#
class _StringTestParser(_ExprBase):
def __init__(self, Token):
_ExprBase.__init__(self, Token)
## IsValidStringTest
#
def IsValidStringTest(self):
if self.Len == 0:
return False, ST.ERR_EXPR_EMPTY
try:
self.StringTest()
except _ExprError as XExcept:
return False, XExcept.Error
return True, ''
## StringItem
#
def StringItem(self):
Match1 = re.compile(self.QUOTED_PATTERN)\
.match(self.Token[self.Index:].replace('\\\\', '//')\
.replace('\\\"', '\\\''))
Match2 = re.compile(self.MACRO_PATTERN).match(self.Token[self.Index:])
Match3 = re.compile(self.PCD_PATTERN).match(self.Token[self.Index:])
MatchList = [Match1, Match2, Match3]
for Match in MatchList:
if Match and Match.start() == 0:
if not _ValidateToken(
self.Token[self.Index:self.Index+Match.end()]
):
raise _ExprError(ST.ERR_EXPR_STRING_ITEM % \
(self.Token, self.Token[self.Index:]))
self.Index += Match.end()
Token = self.Token[self.Index - Match.end():self.Index]
if Token.strip() in ["EQ", "NE"]:
raise _ExprError(ST.ERR_EXPR_STRING_ITEM % \
(self.Token, self.Token[self.Index:]))
return
else:
raise _ExprError(ST.ERR_EXPR_STRING_ITEM % \
(self.Token, self.Token[self.Index:]))
## StringTest
#
def StringTest(self):
self.StringItem()
if not self.IsCurrentOp(["==", "EQ", "!=", "NE"]):
raise _ExprError(ST.ERR_EXPR_EQUALITY % \
(self.Token[self.Index:], self.Token))
self.StringItem()
if self.Index != self.Len:
raise _ExprError(ST.ERR_EXPR_BOOLEAN % \
(self.Token[self.Index:], self.Token))
##
# Check syntax of string test
#
# @param Token: string test token
#
def IsValidStringTest(Token, Flag=False):
#
    # Do not do the check right now; keep the implementation for future enhancement.
#
if not Flag:
return True, ""
return _StringTestParser(Token).IsValidStringTest()
##
# Check syntax of logical expression
#
# @param Token: expression token
#
def IsValidLogicalExpr(Token, Flag=False):
#
    # Do not do the check right now; keep the implementation for future enhancement.
#
if not Flag:
return True, ""
return _LogicalExpressionParser(Token).IsValidLogicalExpression()
##
# Check syntax of range expression
#
# @param Token: range expression token
#
def IsValidRangeExpr(Token):
return _ValidRangeExpressionParser(Token).IsValidRangeExpression()
##
# Check syntax of value list expression token
#
# @param Token: value list expression token
#
def IsValidListExpr(Token):
return _ValidListExpressionParser(Token).IsValidListExpression()
##
# Check whether the feature flag expression is valid or not
#
# @param Token: feature flag expression
#
def IsValidFeatureFlagExp(Token, Flag=False):
#
    # Do not do the check right now; keep the implementation for future enhancement.
#
if not Flag:
return True, "", Token
else:
if Token in ['TRUE', 'FALSE', 'true', 'false', 'True', 'False',
'0x1', '0x01', '0x0', '0x00']:
return True, ""
Valid, Cause = IsValidStringTest(Token, Flag)
if not Valid:
Valid, Cause = IsValidLogicalExpr(Token, Flag)
if not Valid:
return False, Cause
return True, ""
if __name__ == '__main__':
# print IsValidRangeExpr('LT 9')
print(_LogicalExpressionParser('gCrownBayTokenSpaceGuid.PcdPciDevice1BridgeAddressLE0').IsValidLogicalExpression())
| edk2-master | BaseTools/Source/Python/UPT/Library/ExpressionValidate.py |
## @file
# This file is used to define comment generating interface
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
CommentGenerating
'''
##
# Import Modules
#
from Library.StringUtils import GetSplitValueList
from Library.DataType import TAB_SPACE_SPLIT
from Library.DataType import TAB_INF_GUIDTYPE_VAR
from Library.DataType import USAGE_ITEM_NOTIFY
from Library.DataType import ITEM_UNDEFINED
from Library.DataType import TAB_HEADER_COMMENT
from Library.DataType import TAB_BINARY_HEADER_COMMENT
from Library.DataType import TAB_COMMENT_SPLIT
from Library.DataType import TAB_SPECIAL_COMMENT
from Library.DataType import END_OF_LINE
from Library.DataType import TAB_COMMENT_EDK1_SPLIT
from Library.DataType import TAB_COMMENT_EDK1_START
from Library.DataType import TAB_COMMENT_EDK1_END
from Library.DataType import TAB_STAR
from Library.DataType import TAB_PCD_PROMPT
from Library.UniClassObject import ConvertSpecialUnicodes
from Library.Misc import GetLocalValue
## GenTailCommentLines
#
# @param TailCommentLines: the tail comment lines that need to be generated
# @param LeadingSpaceNum: the number of leading spaces needed for non-first
#                          line tail comments
#
def GenTailCommentLines (TailCommentLines, LeadingSpaceNum = 0):
TailCommentLines = TailCommentLines.rstrip(END_OF_LINE)
CommentStr = TAB_SPACE_SPLIT*2 + TAB_SPECIAL_COMMENT + TAB_SPACE_SPLIT + \
(END_OF_LINE + LeadingSpaceNum * TAB_SPACE_SPLIT + TAB_SPACE_SPLIT*2 + TAB_SPECIAL_COMMENT + \
TAB_SPACE_SPLIT).join(GetSplitValueList(TailCommentLines, END_OF_LINE))
return CommentStr
## GenGenericComment
#
# @param CommentLines: Generic comment Text, maybe Multiple Lines
#
def GenGenericComment (CommentLines):
if not CommentLines:
return ''
CommentLines = CommentLines.rstrip(END_OF_LINE)
CommentStr = TAB_SPECIAL_COMMENT + TAB_SPACE_SPLIT + (END_OF_LINE + TAB_COMMENT_SPLIT + TAB_SPACE_SPLIT).join\
(GetSplitValueList(CommentLines, END_OF_LINE)) + END_OF_LINE
return CommentStr
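## _GenGenericCommentExample
#
# Editor's illustrative sketch, not part of the original UPT sources. It shows
# the assumed output of GenGenericComment above, assuming TAB_SPECIAL_COMMENT
# is '##', TAB_COMMENT_SPLIT is '#' and END_OF_LINE is '\n'.
#
def _GenGenericCommentExample():
    # a two-line comment text becomes a '##'-led block comment
    assert GenGenericComment('First line\nSecond line') == \
        '## First line\n# Second line\n'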
## GenGenericCommentF
#
# Similar to GenGenericComment but removes the <EOL> at the end of the comment once,
# and for a line with only <EOL>, '#\n' will be generated instead of '# \n'
#
# @param CommentLines: Generic comment Text, maybe Multiple Lines
# @return CommentStr: Generated comment line
#
def GenGenericCommentF (CommentLines, NumOfPound=1, IsPrompt=False, IsInfLibraryClass=False):
if not CommentLines:
return ''
#
    # if the comment ends with '\n', remove it to prevent an extra line
    # being generated later on
#
if CommentLines.endswith(END_OF_LINE):
CommentLines = CommentLines[:-1]
CommentStr = ''
if IsPrompt:
CommentStr += TAB_COMMENT_SPLIT * NumOfPound + TAB_SPACE_SPLIT + TAB_PCD_PROMPT + TAB_SPACE_SPLIT + \
CommentLines.replace(END_OF_LINE, '') + END_OF_LINE
else:
CommentLineList = GetSplitValueList(CommentLines, END_OF_LINE)
FindLibraryClass = False
for Line in CommentLineList:
            # If this comment is for @libraryclass and it has multiple lines,
            # make sure subsequent lines align to the first line after @libraryclass as below
#
# ## @libraryclass XYZ FIRST_LINE
# ## ABC SECOND_LINE
#
if IsInfLibraryClass and Line.find(u'@libraryclass ') > -1:
FindLibraryClass = True
if Line == '':
CommentStr += TAB_COMMENT_SPLIT * NumOfPound + END_OF_LINE
else:
if FindLibraryClass and Line.find(u'@libraryclass ') > -1:
CommentStr += TAB_COMMENT_SPLIT * NumOfPound + TAB_SPACE_SPLIT + Line + END_OF_LINE
elif FindLibraryClass:
CommentStr += TAB_COMMENT_SPLIT * NumOfPound + TAB_SPACE_SPLIT * 16 + Line + END_OF_LINE
else:
CommentStr += TAB_COMMENT_SPLIT * NumOfPound + TAB_SPACE_SPLIT + Line + END_OF_LINE
return CommentStr
## GenHeaderCommentSection
#
# Generate Header comment sections
#
# @param Abstract One line of abstract
# @param Description multiple lines of Description
# @param Copyright possible multiple copyright lines
# @param License possible multiple license lines
#
def GenHeaderCommentSection(Abstract, Description, Copyright, License, IsBinaryHeader=False, \
CommChar=TAB_COMMENT_SPLIT):
Content = ''
#
# Convert special character to (c), (r) and (tm).
#
Abstract = ConvertSpecialUnicodes(Abstract)
Description = ConvertSpecialUnicodes(Description)
if IsBinaryHeader:
Content += CommChar * 2 + TAB_SPACE_SPLIT + TAB_BINARY_HEADER_COMMENT + '\r\n'
elif CommChar == TAB_COMMENT_EDK1_SPLIT:
Content += CommChar + TAB_SPACE_SPLIT + TAB_COMMENT_EDK1_START + TAB_STAR + TAB_SPACE_SPLIT +\
TAB_HEADER_COMMENT + '\r\n'
else:
Content += CommChar * 2 + TAB_SPACE_SPLIT + TAB_HEADER_COMMENT + '\r\n'
if Abstract:
Abstract = Abstract.rstrip('\r\n')
Content += CommChar + TAB_SPACE_SPLIT + ('\r\n' + CommChar + TAB_SPACE_SPLIT).join(GetSplitValueList\
(Abstract, '\n'))
Content += '\r\n' + CommChar + '\r\n'
else:
Content += CommChar + '\r\n'
if Description:
Description = Description.rstrip('\r\n')
Content += CommChar + TAB_SPACE_SPLIT + ('\r\n' + CommChar + TAB_SPACE_SPLIT).join(GetSplitValueList\
(Description, '\n'))
Content += '\r\n' + CommChar + '\r\n'
#
# There is no '#\n' line to separate multiple copyright lines in code base
#
if Copyright:
Copyright = Copyright.rstrip('\r\n')
Content += CommChar + TAB_SPACE_SPLIT + ('\r\n' + CommChar + TAB_SPACE_SPLIT).join\
(GetSplitValueList(Copyright, '\n'))
Content += '\r\n' + CommChar + '\r\n'
if License:
License = License.rstrip('\r\n')
Content += CommChar + TAB_SPACE_SPLIT + ('\r\n' + CommChar + TAB_SPACE_SPLIT).join(GetSplitValueList\
(License, '\n'))
Content += '\r\n' + CommChar + '\r\n'
if CommChar == TAB_COMMENT_EDK1_SPLIT:
Content += CommChar + TAB_SPACE_SPLIT + TAB_STAR + TAB_COMMENT_EDK1_END + '\r\n'
else:
Content += CommChar * 2 + '\r\n'
return Content
## GenInfPcdTailComment
# Generate a Pcd tail comment for Inf; this will be a one-line comment
#
# @param Usage: Usage type
# @param TailCommentText: Comment text for tail comment
#
def GenInfPcdTailComment (Usage, TailCommentText):
if (Usage == ITEM_UNDEFINED) and (not TailCommentText):
return ''
CommentLine = TAB_SPACE_SPLIT.join([Usage, TailCommentText])
return GenTailCommentLines(CommentLine)
## GenInfProtocolPPITailComment
# Generate Protocol/PPI tail comment for Inf
#
# @param Usage: Usage type
# @param TailCommentText: Comment text for tail comment
#
def GenInfProtocolPPITailComment (Usage, Notify, TailCommentText):
if (not Notify) and (Usage == ITEM_UNDEFINED) and (not TailCommentText):
return ''
if Notify:
CommentLine = USAGE_ITEM_NOTIFY + " ## "
else:
CommentLine = ''
CommentLine += TAB_SPACE_SPLIT.join([Usage, TailCommentText])
return GenTailCommentLines(CommentLine)
## GenInfGuidTailComment
# Generate Guid tail comment for Inf
#
# @param Usage: Usage type
# @param TailCommentText: Comment text for tail comment
#
def GenInfGuidTailComment (Usage, GuidTypeList, VariableName, TailCommentText):
GuidType = GuidTypeList[0]
if (Usage == ITEM_UNDEFINED) and (GuidType == ITEM_UNDEFINED) and \
(not TailCommentText):
return ''
FirstLine = Usage + " ## " + GuidType
if GuidType == TAB_INF_GUIDTYPE_VAR:
FirstLine += ":" + VariableName
CommentLine = TAB_SPACE_SPLIT.join([FirstLine, TailCommentText])
return GenTailCommentLines(CommentLine)
## GenDecTailComment
#
# @param SupModuleList: Supported module type list
#
def GenDecTailComment (SupModuleList):
CommentLine = TAB_SPACE_SPLIT.join(SupModuleList)
return GenTailCommentLines(CommentLine)
## _GetHelpStr
# get the HelpString from a list of HelpTextObjects; the priority refers to
# the related HLD
#
# @param HelpTextObjList: List of HelpTextObject
#
# @return HelpStr: the help text string found, '' means no help text found
#
def _GetHelpStr(HelpTextObjList):
ValueList = []
for HelpObj in HelpTextObjList:
ValueList.append((HelpObj.GetLang(), HelpObj.GetString()))
return GetLocalValue(ValueList, True)
| edk2-master | BaseTools/Source/Python/UPT/Library/CommentGenerating.py |
## @file
# This file is used to define class for data type structure
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
# Portions Copyright (c) 2022, Loongson Technology Corporation Limited. All rights reserved.
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
'''
DataType
'''
##
# Module List Items
#
MODULE_LIST = ["BASE",
"SEC",
"PEI_CORE",
"PEIM",
"DXE_CORE",
"DXE_DRIVER",
"SMM_CORE",
"DXE_RUNTIME_DRIVER",
"DXE_SAL_DRIVER",
"DXE_SMM_DRIVER",
"UEFI_DRIVER",
"UEFI_APPLICATION",
"USER_DEFINED"]
VALID_DEPEX_MODULE_TYPE_LIST = ["PEIM",
"DXE_DRIVER",
"DXE_SMM_DRIVER",
"DXE_RUNTIME_DRIVER",
"DXE_SAL_DRIVER",
"UEFI_DRIVER",
]
##
# Usage List Items
#
USAGE_LIST = ["CONSUMES",
"SOMETIMES_CONSUMES",
"PRODUCES",
"SOMETIMES_PRODUCES"]
TAB_LANGUAGE_EN_US = 'en-US'
TAB_LANGUAGE_ENG = 'eng'
TAB_LANGUAGE_EN = 'en'
TAB_LANGUAGE_EN_X = 'en-x-tianocore'
USAGE_ITEM_PRODUCES = 'PRODUCES'
USAGE_ITEM_SOMETIMES_PRODUCES = 'SOMETIMES_PRODUCES'
USAGE_ITEM_CONSUMES = 'CONSUMES'
USAGE_ITEM_SOMETIMES_CONSUMES = 'SOMETIMES_CONSUMES'
USAGE_ITEM_TO_START = 'TO_START'
USAGE_ITEM_BY_START = 'BY_START'
USAGE_ITEM_NOTIFY = 'NOTIFY'
USAGE_ITEM_UNDEFINED = 'UNDEFINED'
USAGE_CONSUMES_LIST = [USAGE_ITEM_CONSUMES,
'CONSUMED',
'ALWAYS_CONSUMED',
'ALWAYS_CONSUMES'
]
USAGE_PRODUCES_LIST = [USAGE_ITEM_PRODUCES,
'PRODUCED',
'ALWAYS_PRODUCED',
'ALWAYS_PRODUCES'
]
USAGE_SOMETIMES_PRODUCES_LIST = [USAGE_ITEM_SOMETIMES_PRODUCES,
'SOMETIMES_PRODUCED'
]
USAGE_SOMETIMES_CONSUMES_LIST = [USAGE_ITEM_SOMETIMES_CONSUMES,
'SOMETIMES_CONSUMED'
]
ITEM_UNDEFINED = 'UNDEFINED'
TAB_PCD_VALIDRANGE = '@ValidRange'
TAB_PCD_VALIDLIST = '@ValidList'
TAB_PCD_EXPRESSION = '@Expression'
TAB_PCD_PROMPT = '@Prompt'
TAB_STR_TOKENCNAME = 'STR'
TAB_STR_TOKENPROMPT = 'PROMPT'
TAB_STR_TOKENHELP = 'HELP'
TAB_STR_TOKENERR = 'ERR'
#
# Dictionary of usage tokens and their synonyms
#
ALL_USAGE_TOKENS = {
"PRODUCES" : "PRODUCES",
"PRODUCED" : "PRODUCES",
"ALWAYS_PRODUCES" : "PRODUCES",
"ALWAYS_PRODUCED" : "PRODUCES",
"SOMETIMES_PRODUCES" : "SOMETIMES_PRODUCES",
"SOMETIMES_PRODUCED" : "SOMETIMES_PRODUCES",
"CONSUMES" : "CONSUMES",
"CONSUMED" : "CONSUMES",
"ALWAYS_CONSUMES" : "CONSUMES",
"ALWAYS_CONSUMED" : "CONSUMES",
"SOMETIMES_CONSUMES" : "SOMETIMES_CONSUMES",
"SOMETIMES_CONSUMED" : "SOMETIMES_CONSUMES",
"SOMETIME_CONSUMES" : "SOMETIMES_CONSUMES",
"UNDEFINED" : "UNDEFINED"
}
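#
# Example (illustrative): the dictionary above normalizes usage synonyms, e.g.
#   ALL_USAGE_TOKENS['ALWAYS_CONSUMED']    -> 'CONSUMES'
#   ALL_USAGE_TOKENS['SOMETIMES_PRODUCED'] -> 'SOMETIMES_PRODUCES'
#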
PROTOCOL_USAGE_TOKENS = {
"TO_START" : "TO_START",
"BY_START" : "BY_START"
}
PROTOCOL_USAGE_TOKENS.update (ALL_USAGE_TOKENS)
#
# Dictionary of GUID type tokens
#
GUID_TYPE_TOKENS = {
"Event" : "Event",
"File" : "File",
"FV" : "FV",
"GUID" : "GUID",
"Guid" : "GUID",
"HII" : "HII",
"HOB" : "HOB",
"Hob" : "HOB",
"Hob:" : "HOB",
"SystemTable" : "SystemTable",
"TokenSpaceGuid" : "TokenSpaceGuid",
"UNDEFINED" : "UNDEFINED"
}
#
# Dictionary of Protocol Notify tokens and their synonyms
#
PROTOCOL_NOTIFY_TOKENS = {
"NOTIFY" : "NOTIFY",
"PROTOCOL_NOTIFY" : "NOTIFY",
"UNDEFINED" : "UNDEFINED"
}
#
# Dictionary of PPI Notify tokens and their synonyms
#
PPI_NOTIFY_TOKENS = {
"NOTIFY" : "NOTIFY",
"PPI_NOTIFY" : "NOTIFY",
"UNDEFINED" : "UNDEFINED"
}
EVENT_TOKENS = {
"EVENT_TYPE_PERIODIC_TIMER" : "EVENT_TYPE_PERIODIC_TIMER",
"EVENT_TYPE_RELATIVE_TIMER" : "EVENT_TYPE_RELATIVE_TIMER",
"UNDEFINED" : "UNDEFINED"
}
BOOTMODE_TOKENS = {
"FULL" : "FULL",
"MINIMAL" : "MINIMAL",
"NO_CHANGE" : "NO_CHANGE",
"DIAGNOSTICS" : "DIAGNOSTICS",
"DEFAULT" : "DEFAULT",
"S2_RESUME" : "S2_RESUME",
"S3_RESUME" : "S3_RESUME",
"S4_RESUME" : "S4_RESUME",
"S5_RESUME" : "S5_RESUME",
"FLASH_UPDATE" : "FLASH_UPDATE",
"RECOVERY_FULL" : "RECOVERY_FULL",
"RECOVERY_MINIMAL" : "RECOVERY_MINIMAL",
"RECOVERY_NO_CHANGE" : "RECOVERY_NO_CHANGE",
"RECOVERY_DIAGNOSTICS" : "RECOVERY_DIAGNOSTICS",
"RECOVERY_DEFAULT" : "RECOVERY_DEFAULT",
"RECOVERY_S2_RESUME" : "RECOVERY_S2_RESUME",
"RECOVERY_S3_RESUME" : "RECOVERY_S3_RESUME",
"RECOVERY_S4_RESUME" : "RECOVERY_S4_RESUME",
"RECOVERY_S5_RESUME" : "RECOVERY_S5_RESUME",
"RECOVERY_FLASH_UPDATE" : "RECOVERY_FLASH_UPDATE",
"UNDEFINED" : "UNDEFINED"
}
HOB_TOKENS = {
"PHIT" : "PHIT",
"MEMORY_ALLOCATION" : "MEMORY_ALLOCATION",
"LOAD_PEIM" : "LOAD_PEIM",
"RESOURCE_DESCRIPTOR" : "RESOURCE_DESCRIPTOR",
"FIRMWARE_VOLUME" : "FIRMWARE_VOLUME",
"UNDEFINED" : "UNDEFINED"
}
##
# Usage List Items for Protocol
#
PROTOCOL_USAGE_LIST = USAGE_LIST + ["TO_START", "BY_START"]
##
# End of Line
# Use this instead of os.linesep, because os.linesep has a bug in it.
#
END_OF_LINE = '\n'
##
# Arch List Items
#
ARCH_LIST = ["IA32",
"X64",
"IPF",
"EBC",
"COMMON"]
##
# PCD driver type list items
#
PCD_DRIVER_TYPE_LIST = ["PEI_PCD_DRIVER", "DXE_PCD_DRIVER"]
##
# Boot Mode List Items
#
BOOT_MODE_LIST = ["FULL",
"MINIMAL",
"NO_CHANGE",
"DIAGNOSTICS",
"DEFAULT",
"S2_RESUME",
"S3_RESUME",
"S4_RESUME",
"S5_RESUME",
"FLASH_UPDATE",
"RECOVERY_FULL",
"RECOVERY_MINIMAL",
"RECOVERY_NO_CHANGE",
"RECOVERY_DIAGNOSTICS",
"RECOVERY_DEFAULT",
"RECOVERY_S2_RESUME",
"RECOVERY_S3_RESUME",
"RECOVERY_S4_RESUME",
"RECOVERY_S5_RESUME",
"RECOVERY_FLASH_UPDATE"]
##
# Event Type List Items
#
EVENT_TYPE_LIST = ["EVENT_TYPE_PERIODIC_TIMER",
"EVENT_TYPE_RELATIVE_TIMER"]
##
# Hob Type List Items
#
HOB_TYPE_LIST = ["PHIT",
"MEMORY_ALLOCATION",
"RESOURCE_DESCRIPTOR",
"FIRMWARE_VOLUME",
"LOAD_PEIM"]
##
# GUID_TYPE_LIST
#
GUID_TYPE_LIST = ["Event", "File", "FV", "GUID", "HII", "HOB",
"SystemTable", "TokenSpaceGuid", "Variable"]
##
# PCD Usage Type List of Package
#
PCD_USAGE_TYPE_LIST_OF_PACKAGE = ["FeatureFlag", "PatchableInModule",
"FixedAtBuild", "Dynamic", "DynamicEx"]
##
# PCD Usage Type List of Module
#
PCD_USAGE_TYPE_LIST_OF_MODULE = ["FEATUREPCD", "PATCHPCD", "FIXEDPCD", "PCD", "PCDEX"]
##
# PCD Usage Type List of UPT
#
PCD_USAGE_TYPE_LIST_OF_UPT = PCD_USAGE_TYPE_LIST_OF_MODULE
##
# Binary File Type List
#
BINARY_FILE_TYPE_LIST = ["PE32", "PIC", "TE", "DXE_DEPEX", "VER", "UI", "COMPAT16", "FV", "BIN", "RAW",
"ACPI", "ASL",
"PEI_DEPEX",
"SMM_DEPEX",
"SUBTYPE_GUID",
"DISPOSABLE"
]
BINARY_FILE_TYPE_LIST_IN_UDP = \
["GUID", "FREEFORM",
"UEFI_IMAGE", "PE32", "PIC",
"PEI_DEPEX",
"DXE_DEPEX",
"SMM_DEPEX",
"FV", "TE",
"BIN", "VER", "UI"
]
SUBTYPE_GUID_BINARY_FILE_TYPE = "FREEFORM"
##
# Possible values for COMPONENT_TYPE, and their descriptions, are listed in
# the table "Component (module) Types." For each component, the BASE_NAME
# and COMPONENT_TYPE are required. The COMPONENT_TYPE definition is case
# sensitive.
#
COMPONENT_TYPE_LIST = [
"APPLICATION",
"ACPITABLE",
"APRIORI",
"BINARY",
"BS_DRIVER",
"CONFIG",
"FILE",
"FVIMAGEFILE",
"LIBRARY",
"LOGO",
"LEGACY16",
"MICROCODE",
"PE32_PEIM",
"PEI_CORE",
"RAWFILE",
"RT_DRIVER",
"SAL_RT_DRIVER",
"SECURITY_CORE",
"COMBINED_PEIM_DRIVER",
"PIC_PEIM",
"RELOCATABLE_PEIM"
]
##
# Common Definitions
#
TAB_SPLIT = '.'
TAB_COMMENT_EDK1_START = '/*'
TAB_COMMENT_EDK1_END = '*/'
TAB_COMMENT_EDK1_SPLIT = '//'
TAB_COMMENT_SPLIT = '#'
TAB_EQUAL_SPLIT = '='
TAB_DEQUAL_SPLIT = '=='
TAB_VALUE_SPLIT = '|'
TAB_COMMA_SPLIT = ','
TAB_HORIZON_LINE_SPLIT = '-'
TAB_SPACE_SPLIT = ' '
TAB_UNDERLINE_SPLIT = '_'
TAB_SEMI_COLON_SPLIT = ';'
TAB_COLON_SPLIT = ':'
TAB_SECTION_START = '['
TAB_SECTION_END = ']'
TAB_OPTION_START = '<'
TAB_OPTION_END = '>'
TAB_SLASH = '\\'
TAB_BACK_SLASH = '/'
TAB_SPECIAL_COMMENT = '##'
TAB_HEADER_COMMENT = '@file'
TAB_BINARY_HEADER_COMMENT = '@BinaryHeader'
TAB_STAR = '*'
TAB_ENCODING_UTF16LE = 'utf_16_le'
TAB_CAPHEX_START = '0X'
TAB_HEX_START = '0x'
TAB_PCD_ERROR = 'Error'
TAB_PCD_ERROR_SECTION_COMMENT = 'Error message section'
TAB_UNI_FILE_SUFFIXS = ['.uni', '.UNI', '.Uni']
TAB_EDK_SOURCE = '$(EDK_SOURCE)'
TAB_EFI_SOURCE = '$(EFI_SOURCE)'
TAB_WORKSPACE = '$(WORKSPACE)'
TAB_ARCH_NULL = ''
TAB_ARCH_COMMON = 'COMMON'
TAB_ARCH_IA32 = 'IA32'
TAB_ARCH_X64 = 'X64'
TAB_ARCH_IPF = 'IPF'
TAB_ARCH_ARM = 'ARM'
TAB_ARCH_LOONGARCH64 = 'LOONGARCH64'
TAB_ARCH_EBC = 'EBC'
ARCH_LIST = \
[TAB_ARCH_IA32, TAB_ARCH_X64, TAB_ARCH_IPF, TAB_ARCH_ARM, TAB_ARCH_LOONGARCH64, TAB_ARCH_EBC]
SUP_MODULE_BASE = 'BASE'
SUP_MODULE_SEC = 'SEC'
SUP_MODULE_PEI_CORE = 'PEI_CORE'
SUP_MODULE_PEIM = 'PEIM'
SUP_MODULE_DXE_CORE = 'DXE_CORE'
SUP_MODULE_DXE_DRIVER = 'DXE_DRIVER'
SUP_MODULE_DXE_RUNTIME_DRIVER = 'DXE_RUNTIME_DRIVER'
SUP_MODULE_DXE_SAL_DRIVER = 'DXE_SAL_DRIVER'
SUP_MODULE_DXE_SMM_DRIVER = 'DXE_SMM_DRIVER'
SUP_MODULE_UEFI_DRIVER = 'UEFI_DRIVER'
SUP_MODULE_UEFI_APPLICATION = 'UEFI_APPLICATION'
SUP_MODULE_USER_DEFINED = 'USER_DEFINED'
SUP_MODULE_SMM_CORE = 'SMM_CORE'
SUP_MODULE_LIST = \
[SUP_MODULE_BASE, SUP_MODULE_SEC, SUP_MODULE_PEI_CORE, SUP_MODULE_PEIM, \
SUP_MODULE_DXE_CORE, SUP_MODULE_DXE_DRIVER, \
SUP_MODULE_DXE_RUNTIME_DRIVER, SUP_MODULE_DXE_SAL_DRIVER, \
SUP_MODULE_DXE_SMM_DRIVER, SUP_MODULE_UEFI_DRIVER, \
SUP_MODULE_UEFI_APPLICATION, SUP_MODULE_USER_DEFINED, \
SUP_MODULE_SMM_CORE]
SUP_MODULE_LIST_STRING = TAB_VALUE_SPLIT.join(SUP_MODULE_LIST)
EDK_COMPONENT_TYPE_LIBRARY = 'LIBRARY'
EDK_COMPONENT_TYPE_SECUARITY_CORE = 'SECUARITY_CORE'
EDK_COMPONENT_TYPE_PEI_CORE = 'PEI_CORE'
EDK_COMPONENT_TYPE_COMBINED_PEIM_DRIVER = 'COMBINED_PEIM_DRIVER'
EDK_COMPONENT_TYPE_PIC_PEIM = 'PIC_PEIM'
EDK_COMPONENT_TYPE_RELOCATABLE_PEIM = 'RELOCATABLE_PEIM'
EDK_COMPONENT_TYPE_BS_DRIVER = 'BS_DRIVER'
EDK_COMPONENT_TYPE_RT_DRIVER = 'RT_DRIVER'
EDK_COMPONENT_TYPE_SAL_RT_DRIVER = 'SAL_RT_DRIVER'
EDK_COMPONENT_TYPE_APPLICATION = 'APPLICATION'
EDK_NAME = 'EDK'
EDKII_NAME = 'EDKII'
BINARY_FILE_TYPE_FW = 'FW'
BINARY_FILE_TYPE_GUID = 'GUID'
BINARY_FILE_TYPE_PREEFORM = 'PREEFORM'
BINARY_FILE_TYPE_UEFI_APP = 'UEFI_APP'
BINARY_FILE_TYPE_UNI_UI = 'UNI_UI'
BINARY_FILE_TYPE_SEC_UI = 'SEC_UI'
BINARY_FILE_TYPE_UNI_VER = 'UNI_VER'
BINARY_FILE_TYPE_SEC_VER = 'SEC_VER'
BINARY_FILE_TYPE_LIB = 'LIB'
BINARY_FILE_TYPE_PE32 = 'PE32'
BINARY_FILE_TYPE_PIC = 'PIC'
BINARY_FILE_TYPE_PEI_DEPEX = 'PEI_DEPEX'
BINARY_FILE_TYPE_DXE_DEPEX = 'DXE_DEPEX'
BINARY_FILE_TYPE_SMM_DEPEX = 'SMM_DEPEX'
BINARY_FILE_TYPE_TE = 'TE'
BINARY_FILE_TYPE_VER = 'VER'
BINARY_FILE_TYPE_UI = 'UI'
BINARY_FILE_TYPE_BIN = 'BIN'
BINARY_FILE_TYPE_FV = 'FV'
BINARY_FILE_TYPE_UI_LIST = [BINARY_FILE_TYPE_UNI_UI,
BINARY_FILE_TYPE_SEC_UI,
BINARY_FILE_TYPE_UI
]
BINARY_FILE_TYPE_VER_LIST = [BINARY_FILE_TYPE_UNI_VER,
BINARY_FILE_TYPE_SEC_VER,
BINARY_FILE_TYPE_VER
]
DEPEX_SECTION_LIST = ['<PEI_DEPEX>',
'<DXE_DEPEX>',
'<SMM_DEPEX>'
]
PLATFORM_COMPONENT_TYPE_LIBRARY = 'LIBRARY'
PLATFORM_COMPONENT_TYPE_LIBRARY_CLASS = 'LIBRARY_CLASS'
PLATFORM_COMPONENT_TYPE_MODULE = 'MODULE'
TAB_LIBRARIES = 'Libraries'
TAB_SOURCE = 'Source'
TAB_SOURCES = 'Sources'
TAB_SOURCES_COMMON = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_COMMON
TAB_SOURCES_IA32 = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_IA32
TAB_SOURCES_X64 = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_X64
TAB_SOURCES_IPF = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_IPF
TAB_SOURCES_ARM = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_ARM
TAB_SOURCES_LOONGARCH64 = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_LOONGARCH64
TAB_SOURCES_EBC = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_EBC
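#
# Example (illustrative): the constants above compose dotted section names, e.g.
#   TAB_SOURCES_COMMON == 'Sources.COMMON'
#   TAB_SOURCES_IA32   == 'Sources.IA32'
#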
TAB_BINARIES = 'Binaries'
TAB_BINARIES_COMMON = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_COMMON
TAB_BINARIES_IA32 = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_IA32
TAB_BINARIES_X64 = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_X64
TAB_BINARIES_IPF = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_IPF
TAB_BINARIES_ARM = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_ARM
TAB_BINARIES_LOONGARCH64 = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_LOONGARCH64
TAB_BINARIES_EBC = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_EBC
TAB_INCLUDES = 'Includes'
TAB_INCLUDES_COMMON = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_COMMON
TAB_INCLUDES_IA32 = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_IA32
TAB_INCLUDES_X64 = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_X64
TAB_INCLUDES_IPF = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_IPF
TAB_INCLUDES_ARM = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_ARM
TAB_INCLUDES_LOONGARCH64 = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_LOONGARCH64
TAB_INCLUDES_EBC = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_EBC
TAB_GUIDS = 'Guids'
TAB_GUIDS_COMMON = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_COMMON
TAB_GUIDS_IA32 = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_IA32
TAB_GUIDS_X64 = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_X64
TAB_GUIDS_IPF = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_IPF
TAB_GUIDS_ARM = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_ARM
TAB_GUIDS_LOONGARCH64 = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_LOONGARCH64
TAB_GUIDS_EBC = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_EBC
TAB_PROTOCOLS = 'Protocols'
TAB_PROTOCOLS_COMMON = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_COMMON
TAB_PROTOCOLS_IA32 = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_IA32
TAB_PROTOCOLS_X64 = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_X64
TAB_PROTOCOLS_IPF = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_IPF
TAB_PROTOCOLS_ARM = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_ARM
TAB_PROTOCOLS_LOONGARCH64 = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_LOONGARCH64
TAB_PROTOCOLS_EBC = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_EBC
TAB_PPIS = 'Ppis'
TAB_PPIS_COMMON = TAB_PPIS + TAB_SPLIT + TAB_ARCH_COMMON
TAB_PPIS_IA32 = TAB_PPIS + TAB_SPLIT + TAB_ARCH_IA32
TAB_PPIS_X64 = TAB_PPIS + TAB_SPLIT + TAB_ARCH_X64
TAB_PPIS_IPF = TAB_PPIS + TAB_SPLIT + TAB_ARCH_IPF
TAB_PPIS_ARM = TAB_PPIS + TAB_SPLIT + TAB_ARCH_ARM
TAB_PPIS_LOONGARCH64 = TAB_PPIS + TAB_SPLIT + TAB_ARCH_LOONGARCH64
TAB_PPIS_EBC = TAB_PPIS + TAB_SPLIT + TAB_ARCH_EBC
TAB_LIBRARY_CLASSES = 'LibraryClasses'
TAB_LIBRARY_CLASSES_COMMON = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_COMMON
TAB_LIBRARY_CLASSES_IA32 = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_IA32
TAB_LIBRARY_CLASSES_X64 = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_X64
TAB_LIBRARY_CLASSES_IPF = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_IPF
TAB_LIBRARY_CLASSES_ARM = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_ARM
TAB_LIBRARY_CLASSES_LOONGARCH64 = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_LOONGARCH64
TAB_LIBRARY_CLASSES_EBC = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_EBC
TAB_PACKAGES = 'Packages'
TAB_PACKAGES_COMMON = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_COMMON
TAB_PACKAGES_IA32 = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_IA32
TAB_PACKAGES_X64 = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_X64
TAB_PACKAGES_IPF = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_IPF
TAB_PACKAGES_ARM = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_ARM
TAB_PACKAGES_LOONGARCH64 = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_LOONGARCH64
TAB_PACKAGES_EBC = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_EBC
TAB_PCDS = 'Pcds'
TAB_PCDS_FIXED_AT_BUILD = 'FixedAtBuild'
TAB_PCDS_PATCHABLE_IN_MODULE = 'PatchableInModule'
TAB_PCDS_FEATURE_FLAG = 'FeatureFlag'
TAB_PCDS_DYNAMIC_EX = 'DynamicEx'
TAB_PCDS_DYNAMIC_EX_DEFAULT = 'DynamicExDefault'
TAB_PCDS_DYNAMIC_EX_VPD = 'DynamicExVpd'
TAB_PCDS_DYNAMIC_EX_HII = 'DynamicExHii'
TAB_PCDS_DYNAMIC = 'Dynamic'
TAB_PCDS_DYNAMIC_DEFAULT = 'DynamicDefault'
TAB_PCDS_DYNAMIC_VPD = 'DynamicVpd'
TAB_PCDS_DYNAMIC_HII = 'DynamicHii'
TAB_PTR_TYPE_PCD = 'VOID*'
PCD_DYNAMIC_TYPE_LIST = [TAB_PCDS_DYNAMIC, TAB_PCDS_DYNAMIC_DEFAULT, \
TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_HII]
PCD_DYNAMIC_EX_TYPE_LIST = [TAB_PCDS_DYNAMIC_EX, TAB_PCDS_DYNAMIC_EX_DEFAULT, \
TAB_PCDS_DYNAMIC_EX_VPD, TAB_PCDS_DYNAMIC_EX_HII]
## Dynamic-ex PCD types
#
gDYNAMIC_EX_PCD = [TAB_PCDS_DYNAMIC_EX, TAB_PCDS_DYNAMIC_EX_DEFAULT, \
TAB_PCDS_DYNAMIC_EX_VPD, TAB_PCDS_DYNAMIC_EX_HII]
TAB_PCDS_FIXED_AT_BUILD_NULL = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD
TAB_PCDS_FIXED_AT_BUILD_COMMON = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + \
TAB_SPLIT + TAB_ARCH_COMMON
TAB_PCDS_FIXED_AT_BUILD_IA32 = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + \
TAB_SPLIT + TAB_ARCH_IA32
TAB_PCDS_FIXED_AT_BUILD_X64 = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + \
TAB_SPLIT + TAB_ARCH_X64
TAB_PCDS_FIXED_AT_BUILD_IPF = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + \
TAB_SPLIT + TAB_ARCH_IPF
TAB_PCDS_FIXED_AT_BUILD_ARM = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + \
TAB_SPLIT + TAB_ARCH_ARM
TAB_PCDS_FIXED_AT_BUILD_LOONGARCH64 = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + \
TAB_SPLIT + TAB_ARCH_LOONGARCH64
TAB_PCDS_FIXED_AT_BUILD_EBC = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + \
TAB_SPLIT + TAB_ARCH_EBC
TAB_PCDS_PATCHABLE_IN_MODULE_NULL = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE
TAB_PCDS_PATCHABLE_IN_MODULE_COMMON = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE \
+ TAB_SPLIT + TAB_ARCH_COMMON
TAB_PCDS_PATCHABLE_IN_MODULE_IA32 = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + \
TAB_SPLIT + TAB_ARCH_IA32
TAB_PCDS_PATCHABLE_IN_MODULE_X64 = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + \
TAB_SPLIT + TAB_ARCH_X64
TAB_PCDS_PATCHABLE_IN_MODULE_IPF = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + \
TAB_SPLIT + TAB_ARCH_IPF
TAB_PCDS_PATCHABLE_IN_MODULE_ARM = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + \
TAB_SPLIT + TAB_ARCH_ARM
TAB_PCDS_PATCHABLE_IN_MODULE_LOONGARCH64 = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + \
TAB_SPLIT + TAB_ARCH_LOONGARCH64
TAB_PCDS_PATCHABLE_IN_MODULE_EBC = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + \
TAB_SPLIT + TAB_ARCH_EBC
TAB_PCDS_FEATURE_FLAG_NULL = TAB_PCDS + TAB_PCDS_FEATURE_FLAG
TAB_PCDS_FEATURE_FLAG_COMMON = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT \
+ TAB_ARCH_COMMON
TAB_PCDS_FEATURE_FLAG_IA32 = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + \
TAB_ARCH_IA32
TAB_PCDS_FEATURE_FLAG_X64 = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + \
TAB_ARCH_X64
TAB_PCDS_FEATURE_FLAG_IPF = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + \
TAB_ARCH_IPF
TAB_PCDS_FEATURE_FLAG_ARM = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + \
TAB_ARCH_ARM
TAB_PCDS_FEATURE_FLAG_LOONGARCH64 = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + \
TAB_ARCH_LOONGARCH64
TAB_PCDS_FEATURE_FLAG_EBC = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + \
TAB_ARCH_EBC
TAB_PCDS_DYNAMIC_EX_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_EX
TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_EX_DEFAULT
TAB_PCDS_DYNAMIC_EX_HII_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_EX_HII
TAB_PCDS_DYNAMIC_EX_VPD_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_EX_VPD
TAB_PCDS_DYNAMIC_EX_COMMON = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + \
TAB_ARCH_COMMON
TAB_PCDS_DYNAMIC_EX_IA32 = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + \
TAB_ARCH_IA32
TAB_PCDS_DYNAMIC_EX_X64 = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + \
TAB_ARCH_X64
TAB_PCDS_DYNAMIC_EX_IPF = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + \
TAB_ARCH_IPF
TAB_PCDS_DYNAMIC_EX_ARM = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + \
TAB_ARCH_ARM
TAB_PCDS_DYNAMIC_EX_LOONGARCH64 = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + \
TAB_ARCH_LOONGARCH64
TAB_PCDS_DYNAMIC_EX_EBC = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + \
TAB_ARCH_EBC
TAB_PCDS_DYNAMIC_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC
TAB_PCDS_DYNAMIC_DEFAULT_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_DEFAULT
TAB_PCDS_DYNAMIC_HII_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_HII
TAB_PCDS_DYNAMIC_VPD_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_VPD
TAB_PCDS_DYNAMIC_COMMON = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + \
TAB_ARCH_COMMON
TAB_PCDS_DYNAMIC_IA32 = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_IA32
TAB_PCDS_DYNAMIC_X64 = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_X64
TAB_PCDS_DYNAMIC_IPF = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_IPF
TAB_PCDS_DYNAMIC_ARM = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_ARM
TAB_PCDS_DYNAMIC_LOONGARCH64 = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_LOONGARCH64
TAB_PCDS_DYNAMIC_EBC = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_EBC
TAB_PCD_DYNAMIC_TYPE_LIST = [TAB_PCDS_DYNAMIC_DEFAULT_NULL, \
TAB_PCDS_DYNAMIC_VPD_NULL, \
TAB_PCDS_DYNAMIC_HII_NULL]
TAB_PCD_DYNAMIC_EX_TYPE_LIST = [TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL, \
TAB_PCDS_DYNAMIC_EX_VPD_NULL, \
TAB_PCDS_DYNAMIC_EX_HII_NULL]
TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_PEI_PAGE_SIZE = \
'PcdLoadFixAddressPeiCodePageNumber'
TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_PEI_PAGE_SIZE_DATA_TYPE = 'UINT32'
TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_DXE_PAGE_SIZE = \
'PcdLoadFixAddressBootTimeCodePageNumber'
TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_DXE_PAGE_SIZE_DATA_TYPE = 'UINT32'
TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_RUNTIME_PAGE_SIZE = \
'PcdLoadFixAddressRuntimeCodePageNumber'
TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_RUNTIME_PAGE_SIZE_DATA_TYPE = 'UINT32'
TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SMM_PAGE_SIZE = \
'PcdLoadFixAddressSmmCodePageNumber'
TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SMM_PAGE_SIZE_DATA_TYPE = 'UINT32'
TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_LIST = \
[TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_PEI_PAGE_SIZE, \
TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_DXE_PAGE_SIZE, \
TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_RUNTIME_PAGE_SIZE, \
TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SMM_PAGE_SIZE]
PCD_SECTION_LIST = [TAB_PCDS_FIXED_AT_BUILD_NULL.upper(), \
TAB_PCDS_PATCHABLE_IN_MODULE_NULL.upper(), \
TAB_PCDS_FEATURE_FLAG_NULL.upper(), \
TAB_PCDS_DYNAMIC_EX_NULL.upper(), \
TAB_PCDS_DYNAMIC_NULL.upper()]
INF_PCD_SECTION_LIST = ["FixedPcd".upper(), "FeaturePcd".upper(), \
"PatchPcd".upper(), "Pcd".upper(), "PcdEx".upper()]
TAB_DEPEX = 'Depex'
TAB_DEPEX_COMMON = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_COMMON
TAB_DEPEX_IA32 = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_IA32
TAB_DEPEX_X64 = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_X64
TAB_DEPEX_IPF = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_IPF
TAB_DEPEX_ARM = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_ARM
TAB_DEPEX_LOONGARCH64 = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_LOONGARCH64
TAB_DEPEX_EBC = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_EBC
TAB_SKUIDS = 'SkuIds'
TAB_LIBRARIES = 'Libraries'
TAB_LIBRARIES_COMMON = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_COMMON
TAB_LIBRARIES_IA32 = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_IA32
TAB_LIBRARIES_X64 = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_X64
TAB_LIBRARIES_IPF = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_IPF
TAB_LIBRARIES_ARM = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_ARM
TAB_LIBRARIES_LOONGARCH64 = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_LOONGARCH64
TAB_LIBRARIES_EBC = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_EBC
TAB_COMPONENTS = 'Components'
TAB_COMPONENTS_COMMON = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_COMMON
TAB_COMPONENTS_IA32 = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_IA32
TAB_COMPONENTS_X64 = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_X64
TAB_COMPONENTS_IPF = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_IPF
TAB_COMPONENTS_ARM = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_ARM
TAB_COMPONENTS_LOONGARCH64 = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_LOONGARCH64
TAB_COMPONENTS_EBC = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_EBC
TAB_BUILD_OPTIONS = 'BuildOptions'
TAB_DEFINE = 'DEFINE'
TAB_NMAKE = 'Nmake'
TAB_USER_EXTENSIONS = 'UserExtensions'
TAB_INCLUDE = '!include'
TAB_PRIVATE = 'Private'
TAB_INTEL = 'Intel'
#
# Common Define
#
TAB_COMMON_DEFINES = 'Defines'
#
# Inf Definitions
#
TAB_INF_DEFINES = TAB_COMMON_DEFINES
TAB_INF_DEFINES_INF_VERSION = 'INF_VERSION'
TAB_INF_DEFINES_BASE_NAME = 'BASE_NAME'
TAB_INF_DEFINES_FILE_GUID = 'FILE_GUID'
TAB_INF_DEFINES_MODULE_TYPE = 'MODULE_TYPE'
TAB_INF_DEFINES_EFI_SPECIFICATION_VERSION = 'EFI_SPECIFICATION_VERSION'
TAB_INF_DEFINES_UEFI_SPECIFICATION_VERSION = 'UEFI_SPECIFICATION_VERSION'
TAB_INF_DEFINES_PI_SPECIFICATION_VERSION = 'PI_SPECIFICATION_VERSION'
TAB_INF_DEFINES_EDK_RELEASE_VERSION = 'EDK_RELEASE_VERSION'
TAB_INF_DEFINES_MODULE_UNI_FILE = 'MODULE_UNI_FILE'
TAB_INF_DEFINES_BINARY_MODULE = 'BINARY_MODULE'
TAB_INF_DEFINES_LIBRARY_CLASS = 'LIBRARY_CLASS'
TAB_INF_DEFINES_COMPONENT_TYPE = 'COMPONENT_TYPE'
TAB_INF_DEFINES_MAKEFILE_NAME = 'MAKEFILE_NAME'
TAB_INF_DEFINES_BUILD_NUMBER = 'BUILD_NUMBER'
TAB_INF_DEFINES_BUILD_TYPE = 'BUILD_TYPE'
TAB_INF_DEFINES_FFS_EXT = 'FFS_EXT'
TAB_INF_DEFINES_FV_EXT = 'FV_EXT'
TAB_INF_DEFINES_SOURCE_FV = 'SOURCE_FV'
TAB_INF_DEFINES_PACKAGE = 'PACKAGE'
TAB_INF_DEFINES_VERSION_NUMBER = 'VERSION_NUMBER'
TAB_INF_DEFINES_VERSION = 'VERSION'
TAB_INF_DEFINES_VERSION_STRING = 'VERSION_STRING'
TAB_INF_DEFINES_PCD_IS_DRIVER = 'PCD_IS_DRIVER'
TAB_INF_DEFINES_TIANO_EDK1_FLASHMAP_H = 'TIANO_EDK1_FLASHMAP_H'
TAB_INF_DEFINES_ENTRY_POINT = 'ENTRY_POINT'
TAB_INF_DEFINES_UNLOAD_IMAGE = 'UNLOAD_IMAGE'
TAB_INF_DEFINES_CONSTRUCTOR = 'CONSTRUCTOR'
TAB_INF_DEFINES_DESTRUCTOR = 'DESTRUCTOR'
TAB_INF_DEFINES_PCI_VENDOR_ID = 'PCI_VENDOR_ID'
TAB_INF_DEFINES_PCI_DEVICE_ID = 'PCI_DEVICE_ID'
TAB_INF_DEFINES_PCI_CLASS_CODE = 'PCI_CLASS_CODE'
TAB_INF_DEFINES_PCI_REVISION = 'PCI_REVISION'
TAB_INF_DEFINES_PCI_COMPRESS = 'PCI_COMPRESS'
TAB_INF_DEFINES_DEFINE = 'DEFINE'
TAB_INF_DEFINES_SPEC = 'SPEC'
TAB_INF_DEFINES_UEFI_HII_RESOURCE_SECTION = 'UEFI_HII_RESOURCE_SECTION'
TAB_INF_DEFINES_CUSTOM_MAKEFILE = 'CUSTOM_MAKEFILE'
TAB_INF_DEFINES_MACRO = '__MACROS__'
TAB_INF_DEFINES_SHADOW = 'SHADOW'
TAB_INF_DEFINES_DPX_SOURCE = 'DPX_SOURCE'
TAB_INF_FIXED_PCD = 'FixedPcd'
TAB_INF_FEATURE_PCD = 'FeaturePcd'
TAB_INF_PATCH_PCD = 'PatchPcd'
TAB_INF_PCD = 'Pcd'
TAB_INF_PCD_EX = 'PcdEx'
TAB_INF_GUIDTYPE_VAR = 'Variable'
TAB_INF_ABSTRACT = 'STR_MODULE_ABSTRACT'
TAB_INF_DESCRIPTION = 'STR_MODULE_DESCRIPTION'
TAB_INF_LICENSE = 'STR_MODULE_LICENSE'
TAB_INF_BINARY_ABSTRACT = 'STR_MODULE_BINARY_ABSTRACT'
TAB_INF_BINARY_DESCRIPTION = 'STR_MODULE_BINARY_DESCRIPTION'
TAB_INF_BINARY_LICENSE = 'STR_MODULE_BINARY_LICENSE'
#
# Dec Definitions
#
TAB_DEC_DEFINES = TAB_COMMON_DEFINES
TAB_DEC_DEFINES_DEC_SPECIFICATION = 'DEC_SPECIFICATION'
TAB_DEC_DEFINES_PACKAGE_NAME = 'PACKAGE_NAME'
TAB_DEC_DEFINES_PACKAGE_GUID = 'PACKAGE_GUID'
TAB_DEC_DEFINES_PACKAGE_VERSION = 'PACKAGE_VERSION'
TAB_DEC_DEFINES_PKG_UNI_FILE = 'PACKAGE_UNI_FILE'
TAB_DEC_PACKAGE_ABSTRACT = 'STR_PACKAGE_ABSTRACT'
TAB_DEC_PACKAGE_DESCRIPTION = 'STR_PACKAGE_DESCRIPTION'
TAB_DEC_PACKAGE_LICENSE = 'STR_PACKAGE_LICENSE'
TAB_DEC_BINARY_ABSTRACT = 'STR_PACKAGE_BINARY_ABSTRACT'
TAB_DEC_BINARY_DESCRIPTION = 'STR_PACKAGE_BINARY_DESCRIPTION'
TAB_DEC_BINARY_LICENSE = 'STR_PACKAGE_ASBUILT_LICENSE'
#
# Dsc Definitions
#
TAB_DSC_DEFINES = TAB_COMMON_DEFINES
TAB_DSC_DEFINES_PLATFORM_NAME = 'PLATFORM_NAME'
TAB_DSC_DEFINES_PLATFORM_GUID = 'PLATFORM_GUID'
TAB_DSC_DEFINES_PLATFORM_VERSION = 'PLATFORM_VERSION'
TAB_DSC_DEFINES_DSC_SPECIFICATION = 'DSC_SPECIFICATION'
TAB_DSC_DEFINES_OUTPUT_DIRECTORY = 'OUTPUT_DIRECTORY'
TAB_DSC_DEFINES_SUPPORTED_ARCHITECTURES = 'SUPPORTED_ARCHITECTURES'
TAB_DSC_DEFINES_BUILD_TARGETS = 'BUILD_TARGETS'
TAB_DSC_DEFINES_SKUID_IDENTIFIER = 'SKUID_IDENTIFIER'
TAB_DSC_DEFINES_FLASH_DEFINITION = 'FLASH_DEFINITION'
TAB_DSC_DEFINES_BUILD_NUMBER = 'BUILD_NUMBER'
TAB_DSC_DEFINES_MAKEFILE_NAME = 'MAKEFILE_NAME'
TAB_DSC_DEFINES_BS_BASE_ADDRESS = 'BsBaseAddress'
TAB_DSC_DEFINES_RT_BASE_ADDRESS = 'RtBaseAddress'
TAB_DSC_DEFINES_DEFINE = 'DEFINE'
TAB_FIX_LOAD_TOP_MEMORY_ADDRESS = 'FIX_LOAD_TOP_MEMORY_ADDRESS'
#
# TargetTxt Definitions
#
TAB_TAT_DEFINES_ACTIVE_PLATFORM = 'ACTIVE_PLATFORM'
TAB_TAT_DEFINES_ACTIVE_MODULE = 'ACTIVE_MODULE'
TAB_TAT_DEFINES_TOOL_CHAIN_CONF = 'TOOL_CHAIN_CONF'
TAB_TAT_DEFINES_MULTIPLE_THREAD = 'MULTIPLE_THREAD'
TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER = 'MAX_CONCURRENT_THREAD_NUMBER'
TAB_TAT_DEFINES_TARGET = 'TARGET'
TAB_TAT_DEFINES_TOOL_CHAIN_TAG = 'TOOL_CHAIN_TAG'
TAB_TAT_DEFINES_TARGET_ARCH = 'TARGET_ARCH'
TAB_TAT_DEFINES_BUILD_RULE_CONF = "BUILD_RULE_CONF"
#
# ToolDef Definitions
#
TAB_TOD_DEFINES_TARGET = 'TARGET'
TAB_TOD_DEFINES_TOOL_CHAIN_TAG = 'TOOL_CHAIN_TAG'
TAB_TOD_DEFINES_TARGET_ARCH = 'TARGET_ARCH'
TAB_TOD_DEFINES_COMMAND_TYPE = 'COMMAND_TYPE'
TAB_TOD_DEFINES_FAMILY = 'FAMILY'
TAB_TOD_DEFINES_BUILDRULEFAMILY = 'BUILDRULEFAMILY'
#
# Conditional Statements
#
TAB_IF = '!if'
TAB_END_IF = '!endif'
TAB_ELSE_IF = '!elseif'
TAB_ELSE = '!else'
TAB_IF_DEF = '!ifdef'
TAB_IF_N_DEF = '!ifndef'
TAB_IF_EXIST = '!if exist'
#
# Unknown section
#
TAB_UNKNOWN = 'UNKNOWN'
#
# Header section (virtual section for abstract, description, copyright,
# license)
#
TAB_HEADER = 'Header'
TAB_HEADER_ABSTRACT = 'Abstract'
TAB_HEADER_DESCRIPTION = 'Description'
TAB_HEADER_COPYRIGHT = 'Copyright'
TAB_HEADER_LICENSE = 'License'
TAB_BINARY_HEADER_IDENTIFIER = 'BinaryHeader'
TAB_BINARY_HEADER_USERID = 'TianoCore'
#
# Build database path
#
DATABASE_PATH = ":memory:"
#
# used by ECC
#
MODIFIER_LIST = ['IN', 'OUT', 'OPTIONAL', 'UNALIGNED', 'EFI_RUNTIMESERVICE', \
'EFI_BOOTSERVICE', 'EFIAPI']
#
# Dependency Expression
#
DEPEX_SUPPORTED_OPCODE = ["BEFORE", "AFTER", "PUSH", "AND", "OR", "NOT", \
"END", "SOR", "TRUE", "FALSE", '(', ')']
TAB_STATIC_LIBRARY = "STATIC-LIBRARY-FILE"
TAB_DYNAMIC_LIBRARY = "DYNAMIC-LIBRARY-FILE"
TAB_FRAMEWORK_IMAGE = "EFI-IMAGE-FILE"
TAB_C_CODE_FILE = "C-CODE-FILE"
TAB_C_HEADER_FILE = "C-HEADER-FILE"
TAB_UNICODE_FILE = "UNICODE-TEXT-FILE"
TAB_DEPENDENCY_EXPRESSION_FILE = "DEPENDENCY-EXPRESSION-FILE"
TAB_UNKNOWN_FILE = "UNKNOWN-TYPE-FILE"
TAB_DEFAULT_BINARY_FILE = "_BINARY_FILE_"
#
# used to indicate the state of processing the header comment section of
# DEC and INF files
#
HEADER_COMMENT_NOT_STARTED = -1
HEADER_COMMENT_STARTED = 0
HEADER_COMMENT_FILE = 1
HEADER_COMMENT_ABSTRACT = 2
HEADER_COMMENT_DESCRIPTION = 3
HEADER_COMMENT_COPYRIGHT = 4
HEADER_COMMENT_LICENSE = 5
HEADER_COMMENT_END = 6
#
# Static values for data models
#
MODEL_UNKNOWN = 0
MODEL_FILE_C = 1001
MODEL_FILE_H = 1002
MODEL_FILE_ASM = 1003
MODEL_FILE_INF = 1011
MODEL_FILE_DEC = 1012
MODEL_FILE_DSC = 1013
MODEL_FILE_FDF = 1014
MODEL_FILE_INC = 1015
MODEL_FILE_CIF = 1016
MODEL_IDENTIFIER_FILE_HEADER = 2001
MODEL_IDENTIFIER_FUNCTION_HEADER = 2002
MODEL_IDENTIFIER_COMMENT = 2003
MODEL_IDENTIFIER_PARAMETER = 2004
MODEL_IDENTIFIER_STRUCTURE = 2005
MODEL_IDENTIFIER_VARIABLE = 2006
MODEL_IDENTIFIER_INCLUDE = 2007
MODEL_IDENTIFIER_PREDICATE_EXPRESSION = 2008
MODEL_IDENTIFIER_ENUMERATE = 2009
MODEL_IDENTIFIER_PCD = 2010
MODEL_IDENTIFIER_UNION = 2011
MODEL_IDENTIFIER_MACRO_IFDEF = 2012
MODEL_IDENTIFIER_MACRO_IFNDEF = 2013
MODEL_IDENTIFIER_MACRO_DEFINE = 2014
MODEL_IDENTIFIER_MACRO_ENDIF = 2015
MODEL_IDENTIFIER_MACRO_PROGMA = 2016
MODEL_IDENTIFIER_FUNCTION_CALLING = 2018
MODEL_IDENTIFIER_TYPEDEF = 2017
MODEL_IDENTIFIER_FUNCTION_DECLARATION = 2019
MODEL_IDENTIFIER_ASSIGNMENT_EXPRESSION = 2020
MODEL_EFI_PROTOCOL = 3001
MODEL_EFI_PPI = 3002
MODEL_EFI_GUID = 3003
MODEL_EFI_LIBRARY_CLASS = 3004
MODEL_EFI_LIBRARY_INSTANCE = 3005
MODEL_EFI_PCD = 3006
MODEL_EFI_SOURCE_FILE = 3007
MODEL_EFI_BINARY_FILE = 3008
MODEL_EFI_SKU_ID = 3009
MODEL_EFI_INCLUDE = 3010
MODEL_EFI_DEPEX = 3011
MODEL_PCD = 4000
MODEL_PCD_FIXED_AT_BUILD = 4001
MODEL_PCD_PATCHABLE_IN_MODULE = 4002
MODEL_PCD_FEATURE_FLAG = 4003
MODEL_PCD_DYNAMIC_EX = 4004
MODEL_PCD_DYNAMIC_EX_DEFAULT = 4005
MODEL_PCD_DYNAMIC_EX_VPD = 4006
MODEL_PCD_DYNAMIC_EX_HII = 4007
MODEL_PCD_DYNAMIC = 4008
MODEL_PCD_DYNAMIC_DEFAULT = 4009
MODEL_PCD_DYNAMIC_VPD = 4010
MODEL_PCD_DYNAMIC_HII = 4011
MODEL_META_DATA_FILE_HEADER = 5000
MODEL_META_DATA_HEADER = 5001
MODEL_META_DATA_INCLUDE = 5002
MODEL_META_DATA_DEFINE = 5003
MODEL_META_DATA_CONDITIONAL_STATEMENT_IF = 5004
MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE = 5005
MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF = 5006
MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF = 5007
MODEL_META_DATA_BUILD_OPTION = 5008
MODEL_META_DATA_COMPONENT = 5009
MODEL_META_DATA_USER_EXTENSION = 5010
MODEL_META_DATA_PACKAGE = 5011
MODEL_META_DATA_NMAKE = 5012
MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF = 50013
MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF = 5014
TOOL_FAMILY_LIST = ["MSFT",
"INTEL",
"GCC",
]
TYPE_HOB_SECTION = 'HOB'
TYPE_EVENT_SECTION = 'EVENT'
TYPE_BOOTMODE_SECTION = 'BOOTMODE'
PCD_ERR_CODE_MAX_SIZE = 4294967295
| edk2-master | BaseTools/Source/Python/UPT/Library/DataType.py |
## @file
# This file is used to define comment parsing interface
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
CommentParsing
'''
##
# Import Modules
#
import re
from Library.StringUtils import GetSplitValueList
from Library.StringUtils import CleanString2
from Library.DataType import HEADER_COMMENT_NOT_STARTED
from Library.DataType import TAB_COMMENT_SPLIT
from Library.DataType import HEADER_COMMENT_LICENSE
from Library.DataType import HEADER_COMMENT_ABSTRACT
from Library.DataType import HEADER_COMMENT_COPYRIGHT
from Library.DataType import HEADER_COMMENT_DESCRIPTION
from Library.DataType import TAB_SPACE_SPLIT
from Library.DataType import TAB_COMMA_SPLIT
from Library.DataType import SUP_MODULE_LIST
from Library.DataType import TAB_VALUE_SPLIT
from Library.DataType import TAB_PCD_VALIDRANGE
from Library.DataType import TAB_PCD_VALIDLIST
from Library.DataType import TAB_PCD_EXPRESSION
from Library.DataType import TAB_PCD_PROMPT
from Library.DataType import TAB_CAPHEX_START
from Library.DataType import TAB_HEX_START
from Library.DataType import PCD_ERR_CODE_MAX_SIZE
from Library.ExpressionValidate import IsValidRangeExpr
from Library.ExpressionValidate import IsValidListExpr
from Library.ExpressionValidate import IsValidLogicalExpr
from Object.POM.CommonObject import TextObject
from Object.POM.CommonObject import PcdErrorObject
import Logger.Log as Logger
from Logger.ToolError import FORMAT_INVALID
from Logger.ToolError import FORMAT_NOT_SUPPORTED
from Logger import StringTable as ST
## ParseHeaderCommentSection
#
# Parse Header comment section lines; extract the Abstract, Description,
# Copyright, and License lines
#
# @param CommentList: List of (Comment, LineNumber)
# @param FileName: FileName of the comment
# @param IsBinaryHeader: True when parsing an @BinaryHeader block instead of @file
#
def ParseHeaderCommentSection(CommentList, FileName = None, IsBinaryHeader = False):
Abstract = ''
Description = ''
Copyright = ''
License = ''
EndOfLine = "\n"
if IsBinaryHeader:
STR_HEADER_COMMENT_START = "@BinaryHeader"
else:
STR_HEADER_COMMENT_START = "@file"
HeaderCommentStage = HEADER_COMMENT_NOT_STARTED
#
# first find the last copyright line
#
Last = 0
for Index in range(len(CommentList)-1, 0, -1):
Line = CommentList[Index][0]
if _IsCopyrightLine(Line):
Last = Index
break
for Item in CommentList:
Line = Item[0]
LineNo = Item[1]
if not Line.startswith(TAB_COMMENT_SPLIT) and Line:
Logger.Error("\nUPT", FORMAT_INVALID, ST.ERR_INVALID_COMMENT_FORMAT, FileName, Item[1])
Comment = CleanString2(Line)[1]
Comment = Comment.strip()
#
# if there are blank lines within the License or Description, keep them since they
# may indicate a separate block; a blank line in the position where the Abstract
# should be is also kept, since it indicates that there is no abstract
#
if not Comment and HeaderCommentStage not in [HEADER_COMMENT_LICENSE, \
HEADER_COMMENT_DESCRIPTION, HEADER_COMMENT_ABSTRACT]:
continue
if HeaderCommentStage == HEADER_COMMENT_NOT_STARTED:
if Comment.startswith(STR_HEADER_COMMENT_START):
HeaderCommentStage = HEADER_COMMENT_ABSTRACT
else:
License += Comment + EndOfLine
else:
if HeaderCommentStage == HEADER_COMMENT_ABSTRACT:
#
# in case there is no abstract or description
#
if not Comment:
HeaderCommentStage = HEADER_COMMENT_DESCRIPTION
elif _IsCopyrightLine(Comment):
Result, ErrMsg = _ValidateCopyright(Comment)
ValidateCopyright(Result, ST.WRN_INVALID_COPYRIGHT, FileName, LineNo, ErrMsg)
Copyright += Comment + EndOfLine
HeaderCommentStage = HEADER_COMMENT_COPYRIGHT
else:
Abstract += Comment + EndOfLine
HeaderCommentStage = HEADER_COMMENT_DESCRIPTION
elif HeaderCommentStage == HEADER_COMMENT_DESCRIPTION:
#
# in case there is no description
#
if _IsCopyrightLine(Comment):
Result, ErrMsg = _ValidateCopyright(Comment)
ValidateCopyright(Result, ST.WRN_INVALID_COPYRIGHT, FileName, LineNo, ErrMsg)
Copyright += Comment + EndOfLine
HeaderCommentStage = HEADER_COMMENT_COPYRIGHT
else:
Description += Comment + EndOfLine
elif HeaderCommentStage == HEADER_COMMENT_COPYRIGHT:
if _IsCopyrightLine(Comment):
Result, ErrMsg = _ValidateCopyright(Comment)
ValidateCopyright(Result, ST.WRN_INVALID_COPYRIGHT, FileName, LineNo, ErrMsg)
Copyright += Comment + EndOfLine
else:
#
# Contents after the copyright lines are treated as license text; non-copyright
# lines that appear between copyright lines are discarded
#
if LineNo > Last:
if License:
License += EndOfLine
License += Comment + EndOfLine
HeaderCommentStage = HEADER_COMMENT_LICENSE
else:
if not Comment and not License:
continue
License += Comment + EndOfLine
return Abstract.strip(), Description.strip(), Copyright.strip(), License.strip()
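#
# Rough usage sketch (illustrative only; the comment list and file name below are
# hypothetical). The function splits a header comment block into its four fields:
#
#   CommentLines = [('## @file', 1),
#                   ('# Sample module abstract.', 2),
#                   ('#', 3),
#                   ('# Longer description of the sample module.', 4),
#                   ('# Copyright (c) 2018, Sample Corp.<BR>', 5),
#                   ('# SPDX-License-Identifier: BSD-2-Clause-Patent', 6)]
#   Abstract, Description, Copyright, License = \
#       ParseHeaderCommentSection(CommentLines, 'Sample.inf')
#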
## _IsCopyrightLine
# check whether the current line is a copyright line; the criterion is a case-insensitive
# keyword "Copyright" followed by zero or more white-space characters and a "(" character
#
# @param LineContent: the line to be checked
# @return: True if the current line is a copyright line, False otherwise
#
def _IsCopyrightLine (LineContent):
LineContent = LineContent.upper()
Result = False
ReIsCopyrightRe = re.compile(r"""(^|\s)COPYRIGHT *\(""", re.DOTALL)
if ReIsCopyrightRe.search(LineContent):
Result = True
return Result
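#
# Examples (illustrative): the check only requires "Copyright" followed by an
# opening parenthesis, so
#   _IsCopyrightLine('Copyright (c) 2018, Sample Corp.')  -> True
#   _IsCopyrightLine('Copyright 2018 Sample Corp.')       -> False
#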
## ParseGenericComment
#
# @param GenericComment: Generic comment list, element of
# (CommentLine, LineNum)
# @param ContainerFile: Input value for filename of Dec file
#
def ParseGenericComment (GenericComment, ContainerFile=None, SkipTag=None):
if ContainerFile:
pass
HelpTxt = None
HelpStr = ''
for Item in GenericComment:
CommentLine = Item[0]
Comment = CleanString2(CommentLine)[1]
if SkipTag is not None and Comment.startswith(SkipTag):
Comment = Comment.replace(SkipTag, '', 1)
HelpStr += Comment + '\n'
if HelpStr:
HelpTxt = TextObject()
if HelpStr.endswith('\n') and not HelpStr.endswith('\n\n') and HelpStr != '\n':
HelpStr = HelpStr[:-1]
HelpTxt.SetString(HelpStr)
return HelpTxt
## ParsePcdErrorCode
#
# @param Value: original ErrorCode value
# @param ContainerFile: Input value for filename of Dec file
# @param LineNum: Line Num
#
def ParsePcdErrorCode (Value = None, ContainerFile = None, LineNum = None):
try:
if Value.strip().startswith((TAB_HEX_START, TAB_CAPHEX_START)):
Base = 16
else:
Base = 10
ErrorCode = int(Value, Base)
if ErrorCode > PCD_ERR_CODE_MAX_SIZE or ErrorCode < 0:
Logger.Error('Parser',
FORMAT_NOT_SUPPORTED,
"The format %s of ErrorCode is not valid, should be UNIT32 type or long type" % Value,
File = ContainerFile,
Line = LineNum)
ErrorCode = '0x%x' % ErrorCode
return ErrorCode
except ValueError as XStr:
if XStr:
pass
Logger.Error('Parser',
FORMAT_NOT_SUPPORTED,
"The format %s of ErrorCode is not valid, should be UNIT32 type or long type" % Value,
File = ContainerFile,
Line = LineNum)
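#
# Examples (illustrative): the error code may be given in hex or decimal form and
# is normalized to a hex string, e.g.
#   ParsePcdErrorCode('0x80000001') -> '0x80000001'
#   ParsePcdErrorCode('5')          -> '0x5'
# Values outside the UINT32 range are reported through Logger.Error.
#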
## ParseDecPcdGenericComment
#
# @param GenericComment: Generic comment list, element of (CommentLine,
# LineNum)
# @param ContainerFile: Input value for filename of Dec file
# @param TokenSpaceGuidCName: Token space GUID C name of the PCD
# @param CName: C name of the PCD
# @param MacroReplaceDict: Dictionary of macro names to their replacement values
#
def ParseDecPcdGenericComment (GenericComment, ContainerFile, TokenSpaceGuidCName, CName, MacroReplaceDict):
HelpStr = ''
PromptStr = ''
PcdErr = None
PcdErrList = []
ValidValueNum = 0
ValidRangeNum = 0
ExpressionNum = 0
for (CommentLine, LineNum) in GenericComment:
Comment = CleanString2(CommentLine)[1]
#
# To replace Macro
#
MACRO_PATTERN = r'[\t\s]*\$\([A-Z][_A-Z0-9]*\)'
MatchedStrs = re.findall(MACRO_PATTERN, Comment)
for MatchedStr in MatchedStrs:
if MatchedStr:
Macro = MatchedStr.strip().lstrip('$(').rstrip(')').strip()
if Macro in MacroReplaceDict:
Comment = Comment.replace(MatchedStr, MacroReplaceDict[Macro])
if Comment.startswith(TAB_PCD_VALIDRANGE):
if ValidValueNum > 0 or ExpressionNum > 0:
Logger.Error('Parser',
FORMAT_NOT_SUPPORTED,
ST.WRN_MULTI_PCD_RANGES,
File = ContainerFile,
Line = LineNum)
else:
PcdErr = PcdErrorObject()
PcdErr.SetTokenSpaceGuidCName(TokenSpaceGuidCName)
PcdErr.SetCName(CName)
PcdErr.SetFileLine(Comment)
PcdErr.SetLineNum(LineNum)
ValidRangeNum += 1
ValidRange = Comment.replace(TAB_PCD_VALIDRANGE, "", 1).strip()
Valid, Cause = _CheckRangeExpression(ValidRange)
if Valid:
ValueList = ValidRange.split(TAB_VALUE_SPLIT)
if len(ValueList) > 1:
PcdErr.SetValidValueRange((TAB_VALUE_SPLIT.join(ValueList[1:])).strip())
PcdErr.SetErrorNumber(ParsePcdErrorCode(ValueList[0], ContainerFile, LineNum))
else:
PcdErr.SetValidValueRange(ValidRange)
PcdErrList.append(PcdErr)
else:
Logger.Error("Parser",
FORMAT_NOT_SUPPORTED,
Cause,
ContainerFile,
LineNum)
elif Comment.startswith(TAB_PCD_VALIDLIST):
if ValidRangeNum > 0 or ExpressionNum > 0:
Logger.Error('Parser',
FORMAT_NOT_SUPPORTED,
ST.WRN_MULTI_PCD_RANGES,
File = ContainerFile,
Line = LineNum)
elif ValidValueNum > 0:
Logger.Error('Parser',
FORMAT_NOT_SUPPORTED,
ST.WRN_MULTI_PCD_VALIDVALUE,
File = ContainerFile,
Line = LineNum)
else:
PcdErr = PcdErrorObject()
PcdErr.SetTokenSpaceGuidCName(TokenSpaceGuidCName)
PcdErr.SetCName(CName)
PcdErr.SetFileLine(Comment)
PcdErr.SetLineNum(LineNum)
ValidValueNum += 1
ValidValueExpr = Comment.replace(TAB_PCD_VALIDLIST, "", 1).strip()
Valid, Cause = _CheckListExpression(ValidValueExpr)
if Valid:
ValidValue = Comment.replace(TAB_PCD_VALIDLIST, "", 1).replace(TAB_COMMA_SPLIT, TAB_SPACE_SPLIT)
ValueList = ValidValue.split(TAB_VALUE_SPLIT)
if len(ValueList) > 1:
PcdErr.SetValidValue((TAB_VALUE_SPLIT.join(ValueList[1:])).strip())
PcdErr.SetErrorNumber(ParsePcdErrorCode(ValueList[0], ContainerFile, LineNum))
else:
PcdErr.SetValidValue(ValidValue)
PcdErrList.append(PcdErr)
else:
Logger.Error("Parser",
FORMAT_NOT_SUPPORTED,
Cause,
ContainerFile,
LineNum)
elif Comment.startswith(TAB_PCD_EXPRESSION):
if ValidRangeNum > 0 or ValidValueNum > 0:
Logger.Error('Parser',
FORMAT_NOT_SUPPORTED,
ST.WRN_MULTI_PCD_RANGES,
File = ContainerFile,
Line = LineNum)
else:
PcdErr = PcdErrorObject()
PcdErr.SetTokenSpaceGuidCName(TokenSpaceGuidCName)
PcdErr.SetCName(CName)
PcdErr.SetFileLine(Comment)
PcdErr.SetLineNum(LineNum)
ExpressionNum += 1
Expression = Comment.replace(TAB_PCD_EXPRESSION, "", 1).strip()
Valid, Cause = _CheckExpression(Expression)
if Valid:
ValueList = Expression.split(TAB_VALUE_SPLIT)
if len(ValueList) > 1:
PcdErr.SetExpression((TAB_VALUE_SPLIT.join(ValueList[1:])).strip())
PcdErr.SetErrorNumber(ParsePcdErrorCode(ValueList[0], ContainerFile, LineNum))
else:
PcdErr.SetExpression(Expression)
PcdErrList.append(PcdErr)
else:
Logger.Error("Parser",
FORMAT_NOT_SUPPORTED,
Cause,
ContainerFile,
LineNum)
elif Comment.startswith(TAB_PCD_PROMPT):
if PromptStr:
Logger.Error('Parser',
FORMAT_NOT_SUPPORTED,
ST.WRN_MULTI_PCD_PROMPT,
File = ContainerFile,
Line = LineNum)
PromptStr = Comment.replace(TAB_PCD_PROMPT, "", 1).strip()
else:
if Comment:
HelpStr += Comment + '\n'
#
# remove the last EOL if the comment is of format 'FOO\n'
#
if HelpStr.endswith('\n'):
if HelpStr != '\n' and not HelpStr.endswith('\n\n'):
HelpStr = HelpStr[:-1]
return HelpStr, PcdErrList, PromptStr
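#
# Rough usage sketch (illustrative only; the comment lines below are hypothetical).
# A DEC PCD comment block such as
#   [('## @Prompt Enable the sample feature.', 10),
#    ('## Help text for the sample PCD.', 11)]
# is expected to yield the help text, a list of PcdErrorObject entries for any
# @ValidRange/@ValidList/@Expression lines (empty here), and the prompt string.
#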
## ParseDecPcdTailComment
#
# @param TailCommentList: Tail comment list of Pcd, item of format (Comment, LineNum)
# @param ContainerFile: Input value for filename of Dec file
# @retVal SupModuleList: The supported module type list detected
# @retVal HelpStr: The generic help text string detected
#
def ParseDecPcdTailComment (TailCommentList, ContainerFile):
assert(len(TailCommentList) == 1)
TailComment = TailCommentList[0][0]
LineNum = TailCommentList[0][1]
Comment = TailComment.lstrip(" #")
ReFindFirstWordRe = re.compile(r"""^([^ #]*)""", re.DOTALL)
#
# get first word and compare with SUP_MODULE_LIST
#
MatchObject = ReFindFirstWordRe.match(Comment)
if not (MatchObject and MatchObject.group(1) in SUP_MODULE_LIST):
return None, Comment
#
# parse line, it must have supported module type specified
#
if Comment.find(TAB_COMMENT_SPLIT) == -1:
Comment += TAB_COMMENT_SPLIT
SupMode, HelpStr = GetSplitValueList(Comment, TAB_COMMENT_SPLIT, 1)
SupModuleList = []
for Mod in GetSplitValueList(SupMode, TAB_SPACE_SPLIT):
if not Mod:
continue
elif Mod not in SUP_MODULE_LIST:
Logger.Error("UPT",
FORMAT_INVALID,
ST.WRN_INVALID_MODULE_TYPE%Mod,
ContainerFile,
LineNum)
else:
SupModuleList.append(Mod)
return SupModuleList, HelpStr
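#
# Rough usage sketch (illustrative only; the tail comment below is hypothetical):
#   ParseDecPcdTailComment([('# PEIM DXE_DRIVER # help text', 20)], 'Sample.dec')
# is expected to return (['PEIM', 'DXE_DRIVER'], <remaining help text>), while a
# comment whose first word is not a supported module type returns (None, Comment).
#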
## _CheckListExpression
#
# @param Expression: Pcd value list expression
#
def _CheckListExpression(Expression):
ListExpr = ''
if TAB_VALUE_SPLIT in Expression:
ListExpr = Expression[Expression.find(TAB_VALUE_SPLIT)+1:]
else:
ListExpr = Expression
return IsValidListExpr(ListExpr)
## _CheckExpression
#
# @param Expression: Pcd value expression
#
def _CheckExpression(Expression):
Expr = ''
if TAB_VALUE_SPLIT in Expression:
Expr = Expression[Expression.find(TAB_VALUE_SPLIT)+1:]
else:
Expr = Expression
return IsValidLogicalExpr(Expr, True)
## _CheckRangeExpression
#
# @param Expression: Pcd range expression
#
def _CheckRangeExpression(Expression):
RangeExpr = ''
if TAB_VALUE_SPLIT in Expression:
RangeExpr = Expression[Expression.find(TAB_VALUE_SPLIT)+1:]
else:
RangeExpr = Expression
return IsValidRangeExpr(RangeExpr)
## ValidateCopyright
#
#
#
def ValidateCopyright(Result, ErrType, FileName, LineNo, ErrMsg):
if not Result:
Logger.Warn("\nUPT", ErrType, FileName, LineNo, ErrMsg)
## _ValidateCopyright
#
# @param Line: Line that contains copyright information, # stripped
#
# @retval Result: True if the line conforms to the Spec format, False otherwise
# @retval ErrMsg: the detailed error description
#
def _ValidateCopyright(Line):
if Line:
pass
Result = True
ErrMsg = ''
return Result, ErrMsg
def GenerateTokenList (Comment):
#
# Tokenize Comment using '#' and ' ' as token separators
#
ReplacedComment = None
while Comment != ReplacedComment:
ReplacedComment = Comment
Comment = Comment.replace('##', '#').replace('  ', ' ').replace(' ', '#').strip('# ')
return Comment.split('#')
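#
# Example (illustrative): both '#' and ' ' act as token separators, so
#   GenerateTokenList('## CONSUMES  # some help') -> ['CONSUMES', 'some', 'help']
#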
#
# Comment - Comment to parse
# UsageTokens - A dictionary of usage token synonyms
# TypeTokens - A dictionary of type token synonyms
# RemoveTokens - A list of tokens to remove from help text
# ParseVariable - True for parsing [Guids]. Otherwise False
#
def ParseComment (Comment, UsageTokens, TypeTokens, RemoveTokens, ParseVariable):
#
# Initialize return values
#
Usage = None
Type = None
String = None
Comment = Comment[0]
NumTokens = 2
if ParseVariable:
#
# Remove white space around first instance of ':' from Comment if 'Variable'
# is in front of ':' and Variable is the 1st or 2nd token in Comment.
#
List = Comment.split(':', 1)
if len(List) > 1:
SubList = GenerateTokenList (List[0].strip())
if len(SubList) in [1, 2] and SubList[-1] == 'Variable':
if List[1].strip().find('L"') == 0:
Comment = List[0].strip() + ':' + List[1].strip()
#
# Remove first instance of L"<VariableName> from Comment and put into String
# if and only if L"<VariableName>" is the 1st token, the 2nd token. Or
# L"<VariableName>" is the third token immediately following 'Variable:'.
#
End = -1
Start = Comment.find('Variable:L"')
if Start >= 0:
String = Comment[Start + 9:]
End = String[2:].find('"')
else:
Start = Comment.find('L"')
if Start >= 0:
String = Comment[Start:]
End = String[2:].find('"')
if End >= 0:
SubList = GenerateTokenList (Comment[:Start])
if len(SubList) < 2:
Comment = Comment[:Start] + String[End + 3:]
String = String[:End + 3]
Type = 'Variable'
NumTokens = 1
#
# Initialize HelpText to Comment.
# Content will be remove from HelpText as matching tokens are found
#
HelpText = Comment
#
# Tokenize Comment using '#' and ' ' as token separators
#
List = GenerateTokenList (Comment)
#
# Search first two tokens for Usage and Type and remove any matching tokens
# from HelpText
#
for Token in List[0:NumTokens]:
if Usage is None and Token in UsageTokens:
Usage = UsageTokens[Token]
HelpText = HelpText.replace(Token, '')
if Usage is not None or not ParseVariable:
for Token in List[0:NumTokens]:
if Type is None and Token in TypeTokens:
Type = TypeTokens[Token]
HelpText = HelpText.replace(Token, '')
if Usage is not None:
for Token in List[0:NumTokens]:
if Token in RemoveTokens:
HelpText = HelpText.replace(Token, '')
#
# If no Usage token is present, set Usage to UNDEFINED
#
if Usage is None:
Usage = 'UNDEFINED'
#
# If no Type token is present, set Type to UNDEFINED
#
if Type is None:
Type = 'UNDEFINED'
#
# If Type is not 'Variable', then set String to None
#
if Type != 'Variable':
String = None
#
# Strip ' ' and '#' from the beginning of HelpText
# If HelpText is an empty string after all parsing is
# complete then set HelpText to None
#
HelpText = HelpText.lstrip('# ')
if HelpText == '':
HelpText = None
#
# Return parsing results
#
return Usage, Type, String, HelpText
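#
# Rough usage sketch (illustrative only; the comment text is hypothetical and the
# token dictionaries are the ones defined in Library.DataType):
#   ParseComment(('CONSUMES ## GUID # The global ID of the sample table.', 3),
#                ALL_USAGE_TOKENS, GUID_TYPE_TOKENS, [], False)
# is expected to return
#   ('CONSUMES', 'GUID', None, 'The global ID of the sample table.')
#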
| edk2-master | BaseTools/Source/Python/UPT/Library/CommentParsing.py |
## @file ParserValidate.py
# Functions for parser validation
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
ParserValidate
'''
import os.path
import re
import platform
from Library.DataType import MODULE_LIST
from Library.DataType import COMPONENT_TYPE_LIST
from Library.DataType import PCD_USAGE_TYPE_LIST_OF_MODULE
from Library.DataType import TAB_SPACE_SPLIT
from Library.StringUtils import GetSplitValueList
from Library.ExpressionValidate import IsValidBareCString
from Library.ExpressionValidate import IsValidFeatureFlagExp
from Common.MultipleWorkspace import MultipleWorkspace as mws
## __HexDigit() method
#
# Whether char input is a Hex data bit
#
# @param TempChar: The char to test
#
def __HexDigit(TempChar):
if (TempChar >= 'a' and TempChar <= 'f') or \
(TempChar >= 'A' and TempChar <= 'F') \
or (TempChar >= '0' and TempChar <= '9'):
return True
else:
return False
## IsValidHex() method
#
# Whether the input string is a Hex number.
#
# @param HexStr: The string to test
#
def IsValidHex(HexStr):
if not HexStr.upper().startswith("0X"):
return False
CharList = [c for c in HexStr[2:] if not __HexDigit(c)]
if len(CharList) == 0:
return True
else:
return False
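#
# Examples (illustrative):
#   IsValidHex('0xAB12') -> True
#   IsValidHex('AB12')   -> False   (missing the 0x prefix)
#   IsValidHex('0xG1')   -> False   (non-hex digit)
#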
## Judge the input string is valid bool type or not.
#
# <TRUE> ::= {"TRUE"} {"true"} {"True"} {"0x1"} {"0x01"}
# <FALSE> ::= {"FALSE"} {"false"} {"False"} {"0x0"} {"0x00"}
# <BoolType> ::= {<TRUE>} {<FALSE>}
#
# @param BoolString: A string containing the value to be judged.
#
def IsValidBoolType(BoolString):
#
# Valid True
#
if BoolString == 'TRUE' or \
BoolString == 'True' or \
BoolString == 'true' or \
BoolString == '0x1' or \
BoolString == '0x01':
return True
#
# Valid False
#
elif BoolString == 'FALSE' or \
BoolString == 'False' or \
BoolString == 'false' or \
BoolString == '0x0' or \
BoolString == '0x00':
return True
#
# Invalid bool type
#
else:
return False
## Is Valid Module Type List or not
#
# @param ModuleTypeList: A list containing the ModuleType strings to be judged.
#
def IsValidInfMoudleTypeList(ModuleTypeList):
for ModuleType in ModuleTypeList:
if not IsValidInfMoudleType(ModuleType):
return False
return True
## Is Valid Module Type or not
#
# @param ModuleType: A string containing the ModuleType to be judged.
#
def IsValidInfMoudleType(ModuleType):
if ModuleType in MODULE_LIST:
return True
else:
return False
## Is Valid Component Type or not
#
# @param ComponentType: A string containing the ComponentType to be judged.
#
def IsValidInfComponentType(ComponentType):
if ComponentType.upper() in COMPONENT_TYPE_LIST:
return True
else:
return False
## Is valid Tool Family or not
#
# @param ToolFamily: A string containing the Tool Family to be judged.
# Family := [A-Z]([a-zA-Z0-9])*
#
def IsValidToolFamily(ToolFamily):
ReIsValidFamily = re.compile(r"^[A-Z]+[A-Za-z0-9]{0,}$", re.DOTALL)
if ReIsValidFamily.match(ToolFamily) is None:
return False
return True
## Is valid Tool TagName or not
#
# The TagName sample is MYTOOLS and VS2005.
#
# @param TagName: A string containing the Tool TagName to be judged.
#
def IsValidToolTagName(TagName):
if TagName.strip() == '':
return True
if TagName.strip() == '*':
return True
if not IsValidWord(TagName):
return False
return True
## Is valid arch or not
#
# <OA> ::= (a-zA-Z)(A-Za-z0-9){0,}
# <arch> ::= {"IA32"} {"X64"} {"IPF"} {"EBC"} {<OA>}
# {"common"}
# @param Arch: The arch string to be validated
#
def IsValidArch(Arch):
if Arch == 'common':
return True
ReIsValidArch = re.compile(r"^[a-zA-Z]+[a-zA-Z0-9]{0,}$", re.DOTALL)
if ReIsValidArch.match(Arch) is None:
return False
return True
## Is valid family or not
#
# <Family> ::= {"MSFT"} {"GCC"} {"INTEL"} {<Usr>} {"*"}
# <Usr> ::= [A-Z][A-Za-z0-9]{0,}
#
# @param Family: The family string to be validated
#
def IsValidFamily(Family):
Family = Family.strip()
if Family == '*':
return True
if Family == '':
return True
ReIsValidFamily = re.compile(r"^[A-Z]+[A-Za-z0-9]{0,}$", re.DOTALL)
if ReIsValidFamily.match(Family) is None:
return False
return True
## Is valid build option name or not
#
# @param BuildOptionName: The BuildOptionName string to be validated
#
def IsValidBuildOptionName(BuildOptionName):
if not BuildOptionName:
return False
ToolOptionList = GetSplitValueList(BuildOptionName, '_', 4)
if len(ToolOptionList) != 5:
return False
ReIsValidBuildOption1 = re.compile(r"^\s*(\*)|([A-Z][a-zA-Z0-9]*)$")
ReIsValidBuildOption2 = re.compile(r"^\s*(\*)|([a-zA-Z][a-zA-Z0-9]*)$")
if ReIsValidBuildOption1.match(ToolOptionList[0]) is None:
return False
if ReIsValidBuildOption1.match(ToolOptionList[1]) is None:
return False
if ReIsValidBuildOption2.match(ToolOptionList[2]) is None:
return False
if ToolOptionList[3] == "*" and ToolOptionList[4] not in ['FAMILY', 'DLL', 'DPATH']:
return False
return True
## IsValidToken
#
# Check if pattern string matches total token
#
# @param ReString: regular string
# @param Token: Token to be matched
#
def IsValidToken(ReString, Token):
Match = re.compile(ReString).match(Token)
return Match and Match.start() == 0 and Match.end() == len(Token)
## IsValidPath
#
# Check if path exist
#
# @param Path: Absolute path or relative path to be checked
# @param Root: Root path
#
def IsValidPath(Path, Root):
Path = Path.strip()
OrigPath = Path.replace('\\', '/')
Path = os.path.normpath(Path).replace('\\', '/')
Root = os.path.normpath(Root).replace('\\', '/')
FullPath = mws.join(Root, Path)
if not os.path.exists(FullPath):
return False
#
# If Path is absolute path.
# It should be in Root.
#
if os.path.isabs(Path):
if not Path.startswith(Root):
return False
return True
#
# Check illegal character
#
for Rel in ['/', './', '../']:
if OrigPath.startswith(Rel):
return False
for Rel in ['//', '/./', '/../']:
if Rel in OrigPath:
return False
for Rel in ['/.', '/..', '/']:
if OrigPath.endswith(Rel):
return False
Path = Path.rstrip('/')
#
# Check relative path
#
for Word in Path.split('/'):
if not IsValidWord(Word):
return False
return True
## IsValidInstallPath
#
# Check if an install path valid or not.
#
# Absolute path or path starts with '.' or path contains '..' are invalid.
#
# @param Path: path to be checked
#
def IsValidInstallPath(Path):
if platform.platform().find("Windows") >= 0:
if os.path.isabs(Path):
return False
else:
if Path[1:2] == ':':
return False
if os.path.isabs(Path):
return False
if Path.startswith('.'):
return False
if Path.find('..') != -1:
return False
return True
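#
# Examples (illustrative, on a non-Windows host):
#   IsValidInstallPath('Include/Sample.h') -> True
#   IsValidInstallPath('../Sample.h')      -> False  (starts with '.')
#   IsValidInstallPath('/usr/Sample.h')    -> False  (absolute path)
#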
## IsValidCFormatGuid
#
# Check if GUID format has the from of {8,4,4,{2,2,2,2,2,2,2,2}}
#
# @param Guid: Guid to be checked
#
def IsValidCFormatGuid(Guid):
#
# Valid: { 0xf0b11735, 0x87a0, 0x4193, {0xb2, 0x66, 0x53, 0x8c, 0x38,
# 0xaf, 0x48, 0xce }}
# Invalid: { 0xf0b11735, 0x87a0, 0x4193, {0xb2, 0x66, 0x53, 0x8c, 0x38,
# 0xaf, 0x48, 0xce }} 0x123
# Invalid: { 0xf0b1 1735, 0x87a0, 0x4193, {0xb2, 0x66, 0x53, 0x8c, 0x38,
# 0xaf, 0x48, 0xce }}
#
List = ['{', 10, ',', 6, ',', 6, ',{', 4, ',', 4, ',', 4,
',', 4, ',', 4, ',', 4, ',', 4, ',', 4, '}}']
Index = 0
Value = ''
SepValue = ''
for Char in Guid:
if Char not in '{},\t ':
Value += Char
continue
if Value:
try:
#
# Index may out of bound
#
if not SepValue or SepValue != List[Index]:
return False
Index += 1
SepValue = ''
if not Value.startswith('0x') and not Value.startswith('0X'):
return False
#
# Index may out of bound
#
if not isinstance(List[Index], type(1)) or \
len(Value) > List[Index] or len(Value) < 3:
return False
#
# Check if string can be converted to integer
# Throw exception if not
#
int(Value, 16)
except BaseException:
#
# Exception caught means invalid format
#
return False
Value = ''
Index += 1
if Char in '{},':
SepValue += Char
return SepValue == '}}' and Value == ''
## IsValidPcdType
#
# Check whether the PCD type is valid
#
# @param PcdTypeString: The PcdType string to be checked.
#
def IsValidPcdType(PcdTypeString):
if PcdTypeString.upper() in PCD_USAGE_TYPE_LIST_OF_MODULE:
return True
else:
return False
## IsValidWord
#
# Check whether the word is valid.
# <Word> ::= (a-zA-Z0-9_)(a-zA-Z0-9_-){0,} Alphanumeric characters with
# optional
# dash "-" and/or underscore "_" characters. No whitespace
# characters are permitted.
#
# @param Word: The word string to be checked.
#
def IsValidWord(Word):
if not Word:
return False
#
# The first char should be alpha, _ or Digit.
#
if not Word[0].isalnum() and \
not Word[0] == '_' and \
not Word[0].isdigit():
return False
LastChar = ''
for Char in Word[1:]:
if (not Char.isalpha()) and \
(not Char.isdigit()) and \
Char != '-' and \
Char != '_' and \
Char != '.':
return False
if Char == '.' and LastChar == '.':
return False
LastChar = Char
return True
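#
# Examples (illustrative):
#   IsValidWord('Token_Name-1') -> True
#   IsValidWord('a..b')         -> False  (consecutive periods)
#   IsValidWord('')             -> False
#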
## IsValidSimpleWord
#
# Check whether the SimpleWord is valid.
# <SimpleWord> ::= (a-zA-Z0-9)(a-zA-Z0-9_-){0,}
# A word that cannot contain a period character.
#
# @param Word: The word string to be checked.
#
def IsValidSimpleWord(Word):
ReIsValidSimpleWord = \
re.compile(r"^[0-9A-Za-z][0-9A-Za-z\-_]*$", re.DOTALL)
Word = Word.strip()
if not Word:
return False
if not ReIsValidSimpleWord.match(Word):
return False
return True
## IsValidDecVersion
#
# Check whether the decimal version is valid.
# <DecVersion> ::= (0-9){1,} ["." (0-9){1,}]
#
# @param Word: The word string to be checked.
#
def IsValidDecVersion(Word):
if Word.find('.') > -1:
ReIsValidDecVersion = re.compile(r"[0-9]+\.?[0-9]+$")
else:
ReIsValidDecVersion = re.compile(r"[0-9]+$")
if ReIsValidDecVersion.match(Word) is None:
return False
return True
## IsValidHexVersion
#
# Check whether the hex version is valid.
# <HexVersion> ::= "0x" <Major> <Minor>
# <Major> ::= <HexDigit>{4}
# <Minor> ::= <HexDigit>{4}
#
# @param Word: The word string to be checked.
#
def IsValidHexVersion(Word):
ReIsValidHexVersion = re.compile(r"[0][xX][0-9A-Fa-f]{8}$", re.DOTALL)
if ReIsValidHexVersion.match(Word) is None:
return False
return True
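#
# Examples (illustrative): the value must be "0x" followed by exactly 8 hex digits,
# e.g.
#   IsValidHexVersion('0x00010005') -> True
#   IsValidHexVersion('0x10005')    -> False
#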
## IsValidBuildNumber
#
# Check whether the BUILD_NUMBER is valid.
# ["BUILD_NUMBER" "=" <Integer>{1,4} <EOL>]
#
# @param Word: The BUILD_NUMBER string to be checked.
#
def IsValidBuildNumber(Word):
ReIsValidBuildNumber = re.compile(r"[0-9]{1,4}$", re.DOTALL)
if ReIsValidBuildNumber.match(Word) is None:
return False
return True
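#
# Illustrative usage (example values only):
#
#     IsValidBuildNumber('0001')    # -> True
#     IsValidBuildNumber('12345')   # -> False, at most 4 digits are allowed
#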
## IsValidDepex
#
# Check whether the Depex is valid.
#
# @param Word: The Depex string need to be checked.
#
def IsValidDepex(Word):
Index = Word.upper().find("PUSH")
if Index > -1:
return IsValidCFormatGuid(Word[Index+4:].strip())
ReIsValidCName = re.compile(r"^[A-Za-z_][0-9A-Za-z_\s\.]*$", re.DOTALL)
if ReIsValidCName.match(Word) is None:
return False
return True
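#
# Illustrative usage (the GUID C names below are placeholders; any valid C
# names behave the same way):
#
#     IsValidDepex('gEfiHiiDatabaseProtocolGuid AND gEfiPcdProtocolGuid')   # -> True
#     IsValidDepex('PUSH { 0xf0b11735, 0x87a0, 0x4193, '
#                  '{0xb2, 0x66, 0x53, 0x8c, 0x38, 0xaf, 0x48, 0xce }}')    # -> True
#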
## IsValidNormalizedString
#
# Check whether the NormalizedString is valid.
# <NormalizedString> ::= <DblQuote> [{<Word>} {<Space>}]{1,} <DblQuote>
# <Space> ::= 0x20
#
# @param String: string to be checked
#
def IsValidNormalizedString(String):
if String == '':
return True
for Char in String:
if Char == '\t':
return False
StringList = GetSplitValueList(String, TAB_SPACE_SPLIT)
for Item in StringList:
if not Item:
continue
if not IsValidWord(Item):
return False
return True
## IsValidIdString
#
# Check whether the IdString is valid.
#
# @param IdString: The IdString need to be checked.
#
def IsValidIdString(String):
if IsValidSimpleWord(String.strip()):
return True
if String.strip().startswith('"') and \
String.strip().endswith('"'):
String = String[1:-1]
if String.strip() == "":
return True
if IsValidNormalizedString(String):
return True
return False
## IsValidVersionString
#
# Check whether the VersionString is valid.
# <AsciiString> ::= [ [<WhiteSpace>]{0,} [<AsciiChars>]{0,} ] {0,}
# <WhiteSpace> ::= {<Tab>} {<Space>}
# <Tab> ::= 0x09
# <Space> ::= 0x20
# <AsciiChars> ::= (0x21 - 0x7E)
#
# @param VersionString: The VersionString need to be checked.
#
def IsValidVersionString(VersionString):
VersionString = VersionString.strip()
for Char in VersionString:
        if not (Char == ' ' or Char == '\t' or 0x21 <= ord(Char) <= 0x7E):
return False
return True
## IsValidPcdValue
#
# Check whether the PcdValue is valid.
#
# @param PcdValue: The PcdValue to be checked.
#
def IsValidPcdValue(PcdValue):
for Char in PcdValue:
if Char == '\n' or Char == '\t' or Char == '\f':
return False
#
# <Boolean>
#
if IsValidFeatureFlagExp(PcdValue, True)[0]:
return True
#
# <Number> ::= {<Integer>} {<HexNumber>}
# <Integer> ::= {(0-9)} {(1-9)(0-9){1,}}
# <HexNumber> ::= "0x" <HexDigit>{1,}
# <HexDigit> ::= (a-fA-F0-9)
#
if IsValidHex(PcdValue):
return True
ReIsValidIntegerSingle = re.compile(r"^\s*[0-9]\s*$", re.DOTALL)
if ReIsValidIntegerSingle.match(PcdValue) is not None:
return True
ReIsValidIntegerMulti = re.compile(r"^\s*[1-9][0-9]+\s*$", re.DOTALL)
if ReIsValidIntegerMulti.match(PcdValue) is not None:
return True
#
# <StringVal> ::= {<StringType>} {<Array>} {"$(" <MACRO> ")"}
# <StringType> ::= {<UnicodeString>} {<CString>}
#
ReIsValidStringType = re.compile(r"^\s*[\"L].*[\"]\s*$")
if ReIsValidStringType.match(PcdValue):
IsTrue = False
if PcdValue.strip().startswith('L\"'):
StringValue = PcdValue.strip().lstrip('L\"').rstrip('\"')
if IsValidBareCString(StringValue):
IsTrue = True
elif PcdValue.strip().startswith('\"'):
StringValue = PcdValue.strip().lstrip('\"').rstrip('\"')
if IsValidBareCString(StringValue):
IsTrue = True
if IsTrue:
return IsTrue
#
# <Array> ::= {<CArray>} {<NList>} {<CFormatGUID>}
# <CArray> ::= "{" [<NList>] <CArray>{0,} "}"
# <NList> ::= <HexByte> ["," <HexByte>]{0,}
# <HexDigit> ::= (a-fA-F0-9)
# <HexByte> ::= "0x" <HexDigit>{1,2}
#
if IsValidCFormatGuid(PcdValue):
return True
ReIsValidByteHex = re.compile(r"^\s*0x[0-9a-fA-F]{1,2}\s*$", re.DOTALL)
if PcdValue.strip().startswith('{') and PcdValue.strip().endswith('}') :
StringValue = PcdValue.strip().lstrip('{').rstrip('}')
ValueList = StringValue.split(',')
AllValidFlag = True
for ValueItem in ValueList:
if not ReIsValidByteHex.match(ValueItem.strip()):
AllValidFlag = False
if AllValidFlag:
return True
#
# NList
#
AllValidFlag = True
ValueList = PcdValue.split(',')
for ValueItem in ValueList:
if not ReIsValidByteHex.match(ValueItem.strip()):
AllValidFlag = False
if AllValidFlag:
return True
return False
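#
# Illustrative usage (example values only; results assume the helper
# validators IsValidFeatureFlagExp, IsValidHex and IsValidBareCString,
# defined elsewhere, accept or reject these forms as their names suggest):
#
#     IsValidPcdValue('{0x01, 0xAA}')   # -> True, a list of hex bytes
#     IsValidPcdValue('1\t2')           # -> False, tab characters are rejected
#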
## IsValidCVariableName
#
# Check whether the C variable name is valid.
#
# @param CName: The C variable name to be checked.
#
def IsValidCVariableName(CName):
ReIsValidCName = re.compile(r"^[A-Za-z_][0-9A-Za-z_]*$", re.DOTALL)
if ReIsValidCName.match(CName) is None:
return False
return True
## IsValidIdentifier
#
# <Identifier> ::= <NonDigit> <Chars>{0,}
# <Chars> ::= (a-zA-Z0-9_)
# <NonDigit> ::= (a-zA-Z_)
#
# @param Ident: identifier to be checked
#
def IsValidIdentifier(Ident):
ReIdent = re.compile(r"^[A-Za-z_][0-9A-Za-z_]*$", re.DOTALL)
if ReIdent.match(Ident) is None:
return False
return True
## IsValidDecVersionVal
#
# {(0-9){1,} "." (0-99)}
#
# @param Ver: version to be checked
#
def IsValidDecVersionVal(Ver):
ReVersion = re.compile(r"[0-9]+(\.[0-9]{1,2})$")
if ReVersion.match(Ver) is None:
return False
return True
## IsValidLibName
#
# (A-Z)(a-zA-Z0-9){0,} and must not be "NULL"
#
def IsValidLibName(LibName):
if LibName == 'NULL':
return False
ReLibName = re.compile("^[A-Z]+[a-zA-Z0-9]*$")
if not ReLibName.match(LibName):
return False
return True
# IsValidUserId
#
# <UserId> ::= (a-zA-Z)(a-zA-Z0-9_.){0,}
# Words that contain period "." must be encapsulated in double quotation marks.
#
def IsValidUserId(UserId):
UserId = UserId.strip()
Quoted = False
if UserId.startswith('"') and UserId.endswith('"'):
Quoted = True
UserId = UserId[1:-1]
if not UserId or not UserId[0].isalpha():
return False
for Char in UserId[1:]:
if not Char.isalnum() and not Char in '_.':
return False
if Char == '.' and not Quoted:
return False
return True
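#
# Illustrative usage (example values only):
#
#     IsValidUserId('TianoCore')       # -> True
#     IsValidUserId('Tiano.Core')      # -> False, '.' requires double quotes
#     IsValidUserId('"Tiano.Core"')    # -> True
#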
#
# Check if a UTF16-LE file has a BOM header
#
def CheckUTF16FileHeader(File):
    with open(File, 'rb') as FileHandle:
        FileIn = FileHandle.read(2)
if FileIn != b'\xff\xfe':
return False
return True
| edk2-master | BaseTools/Source/Python/UPT/Library/ParserValidate.py |
## @file
# This is an XML API that uses a syntax similar to XPath, but it is written in
# standard python so that no extra python packages are required to use it.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
XmlRoutines
'''
##
# Import Modules
#
import xml.dom.minidom
import re
import codecs
from Logger.ToolError import PARSER_ERROR
import Logger.Log as Logger
## Create an element of XML
#
# @param Name           Tag name of the new element
# @param String         Text content of the new element
# @param NodeList       List of child DOM nodes or [Key, Value] pairs
# @param AttributeList  List of [Key, Value] attribute pairs
#
def CreateXmlElement(Name, String, NodeList, AttributeList):
Doc = xml.dom.minidom.Document()
Element = Doc.createElement(Name)
if String != '' and String is not None:
Element.appendChild(Doc.createTextNode(String))
for Item in NodeList:
if isinstance(Item, type([])):
Key = Item[0]
Value = Item[1]
if Key != '' and Key is not None and Value != '' and Value is not None:
Node = Doc.createElement(Key)
Node.appendChild(Doc.createTextNode(Value))
Element.appendChild(Node)
else:
Element.appendChild(Item)
for Item in AttributeList:
Key = Item[0]
Value = Item[1]
if Key != '' and Key is not None and Value != '' and Value is not None:
Element.setAttribute(Key, Value)
return Element
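#
# Illustrative usage (the element and attribute names below are example
# values only):
#
#     Element = CreateXmlElement('PackageName', 'MdePkg',
#                                [['Version', '1.08']],
#                                [['BaseName', 'MdePkg']])
#     Element.toxml()
#     # -> '<PackageName BaseName="MdePkg">MdePkg<Version>1.08</Version></PackageName>'
#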
## Get a list of XML nodes using XPath style syntax.
#
# Return a list of XML DOM nodes from the root Dom specified by XPath String.
# If the input Dom or String is not valid, then an empty list is returned.
#
# @param Dom The root XML DOM node.
# @param String A XPath style path.
#
def XmlList(Dom, String):
if String is None or String == "" or Dom is None or Dom == "":
return []
if Dom.nodeType == Dom.DOCUMENT_NODE:
Dom = Dom.documentElement
if String[0] == "/":
String = String[1:]
TagList = String.split('/')
Nodes = [Dom]
Index = 0
End = len(TagList) - 1
while Index <= End:
ChildNodes = []
for Node in Nodes:
if Node.nodeType == Node.ELEMENT_NODE and Node.tagName == \
TagList[Index]:
if Index < End:
ChildNodes.extend(Node.childNodes)
else:
ChildNodes.append(Node)
Nodes = ChildNodes
ChildNodes = []
Index += 1
return Nodes
## Get a single XML node using XPath style syntax.
#
# Return a single XML DOM node from the root Dom specified by XPath String.
# If the input Dom or String is not valid, then None is returned.
#
# @param Dom The root XML DOM node.
# @param String A XPath style path.
#
def XmlNode(Dom, String):
if String is None or String == "" or Dom is None or Dom == "":
return None
if Dom.nodeType == Dom.DOCUMENT_NODE:
Dom = Dom.documentElement
if String[0] == "/":
String = String[1:]
TagList = String.split('/')
Index = 0
End = len(TagList) - 1
ChildNodes = [Dom]
while Index <= End:
for Node in ChildNodes:
if Node.nodeType == Node.ELEMENT_NODE and \
Node.tagName == TagList[Index]:
if Index < End:
ChildNodes = Node.childNodes
else:
return Node
break
Index += 1
return None
## Get a single XML element using XPath style syntax.
#
# Return a single XML element from the root Dom specified by XPath String.
# If the input Dom or String is not valid, then an empty string is returned.
#
# @param Dom The root XML DOM object.
# @param String A XPath style path.
#
def XmlElement(Dom, String):
try:
return XmlNode(Dom, String).firstChild.data.strip()
except BaseException:
return ""
## Get a single XML element using XPath style syntax.
#
# Similar to XmlElement, but does not strip the leading and trailing spaces
# and newlines; it only removes the newlines and spaces introduced by
# toprettyxml()
#
# @param Dom The root XML DOM object.
# @param String A XPath style path.
#
def XmlElement2(Dom, String):
try:
HelpStr = XmlNode(Dom, String).firstChild.data
gRemovePrettyRe = re.compile(r"""(?:(\n *) )(.*)\1""", re.DOTALL)
HelpStr = re.sub(gRemovePrettyRe, r"\2", HelpStr)
return HelpStr
except BaseException:
return ""
## Get a single XML element of the current node.
#
# Return a single XML element specified by the current root Dom.
# If the input Dom is not valid, then an empty string is returned.
#
# @param Dom The root XML DOM object.
#
def XmlElementData(Dom):
try:
return Dom.firstChild.data.strip()
except BaseException:
return ""
## Get a list of XML elements using XPath style syntax.
#
# Return a list of XML elements from the root Dom specified by XPath String.
# If the input Dom or String is not valid, then an empty list is returned.
#
# @param Dom The root XML DOM object.
# @param String A XPath style path.
#
def XmlElementList(Dom, String):
return list(map(XmlElementData, XmlList(Dom, String)))
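#
# Illustrative usage (example XML only):
#
#     Dom = xml.dom.minidom.parseString(
#         '<Package><Sources><Filename>A.c</Filename>'
#         '<Filename>B.c</Filename></Sources></Package>')
#     XmlElementList(Dom, '/Package/Sources/Filename')   # -> ['A.c', 'B.c']
#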
## Get the XML attribute of the current node.
#
# Return a single XML attribute named Attribute from the current root Dom.
# If the input Dom or Attribute is not valid, then an empty string is returned.
#
# @param Dom The root XML DOM object.
# @param Attribute The name of Attribute.
#
def XmlAttribute(Dom, Attribute):
try:
return Dom.getAttribute(Attribute)
except BaseException:
return ''
## Get the XML node name of the current node.
#
# Return a single XML node name from the current root Dom.
# If the input Dom is not valid, then an empty string is returned.
#
# @param Dom The root XML DOM object.
#
def XmlNodeName(Dom):
try:
return Dom.nodeName.strip()
except BaseException:
return ''
## Parse an XML file.
#
# Parse the input XML file named FileName and return a XML DOM it stands for.
# If the input File is not a valid XML file, a PARSER_ERROR is reported via Logger.
#
# @param FileName The XML file name.
#
def XmlParseFile(FileName):
    XmlFile = None
    try:
        XmlFile = codecs.open(FileName, 'rb')
        Dom = xml.dom.minidom.parse(XmlFile)
        XmlFile.close()
        return Dom
    except BaseException as XExcept:
        if XmlFile is not None:
            XmlFile.close()
        Logger.Error('\nUPT', PARSER_ERROR, XExcept, File=FileName, RaiseError=True)
| edk2-master | BaseTools/Source/Python/UPT/Library/Xml/XmlRoutines.py |
## @file
# Python 'Library' package initialization file.
#
# This file is required to make Python interpreter treat the directory
# as containing package.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
Xml
'''
| edk2-master | BaseTools/Source/Python/UPT/Library/Xml/__init__.py |
## @file
# This file contained the parser for [Pcds] sections in INF file
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
InfPcdSectionParser
'''
##
# Import Modules
#
import Logger.Log as Logger
from Logger import StringTable as ST
from Logger.ToolError import FORMAT_INVALID
from Parser.InfParserMisc import InfExpandMacro
from Library import DataType as DT
from Library.Parsing import MacroParser
from Library.Misc import GetSplitValueList
from Library import GlobalData
from Library.StringUtils import SplitPcdEntry
from Parser.InfParserMisc import InfParserSectionRoot
class InfPcdSectionParser(InfParserSectionRoot):
## Section PCD related parser
#
    # All 5 types of PCD lists below use this function:
    # 'FixedPcd', 'FeaturePcd', 'PatchPcd', 'Pcd', 'PcdEx'
    #
    # This is an INF-independent parser; the validation in this parser only
    # covers the INF spec scope and will not cross DEC/DSC to check the PCD
    # value.
#
def InfPcdParser(self, SectionString, InfSectionObject, FileName):
KeysList = []
PcdList = []
CommentsList = []
ValueList = []
#
# Current section archs
#
LineIndex = -1
for Item in self.LastSectionHeaderContent:
if (Item[0], Item[1], Item[3]) not in KeysList:
KeysList.append((Item[0], Item[1], Item[3]))
LineIndex = Item[3]
if (Item[0].upper() == DT.TAB_INF_FIXED_PCD.upper() or \
Item[0].upper() == DT.TAB_INF_FEATURE_PCD.upper() or \
Item[0].upper() == DT.TAB_INF_PCD.upper()) and GlobalData.gIS_BINARY_INF:
Logger.Error('InfParser', FORMAT_INVALID, ST.ERR_ASBUILD_PCD_SECTION_TYPE%("\"" + Item[0] + "\""),
File=FileName, Line=LineIndex)
#
# For Common INF file
#
if not GlobalData.gIS_BINARY_INF:
#
# Macro defined in this section
#
SectionMacros = {}
for Line in SectionString:
PcdLineContent = Line[0]
PcdLineNo = Line[1]
if PcdLineContent.strip() == '':
CommentsList = []
continue
if PcdLineContent.strip().startswith(DT.TAB_COMMENT_SPLIT):
CommentsList.append(Line)
continue
else:
#
# Encounter a PCD entry
#
if PcdLineContent.find(DT.TAB_COMMENT_SPLIT) > -1:
CommentsList.append((
PcdLineContent[PcdLineContent.find(DT.TAB_COMMENT_SPLIT):],
PcdLineNo))
PcdLineContent = PcdLineContent[:PcdLineContent.find(DT.TAB_COMMENT_SPLIT)]
if PcdLineContent != '':
#
# Find Macro
#
Name, Value = MacroParser((PcdLineContent, PcdLineNo),
FileName,
DT.MODEL_EFI_PCD,
self.FileLocalMacros)
if Name is not None:
SectionMacros[Name] = Value
ValueList = []
CommentsList = []
continue
PcdEntryReturn = SplitPcdEntry(PcdLineContent)
if not PcdEntryReturn[1]:
TokenList = ['']
else:
TokenList = PcdEntryReturn[0]
ValueList[0:len(TokenList)] = TokenList
#
# Replace with Local section Macro and [Defines] section Macro.
#
ValueList = [InfExpandMacro(Value, (FileName, PcdLineContent, PcdLineNo),
self.FileLocalMacros, SectionMacros, True)
for Value in ValueList]
if len(ValueList) >= 1:
PcdList.append((ValueList, CommentsList, (PcdLineContent, PcdLineNo, FileName)))
ValueList = []
CommentsList = []
continue
#
# For Binary INF file
#
else:
for Line in SectionString:
LineContent = Line[0].strip()
LineNo = Line[1]
if LineContent == '':
CommentsList = []
continue
if LineContent.startswith(DT.TAB_COMMENT_SPLIT):
CommentsList.append(LineContent)
continue
#
# Have comments at tail.
#
CommentIndex = LineContent.find(DT.TAB_COMMENT_SPLIT)
if CommentIndex > -1:
CommentsList.append(LineContent[CommentIndex+1:])
LineContent = LineContent[:CommentIndex]
TokenList = GetSplitValueList(LineContent, DT.TAB_VALUE_SPLIT)
#
# PatchablePcd
# TokenSpace.CName | Value | Offset
#
if KeysList[0][0].upper() == DT.TAB_INF_PATCH_PCD.upper():
if len(TokenList) != 3:
Logger.Error('InfParser',
FORMAT_INVALID,
ST.ERR_ASBUILD_PATCHPCD_FORMAT_INVALID,
File=FileName,
Line=LineNo,
ExtraData=LineContent)
#
elif KeysList[0][0].upper() == DT.TAB_INF_PCD_EX.upper():
if len(TokenList) != 1:
Logger.Error('InfParser',
FORMAT_INVALID,
ST.ERR_ASBUILD_PCDEX_FORMAT_INVALID,
File=FileName,
Line=LineNo,
ExtraData=LineContent)
ValueList[0:len(TokenList)] = TokenList
if len(ValueList) >= 1:
PcdList.append((ValueList, CommentsList, (LineContent, LineNo, FileName)))
ValueList = []
CommentsList = []
continue
if not InfSectionObject.SetPcds(PcdList, KeysList = KeysList,
PackageInfo = self.InfPackageSection.GetPackages()):
Logger.Error('InfParser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR%("[PCD]"),
File=FileName,
Line=LineIndex)
| edk2-master | BaseTools/Source/Python/UPT/Parser/InfPcdSectionParser.py |
## @file
# This file contained the parser for [Depex] sections in INF file
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
InfDepexSectionParser
'''
##
# Import Modules
#
import re
import Logger.Log as Logger
from Logger import StringTable as ST
from Logger.ToolError import FORMAT_INVALID
from Parser.InfParserMisc import InfExpandMacro
from Library import DataType as DT
from Library.Misc import GetSplitValueList
from Parser.InfParserMisc import InfParserSectionRoot
class InfDepexSectionParser(InfParserSectionRoot):
## InfDepexParser
#
    # For now, only separates the Depex string and comments.
    # There are two types of section headers:
# 1. [Depex.Arch.ModuleType, ...]
# 2. [Depex.Arch|FFE, ...]
#
def InfDepexParser(self, SectionString, InfSectionObject, FileName):
DepexContent = []
DepexComment = []
ValueList = []
#
# Parse section content
#
for Line in SectionString:
LineContent = Line[0]
LineNo = Line[1]
#
# Found comment
#
if LineContent.strip().startswith(DT.TAB_COMMENT_SPLIT):
DepexComment.append((LineContent, LineNo))
continue
#
# Replace with [Defines] section Macro
#
LineContent = InfExpandMacro(LineContent,
(FileName, LineContent, Line[1]),
self.FileLocalMacros,
None, True)
CommentCount = LineContent.find(DT.TAB_COMMENT_SPLIT)
if CommentCount > -1:
DepexComment.append((LineContent[CommentCount:], LineNo))
LineContent = LineContent[:CommentCount-1]
CommentCount = -1
DepexContent.append((LineContent, LineNo))
TokenList = GetSplitValueList(LineContent, DT.TAB_COMMENT_SPLIT)
ValueList[0:len(TokenList)] = TokenList
#
# Current section archs
#
KeyList = []
LastItem = ''
for Item in self.LastSectionHeaderContent:
LastItem = Item
if (Item[1], Item[2], Item[3]) not in KeyList:
KeyList.append((Item[1], Item[2], Item[3]))
NewCommentList = []
FormatCommentLn = -1
ReFormatComment = re.compile(r"""#(?:\s*)\[(.*?)\](?:.*)""", re.DOTALL)
for CommentItem in DepexComment:
CommentContent = CommentItem[0]
if ReFormatComment.match(CommentContent) is not None:
FormatCommentLn = CommentItem[1] + 1
continue
if CommentItem[1] != FormatCommentLn:
NewCommentList.append(CommentContent)
else:
FormatCommentLn = CommentItem[1] + 1
if not InfSectionObject.SetDepex(DepexContent, KeyList = KeyList, CommentList = NewCommentList):
Logger.Error('InfParser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR%("[Depex]"),
File=FileName,
Line=LastItem[3])
| edk2-master | BaseTools/Source/Python/UPT/Parser/InfDepexSectionParser.py |
## @file
# This file contained the parser for INF file
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
InfParser
'''
##
# Import Modules
#
import re
import os
from copy import deepcopy
from Library.StringUtils import GetSplitValueList
from Library.StringUtils import ConvertSpecialChar
from Library.Misc import ProcessLineExtender
from Library.Misc import ProcessEdkComment
from Library.Parsing import NormPath
from Library.ParserValidate import IsValidInfMoudleTypeList
from Library.ParserValidate import IsValidArch
from Library import DataType as DT
from Library import GlobalData
import Logger.Log as Logger
from Logger import StringTable as ST
from Logger.ToolError import FORMAT_INVALID
from Logger.ToolError import FILE_READ_FAILURE
from Logger.ToolError import PARSER_ERROR
from Object.Parser.InfCommonObject import InfSectionCommonDef
from Parser.InfSectionParser import InfSectionParser
from Parser.InfParserMisc import gINF_SECTION_DEF
from Parser.InfParserMisc import IsBinaryInf
## OpenInfFile
#
#
def OpenInfFile(Filename):
FileLinesList = []
try:
FInputfile = open(Filename, "r")
try:
FileLinesList = FInputfile.readlines()
except BaseException:
Logger.Error("InfParser",
FILE_READ_FAILURE,
ST.ERR_FILE_OPEN_FAILURE,
File=Filename)
finally:
FInputfile.close()
except BaseException:
Logger.Error("InfParser",
FILE_READ_FAILURE,
ST.ERR_FILE_OPEN_FAILURE,
File=Filename)
return FileLinesList
## InfParser
#
# This class defines the structure used in the InfParser object
#
# @param InfObject: Inherited from InfSectionParser class
# @param Filename: Input value for Filename of INF file, default is
# None
# @param WorkspaceDir: Input value for current workspace directory,
# default is None
#
class InfParser(InfSectionParser):
def __init__(self, Filename = None, WorkspaceDir = None):
#
# Call parent class construct function
#
        InfSectionParser.__init__(self)
self.WorkspaceDir = WorkspaceDir
self.SupArchList = DT.ARCH_LIST
self.EventList = []
self.HobList = []
self.BootModeList = []
#
# Load Inf file if filename is not None
#
if Filename is not None:
self.ParseInfFile(Filename)
## Parse INF file
#
# Parse the file if it exists
#
# @param Filename: Input value for filename of INF file
#
def ParseInfFile(self, Filename):
Filename = NormPath(Filename)
(Path, Name) = os.path.split(Filename)
self.FullPath = Filename
self.RelaPath = Path
self.FileName = Name
GlobalData.gINF_MODULE_DIR = Path
GlobalData.gINF_MODULE_NAME = self.FullPath
GlobalData.gIS_BINARY_INF = False
#
# Initialize common data
#
LineNo = 0
CurrentSection = DT.MODEL_UNKNOWN
SectionLines = []
#
# Flags
#
HeaderCommentStart = False
HeaderCommentEnd = False
HeaderStarLineNo = -1
BinaryHeaderCommentStart = False
BinaryHeaderCommentEnd = False
BinaryHeaderStarLineNo = -1
#
# While Section ends. parse whole section contents.
#
NewSectionStartFlag = False
FirstSectionStartFlag = False
#
# Parse file content
#
CommentBlock = []
#
# Variables for Event/Hob/BootMode
#
self.EventList = []
self.HobList = []
self.BootModeList = []
SectionType = ''
FileLinesList = OpenInfFile (Filename)
#
# One INF file can only has one [Defines] section.
#
DefineSectionParsedFlag = False
#
# Convert special characters in lines to space character.
#
FileLinesList = ConvertSpecialChar(FileLinesList)
#
# Process Line Extender
#
FileLinesList = ProcessLineExtender(FileLinesList)
#
# Process EdkI INF style comment if found
#
OrigLines = [Line for Line in FileLinesList]
FileLinesList, EdkCommentStartPos = ProcessEdkComment(FileLinesList)
#
# Judge whether the INF file is Binary INF or not
#
if IsBinaryInf(FileLinesList):
GlobalData.gIS_BINARY_INF = True
InfSectionCommonDefObj = None
for Line in FileLinesList:
LineNo = LineNo + 1
Line = Line.strip()
if (LineNo < len(FileLinesList) - 1):
NextLine = FileLinesList[LineNo].strip()
#
# blank line
#
if (Line == '' or not Line) and LineNo == len(FileLinesList):
LastSectionFalg = True
#
# check whether file header comment section started
#
if Line.startswith(DT.TAB_SPECIAL_COMMENT) and \
(Line.find(DT.TAB_HEADER_COMMENT) > -1) and \
not HeaderCommentStart and not HeaderCommentEnd:
CurrentSection = DT.MODEL_META_DATA_FILE_HEADER
#
# Append the first line to section lines.
#
HeaderStarLineNo = LineNo
SectionLines.append((Line, LineNo))
HeaderCommentStart = True
continue
#
# Collect Header content.
#
if (Line.startswith(DT.TAB_COMMENT_SPLIT) and CurrentSection == DT.MODEL_META_DATA_FILE_HEADER) and\
HeaderCommentStart and not Line.startswith(DT.TAB_SPECIAL_COMMENT) and not\
HeaderCommentEnd and NextLine != '':
SectionLines.append((Line, LineNo))
continue
#
# Header content end
#
if (Line.startswith(DT.TAB_SPECIAL_COMMENT) or not Line.strip().startswith("#")) and HeaderCommentStart \
and not HeaderCommentEnd:
HeaderCommentEnd = True
BinaryHeaderCommentStart = False
BinaryHeaderCommentEnd = False
HeaderCommentStart = False
if Line.find(DT.TAB_BINARY_HEADER_COMMENT) > -1:
self.InfHeaderParser(SectionLines, self.InfHeader, self.FileName)
SectionLines = []
else:
SectionLines.append((Line, LineNo))
#
# Call Header comment parser.
#
self.InfHeaderParser(SectionLines, self.InfHeader, self.FileName)
SectionLines = []
continue
#
# check whether binary header comment section started
#
if Line.startswith(DT.TAB_SPECIAL_COMMENT) and \
(Line.find(DT.TAB_BINARY_HEADER_COMMENT) > -1) and \
not BinaryHeaderCommentStart:
SectionLines = []
CurrentSection = DT.MODEL_META_DATA_FILE_HEADER
#
# Append the first line to section lines.
#
BinaryHeaderStarLineNo = LineNo
SectionLines.append((Line, LineNo))
BinaryHeaderCommentStart = True
HeaderCommentEnd = True
continue
#
# check whether there are more than one binary header exist
#
if Line.startswith(DT.TAB_SPECIAL_COMMENT) and BinaryHeaderCommentStart and \
not BinaryHeaderCommentEnd and (Line.find(DT.TAB_BINARY_HEADER_COMMENT) > -1):
Logger.Error('Parser',
FORMAT_INVALID,
ST.ERR_MULTIPLE_BINARYHEADER_EXIST,
File=Filename)
#
# Collect Binary Header content.
#
if (Line.startswith(DT.TAB_COMMENT_SPLIT) and CurrentSection == DT.MODEL_META_DATA_FILE_HEADER) and\
BinaryHeaderCommentStart and not Line.startswith(DT.TAB_SPECIAL_COMMENT) and not\
BinaryHeaderCommentEnd and NextLine != '':
SectionLines.append((Line, LineNo))
continue
#
# Binary Header content end
#
if (Line.startswith(DT.TAB_SPECIAL_COMMENT) or not Line.strip().startswith(DT.TAB_COMMENT_SPLIT)) and \
BinaryHeaderCommentStart and not BinaryHeaderCommentEnd:
SectionLines.append((Line, LineNo))
BinaryHeaderCommentStart = False
#
# Call Binary Header comment parser.
#
self.InfHeaderParser(SectionLines, self.InfBinaryHeader, self.FileName, True)
SectionLines = []
BinaryHeaderCommentEnd = True
continue
#
# Find a new section tab
# Or at the last line of INF file,
# need to process the last section.
#
LastSectionFalg = False
if LineNo == len(FileLinesList):
LastSectionFalg = True
if Line.startswith(DT.TAB_COMMENT_SPLIT) and not Line.startswith(DT.TAB_SPECIAL_COMMENT):
SectionLines.append((Line, LineNo))
if not LastSectionFalg:
continue
#
# Encountered a section. start with '[' and end with ']'
#
if (Line.startswith(DT.TAB_SECTION_START) and \
Line.find(DT.TAB_SECTION_END) > -1) or LastSectionFalg:
HeaderCommentEnd = True
BinaryHeaderCommentEnd = True
if not LastSectionFalg:
#
# check to prevent '#' inside section header
#
HeaderContent = Line[1:Line.find(DT.TAB_SECTION_END)]
if HeaderContent.find(DT.TAB_COMMENT_SPLIT) != -1:
Logger.Error("InfParser",
FORMAT_INVALID,
ST.ERR_INF_PARSER_DEFINE_SECTION_HEADER_INVALID,
File=self.FullPath,
Line=LineNo,
ExtraData=Line)
#
# Keep last time section header content for section parser
# usage.
#
self.LastSectionHeaderContent = deepcopy(self.SectionHeaderContent)
#
# TailComments in section define.
#
TailComments = ''
CommentIndex = Line.find(DT.TAB_COMMENT_SPLIT)
if CommentIndex > -1:
TailComments = Line[CommentIndex:]
Line = Line[:CommentIndex]
InfSectionCommonDefObj = InfSectionCommonDef()
if TailComments != '':
InfSectionCommonDefObj.SetTailComments(TailComments)
if CommentBlock != '':
InfSectionCommonDefObj.SetHeaderComments(CommentBlock)
CommentBlock = []
#
                    # Call the section parser before the section header parser to avoid encountering an EDKI INF file
#
if CurrentSection == DT.MODEL_META_DATA_DEFINE:
DefineSectionParsedFlag = self._CallSectionParsers(CurrentSection,
DefineSectionParsedFlag, SectionLines,
InfSectionCommonDefObj, LineNo)
#
# Compare the new section name with current
#
self.SectionHeaderParser(Line, self.FileName, LineNo)
self._CheckSectionHeaders(Line, LineNo)
SectionType = _ConvertSecNameToType(self.SectionHeaderContent[0][0])
if not FirstSectionStartFlag:
CurrentSection = SectionType
FirstSectionStartFlag = True
else:
NewSectionStartFlag = True
else:
SectionLines.append((Line, LineNo))
continue
if LastSectionFalg:
SectionLines, CurrentSection = self._ProcessLastSection(SectionLines, Line, LineNo, CurrentSection)
#
# End of section content collect.
# Parser the section content collected previously.
#
if NewSectionStartFlag or LastSectionFalg:
if CurrentSection != DT.MODEL_META_DATA_DEFINE or \
(LastSectionFalg and CurrentSection == DT.MODEL_META_DATA_DEFINE):
DefineSectionParsedFlag = self._CallSectionParsers(CurrentSection,
DefineSectionParsedFlag, SectionLines,
InfSectionCommonDefObj, LineNo)
CurrentSection = SectionType
#
# Clear section lines
#
SectionLines = []
if HeaderStarLineNo == -1:
Logger.Error("InfParser",
FORMAT_INVALID,
ST.ERR_NO_SOURCE_HEADER,
File=self.FullPath)
if BinaryHeaderStarLineNo > -1 and HeaderStarLineNo > -1 and HeaderStarLineNo > BinaryHeaderStarLineNo:
Logger.Error("InfParser",
FORMAT_INVALID,
ST.ERR_BINARY_HEADER_ORDER,
File=self.FullPath)
#
# EDKII INF should not have EDKI style comment
#
if EdkCommentStartPos != -1:
Logger.Error("InfParser",
FORMAT_INVALID,
ST.ERR_INF_PARSER_EDKI_COMMENT_IN_EDKII,
File=self.FullPath,
Line=EdkCommentStartPos + 1,
ExtraData=OrigLines[EdkCommentStartPos])
#
# extract [Event] [Hob] [BootMode] sections
#
self._ExtractEventHobBootMod(FileLinesList)
## _CheckSectionHeaders
#
#
def _CheckSectionHeaders(self, Line, LineNo):
if len(self.SectionHeaderContent) == 0:
Logger.Error("InfParser",
FORMAT_INVALID,
ST.ERR_INF_PARSER_DEFINE_SECTION_HEADER_INVALID,
File=self.FullPath,
Line=LineNo, ExtraData=Line)
else:
for SectionItem in self.SectionHeaderContent:
ArchList = []
#
# Not cover Depex/UserExtension section header
# check.
#
if SectionItem[0].strip().upper() == DT.TAB_INF_FIXED_PCD.upper() or \
SectionItem[0].strip().upper() == DT.TAB_INF_PATCH_PCD.upper() or \
SectionItem[0].strip().upper() == DT.TAB_INF_PCD_EX.upper() or \
SectionItem[0].strip().upper() == DT.TAB_INF_PCD.upper() or \
SectionItem[0].strip().upper() == DT.TAB_INF_FEATURE_PCD.upper():
ArchList = GetSplitValueList(SectionItem[1].strip(), ' ')
else:
ArchList = [SectionItem[1].strip()]
for Arch in ArchList:
if (not IsValidArch(Arch)) and \
(SectionItem[0].strip().upper() != DT.TAB_DEPEX.upper()) and \
(SectionItem[0].strip().upper() != DT.TAB_USER_EXTENSIONS.upper()) and \
(SectionItem[0].strip().upper() != DT.TAB_COMMON_DEFINES.upper()):
Logger.Error("InfParser",
FORMAT_INVALID,
ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(SectionItem[1]),
File=self.FullPath,
Line=LineNo, ExtraData=Line)
#
# Check if the ModuleType is valid
#
ChkModSectionList = ['LIBRARYCLASSES']
if (self.SectionHeaderContent[0][0].upper() in ChkModSectionList):
if SectionItem[2].strip().upper():
MoudleTypeList = GetSplitValueList(
SectionItem[2].strip().upper())
if (not IsValidInfMoudleTypeList(MoudleTypeList)):
Logger.Error("InfParser",
FORMAT_INVALID,
ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(SectionItem[2]),
File=self.FullPath, Line=LineNo,
ExtraData=Line)
## _CallSectionParsers
#
#
def _CallSectionParsers(self, CurrentSection, DefineSectionParsedFlag,
SectionLines, InfSectionCommonDefObj, LineNo):
if CurrentSection == DT.MODEL_META_DATA_DEFINE:
if not DefineSectionParsedFlag:
self.InfDefineParser(SectionLines,
self.InfDefSection,
self.FullPath,
InfSectionCommonDefObj)
DefineSectionParsedFlag = True
else:
Logger.Error("Parser",
PARSER_ERROR,
ST.ERR_INF_PARSER_MULTI_DEFINE_SECTION,
File=self.FullPath,
RaiseError = Logger.IS_RAISE_ERROR)
elif CurrentSection == DT.MODEL_META_DATA_BUILD_OPTION:
self.InfBuildOptionParser(SectionLines,
self.InfBuildOptionSection,
self.FullPath)
elif CurrentSection == DT.MODEL_EFI_LIBRARY_CLASS:
self.InfLibraryParser(SectionLines,
self.InfLibraryClassSection,
self.FullPath)
elif CurrentSection == DT.MODEL_META_DATA_PACKAGE:
self.InfPackageParser(SectionLines,
self.InfPackageSection,
self.FullPath)
#
# [Pcd] Sections, put it together
#
elif CurrentSection == DT.MODEL_PCD_FIXED_AT_BUILD or \
CurrentSection == DT.MODEL_PCD_PATCHABLE_IN_MODULE or \
CurrentSection == DT.MODEL_PCD_FEATURE_FLAG or \
CurrentSection == DT.MODEL_PCD_DYNAMIC_EX or \
CurrentSection == DT.MODEL_PCD_DYNAMIC:
self.InfPcdParser(SectionLines,
self.InfPcdSection,
self.FullPath)
elif CurrentSection == DT.MODEL_EFI_SOURCE_FILE:
self.InfSourceParser(SectionLines,
self.InfSourcesSection,
self.FullPath)
elif CurrentSection == DT.MODEL_META_DATA_USER_EXTENSION:
self.InfUserExtensionParser(SectionLines,
self.InfUserExtensionSection,
self.FullPath)
elif CurrentSection == DT.MODEL_EFI_PROTOCOL:
self.InfProtocolParser(SectionLines,
self.InfProtocolSection,
self.FullPath)
elif CurrentSection == DT.MODEL_EFI_PPI:
self.InfPpiParser(SectionLines,
self.InfPpiSection,
self.FullPath)
elif CurrentSection == DT.MODEL_EFI_GUID:
self.InfGuidParser(SectionLines,
self.InfGuidSection,
self.FullPath)
elif CurrentSection == DT.MODEL_EFI_DEPEX:
self.InfDepexParser(SectionLines,
self.InfDepexSection,
self.FullPath)
elif CurrentSection == DT.MODEL_EFI_BINARY_FILE:
self.InfBinaryParser(SectionLines,
self.InfBinariesSection,
self.FullPath)
#
# Unknown section type found, raise error.
#
else:
if len(self.SectionHeaderContent) >= 1:
Logger.Error("Parser",
PARSER_ERROR,
ST.ERR_INF_PARSER_UNKNOWN_SECTION,
File=self.FullPath, Line=LineNo,
RaiseError = Logger.IS_RAISE_ERROR)
else:
Logger.Error("Parser",
PARSER_ERROR,
ST.ERR_INF_PARSER_NO_SECTION_ERROR,
File=self.FullPath, Line=LineNo,
RaiseError = Logger.IS_RAISE_ERROR)
return DefineSectionParsedFlag
def _ExtractEventHobBootMod(self, FileLinesList):
SpecialSectionStart = False
CheckLocation = False
GFindSpecialCommentRe = \
re.compile(r"""#(?:\s*)\[(.*?)\](?:.*)""", re.DOTALL)
GFindNewSectionRe2 = \
re.compile(r"""#?(\s*)\[(.*?)\](.*)""", re.DOTALL)
LineNum = 0
Element = []
for Line in FileLinesList:
Line = Line.strip()
LineNum += 1
MatchObject = GFindSpecialCommentRe.search(Line)
if MatchObject:
SpecialSectionStart = True
Element = []
if MatchObject.group(1).upper().startswith("EVENT"):
List = self.EventList
elif MatchObject.group(1).upper().startswith("HOB"):
List = self.HobList
elif MatchObject.group(1).upper().startswith("BOOTMODE"):
List = self.BootModeList
else:
SpecialSectionStart = False
CheckLocation = False
if SpecialSectionStart:
Element.append([Line, LineNum])
List.append(Element)
else:
#
# if currently in special section, try to detect end of current section
#
MatchObject = GFindNewSectionRe2.search(Line)
if SpecialSectionStart:
if MatchObject:
SpecialSectionStart = False
CheckLocation = False
Element = []
elif not Line:
SpecialSectionStart = False
CheckLocation = True
Element = []
else:
if not Line.startswith(DT.TAB_COMMENT_SPLIT):
Logger.Warn("Parser",
ST.WARN_SPECIAL_SECTION_LOCATION_WRONG,
File=self.FullPath, Line=LineNum)
SpecialSectionStart = False
CheckLocation = False
Element = []
else:
Element.append([Line, LineNum])
else:
if CheckLocation:
if MatchObject:
CheckLocation = False
elif Line:
Logger.Warn("Parser",
ST.WARN_SPECIAL_SECTION_LOCATION_WRONG,
File=self.FullPath, Line=LineNum)
CheckLocation = False
if len(self.BootModeList) >= 1:
self.InfSpecialCommentParser(self.BootModeList,
self.InfSpecialCommentSection,
self.FileName,
DT.TYPE_BOOTMODE_SECTION)
if len(self.EventList) >= 1:
self.InfSpecialCommentParser(self.EventList,
self.InfSpecialCommentSection,
self.FileName,
DT.TYPE_EVENT_SECTION)
if len(self.HobList) >= 1:
self.InfSpecialCommentParser(self.HobList,
self.InfSpecialCommentSection,
self.FileName,
DT.TYPE_HOB_SECTION)
## _ProcessLastSection
#
#
def _ProcessLastSection(self, SectionLines, Line, LineNo, CurrentSection):
#
        # If the last line is a section header, it will be discarded.
#
if not (Line.startswith(DT.TAB_SECTION_START) and Line.find(DT.TAB_SECTION_END) > -1):
SectionLines.append((Line, LineNo))
if len(self.SectionHeaderContent) >= 1:
TemSectionName = self.SectionHeaderContent[0][0].upper()
if TemSectionName.upper() not in gINF_SECTION_DEF.keys():
Logger.Error("InfParser",
FORMAT_INVALID,
ST.ERR_INF_PARSER_UNKNOWN_SECTION,
File=self.FullPath,
Line=LineNo,
ExtraData=Line,
RaiseError = Logger.IS_RAISE_ERROR
)
else:
CurrentSection = gINF_SECTION_DEF[TemSectionName]
self.LastSectionHeaderContent = self.SectionHeaderContent
return SectionLines, CurrentSection
## _ConvertSecNameToType
#
#
def _ConvertSecNameToType(SectionName):
SectionType = ''
if SectionName.upper() not in gINF_SECTION_DEF.keys():
SectionType = DT.MODEL_UNKNOWN
else:
SectionType = gINF_SECTION_DEF[SectionName.upper()]
return SectionType
| edk2-master | BaseTools/Source/Python/UPT/Parser/InfParser.py |
## @file
# This file contained the parser for BuildOption sections in INF file
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
InfBuildOptionSectionParser
'''
##
# Import Modules
#
from Library import DataType as DT
from Library import GlobalData
import Logger.Log as Logger
from Logger import StringTable as ST
from Logger.ToolError import FORMAT_INVALID
from Parser.InfParserMisc import InfExpandMacro
from Library.Misc import GetSplitValueList
from Parser.InfParserMisc import IsAsBuildOptionInfo
from Library.Misc import GetHelpStringByRemoveHashKey
from Library.ParserValidate import IsValidFamily
from Library.ParserValidate import IsValidBuildOptionName
from Parser.InfParserMisc import InfParserSectionRoot
class InfBuildOptionSectionParser(InfParserSectionRoot):
## InfBuildOptionParser
#
#
def InfBuildOptionParser(self, SectionString, InfSectionObject, FileName):
BuildOptionList = []
SectionContent = ''
if not GlobalData.gIS_BINARY_INF:
ValueList = []
LineNo = 0
for Line in SectionString:
LineContent = Line[0]
LineNo = Line[1]
TailComments = ''
ReplaceFlag = False
if LineContent.strip() == '':
SectionContent += LineContent + DT.END_OF_LINE
continue
#
# Found Comment
#
if LineContent.strip().startswith(DT.TAB_COMMENT_SPLIT):
SectionContent += LineContent + DT.END_OF_LINE
continue
#
# Find Tail comment.
#
if LineContent.find(DT.TAB_COMMENT_SPLIT) > -1:
TailComments = LineContent[LineContent.find(DT.TAB_COMMENT_SPLIT):]
LineContent = LineContent[:LineContent.find(DT.TAB_COMMENT_SPLIT)]
TokenList = GetSplitValueList(LineContent, DT.TAB_DEQUAL_SPLIT, 1)
if len(TokenList) == 2:
#
# "Replace" type build option
#
TokenList.append('True')
ReplaceFlag = True
else:
TokenList = GetSplitValueList(LineContent, DT.TAB_EQUAL_SPLIT, 1)
#
# "Append" type build option
#
if len(TokenList) == 2:
TokenList.append('False')
else:
Logger.Error('InfParser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_BUILD_OPTION_FORMAT_INVALID,
ExtraData=LineContent,
File=FileName,
Line=LineNo)
ValueList[0:len(TokenList)] = TokenList
#
# Replace with [Defines] section Macro
#
ValueList[0] = InfExpandMacro(ValueList[0], (FileName, LineContent, LineNo),
self.FileLocalMacros, None)
ValueList[1] = InfExpandMacro(ValueList[1], (FileName, LineContent, LineNo),
self.FileLocalMacros, None, True)
EqualString = ''
if not ReplaceFlag:
EqualString = ' = '
else:
EqualString = ' == '
SectionContent += ValueList[0] + EqualString + ValueList[1] + TailComments + DT.END_OF_LINE
Family = GetSplitValueList(ValueList[0], DT.TAB_COLON_SPLIT, 1)
if len(Family) == 2:
if not IsValidFamily(Family[0]):
Logger.Error('InfParser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_BUILD_OPTION_FORMAT_INVALID,
ExtraData=LineContent,
File=FileName,
Line=LineNo)
if not IsValidBuildOptionName(Family[1]):
Logger.Error('InfParser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_BUILD_OPTION_FORMAT_INVALID,
ExtraData=LineContent,
File=FileName,
Line=LineNo)
if len(Family) == 1:
if not IsValidBuildOptionName(Family[0]):
Logger.Error('InfParser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_BUILD_OPTION_FORMAT_INVALID,
ExtraData=LineContent,
File=FileName,
Line=LineNo)
BuildOptionList.append(ValueList)
ValueList = []
continue
else:
BuildOptionList = InfAsBuiltBuildOptionParser(SectionString, FileName)
#
# Current section archs
#
ArchList = []
LastItem = ''
for Item in self.LastSectionHeaderContent:
LastItem = Item
if not (Item[1] == '' or Item[1] == '') and Item[1] not in ArchList:
ArchList.append(Item[1])
InfSectionObject.SetSupArchList(Item[1])
InfSectionObject.SetAllContent(SectionContent)
if not InfSectionObject.SetBuildOptions(BuildOptionList, ArchList, SectionContent):
Logger.Error('InfParser',
FORMAT_INVALID,
                         ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR%("[BuildOptions]"),
File=FileName,
Line=LastItem[3])
## InfBuildOptionParser
#
#
def InfAsBuiltBuildOptionParser(SectionString, FileName):
BuildOptionList = []
#
# AsBuild Binary INF file.
#
AsBuildOptionFlag = False
BuildOptionItem = []
Count = 0
for Line in SectionString:
Count += 1
LineContent = Line[0]
LineNo = Line[1]
#
# The last line
#
if len(SectionString) == Count:
if LineContent.strip().startswith("##") and AsBuildOptionFlag:
BuildOptionList.append(BuildOptionItem)
BuildOptionList.append([GetHelpStringByRemoveHashKey(LineContent)])
elif LineContent.strip().startswith("#") and AsBuildOptionFlag:
BuildOptionInfo = GetHelpStringByRemoveHashKey(LineContent)
BuildOptionItem.append(BuildOptionInfo)
BuildOptionList.append(BuildOptionItem)
else:
if len(BuildOptionItem) > 0:
BuildOptionList.append(BuildOptionItem)
break
if LineContent.strip() == '':
AsBuildOptionFlag = False
continue
if LineContent.strip().startswith("##") and AsBuildOptionFlag:
if len(BuildOptionItem) > 0:
BuildOptionList.append(BuildOptionItem)
BuildOptionItem = []
if not LineContent.strip().startswith("#"):
Logger.Error('InfParser',
FORMAT_INVALID,
ST.ERR_BO_CONTATIN_ASBUILD_AND_COMMON,
File=FileName,
Line=LineNo,
ExtraData=LineContent)
if IsAsBuildOptionInfo(LineContent):
AsBuildOptionFlag = True
continue
if AsBuildOptionFlag:
BuildOptionInfo = GetHelpStringByRemoveHashKey(LineContent)
BuildOptionItem.append(BuildOptionInfo)
return BuildOptionList
| edk2-master | BaseTools/Source/Python/UPT/Parser/InfBuildOptionSectionParser.py |
## @file
# This file contained the parser for [Binaries] sections in INF file
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
InfBinarySectionParser
'''
##
# Import Modules
#
import Logger.Log as Logger
from Logger import StringTable as ST
from Logger.ToolError import FORMAT_INVALID
from Parser.InfParserMisc import InfExpandMacro
from Library import DataType as DT
from Library.Parsing import MacroParser
from Library.Misc import GetSplitValueList
from Object.Parser.InfCommonObject import InfLineCommentObject
from Object.Parser.InfCommonObject import CurrentLine
from Parser.InfParserMisc import InfParserSectionRoot
class InfBinarySectionParser(InfParserSectionRoot):
## InfBinaryParser
#
#
def InfBinaryParser(self, SectionString, InfSectionObject, FileName):
#
# Macro defined in this section
#
SectionMacros = {}
ValueList = []
#
# For UI (UI, SEC_UI, UNI_UI) binaries
# One and only one UI section can be included
#
UiBinaryList = []
#
# For Version (VER, SEC_VER, UNI_VER).
# One and only one VER section on be included
#
VerBinaryList = []
#
# For other common type binaries
#
ComBinaryList = []
StillCommentFalg = False
HeaderComments = []
LineComment = None
AllSectionContent = ''
#
# Parse section content
#
for Line in SectionString:
BinLineContent = Line[0]
BinLineNo = Line[1]
if BinLineContent.strip() == '':
continue
CurrentLineObj = CurrentLine()
CurrentLineObj.FileName = FileName
CurrentLineObj.LineString = BinLineContent
CurrentLineObj.LineNo = BinLineNo
#
# Found Header Comments
#
if BinLineContent.strip().startswith(DT.TAB_COMMENT_SPLIT):
#
# Last line is comments, and this line go on.
#
if StillCommentFalg:
HeaderComments.append(Line)
AllSectionContent += BinLineContent + DT.END_OF_LINE
continue
#
# First time encounter comment
#
else:
#
# Clear original data
#
HeaderComments = []
HeaderComments.append(Line)
AllSectionContent += BinLineContent + DT.END_OF_LINE
StillCommentFalg = True
continue
else:
StillCommentFalg = False
if len(HeaderComments) >= 1:
LineComment = InfLineCommentObject()
LineCommentContent = ''
for Item in HeaderComments:
LineCommentContent += Item[0] + DT.END_OF_LINE
LineComment.SetHeaderComments(LineCommentContent)
#
# Find Tail comment.
#
if BinLineContent.find(DT.TAB_COMMENT_SPLIT) > -1:
TailComments = BinLineContent[BinLineContent.find(DT.TAB_COMMENT_SPLIT):]
BinLineContent = BinLineContent[:BinLineContent.find(DT.TAB_COMMENT_SPLIT)]
if LineComment is None:
LineComment = InfLineCommentObject()
LineComment.SetTailComments(TailComments)
#
# Find Macro
#
MacroDef = MacroParser((BinLineContent, BinLineNo),
FileName,
DT.MODEL_EFI_BINARY_FILE,
self.FileLocalMacros)
if MacroDef[0] is not None:
SectionMacros[MacroDef[0]] = MacroDef[1]
LineComment = None
HeaderComments = []
continue
#
# Replace with Local section Macro and [Defines] section Macro.
#
LineContent = InfExpandMacro(BinLineContent,
(FileName, BinLineContent, BinLineNo),
self.FileLocalMacros,
SectionMacros, True)
AllSectionContent += LineContent + DT.END_OF_LINE
TokenList = GetSplitValueList(LineContent, DT.TAB_VALUE_SPLIT, 1)
ValueList[0:len(TokenList)] = TokenList
#
# Should equal to UI/SEC_UI/UNI_UI
#
ValueList[0] = ValueList[0].strip()
if ValueList[0] == DT.BINARY_FILE_TYPE_UNI_UI or \
ValueList[0] == DT.BINARY_FILE_TYPE_SEC_UI or \
ValueList[0] == DT.BINARY_FILE_TYPE_UI:
if len(ValueList) == 2:
TokenList = GetSplitValueList(ValueList[1],
DT.TAB_VALUE_SPLIT,
2)
NewValueList = []
NewValueList.append(ValueList[0])
for Item in TokenList:
NewValueList.append(Item)
UiBinaryList.append((NewValueList,
LineComment,
CurrentLineObj))
#
# Should equal to VER/SEC_VER/UNI_VER
#
elif ValueList[0] == DT.BINARY_FILE_TYPE_UNI_VER or \
ValueList[0] == DT.BINARY_FILE_TYPE_SEC_VER or \
ValueList[0] == DT.BINARY_FILE_TYPE_VER:
if len(ValueList) == 2:
TokenList = GetSplitValueList(ValueList[1],
DT.TAB_VALUE_SPLIT,
2)
NewValueList = []
NewValueList.append(ValueList[0])
for Item in TokenList:
NewValueList.append(Item)
VerBinaryList.append((NewValueList,
LineComment,
CurrentLineObj))
else:
if len(ValueList) == 2:
if ValueList[0].strip() == 'SUBTYPE_GUID':
TokenList = GetSplitValueList(ValueList[1],
DT.TAB_VALUE_SPLIT,
5)
else:
TokenList = GetSplitValueList(ValueList[1],
DT.TAB_VALUE_SPLIT,
4)
NewValueList = []
NewValueList.append(ValueList[0])
for Item in TokenList:
NewValueList.append(Item)
ComBinaryList.append((NewValueList,
LineComment,
CurrentLineObj))
elif len(ValueList) == 1:
NewValueList = []
NewValueList.append(ValueList[0])
ComBinaryList.append((NewValueList,
LineComment,
CurrentLineObj))
ValueList = []
LineComment = None
TailComments = ''
HeaderComments = []
continue
#
# Current section archs
#
ArchList = []
for Item in self.LastSectionHeaderContent:
if Item[1] not in ArchList:
ArchList.append(Item[1])
InfSectionObject.SetSupArchList(Item[1])
InfSectionObject.SetAllContent(AllSectionContent)
if not InfSectionObject.SetBinary(UiBinaryList,
VerBinaryList,
ComBinaryList,
ArchList):
Logger.Error('InfParser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR%("[Binaries]"),
File=FileName,
Line=Item[3])
| edk2-master | BaseTools/Source/Python/UPT/Parser/InfBinarySectionParser.py |
## @file
# This file contained the parser for [Sources] sections in INF file
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
InfSourceSectionParser
'''
##
# Import Modules
#
import Logger.Log as Logger
from Logger import StringTable as ST
from Logger.ToolError import FORMAT_INVALID
from Parser.InfParserMisc import InfExpandMacro
from Library import DataType as DT
from Library.Parsing import MacroParser
from Library.Misc import GetSplitValueList
from Object.Parser.InfCommonObject import InfLineCommentObject
from Parser.InfParserMisc import InfParserSectionRoot
class InfSourceSectionParser(InfParserSectionRoot):
## InfSourceParser
#
#
def InfSourceParser(self, SectionString, InfSectionObject, FileName):
SectionMacros = {}
ValueList = []
SourceList = []
StillCommentFalg = False
HeaderComments = []
LineComment = None
SectionContent = ''
for Line in SectionString:
SrcLineContent = Line[0]
SrcLineNo = Line[1]
if SrcLineContent.strip() == '':
continue
#
# Found Header Comments
#
if SrcLineContent.strip().startswith(DT.TAB_COMMENT_SPLIT):
#
# Last line is comments, and this line go on.
#
if StillCommentFalg:
HeaderComments.append(Line)
SectionContent += SrcLineContent + DT.END_OF_LINE
continue
#
# First time encounter comment
#
else:
#
# Clear original data
#
HeaderComments = []
HeaderComments.append(Line)
StillCommentFalg = True
SectionContent += SrcLineContent + DT.END_OF_LINE
continue
else:
StillCommentFalg = False
if len(HeaderComments) >= 1:
LineComment = InfLineCommentObject()
LineCommentContent = ''
for Item in HeaderComments:
LineCommentContent += Item[0] + DT.END_OF_LINE
LineComment.SetHeaderComments(LineCommentContent)
#
# Find Tail comment.
#
if SrcLineContent.find(DT.TAB_COMMENT_SPLIT) > -1:
TailComments = SrcLineContent[SrcLineContent.find(DT.TAB_COMMENT_SPLIT):]
SrcLineContent = SrcLineContent[:SrcLineContent.find(DT.TAB_COMMENT_SPLIT)]
if LineComment is None:
LineComment = InfLineCommentObject()
LineComment.SetTailComments(TailComments)
#
# Find Macro
#
Name, Value = MacroParser((SrcLineContent, SrcLineNo),
FileName,
DT.MODEL_EFI_SOURCE_FILE,
self.FileLocalMacros)
if Name is not None:
SectionMacros[Name] = Value
LineComment = None
HeaderComments = []
continue
#
# Replace with Local section Macro and [Defines] section Macro.
#
SrcLineContent = InfExpandMacro(SrcLineContent,
(FileName, SrcLineContent, SrcLineNo),
self.FileLocalMacros,
SectionMacros)
TokenList = GetSplitValueList(SrcLineContent, DT.TAB_VALUE_SPLIT, 4)
ValueList[0:len(TokenList)] = TokenList
#
# Store section content string after MACRO replaced.
#
SectionContent += SrcLineContent + DT.END_OF_LINE
SourceList.append((ValueList, LineComment,
(SrcLineContent, SrcLineNo, FileName)))
ValueList = []
LineComment = None
TailComments = ''
HeaderComments = []
continue
#
# Current section archs
#
ArchList = []
for Item in self.LastSectionHeaderContent:
if Item[1] not in ArchList:
ArchList.append(Item[1])
InfSectionObject.SetSupArchList(Item[1])
InfSectionObject.SetAllContent(SectionContent)
if not InfSectionObject.SetSources(SourceList, Arch = ArchList):
Logger.Error('InfParser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR % ("[Sources]"),
File=FileName,
Line=Item[3])
| edk2-master | BaseTools/Source/Python/UPT/Parser/InfSourceSectionParser.py |
## @file
# This file contained the parser for [Guids], [Ppis], [Protocols] sections in INF file
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
InfGuidPpiProtocolSectionParser
'''
##
# Import Modules
#
import Logger.Log as Logger
from Logger import StringTable as ST
from Logger.ToolError import FORMAT_INVALID
from Parser.InfParserMisc import InfExpandMacro
from Library import DataType as DT
from Library import GlobalData
from Library.Parsing import MacroParser
from Library.Misc import GetSplitValueList
from Library.ParserValidate import IsValidIdString
from Library.ParserValidate import IsValidUserId
from Library.ParserValidate import IsValidArch
from Parser.InfParserMisc import InfParserSectionRoot
class InfGuidPpiProtocolSectionParser(InfParserSectionRoot):
## InfGuidParser
#
#
def InfGuidParser(self, SectionString, InfSectionObject, FileName):
#
# Macro defined in this section
#
SectionMacros = {}
ValueList = []
GuidList = []
CommentsList = []
CurrentLineVar = None
#
# Parse section content
#
for Line in SectionString:
LineContent = Line[0]
LineNo = Line[1]
if LineContent.strip() == '':
CommentsList = []
continue
if LineContent.strip().startswith(DT.TAB_COMMENT_SPLIT):
CommentsList.append(Line)
continue
else:
#
# Encounter a GUID entry
#
if LineContent.find(DT.TAB_COMMENT_SPLIT) > -1:
CommentsList.append((
LineContent[LineContent.find(DT.TAB_COMMENT_SPLIT):],
LineNo))
LineContent = \
LineContent[:LineContent.find(DT.TAB_COMMENT_SPLIT)]
if LineContent != '':
#
# Find Macro
#
Name, Value = MacroParser((LineContent, LineNo),
FileName,
DT.MODEL_EFI_GUID,
self.FileLocalMacros)
if Name is not None:
SectionMacros[Name] = Value
CommentsList = []
ValueList = []
continue
TokenList = GetSplitValueList(LineContent, DT.TAB_VALUE_SPLIT, 1)
ValueList[0:len(TokenList)] = TokenList
#
# Replace with Local section Macro and [Defines] section Macro.
#
ValueList = [InfExpandMacro(Value, (FileName, LineContent, LineNo),
self.FileLocalMacros, SectionMacros, True)
for Value in ValueList]
CurrentLineVar = (LineContent, LineNo, FileName)
if len(ValueList) >= 1:
GuidList.append((ValueList, CommentsList, CurrentLineVar))
CommentsList = []
ValueList = []
continue
#
# Current section archs
#
ArchList = []
LineIndex = -1
for Item in self.LastSectionHeaderContent:
LineIndex = Item[3]
if Item[1] not in ArchList:
ArchList.append(Item[1])
if not InfSectionObject.SetGuid(GuidList, Arch=ArchList):
Logger.Error('InfParser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR % ("[Guid]"),
File=FileName,
Line=LineIndex)
## InfPpiParser
#
#
def InfPpiParser(self, SectionString, InfSectionObject, FileName):
#
# Macro defined in this section
#
SectionMacros = {}
ValueList = []
PpiList = []
CommentsList = []
CurrentLineVar = None
#
# Parse section content
#
for Line in SectionString:
LineContent = Line[0]
LineNo = Line[1]
if LineContent.strip() == '':
CommentsList = []
continue
if LineContent.strip().startswith(DT.TAB_COMMENT_SPLIT):
CommentsList.append(Line)
continue
else:
#
# Encounter a PPI entry
#
if LineContent.find(DT.TAB_COMMENT_SPLIT) > -1:
CommentsList.append((
LineContent[LineContent.find(DT.TAB_COMMENT_SPLIT):],
LineNo))
LineContent = \
LineContent[:LineContent.find(DT.TAB_COMMENT_SPLIT)]
if LineContent != '':
#
# Find Macro
#
Name, Value = MacroParser((LineContent, LineNo),
FileName,
DT.MODEL_EFI_PPI,
self.FileLocalMacros)
if Name is not None:
SectionMacros[Name] = Value
ValueList = []
CommentsList = []
continue
TokenList = GetSplitValueList(LineContent, DT.TAB_VALUE_SPLIT, 1)
ValueList[0:len(TokenList)] = TokenList
#
# Replace with Local section Macro and [Defines] section Macro.
#
ValueList = [InfExpandMacro(Value, (FileName, LineContent, LineNo), self.FileLocalMacros, SectionMacros)
for Value in ValueList]
CurrentLineVar = (LineContent, LineNo, FileName)
if len(ValueList) >= 1:
PpiList.append((ValueList, CommentsList, CurrentLineVar))
ValueList = []
CommentsList = []
continue
#
# Current section archs
#
ArchList = []
LineIndex = -1
for Item in self.LastSectionHeaderContent:
LineIndex = Item[3]
if Item[1] not in ArchList:
ArchList.append(Item[1])
if not InfSectionObject.SetPpi(PpiList, Arch=ArchList):
Logger.Error('InfParser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR % ("[Ppis]"),
File=FileName,
Line=LineIndex)
## InfUserExtensionParser
#
#
def InfUserExtensionParser(self, SectionString, InfSectionObject, FileName):
UserExtensionContent = ''
#
# Parse section content
#
for Line in SectionString:
LineContent = Line[0]
            # The check below is commented out to support a user extension section that has no statements, only the section header in []
# if LineContent.strip() == '':
# continue
UserExtensionContent += LineContent + DT.END_OF_LINE
continue
#
# Current section UserId, IdString
#
IdContentList = []
LastItem = ''
SectionLineNo = None
for Item in self.LastSectionHeaderContent:
UserId = Item[1]
IdString = Item[2]
Arch = Item[3]
SectionLineNo = Item[4]
if not IsValidArch(Arch):
Logger.Error(
'InfParser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID % (Arch),
File=GlobalData.gINF_MODULE_NAME,
Line=SectionLineNo,
ExtraData=None)
if (UserId, IdString, Arch) not in IdContentList:
#
# To check the UserId and IdString valid or not.
#
if not IsValidUserId(UserId):
Logger.Error('InfParser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_UE_SECTION_USER_ID_ERROR % (Item[1]),
File=GlobalData.gINF_MODULE_NAME,
Line=SectionLineNo,
ExtraData=None)
if not IsValidIdString(IdString):
Logger.Error('InfParser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_UE_SECTION_ID_STRING_ERROR % (IdString),
File=GlobalData.gINF_MODULE_NAME, Line=SectionLineNo,
ExtraData=None)
IdContentList.append((UserId, IdString, Arch))
else:
#
# Each UserExtensions section header must have a unique set
# of UserId, IdString and Arch values.
# This means that the same UserId can be used in more than one
# section header, provided the IdString or Arch values are
# different. The same IdString values can be used in more than
# one section header if the UserId or Arch values are
# different. The same UserId and the same IdString can be used
# in a section header if the Arch values are different in each
# of the section headers.
#
Logger.Error('InfParser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_UE_SECTION_DUPLICATE_ERROR % (
IdString),
File=GlobalData.gINF_MODULE_NAME,
Line=SectionLineNo,
ExtraData=None)
LastItem = Item
if not InfSectionObject.SetUserExtension(UserExtensionContent,
IdContent=IdContentList,
LineNo=SectionLineNo):
Logger.Error\
('InfParser', FORMAT_INVALID, \
ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR % ("[UserExtension]"), \
File=FileName, Line=LastItem[4])
def InfProtocolParser(self, SectionString, InfSectionObject, FileName):
#
# Macro defined in this section
#
SectionMacros = {}
ValueList = []
ProtocolList = []
CommentsList = []
CurrentLineVar = None
#
# Parse section content
#
for Line in SectionString:
LineContent = Line[0]
LineNo = Line[1]
if LineContent.strip() == '':
CommentsList = []
continue
if LineContent.strip().startswith(DT.TAB_COMMENT_SPLIT):
CommentsList.append(Line)
continue
else:
#
# Encounter a Protocol entry
#
if LineContent.find(DT.TAB_COMMENT_SPLIT) > -1:
CommentsList.append((
LineContent[LineContent.find(DT.TAB_COMMENT_SPLIT):],
LineNo))
LineContent = \
LineContent[:LineContent.find(DT.TAB_COMMENT_SPLIT)]
if LineContent != '':
#
# Find Macro
#
Name, Value = MacroParser((LineContent, LineNo),
FileName,
DT.MODEL_EFI_PROTOCOL,
self.FileLocalMacros)
if Name is not None:
SectionMacros[Name] = Value
ValueList = []
CommentsList = []
continue
TokenList = GetSplitValueList(LineContent, DT.TAB_VALUE_SPLIT, 1)
ValueList[0:len(TokenList)] = TokenList
#
# Replace with Local section Macro and [Defines] section Macro.
#
ValueList = [InfExpandMacro(Value, (FileName, LineContent, LineNo), self.FileLocalMacros, SectionMacros)
for Value in ValueList]
CurrentLineVar = (LineContent, LineNo, FileName)
if len(ValueList) >= 1:
ProtocolList.append((ValueList, CommentsList, CurrentLineVar))
ValueList = []
CommentsList = []
continue
#
# Current section archs
#
ArchList = []
LineIndex = -1
for Item in self.LastSectionHeaderContent:
LineIndex = Item[3]
if Item[1] not in ArchList:
ArchList.append(Item[1])
if not InfSectionObject.SetProtocol(ProtocolList, Arch=ArchList):
Logger.Error\
('InfParser', FORMAT_INVALID, \
ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR % ("[Protocol]"), \
File=FileName, Line=LineIndex)
| edk2-master | BaseTools/Source/Python/UPT/Parser/InfGuidPpiProtocolSectionParser.py |
## @file
# Python 'Parser' package initialization file.
#
# This file is required to make the Python interpreter treat the directory
# as containing a package.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
Parser
'''
| edk2-master | BaseTools/Source/Python/UPT/Parser/__init__.py |
## @file
# This file provides methods for processing AsBuilt INF files. It is consumed by InfParser.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
'''
InfAsBuiltProcess
'''
## Import modules
#
import os
import re
from Library import GlobalData
import Logger.Log as Logger
from Logger import StringTable as ST
from Logger import ToolError
from Library.StringUtils import GetSplitValueList
from Library.Misc import GetHelpStringByRemoveHashKey
from Library.Misc import ValidFile
from Library.Misc import ProcessLineExtender
from Library.ParserValidate import IsValidPath
from Library.Parsing import MacroParser
from Parser.InfParserMisc import InfExpandMacro
from Library import DataType as DT
## GetLibInstanceInfo
#
# Get the information from Library Instance INF file.
#
# @param String. A string starting with # and followed by an INF file path
# @param WorkSpace. The WorkSpace directory used to combine with the INF file path.
#
# @return GUID, Version
def GetLibInstanceInfo(String, WorkSpace, LineNo, CurrentInfFileName):
FileGuidString = ""
VerString = ""
OriginalString = String
String = String.strip()
if not String:
return None, None
#
# Remove "#" characters at the beginning
#
String = GetHelpStringByRemoveHashKey(String)
String = String.strip()
#
# To deal with library instance specified by GUID and version
#
RegFormatGuidPattern = re.compile("\s*([0-9a-fA-F]){8}-"
"([0-9a-fA-F]){4}-"
"([0-9a-fA-F]){4}-"
"([0-9a-fA-F]){4}-"
"([0-9a-fA-F]){12}\s*")
VersionPattern = re.compile('[\t\s]*\d+(\.\d+)?[\t\s]*')
GuidMatchedObj = RegFormatGuidPattern.search(String)
if String.upper().startswith('GUID') and GuidMatchedObj and 'Version' in String:
VersionStr = String[String.upper().find('VERSION') + 8:]
VersionMatchedObj = VersionPattern.search(VersionStr)
if VersionMatchedObj:
Guid = GuidMatchedObj.group().strip()
Version = VersionMatchedObj.group().strip()
return Guid, Version
#
# To deal with library instance specified by file name
#
FileLinesList = GetFileLineContent(String, WorkSpace, LineNo, OriginalString)
ReFindFileGuidPattern = re.compile("^\s*FILE_GUID\s*=.*$")
ReFindVerStringPattern = re.compile("^\s*VERSION_STRING\s*=.*$")
for Line in FileLinesList:
if ReFindFileGuidPattern.match(Line):
FileGuidString = Line
if ReFindVerStringPattern.match(Line):
VerString = Line
if FileGuidString:
FileGuidString = GetSplitValueList(FileGuidString, '=', 1)[1]
if VerString:
VerString = GetSplitValueList(VerString, '=', 1)[1]
return FileGuidString, VerString
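#
# Illustrative usage sketch added for this document (not part of the tool);
# the GUID value, INF path and argument placeholders below are hypothetical:
#
#   GetLibInstanceInfo('# GUID: 1D8251F1-A1A1-4B8C-9B8C-0C8B2A7E5F10 Version: 1.0', Ws, No, Cur)
#   returns the GUID/version pair parsed directly from the comment, while
#   GetLibInstanceInfo('# MyPkg/Library/FooLib/FooLib.inf', Ws, No, Cur)
#   reads FooLib.inf under the workspace and returns its FILE_GUID and
#   VERSION_STRING values (empty strings when either define is missing).
#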
## GetPackageListInfo
#
# Get the package information from INF file.
#
# @param FileNameString. The INF file path whose [Packages] sections are read
# @param WorkSpace. The WorkSpace directory used to combine with the INF file path.
# @param LineNo. The line number used for error reporting
#
# @return A list of package entries found in the INF's [Packages] sections
def GetPackageListInfo(FileNameString, WorkSpace, LineNo):
PackageInfoList = []
DefineSectionMacros = {}
PackageSectionMacros = {}
FileLinesList = GetFileLineContent(FileNameString, WorkSpace, LineNo, '')
RePackageHeader = re.compile('^\s*\[Packages.*\].*$')
ReDefineHeader = re.compile('^\s*\[Defines].*$')
PackageHederFlag = False
DefineHeaderFlag = False
LineNo = -1
for Line in FileLinesList:
LineNo += 1
Line = Line.strip()
if Line.startswith('['):
PackageHederFlag = False
DefineHeaderFlag = False
if Line.startswith("#"):
continue
if not Line:
continue
#
# Found [Packages] section
#
if RePackageHeader.match(Line):
PackageHederFlag = True
continue
#
# Found [Defines] section
#
if ReDefineHeader.match(Line):
DefineHeaderFlag = True
continue
if DefineHeaderFlag:
#
# Find Macro
#
Name, Value = MacroParser((Line, LineNo),
FileNameString,
DT.MODEL_META_DATA_HEADER,
DefineSectionMacros)
if Name is not None:
DefineSectionMacros[Name] = Value
continue
if PackageHederFlag:
#
# Find Macro
#
Name, Value = MacroParser((Line, LineNo),
FileNameString,
DT.MODEL_META_DATA_PACKAGE,
DefineSectionMacros)
if Name is not None:
PackageSectionMacros[Name] = Value
continue
#
# Replace with Local section Macro and [Defines] section Macro.
#
Line = InfExpandMacro(Line, (FileNameString, Line, LineNo), DefineSectionMacros, PackageSectionMacros, True)
Line = GetSplitValueList(Line, "#", 1)[0]
Line = GetSplitValueList(Line, "|", 1)[0]
PackageInfoList.append(Line)
return PackageInfoList
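#
# Illustrative usage sketch added for this document (the INF path and its
# [Packages] content are hypothetical):
#
#   GetPackageListInfo('MyPkg/Drivers/Foo/Foo.inf', WorkSpace, -1)
#   could return ['MdePkg/MdePkg.dec', 'MdeModulePkg/MdeModulePkg.dec'],
#   i.e. every entry of the INF's [Packages] sections with DEFINE macros
#   expanded and any trailing comment or '|' field removed.
#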
def GetFileLineContent(FileName, WorkSpace, LineNo, OriginalString):
if not LineNo:
LineNo = -1
#
# Validate file name exist.
#
FullFileName = os.path.normpath(os.path.realpath(os.path.join(WorkSpace, FileName)))
if not (ValidFile(FullFileName)):
return []
#
# Validate file exist/format.
#
if not IsValidPath(FileName, WorkSpace):
return []
FileLinesList = []
try:
FullFileName = FullFileName.replace('\\', '/')
Inputfile = open(FullFileName, "r")
try:
FileLinesList = Inputfile.readlines()
except BaseException:
Logger.Error("InfParser", ToolError.FILE_READ_FAILURE, ST.ERR_FILE_OPEN_FAILURE, File=FullFileName)
finally:
Inputfile.close()
except BaseException:
Logger.Error("InfParser",
ToolError.FILE_READ_FAILURE,
ST.ERR_FILE_OPEN_FAILURE,
File=FullFileName)
FileLinesList = ProcessLineExtender(FileLinesList)
return FileLinesList
##
# Get all INF files from current workspace
#
#
def GetInfsFromWorkSpace(WorkSpace):
InfFiles = []
for top, dirs, files in os.walk(WorkSpace):
dirs = dirs # just for pylint
for File in files:
if File.upper().endswith(".INF"):
InfFiles.append(os.path.join(top, File))
return InfFiles
##
# Get GUID and version from library instance file
#
#
def GetGuidVerFormLibInstance(Guid, Version, WorkSpace, CurrentInfFileName):
for InfFile in GetInfsFromWorkSpace(WorkSpace):
try:
if InfFile.strip().upper() == CurrentInfFileName.strip().upper():
continue
InfFile = InfFile.replace('\\', '/')
if InfFile not in GlobalData.gLIBINSTANCEDICT:
InfFileObj = open(InfFile, "r")
GlobalData.gLIBINSTANCEDICT[InfFile] = InfFileObj
else:
InfFileObj = GlobalData.gLIBINSTANCEDICT[InfFile]
except BaseException:
Logger.Error("InfParser",
ToolError.FILE_READ_FAILURE,
ST.ERR_FILE_OPEN_FAILURE,
File=InfFile)
try:
FileLinesList = InfFileObj.readlines()
FileLinesList = ProcessLineExtender(FileLinesList)
# Initialize so the checks below do not reference unbound names when the
# INF file defines neither FILE_GUID nor VERSION_STRING.
FileGuidString = ''
VerString = ''
ReFindFileGuidPattern = re.compile("^\s*FILE_GUID\s*=.*$")
ReFindVerStringPattern = re.compile("^\s*VERSION_STRING\s*=.*$")
for Line in FileLinesList:
if ReFindFileGuidPattern.match(Line):
FileGuidString = Line
if ReFindVerStringPattern.match(Line):
VerString = Line
if FileGuidString:
FileGuidString = GetSplitValueList(FileGuidString, '=', 1)[1]
if VerString:
VerString = GetSplitValueList(VerString, '=', 1)[1]
if FileGuidString.strip().upper() == Guid.upper() and \
VerString.strip().upper() == Version.upper():
return Guid, Version
except BaseException:
Logger.Error("InfParser", ToolError.FILE_READ_FAILURE, ST.ERR_FILE_OPEN_FAILURE, File=InfFile)
finally:
InfFileObj.close()
return '', ''
| edk2-master | BaseTools/Source/Python/UPT/Parser/InfAsBuiltProcess.py |
## @file
# This file contains miscellaneous functions for the INF parser
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
InfParserMisc
'''
##
# Import Modules
#
import re
from Library import DataType as DT
from Library.StringUtils import gMACRO_PATTERN
from Library.StringUtils import ReplaceMacro
from Object.Parser.InfMisc import ErrorInInf
from Logger.StringTable import ERR_MARCO_DEFINITION_MISS_ERROR
#
# Global variable
#
#
# Sections can exist in INF file
#
gINF_SECTION_DEF = {
DT.TAB_UNKNOWN.upper() : DT.MODEL_UNKNOWN,
DT.TAB_HEADER.upper() : DT.MODEL_META_DATA_FILE_HEADER,
DT.TAB_INF_DEFINES.upper() : DT.MODEL_META_DATA_DEFINE,
DT.TAB_BUILD_OPTIONS.upper() : DT.MODEL_META_DATA_BUILD_OPTION,
DT.TAB_LIBRARY_CLASSES.upper() : DT.MODEL_EFI_LIBRARY_CLASS,
DT.TAB_PACKAGES.upper() : DT.MODEL_META_DATA_PACKAGE,
DT.TAB_INF_FIXED_PCD.upper() : DT.MODEL_PCD_FIXED_AT_BUILD,
DT.TAB_INF_PATCH_PCD.upper() : DT.MODEL_PCD_PATCHABLE_IN_MODULE,
DT.TAB_INF_FEATURE_PCD.upper() : DT.MODEL_PCD_FEATURE_FLAG,
DT.TAB_INF_PCD_EX.upper() : DT.MODEL_PCD_DYNAMIC_EX,
DT.TAB_INF_PCD.upper() : DT.MODEL_PCD_DYNAMIC,
DT.TAB_SOURCES.upper() : DT.MODEL_EFI_SOURCE_FILE,
DT.TAB_GUIDS.upper() : DT.MODEL_EFI_GUID,
DT.TAB_PROTOCOLS.upper() : DT.MODEL_EFI_PROTOCOL,
DT.TAB_PPIS.upper() : DT.MODEL_EFI_PPI,
DT.TAB_DEPEX.upper() : DT.MODEL_EFI_DEPEX,
DT.TAB_BINARIES.upper() : DT.MODEL_EFI_BINARY_FILE,
DT.TAB_USER_EXTENSIONS.upper() : DT.MODEL_META_DATA_USER_EXTENSION
#
# EDK1 section
# TAB_NMAKE.upper() : MODEL_META_DATA_NMAKE
#
}
## InfExpandMacro
#
# Expand MACRO definitions with MACROs defined in the [Defines] section and the specific section.
# MACROs defined in the specific section have higher priority and are expanded first.
#
# @param LineInfo Contains FileName, LineContent, LineNo
# @param GlobalMacros MACROs defined in the INF [Defines] section
# @param SectionMacros MACROs defined in the specific INF section
# @param Flag If set to True, macros inside quoted strings are skipped
#
def InfExpandMacro(Content, LineInfo, GlobalMacros=None, SectionMacros=None, Flag=False):
if GlobalMacros is None:
GlobalMacros = {}
if SectionMacros is None:
SectionMacros = {}
FileName = LineInfo[0]
LineContent = LineInfo[1]
LineNo = LineInfo[2]
# Don't expand macros in comments
if LineContent.strip().startswith("#"):
return Content
NewLineInfo = (FileName, LineNo, LineContent)
#
# First, replace MACROs with values defined in the specific section
#
Content = ReplaceMacro (Content,
SectionMacros,
False,
(LineContent, LineNo),
FileName,
Flag)
#
# Then replace MACROs with values defined in the [Defines] section
#
Content = ReplaceMacro (Content,
GlobalMacros,
False,
(LineContent, LineNo),
FileName,
Flag)
MacroUsed = gMACRO_PATTERN.findall(Content)
#
# no macro found in String, stop replacing
#
if len(MacroUsed) == 0:
return Content
else:
for Macro in MacroUsed:
gQuotedMacro = re.compile(".*\".*\$\(%s\).*\".*"%(Macro))
if not gQuotedMacro.match(Content):
#
# There are still MACROs that cannot be expanded.
#
ErrorInInf (ERR_MARCO_DEFINITION_MISS_ERROR,
LineInfo=NewLineInfo)
return Content
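#
# Illustrative usage sketch added for this document (macro names and values
# are hypothetical):
#
#   InfExpandMacro('$(OUT_DIR)/$(NAME).efi',
#                  ('Foo.inf', '$(OUT_DIR)/$(NAME).efi', 10),
#                  GlobalMacros={'OUT_DIR': 'Build'},
#                  SectionMacros={'NAME': 'Foo'})
#   returns 'Build/Foo.efi'. Section-scope macros take priority over
#   [Defines] macros, and any macro left unresolved outside a quoted string
#   is reported as an error.
#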
## IsBinaryInf
#
# Judge whether the INF file is Binary INF or Common INF
#
# @param FileLineList A list contain all INF file content.
#
def IsBinaryInf(FileLineList):
if not FileLineList:
return False
ReIsSourcesSection = re.compile("^\s*\[Sources.*\]\s.*$", re.IGNORECASE)
ReIsBinarySection = re.compile("^\s*\[Binaries.*\]\s.*$", re.IGNORECASE)
BinarySectionFoundFlag = False
for Line in FileLineList:
if ReIsSourcesSection.match(Line):
return False
if ReIsBinarySection.match(Line):
BinarySectionFoundFlag = True
if BinarySectionFoundFlag:
return True
return False
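#
# Illustrative example added for this document (the line lists are hypothetical):
#
#   IsBinaryInf(['[Defines] ', '[Binaries.IA32] ', 'PE32|Foo.efi'])  -> True
#   IsBinaryInf(['[Defines] ', '[Sources] ', 'Foo.c'])               -> False
#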
## IsLibInstanceInfo
#
# Judge whether the string contains the ## @LIB_INSTANCES marker.
#
# @param String
#
# @return Flag
#
def IsLibInstanceInfo(String):
ReIsLibInstance = re.compile("^\s*##\s*@LIB_INSTANCES\s*$")
if ReIsLibInstance.match(String):
return True
else:
return False
## IsAsBuildOptionInfo
#
# Judge whether the string contains the ## @AsBuilt marker.
#
# @param String
#
# @return Flag
#
def IsAsBuildOptionInfo(String):
ReIsAsBuildInstance = re.compile("^\s*##\s*@AsBuilt\s*$")
if ReIsAsBuildInstance.match(String):
return True
else:
return False
class InfParserSectionRoot(object):
def __init__(self):
#
# Macros defined in [Define] section are file scope global
#
self.FileLocalMacros = {}
#
# Current Section Header content.
#
self.SectionHeaderContent = []
#
# Last time Section Header content.
#
self.LastSectionHeaderContent = []
self.FullPath = ''
self.InfDefSection = None
self.InfBuildOptionSection = None
self.InfLibraryClassSection = None
self.InfPackageSection = None
self.InfPcdSection = None
self.InfSourcesSection = None
self.InfUserExtensionSection = None
self.InfProtocolSection = None
self.InfPpiSection = None
self.InfGuidSection = None
self.InfDepexSection = None
self.InfPeiDepexSection = None
self.InfDxeDepexSection = None
self.InfSmmDepexSection = None
self.InfBinariesSection = None
self.InfHeader = None
self.InfSpecialCommentSection = None
| edk2-master | BaseTools/Source/Python/UPT/Parser/InfParserMisc.py |
## @file
# This file defines helper classes and functions for the DEC parser
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
'''
DecParserMisc
'''
## Import modules
#
import os
import Logger.Log as Logger
from Logger.ToolError import FILE_PARSE_FAILURE
from Logger import StringTable as ST
from Library.DataType import TAB_COMMENT_SPLIT
from Library.DataType import TAB_COMMENT_EDK1_SPLIT
from Library.ExpressionValidate import IsValidBareCString
from Library.ParserValidate import IsValidCFormatGuid
from Library.ExpressionValidate import IsValidFeatureFlagExp
from Library.ExpressionValidate import IsValidLogicalExpr
from Library.ExpressionValidate import IsValidStringTest
from Library.Misc import CheckGuidRegFormat
TOOL_NAME = 'DecParser'
VERSION_PATTERN = '[0-9]+(\.[0-9]+)?'
CVAR_PATTERN = '[_a-zA-Z][a-zA-Z0-9_]*'
PCD_TOKEN_PATTERN = '(0[xX]0*[a-fA-F0-9]{1,8})|([0-9]+)'
MACRO_PATTERN = '[A-Z][_A-Z0-9]*'
## FileContent
# Class to hold DEC file information
#
class FileContent:
def __init__(self, Filename, FileContent2):
self.Filename = Filename
self.PackagePath, self.PackageFile = os.path.split(Filename)
self.LineIndex = 0
self.CurrentLine = ''
self.NextLine = ''
self.HeadComment = []
self.TailComment = []
self.CurrentScope = None
self.Content = FileContent2
self.Macros = {}
self.FileLines = len(FileContent2)
def GetNextLine(self):
if self.LineIndex >= self.FileLines:
return ''
Line = self.Content[self.LineIndex]
self.LineIndex += 1
return Line
def UndoNextLine(self):
if self.LineIndex > 0:
self.LineIndex -= 1
def ResetNext(self):
self.HeadComment = []
self.TailComment = []
self.NextLine = ''
def SetNext(self, Line, HeadComment, TailComment):
self.NextLine = Line
self.HeadComment = HeadComment
self.TailComment = TailComment
def IsEndOfFile(self):
return self.LineIndex >= self.FileLines
## StripRoot
#
# Strip root path
#
# @param Root: Root must be absolute path
# @param Path: Path to be stripped
#
def StripRoot(Root, Path):
OrigPath = Path
Root = os.path.normpath(Root)
Path = os.path.normpath(Path)
if not os.path.isabs(Root):
return OrigPath
if Path.startswith(Root):
Path = Path[len(Root):]
if Path and Path[0] == os.sep:
Path = Path[1:]
return Path
return OrigPath
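#
# Illustrative example added for this document (POSIX-style paths assumed,
# values hypothetical):
#
#   StripRoot('/work/edk2', '/work/edk2/MdePkg/MdePkg.dec') -> 'MdePkg/MdePkg.dec'
#   StripRoot('relative/root', '/work/edk2/MdePkg/MdePkg.dec') -> path returned unchanged
#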
## CleanString
#
# Split comments in a string
# Remove spaces
#
# @param Line: The string to be cleaned
# @param CommentCharacter: Comment char, used to ignore comment content,
# default is DataType.TAB_COMMENT_SPLIT
#
def CleanString(Line, CommentCharacter=TAB_COMMENT_SPLIT, \
AllowCppStyleComment=False):
#
# remove whitespace
#
Line = Line.strip()
#
# Replace EDK1's comment character
#
if AllowCppStyleComment:
Line = Line.replace(TAB_COMMENT_EDK1_SPLIT, CommentCharacter)
#
# separate comments and statements
#
Comment = ''
InQuote = False
for Index in range(0, len(Line)):
if Line[Index] == '"':
InQuote = not InQuote
continue
if Line[Index] == CommentCharacter and not InQuote:
Comment = Line[Index:].strip()
Line = Line[0:Index].strip()
break
return Line, Comment
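#
# Illustrative example added for this document (the statement is hypothetical):
#
#   CleanString('  gTokenSpaceGuid.PcdFoo|TRUE # feature flag  ')
#   returns ('gTokenSpaceGuid.PcdFoo|TRUE', '# feature flag'), i.e. the
#   statement and its comment split apart; '#' characters inside quoted
#   strings are left untouched.
#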
## IsValidNumValUint8
#
# Check if Token is NumValUint8: <NumValUint8> ::= {<ShortNum>} {<UINT8>} {<Expression>}
#
# @param Token: Token to be checked
#
def IsValidNumValUint8(Token):
Valid = True
Cause = ""
TokenValue = None
Token = Token.strip()
if Token.lower().startswith('0x'):
Base = 16
else:
Base = 10
try:
TokenValue = int(Token, Base)
except BaseException:
Valid, Cause = IsValidLogicalExpr(Token, True)
if Cause:
pass
if not Valid:
return False
if TokenValue and (TokenValue < 0 or TokenValue > 0xFF):
return False
else:
return True
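#
# Illustrative example added for this document:
#
#   IsValidNumValUint8('0x1F') -> True
#   IsValidNumValUint8('300')  -> False  (exceeds the UINT8 range)
#   Non-numeric tokens fall back to the logical-expression check.
#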
## IsValidNList
#
# Check if Value has the format of <NumValUint8> ["," <NumValUint8>]{0,}
# <NumValUint8> ::= {<ShortNum>} {<UINT8>} {<Expression>}
#
# @param Value: Value to be checked
#
def IsValidNList(Value):
Par = ParserHelper(Value)
if Par.End():
return False
while not Par.End():
Token = Par.GetToken(',')
if not IsValidNumValUint8(Token):
return False
if Par.Expect(','):
if Par.End():
return False
continue
else:
break
return Par.End()
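#
# Illustrative example added for this document:
#
#   IsValidNList('0x01, 0x02, 0x03') -> True
#   IsValidNList('0x01, 0x02,')      -> False  (trailing comma with no value)
#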
## IsValidCArray
#
# Check whether Array is a valid C array
#
# @param Array: The input Array
#
def IsValidCArray(Array):
Par = ParserHelper(Array)
if not Par.Expect('{'):
return False
if Par.End():
return False
while not Par.End():
Token = Par.GetToken(',}')
#
# ShortNum, UINT8, Expression
#
if not IsValidNumValUint8(Token):
return False
if Par.Expect(','):
if Par.End():
return False
continue
elif Par.Expect('}'):
#
# End of C array
#
break
else:
return False
return Par.End()
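#
# Illustrative example added for this document:
#
#   IsValidCArray('{0x01, 0x02, 0x03}') -> True
#   IsValidCArray('{0x01, 0x1FF}')      -> False  (0x1FF does not fit in UINT8)
#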
## IsValidPcdDatum
#
# Check whether the PCD datum (type and value) is valid
#
# @param Type: The pcd Type
# @param Value: The pcd Value
#
def IsValidPcdDatum(Type, Value):
if not Value:
return False, ST.ERR_DECPARSE_PCD_VALUE_EMPTY
Valid = True
Cause = ""
if Type not in ["UINT8", "UINT16", "UINT32", "UINT64", "VOID*", "BOOLEAN"]:
return False, ST.ERR_DECPARSE_PCD_TYPE
if Type == "VOID*":
if not ((Value.startswith('L"') or Value.startswith('"') and \
Value.endswith('"'))
or (IsValidCArray(Value)) or (IsValidCFormatGuid(Value)) \
or (IsValidNList(Value)) or (CheckGuidRegFormat(Value))
):
return False, ST.ERR_DECPARSE_PCD_VOID % (Value, Type)
RealString = Value[Value.find('"') + 1 :-1]
if RealString:
if not IsValidBareCString(RealString):
return False, ST.ERR_DECPARSE_PCD_VOID % (Value, Type)
elif Type == 'BOOLEAN':
if Value in ['TRUE', 'FALSE', 'true', 'false', 'True', 'False',
'0x1', '0x01', '1', '0x0', '0x00', '0']:
return True, ""
Valid, Cause = IsValidStringTest(Value, True)
if not Valid:
Valid, Cause = IsValidFeatureFlagExp(Value, True)
if not Valid:
return False, Cause
else:
if Value and (Value[0] == '-' or Value[0] == '+'):
return False, ST.ERR_DECPARSE_PCD_INT_NEGTIVE % (Value, Type)
try:
StrVal = Value
if Value and not Value.startswith('0x') \
and not Value.startswith('0X'):
Value = Value.lstrip('0')
if not Value:
return True, ""
Value = int(Value, 0)
MAX_VAL_TYPE = {"BOOLEAN": 0x01, 'UINT8': 0xFF, 'UINT16': 0xFFFF, 'UINT32': 0xFFFFFFFF,
'UINT64': 0xFFFFFFFFFFFFFFFF}
if Value > MAX_VAL_TYPE[Type]:
return False, ST.ERR_DECPARSE_PCD_INT_EXCEED % (StrVal, Type)
except BaseException:
Valid, Cause = IsValidLogicalExpr(Value, True)
if not Valid:
return False, Cause
return True, ""
## ParserHelper
#
class ParserHelper:
def __init__(self, String, File=''):
self._String = String
self._StrLen = len(String)
self._Index = 0
self._File = File
## End
#
# End
#
def End(self):
self.__SkipWhitespace()
return self._Index >= self._StrLen
## __SkipWhitespace
#
# Skip whitespace
#
def __SkipWhitespace(self):
for Char in self._String[self._Index:]:
if Char not in ' \t':
break
self._Index += 1
## Expect
#
# Expect char in string
#
# @param ExpectChar: char expected in index of string
#
def Expect(self, ExpectChar):
self.__SkipWhitespace()
for Char in self._String[self._Index:]:
if Char != ExpectChar:
return False
else:
self._Index += 1
return True
#
# Index out of bounds of String
#
return False
## GetToken
#
# Get token until a char in StopChar is encountered; leading whitespace is consumed
#
# @param StopChar: Stop collecting the token when a char in StopChar is encountered
# @param SkipPair: Can only be ' or "; StopChars inside a SkipPair-quoted span are skipped
#
def GetToken(self, StopChar='.,|\t ', SkipPair='"'):
self.__SkipWhitespace()
PreIndex = self._Index
InQuote = False
LastChar = ''
for Char in self._String[self._Index:]:
if Char == SkipPair and LastChar != '\\':
InQuote = not InQuote
if Char in StopChar and not InQuote:
break
self._Index += 1
if Char == '\\' and LastChar == '\\':
LastChar = ''
else:
LastChar = Char
return self._String[PreIndex:self._Index]
## AssertChar
#
# Assert that the char at the current index of the string is AssertChar,
# otherwise report an error message
#
# @param AssertChar: AssertChar
# @param ErrorString: ErrorString
# @param ErrorLineNum: ErrorLineNum
#
def AssertChar(self, AssertChar, ErrorString, ErrorLineNum):
if not self.Expect(AssertChar):
Logger.Error(TOOL_NAME, FILE_PARSE_FAILURE, File=self._File,
Line=ErrorLineNum, ExtraData=ErrorString)
## AssertEnd
#
# @param ErrorString: ErrorString
# @param ErrorLineNum: ErrorLineNum
#
def AssertEnd(self, ErrorString, ErrorLineNum):
self.__SkipWhitespace()
if self._Index != self._StrLen:
Logger.Error(TOOL_NAME, FILE_PARSE_FAILURE, File=self._File,
Line=ErrorLineNum, ExtraData=ErrorString)
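#
# Illustrative ParserHelper usage sketch added for this document (the section
# string is hypothetical):
#
#   Par = ParserHelper('Guids.common.Foo')
#   Par.GetToken('.')  -> 'Guids'
#   Par.Expect('.')    -> True
#   Par.GetToken('.')  -> 'common'
#   Par.Expect('.')    -> True
#   Par.GetToken('.')  -> 'Foo'
#   Par.End()          -> True
#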
| edk2-master | BaseTools/Source/Python/UPT/Parser/DecParserMisc.py |
## @file
# This file contains the parser for [Libraries] sections in INF files
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
InfLibrarySectionParser
'''
##
# Import Modules
#
import Logger.Log as Logger
from Logger import StringTable as ST
from Logger.ToolError import FORMAT_INVALID
from Parser.InfParserMisc import InfExpandMacro
from Library import DataType as DT
from Library.Parsing import MacroParser
from Library.Misc import GetSplitValueList
from Object.Parser.InfCommonObject import InfLineCommentObject
from Library import GlobalData
from Parser.InfParserMisc import IsLibInstanceInfo
from Parser.InfAsBuiltProcess import GetLibInstanceInfo
from Parser.InfParserMisc import InfParserSectionRoot
class InfLibrarySectionParser(InfParserSectionRoot):
## InfLibraryParser
#
#
def InfLibraryParser(self, SectionString, InfSectionObject, FileName):
#
# For Common INF file
#
if not GlobalData.gIS_BINARY_INF:
#
# Macro defined in this section
#
SectionMacros = {}
ValueList = []
LibraryList = []
LibStillCommentFalg = False
LibHeaderComments = []
LibLineComment = None
#
# Parse section content
#
for Line in SectionString:
LibLineContent = Line[0]
LibLineNo = Line[1]
if LibLineContent.strip() == '':
continue
#
# Found Header Comments
#
if LibLineContent.strip().startswith(DT.TAB_COMMENT_SPLIT):
#
# The previous line was a comment and this line continues it.
#
if LibStillCommentFalg:
LibHeaderComments.append(Line)
continue
#
# First time encountering a comment
#
else:
#
# Clear original data
#
LibHeaderComments = []
LibHeaderComments.append(Line)
LibStillCommentFalg = True
continue
else:
LibStillCommentFalg = False
if len(LibHeaderComments) >= 1:
LibLineComment = InfLineCommentObject()
LineCommentContent = ''
for Item in LibHeaderComments:
LineCommentContent += Item[0] + DT.END_OF_LINE
LibLineComment.SetHeaderComments(LineCommentContent)
#
# Find Tail comment.
#
if LibLineContent.find(DT.TAB_COMMENT_SPLIT) > -1:
LibTailComments = LibLineContent[LibLineContent.find(DT.TAB_COMMENT_SPLIT):]
LibLineContent = LibLineContent[:LibLineContent.find(DT.TAB_COMMENT_SPLIT)]
if LibLineComment is None:
LibLineComment = InfLineCommentObject()
LibLineComment.SetTailComments(LibTailComments)
#
# Find Macro
#
Name, Value = MacroParser((LibLineContent, LibLineNo),
FileName,
DT.MODEL_EFI_LIBRARY_CLASS,
self.FileLocalMacros)
if Name is not None:
SectionMacros[Name] = Value
LibLineComment = None
LibHeaderComments = []
continue
TokenList = GetSplitValueList(LibLineContent, DT.TAB_VALUE_SPLIT, 1)
ValueList[0:len(TokenList)] = TokenList
#
# Replace with Local section Macro and [Defines] section Macro.
#
ValueList = [InfExpandMacro(Value, (FileName, LibLineContent, LibLineNo),
self.FileLocalMacros, SectionMacros, True)
for Value in ValueList]
LibraryList.append((ValueList, LibLineComment,
(LibLineContent, LibLineNo, FileName)))
ValueList = []
LibLineComment = None
LibTailComments = ''
LibHeaderComments = []
continue
#
# Current section archs
#
KeyList = []
for Item in self.LastSectionHeaderContent:
if (Item[1], Item[2]) not in KeyList:
KeyList.append((Item[1], Item[2]))
if not InfSectionObject.SetLibraryClasses(LibraryList, KeyList=KeyList):
Logger.Error('InfParser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR % ("[Library]"),
File=FileName,
Line=Item[3])
#
# For Binary INF
#
else:
self.InfAsBuiltLibraryParser(SectionString, InfSectionObject, FileName)
def InfAsBuiltLibraryParser(self, SectionString, InfSectionObject, FileName):
LibraryList = []
LibInsFlag = False
for Line in SectionString:
LineContent = Line[0]
LineNo = Line[1]
if LineContent.strip() == '':
LibInsFlag = False
continue
if not LineContent.strip().startswith("#"):
Logger.Error('InfParser',
FORMAT_INVALID,
ST.ERR_LIB_CONTATIN_ASBUILD_AND_COMMON,
File=FileName,
Line=LineNo,
ExtraData=LineContent)
if IsLibInstanceInfo(LineContent):
LibInsFlag = True
continue
if LibInsFlag:
LibGuid, LibVer = GetLibInstanceInfo(LineContent, GlobalData.gWORKSPACE, LineNo, FileName)
#
# If the VERSION_STRING is missing from the INF file, the tool should default to "0".
#
if LibVer == '':
LibVer = '0'
if LibGuid != '':
if (LibGuid, LibVer) not in LibraryList:
LibraryList.append((LibGuid, LibVer))
#
# Current section archs
#
KeyList = []
Item = ['', '', '']
for Item in self.LastSectionHeaderContent:
if (Item[1], Item[2]) not in KeyList:
KeyList.append((Item[1], Item[2]))
if not InfSectionObject.SetLibraryClasses(LibraryList, KeyList=KeyList):
Logger.Error('InfParser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR % ("[Library]"),
File=FileName,
Line=Item[3])
| edk2-master | BaseTools/Source/Python/UPT/Parser/InfLibrarySectionParser.py |
## @file
# This file is used to parse DEC files. It is consumed by DecParser.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
'''
DecParser
'''
## Import modules
#
import Logger.Log as Logger
from Logger.ToolError import FILE_PARSE_FAILURE
from Logger.ToolError import FILE_OPEN_FAILURE
from Logger import StringTable as ST
from Logger.ToolError import FORMAT_INVALID
import Library.DataType as DT
from Library.ParserValidate import IsValidToken
from Library.ParserValidate import IsValidPath
from Library.ParserValidate import IsValidCFormatGuid
from Library.ParserValidate import IsValidIdString
from Library.ParserValidate import IsValidUserId
from Library.ParserValidate import IsValidArch
from Library.ParserValidate import IsValidWord
from Library.ParserValidate import IsValidDecVersionVal
from Parser.DecParserMisc import TOOL_NAME
from Parser.DecParserMisc import CleanString
from Parser.DecParserMisc import IsValidPcdDatum
from Parser.DecParserMisc import ParserHelper
from Parser.DecParserMisc import StripRoot
from Parser.DecParserMisc import VERSION_PATTERN
from Parser.DecParserMisc import CVAR_PATTERN
from Parser.DecParserMisc import PCD_TOKEN_PATTERN
from Parser.DecParserMisc import MACRO_PATTERN
from Parser.DecParserMisc import FileContent
from Object.Parser.DecObject import _DecComments
from Object.Parser.DecObject import DecDefineObject
from Object.Parser.DecObject import DecDefineItemObject
from Object.Parser.DecObject import DecIncludeObject
from Object.Parser.DecObject import DecIncludeItemObject
from Object.Parser.DecObject import DecLibraryclassObject
from Object.Parser.DecObject import DecLibraryclassItemObject
from Object.Parser.DecObject import DecGuidObject
from Object.Parser.DecObject import DecPpiObject
from Object.Parser.DecObject import DecProtocolObject
from Object.Parser.DecObject import DecGuidItemObject
from Object.Parser.DecObject import DecUserExtensionObject
from Object.Parser.DecObject import DecUserExtensionItemObject
from Object.Parser.DecObject import DecPcdObject
from Object.Parser.DecObject import DecPcdItemObject
from Library.Misc import GuidStructureStringToGuidString
from Library.Misc import CheckGuidRegFormat
from Library.StringUtils import ReplaceMacro
from Library.StringUtils import GetSplitValueList
from Library.StringUtils import gMACRO_PATTERN
from Library.StringUtils import ConvertSpecialChar
from Library.CommentParsing import ParsePcdErrorCode
##
# _DecBase class for parsing
#
class _DecBase:
def __init__(self, RawData):
self._RawData = RawData
self._ItemDict = {}
self._LocalMacro = {}
#
# Data parsed by 'self' is saved to this object
#
self.ItemObject = None
def GetDataObject(self):
return self.ItemObject
def GetLocalMacro(self):
return self._LocalMacro
## BlockStart
#
# Called if a new section starts
#
def BlockStart(self):
self._LocalMacro = {}
## _CheckReDefine
#
# @param Key: Key to check for multiple definitions
# @param Scope: Format: [[SectionName, Arch], ...].
# If Scope is None, the current global scope is used
#
def _CheckReDefine(self, Key, Scope = None):
if not Scope:
Scope = self._RawData.CurrentScope
return
SecArch = []
#
# Copy scope to SecArch, avoid Scope be changed outside
#
SecArch[0:1] = Scope[:]
if Key not in self._ItemDict:
self._ItemDict[Key] = [[SecArch, self._RawData.LineIndex]]
return
for Value in self._ItemDict[Key]:
for SubValue in Scope:
#
# If the current scope is the common section
#
if SubValue[-1] == 'COMMON':
for Other in Value[0]:
# Key in common cannot be redefined in other arches
# [:-1] means stripping arch info
if Other[:-1] == SubValue[:-1]:
self._LoggerError(ST.ERR_DECPARSE_REDEFINE % (Key, Value[1]))
return
continue
CommonScope = []
CommonScope[0:1] = SubValue
CommonScope[-1] = 'COMMON'
#
# This key cannot be redefined if it is already defined in COMMON or in the same arch
#
if SubValue in Value[0] or CommonScope in Value[0]:
self._LoggerError(ST.ERR_DECPARSE_REDEFINE % (Key, Value[1]))
return
self._ItemDict[Key].append([SecArch, self._RawData.LineIndex])
## CheckRequiredFields
# Some sections need to check that certain fields exist, the define section for example.
# Derived classes can re-implement this; the top parser calls it after all parsing is done.
#
def CheckRequiredFields(self):
if self._RawData:
pass
return True
## IsItemRequired
# In DEC spec, sections must have at least one statement except user
# extension.
# For example: "[guids" [<attribs>] "]" <EOL> <statements>+
# A subclass can override this method to indicate whether a statement is required.
#
def _IsStatementRequired(self):
if self._RawData:
pass
return False
def _LoggerError(self, ErrorString):
Logger.Error(TOOL_NAME, FILE_PARSE_FAILURE, File=self._RawData.Filename,
Line = self._RawData.LineIndex,
ExtraData=ErrorString + ST.ERR_DECPARSE_LINE % self._RawData.CurrentLine)
def _ReplaceMacro(self, String):
if gMACRO_PATTERN.findall(String):
String = ReplaceMacro(String, self._LocalMacro, False,
FileName = self._RawData.Filename,
Line = ['', self._RawData.LineIndex])
String = ReplaceMacro(String, self._RawData.Macros, False,
FileName = self._RawData.Filename,
Line = ['', self._RawData.LineIndex])
MacroUsed = gMACRO_PATTERN.findall(String)
if MacroUsed:
Logger.Error(TOOL_NAME, FILE_PARSE_FAILURE,
File=self._RawData.Filename,
Line = self._RawData.LineIndex,
ExtraData = ST.ERR_DECPARSE_MACRO_RESOLVE % (str(MacroUsed), String))
return String
def _MacroParser(self, String):
TokenList = GetSplitValueList(String, ' ', 1)
if len(TokenList) < 2 or TokenList[1] == '':
self._LoggerError(ST.ERR_DECPARSE_MACRO_PAIR)
TokenList = GetSplitValueList(TokenList[1], DT.TAB_EQUAL_SPLIT, 1)
if TokenList[0] == '':
self._LoggerError(ST.ERR_DECPARSE_MACRO_NAME)
elif not IsValidToken(MACRO_PATTERN, TokenList[0]):
self._LoggerError(ST.ERR_DECPARSE_MACRO_NAME_UPPER % TokenList[0])
if len(TokenList) == 1:
self._LocalMacro[TokenList[0]] = ''
else:
self._LocalMacro[TokenList[0]] = self._ReplaceMacro(TokenList[1])
## _ParseItem
#
# Parse the specified item; this function must be overridden by a subclass
#
def _ParseItem(self):
if self._RawData:
pass
#
# Should never be called
#
return None
## _TailCommentStrategy
#
# This function can be overridden to parse tail comments;
# by default it does not consume any lines
#
# @param Comment: Comment of current line
#
def _TailCommentStrategy(self, Comment):
if Comment:
pass
if self._RawData:
pass
return False
## _StopCurrentParsing
#
# Called in Parse to decide whether current parsing should stop when a
# certain keyword is encountered.
# The default stops at a section start/end line.
#
# @param Line: Current line
#
def _StopCurrentParsing(self, Line):
if self._RawData:
pass
return Line[0] == DT.TAB_SECTION_START and Line[-1] == DT.TAB_SECTION_END
## _TryBackSlash
#
# Split comment and DEC content, concatenating lines when a line ends with '\'
#
# @param ProcessedLine: The already processed line
# @param ProcessedComments: The already processed comments
#
def _TryBackSlash(self, ProcessedLine, ProcessedComments):
CatLine = ''
Comment = ''
Line = ProcessedLine
CommentList = ProcessedComments
while not self._RawData.IsEndOfFile():
if Line == '':
self._LoggerError(ST.ERR_DECPARSE_BACKSLASH_EMPTY)
break
if Comment:
CommentList.append((Comment, self._RawData.LineIndex))
if Line[-1] != DT.TAB_SLASH:
CatLine += Line
break
elif len(Line) < 2 or Line[-2] != ' ':
self._LoggerError(ST.ERR_DECPARSE_BACKSLASH)
else:
CatLine += Line[:-1]
Line, Comment = CleanString(self._RawData.GetNextLine())
#
# Reach end of content
#
if self._RawData.IsEndOfFile():
if not CatLine:
if ProcessedLine[-1] == DT.TAB_SLASH:
self._LoggerError(ST.ERR_DECPARSE_BACKSLASH_EMPTY)
CatLine = ProcessedLine
else:
if not Line or Line[-1] == DT.TAB_SLASH:
self._LoggerError(ST.ERR_DECPARSE_BACKSLASH_EMPTY)
CatLine += Line
#
# All MACRO values defined by the DEFINE statements in any section
# (except [Userextensions] sections for Intel) of the INF or DEC file
# must be expanded before processing of the file.
#
__IsReplaceMacro = True
Header = self._RawData.CurrentScope[0] if self._RawData.CurrentScope else None
if Header and len(Header) > 2:
if Header[0].upper() == 'USEREXTENSIONS' and not (Header[1] == 'TianoCore' and Header[2] == '"ExtraFiles"'):
__IsReplaceMacro = False
if __IsReplaceMacro:
self._RawData.CurrentLine = self._ReplaceMacro(CatLine)
else:
self._RawData.CurrentLine = CatLine
return CatLine, CommentList
## Parse
# This is a template method that calls other member functions which may be
# overridden by subclasses. It reads the file line by line and calls the
# other member functions to do the parsing. This function should not be
# re-implemented by subclasses.
#
def Parse(self):
HeadComments = []
TailComments = []
#======================================================================
# CurComments may point to HeadComments or TailComments
#======================================================================
CurComments = HeadComments
CurObj = None
ItemNum = 0
FromBuf = False
#======================================================================
# Used to report error information if empty section found
#======================================================================
Index = self._RawData.LineIndex
LineStr = self._RawData.CurrentLine
while not self._RawData.IsEndOfFile() or self._RawData.NextLine:
if self._RawData.NextLine:
#==============================================================
# A previously processed line is in the buffer
#==============================================================
Line = self._RawData.NextLine
HeadComments.extend(self._RawData.HeadComment)
TailComments.extend(self._RawData.TailComment)
self._RawData.ResetNext()
Comment = ''
FromBuf = True
else:
#==============================================================
# No line in buffer, read next line
#==============================================================
Line, Comment = CleanString(self._RawData.GetNextLine())
FromBuf = False
if Line:
if not FromBuf and CurObj and TailComments:
#==========================================================
# Set tail comments to previous statement if not empty.
#==========================================================
CurObj.SetTailComment(CurObj.GetTailComment()+TailComments)
if not FromBuf:
del TailComments[:]
CurComments = TailComments
Comments = []
if Comment:
Comments = [(Comment, self._RawData.LineIndex)]
#==============================================================
# Try if last char of line has backslash
#==============================================================
Line, Comments = self._TryBackSlash(Line, Comments)
CurComments.extend(Comments)
#==============================================================
# Macro found
#==============================================================
if Line.startswith('DEFINE '):
self._MacroParser(Line)
del HeadComments[:]
del TailComments[:]
CurComments = HeadComments
continue
if self._StopCurrentParsing(Line):
#==========================================================
# This line does not belong to this parser.
# Save it so it can be used by the next parser.
#==========================================================
self._RawData.SetNext(Line, HeadComments, TailComments)
break
Obj = self._ParseItem()
ItemNum += 1
if Obj:
Obj.SetHeadComment(Obj.GetHeadComment()+HeadComments)
Obj.SetTailComment(Obj.GetTailComment()+TailComments)
del HeadComments[:]
del TailComments[:]
CurObj = Obj
else:
CurObj = None
else:
if id(CurComments) == id(TailComments):
#==========================================================
# Check if this comment belongs to tail comment
#==========================================================
if not self._TailCommentStrategy(Comment):
CurComments = HeadComments
if Comment:
CurComments.append(((Comment, self._RawData.LineIndex)))
else:
del CurComments[:]
if self._IsStatementRequired() and ItemNum == 0:
Logger.Error(
TOOL_NAME, FILE_PARSE_FAILURE,
File=self._RawData.Filename,
Line=Index,
ExtraData=ST.ERR_DECPARSE_STATEMENT_EMPTY % LineStr
)
## _DecDefine
# Parse define section
#
class _DecDefine(_DecBase):
def __init__(self, RawData):
_DecBase.__init__(self, RawData)
self.ItemObject = DecDefineObject(RawData.Filename)
self._LocalMacro = self._RawData.Macros
self._DefSecNum = 0
#
# Each field has a function to validate
#
self.DefineValidation = {
DT.TAB_DEC_DEFINES_DEC_SPECIFICATION : self._SetDecSpecification,
DT.TAB_DEC_DEFINES_PACKAGE_NAME : self._SetPackageName,
DT.TAB_DEC_DEFINES_PACKAGE_GUID : self._SetPackageGuid,
DT.TAB_DEC_DEFINES_PACKAGE_VERSION : self._SetPackageVersion,
DT.TAB_DEC_DEFINES_PKG_UNI_FILE : self._SetPackageUni,
}
def BlockStart(self):
self._DefSecNum += 1
if self._DefSecNum > 1:
self._LoggerError(ST.ERR_DECPARSE_DEFINE_MULTISEC)
## CheckRequiredFields
#
# Check required fields: DEC_SPECIFICATION, PACKAGE_NAME
# PACKAGE_GUID, PACKAGE_VERSION
#
def CheckRequiredFields(self):
Ret = False
if self.ItemObject.GetPackageSpecification() == '':
Logger.Error(TOOL_NAME, FILE_PARSE_FAILURE, File=self._RawData.Filename,
ExtraData=ST.ERR_DECPARSE_DEFINE_REQUIRED % DT.TAB_DEC_DEFINES_DEC_SPECIFICATION)
elif self.ItemObject.GetPackageName() == '':
Logger.Error(TOOL_NAME, FILE_PARSE_FAILURE, File=self._RawData.Filename,
ExtraData=ST.ERR_DECPARSE_DEFINE_REQUIRED % DT.TAB_DEC_DEFINES_PACKAGE_NAME)
elif self.ItemObject.GetPackageGuid() == '':
Logger.Error(TOOL_NAME, FILE_PARSE_FAILURE, File=self._RawData.Filename,
ExtraData=ST.ERR_DECPARSE_DEFINE_REQUIRED % DT.TAB_DEC_DEFINES_PACKAGE_GUID)
elif self.ItemObject.GetPackageVersion() == '':
Logger.Error(TOOL_NAME, FILE_PARSE_FAILURE, File=self._RawData.Filename,
ExtraData=ST.ERR_DECPARSE_DEFINE_REQUIRED % DT.TAB_DEC_DEFINES_PACKAGE_VERSION)
else:
Ret = True
return Ret
def _ParseItem(self):
Line = self._RawData.CurrentLine
TokenList = GetSplitValueList(Line, DT.TAB_EQUAL_SPLIT, 1)
if TokenList[0] == DT.TAB_DEC_DEFINES_PKG_UNI_FILE:
self.DefineValidation[TokenList[0]](TokenList[1])
elif len(TokenList) < 2:
self._LoggerError(ST.ERR_DECPARSE_DEFINE_FORMAT)
elif TokenList[0] not in self.DefineValidation:
self._LoggerError(ST.ERR_DECPARSE_DEFINE_UNKNOWKEY % TokenList[0])
else:
self.DefineValidation[TokenList[0]](TokenList[1])
DefineItem = DecDefineItemObject()
DefineItem.Key = TokenList[0]
DefineItem.Value = TokenList[1]
self.ItemObject.AddItem(DefineItem, self._RawData.CurrentScope)
return DefineItem
def _SetDecSpecification(self, Token):
if self.ItemObject.GetPackageSpecification():
self._LoggerError(ST.ERR_DECPARSE_DEFINE_DEFINED % DT.TAB_DEC_DEFINES_DEC_SPECIFICATION)
if not IsValidToken('0[xX][0-9a-fA-F]{8}', Token):
if not IsValidDecVersionVal(Token):
self._LoggerError(ST.ERR_DECPARSE_DEFINE_SPEC)
self.ItemObject.SetPackageSpecification(Token)
def _SetPackageName(self, Token):
if self.ItemObject.GetPackageName():
self._LoggerError(ST.ERR_DECPARSE_DEFINE_DEFINED % DT.TAB_DEC_DEFINES_PACKAGE_NAME)
if not IsValidWord(Token):
self._LoggerError(ST.ERR_DECPARSE_DEFINE_PKGNAME)
self.ItemObject.SetPackageName(Token)
def _SetPackageGuid(self, Token):
if self.ItemObject.GetPackageGuid():
self._LoggerError(ST.ERR_DECPARSE_DEFINE_DEFINED % DT.TAB_DEC_DEFINES_PACKAGE_GUID)
if not CheckGuidRegFormat(Token):
self._LoggerError(ST.ERR_DECPARSE_DEFINE_PKGGUID)
self.ItemObject.SetPackageGuid(Token)
def _SetPackageVersion(self, Token):
if self.ItemObject.GetPackageVersion():
self._LoggerError(ST.ERR_DECPARSE_DEFINE_DEFINED % DT.TAB_DEC_DEFINES_PACKAGE_VERSION)
if not IsValidToken(VERSION_PATTERN, Token):
self._LoggerError(ST.ERR_DECPARSE_DEFINE_PKGVERSION)
else:
if not DT.TAB_SPLIT in Token:
Token = Token + '.0'
self.ItemObject.SetPackageVersion(Token)
def _SetPackageUni(self, Token):
if self.ItemObject.GetPackageUniFile():
self._LoggerError(ST.ERR_DECPARSE_DEFINE_DEFINED % DT.TAB_DEC_DEFINES_PKG_UNI_FILE)
self.ItemObject.SetPackageUniFile(Token)
## _DecInclude
#
# Parse include section
#
class _DecInclude(_DecBase):
def __init__(self, RawData):
_DecBase.__init__(self, RawData)
self.ItemObject = DecIncludeObject(RawData.Filename)
def _ParseItem(self):
Line = self._RawData.CurrentLine
if not IsValidPath(Line, self._RawData.PackagePath):
self._LoggerError(ST.ERR_DECPARSE_INCLUDE % Line)
Item = DecIncludeItemObject(StripRoot(self._RawData.PackagePath, Line), self._RawData.PackagePath)
self.ItemObject.AddItem(Item, self._RawData.CurrentScope)
return Item
## _DecLibraryclass
#
# Parse library class section
#
class _DecLibraryclass(_DecBase):
def __init__(self, RawData):
_DecBase.__init__(self, RawData)
self.ItemObject = DecLibraryclassObject(RawData.Filename)
def _ParseItem(self):
Line = self._RawData.CurrentLine
TokenList = GetSplitValueList(Line, DT.TAB_VALUE_SPLIT)
if len(TokenList) != 2:
self._LoggerError(ST.ERR_DECPARSE_LIBCLASS_SPLIT)
if TokenList[0] == '' or TokenList[1] == '':
self._LoggerError(ST.ERR_DECPARSE_LIBCLASS_EMPTY)
if not IsValidToken('[A-Z][0-9A-Za-z]*', TokenList[0]):
self._LoggerError(ST.ERR_DECPARSE_LIBCLASS_LIB)
self._CheckReDefine(TokenList[0])
Value = TokenList[1]
#
# Must end with .h
#
if not Value.endswith('.h'):
self._LoggerError(ST.ERR_DECPARSE_LIBCLASS_PATH_EXT)
#
# Path must exist
#
if not IsValidPath(Value, self._RawData.PackagePath):
self._LoggerError(ST.ERR_DECPARSE_INCLUDE % Value)
Item = DecLibraryclassItemObject(TokenList[0], StripRoot(self._RawData.PackagePath, Value),
self._RawData.PackagePath)
self.ItemObject.AddItem(Item, self._RawData.CurrentScope)
return Item
## _DecPcd
#
# Parse PCD section
#
class _DecPcd(_DecBase):
def __init__(self, RawData):
_DecBase.__init__(self, RawData)
self.ItemObject = DecPcdObject(RawData.Filename)
#
# Used to check duplicate token
# Key is token space and token number (integer), value is C name
#
self.TokenMap = {}
def _ParseItem(self):
Line = self._RawData.CurrentLine
TokenList = Line.split(DT.TAB_VALUE_SPLIT)
if len(TokenList) < 4:
self._LoggerError(ST.ERR_DECPARSE_PCD_SPLIT)
#
# Token space guid C name
#
PcdName = GetSplitValueList(TokenList[0], DT.TAB_SPLIT)
if len(PcdName) != 2 or PcdName[0] == '' or PcdName[1] == '':
self._LoggerError(ST.ERR_DECPARSE_PCD_NAME)
Guid = PcdName[0]
if not IsValidToken(CVAR_PATTERN, Guid):
self._LoggerError(ST.ERR_DECPARSE_PCD_CVAR_GUID)
#
# PCD C name
#
CName = PcdName[1]
if not IsValidToken(CVAR_PATTERN, CName):
self._LoggerError(ST.ERR_DECPARSE_PCD_CVAR_PCDCNAME)
self._CheckReDefine(Guid + DT.TAB_SPLIT + CName)
#
# Default value, may be C array, string or number
#
Data = DT.TAB_VALUE_SPLIT.join(TokenList[1:-2]).strip()
#
# PCD data type
#
DataType = TokenList[-2].strip()
Valid, Cause = IsValidPcdDatum(DataType, Data)
if not Valid:
self._LoggerError(Cause)
PcdType = self._RawData.CurrentScope[0][0]
if PcdType == DT.TAB_PCDS_FEATURE_FLAG_NULL.upper() and DataType != 'BOOLEAN':
self._LoggerError(ST.ERR_DECPARSE_PCD_FEATUREFLAG)
#
# Token value is the last element in list.
#
Token = TokenList[-1].strip()
if not IsValidToken(PCD_TOKEN_PATTERN, Token):
self._LoggerError(ST.ERR_DECPARSE_PCD_TOKEN % Token)
elif not Token.startswith('0x') and not Token.startswith('0X'):
if int(Token) > 4294967295:
self._LoggerError(ST.ERR_DECPARSE_PCD_TOKEN_INT % Token)
Token = '0x%x' % int(Token)
IntToken = int(Token, 0)
if (Guid, IntToken) in self.TokenMap:
if self.TokenMap[Guid, IntToken] != CName:
self._LoggerError(ST.ERR_DECPARSE_PCD_TOKEN_UNIQUE%(Token))
else:
self.TokenMap[Guid, IntToken] = CName
Item = DecPcdItemObject(Guid, CName, Data, DataType, Token)
self.ItemObject.AddItem(Item, self._RawData.CurrentScope)
return Item
## _DecGuid
#
# Parse GUID, PPI, Protocol section
#
class _DecGuid(_DecBase):
def __init__(self, RawData):
_DecBase.__init__(self, RawData)
self.GuidObj = DecGuidObject(RawData.Filename)
self.PpiObj = DecPpiObject(RawData.Filename)
self.ProtocolObj = DecProtocolObject(RawData.Filename)
self.ObjectDict = \
{
DT.TAB_GUIDS.upper() : self.GuidObj,
DT.TAB_PPIS.upper() : self.PpiObj,
DT.TAB_PROTOCOLS.upper() : self.ProtocolObj
}
def GetDataObject(self):
if self._RawData.CurrentScope:
return self.ObjectDict[self._RawData.CurrentScope[0][0]]
return None
def GetGuidObject(self):
return self.GuidObj
def GetPpiObject(self):
return self.PpiObj
def GetProtocolObject(self):
return self.ProtocolObj
def _ParseItem(self):
Line = self._RawData.CurrentLine
TokenList = GetSplitValueList(Line, DT.TAB_EQUAL_SPLIT, 1)
if len(TokenList) < 2:
self._LoggerError(ST.ERR_DECPARSE_CGUID)
if TokenList[0] == '':
self._LoggerError(ST.ERR_DECPARSE_CGUID_NAME)
if TokenList[1] == '':
self._LoggerError(ST.ERR_DECPARSE_CGUID_GUID)
if not IsValidToken(CVAR_PATTERN, TokenList[0]):
self._LoggerError(ST.ERR_DECPARSE_PCD_CVAR_GUID)
self._CheckReDefine(TokenList[0])
if TokenList[1][0] != '{':
if not CheckGuidRegFormat(TokenList[1]):
self._LoggerError(ST.ERR_DECPARSE_DEFINE_PKGGUID)
GuidString = TokenList[1]
else:
#
# Convert C-format GUID to GUID string and do a simple error check
#
GuidString = GuidStructureStringToGuidString(TokenList[1])
if TokenList[1][0] != '{' or TokenList[1][-1] != '}' or GuidString == '':
self._LoggerError(ST.ERR_DECPARSE_CGUID_GUIDFORMAT)
#
# Check C format GUID
#
if not IsValidCFormatGuid(TokenList[1]):
self._LoggerError(ST.ERR_DECPARSE_CGUID_GUIDFORMAT)
Item = DecGuidItemObject(TokenList[0], TokenList[1], GuidString)
ItemObject = self.ObjectDict[self._RawData.CurrentScope[0][0]]
ItemObject.AddItem(Item, self._RawData.CurrentScope)
return Item
## _DecUserExtension
#
# Parse user extension section
#
class _DecUserExtension(_DecBase):
def __init__(self, RawData):
_DecBase.__init__(self, RawData)
self.ItemObject = DecUserExtensionObject(RawData.Filename)
self._Headers = []
self._CurItems = []
def BlockStart(self):
self._CurItems = []
for Header in self._RawData.CurrentScope:
if Header in self._Headers:
self._LoggerError(ST.ERR_DECPARSE_UE_DUPLICATE)
else:
self._Headers.append(Header)
for Item in self._CurItems:
if Item.UserId == Header[1] and Item.IdString == Header[2]:
Item.ArchAndModuleType.append(Header[3])
break
else:
Item = DecUserExtensionItemObject()
Item.UserId = Header[1]
Item.IdString = Header[2]
Item.ArchAndModuleType.append(Header[3])
self._CurItems.append(Item)
self.ItemObject.AddItem(Item, None)
self._LocalMacro = {}
def _ParseItem(self):
Line = self._RawData.CurrentLine
Item = None
for Item in self._CurItems:
if Item.UserString:
Item.UserString = '\n'.join([Item.UserString, Line])
else:
Item.UserString = Line
return Item
## Dec
#
# Top dec parser
#
class Dec(_DecBase, _DecComments):
def __init__(self, DecFile, Parse = True):
try:
Content = ConvertSpecialChar(open(DecFile, 'r').readlines())
except BaseException:
Logger.Error(TOOL_NAME, FILE_OPEN_FAILURE, File=DecFile,
ExtraData=ST.ERR_DECPARSE_FILEOPEN % DecFile)
#
# Pre-parser for Private section
#
self._Private = ''
__IsFoundPrivate = False
NewContent = []
for Line in Content:
Line = Line.strip()
if Line.startswith(DT.TAB_SECTION_START) and Line.endswith(DT.TAB_PRIVATE + DT.TAB_SECTION_END):
__IsFoundPrivate = True
if Line.startswith(DT.TAB_SECTION_START) and Line.endswith(DT.TAB_SECTION_END)\
and not Line.endswith(DT.TAB_PRIVATE + DT.TAB_SECTION_END):
__IsFoundPrivate = False
if __IsFoundPrivate:
self._Private += Line + '\r'
if not __IsFoundPrivate:
NewContent.append(Line + '\r')
RawData = FileContent(DecFile, NewContent)
_DecComments.__init__(self)
_DecBase.__init__(self, RawData)
self.BinaryHeadComment = []
self.PcdErrorCommentDict = {}
self._Define = _DecDefine(RawData)
self._Include = _DecInclude(RawData)
self._Guid = _DecGuid(RawData)
self._LibClass = _DecLibraryclass(RawData)
self._Pcd = _DecPcd(RawData)
self._UserEx = _DecUserExtension(RawData)
#
# DEC file supported data types (one type per section)
#
self._SectionParser = {
DT.TAB_DEC_DEFINES.upper() : self._Define,
DT.TAB_INCLUDES.upper() : self._Include,
DT.TAB_LIBRARY_CLASSES.upper() : self._LibClass,
DT.TAB_GUIDS.upper() : self._Guid,
DT.TAB_PPIS.upper() : self._Guid,
DT.TAB_PROTOCOLS.upper() : self._Guid,
DT.TAB_PCDS_FIXED_AT_BUILD_NULL.upper() : self._Pcd,
DT.TAB_PCDS_PATCHABLE_IN_MODULE_NULL.upper() : self._Pcd,
DT.TAB_PCDS_FEATURE_FLAG_NULL.upper() : self._Pcd,
DT.TAB_PCDS_DYNAMIC_NULL.upper() : self._Pcd,
DT.TAB_PCDS_DYNAMIC_EX_NULL.upper() : self._Pcd,
DT.TAB_USER_EXTENSIONS.upper() : self._UserEx
}
if Parse:
self.ParseDecComment()
self.Parse()
#
# Parsing done, check required fields
#
self.CheckRequiredFields()
def CheckRequiredFields(self):
for SectionParser in self._SectionParser.values():
if not SectionParser.CheckRequiredFields():
return False
return True
##
# Parse DEC file header comments and PCD error comment sections
#
def ParseDecComment(self):
IsFileHeader = False
IsBinaryHeader = False
FileHeaderLineIndex = -1
BinaryHeaderLineIndex = -1
TokenSpaceGuidCName = ''
#
# Parse PCD error comment section
#
while not self._RawData.IsEndOfFile():
self._RawData.CurrentLine = self._RawData.GetNextLine()
if self._RawData.CurrentLine.startswith(DT.TAB_COMMENT_SPLIT) and \
DT.TAB_SECTION_START in self._RawData.CurrentLine and \
DT.TAB_SECTION_END in self._RawData.CurrentLine:
self._RawData.CurrentLine = self._RawData.CurrentLine.replace(DT.TAB_COMMENT_SPLIT, '').strip()
if self._RawData.CurrentLine[0] == DT.TAB_SECTION_START and \
self._RawData.CurrentLine[-1] == DT.TAB_SECTION_END:
RawSection = self._RawData.CurrentLine[1:-1].strip()
if RawSection.upper().startswith(DT.TAB_PCD_ERROR.upper()+'.'):
TokenSpaceGuidCName = RawSection.split(DT.TAB_PCD_ERROR+'.')[1].strip()
continue
if TokenSpaceGuidCName and self._RawData.CurrentLine.startswith(DT.TAB_COMMENT_SPLIT):
self._RawData.CurrentLine = self._RawData.CurrentLine.replace(DT.TAB_COMMENT_SPLIT, '').strip()
if self._RawData.CurrentLine != '':
if DT.TAB_VALUE_SPLIT not in self._RawData.CurrentLine:
self._LoggerError(ST.ERR_DECPARSE_PCDERRORMSG_MISS_VALUE_SPLIT)
PcdErrorNumber, PcdErrorMsg = GetSplitValueList(self._RawData.CurrentLine, DT.TAB_VALUE_SPLIT, 1)
PcdErrorNumber = ParsePcdErrorCode(PcdErrorNumber, self._RawData.Filename, self._RawData.LineIndex)
if not PcdErrorMsg.strip():
self._LoggerError(ST.ERR_DECPARSE_PCD_MISS_ERRORMSG)
self.PcdErrorCommentDict[(TokenSpaceGuidCName, PcdErrorNumber)] = PcdErrorMsg.strip()
else:
TokenSpaceGuidCName = ''
self._RawData.LineIndex = 0
self._RawData.CurrentLine = ''
self._RawData.NextLine = ''
while not self._RawData.IsEndOfFile():
Line, Comment = CleanString(self._RawData.GetNextLine())
#
# Header must be pure comment
#
if Line != '':
self._RawData.UndoNextLine()
break
if Comment and Comment.startswith(DT.TAB_SPECIAL_COMMENT) and Comment.find(DT.TAB_HEADER_COMMENT) > 0 \
and not Comment[2:Comment.find(DT.TAB_HEADER_COMMENT)].strip():
IsFileHeader = True
IsBinaryHeader = False
FileHeaderLineIndex = self._RawData.LineIndex
#
# Get license information before '@file'
#
if not IsFileHeader and not IsBinaryHeader and Comment and Comment.startswith(DT.TAB_COMMENT_SPLIT) and \
DT.TAB_BINARY_HEADER_COMMENT not in Comment:
self._HeadComment.append((Comment, self._RawData.LineIndex))
if Comment and IsFileHeader and \
not(Comment.startswith(DT.TAB_SPECIAL_COMMENT) \
and Comment.find(DT.TAB_BINARY_HEADER_COMMENT) > 0):
self._HeadComment.append((Comment, self._RawData.LineIndex))
#
# Double '#' indicates end of header comments
#
if (not Comment or Comment == DT.TAB_SPECIAL_COMMENT) and IsFileHeader:
IsFileHeader = False
continue
if Comment and Comment.startswith(DT.TAB_SPECIAL_COMMENT) \
and Comment.find(DT.TAB_BINARY_HEADER_COMMENT) > 0:
IsBinaryHeader = True
IsFileHeader = False
BinaryHeaderLineIndex = self._RawData.LineIndex
if Comment and IsBinaryHeader:
self.BinaryHeadComment.append((Comment, self._RawData.LineIndex))
#
# Double '#' indicates end of header comments
#
if (not Comment or Comment == DT.TAB_SPECIAL_COMMENT) and IsBinaryHeader:
IsBinaryHeader = False
break
if FileHeaderLineIndex > -1 and not IsFileHeader and not IsBinaryHeader:
break
if FileHeaderLineIndex > BinaryHeaderLineIndex and FileHeaderLineIndex > -1 and BinaryHeaderLineIndex > -1:
self._LoggerError(ST.ERR_BINARY_HEADER_ORDER)
if FileHeaderLineIndex == -1:
# self._LoggerError(ST.ERR_NO_SOURCE_HEADER)
Logger.Error(TOOL_NAME, FORMAT_INVALID,
ST.ERR_NO_SOURCE_HEADER,
File=self._RawData.Filename)
return
def _StopCurrentParsing(self, Line):
return False
def _ParseItem(self):
self._SectionHeaderParser()
if len(self._RawData.CurrentScope) == 0:
self._LoggerError(ST.ERR_DECPARSE_SECTION_EMPTY)
SectionObj = self._SectionParser[self._RawData.CurrentScope[0][0]]
SectionObj.BlockStart()
SectionObj.Parse()
return SectionObj.GetDataObject()
def _UserExtentionSectionParser(self):
self._RawData.CurrentScope = []
ArchList = set()
Section = self._RawData.CurrentLine[1:-1]
Par = ParserHelper(Section, self._RawData.Filename)
while not Par.End():
#
# User extension
#
Token = Par.GetToken()
if Token.upper() != DT.TAB_USER_EXTENSIONS.upper():
self._LoggerError(ST.ERR_DECPARSE_SECTION_UE)
UserExtension = Token.upper()
Par.AssertChar(DT.TAB_SPLIT, ST.ERR_DECPARSE_SECTION_UE, self._RawData.LineIndex)
#
# UserID
#
Token = Par.GetToken()
if not IsValidUserId(Token):
self._LoggerError(ST.ERR_DECPARSE_SECTION_UE_USERID)
UserId = Token
Par.AssertChar(DT.TAB_SPLIT, ST.ERR_DECPARSE_SECTION_UE, self._RawData.LineIndex)
#
# IdString
#
Token = Par.GetToken()
if not IsValidIdString(Token):
self._LoggerError(ST.ERR_DECPARSE_SECTION_UE_IDSTRING)
IdString = Token
Arch = 'COMMON'
if Par.Expect(DT.TAB_SPLIT):
Token = Par.GetToken()
Arch = Token.upper()
if not IsValidArch(Arch):
self._LoggerError(ST.ERR_DECPARSE_ARCH)
ArchList.add(Arch)
if [UserExtension, UserId, IdString, Arch] not in \
self._RawData.CurrentScope:
self._RawData.CurrentScope.append(
[UserExtension, UserId, IdString, Arch]
)
if not Par.Expect(DT.TAB_COMMA_SPLIT):
break
elif Par.End():
self._LoggerError(ST.ERR_DECPARSE_SECTION_COMMA)
Par.AssertEnd(ST.ERR_DECPARSE_SECTION_UE, self._RawData.LineIndex)
if 'COMMON' in ArchList and len(ArchList) > 1:
self._LoggerError(ST.ERR_DECPARSE_SECTION_COMMON)
## Section header parser
#
# The section header is always in following format:
#
# [section_name.arch<.platform|module_type>]
#
def _SectionHeaderParser(self):
if self._RawData.CurrentLine[0] != DT.TAB_SECTION_START or self._RawData.CurrentLine[-1] != DT.TAB_SECTION_END:
self._LoggerError(ST.ERR_DECPARSE_SECTION_IDENTIFY)
RawSection = self._RawData.CurrentLine[1:-1].strip().upper()
#
# Check the Defines section, which may occur only once and must not
# be followed by an arch
#
if RawSection.startswith(DT.TAB_DEC_DEFINES.upper()):
if RawSection != DT.TAB_DEC_DEFINES.upper():
self._LoggerError(ST.ERR_DECPARSE_DEFINE_SECNAME)
#
# Check user extension section
#
if RawSection.startswith(DT.TAB_USER_EXTENSIONS.upper()):
return self._UserExtentionSectionParser()
self._RawData.CurrentScope = []
SectionNames = []
ArchList = set()
for Item in GetSplitValueList(RawSection, DT.TAB_COMMA_SPLIT):
if Item == '':
self._LoggerError(ST.ERR_DECPARSE_SECTION_SUBEMPTY % self._RawData.CurrentLine)
ItemList = GetSplitValueList(Item, DT.TAB_SPLIT)
#
# different types of PCD are permissible in one section
#
SectionName = ItemList[0]
if SectionName not in self._SectionParser:
self._LoggerError(ST.ERR_DECPARSE_SECTION_UNKNOW % SectionName)
if SectionName not in SectionNames:
SectionNames.append(SectionName)
#
# In the DEC specification, all section headers except UserExtensions
# have at most two parts: SectionName.Arch
#
if len(ItemList) > 2:
self._LoggerError(ST.ERR_DECPARSE_SECTION_SUBTOOMANY % Item)
if DT.TAB_PCDS_FEATURE_FLAG_NULL.upper() in SectionNames and len(SectionNames) > 1:
self._LoggerError(ST.ERR_DECPARSE_SECTION_FEATUREFLAG % DT.TAB_PCDS_FEATURE_FLAG_NULL)
#
# S1 is always Arch
#
if len(ItemList) > 1:
Str1 = ItemList[1]
if not IsValidArch(Str1):
self._LoggerError(ST.ERR_DECPARSE_ARCH)
else:
Str1 = 'COMMON'
ArchList.add(Str1)
if [SectionName, Str1] not in self._RawData.CurrentScope:
self._RawData.CurrentScope.append([SectionName, Str1])
#
# 'COMMON' must not be used with specific ARCHs in the same section
#
if 'COMMON' in ArchList and len(ArchList) > 1:
self._LoggerError(ST.ERR_DECPARSE_SECTION_COMMON)
if len(SectionNames) == 0:
self._LoggerError(ST.ERR_DECPARSE_SECTION_SUBEMPTY % self._RawData.CurrentLine)
if len(SectionNames) != 1:
for Sec in SectionNames:
if not Sec.startswith(DT.TAB_PCDS.upper()):
self._LoggerError(ST.ERR_DECPARSE_SECTION_NAME % str(SectionNames))
def GetDefineSectionMacro(self):
return self._Define.GetLocalMacro()
def GetDefineSectionObject(self):
return self._Define.GetDataObject()
def GetIncludeSectionObject(self):
return self._Include.GetDataObject()
def GetGuidSectionObject(self):
return self._Guid.GetGuidObject()
def GetProtocolSectionObject(self):
return self._Guid.GetProtocolObject()
def GetPpiSectionObject(self):
return self._Guid.GetPpiObject()
def GetLibraryClassSectionObject(self):
return self._LibClass.GetDataObject()
def GetPcdSectionObject(self):
return self._Pcd.GetDataObject()
def GetUserExtensionSectionObject(self):
return self._UserEx.GetDataObject()
def GetPackageSpecification(self):
return self._Define.GetDataObject().GetPackageSpecification()
def GetPackageName(self):
return self._Define.GetDataObject().GetPackageName()
def GetPackageGuid(self):
return self._Define.GetDataObject().GetPackageGuid()
def GetPackageVersion(self):
return self._Define.GetDataObject().GetPackageVersion()
def GetPackageUniFile(self):
return self._Define.GetDataObject().GetPackageUniFile()
def GetPrivateSections(self):
return self._Private
| edk2-master | BaseTools/Source/Python/UPT/Parser/DecParser.py |
## @file
# This file contains the parser for sections in an INF file
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
InfSectionParser
'''
##
# Import Modules
#
from copy import deepcopy
import re
from Library.StringUtils import GetSplitValueList
from Library.CommentParsing import ParseHeaderCommentSection
from Library.CommentParsing import ParseComment
from Library import DataType as DT
import Logger.Log as Logger
from Logger import StringTable as ST
from Logger.ToolError import FORMAT_INVALID
from Object.Parser.InfDefineObject import InfDefObject
from Object.Parser.InfBuildOptionObject import InfBuildOptionsObject
from Object.Parser.InfLibraryClassesObject import InfLibraryClassObject
from Object.Parser.InfPackagesObject import InfPackageObject
from Object.Parser.InfPcdObject import InfPcdObject
from Object.Parser.InfSoucesObject import InfSourcesObject
from Object.Parser.InfUserExtensionObject import InfUserExtensionObject
from Object.Parser.InfProtocolObject import InfProtocolObject
from Object.Parser.InfPpiObject import InfPpiObject
from Object.Parser.InfGuidObject import InfGuidObject
from Object.Parser.InfDepexObject import InfDepexObject
from Object.Parser.InfBinaryObject import InfBinariesObject
from Object.Parser.InfHeaderObject import InfHeaderObject
from Object.Parser.InfMisc import InfSpecialCommentObject
from Object.Parser.InfMisc import InfHobObject
from Object.Parser.InfMisc import InfBootModeObject
from Object.Parser.InfMisc import InfEventObject
from Parser.InfParserMisc import gINF_SECTION_DEF
from Parser.InfDefineSectionParser import InfDefinSectionParser
from Parser.InfBuildOptionSectionParser import InfBuildOptionSectionParser
from Parser.InfSourceSectionParser import InfSourceSectionParser
from Parser.InfLibrarySectionParser import InfLibrarySectionParser
from Parser.InfPackageSectionParser import InfPackageSectionParser
from Parser.InfGuidPpiProtocolSectionParser import InfGuidPpiProtocolSectionParser
from Parser.InfBinarySectionParser import InfBinarySectionParser
from Parser.InfPcdSectionParser import InfPcdSectionParser
from Parser.InfDepexSectionParser import InfDepexSectionParser
## GetSpecialStr2
#
# GetSpecialStr2
#
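# Illustrative example (hypothetical header items, not from a real INF):
#   GetSpecialStr2(['LibraryClasses', 'IA32', 'DXE_DRIVER'], 'M.inf', 5,
#                  '[LibraryClasses.IA32.DXE_DRIVER]') returns 'DXE_DRIVER';
# for section names other than LibraryClasses, Depex and UserExtensions a
# non-empty third part is reported as a FORMAT_INVALID error.
#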
def GetSpecialStr2(ItemList, FileName, LineNo, SectionString):
Str2 = ''
#
# S2 may be Platform or ModuleType
#
if len(ItemList) == 3:
#
# Only [LibraryClasses], [Depex] and [UserExtensions] section headers
# may have more than 2 items in the section header string;
# any other section reports an error.
#
if not (ItemList[0].upper() == DT.TAB_LIBRARY_CLASSES.upper() or \
ItemList[0].upper() == DT.TAB_DEPEX.upper() or \
ItemList[0].upper() == DT.TAB_USER_EXTENSIONS.upper()):
if ItemList[2] != '':
Logger.Error('Parser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_SOURCE_SECTION_SECTIONNAME_INVALID % (SectionString),
File=FileName,
Line=LineNo,
ExtraData=SectionString)
Str2 = ItemList[2]
elif len(ItemList) == 4:
#
# Only a [UserExtensions] section header may have 4 items in the
# section header string; any other section reports an error.
#
if not ItemList[0].upper() == DT.TAB_USER_EXTENSIONS.upper() or ItemList[0].upper() == DT.TAB_DEPEX.upper():
if ItemList[3] != '':
Logger.Error('Parser', FORMAT_INVALID, ST.ERR_INF_PARSER_SOURCE_SECTION_SECTIONNAME_INVALID \
% (SectionString), File=FileName, Line=LineNo, ExtraData=SectionString)
if not ItemList[0].upper() == DT.TAB_USER_EXTENSIONS.upper():
Str2 = ItemList[2] + ' | ' + ItemList[3]
else:
Str2 = ItemList[2]
elif len(ItemList) > 4:
Logger.Error('Parser', FORMAT_INVALID, ST.ERR_INF_PARSER_SOURCE_SECTION_SECTIONNAME_INVALID \
% (SectionString), File=FileName, Line=LineNo, ExtraData=SectionString)
return Str2
## ProcessUseExtHeader
#
#
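# Illustrative example (hypothetical tokens): items split on '.' such as
#   ['UserExtensions', 'TianoCore', '"Extra', 'Action"']
# are recombined into ['UserExtensions', 'TianoCore', '"Extra.Action"'];
# a closing quote without a matching opening quote makes the function
# return (False, []).
#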
def ProcessUseExtHeader(ItemList):
NewItemList = []
AppendContent = ''
CompleteFlag = False
for Item in ItemList:
if Item.startswith('\"') and not Item.endswith('\"'):
AppendContent = Item
CompleteFlag = True
elif Item.endswith('\"') and not Item.startswith('\"'):
#
# A UserId or IdString item that ends with '"' must have been opened by a preceding item starting with '"'.
#
if not CompleteFlag:
return False, []
AppendContent = AppendContent + "." + Item
NewItemList.append(AppendContent)
CompleteFlag = False
AppendContent = ''
elif Item.endswith('\"') and Item.startswith('\"'):
#
# Common item; no need to combine the information
#
NewItemList.append(Item)
else:
if not CompleteFlag:
NewItemList.append(Item)
else:
AppendContent = AppendContent + "." + Item
if len(NewItemList) > 4:
return False, []
return True, NewItemList
## GetArch
#
# GetArch
#
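# Illustrative example (hypothetical values):
#   GetArch(['Sources', 'IA32'], set(), 'Module.inf', 10, '[Sources.IA32]')
# returns ('IA32', {'IA32'}); when no arch part is present 'COMMON' is
# assumed, and mixing 'COMMON' with a specific arch raises a parser error.
#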
def GetArch(ItemList, ArchList, FileName, LineNo, SectionString):
#
# S1 is always Arch
#
if len(ItemList) > 1:
Arch = ItemList[1]
else:
Arch = 'COMMON'
ArchList.add(Arch)
#
# 'COMMON' must not be used with specific ARCHs in the same section
#
if 'COMMON' in ArchList and len(ArchList) > 1:
Logger.Error('Parser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_SECTION_ARCH_CONFLICT,
File=FileName,
Line=LineNo,
ExtraData=SectionString)
return Arch, ArchList
## InfSectionParser
#
# Inherit from object
#
class InfSectionParser(InfDefinSectionParser,
InfBuildOptionSectionParser,
InfSourceSectionParser,
InfLibrarySectionParser,
InfPackageSectionParser,
InfGuidPpiProtocolSectionParser,
InfBinarySectionParser,
InfPcdSectionParser,
InfDepexSectionParser):
#
# Parser objects used to implement singleton
#
MetaFiles = {}
## Factory method
#
# One file, one parser object. This factory method makes sure that there's
# only one object constructed for one meta file.
#
# @param Class class object of real AutoGen class
# (InfParser, DecParser or DscParser)
# @param FilePath The path of meta file
#
def __new__(cls, FilePath, *args, **kwargs):
if args:
pass
if kwargs:
pass
if FilePath in cls.MetaFiles:
return cls.MetaFiles[FilePath]
else:
ParserObject = super(InfSectionParser, cls).__new__(cls)
cls.MetaFiles[FilePath] = ParserObject
return ParserObject
def __init__(self):
InfDefinSectionParser.__init__(self)
InfBuildOptionSectionParser.__init__(self)
InfSourceSectionParser.__init__(self)
InfLibrarySectionParser.__init__(self)
InfPackageSectionParser.__init__(self)
InfGuidPpiProtocolSectionParser.__init__(self)
InfBinarySectionParser.__init__(self)
InfPcdSectionParser.__init__(self)
InfDepexSectionParser.__init__(self)
#
# Initialize all objects that an INF file will generate.
#
self.InfDefSection = InfDefObject()
self.InfBuildOptionSection = InfBuildOptionsObject()
self.InfLibraryClassSection = InfLibraryClassObject()
self.InfPackageSection = InfPackageObject()
self.InfPcdSection = InfPcdObject(list(self.MetaFiles.keys())[0])
self.InfSourcesSection = InfSourcesObject()
self.InfUserExtensionSection = InfUserExtensionObject()
self.InfProtocolSection = InfProtocolObject()
self.InfPpiSection = InfPpiObject()
self.InfGuidSection = InfGuidObject()
self.InfDepexSection = InfDepexObject()
self.InfPeiDepexSection = InfDepexObject()
self.InfDxeDepexSection = InfDepexObject()
self.InfSmmDepexSection = InfDepexObject()
self.InfBinariesSection = InfBinariesObject()
self.InfHeader = InfHeaderObject()
self.InfBinaryHeader = InfHeaderObject()
self.InfSpecialCommentSection = InfSpecialCommentObject()
#
# A list to store Defines section content.
#
self._PcdNameList = []
self._SectionName = ''
self._SectionType = 0
self.RelaPath = ''
self.FileName = ''
#
# File Header content parser
#
def InfHeaderParser(self, Content, InfHeaderObject2, FileName, IsBinaryHeader = False):
if IsBinaryHeader:
(Abstract, Description, Copyright, License) = ParseHeaderCommentSection(Content, FileName, True)
if not Abstract or not Description or not Copyright or not License:
Logger.Error('Parser',
FORMAT_INVALID,
ST.ERR_INVALID_BINARYHEADER_FORMAT,
File=FileName)
else:
(Abstract, Description, Copyright, License) = ParseHeaderCommentSection(Content, FileName)
#
# The file name is not processed now; it is kept for later use.
#
if self.FileName:
pass
#
# Insert Abstract, Description, Copyright, License into header object
#
InfHeaderObject2.SetAbstract(Abstract)
InfHeaderObject2.SetDescription(Description)
InfHeaderObject2.SetCopyright(Copyright)
InfHeaderObject2.SetLicense(License)
## Section header parser
#
# The section header is always in the following format:
#
# [section_name.arch<.platform|module_type>]
#
# @param SectionString A string containing the content to be parsed.
#
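# Illustrative example (hypothetical INF header): parsing
# "[Sources.IA32, Sources.X64]" sets SectionHeaderContent to
# [['Sources', 'IA32', '', LineNo], ['Sources', 'X64', '', LineNo]].
#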
def SectionHeaderParser(self, SectionString, FileName, LineNo):
_Scope = []
_SectionName = ''
ArchList = set()
_ValueList = []
_PcdNameList = [DT.TAB_INF_FIXED_PCD.upper(),
DT.TAB_INF_FEATURE_PCD.upper(),
DT.TAB_INF_PATCH_PCD.upper(),
DT.TAB_INF_PCD.upper(),
DT.TAB_INF_PCD_EX.upper()
]
SectionString = SectionString.strip()
for Item in GetSplitValueList(SectionString[1:-1], DT.TAB_COMMA_SPLIT):
if Item == '':
Logger.Error('Parser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR % (""),
File=FileName,
Line=LineNo,
ExtraData=SectionString)
ItemList = GetSplitValueList(Item, DT.TAB_SPLIT)
#
# Different section names must not be mixed in one section header,
# except that different PCD type sections may be mixed together
#
if _SectionName.upper() not in _PcdNameList:
if _SectionName != '' and _SectionName.upper() != ItemList[0].upper():
Logger.Error('Parser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_SECTION_NAME_DUPLICATE,
File=FileName,
Line=LineNo,
ExtraData=SectionString)
elif _PcdNameList[1] in [_SectionName.upper(), ItemList[0].upper()] and \
(_SectionName.upper()!= ItemList[0].upper()):
Logger.Error('Parser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR % (""),
File=FileName,
Line=LineNo,
ExtraData=SectionString)
_SectionName = ItemList[0]
if _SectionName.upper() in gINF_SECTION_DEF:
self._SectionType = gINF_SECTION_DEF[_SectionName.upper()]
else:
self._SectionType = DT.MODEL_UNKNOWN
Logger.Error("Parser",
FORMAT_INVALID,
ST.ERR_INF_PARSER_UNKNOWN_SECTION,
File=FileName,
Line=LineNo,
ExtraData=SectionString)
#
# Get Arch
#
Str1, ArchList = GetArch(ItemList, ArchList, FileName, LineNo, SectionString)
#
# For [Defines] section, do special check.
#
if ItemList[0].upper() == DT.TAB_COMMON_DEFINES.upper():
if len(ItemList) != 1:
Logger.Error('Parser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID % (SectionString),
File=FileName, Line=LineNo, ExtraData=SectionString)
#
# For [UserExtension] section, do special check.
#
if ItemList[0].upper() == DT.TAB_USER_EXTENSIONS.upper():
RetValue = ProcessUseExtHeader(ItemList)
if not RetValue[0]:
Logger.Error('Parser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID % (SectionString),
File=FileName, Line=LineNo, ExtraData=SectionString)
else:
ItemList = RetValue[1]
if len(ItemList) == 3:
ItemList.append('COMMON')
Str1 = ItemList[1]
#
# For Library classes, need to check module type.
#
if ItemList[0].upper() == DT.TAB_LIBRARY_CLASSES.upper() and len(ItemList) == 3:
if ItemList[2] != '':
ModuleTypeList = GetSplitValueList(ItemList[2], DT.TAB_VALUE_SPLIT)
for Item in ModuleTypeList:
if Item.strip() not in DT.MODULE_LIST:
Logger.Error('Parser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_DEFINE_MODULETYPE_INVALID % (Item),
File=FileName,
Line=LineNo,
ExtraData=SectionString)
#
# GetSpecialStr2
#
Str2 = GetSpecialStr2(ItemList, FileName, LineNo, SectionString)
_Scope.append([Str1, Str2])
_NewValueList = []
_AppendFlag = True
if _SectionName.upper() in _PcdNameList:
for ValueItem in _ValueList:
if _SectionName.upper() == ValueItem[0].upper() and Str1.upper() not in ValueItem[1].split():
ValueItem[1] = ValueItem[1] + " " + Str1
_AppendFlag = False
elif _SectionName.upper() == ValueItem[0].upper() and Str1.upper() in ValueItem[1].split():
_AppendFlag = False
_NewValueList.append(ValueItem)
_ValueList = _NewValueList
if _AppendFlag:
if not ItemList[0].upper() == DT.TAB_USER_EXTENSIONS.upper():
_ValueList.append([_SectionName, Str1, Str2, LineNo])
else:
if len(ItemList) == 4:
_ValueList.append([_SectionName, Str1, Str2, ItemList[3], LineNo])
self.SectionHeaderContent = deepcopy(_ValueList)
## GenSpecialSectionList
#
# @param SpecialSectionList: a list of lists; each item has the format
# (Comment, LineNum)
# @param ContainerFile: the file name of the INF file
#
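# Illustrative example (hypothetical comment block): a HOB special comment
# whose first line is "# [Hob.IA32, Hob.X64]" yields ArchList
# ['IA32', 'X64'] for the generated InfHobObject.
#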
def InfSpecialCommentParser (self, SpecialSectionList, InfSectionObject, ContainerFile, SectionType):
ReFindSpecialCommentRe = re.compile(r"""#(?:\s*)\[(.*?)\](?:.*)""", re.DOTALL)
ReFindHobArchRe = re.compile(r"""[Hh][Oo][Bb]\.([^,]*)""", re.DOTALL)
if self.FileName:
pass
SpecialObjectList = []
ArchList = []
if SectionType == DT.TYPE_EVENT_SECTION:
TokenDict = DT.EVENT_TOKENS
elif SectionType == DT.TYPE_HOB_SECTION:
TokenDict = DT.HOB_TOKENS
else:
TokenDict = DT.BOOTMODE_TOKENS
for List in SpecialSectionList:
#
# HOB comments carry an Arch attribute and need special handling here
#
if SectionType == DT.TYPE_HOB_SECTION:
MatchObject = ReFindSpecialCommentRe.search(List[0][0])
HobSectionStr = MatchObject.group(1)
ArchList = []
for Match in ReFindHobArchRe.finditer(HobSectionStr):
Arch = Match.groups(1)[0].upper()
ArchList.append(Arch)
CommentSoFar = ''
for Index in range(1, len(List)):
Result = ParseComment(List[Index], DT.ALL_USAGE_TOKENS, TokenDict, [], False)
Usage = Result[0]
Type = Result[1]
HelpText = Result[3]
if Usage == DT.ITEM_UNDEFINED and Type == DT.ITEM_UNDEFINED:
if HelpText is None:
HelpText = ''
if not HelpText.endswith('\n'):
HelpText += '\n'
CommentSoFar += HelpText
else:
if HelpText:
CommentSoFar += HelpText
if SectionType == DT.TYPE_EVENT_SECTION:
SpecialObject = InfEventObject()
SpecialObject.SetEventType(Type)
SpecialObject.SetUsage(Usage)
SpecialObject.SetHelpString(CommentSoFar)
elif SectionType == DT.TYPE_HOB_SECTION:
SpecialObject = InfHobObject()
SpecialObject.SetHobType(Type)
SpecialObject.SetUsage(Usage)
SpecialObject.SetHelpString(CommentSoFar)
if len(ArchList) >= 1:
SpecialObject.SetSupArchList(ArchList)
else:
SpecialObject = InfBootModeObject()
SpecialObject.SetSupportedBootModes(Type)
SpecialObject.SetUsage(Usage)
SpecialObject.SetHelpString(CommentSoFar)
SpecialObjectList.append(SpecialObject)
CommentSoFar = ''
if not InfSectionObject.SetSpecialComments(SpecialObjectList,
SectionType):
Logger.Error('InfParser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR % (SectionType),
ContainerFile
)
| edk2-master | BaseTools/Source/Python/UPT/Parser/InfSectionParser.py |
## @file
# This file contains the parser for [Packages] sections in an INF file
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
InfPackageSectionParser
'''
##
# Import Modules
#
import Logger.Log as Logger
from Logger import StringTable as ST
from Logger.ToolError import FORMAT_INVALID
from Parser.InfParserMisc import InfExpandMacro
from Library import DataType as DT
from Library.Parsing import MacroParser
from Library.Misc import GetSplitValueList
from Object.Parser.InfCommonObject import InfLineCommentObject
from Parser.InfParserMisc import InfParserSectionRoot
class InfPackageSectionParser(InfParserSectionRoot):
## InfPackageParser
#
#
def InfPackageParser(self, SectionString, InfSectionObject, FileName):
#
# Macro defined in this section
#
SectionMacros = {}
ValueList = []
PackageList = []
StillCommentFalg = False
HeaderComments = []
LineComment = None
#
# Parse section content
#
for Line in SectionString:
PkgLineContent = Line[0]
PkgLineNo = Line[1]
if PkgLineContent.strip() == '':
continue
#
# Find Header Comments
#
if PkgLineContent.strip().startswith(DT.TAB_COMMENT_SPLIT):
#
# The previous line was a comment and this line continues it.
#
if StillCommentFalg:
HeaderComments.append(Line)
continue
#
# First comment line encountered
#
else:
#
# Clear original data
#
HeaderComments = []
HeaderComments.append(Line)
StillCommentFalg = True
continue
else:
StillCommentFalg = False
if len(HeaderComments) >= 1:
LineComment = InfLineCommentObject()
LineCommentContent = ''
for Item in HeaderComments:
LineCommentContent += Item[0] + DT.END_OF_LINE
LineComment.SetHeaderComments(LineCommentContent)
#
# Find Tail comment.
#
if PkgLineContent.find(DT.TAB_COMMENT_SPLIT) > -1:
TailComments = PkgLineContent[PkgLineContent.find(DT.TAB_COMMENT_SPLIT):]
PkgLineContent = PkgLineContent[:PkgLineContent.find(DT.TAB_COMMENT_SPLIT)]
if LineComment is None:
LineComment = InfLineCommentObject()
LineComment.SetTailComments(TailComments)
#
# Find Macro
#
Name, Value = MacroParser((PkgLineContent, PkgLineNo),
FileName,
DT.MODEL_META_DATA_PACKAGE,
self.FileLocalMacros)
if Name is not None:
SectionMacros[Name] = Value
LineComment = None
HeaderComments = []
continue
TokenList = GetSplitValueList(PkgLineContent, DT.TAB_VALUE_SPLIT, 1)
ValueList[0:len(TokenList)] = TokenList
#
# Replace with Local section Macro and [Defines] section Macro.
#
ValueList = [InfExpandMacro(Value, (FileName, PkgLineContent, PkgLineNo),
self.FileLocalMacros, SectionMacros, True)
for Value in ValueList]
PackageList.append((ValueList, LineComment,
(PkgLineContent, PkgLineNo, FileName)))
ValueList = []
LineComment = None
TailComments = ''
HeaderComments = []
continue
#
# Current section archs
#
ArchList = []
for Item in self.LastSectionHeaderContent:
if Item[1] not in ArchList:
ArchList.append(Item[1])
if not InfSectionObject.SetPackages(PackageList, Arch = ArchList):
Logger.Error('InfParser',
FORMAT_INVALID,
ST.ERR_INF_PARSER_MODULE_SECTION_TYPE_ERROR\
%("[Packages]"),
File=FileName,
Line=Item[3])
| edk2-master | BaseTools/Source/Python/UPT/Parser/InfPackageSectionParser.py |
## @file
# This file contains the parser for Defines sections in an INF file
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
InfDefineSectionParser
'''
##
# Import Modules
#
import re
from Library import DataType as DT
from Library import GlobalData
from Library.Parsing import MacroParser
from Library.Misc import GetSplitValueList
from Library.ParserValidate import IsValidArch
from Object.Parser.InfCommonObject import InfLineCommentObject
from Object.Parser.InfDefineObject import InfDefMember
from Parser.InfParserMisc import InfExpandMacro
from Object.Parser.InfMisc import ErrorInInf
from Logger import StringTable as ST
from Parser.InfParserMisc import InfParserSectionRoot
## GetValidateArchList
#
#
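# Illustrative example (hypothetical INF line):
#   GetValidateArchList('#  VALID_ARCHITECTURES = IA32 X64 EBC')
# returns ['IA32', 'X64', 'EBC']; a line that does not match the
# VALID_ARCHITECTURES pattern yields an empty list.
#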
def GetValidateArchList(LineContent):
TempArch = ''
ArchList = []
ValidateAcrhPatten = re.compile(r"^\s*#\s*VALID_ARCHITECTURES\s*=\s*.*$", re.DOTALL)
if ValidateAcrhPatten.match(LineContent):
TempArch = GetSplitValueList(LineContent, DT.TAB_EQUAL_SPLIT, 1)[1]
TempArch = GetSplitValueList(TempArch, '(', 1)[0]
ArchList = re.split(r'\s+', TempArch)
NewArchList = []
for Arch in ArchList:
if IsValidArch(Arch):
NewArchList.append(Arch)
ArchList = NewArchList
return ArchList
class InfDefinSectionParser(InfParserSectionRoot):
def InfDefineParser(self, SectionString, InfSectionObject, FileName, SectionComment):
if SectionComment:
pass
#
# Parse the Defines section content and fill the content list.
#
StillCommentFalg = False
HeaderComments = []
SectionContent = ''
ArchList = []
_ContentList = []
_ValueList = []
#
# Add WORKSPACE to the global macro dict.
#
self.FileLocalMacros['WORKSPACE'] = GlobalData.gWORKSPACE
for Line in SectionString:
LineContent = Line[0]
LineNo = Line[1]
TailComments = ''
LineComment = None
LineInfo = ['', -1, '']
LineInfo[0] = FileName
LineInfo[1] = LineNo
LineInfo[2] = LineContent
if LineContent.strip() == '':
continue
#
# The first VALID_ARCHITECTURES comment encountered is taken as the supported arch list.
#
if not ArchList:
ArchList = GetValidateArchList(LineContent)
#
# Parser Comment
#
if LineContent.strip().startswith(DT.TAB_COMMENT_SPLIT):
#
# The previous line was a comment and this line continues it.
#
if StillCommentFalg:
HeaderComments.append(Line)
SectionContent += LineContent + DT.END_OF_LINE
continue
#
# First comment line encountered
#
else:
#
# Clear original data
#
HeaderComments = []
HeaderComments.append(Line)
StillCommentFalg = True
SectionContent += LineContent + DT.END_OF_LINE
continue
else:
StillCommentFalg = False
if len(HeaderComments) >= 1:
LineComment = InfLineCommentObject()
LineCommentContent = ''
for Item in HeaderComments:
LineCommentContent += Item[0] + DT.END_OF_LINE
LineComment.SetHeaderComments(LineCommentContent)
#
# Find Tail comment.
#
if LineContent.find(DT.TAB_COMMENT_SPLIT) > -1:
TailComments = LineContent[LineContent.find(DT.TAB_COMMENT_SPLIT):]
LineContent = LineContent[:LineContent.find(DT.TAB_COMMENT_SPLIT)]
if LineComment is None:
LineComment = InfLineCommentObject()
LineComment.SetTailComments(TailComments)
#
# Find Macro
#
Name, Value = MacroParser((LineContent, LineNo),
FileName,
DT.MODEL_META_DATA_HEADER,
self.FileLocalMacros)
if Name is not None:
self.FileLocalMacros[Name] = Value
continue
#
# Replace with [Defines] section Macro
#
LineContent = InfExpandMacro(LineContent,
(FileName, LineContent, LineNo),
self.FileLocalMacros,
None, True)
SectionContent += LineContent + DT.END_OF_LINE
TokenList = GetSplitValueList(LineContent, DT.TAB_EQUAL_SPLIT, 1)
if len(TokenList) < 2:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_NO_VALUE,
LineInfo=LineInfo)
_ValueList[0:len(TokenList)] = TokenList
if not _ValueList[0]:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_NO_NAME,
LineInfo=LineInfo)
if not _ValueList[1]:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_NO_VALUE,
LineInfo=LineInfo)
Name, Value = _ValueList[0], _ValueList[1]
InfDefMemberObj = InfDefMember(Name, Value)
if (LineComment is not None):
InfDefMemberObj.Comments.SetHeaderComments(LineComment.GetHeaderComments())
InfDefMemberObj.Comments.SetTailComments(LineComment.GetTailComments())
InfDefMemberObj.CurrentLine.SetFileName(self.FullPath)
InfDefMemberObj.CurrentLine.SetLineString(LineContent)
InfDefMemberObj.CurrentLine.SetLineNo(LineNo)
_ContentList.append(InfDefMemberObj)
HeaderComments = []
TailComments = ''
#
# Current Define section archs
#
if not ArchList:
ArchList = ['COMMON']
InfSectionObject.SetAllContent(SectionContent)
InfSectionObject.SetDefines(_ContentList, Arch=ArchList)
| edk2-master | BaseTools/Source/Python/UPT/Parser/InfDefineSectionParser.py |
## @file
# This file is used to parse an XML file of a .PKG file
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
XmlParserMisc
'''
from Object.POM.CommonObject import TextObject
from Logger.StringTable import ERR_XML_PARSER_REQUIRED_ITEM_MISSING
from Logger.ToolError import PARSER_ERROR
import Logger.Log as Logger
## ConvertVariableName()
# Convert VariableName to an L"string".
# Input in UCS-2 hex-array format or already in C-style L"string" form can be converted
# successfully; anything else cannot.
#
# @param VariableName: the string to be converted
# @retval: the converted L-quoted string on success, otherwise None
#
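# Illustrative examples (hypothetical inputs):
#   ConvertVariableName('L"Setup"')    returns 'L"Setup"' unchanged
#   ConvertVariableName('53 00 65 00') returns 'L"Se"' (little-endian UCS-2)
#   ConvertVariableName('53 00 65')    returns None (odd number of bytes)
#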
def ConvertVariableName(VariableName):
VariableName = VariableName.strip()
#
# check for L quoted string
#
if VariableName.startswith('L"') and VariableName.endswith('"'):
return VariableName
#
# check for a hex array; it must be little-endian with an even number of hex bytes
#
ValueList = VariableName.split(' ')
if len(ValueList)%2 == 1:
return None
TransferedStr = ''
Index = 0
while Index < len(ValueList):
FirstByte = int(ValueList[Index], 16)
SecondByte = int(ValueList[Index + 1], 16)
if SecondByte != 0:
return None
if FirstByte not in range(0x20, 0x7F):
return None
TransferedStr += ('%c')%FirstByte
Index = Index + 2
return 'L"' + TransferedStr + '"'
## IsRequiredItemListNull
#
# Check if a required XML section item/attribute is NULL
#
# @param ItemDict: The dict of items to be checked
# @param XmlTreeLevel: The error message tree level
#
def IsRequiredItemListNull(ItemDict, XmlTreeLevel):
for Key in ItemDict:
if not ItemDict[Key]:
Msg = "->".join(Node for Node in XmlTreeLevel)
ErrorMsg = ERR_XML_PARSER_REQUIRED_ITEM_MISSING % (Key, Msg)
Logger.Error('\nUPT', PARSER_ERROR, ErrorMsg, RaiseError=True)
## Get help text
#
# @param HelpText
#
def GetHelpTextList(HelpText):
HelpTextList = []
for HelT in HelpText:
HelpTextObj = TextObject()
HelpTextObj.SetLang(HelT.Lang)
HelpTextObj.SetString(HelT.HelpText)
HelpTextList.append(HelpTextObj)
return HelpTextList
## Get Prompt text
#
# @param Prompt
#
def GetPromptList(Prompt):
PromptList = []
for SubPrompt in Prompt:
PromptObj = TextObject()
PromptObj.SetLang(SubPrompt.Lang)
PromptObj.SetString(SubPrompt.Prompt)
PromptList.append(PromptObj)
return PromptList
| edk2-master | BaseTools/Source/Python/UPT/Xml/XmlParserMisc.py |
## @file
# This file is used to parse the common XML sections of a .PKG file
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
CommonXml
'''
##
# Import Modules
#
from Core.DistributionPackageClass import DistributionPackageHeaderObject
from Library.StringUtils import ConvertNEToNOTEQ
from Library.StringUtils import ConvertNOTEQToNE
from Library.StringUtils import GetSplitValueList
from Library.StringUtils import GetStringOfList
from Library.Xml.XmlRoutines import XmlElement
from Library.Xml.XmlRoutines import XmlElement2
from Library.Xml.XmlRoutines import XmlAttribute
from Library.Xml.XmlRoutines import XmlNode
from Library.Xml.XmlRoutines import XmlList
from Library.Xml.XmlRoutines import CreateXmlElement
from Library.UniClassObject import ConvertSpecialUnicodes
from Library.UniClassObject import GetLanguageCode1766
from Object.POM.CommonObject import FileObject
from Object.POM.CommonObject import MiscFileObject
from Object.POM.CommonObject import UserExtensionObject
from Object.POM.CommonObject import ClonedRecordObject
from Object.POM.CommonObject import LibraryClassObject
from Object.POM.CommonObject import FileNameObject
from Object.POM.ModuleObject import ModuleObject
from Xml.XmlParserMisc import IsRequiredItemListNull
from Xml.XmlParserMisc import GetHelpTextList
import Library.DataType as DataType
##
# ClonedFromXml
#
class ClonedFromXml(object):
def __init__(self):
self.GUID = ''
self.Version = ''
def FromXml(self, Item, Key):
self.GUID = XmlElement(Item, '%s/GUID' % Key)
self.Version = XmlAttribute(XmlNode(Item, '%s/GUID' % Key), 'Version')
if self.GUID == '' and self.Version == '':
return None
ClonedFrom = ClonedRecordObject()
ClonedFrom.SetPackageGuid(self.GUID)
ClonedFrom.SetPackageVersion(self.Version)
return ClonedFrom
def ToXml(self, ClonedFrom, Key):
if self.GUID:
pass
Element1 = CreateXmlElement('GUID', ClonedFrom.GetPackageGuid(), [],
[['Version', ClonedFrom.GetPackageVersion()]])
AttributeList = []
NodeList = [Element1]
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
return Root
def __str__(self):
return "GUID = %s Version = %s" % (self.GUID, self.Version)
##
# CommonDefinesXml
#
class CommonDefinesXml(object):
def __init__(self):
self.Usage = ''
self.SupArchList = []
self.SupModList = []
self.FeatureFlag = ''
def FromXml(self, Item, Key):
if Key:
pass
self.Usage = XmlAttribute(Item, 'Usage')
self.SupArchList = \
[Arch for Arch in GetSplitValueList(XmlAttribute(Item, 'SupArchList'), DataType.TAB_SPACE_SPLIT) if Arch]
self.SupModList = \
[Mod for Mod in GetSplitValueList(XmlAttribute(Item, 'SupModList'), DataType.TAB_SPACE_SPLIT) if Mod]
self.FeatureFlag = ConvertNOTEQToNE(XmlAttribute(Item, 'FeatureFlag'))
def ToXml(self):
pass
def __str__(self):
return "Usage = %s SupArchList = %s SupModList = %s FeatureFlag = %s" \
% (self.Usage, self.SupArchList, self.SupModList, self.FeatureFlag)
##
# PromptXml
#
class PromptXml(object):
def __init__(self):
self.Prompt = ''
self.Lang = ''
def FromXml(self, Item, Key):
if Key:
pass
self.Prompt = XmlElement2(Item, 'Prompt')
self.Lang = XmlAttribute(Item, 'Lang')
def ToXml(self, Prompt, Key='Prompt'):
if self.Prompt:
pass
return CreateXmlElement('%s' % Key, Prompt.GetString(), [], [['Lang', Prompt.GetLang()]])
def __str__(self):
return "Prompt = %s Lang = %s" % (self.Prompt, self.Lang)
##
# HelpTextXml
#
class HelpTextXml(object):
def __init__(self):
self.HelpText = ''
self.Lang = ''
def FromXml(self, Item, Key):
if Key:
pass
self.HelpText = XmlElement2(Item, 'HelpText')
self.Lang = XmlAttribute(Item, 'Lang')
def ToXml(self, HelpText, Key='HelpText'):
if self.HelpText:
pass
return CreateXmlElement('%s' % Key, HelpText.GetString(), [], [['Lang', HelpText.GetLang()]])
def __str__(self):
return "HelpText = %s Lang = %s" % (self.HelpText, self.Lang)
##
# HeaderXml
#
class HeaderXml(object):
def __init__(self):
self.Name = ''
self.BaseName = ''
self.GUID = ''
self.Version = ''
self.CopyrightList = []
self.LicenseList = []
self.AbstractList = []
self.DescriptionList = []
def FromXml(self, Item, Key, IsRequiredCheck=False, IsStandAlongModule=False):
if not Item and IsRequiredCheck:
XmlTreeLevel = []
if IsStandAlongModule:
XmlTreeLevel = ['DistributionPackage', 'ModuleSurfaceArea']
else:
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'ModuleSurfaceArea']
CheckDict = {'Header':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
self.Name = XmlElement(Item, '%s/Name' % Key)
self.BaseName = XmlAttribute(XmlNode(Item, '%s/Name' % Key), 'BaseName')
self.GUID = XmlElement(Item, '%s/GUID' % Key)
self.Version = XmlAttribute(XmlNode(Item, '%s/GUID' % Key), 'Version')
for SubItem in XmlList(Item, '%s/Abstract' % Key):
HeaderAbstractLang = XmlAttribute(SubItem, 'Lang')
self.AbstractList.append((HeaderAbstractLang, XmlElement(SubItem, '%s/Abstract' % Key)))
for SubItem in XmlList(Item, '%s/Description' % Key):
HeaderDescriptionLang = XmlAttribute(SubItem, 'Lang')
self.DescriptionList.append((HeaderDescriptionLang, XmlElement(SubItem, '%s/Description' % Key)))
for SubItem in XmlList(Item, '%s/Copyright' % Key):
HeaderCopyrightLang = XmlAttribute(SubItem, 'Lang')
self.CopyrightList.append((HeaderCopyrightLang, XmlElement(SubItem, '%s/Copyright' % Key)))
for SubItem in XmlList(Item, '%s/License' % Key):
HeaderLicenseLang = XmlAttribute(SubItem, 'Lang')
self.LicenseList.append((HeaderLicenseLang, XmlElement(SubItem, '%s/License' % Key)))
ModuleHeader = ModuleObject()
ModuleHeader.SetName(self.Name)
ModuleHeader.SetBaseName(self.BaseName)
ModuleHeader.SetGuid(self.GUID)
ModuleHeader.SetVersion(self.Version)
ModuleHeader.SetCopyright(self.CopyrightList)
ModuleHeader.SetLicense(self.LicenseList)
ModuleHeader.SetAbstract(self.AbstractList)
ModuleHeader.SetDescription(self.DescriptionList)
return ModuleHeader
def ToXml(self, Header, Key):
if self.GUID:
pass
Element1 = CreateXmlElement('Name', Header.GetName(), [], [['BaseName', Header.GetBaseName()]])
Element2 = CreateXmlElement('GUID', Header.GetGuid(), [], [['Version', Header.GetVersion()]])
NodeList = [Element1,
Element2,
]
UNIInfAbstractList = []
UNIInfDescriptionList = []
# Get Abstract and Description from Uni File
# if the Uni File exists
if Header.UniFileClassObject is not None:
UniStrDict = Header.UniFileClassObject.OrderedStringList
for Lang in UniStrDict:
for StringDefClassObject in UniStrDict[Lang]:
if not StringDefClassObject.StringValue:
continue
if StringDefClassObject.StringName == DataType.TAB_INF_ABSTRACT:
UNIInfAbstractList.append((GetLanguageCode1766(Lang),
ConvertSpecialUnicodes(StringDefClassObject.StringValue)))
if StringDefClassObject.StringName == DataType.TAB_INF_DESCRIPTION:
UNIInfDescriptionList.append((GetLanguageCode1766(Lang),
ConvertSpecialUnicodes(StringDefClassObject.StringValue)))
# Get Abstract and Description from INF File Header
for (Lang, Value) in Header.GetCopyright():
if Value:
NodeList.append(CreateXmlElement('Copyright', Value, [], []))
for (Lang, Value) in Header.GetLicense():
if Value:
NodeList.append(CreateXmlElement('License', Value, [], []))
for (Lang, Value) in Header.GetAbstract() + UNIInfAbstractList:
if Value:
NodeList.append(CreateXmlElement('Abstract', Value, [], [['Lang', Lang]]))
for (Lang, Value) in Header.GetDescription() + UNIInfDescriptionList:
if Value:
NodeList.append(CreateXmlElement('Description', Value, [], [['Lang', Lang]]))
AttributeList = []
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
return Root
def __str__(self):
return "Name = %s BaseName = %s GUID = %s Version = %s Copyright = %s \
License = %s Abstract = %s Description = %s" % \
(self.Name, self.BaseName, self.GUID, self.Version, self.CopyrightList, \
self.LicenseList, self.AbstractList, self.DescriptionList)
##
# DistributionPackageHeaderXml
#
class DistributionPackageHeaderXml(object):
def __init__(self):
self.Header = HeaderXml()
self.ReadOnly = ''
self.RePackage = ''
self.Vendor = ''
self.Date = ''
self.Signature = ''
self.XmlSpecification = ''
def FromXml(self, Item, Key):
if not Item:
return None
self.ReadOnly = XmlAttribute(XmlNode(Item, '%s' % Key), 'ReadOnly')
self.RePackage = XmlAttribute(XmlNode(Item, '%s' % Key), 'RePackage')
self.Vendor = XmlElement(Item, '%s/Vendor' % Key)
self.Date = XmlElement(Item, '%s/Date' % Key)
self.Signature = XmlElement(Item, '%s/Signature' % Key)
self.XmlSpecification = XmlElement(Item, '%s/XmlSpecification' % Key)
self.Header.FromXml(Item, Key)
DistributionPackageHeader = DistributionPackageHeaderObject()
if self.ReadOnly.upper() == 'TRUE':
DistributionPackageHeader.ReadOnly = True
elif self.ReadOnly.upper() == 'FALSE':
DistributionPackageHeader.ReadOnly = False
if self.RePackage.upper() == 'TRUE':
DistributionPackageHeader.RePackage = True
elif self.RePackage.upper() == 'FALSE':
DistributionPackageHeader.RePackage = False
DistributionPackageHeader.Vendor = self.Vendor
DistributionPackageHeader.Date = self.Date
DistributionPackageHeader.Signature = self.Signature
DistributionPackageHeader.XmlSpecification = self.XmlSpecification
DistributionPackageHeader.SetName(self.Header.Name)
DistributionPackageHeader.SetBaseName(self.Header.BaseName)
DistributionPackageHeader.SetGuid(self.Header.GUID)
DistributionPackageHeader.SetVersion(self.Header.Version)
DistributionPackageHeader.SetCopyright(self.Header.CopyrightList)
DistributionPackageHeader.SetLicense(self.Header.LicenseList)
DistributionPackageHeader.SetAbstract(self.Header.AbstractList)
DistributionPackageHeader.SetDescription(self.Header.DescriptionList)
return DistributionPackageHeader
def ToXml(self, DistributionPackageHeader, Key):
if self.Header:
pass
Element1 = CreateXmlElement('Name', \
DistributionPackageHeader.GetName(), [], \
[['BaseName', \
DistributionPackageHeader.GetBaseName()]])
Element2 = CreateXmlElement('GUID', \
DistributionPackageHeader.GetGuid(), [], \
[['Version', \
DistributionPackageHeader.GetVersion()]])
AttributeList = []
if DistributionPackageHeader.ReadOnly != '':
AttributeList.append(['ReadOnly', str(DistributionPackageHeader.ReadOnly).lower()])
if DistributionPackageHeader.RePackage != '':
AttributeList.append(['RePackage', str(DistributionPackageHeader.RePackage).lower()])
if DistributionPackageHeader.GetAbstract():
DPAbstract = DistributionPackageHeader.GetAbstract()[0][1]
else:
DPAbstract = ''
if DistributionPackageHeader.GetDescription():
DPDescription = DistributionPackageHeader.GetDescription()[0][1]
else:
DPDescription = ''
if DistributionPackageHeader.GetCopyright():
DPCopyright = DistributionPackageHeader.GetCopyright()[0][1]
else:
DPCopyright = ''
if DistributionPackageHeader.GetLicense():
DPLicense = DistributionPackageHeader.GetLicense()[0][1]
else:
DPLicense = ''
NodeList = [Element1,
Element2,
['Vendor', DistributionPackageHeader.Vendor],
['Date', DistributionPackageHeader.Date],
['Copyright', DPCopyright],
['License', DPLicense],
['Abstract', DPAbstract],
['Description', DPDescription],
['Signature', DistributionPackageHeader.Signature],
['XmlSpecification', \
DistributionPackageHeader.XmlSpecification],
]
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
return Root
def __str__(self):
return "ReadOnly = %s RePackage = %s Vendor = %s Date = %s \
Signature = %s XmlSpecification = %s %s" % \
(self.ReadOnly, self.RePackage, self.Vendor, self.Date, \
self.Signature, self.XmlSpecification, self.Header)
##
# PackageHeaderXml
#
class PackageHeaderXml(object):
def __init__(self):
self.Header = HeaderXml()
self.PackagePath = ''
def FromXml(self, Item, Key, PackageObject2):
if not Item:
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea']
CheckDict = {'PackageHeader': None, }
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
self.PackagePath = XmlElement(Item, '%s/PackagePath' % Key)
self.Header.FromXml(Item, Key)
PackageObject2.SetName(self.Header.Name)
PackageObject2.SetBaseName(self.Header.BaseName)
PackageObject2.SetGuid(self.Header.GUID)
PackageObject2.SetVersion(self.Header.Version)
PackageObject2.SetCopyright(self.Header.CopyrightList)
PackageObject2.SetLicense(self.Header.LicenseList)
PackageObject2.SetAbstract(self.Header.AbstractList)
PackageObject2.SetDescription(self.Header.DescriptionList)
PackageObject2.SetPackagePath(self.PackagePath)
def ToXml(self, PackageObject2, Key):
if self.PackagePath:
pass
Element1 = CreateXmlElement('Name', PackageObject2.GetName(), [], \
[['BaseName', PackageObject2.GetBaseName()]])
Element2 = CreateXmlElement('GUID', PackageObject2.GetGuid(), [], \
[['Version', PackageObject2.GetVersion()]])
NodeList = [Element1,
Element2
]
UNIPackageAbrstractList = []
UNIPackageDescriptionList = []
# Get Abstract and Description from Uni File
# if the Uni File exists
if PackageObject2.UniFileClassObject is not None:
UniStrDict = PackageObject2.UniFileClassObject.OrderedStringList
for Lang in UniStrDict:
for StringDefClassObject in UniStrDict[Lang]:
if not StringDefClassObject.StringValue:
continue
if StringDefClassObject.StringName == DataType.TAB_DEC_PACKAGE_ABSTRACT:
UNIPackageAbrstractList.append((GetLanguageCode1766(Lang),
ConvertSpecialUnicodes(StringDefClassObject.StringValue)))
if StringDefClassObject.StringName == DataType.TAB_DEC_PACKAGE_DESCRIPTION:
UNIPackageDescriptionList.append((GetLanguageCode1766(Lang),
ConvertSpecialUnicodes(StringDefClassObject.StringValue)))
# Get Abstract and Description from DEC File Header
for (Lang, Value) in PackageObject2.GetCopyright():
if Value:
NodeList.append(CreateXmlElement(DataType.TAB_HEADER_COPYRIGHT, Value, [], []))
for (Lang, Value) in PackageObject2.GetLicense():
if Value:
NodeList.append(CreateXmlElement(DataType.TAB_HEADER_LICENSE, Value, [], []))
for (Lang, Value) in PackageObject2.GetAbstract() + UNIPackageAbrstractList:
if Value:
NodeList.append(CreateXmlElement(DataType.TAB_HEADER_ABSTRACT, Value, [], [['Lang', Lang]]))
for (Lang, Value) in PackageObject2.GetDescription() + UNIPackageDescriptionList:
if Value:
NodeList.append(CreateXmlElement(DataType.TAB_HEADER_DESCRIPTION, Value, [], [['Lang', Lang]]))
NodeList.append(['PackagePath', PackageObject2.GetPackagePath()])
AttributeList = []
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
return Root
def __str__(self):
return "PackagePath = %s %s" \
% (self.PackagePath, self.Header)
##
# MiscellaneousFileXml
#
class MiscellaneousFileXml(object):
def __init__(self):
self.Header = HeaderXml()
self.Files = []
##
# This API is used for Package or Module's MiscellaneousFile section
#
def FromXml(self, Item, Key):
if not Item:
return None
self.Header.FromXml(Item, Key)
NewItem = XmlNode(Item, '%s/Header' % Key)
self.Header.FromXml(NewItem, 'Header')
for SubItem in XmlList(Item, '%s/Filename' % Key):
Filename = XmlElement(SubItem, '%s/Filename' % Key)
Executable = XmlAttribute(XmlNode(SubItem, '%s/Filename' % Key), 'Executable')
if Executable.upper() == "TRUE":
Executable = True
elif Executable.upper() == "FALSE":
Executable = False
else:
Executable = ''
self.Files.append([Filename, Executable])
MiscFile = MiscFileObject()
MiscFile.SetCopyright(self.Header.CopyrightList)
MiscFile.SetLicense(self.Header.LicenseList)
MiscFile.SetAbstract(self.Header.AbstractList)
MiscFile.SetDescription(self.Header.DescriptionList)
MiscFileList = []
for File in self.Files:
FileObj = FileObject()
FileObj.SetURI(File[0])
FileObj.SetExecutable(File[1])
MiscFileList.append(FileObj)
MiscFile.SetFileList(MiscFileList)
return MiscFile
##
# This API is used for DistP's tool section
#
def FromXml2(self, Item, Key):
if Item is None:
return None
NewItem = XmlNode(Item, '%s/Header' % Key)
self.Header.FromXml(NewItem, 'Header')
for SubItem in XmlList(Item, '%s/Filename' % Key):
Filename = XmlElement(SubItem, '%s/Filename' % Key)
Executable = \
XmlAttribute(XmlNode(SubItem, '%s/Filename' % Key), 'Executable')
OsType = XmlAttribute(XmlNode(SubItem, '%s/Filename' % Key), 'OS')
if Executable.upper() == "TRUE":
Executable = True
elif Executable.upper() == "FALSE":
Executable = False
else:
Executable = ''
self.Files.append([Filename, Executable, OsType])
MiscFile = MiscFileObject()
MiscFile.SetName(self.Header.Name)
MiscFile.SetCopyright(self.Header.CopyrightList)
MiscFile.SetLicense(self.Header.LicenseList)
MiscFile.SetAbstract(self.Header.AbstractList)
MiscFile.SetDescription(self.Header.DescriptionList)
MiscFileList = []
for File in self.Files:
FileObj = FileObject()
FileObj.SetURI(File[0])
FileObj.SetExecutable(File[1])
FileObj.SetOS(File[2])
MiscFileList.append(FileObj)
MiscFile.SetFileList(MiscFileList)
return MiscFile
##
# This API is used for Package or Module's MiscellaneousFile section
#
def ToXml(self, MiscFile, Key):
if self.Header:
pass
if MiscFile:
if MiscFile.GetAbstract():
DPAbstract = MiscFile.GetAbstract()[0][1]
else:
DPAbstract = ''
if MiscFile.GetDescription():
DPDescription = MiscFile.GetDescription()[0][1]
else:
DPDescription = ''
if MiscFile.GetCopyright():
DPCopyright = MiscFile.GetCopyright()[0][1]
else:
DPCopyright = ''
if MiscFile.GetLicense():
DPLicense = MiscFile.GetLicense()[0][1]
else:
DPLicense = ''
NodeList = [['Copyright', DPCopyright],
['License', DPLicense],
['Abstract', DPAbstract],
['Description', DPDescription],
]
for File in MiscFile.GetFileList():
NodeList.append\
(CreateXmlElement\
('Filename', File.GetURI(), [], \
[['Executable', str(File.GetExecutable()).lower()]]))
Root = CreateXmlElement('%s' % Key, '', NodeList, [])
return Root
##
# This API is used for DistP's tool section
#
def ToXml2(self, MiscFile, Key):
if self.Header:
pass
if MiscFile:
if MiscFile.GetAbstract():
DPAbstract = MiscFile.GetAbstract()[0][1]
else:
DPAbstract = ''
if MiscFile.GetDescription():
DPDescription = MiscFile.GetDescription()[0][1]
else:
DPDescription = ''
if MiscFile.GetCopyright():
DPCopyright = MiscFile.GetCopyright()[0][1]
else:
DPCopyright = ''
if MiscFile.GetLicense():
DPLicense = MiscFile.GetLicense()[0][1]
else:
DPLicense = ''
NodeList = [['Name', MiscFile.GetName()],
['Copyright', DPCopyright],
['License', DPLicense],
['Abstract', DPAbstract],
['Description', DPDescription],
]
HeaderNode = CreateXmlElement('Header', '', NodeList, [])
NodeList = [HeaderNode]
for File in MiscFile.GetFileList():
NodeList.append\
(CreateXmlElement\
('Filename', File.GetURI(), [], \
[['Executable', str(File.GetExecutable()).lower()], \
['OS', File.GetOS()]]))
Root = CreateXmlElement('%s' % Key, '', NodeList, [])
return Root
def __str__(self):
Str = str(self.Header)
for Item in self.Files:
Str = Str + '\n\tFilename:' + str(Item)
return Str
##
# UserExtensionsXml
#
class UserExtensionsXml(object):
def __init__(self):
self.UserId = ''
self.Identifier = ''
self.BinaryAbstractList = []
self.BinaryDescriptionList = []
self.BinaryCopyrightList = []
self.BinaryLicenseList = []
self.LangDefsList = []
self.DefineDict = {}
self.BuildOptionDict = {}
self.IncludesDict = {}
self.SourcesDict = {}
self.BinariesDict = {}
self.SupArchList = []
self.Statement = ''
self.Defines = ''
self.BuildOptions = ''
def FromXml2(self, Item, Key):
self.UserId = XmlAttribute(XmlNode(Item, '%s' % Key), 'UserId')
self.Identifier = XmlAttribute(XmlNode(Item, '%s' % Key), 'Identifier')
UserExtension = UserExtensionObject()
UserExtension.SetUserID(self.UserId)
UserExtension.SetIdentifier(self.Identifier)
return UserExtension
def FromXml(self, Item, Key):
self.UserId = XmlAttribute(XmlNode(Item, '%s' % Key), 'UserId')
self.Identifier = XmlAttribute(XmlNode(Item, '%s' % Key), 'Identifier')
if self.UserId == DataType.TAB_BINARY_HEADER_USERID \
and self.Identifier == DataType.TAB_BINARY_HEADER_IDENTIFIER:
for SubItem in XmlList(Item, '%s/BinaryAbstract' % Key):
BinaryAbstractLang = XmlAttribute(SubItem, 'Lang')
self.BinaryAbstractList.append((BinaryAbstractLang, XmlElement(SubItem, '%s/BinaryAbstract' % Key)))
for SubItem in XmlList(Item, '%s/BinaryDescription' % Key):
BinaryDescriptionLang = XmlAttribute(SubItem, 'Lang')
self.BinaryDescriptionList.append((BinaryDescriptionLang,
XmlElement(SubItem, '%s/BinaryDescription' % Key)))
for SubItem in XmlList(Item, '%s/BinaryCopyright' % Key):
BinaryCopyrightLang = XmlAttribute(SubItem, 'Lang')
self.BinaryCopyrightList.append((BinaryCopyrightLang,
XmlElement(SubItem, '%s/BinaryCopyright' % Key)))
for SubItem in XmlList(Item, '%s/BinaryLicense' % Key):
BinaryLicenseLang = XmlAttribute(SubItem, 'Lang')
self.BinaryLicenseList.append((BinaryLicenseLang,
XmlElement(SubItem, '%s/BinaryLicense' % Key)))
DefineItem = XmlNode(Item, '%s/Define' % Key)
for SubItem in XmlList(DefineItem, 'Define/Statement'):
Statement = XmlElement(SubItem, '%s/Statement' % Key)
self.DefineDict[Statement] = ""
BuildOptionItem = XmlNode(Item, '%s/BuildOption' % Key)
for SubItem in XmlList(BuildOptionItem, 'BuildOption/Statement'):
Statement = XmlElement(SubItem, '%s/Statement' % Key)
Arch = XmlAttribute(XmlNode(SubItem, '%s/Statement' % Key), 'SupArchList')
self.BuildOptionDict[Arch] = Statement
IncludesItem = XmlNode(Item, '%s/Includes' % Key)
for SubItem in XmlList(IncludesItem, 'Includes/Statement'):
Statement = XmlElement(SubItem, '%s/Statement' % Key)
Arch = XmlAttribute(XmlNode(SubItem, '%s/Statement' % Key), 'SupArchList')
self.IncludesDict[Statement] = Arch
SourcesItem = XmlNode(Item, '%s/Sources' % Key)
Tmp = UserExtensionSourceXml()
SourceDict = Tmp.FromXml(SourcesItem, 'Sources')
self.SourcesDict = SourceDict
BinariesItem = XmlNode(Item, '%s/Binaries' % Key)
Tmp = UserExtensionBinaryXml()
BinariesDict = Tmp.FromXml(BinariesItem, 'Binaries')
self.BinariesDict = BinariesDict
self.Statement = XmlElement(Item, 'UserExtensions')
SupArch = XmlAttribute(XmlNode(Item, '%s' % Key), 'SupArchList')
self.SupArchList = [Arch for Arch in GetSplitValueList(SupArch, DataType.TAB_SPACE_SPLIT) if Arch]
UserExtension = UserExtensionObject()
UserExtension.SetUserID(self.UserId)
UserExtension.SetIdentifier(self.Identifier)
UserExtension.SetBinaryAbstract(self.BinaryAbstractList)
UserExtension.SetBinaryDescription(self.BinaryDescriptionList)
UserExtension.SetBinaryCopyright(self.BinaryCopyrightList)
UserExtension.SetBinaryLicense(self.BinaryLicenseList)
UserExtension.SetStatement(self.Statement)
UserExtension.SetSupArchList(self.SupArchList)
UserExtension.SetDefinesDict(self.DefineDict)
UserExtension.SetBuildOptionDict(self.BuildOptionDict)
UserExtension.SetIncludesDict(self.IncludesDict)
UserExtension.SetSourcesDict(self.SourcesDict)
UserExtension.SetBinariesDict(self.BinariesDict)
return UserExtension
def ToXml(self, UserExtension, Key):
if self.UserId:
pass
AttributeList = [['UserId', str(UserExtension.GetUserID())],
['Identifier', str(UserExtension.GetIdentifier())],
['SupArchList', \
GetStringOfList(UserExtension.GetSupArchList())],
]
Root = CreateXmlElement('%s' % Key, UserExtension.GetStatement(), [], \
AttributeList)
if UserExtension.GetIdentifier() == DataType.TAB_BINARY_HEADER_IDENTIFIER and \
UserExtension.GetUserID() == DataType.TAB_BINARY_HEADER_USERID:
for (Lang, Value) in UserExtension.GetBinaryAbstract():
if Value:
ChildElement = CreateXmlElement('BinaryAbstract', Value, [], [['Lang', Lang]])
Root.appendChild(ChildElement)
for (Lang, Value) in UserExtension.GetBinaryDescription():
if Value:
ChildElement = CreateXmlElement('BinaryDescription', Value, [], [['Lang', Lang]])
Root.appendChild(ChildElement)
for (Lang, Value) in UserExtension.GetBinaryCopyright():
if Value:
ChildElement = CreateXmlElement('BinaryCopyright', Value, [], [])
Root.appendChild(ChildElement)
for (Lang, Value) in UserExtension.GetBinaryLicense():
if Value:
ChildElement = CreateXmlElement('BinaryLicense', Value, [], [])
Root.appendChild(ChildElement)
NodeList = []
DefineDict = UserExtension.GetDefinesDict()
if DefineDict:
for Item in DefineDict.keys():
NodeList.append(CreateXmlElement\
('Statement', Item, [], []))
DefineElement = CreateXmlElement('Define', '', NodeList, [])
Root.appendChild(DefineElement)
NodeList = []
BuildOptionDict = UserExtension.GetBuildOptionDict()
if BuildOptionDict:
for Item in BuildOptionDict.keys():
NodeList.append(CreateXmlElement\
('Statement', BuildOptionDict[Item], [], \
[['SupArchList', Item]]))
BuildOptionElement = \
CreateXmlElement('BuildOption', '', NodeList, [])
Root.appendChild(BuildOptionElement)
NodeList = []
IncludesDict = UserExtension.GetIncludesDict()
if IncludesDict:
for Item in IncludesDict.keys():
NodeList.append(CreateXmlElement\
('Statement', Item, [], \
[['SupArchList', IncludesDict[Item]]]))
IncludesElement = CreateXmlElement('Includes', '', NodeList, [])
Root.appendChild(IncludesElement)
NodeList = []
SourcesDict = UserExtension.GetSourcesDict()
if SourcesDict:
Tmp = UserExtensionSourceXml()
Root.appendChild(Tmp.ToXml(SourcesDict, 'Sources'))
NodeList = []
BinariesDict = UserExtension.GetBinariesDict()
if BinariesDict:
Tmp = UserExtensionBinaryXml()
Root.appendChild(Tmp.ToXml(BinariesDict, 'Binaries'))
return Root
def __str__(self):
Str = "UserId = %s Identifier = %s" % (self.UserId, self.Identifier)
Str = Str + '\n\tDefines:' + str(self.Defines)
Str = Str + '\n\tBuildOptions:' + str(self.BuildOptions)
return Str
##
# UserExtensionSourceXml
#
class UserExtensionSourceXml(object):
def __init__(self):
self.UserExtensionSource = ''
def FromXml(self, Item, Key):
if Key:
pass
if self.UserExtensionSource:
pass
Dict = {}
#SourcesItem = XmlNode(Item, '%s/Sources' % Key)
for SubItem in XmlList(Item, 'Sources/SourceFile'):
FileName = XmlElement(SubItem, 'SourceFile/FileName')
Family = XmlElement(SubItem, 'SourceFile/Family')
FeatureFlag = XmlElement(SubItem, 'SourceFile/FeatureFlag')
SupArchStr = XmlElement(SubItem, 'SourceFile/SupArchList')
DictKey = (FileName, Family, FeatureFlag, SupArchStr)
ValueList = []
for ValueNodeItem in XmlList(SubItem, \
'SourceFile/SourceFileOtherAttr'):
TagName = XmlElement(ValueNodeItem, \
'SourceFileOtherAttr/TagName')
ToolCode = XmlElement(ValueNodeItem, \
'SourceFileOtherAttr/ToolCode')
Comment = XmlElement(ValueNodeItem, \
'SourceFileOtherAttr/Comment')
if (TagName == ' ') and (ToolCode == ' ') and (Comment == ' '):
TagName = ''
ToolCode = ''
Comment = ''
ValueList.append((TagName, ToolCode, Comment))
Dict[DictKey] = ValueList
return Dict
def ToXml(self, Dict, Key):
if self.UserExtensionSource:
pass
SourcesNodeList = []
for Item in Dict:
ValueList = Dict[Item]
(FileName, Family, FeatureFlag, SupArchStr) = Item
SourceFileNodeList = []
SourceFileNodeList.append(["FileName", FileName])
SourceFileNodeList.append(["Family", Family])
SourceFileNodeList.append(["FeatureFlag", FeatureFlag])
SourceFileNodeList.append(["SupArchList", SupArchStr])
for (TagName, ToolCode, Comment) in ValueList:
ValueNodeList = []
if not (TagName or ToolCode or Comment):
TagName = ' '
ToolCode = ' '
Comment = ' '
ValueNodeList.append(["TagName", TagName])
ValueNodeList.append(["ToolCode", ToolCode])
ValueNodeList.append(["Comment", Comment])
ValueNodeXml = CreateXmlElement('SourceFileOtherAttr', '', \
ValueNodeList, [])
SourceFileNodeList.append(ValueNodeXml)
SourceFileNodeXml = CreateXmlElement('SourceFile', '', \
SourceFileNodeList, [])
SourcesNodeList.append(SourceFileNodeXml)
Root = CreateXmlElement('%s' % Key, '', SourcesNodeList, [])
return Root
##
# UserExtensionBinaryXml
#
class UserExtensionBinaryXml(object):
def __init__(self):
self.UserExtensionBinary = ''
def FromXml(self, Item, Key):
if Key:
pass
if self.UserExtensionBinary:
pass
Dict = {}
for SubItem in XmlList(Item, 'Binaries/Binary'):
FileName = XmlElement(SubItem, 'Binary/FileName')
FileType = XmlElement(SubItem, 'Binary/FileType')
FFE = XmlElement(SubItem, 'Binary/FeatureFlag')
SupArch = XmlElement(SubItem, 'Binary/SupArchList')
DictKey = (FileName, FileType, ConvertNOTEQToNE(FFE), SupArch)
ValueList = []
for ValueNodeItem in XmlList(SubItem, \
'Binary/BinaryFileOtherAttr'):
Target = XmlElement(ValueNodeItem, \
'BinaryFileOtherAttr/Target')
Family = XmlElement(ValueNodeItem, \
'BinaryFileOtherAttr/Family')
TagName = XmlElement(ValueNodeItem, \
'BinaryFileOtherAttr/TagName')
Comment = XmlElement(ValueNodeItem, \
'BinaryFileOtherAttr/Comment')
if (Target == ' ') and (Family == ' ') and \
(TagName == ' ') and (Comment == ' '):
Target = ''
Family = ''
TagName = ''
Comment = ''
ValueList.append((Target, Family, TagName, Comment))
Dict[DictKey] = ValueList
return Dict
def ToXml(self, Dict, Key):
if self.UserExtensionBinary:
pass
BinariesNodeList = []
for Item in Dict:
ValueList = Dict[Item]
(FileName, FileType, FeatureFlag, SupArch) = Item
FileNodeList = []
FileNodeList.append(["FileName", FileName])
FileNodeList.append(["FileType", FileType])
FileNodeList.append(["FeatureFlag", ConvertNEToNOTEQ(FeatureFlag)])
FileNodeList.append(["SupArchList", SupArch])
for (Target, Family, TagName, Comment) in ValueList:
ValueNodeList = []
if not (Target or Family or TagName or Comment):
Target = ' '
Family = ' '
TagName = ' '
Comment = ' '
ValueNodeList.append(["Target", Target])
ValueNodeList.append(["Family", Family])
ValueNodeList.append(["TagName", TagName])
ValueNodeList.append(["Comment", Comment])
ValueNodeXml = CreateXmlElement('BinaryFileOtherAttr', '', \
ValueNodeList, [])
FileNodeList.append(ValueNodeXml)
FileNodeXml = CreateXmlElement('Binary', '', FileNodeList, [])
BinariesNodeList.append(FileNodeXml)
Root = CreateXmlElement('%s' % Key, '', BinariesNodeList, [])
return Root
##
# LibraryClassXml
#
class LibraryClassXml(object):
def __init__(self):
self.Keyword = ''
self.HeaderFile = ''
self.RecommendedInstanceGuid = ''
self.RecommendedInstanceVersion = ''
self.CommonDefines = CommonDefinesXml()
self.HelpText = []
def FromXml(self, Item, Key):
self.Keyword = XmlAttribute(XmlNode(Item, '%s' % Key), 'Keyword')
if self.Keyword == '':
self.Keyword = XmlElement(Item, '%s/Keyword' % Key)
self.HeaderFile = XmlElement(Item, '%s/HeaderFile' % Key)
self.CommonDefines.FromXml(XmlNode(Item, '%s' % Key), Key)
for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
HelpTextObj = HelpTextXml()
HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
self.HelpText.append(HelpTextObj)
LibraryClass = LibraryClassObject()
LibraryClass.SetLibraryClass(self.Keyword)
LibraryClass.SetIncludeHeader(self.HeaderFile)
if self.CommonDefines.Usage:
LibraryClass.SetUsage(self.CommonDefines.Usage)
LibraryClass.SetSupArchList(self.CommonDefines.SupArchList)
LibraryClass.SetSupModuleList(self.CommonDefines.SupModList)
LibraryClass.SetFeatureFlag(ConvertNOTEQToNE(self.CommonDefines.FeatureFlag))
LibraryClass.SetHelpTextList(GetHelpTextList(self.HelpText))
return LibraryClass
def ToXml(self, LibraryClass, Key):
if self.HeaderFile:
pass
AttributeList = \
[['Keyword', LibraryClass.GetLibraryClass()],
['SupArchList', GetStringOfList(LibraryClass.GetSupArchList())],
['SupModList', GetStringOfList(LibraryClass.GetSupModuleList())]
]
NodeList = [['HeaderFile', LibraryClass.GetIncludeHeader()]]
for Item in LibraryClass.GetHelpTextList():
Tmp = HelpTextXml()
NodeList.append(Tmp.ToXml(Item))
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
return Root
def ToXml2(self, LibraryClass, Key):
if self.HeaderFile:
pass
FeatureFlag = ConvertNEToNOTEQ(LibraryClass.GetFeatureFlag())
AttributeList = \
[['Usage', LibraryClass.GetUsage()], \
['SupArchList', GetStringOfList(LibraryClass.GetSupArchList())], \
['SupModList', GetStringOfList(LibraryClass.GetSupModuleList())], \
['FeatureFlag', FeatureFlag]
]
NodeList = [['Keyword', LibraryClass.GetLibraryClass()], ]
for Item in LibraryClass.GetHelpTextList():
Tmp = HelpTextXml()
NodeList.append(Tmp.ToXml(Item))
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
return Root
def __str__(self):
Str = "Keyword = %s HeaderFile = %s RecommendedInstanceGuid = %s RecommendedInstanceVersion = %s %s" % \
(self.Keyword, self.HeaderFile, self.RecommendedInstanceGuid, self.RecommendedInstanceVersion, \
self.CommonDefines)
for Item in self.HelpText:
Str = Str + "\n\t" + str(Item)
return Str
##
# FilenameXml
#
class FilenameXml(object):
def __init__(self):
self.FileType = ''
self.Filename = ''
self.CommonDefines = CommonDefinesXml()
def FromXml(self, Item, Key):
self.FileType = XmlAttribute(Item, 'FileType')
Guid = XmlAttribute(Item, 'GUID')
self.Filename = XmlElement(Item, 'Filename')
self.CommonDefines.FromXml(Item, Key)
FeatureFlag = ConvertNOTEQToNE(self.CommonDefines.FeatureFlag)
Filename = FileNameObject()
#
# Convert File Type
#
if self.FileType == 'UEFI_IMAGE':
self.FileType = 'PE32'
Filename.SetGuidValue(Guid)
Filename.SetFileType(self.FileType)
Filename.SetFilename(self.Filename)
Filename.SetSupArchList(self.CommonDefines.SupArchList)
Filename.SetFeatureFlag(FeatureFlag)
return Filename
def ToXml(self, Filename, Key):
if self.Filename:
pass
AttributeList = [['SupArchList', \
GetStringOfList(Filename.GetSupArchList())],
['FileType', Filename.GetFileType()],
['FeatureFlag', ConvertNEToNOTEQ(Filename.GetFeatureFlag())],
['GUID', Filename.GetGuidValue()]
]
Root = CreateXmlElement('%s' % Key, Filename.GetFilename(), [], AttributeList)
return Root
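    # Illustrative sketch of the element ToXml produces when Key is 'Filename'
    # (the file name and attribute values below are made-up placeholders):
    #
    #   <Filename SupArchList="IA32" FileType="PE32" FeatureFlag="" GUID="">
    #     Bin/SampleDriver.efi
    #   </Filename>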
def __str__(self):
return "FileType = %s Filename = %s %s" \
% (self.FileType, self.Filename, self.CommonDefines)
| edk2-master | BaseTools/Source/Python/UPT/Xml/CommonXml.py |
## @file
# This file is used to parse a PCD file of .PKG file
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
PcdXml
'''
##
# Import Modules
#
from Library.Xml.XmlRoutines import XmlElement
from Library.Xml.XmlRoutines import XmlAttribute
from Library.Xml.XmlRoutines import XmlNode
from Library.Xml.XmlRoutines import CreateXmlElement
from Library.Xml.XmlRoutines import XmlList
from Library.StringUtils import GetStringOfList
from Library.StringUtils import ConvertNEToNOTEQ
from Library.StringUtils import ConvertNOTEQToNE
from Library import GlobalData
from Object.POM.CommonObject import PcdObject
from Object.POM.CommonObject import PcdErrorObject
from Xml.CommonXml import HelpTextXml
from Xml.CommonXml import PromptXml
from Xml.CommonXml import CommonDefinesXml
from Xml.XmlParserMisc import GetHelpTextList
from Xml.XmlParserMisc import GetPromptList
import re
##
# PcdErrorXml
#
class PcdErrorXml(object):
def __init__(self):
self.ValidValueList = ''
self.ValidValueListLang = ''
self.ValidValueRange = ''
self.Expression = ''
self.ErrorNumber = ''
self.ErrorMessage = []
def FromXml(self, Item, Key):
self.ValidValueList = XmlElement(Item, '%s/ValidValueList' % Key)
self.ValidValueListLang = \
XmlAttribute(XmlNode(Item, '%s/ValidValueList' % Key), 'Lang')
self.ValidValueRange = self.TransferValidEpxr2ValidRange(XmlElement(Item, '%s/ValidValueRange' % Key))
self.Expression = XmlElement(Item, '%s/Expression' % Key)
self.ErrorNumber = XmlElement(Item, '%s/ErrorNumber' % Key)
for ErrMsg in XmlList(Item, '%s/ErrorMessage' % Key):
ErrorMessageString = XmlElement(ErrMsg, 'ErrorMessage')
ErrorMessageLang = \
XmlAttribute(XmlNode(ErrMsg, 'ErrorMessage'), 'Lang')
self.ErrorMessage.append((ErrorMessageLang, ErrorMessageString))
Error = PcdErrorObject()
Error.SetValidValue(self.ValidValueList)
Error.SetValidValueLang(self.ValidValueListLang)
Error.SetValidValueRange(self.ValidValueRange)
Error.SetExpression(self.Expression)
Error.SetErrorNumber(self.ErrorNumber)
Error.SetErrorMessageList(self.ErrorMessage)
return Error
def ToXml(self, PcdError, Key):
if self.Expression:
pass
AttributeList = []
NodeList = []
if PcdError.GetValidValue():
Element1 = \
CreateXmlElement('ValidValueList', PcdError.GetValidValue(), [], \
[['Lang', PcdError.GetValidValueLang()]])
NodeList.append(Element1)
if PcdError.GetValidValueRange():
TansferedRangeStr = self.TransferValidRange2Expr(PcdError.GetTokenSpaceGuidCName(),
PcdError.GetCName(),
PcdError.GetValidValueRange())
Element1 = \
CreateXmlElement('ValidValueRange', \
TansferedRangeStr, [], [])
NodeList.append(Element1)
if PcdError.GetExpression():
NodeList.append(['Expression', PcdError.GetExpression()])
if PcdError.GetErrorNumber():
NodeList.append(['ErrorNumber', PcdError.GetErrorNumber()])
for Item in PcdError.GetErrorMessageList():
Element = \
CreateXmlElement('ErrorMessage', Item[1], [], [['Lang', Item[0]]])
NodeList.append(Element)
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
return Root
def TransferValidRange2Expr(self, TokenSpaceGuidCName, CName, ValidRange):
if self.Expression:
pass
        # Raw strings keep the regex backslash escapes intact for the re module.
        INT_RANGE_PATTERN1 = r'[\t\s]*[0-9]+[\t\s]*-[\t\s]*[0-9]+'
        INT_RANGE_PATTERN2 = r'[\t\s]*(LT|GT|LE|GE|XOR|EQ)[\t\s]+\d+[\t\s]*'
        HEX_RANGE_PATTERN1 = \
            r'[\t\s]*0[xX][a-fA-F0-9]+[\t\s]*-[\t\s]*0[xX][a-fA-F0-9]+'
        HEX_RANGE_PATTERN2 = r'[\t\s]*(LT|GT|LE|GE|XOR|EQ)[\t\s]+0[xX][a-fA-F0-9]+[\t\s]*'
IntMatch1 = re.compile(INT_RANGE_PATTERN1)
IntMatch2 = re.compile(INT_RANGE_PATTERN2)
HexMatch1 = re.compile(HEX_RANGE_PATTERN1)
HexMatch2 = re.compile(HEX_RANGE_PATTERN2)
PcdName = '.'.join([TokenSpaceGuidCName, CName])
HexMatchedList = []
IntMatchedList = []
#
# Convert HEX2 format range
#
if HexMatch2:
for MatchObj in HexMatch2.finditer(ValidRange):
MatchStr = MatchObj.group()
TransferedRangeStr = ' '.join(['', PcdName, MatchStr.strip()])
ValidRange = ValidRange.replace(MatchStr, TransferedRangeStr)
#
# Convert INT2 format range
#
if IntMatch2:
for MatchObj in IntMatch2.finditer(ValidRange):
MatchStr = MatchObj.group()
TransferedRangeStr = ' '.join(['', PcdName, MatchStr.strip()])
ValidRange = ValidRange.replace(MatchStr, TransferedRangeStr)
#
# Convert HEX1 format range
#
if HexMatch1:
HexMatchedList += HexMatch1.findall(ValidRange)
for MatchStr in HexMatchedList:
RangeItemList = MatchStr.strip().split('-')
TransferedRangeStr = '(%s GE %s) AND (%s LE %s)' % \
(PcdName, RangeItemList[0].strip(), PcdName, RangeItemList[1].strip())
ValidRange = ValidRange.replace(MatchStr, TransferedRangeStr)
#
# Convert INT1 format range
#
if IntMatch1:
IntMatchedList += IntMatch1.findall(ValidRange)
for MatchStr in IntMatchedList:
RangeItemList = MatchStr.strip().split('-')
TransferedRangeStr = '(%s GE %s) AND (%s LE %s)' % \
(PcdName, RangeItemList[0].strip(), PcdName, RangeItemList[1].strip())
ValidRange = ValidRange.replace(MatchStr, TransferedRangeStr)
return ValidRange
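    # Usage sketch for the conversion above (PCD names and values are
    # hypothetical, shown only to illustrate the rewriting):
    #
    #   TransferValidRange2Expr('gTokenSpaceGuid', 'PcdSample', '0x10 - 0x1F')
    #     -> '(gTokenSpaceGuid.PcdSample GE 0x10) AND (gTokenSpaceGuid.PcdSample LE 0x1F)'
    #   TransferValidRange2Expr('gTokenSpaceGuid', 'PcdSample', 'LT 5')
    #     -> ' gTokenSpaceGuid.PcdSample LT 5'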
def TransferValidEpxr2ValidRange(self, ValidRangeExpr):
if self.Expression:
pass
        # Raw strings keep the regex backslash escapes intact for the re module.
        PCD_PATTERN = \
            r'[\t\s]*[_a-zA-Z][a-zA-Z0-9_]*[\t\s]*\.[\t\s]*[_a-zA-Z][a-zA-Z0-9_]*[\t\s]*'
        IntPattern1 = \
            r'[\t\s]*\([\t\s]*'+PCD_PATTERN+r'[\t\s]+GE[\t\s]+\d+[\t\s]*\)[\t\s]+AND[\t\s]+\([\t\s]*'+\
            PCD_PATTERN+r'[\t\s]+LE[\t\s]+\d+[\t\s]*\)'
        IntPattern1 = IntPattern1.replace(' ', '')
        IntPattern2 = r'[\t\s]*'+PCD_PATTERN+r'[\t\s]+(LT|GT|LE|GE|XOR|EQ)[\t\s]+\d+[\t\s]*'
        HexPattern1 = \
            r'[\t\s]*\([\t\s]*'+PCD_PATTERN+r'[\t\s]+GE[\t\s]+0[xX][0-9a-fA-F]+[\t\s]*\)[\t\s]+AND[\t\s]+\([\t\s]*'+\
            PCD_PATTERN+r'[\t\s]+LE[\t\s]+0[xX][0-9a-fA-F]+[\t\s]*\)'
        HexPattern1 = HexPattern1.replace(' ', '')
        # Hex values are matched with hex digits only.
        HexPattern2 = r'[\t\s]*'+PCD_PATTERN+r'[\t\s]+(LT|GT|LE|GE|XOR|EQ)[\t\s]+0[xX][0-9a-fA-F]+[\t\s]*'
#
# Do the Hex1 conversion
#
HexMatchedList = re.compile(HexPattern1).findall(ValidRangeExpr)
HexRangeDict = {}
for HexMatchedItem in HexMatchedList:
#
# To match items on both sides of '-'
#
            RangeItemList = re.compile(r'[\t\s]*0[xX][0-9a-fA-F]+[\t\s]*').findall(HexMatchedItem)
if RangeItemList and len(RangeItemList) == 2:
HexRangeDict[HexMatchedItem] = RangeItemList
for Key in HexRangeDict.keys():
            MaxItem = MinItem = ''
            if int(HexRangeDict[Key][0], 16) > int(HexRangeDict[Key][1], 16):
                MaxItem = HexRangeDict[Key][0]
                MinItem = HexRangeDict[Key][1]
            else:
                MaxItem = HexRangeDict[Key][1]
                MinItem = HexRangeDict[Key][0]
            Range = ' %s - %s' % (MinItem.strip(), MaxItem.strip())
ValidRangeExpr = ValidRangeExpr.replace(Key, Range)
#
# Do the INT1 conversion
#
IntRangeDict = {}
IntMatchList = re.compile(IntPattern1).findall(ValidRangeExpr)
for MatchedItem in IntMatchList:
#
# To match items on both sides of '-'
#
            RangeItemList = re.compile(r'[\t\s]*\d+[\t\s]*').findall(MatchedItem)
if RangeItemList and len(RangeItemList) == 2:
IntRangeDict[MatchedItem] = RangeItemList
for Key in IntRangeDict.keys():
            MaxItem = MinItem = ''
            if int(IntRangeDict[Key][0]) > int(IntRangeDict[Key][1]):
                MaxItem = IntRangeDict[Key][0]
                MinItem = IntRangeDict[Key][1]
            else:
                MaxItem = IntRangeDict[Key][1]
                MinItem = IntRangeDict[Key][0]
            Range = ' %s - %s' % (MinItem.strip(), MaxItem.strip())
ValidRangeExpr = ValidRangeExpr.replace(Key, Range)
#
# Do the HEX2 conversion
#
for MatchObj in re.compile(HexPattern2).finditer(ValidRangeExpr):
MatchStr = MatchObj.group()
Range = re.compile(PCD_PATTERN).sub(' ', MatchStr)
ValidRangeExpr = ValidRangeExpr.replace(MatchStr, Range)
#
# Do the INT2 conversion
#
for MatchObj in re.compile(IntPattern2).finditer(ValidRangeExpr):
MatchStr = MatchObj.group()
Range = re.compile(PCD_PATTERN).sub(' ', MatchStr)
ValidRangeExpr = ValidRangeExpr.replace(MatchStr, Range)
return ValidRangeExpr
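    # Usage sketch for the inverse conversion above (hypothetical input):
    #
    #   TransferValidEpxr2ValidRange(
    #       '(gTokenSpaceGuid.PcdSample GE 0x10) AND (gTokenSpaceGuid.PcdSample LE 0x1F)')
    #     -> ' 0x10 - 0x1F'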
def __str__(self):
return "ValidValueList = %s ValidValueListLang = %s ValidValueRange \
= %s Expression = %s ErrorNumber = %s %s" % \
(self.ValidValueList, self.ValidValueListLang, self.ValidValueRange, \
self.Expression, self.ErrorNumber, self.ErrorMessage)
##
# PcdEntryXml
#
class PcdEntryXml(object):
def __init__(self):
self.PcdItemType = ''
self.PcdUsage = ''
self.TokenSpaceGuidCName = ''
self.TokenSpaceGuidValue = ''
self.Token = ''
self.CName = ''
self.PcdCName = ''
self.DatumType = ''
self.ValidUsage = ''
self.DefaultValue = ''
self.MaxDatumSize = ''
self.Value = ''
self.Offset = ''
self.CommonDefines = CommonDefinesXml()
self.Prompt = []
self.HelpText = []
self.PcdError = []
##
# AsBuilt will use FromXml
#
def FromXml(self, Item, Key):
self.PcdItemType = \
XmlAttribute(XmlNode(Item, '%s' % Key), 'PcdItemType')
self.PcdUsage = XmlAttribute(XmlNode(Item, '%s' % Key), 'PcdUsage')
self.TokenSpaceGuidCName = \
XmlElement(Item, '%s/TokenSpaceGuidCname' % Key)
self.TokenSpaceGuidValue = \
XmlElement(Item, '%s/TokenSpaceGuidValue' % Key)
self.Token = XmlElement(Item, '%s/Token' % Key)
self.CName = XmlElement(Item, '%s/CName' % Key)
self.PcdCName = XmlElement(Item, '%s/PcdCName' % Key)
self.DatumType = XmlElement(Item, '%s/DatumType' % Key)
self.ValidUsage = XmlElement(Item, '%s/ValidUsage' % Key)
if not GlobalData.gIS_BINARY_INF:
self.DefaultValue = XmlElement(Item, '%s/DefaultValue' % Key)
else:
self.DefaultValue = XmlElement(Item, '%s/Value' % Key)
self.MaxDatumSize = XmlElement(Item, '%s/MaxDatumSize' % Key)
self.Value = XmlElement(Item, '%s/Value' % Key)
self.Offset = XmlElement(Item, '%s/Offset' % Key)
self.CommonDefines.FromXml(XmlNode(Item, '%s' % Key), Key)
for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
HelpTextObj = HelpTextXml()
HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
self.HelpText.append(HelpTextObj)
for PcdErrorItem in XmlList(Item, '%s/PcdError' % Key):
PcdErrorObjXml = PcdErrorXml()
PcdErrorObj = PcdErrorObjXml.FromXml(PcdErrorItem, 'PcdError')
self.PcdError.append(PcdErrorObj)
self.DefaultValue = ConvertNOTEQToNE(self.DefaultValue)
PcdEntry = PcdObject()
PcdEntry.SetSupArchList(self.CommonDefines.SupArchList)
PcdEntry.SetTokenSpaceGuidCName(self.TokenSpaceGuidCName)
PcdEntry.SetTokenSpaceGuidValue(self.TokenSpaceGuidValue)
PcdEntry.SetToken(self.Token)
PcdEntry.SetOffset(self.Offset)
PcdEntry.SetCName(self.CName)
PcdEntry.SetPcdCName(self.PcdCName)
PcdEntry.SetDatumType(self.DatumType)
PcdEntry.SetValidUsage(self.ValidUsage)
PcdEntry.SetDefaultValue(self.DefaultValue)
PcdEntry.SetMaxDatumSize(self.MaxDatumSize)
PcdEntry.SetFeatureFlag(ConvertNOTEQToNE(self.CommonDefines.FeatureFlag))
PcdEntry.SetItemType(self.PcdItemType)
PcdEntry.SetHelpTextList(GetHelpTextList(self.HelpText))
PcdEntry.SetPcdErrorsList(self.PcdError)
return PcdEntry
##
# Package will use FromXml2
#
def FromXml2(self, Item, Key):
self.TokenSpaceGuidCName = \
XmlElement(Item, '%s/TokenSpaceGuidCname' % Key)
self.Token = XmlElement(Item, '%s/Token' % Key)
self.CName = XmlElement(Item, '%s/CName' % Key)
self.DatumType = XmlElement(Item, '%s/DatumType' % Key)
self.ValidUsage = XmlElement(Item, '%s/ValidUsage' % Key)
self.DefaultValue = XmlElement(Item, '%s/DefaultValue' % Key)
self.MaxDatumSize = XmlElement(Item, '%s/MaxDatumSize' % Key)
self.CommonDefines.FromXml(XmlNode(Item, '%s' % Key), Key)
for PromptItem in XmlList(Item, '%s/Prompt' % Key):
PromptObj = PromptXml()
PromptObj.FromXml(PromptItem, '%s/Prompt' % Key)
self.Prompt.append(PromptObj)
for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
HelpTextObj = HelpTextXml()
HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
self.HelpText.append(HelpTextObj)
for PcdErrorItem in XmlList(Item, '%s/PcdError' % Key):
PcdErrorObjXml = PcdErrorXml()
PcdErrorObj = PcdErrorObjXml.FromXml(PcdErrorItem, 'PcdError')
self.PcdError.append(PcdErrorObj)
self.DefaultValue = ConvertNOTEQToNE(self.DefaultValue)
PcdEntry = PcdObject()
PcdEntry.SetSupArchList(self.CommonDefines.SupArchList)
PcdEntry.SetSupModuleList(self.CommonDefines.SupModList)
PcdEntry.SetTokenSpaceGuidCName(self.TokenSpaceGuidCName)
PcdEntry.SetToken(self.Token)
PcdEntry.SetCName(self.CName)
PcdEntry.SetDatumType(self.DatumType)
PcdEntry.SetValidUsage(self.ValidUsage)
PcdEntry.SetDefaultValue(self.DefaultValue)
PcdEntry.SetMaxDatumSize(self.MaxDatumSize)
PcdEntry.SetFeatureFlag(ConvertNOTEQToNE(self.CommonDefines.FeatureFlag))
PcdEntry.SetPromptList(GetPromptList(self.Prompt))
PcdEntry.SetHelpTextList(GetHelpTextList(self.HelpText))
PcdEntry.SetPcdErrorsList(self.PcdError)
return PcdEntry
##
# Module will use FromXml3
#
def FromXml3(self, Item, Key):
self.PcdItemType = \
XmlAttribute(XmlNode(Item, '%s' % Key), 'PcdItemType')
self.PcdUsage = XmlAttribute(XmlNode(Item, '%s' % Key), 'PcdUsage')
self.TokenSpaceGuidCName = \
XmlElement(Item, '%s/TokenSpaceGuidCName' % Key)
self.CName = XmlElement(Item, '%s/CName' % Key)
self.DefaultValue = XmlElement(Item, '%s/DefaultValue' % Key)
self.CommonDefines.FromXml(XmlNode(Item, '%s' % Key), Key)
for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
HelpTextObj = HelpTextXml()
HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
self.HelpText.append(HelpTextObj)
        for PcdErrorItem in XmlList(Item, '%s/PcdError' % Key):
            # Append the parsed PcdErrorObject, consistent with FromXml/FromXml2.
            PcdErrorObjXml = PcdErrorXml()
            PcdErrorObj = PcdErrorObjXml.FromXml(PcdErrorItem, 'PcdError')
            self.PcdError.append(PcdErrorObj)
self.DefaultValue = ConvertNOTEQToNE(self.DefaultValue)
PcdEntry = PcdObject()
PcdEntry.SetSupArchList(self.CommonDefines.SupArchList)
PcdEntry.SetTokenSpaceGuidCName(self.TokenSpaceGuidCName)
PcdEntry.SetCName(self.CName)
PcdEntry.SetValidUsage(self.PcdUsage)
PcdEntry.SetDefaultValue(self.DefaultValue)
PcdEntry.SetFeatureFlag(ConvertNOTEQToNE(self.CommonDefines.FeatureFlag))
PcdEntry.SetItemType(self.PcdItemType)
PcdEntry.SetHelpTextList(GetHelpTextList(self.HelpText))
PcdEntry.SetPcdErrorsList(self.PcdError)
return PcdEntry
def ToXml(self, PcdEntry, Key):
if self.PcdCName:
pass
DefaultValue = ConvertNEToNOTEQ(PcdEntry.GetDefaultValue())
AttributeList = \
[['SupArchList', GetStringOfList(PcdEntry.GetSupArchList())], \
['PcdUsage', PcdEntry.GetValidUsage()], \
['PcdItemType', PcdEntry.GetItemType()], \
['FeatureFlag', PcdEntry.GetFeatureFlag()],
]
NodeList = [['TokenSpaceGuidCname', PcdEntry.GetTokenSpaceGuidCName()],
['TokenSpaceGuidValue', PcdEntry.GetTokenSpaceGuidValue()],
['Token', PcdEntry.GetToken()],
['CName', PcdEntry.GetCName()],
['DatumType', PcdEntry.GetDatumType()],
['ValidUsage', GetStringOfList(PcdEntry.GetValidUsage())],
['DefaultValue', DefaultValue],
['MaxDatumSize', PcdEntry.GetMaxDatumSize()],
['Offset', PcdEntry.GetOffset()],
]
for Item in PcdEntry.GetHelpTextList():
Tmp = HelpTextXml()
NodeList.append(Tmp.ToXml(Item))
for Item in PcdEntry.GetPcdErrorsList():
Tmp = PcdErrorXml()
NodeList.append(Tmp.ToXml(Item, 'PcdError'))
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
return Root
##
# Package will use ToXml2
#
def ToXml2(self, PcdEntry, Key):
if self.PcdCName:
pass
DefaultValue = ConvertNEToNOTEQ(PcdEntry.GetDefaultValue())
AttributeList = \
[['SupArchList', GetStringOfList(PcdEntry.GetSupArchList())], \
['SupModList', GetStringOfList(PcdEntry.GetSupModuleList())]
]
NodeList = [['TokenSpaceGuidCname', PcdEntry.GetTokenSpaceGuidCName()],
['Token', PcdEntry.GetToken()],
['CName', PcdEntry.GetCName()],
['DatumType', PcdEntry.GetDatumType()],
['ValidUsage', GetStringOfList(PcdEntry.GetValidUsage())],
['DefaultValue', DefaultValue],
['MaxDatumSize', PcdEntry.GetMaxDatumSize()],
]
for Item in PcdEntry.GetPromptList():
Tmp = PromptXml()
NodeList.append(Tmp.ToXml(Item))
for Item in PcdEntry.GetHelpTextList():
Tmp = HelpTextXml()
NodeList.append(Tmp.ToXml(Item))
for Item in PcdEntry.GetPcdErrorsList():
Tmp = PcdErrorXml()
NodeList.append(Tmp.ToXml(Item, 'PcdError'))
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
return Root
##
# Module will use ToXml3
#
def ToXml3(self, PcdEntry, Key):
if self.PcdCName:
pass
DefaultValue = ConvertNEToNOTEQ(PcdEntry.GetDefaultValue())
AttributeList = \
[['SupArchList', GetStringOfList(PcdEntry.GetSupArchList())], \
['PcdUsage', PcdEntry.GetValidUsage()], \
['PcdItemType', PcdEntry.GetItemType()], \
['FeatureFlag', ConvertNEToNOTEQ(PcdEntry.GetFeatureFlag())],
]
NodeList = [['CName', PcdEntry.GetCName()],
['TokenSpaceGuidCName', PcdEntry.GetTokenSpaceGuidCName()],
['DefaultValue', DefaultValue],
]
for Item in PcdEntry.GetHelpTextList():
Tmp = HelpTextXml()
NodeList.append(Tmp.ToXml(Item))
for Item in PcdEntry.GetPcdErrorsList():
Tmp = PcdErrorXml()
NodeList.append(Tmp.ToXml(Item, 'PcdError'))
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
return Root
##
# AsBuild Module will use ToXml4
#
def ToXml4(self, PcdEntry, Key):
if self.PcdCName:
pass
DefaultValue = ConvertNEToNOTEQ(PcdEntry.GetDefaultValue())
AttributeList = []
NodeList = [
['TokenSpaceGuidValue', PcdEntry.GetTokenSpaceGuidValue()],
['PcdCName', PcdEntry.GetCName()],
['Token', PcdEntry.GetToken()],
['DatumType', PcdEntry.GetDatumType()],
['MaxDatumSize', PcdEntry.GetMaxDatumSize()],
['Value', DefaultValue],
['Offset', PcdEntry.GetOffset()]
]
for Item in PcdEntry.GetHelpTextList():
Tmp = HelpTextXml()
NodeList.append(Tmp.ToXml(Item))
for Item in PcdEntry.GetPcdErrorsList():
Tmp = PcdErrorXml()
NodeList.append(Tmp.ToXml(Item, 'PcdError'))
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
return Root
def __str__(self):
Str = \
('PcdItemType = %s PcdUsage = %s TokenSpaceGuidCName = %s \
TokenSpaceGuidValue = %s Token = %s CName = %s PcdCName = %s \
DatumType = %s ValidUsage = %s DefaultValue = %s MaxDatumSize = %s \
Value = %s Offset = %s %s') % \
(self.PcdItemType, self.PcdUsage, self.TokenSpaceGuidCName, \
self.TokenSpaceGuidValue, self.Token, self.CName, self.PcdCName, \
self.DatumType, self.ValidUsage, self.DefaultValue, \
self.MaxDatumSize, self.Value, self.Offset, self.CommonDefines)
for Item in self.HelpText:
Str = Str + "\n\t" + str(Item)
for Item in self.PcdError:
Str = Str + "\n\tPcdError:" + str(Item)
return Str
| edk2-master | BaseTools/Source/Python/UPT/Xml/PcdXml.py |
## @file
# Python 'Library' package initialization file.
#
# This file is required to make Python interpreter treat the directory
# as containing package.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
Xml
'''
| edk2-master | BaseTools/Source/Python/UPT/Xml/__init__.py |
## @file
# This file is used to parse a xml file of .PKG file
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
GuidProtocolPpiXml
'''
from Library.StringUtils import ConvertNEToNOTEQ
from Library.StringUtils import ConvertNOTEQToNE
from Library.StringUtils import GetStringOfList
from Library.Xml.XmlRoutines import XmlElement
from Library.Xml.XmlRoutines import XmlAttribute
from Library.Xml.XmlRoutines import XmlNode
from Library.Xml.XmlRoutines import XmlList
from Library.Xml.XmlRoutines import CreateXmlElement
from Object.POM.CommonObject import GuidObject
from Object.POM.CommonObject import ProtocolObject
from Object.POM.CommonObject import PpiObject
from Xml.CommonXml import CommonDefinesXml
from Xml.CommonXml import HelpTextXml
from Xml.XmlParserMisc import GetHelpTextList
##
#GUID/Protocol/Ppi Common
#
class GuidProtocolPpiXml(object):
def __init__(self, Mode):
self.UiName = ''
self.GuidTypes = ''
self.Notify = ''
self.CName = ''
self.GuidValue = ''
self.CommonDefines = CommonDefinesXml()
self.HelpText = []
#
        # Guid/Protocol/Ppi: used internally to indicate which object type
        # FromXml should return
#
self.Type = ''
#
# there are slightly different field between package and module
#
self.Mode = Mode
self.GuidType = ''
self.VariableName = ''
def FromXml(self, Item, Key):
self.UiName = XmlAttribute(XmlNode(Item, '%s' % Key), 'UiName')
self.GuidType = XmlAttribute(XmlNode(Item, '%s' % Key), 'GuidType')
self.Notify = XmlAttribute(XmlNode(Item, '%s' % Key), 'Notify')
self.CName = XmlElement(Item, '%s/CName' % Key)
self.GuidValue = XmlElement(Item, '%s/GuidValue' % Key)
self.VariableName = XmlElement(Item, '%s/VariableName' % Key)
self.CommonDefines.FromXml(XmlNode(Item, '%s' % Key), Key)
for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
HelpTextObj = HelpTextXml()
HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
self.HelpText.append(HelpTextObj)
if self.Type == 'Guid':
GuidProtocolPpi = GuidObject()
elif self.Type == 'Protocol':
GuidProtocolPpi = ProtocolObject()
else:
GuidProtocolPpi = PpiObject()
GuidProtocolPpi.SetHelpTextList(GetHelpTextList(self.HelpText))
return GuidProtocolPpi
def ToXml(self, GuidProtocolPpi, Key):
if self.GuidValue:
pass
AttributeList = \
[['Usage', GetStringOfList(GuidProtocolPpi.GetUsage())], \
['UiName', GuidProtocolPpi.GetName()], \
['GuidType', GetStringOfList(GuidProtocolPpi.GetGuidTypeList())], \
['Notify', str(GuidProtocolPpi.GetNotify()).lower()], \
['SupArchList', GetStringOfList(GuidProtocolPpi.GetSupArchList())], \
['SupModList', GetStringOfList(GuidProtocolPpi.GetSupModuleList())], \
['FeatureFlag', ConvertNEToNOTEQ(GuidProtocolPpi.GetFeatureFlag())]
]
NodeList = [['CName', GuidProtocolPpi.GetCName()],
['GuidValue', GuidProtocolPpi.GetGuid()],
['VariableName', GuidProtocolPpi.VariableName]
]
for Item in GuidProtocolPpi.GetHelpTextList():
Tmp = HelpTextXml()
NodeList.append(Tmp.ToXml(Item))
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
return Root
def __str__(self):
Str = \
"UiName = %s Notify = %s GuidTypes = %s CName = %s GuidValue = %s %s" \
% (self.UiName, self.Notify, self.GuidTypes, self.CName, \
self.GuidValue, self.CommonDefines)
for Item in self.HelpText:
Str = Str + "\n\t" + str(Item)
return Str
##
#GUID Xml
#
class GuidXml(GuidProtocolPpiXml):
def __init__(self, Mode):
GuidProtocolPpiXml.__init__(self, Mode)
self.Type = 'Guid'
def FromXml(self, Item, Key):
GuidProtocolPpi = GuidProtocolPpiXml.FromXml(self, Item, Key)
if self.Mode == 'Package':
GuidProtocolPpi.SetSupArchList(self.CommonDefines.SupArchList)
GuidProtocolPpi.SetSupModuleList(self.CommonDefines.SupModList)
GuidProtocolPpi.SetCName(self.CName)
GuidProtocolPpi.SetGuid(self.GuidValue)
else:
GuidProtocolPpi.SetUsage(self.CommonDefines.Usage)
if self.GuidType:
GuidProtocolPpi.SetGuidTypeList([self.GuidType])
GuidProtocolPpi.SetSupArchList(self.CommonDefines.SupArchList)
GuidProtocolPpi.SetFeatureFlag(ConvertNOTEQToNE(self.CommonDefines.FeatureFlag))
GuidProtocolPpi.SetCName(self.CName)
GuidProtocolPpi.SetVariableName(self.VariableName)
return GuidProtocolPpi
def ToXml(self, GuidProtocolPpi, Key):
if self.Mode == 'Package':
AttributeList = \
[['GuidType', \
GetStringOfList(GuidProtocolPpi.GetGuidTypeList())], \
['SupArchList', \
GetStringOfList(GuidProtocolPpi.GetSupArchList())], \
['SupModList', \
GetStringOfList(GuidProtocolPpi.GetSupModuleList())],
]
NodeList = [['CName', GuidProtocolPpi.GetCName()],
['GuidValue', GuidProtocolPpi.GetGuid()],
]
else:
AttributeList = \
[['Usage', GetStringOfList(GuidProtocolPpi.GetUsage())], \
['GuidType', GetStringOfList(GuidProtocolPpi.GetGuidTypeList())],\
['SupArchList', \
GetStringOfList(GuidProtocolPpi.GetSupArchList())], \
['FeatureFlag', ConvertNEToNOTEQ(GuidProtocolPpi.GetFeatureFlag())]
]
NodeList = [['CName', GuidProtocolPpi.GetCName()],
['VariableName', GuidProtocolPpi.GetVariableName()]
]
for Item in GuidProtocolPpi.GetHelpTextList():
Tmp = HelpTextXml()
NodeList.append(Tmp.ToXml(Item))
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
return Root
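#
# Illustrative sketch of a package-mode <GuidCName> element handled by GuidXml
# (the C name and GUID value below are made-up placeholders):
#
#   <GuidCName GuidType="GUID" SupArchList="IA32 X64" SupModList="">
#     <CName>gSampleTokenSpaceGuid</CName>
#     <GuidValue>AAAAAAAA-BBBB-CCCC-DDDD-EEEEEEEEEEEE</GuidValue>
#   </GuidCName>
#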
##
#Protocol Xml
#
class ProtocolXml(GuidProtocolPpiXml):
def __init__(self, Mode):
GuidProtocolPpiXml.__init__(self, Mode)
self.Type = 'Protocol'
def FromXml(self, Item, Key):
GuidProtocolPpi = GuidProtocolPpiXml.FromXml(self, Item, Key)
if self.Mode == 'Package':
GuidProtocolPpi.SetFeatureFlag(self.CommonDefines.FeatureFlag)
GuidProtocolPpi.SetSupArchList(self.CommonDefines.SupArchList)
GuidProtocolPpi.SetSupModuleList(self.CommonDefines.SupModList)
GuidProtocolPpi.SetCName(self.CName)
GuidProtocolPpi.SetGuid(self.GuidValue)
else:
GuidProtocolPpi.SetUsage(self.CommonDefines.Usage)
if self.Notify.upper() == "TRUE":
GuidProtocolPpi.SetNotify(True)
elif self.Notify.upper() == "FALSE":
GuidProtocolPpi.SetNotify(False)
else:
GuidProtocolPpi.SetNotify('')
GuidProtocolPpi.SetSupArchList(self.CommonDefines.SupArchList)
GuidProtocolPpi.SetFeatureFlag(ConvertNOTEQToNE(self.CommonDefines.FeatureFlag))
GuidProtocolPpi.SetCName(self.CName)
return GuidProtocolPpi
def ToXml(self, GuidProtocolPpi, Key):
if self.Mode == 'Package':
AttributeList = \
[['SupArchList', \
GetStringOfList(GuidProtocolPpi.GetSupArchList())], \
['SupModList', \
GetStringOfList(GuidProtocolPpi.GetSupModuleList())], \
['FeatureFlag', GuidProtocolPpi.GetFeatureFlag()]
]
NodeList = [['CName', GuidProtocolPpi.GetCName()],
['GuidValue', GuidProtocolPpi.GetGuid()],
]
else:
AttributeList = \
[['Usage', GetStringOfList(GuidProtocolPpi.GetUsage())], \
['Notify', str(GuidProtocolPpi.GetNotify()).lower()], \
['SupArchList', \
GetStringOfList(GuidProtocolPpi.GetSupArchList())], \
['FeatureFlag', ConvertNEToNOTEQ(GuidProtocolPpi.GetFeatureFlag())]
]
NodeList = [['CName', GuidProtocolPpi.GetCName()],
]
for Item in GuidProtocolPpi.GetHelpTextList():
Tmp = HelpTextXml()
NodeList.append(Tmp.ToXml(Item))
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
return Root
##
#Ppi Xml
#
class PpiXml(GuidProtocolPpiXml):
def __init__(self, Mode):
GuidProtocolPpiXml.__init__(self, Mode)
self.Type = 'Ppi'
def FromXml(self, Item, Key):
GuidProtocolPpi = GuidProtocolPpiXml.FromXml(self, Item, Key)
if self.Mode == 'Package':
GuidProtocolPpi.SetSupArchList(self.CommonDefines.SupArchList)
GuidProtocolPpi.SetSupModuleList(self.CommonDefines.SupModList)
GuidProtocolPpi.SetCName(self.CName)
GuidProtocolPpi.SetGuid(self.GuidValue)
else:
GuidProtocolPpi.SetUsage(self.CommonDefines.Usage)
if self.Notify.upper() == "TRUE":
GuidProtocolPpi.SetNotify(True)
elif self.Notify.upper() == "FALSE":
GuidProtocolPpi.SetNotify(False)
else:
GuidProtocolPpi.SetNotify('')
GuidProtocolPpi.SetSupArchList(self.CommonDefines.SupArchList)
GuidProtocolPpi.SetFeatureFlag(ConvertNOTEQToNE(self.CommonDefines.FeatureFlag))
GuidProtocolPpi.SetCName(self.CName)
return GuidProtocolPpi
def ToXml(self, GuidProtocolPpi, Key):
if self.Mode == 'Package':
AttributeList = \
[['SupArchList', \
GetStringOfList(GuidProtocolPpi.GetSupArchList())],
]
NodeList = [['CName', GuidProtocolPpi.GetCName()],
['GuidValue', GuidProtocolPpi.GetGuid()],
]
else:
AttributeList = \
[['Usage', GetStringOfList(GuidProtocolPpi.GetUsage())], \
['Notify', str(GuidProtocolPpi.GetNotify()).lower()], \
['SupArchList', \
GetStringOfList(GuidProtocolPpi.GetSupArchList())], \
['FeatureFlag', ConvertNEToNOTEQ(GuidProtocolPpi.GetFeatureFlag())]
]
NodeList = [['CName', GuidProtocolPpi.GetCName()],
]
for Item in GuidProtocolPpi.GetHelpTextList():
Tmp = HelpTextXml()
NodeList.append(Tmp.ToXml(Item))
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
return Root
| edk2-master | BaseTools/Source/Python/UPT/Xml/GuidProtocolPpiXml.py |
## @file
# This file is used to parse a Module file of .PKG file
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
ModuleSurfaceAreaXml
'''
from xml.dom import minidom
from Library.StringUtils import ConvertNEToNOTEQ
from Library.StringUtils import ConvertNOTEQToNE
from Library.StringUtils import GetStringOfList
from Library.StringUtils import IsMatchArch
from Library.Xml.XmlRoutines import XmlElement
from Library.Xml.XmlRoutines import XmlAttribute
from Library.Xml.XmlRoutines import XmlNode
from Library.Xml.XmlRoutines import XmlList
from Library.Xml.XmlRoutines import CreateXmlElement
from Object.POM.CommonObject import GuidVersionObject
from Object.POM.ModuleObject import BootModeObject
from Object.POM.ModuleObject import DepexObject
from Object.POM.ModuleObject import ModuleObject
from Object.POM.ModuleObject import EventObject
from Object.POM.ModuleObject import HobObject
from Object.POM.ModuleObject import SourceFileObject
from Object.POM.ModuleObject import PackageDependencyObject
from Object.POM.ModuleObject import ExternObject
from Object.POM.ModuleObject import BinaryFileObject
from Object.POM.ModuleObject import AsBuiltObject
from Object.POM.ModuleObject import BinaryBuildFlagObject
from Xml.CommonXml import ClonedFromXml
from Xml.CommonXml import HeaderXml
from Xml.CommonXml import HelpTextXml
from Xml.CommonXml import CommonDefinesXml
from Xml.CommonXml import LibraryClassXml
from Xml.CommonXml import UserExtensionsXml
from Xml.CommonXml import MiscellaneousFileXml
from Xml.CommonXml import FilenameXml
from Xml.GuidProtocolPpiXml import GuidXml
from Xml.GuidProtocolPpiXml import ProtocolXml
from Xml.GuidProtocolPpiXml import PpiXml
from Xml.PcdXml import PcdEntryXml
from Xml.XmlParserMisc import GetHelpTextList
from Library import GlobalData
from Library.Misc import GetSplitValueList
## BinaryFileXml
#
# Represents the following XML item
#
# <BinaryFile>
# <Filename
# FileType=" FileType " {1}
# SupArchList=" ArchListType " {0,1}
# FeatureFlag=" FeatureFlagExpression " {0,1} >
# xs:anyURI
# </Filename> {1,}
# <AsBuilt> ... </AsBuilt> {0,}
# </BinaryFile> {1,}
#
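#
# A hypothetical instance of the element described above (file name and
# attribute values are illustrative only):
#
#   <BinaryFile>
#     <Filename FileType="PE32" SupArchList="X64">Bin/SampleModule.efi</Filename>
#     <AsBuilt> ... </AsBuilt>
#   </BinaryFile>
#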
class BinaryFileXml(object):
def __init__(self):
self.FileNames = []
self.AsBuiltList = []
self.PatchPcdValues = ''
self.PcdExValues = ''
self.LibraryInstances = ''
self.BuildFlags = ''
def FromXml(self, Item, Key):
if self.FileNames:
pass
BinaryFile = BinaryFileObject()
FilenameList = []
SupArchList = ['COMMON']
for SubItem in XmlList(Item, '%s/Filename' % Key):
Axml = FilenameXml()
Bxml = Axml.FromXml(SubItem, 'Filename')
FilenameList.append(Bxml)
BinaryFile.SetFileNameList(FilenameList)
for FileName in FilenameList:
if FileName.GetSupArchList():
SupArchList = FileName.GetSupArchList()
BinaryFile.SetSupArchList(SupArchList)
if GlobalData.gIS_BINARY_INF:
AsBuiltList = []
for AsBuiltItem in XmlList(Item, '%s/AsBuilt' % Key):
AsBuilt = AsBuiltObject()
PatchPcdValueList = []
for SubItem in XmlList(AsBuiltItem, 'AsBuilt/PatchPcdValue'):
Axml = PcdEntryXml()
Bxml = Axml.FromXml(SubItem, 'PatchPcdValue')
PatchPcdValueList.append(Bxml)
AsBuilt.SetPatchPcdList(PatchPcdValueList)
PcdExValueList = []
for SubItem in XmlList(AsBuiltItem, 'AsBuilt/PcdExValue'):
Axml = PcdEntryXml()
Bxml = Axml.FromXml(SubItem, 'PcdExValue')
PcdExValueList.append(Bxml)
AsBuilt.SetPcdExList(PcdExValueList)
LibraryList = []
for SubItem in XmlList(Item, '%s/AsBuilt/LibraryInstances/GUID' % Key):
GuidVerObj = GuidVersionObject()
GUID = XmlElement(SubItem, 'GUID')
Version = XmlAttribute(XmlNode(SubItem, 'GUID'), 'Version')
GuidVerObj.SetGuid(GUID)
GuidVerObj.SetVersion(Version)
LibraryList.append(GuidVerObj)
if XmlList(Item, '%s/AsBuilt/LibraryInstances' % Key) and not LibraryList:
LibraryList = [None]
AsBuilt.SetLibraryInstancesList(LibraryList)
BuildFlagList = []
for SubItem in XmlList(Item, '%s/AsBuilt/BuildFlags' % Key):
BuildFlag = BuildFlagXml()
BuildFlagList.append(BuildFlag.FromXml2(SubItem, 'BuildFlags'))
AsBuilt.SetBuildFlagsList(BuildFlagList)
AsBuiltList.append(AsBuilt)
BinaryFile.SetAsBuiltList(AsBuiltList)
return BinaryFile
def ToXml(self, BinaryFile, Key):
if self.FileNames:
pass
NodeList = []
FilenameList = BinaryFile.GetFileNameList()
SupportArch = None
for Filename in FilenameList:
Tmp = FilenameXml()
NodeList.append(Tmp.ToXml(Filename, 'Filename'))
SupportArch = Filename.SupArchList
AsBuildList = BinaryFile.GetAsBuiltList()
PatchPcdValueList = AsBuildList.GetPatchPcdList()
PcdExList = AsBuildList.GetPcdExList()
LibGuidVerList = AsBuildList.GetLibraryInstancesList()
BuildFlagList = AsBuildList.GetBuildFlagsList()
AsBuiltNodeList = []
for Pcd in PatchPcdValueList:
if IsMatchArch(Pcd.SupArchList, SupportArch):
Tmp = PcdEntryXml()
AsBuiltNodeList.append(Tmp.ToXml4(Pcd, 'PatchPcdValue'))
for Pcd in PcdExList:
if IsMatchArch(Pcd.SupArchList, SupportArch):
Tmp = PcdEntryXml()
AsBuiltNodeList.append(Tmp.ToXml4(Pcd, 'PcdExValue'))
GuiVerElemList = []
for LibGuidVer in LibGuidVerList:
if LibGuidVer.GetLibGuid() and IsMatchArch(LibGuidVer.GetSupArchList(), SupportArch):
GuiVerElem = \
CreateXmlElement('GUID', LibGuidVer.GetLibGuid(), [], [['Version', LibGuidVer.GetLibVersion()]])
GuiVerElemList.append(GuiVerElem)
if len(GuiVerElemList) > 0:
LibGuidVerElem = CreateXmlElement('LibraryInstances', '', GuiVerElemList, [])
AsBuiltNodeList.append(LibGuidVerElem)
for BuildFlag in BuildFlagList:
if IsMatchArch(BuildFlag.GetSupArchList(), SupportArch):
for Item in BuildFlag.GetAsBuildList():
Tmp = BuildFlagXml()
Elem = CreateXmlElement('BuildFlags', ''.join(Item), [], [])
AsBuiltNodeList.append(Elem)
if len(AsBuiltNodeList) > 0:
Element = CreateXmlElement('AsBuilt', '', AsBuiltNodeList, [])
NodeList.append(Element)
Root = CreateXmlElement('%s' % Key, '', NodeList, [])
return Root
def __str__(self):
Str = "BinaryFiles:"
for Item in self.FileNames:
Str = Str + '\n\t' + str(Item)
for Item in self.PatchPcdValues:
Str = Str + '\n\t' + str(Item)
for Item in self.PcdExValues:
Str = Str + '\n\t' + str(Item)
for Item in self.LibraryInstances:
Str = Str + '\n\t' + str(Item)
for Item in self.BuildFlags:
Str = Str + '\n\t' + str(Item)
return Str
##
# PackageXml
#
class PackageXml(object):
def __init__(self):
self.Description = ''
self.Guid = ''
self.Version = ''
self.CommonDefines = CommonDefinesXml()
def FromXml(self, Item, Key):
self.Description = XmlElement(Item, '%s/Description' % Key)
self.Guid = XmlElement(Item, '%s/GUID' % Key)
self.Version = XmlAttribute(XmlNode(Item, '%s/GUID' % Key), 'Version')
self.CommonDefines.FromXml(XmlNode(Item, '%s' % Key), Key)
PackageDependency = PackageDependencyObject()
PackageDependency.SetPackage(self.Description)
PackageDependency.SetGuid(self.Guid)
PackageDependency.SetVersion(self.Version)
PackageDependency.SetFeatureFlag(ConvertNOTEQToNE(self.CommonDefines.FeatureFlag))
PackageDependency.SetSupArchList(self.CommonDefines.SupArchList)
return PackageDependency
def ToXml(self, PackageDependency, Key):
if self.Guid:
pass
AttributeList = [['SupArchList', GetStringOfList(PackageDependency.GetSupArchList())],
['FeatureFlag', ConvertNEToNOTEQ(PackageDependency.GetFeatureFlag())], ]
Element1 = CreateXmlElement('GUID', PackageDependency.GetGuid(), [],
[['Version', PackageDependency.GetVersion()]])
NodeList = [['Description', PackageDependency.GetPackage()], Element1, ]
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
return Root
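    # Illustrative sketch of the element this class round-trips (package name,
    # GUID and version below are made-up placeholders):
    #
    #   <Package SupArchList="COMMON" FeatureFlag="">
    #     <Description>SamplePkg</Description>
    #     <GUID Version="1.0">AAAAAAAA-BBBB-CCCC-DDDD-EEEEEEEEEEEE</GUID>
    #   </Package>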
def __str__(self):
Str = "Description = %s Guid = %s Version = %s %s" \
% (self.Description, self.Guid, self.Version, self.CommonDefines)
return Str
##
# ExternXml
#
class ExternXml(object):
def __init__(self):
self.CommonDefines = CommonDefinesXml()
self.EntryPoint = ''
self.UnloadImage = ''
self.Constructor = ''
self.Destructor = ''
self.SupModList = ''
self.SupArchList = ''
self.HelpText = []
def FromXml(self, Item, Key):
self.CommonDefines.FromXml(Item, Key)
self.EntryPoint = XmlElement(Item, '%s/EntryPoint' % Key)
self.UnloadImage = XmlElement(Item, '%s/UnloadImage' % Key)
self.Constructor = XmlElement(Item, '%s/Constructor' % Key)
self.Destructor = XmlElement(Item, '%s/Destructor' % Key)
Extern = ExternObject()
Extern.SetEntryPoint(self.EntryPoint)
Extern.SetUnloadImage(self.UnloadImage)
Extern.SetConstructor(self.Constructor)
Extern.SetDestructor(self.Destructor)
if self.CommonDefines.SupModList:
Extern.SetSupModList(self.CommonDefines.SupModList)
if self.CommonDefines.SupArchList:
Extern.SetSupArchList(self.CommonDefines.SupArchList)
return Extern
def ToXml(self, Extern, Key):
if self.HelpText:
pass
NodeList = []
if Extern.GetEntryPoint():
NodeList.append(['EntryPoint', Extern.GetEntryPoint()])
if Extern.GetUnloadImage():
NodeList.append(['UnloadImage', Extern.GetUnloadImage()])
if Extern.GetConstructor():
NodeList.append(['Constructor', Extern.GetConstructor()])
if Extern.GetDestructor():
NodeList.append(['Destructor', Extern.GetDestructor()])
Root = CreateXmlElement('%s' % Key, '', NodeList, [])
return Root
def __str__(self):
Str = "EntryPoint = %s UnloadImage = %s Constructor = %s Destructor = %s %s" \
% (self.EntryPoint, self.UnloadImage, self.Constructor, self.Destructor, self.CommonDefines)
for Item in self.HelpText:
Str = Str + '\n\t' + str(Item)
return Str
##
# DepexXml
#
class DepexXml(object):
def __init__(self):
self.CommonDefines = CommonDefinesXml()
self.Expression = None
self.HelpText = []
def FromXml(self, Item, Key):
if not Item:
return None
self.CommonDefines.FromXml(Item, Key)
self.Expression = XmlElement(Item, '%s/Expression' % Key)
for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
HelpTextObj = HelpTextXml()
HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
self.HelpText.append(HelpTextObj)
Depex = DepexObject()
Depex.SetDepex(self.Expression)
Depex.SetModuleType(self.CommonDefines.SupModList)
Depex.SetSupArchList(self.CommonDefines.SupArchList)
Depex.SetFeatureFlag(self.CommonDefines.FeatureFlag)
Depex.SetHelpTextList(GetHelpTextList(self.HelpText))
return Depex
def ToXml(self, Depex, Key):
if self.HelpText:
pass
AttributeList = [['SupArchList', GetStringOfList(Depex.GetSupArchList())],
['SupModList', Depex.GetModuleType()]]
NodeList = [['Expression', Depex.GetDepex()]]
if Depex.GetHelpText():
Tmp = HelpTextXml()
NodeList.append(Tmp.ToXml(Depex.GetHelpText(), 'HelpText'))
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
return Root
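    # Illustrative sketch of a dependency expression element in the shape this
    # class reads and writes (the GUID C names are hypothetical):
    #
    #   <PeiDepex SupArchList="IA32" SupModList="">
    #     <Expression>gSamplePpiGuid AND gOtherPpiGuid</Expression>
    #   </PeiDepex>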
def __str__(self):
Str = "Expression = %s" % (self.Expression)
for Item in self.HelpText:
Str = Str + '\n\t' + str(Item)
return Str
##
# BootModeXml
#
class BootModeXml(object):
def __init__(self):
self.SupportedBootModes = ''
self.CommonDefines = CommonDefinesXml()
self.HelpText = []
def FromXml(self, Item, Key):
self.SupportedBootModes = \
XmlElement(Item, '%s/SupportedBootModes' % Key)
self.CommonDefines.FromXml(Item, Key)
for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
HelpTextObj = HelpTextXml()
HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
self.HelpText.append(HelpTextObj)
BootMode = BootModeObject()
BootMode.SetSupportedBootModes(self.SupportedBootModes)
BootMode.SetUsage(self.CommonDefines.Usage)
BootMode.SetHelpTextList(GetHelpTextList(self.HelpText))
return BootMode
def ToXml(self, BootMode, Key):
if self.HelpText:
pass
AttributeList = [['Usage', BootMode.GetUsage()], ]
NodeList = [['SupportedBootModes', BootMode.GetSupportedBootModes()]]
for Item in BootMode.GetHelpTextList():
Tmp = HelpTextXml()
NodeList.append(Tmp.ToXml(Item, 'HelpText'))
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
return Root
def __str__(self):
Str = "SupportedBootModes = %s %s" % (self.SupportedBootModes, self.CommonDefines)
for Item in self.HelpText:
Str = Str + '\n\t' + str(Item)
return Str
##
# EventXml
#
class EventXml(object):
def __init__(self):
self.EventType = ''
self.Name = ''
self.CommonDefines = CommonDefinesXml()
self.HelpText = []
def FromXml(self, Item, Key):
self.EventType = XmlAttribute(XmlNode(Item, '%s' % Key), 'EventType')
self.Name = XmlElement(Item, '%s' % Key)
self.CommonDefines.FromXml(Item, Key)
for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
HelpTextObj = HelpTextXml()
HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
self.HelpText.append(HelpTextObj)
Event = EventObject()
Event.SetEventType(self.EventType)
Event.SetUsage(self.CommonDefines.Usage)
Event.SetHelpTextList(GetHelpTextList(self.HelpText))
return Event
def ToXml(self, Event, Key):
if self.HelpText:
pass
AttributeList = [['EventType', Event.GetEventType()],
['Usage', Event.GetUsage()],
]
NodeList = []
for Item in Event.GetHelpTextList():
Tmp = HelpTextXml()
NodeList.append(Tmp.ToXml(Item, 'HelpText'))
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
return Root
def __str__(self):
Str = "EventType = %s %s" % (self.EventType, self.CommonDefines)
for Item in self.HelpText:
Str = Str + '\n\t' + str(Item)
return Str
##
# HobXml
#
class HobXml(object):
def __init__(self):
self.HobType = ''
self.Name = ''
self.CommonDefines = CommonDefinesXml()
self.HelpText = []
def FromXml(self, Item, Key):
self.HobType = XmlAttribute(XmlNode(Item, '%s' % Key), 'HobType')
self.Name = XmlElement(Item, '%s' % Key)
self.CommonDefines.FromXml(Item, Key)
for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
HelpTextObj = HelpTextXml()
HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
self.HelpText.append(HelpTextObj)
Hob = HobObject()
Hob.SetHobType(self.HobType)
Hob.SetSupArchList(self.CommonDefines.SupArchList)
Hob.SetUsage(self.CommonDefines.Usage)
Hob.SetHelpTextList(GetHelpTextList(self.HelpText))
return Hob
def ToXml(self, Hob, Key):
if self.Name:
pass
AttributeList = [['HobType', Hob.GetHobType()],
['Usage', Hob.GetUsage()],
['SupArchList', GetStringOfList(Hob.GetSupArchList())], ]
NodeList = []
for Item in Hob.GetHelpTextList():
Tmp = HelpTextXml()
NodeList.append(Tmp.ToXml(Item, 'HelpText'))
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
return Root
def __str__(self):
Str = "HobType = %s %s" % (self.HobType, self.CommonDefines)
for Item in self.HelpText:
Str = Str + '\n\t' + str(Item)
return Str
##
# SourceFileXml
#
class SourceFileXml(object):
def __init__(self):
self.SourceFile = ''
self.ToolChainFamily = ''
self.FileType = ''
self.CommonDefines = CommonDefinesXml()
def FromXml(self, Item, Key):
self.ToolChainFamily = XmlAttribute(Item, 'Family')
self.SourceFile = XmlElement(Item, 'Filename')
self.CommonDefines.FromXml(Item, Key)
self.CommonDefines.FeatureFlag = ConvertNOTEQToNE(self.CommonDefines.FeatureFlag)
SourceFile = SourceFileObject()
SourceFile.SetSourceFile(self.SourceFile)
SourceFile.SetFamily(self.ToolChainFamily)
SourceFile.SetSupArchList(self.CommonDefines.SupArchList)
SourceFile.SetFeatureFlag(self.CommonDefines.FeatureFlag)
return SourceFile
def ToXml(self, SourceFile, Key):
if self.SourceFile:
pass
FeatureFlag = ConvertNEToNOTEQ(SourceFile.GetFeatureFlag())
AttributeList = [['SupArchList', GetStringOfList(SourceFile.GetSupArchList())],
['Family', SourceFile.GetFamily()],
['FeatureFlag', FeatureFlag], ]
Root = CreateXmlElement('%s' % Key, SourceFile.GetSourceFile(), [], AttributeList)
return Root
##
# ModulePropertyXml
#
class ModulePropertyXml(object):
def __init__(self):
self.CommonDefines = CommonDefinesXml()
self.ModuleType = ''
self.Path = ''
self.PcdIsDriver = ''
self.UefiSpecificationVersion = ''
self.PiSpecificationVersion = ''
self.SpecificationList = []
self.SpecificationVersion = ''
self.BootModes = []
self.Events = []
self.HOBs = []
def FromXml(self, Item, Key, Header=None):
self.CommonDefines.FromXml(Item, Key)
self.ModuleType = XmlElement(Item, '%s/ModuleType' % Key)
self.Path = XmlElement(Item, '%s/Path' % Key)
self.PcdIsDriver = XmlElement(Item, '%s/PcdIsDriver' % Key)
self.UefiSpecificationVersion = XmlElement(Item, '%s/UefiSpecificationVersion' % Key)
self.PiSpecificationVersion = XmlElement(Item, '%s/PiSpecificationVersion' % Key)
for SubItem in XmlList(Item, '%s/Specification' % Key):
Specification = XmlElement(SubItem, '/Specification')
Version = XmlAttribute(XmlNode(SubItem, '/Specification'), 'Version')
self.SpecificationList.append((Specification, Version))
for SubItem in XmlList(Item, '%s/BootMode' % Key):
Axml = BootModeXml()
BootMode = Axml.FromXml(SubItem, 'BootMode')
self.BootModes.append(BootMode)
for SubItem in XmlList(Item, '%s/Event' % Key):
Axml = EventXml()
Event = Axml.FromXml(SubItem, 'Event')
self.Events.append(Event)
for SubItem in XmlList(Item, '%s/HOB' % Key):
Axml = HobXml()
Hob = Axml.FromXml(SubItem, 'HOB')
self.HOBs.append(Hob)
if Header is None:
Header = ModuleObject()
Header.SetModuleType(self.ModuleType)
Header.SetSupArchList(self.CommonDefines.SupArchList)
Header.SetModulePath(self.Path)
Header.SetPcdIsDriver(self.PcdIsDriver)
Header.SetUefiSpecificationVersion(self.UefiSpecificationVersion)
Header.SetPiSpecificationVersion(self.PiSpecificationVersion)
Header.SetSpecList(self.SpecificationList)
return Header, self.BootModes, self.Events, self.HOBs
def ToXml(self, Header, BootModes, Events, Hobs, Key):
if self.ModuleType:
pass
AttributeList = [['SupArchList', GetStringOfList(Header.GetSupArchList())], ]
NodeList = [['ModuleType', Header.GetModuleType()],
['Path', Header.GetModulePath()],
['PcdIsDriver', Header.GetPcdIsDriver()],
['UefiSpecificationVersion', Header.GetUefiSpecificationVersion()],
['PiSpecificationVersion', Header.GetPiSpecificationVersion()],
]
for Item in Header.GetSpecList():
Spec, Version = Item
SpecElem = CreateXmlElement('Specification', Spec, [], [['Version', Version]])
NodeList.append(SpecElem)
for Item in BootModes:
Tmp = BootModeXml()
NodeList.append(Tmp.ToXml(Item, 'BootMode'))
for Item in Events:
Tmp = EventXml()
NodeList.append(Tmp.ToXml(Item, 'Event'))
for Item in Hobs:
Tmp = HobXml()
NodeList.append(Tmp.ToXml(Item, 'HOB'))
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
return Root
def __str__(self):
Str = "ModuleType = %s Path = %s PcdIsDriver = %s UefiSpecificationVersion = %s PiSpecificationVersion = %s \
Specification = %s SpecificationVersion = %s %s" % \
(self.ModuleType, self.Path, self.PcdIsDriver, \
self.UefiSpecificationVersion, self.PiSpecificationVersion, \
self.SpecificationList, self.SpecificationVersion, self.CommonDefines)
for Item in self.BootModes:
Str = Str + '\n\t' + str(Item)
for Item in self.Events:
Str = Str + '\n\t' + str(Item)
for Item in self.HOBs:
Str = Str + '\n\t' + str(Item)
return Str
##
# ModuleXml
#
class ModuleSurfaceAreaXml(object):
def __init__(self, Package=''):
self.Module = None
#
# indicate the package that this module resides in
#
self.Package = Package
def FromXml2(self, Item, Module):
if self.Module:
pass
#
# PeiDepex
#
PeiDepexList = []
for SubItem in XmlList(Item, '/ModuleSurfaceArea/PeiDepex'):
Tmp = DepexXml()
Depex = Tmp.FromXml(XmlNode(SubItem, 'PeiDepex'), 'PeiDepex')
PeiDepexList.append(Depex)
Module.SetPeiDepex(PeiDepexList)
#
# DxeDepex
#
DxeDepexList = []
for SubItem in XmlList(Item, '/ModuleSurfaceArea/DxeDepex'):
Tmp = DepexXml()
Depex = Tmp.FromXml(XmlNode(SubItem, 'DxeDepex'), 'DxeDepex')
DxeDepexList.append(Depex)
Module.SetDxeDepex(DxeDepexList)
#
# SmmDepex
#
SmmDepexList = []
for SubItem in XmlList(Item, '/ModuleSurfaceArea/SmmDepex'):
Tmp = DepexXml()
Depex = Tmp.FromXml(XmlNode(SubItem, 'SmmDepex'), 'SmmDepex')
SmmDepexList.append(Depex)
Module.SetSmmDepex(SmmDepexList)
#
# MiscellaneousFile
Tmp = MiscellaneousFileXml()
MiscFileList = Tmp.FromXml(XmlNode(Item, '/ModuleSurfaceArea/MiscellaneousFiles'), 'MiscellaneousFiles')
if MiscFileList:
Module.SetMiscFileList([MiscFileList])
else:
Module.SetMiscFileList([])
#
# UserExtensions
#
        # Use a distinct loop variable so the 'Item' parameter is not shadowed.
        for UserExtensionItem in XmlList(Item, '/ModuleSurfaceArea/UserExtensions'):
            Tmp = UserExtensionsXml()
            UserExtension = Tmp.FromXml(UserExtensionItem, 'UserExtensions')
Module.SetUserExtensionList(Module.GetUserExtensionList() + [UserExtension])
return Module
def FromXml(self, Item, Key, IsStandAlongModule=False):
IsBinaryModule = XmlAttribute(Item, 'BinaryModule')
#
# Header
#
Tmp = HeaderXml()
Module = Tmp.FromXml(XmlNode(Item, '/%s/Header' % Key), 'Header', True, IsStandAlongModule)
Module.SetBinaryModule(IsBinaryModule)
if IsBinaryModule:
GlobalData.gIS_BINARY_INF = True
#
# ModuleProperties
#
Tmp = ModulePropertyXml()
(Module, BootModes, Events, HOBs) = \
Tmp.FromXml(XmlNode(Item, '/ModuleSurfaceArea/ModuleProperties'), 'ModuleProperties', Module)
Module.SetBootModeList(BootModes)
Module.SetEventList(Events)
Module.SetHobList(HOBs)
#
# ClonedFrom
#
Tmp = ClonedFromXml()
ClonedFrom = Tmp.FromXml(XmlNode(Item, '/ModuleSurfaceArea/ClonedFrom'), 'ClonedFrom')
if ClonedFrom:
Module.SetClonedFrom(ClonedFrom)
#
# LibraryClass
#
for SubItem in XmlList(Item, '/ModuleSurfaceArea/LibraryClassDefinitions/LibraryClass'):
Tmp = LibraryClassXml()
LibraryClass = Tmp.FromXml(SubItem, 'LibraryClass')
Module.SetLibraryClassList(Module.GetLibraryClassList() + [LibraryClass])
if XmlList(Item, '/ModuleSurfaceArea/LibraryClassDefinitions') and \
not XmlList(Item, '/ModuleSurfaceArea/LibraryClassDefinitions/LibraryClass'):
Module.SetLibraryClassList([None])
#
# SourceFiles
#
for SubItem in XmlList(Item, '/ModuleSurfaceArea/SourceFiles/Filename'):
Tmp = SourceFileXml()
SourceFile = Tmp.FromXml(SubItem, 'Filename')
Module.SetSourceFileList(Module.GetSourceFileList() + [SourceFile])
if XmlList(Item, '/ModuleSurfaceArea/SourceFiles') and \
not XmlList(Item, '/ModuleSurfaceArea/SourceFiles/Filename') :
Module.SetSourceFileList([None])
#
# BinaryFile
#
for SubItem in XmlList(Item, '/ModuleSurfaceArea/BinaryFiles/BinaryFile'):
Tmp = BinaryFileXml()
BinaryFile = Tmp.FromXml(SubItem, 'BinaryFile')
Module.SetBinaryFileList(Module.GetBinaryFileList() + [BinaryFile])
if XmlList(Item, '/ModuleSurfaceArea/BinaryFiles') and \
not XmlList(Item, '/ModuleSurfaceArea/BinaryFiles/BinaryFile') :
Module.SetBinaryFileList([None])
#
# PackageDependencies
#
for SubItem in XmlList(Item, '/ModuleSurfaceArea/PackageDependencies/Package'):
Tmp = PackageXml()
PackageDependency = Tmp.FromXml(SubItem, 'Package')
Module.SetPackageDependencyList(Module.GetPackageDependencyList() + [PackageDependency])
if XmlList(Item, '/ModuleSurfaceArea/PackageDependencies') and \
not XmlList(Item, '/ModuleSurfaceArea/PackageDependencies/Package'):
Module.SetPackageDependencyList([None])
#
# Guid
#
for SubItem in XmlList(Item, '/ModuleSurfaceArea/Guids/GuidCName'):
Tmp = GuidXml('Module')
GuidProtocolPpi = Tmp.FromXml(SubItem, 'GuidCName')
Module.SetGuidList(Module.GetGuidList() + [GuidProtocolPpi])
if XmlList(Item, '/ModuleSurfaceArea/Guids') and not XmlList(Item, '/ModuleSurfaceArea/Guids/GuidCName'):
Module.SetGuidList([None])
#
# Protocol
#
for SubItem in XmlList(Item, '/ModuleSurfaceArea/Protocols/Protocol'):
Tmp = ProtocolXml('Module')
GuidProtocolPpi = Tmp.FromXml(SubItem, 'Protocol')
Module.SetProtocolList(Module.GetProtocolList() + [GuidProtocolPpi])
if XmlList(Item, '/ModuleSurfaceArea/Protocols') and not XmlList(Item, '/ModuleSurfaceArea/Protocols/Protocol'):
Module.SetProtocolList([None])
#
# Ppi
#
for SubItem in XmlList(Item, '/ModuleSurfaceArea/PPIs/Ppi'):
Tmp = PpiXml('Module')
GuidProtocolPpi = Tmp.FromXml(SubItem, 'Ppi')
Module.SetPpiList(Module.GetPpiList() + [GuidProtocolPpi])
if XmlList(Item, '/ModuleSurfaceArea/PPIs') and not XmlList(Item, '/ModuleSurfaceArea/PPIs/Ppi'):
Module.SetPpiList([None])
#
# Extern
#
for SubItem in XmlList(Item, '/ModuleSurfaceArea/Externs/Extern'):
Tmp = ExternXml()
Extern = Tmp.FromXml(SubItem, 'Extern')
Module.SetExternList(Module.GetExternList() + [Extern])
if XmlList(Item, '/ModuleSurfaceArea/Externs') and not XmlList(Item, '/ModuleSurfaceArea/Externs/Extern'):
Module.SetExternList([None])
if not Module.GetBinaryModule():
#
# PcdCoded
#
for SubItem in XmlList(Item, '/ModuleSurfaceArea/PcdCoded/PcdEntry'):
Tmp = PcdEntryXml()
PcdEntry = Tmp.FromXml3(SubItem, 'PcdEntry')
Module.SetPcdList(Module.GetPcdList() + [PcdEntry])
if XmlList(Item, '/ModuleSurfaceArea/PcdCoded') and \
not XmlList(Item, '/ModuleSurfaceArea/PcdCoded/PcdEntry'):
Module.SetPcdList([None])
Module = self.FromXml2(Item, Module)
#
# return the module object
#
self.Module = Module
return self.Module
def ToXml(self, Module):
if self.Package:
pass
#
# Create root node of module surface area
#
DomModule = minidom.Document().createElement('ModuleSurfaceArea')
if Module.GetBinaryModule():
DomModule.setAttribute('BinaryModule', 'true')
#
# Header
#
Tmp = HeaderXml()
DomModule.appendChild(Tmp.ToXml(Module, 'Header'))
#
# ModuleProperties
#
Tmp = ModulePropertyXml()
DomModule.appendChild(Tmp.ToXml(Module, Module.GetBootModeList(), Module.GetEventList(), Module.GetHobList(), \
'ModuleProperties'))
#
# ClonedFrom
#
Tmp = ClonedFromXml()
if Module.GetClonedFrom():
DomModule.appendChild(Tmp.ToXml(Module.GetClonedFrom(), 'ClonedFrom'))
#
# LibraryClass
#
LibraryClassNode = CreateXmlElement('LibraryClassDefinitions', '', [], [])
for LibraryClass in Module.GetLibraryClassList():
Tmp = LibraryClassXml()
LibraryClassNode.appendChild(Tmp.ToXml2(LibraryClass, 'LibraryClass'))
DomModule.appendChild(LibraryClassNode)
#
# SourceFile
#
SourceFileNode = CreateXmlElement('SourceFiles', '', [], [])
for SourceFile in Module.GetSourceFileList():
Tmp = SourceFileXml()
SourceFileNode.appendChild(Tmp.ToXml(SourceFile, 'Filename'))
DomModule.appendChild(SourceFileNode)
#
# BinaryFile
#
BinaryFileNode = CreateXmlElement('BinaryFiles', '', [], [])
for BinaryFile in Module.GetBinaryFileList():
Tmp = BinaryFileXml()
BinaryFileNode.appendChild(Tmp.ToXml(BinaryFile, 'BinaryFile'))
DomModule.appendChild(BinaryFileNode)
#
# PackageDependencies
#
PackageDependencyNode = CreateXmlElement('PackageDependencies', '', [], [])
for PackageDependency in Module.GetPackageDependencyList():
Tmp = PackageXml()
PackageDependencyNode.appendChild(Tmp.ToXml(PackageDependency, 'Package'))
DomModule.appendChild(PackageDependencyNode)
#
# Guid
#
GuidProtocolPpiNode = CreateXmlElement('Guids', '', [], [])
for GuidProtocolPpi in Module.GetGuidList():
Tmp = GuidXml('Module')
GuidProtocolPpiNode.appendChild(Tmp.ToXml(GuidProtocolPpi, 'GuidCName'))
DomModule.appendChild(GuidProtocolPpiNode)
#
# Protocol
#
GuidProtocolPpiNode = CreateXmlElement('Protocols', '', [], [])
for GuidProtocolPpi in Module.GetProtocolList():
Tmp = ProtocolXml('Module')
GuidProtocolPpiNode.appendChild(Tmp.ToXml(GuidProtocolPpi, 'Protocol'))
DomModule.appendChild(GuidProtocolPpiNode)
#
# Ppi
#
GuidProtocolPpiNode = CreateXmlElement('PPIs', '', [], [])
for GuidProtocolPpi in Module.GetPpiList():
Tmp = PpiXml('Module')
GuidProtocolPpiNode.appendChild(Tmp.ToXml(GuidProtocolPpi, 'Ppi'))
DomModule.appendChild(GuidProtocolPpiNode)
#
# Extern
#
ExternNode = CreateXmlElement('Externs', '', [], [])
for Extern in Module.GetExternList():
Tmp = ExternXml()
ExternNode.appendChild(Tmp.ToXml(Extern, 'Extern'))
DomModule.appendChild(ExternNode)
#
# PcdCoded
#
PcdEntryNode = CreateXmlElement('PcdCoded', '', [], [])
for PcdEntry in Module.GetPcdList():
Tmp = PcdEntryXml()
PcdEntryNode.appendChild(Tmp.ToXml3(PcdEntry, 'PcdEntry'))
DomModule.appendChild(PcdEntryNode)
#
# PeiDepex
#
if Module.GetPeiDepex():
for Item in Module.GetPeiDepex():
Tmp = DepexXml()
DomModule.appendChild(Tmp.ToXml(Item, 'PeiDepex'))
#
# DxeDepex
#
if Module.GetDxeDepex():
for Item in Module.GetDxeDepex():
Tmp = DepexXml()
DomModule.appendChild(Tmp.ToXml(Item, 'DxeDepex'))
#
# SmmDepex
#
if Module.GetSmmDepex():
for Item in Module.GetSmmDepex():
Tmp = DepexXml()
DomModule.appendChild(Tmp.ToXml(Item, 'SmmDepex'))
#
# MiscellaneousFile
#
if Module.GetMiscFileList():
Tmp = MiscellaneousFileXml()
DomModule.appendChild(Tmp.ToXml(Module.GetMiscFileList()[0], 'MiscellaneousFiles'))
#
# UserExtensions
#
if Module.GetUserExtensionList():
for UserExtension in Module.GetUserExtensionList():
Tmp = UserExtensionsXml()
DomModule.appendChild(Tmp.ToXml(UserExtension, 'UserExtensions'))
return DomModule
##
# BuildFlagXml used to generate BuildFlag for <AsBuilt>
#
class BuildFlagXml(object):
def __init__(self):
self.Target = ''
self.TagName = ''
self.Family = ''
self.AsBuiltFlags = ''
def FromXml(self, Item, Key):
self.Target = XmlElement(Item, '%s/Target' % Key)
self.TagName = XmlElement(Item, '%s/TagName' % Key)
self.Family = XmlElement(Item, '%s/Family' % Key)
BuildFlag = BinaryBuildFlagObject()
BuildFlag.SetTarget(self.Target)
BuildFlag.SetTagName(self.TagName)
BuildFlag.SetFamily(self.Family)
return BuildFlag
#
# For AsBuilt INF usage
#
def FromXml2(self, Item, Key):
self.AsBuiltFlags = XmlElement(Item, '%s' % Key)
LineList = GetSplitValueList(self.AsBuiltFlags, '\n')
ReturnLine = ''
Count = 0
for Line in LineList:
if Count == 0:
ReturnLine = "# " + Line
else:
ReturnLine = ReturnLine + '\n' + '# ' + Line
Count += 1
BuildFlag = BinaryBuildFlagObject()
BuildFlag.SetAsBuiltOptionFlags(ReturnLine)
return BuildFlag
def ToXml(self, BuildFlag, Key):
if self.Target:
pass
AttributeList = []
NodeList = []
NodeList.append(['BuildFlags', BuildFlag])
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
return Root
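#
# Illustrative note (added for clarity, not part of the original tool): FromXml2()
# above prefixes every line of the <BuildFlags> element text with '# ' so the
# result can be dropped into an as-built INF as comments. Assuming the element
# held the hypothetical two-line text
#
#   MSFT:*_*_*_CC_FLAGS = /D FOO
#   GCC:*_*_*_CC_FLAGS = -DFOO
#
# the returned BinaryBuildFlagObject's AsBuiltOptionFlags would read
# '# MSFT:*_*_*_CC_FLAGS = /D FOO\n# GCC:*_*_*_CC_FLAGS = -DFOO'.
#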
| edk2-master | BaseTools/Source/Python/UPT/Xml/ModuleSurfaceAreaXml.py |
## @file
# This file is used to parse an XML file of a .PKG file
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
XmlParser
'''
##
# Import Modules
#
import re
from Library.Xml.XmlRoutines import XmlNode
from Library.Xml.XmlRoutines import CreateXmlElement
from Library.Xml.XmlRoutines import XmlList
from Library.Xml.XmlRoutines import XmlParseFile
from Core.DistributionPackageClass import DistributionPackageClass
from Object.POM.ModuleObject import DepexObject
from Library.ParserValidate import IsValidInfMoudleType
from Library.ParserValidate import IsValidInstallPath
from Library.Misc import IsEqualList
from Library.Misc import Sdict
from Logger.StringTable import ERR_XML_INVALID_VARIABLENAME
from Logger.StringTable import ERR_XML_INVALID_LIB_SUPMODLIST
from Logger.StringTable import ERR_XML_INVALID_EXTERN_SUPARCHLIST
from Logger.StringTable import ERR_XML_INVALID_EXTERN_SUPMODLIST
from Logger.StringTable import ERR_XML_INVALID_EXTERN_SUPMODLIST_NOT_LIB
from Logger.StringTable import ERR_FILE_NAME_INVALIDE
from Logger.ToolError import PARSER_ERROR
from Logger.ToolError import FORMAT_INVALID
from Xml.CommonXml import DistributionPackageHeaderXml
from Xml.CommonXml import MiscellaneousFileXml
from Xml.CommonXml import UserExtensionsXml
from Xml.XmlParserMisc import ConvertVariableName
from Xml.XmlParserMisc import IsRequiredItemListNull
from Xml.ModuleSurfaceAreaXml import ModuleSurfaceAreaXml
from Xml.PackageSurfaceAreaXml import PackageSurfaceAreaXml
import Logger.Log as Logger
##
# DistributionPackageXml
#
class DistributionPackageXml(object):
def __init__(self):
self.DistP = DistributionPackageClass()
self.Pkg = ''
## ValidateDistributionPackage
#
# Check if any required item is missing in DistributionPackage
#
def ValidateDistributionPackage(self):
XmlTreeLevel = ['DistributionPackage']
if self.DistP:
#
# Check DistributionPackage -> DistributionHeader
#
XmlTreeLevel = ['DistributionPackage', '']
CheckDict = {'DistributionHeader':self.DistP.Header }
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
if self.DistP.Header:
DpHeader = self.DistP.Header
XmlTreeLevel = ['DistributionPackage', 'DistributionHeader']
CheckDict = Sdict()
if DpHeader.GetAbstract():
DPAbstract = DpHeader.GetAbstract()[0][1]
else:
DPAbstract = ''
if DpHeader.GetCopyright():
DPCopyright = DpHeader.GetCopyright()[0][1]
else:
DPCopyright = ''
if DpHeader.GetLicense():
DPLicense = DpHeader.GetLicense()[0][1]
else:
DPLicense = ''
CheckDict['Name'] = DpHeader.GetName()
CheckDict['GUID'] = DpHeader.GetGuid()
CheckDict['Version'] = DpHeader.GetVersion()
CheckDict['Copyright'] = DPCopyright
CheckDict['License'] = DPLicense
CheckDict['Abstract'] = DPAbstract
CheckDict['Vendor'] = DpHeader.GetVendor()
CheckDict['Date'] = DpHeader.GetDate()
CheckDict['XmlSpecification'] = DpHeader.GetXmlSpecification()
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
else:
XmlTreeLevel = ['DistributionPackage', 'DistributionHeader']
CheckDict = {'DistributionHeader': '', }
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
# Check Each Package
#
for Key in self.DistP.PackageSurfaceArea:
ValidatePackageSurfaceArea(self.DistP.PackageSurfaceArea[Key])
#
# Check Each Module
#
for Key in self.DistP.ModuleSurfaceArea:
ValidateMS(self.DistP.ModuleSurfaceArea[Key], ['DistributionPackage', 'ModuleSurfaceArea'])
#
# Check Each Tool
#
if self.DistP.Tools:
XmlTreeLevel = ['DistributionPackage', 'Tools', 'Header']
CheckDict = {'Name': self.DistP.Tools.GetName(), }
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
if not self.DistP.Tools.GetFileList():
XmlTreeLevel = ['DistributionPackage', 'Tools']
CheckDict = {'FileName': None, }
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
for Item in self.DistP.Tools.GetFileList():
XmlTreeLevel = ['DistributionPackage', 'Tools']
CheckDict = {'FileName': Item.GetURI(), }
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
# Check Each Misc File
#
if self.DistP.MiscellaneousFiles:
XmlTreeLevel = ['DistributionPackage', 'MiscellaneousFiles', 'Header']
CheckDict = {'Name': self.DistP.MiscellaneousFiles.GetName(), }
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
if not self.DistP.MiscellaneousFiles.GetFileList():
XmlTreeLevel = ['DistributionPackage', 'MiscellaneousFiles']
CheckDict = {'FileName': None, }
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
for Item in self.DistP.MiscellaneousFiles.GetFileList():
XmlTreeLevel = ['DistributionPackage', 'MiscellaneousFiles']
CheckDict = {'FileName': Item.GetURI(), }
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
# Check Each Distribution Level User Extension
#
for Item in self.DistP.UserExtensions:
XmlTreeLevel = ['DistributionPackage', 'UserExtensions']
CheckDict = {'UserId': Item.GetUserID(), }
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
def FromXml(self, Filename=None):
if Filename is not None:
self.DistP = DistributionPackageClass()
#
# Load to XML
#
self.Pkg = XmlParseFile(Filename)
#
# Parse Header information
#
Tmp = DistributionPackageHeaderXml()
DistributionPackageHeader = \
Tmp.FromXml(XmlNode(self.Pkg, '/DistributionPackage/DistributionHeader'), 'DistributionHeader')
self.DistP.Header = DistributionPackageHeader
#
# Parse each PackageSurfaceArea
#
for Item in XmlList(self.Pkg, '/DistributionPackage/PackageSurfaceArea'):
Psa = PackageSurfaceAreaXml()
Package = Psa.FromXml(Item, 'PackageSurfaceArea')
self.DistP.PackageSurfaceArea[(Package.GetGuid(), \
Package.GetVersion(), \
Package.GetPackagePath())] = \
Package
#
# Parse each ModuleSurfaceArea
#
for Item in XmlList(self.Pkg, '/DistributionPackage/ModuleSurfaceArea'):
Msa = ModuleSurfaceAreaXml()
Module = Msa.FromXml(Item, 'ModuleSurfaceArea', True)
ModuleKey = (Module.GetGuid(), Module.GetVersion(), Module.GetName(), Module.GetModulePath())
self.DistP.ModuleSurfaceArea[ModuleKey] = Module
#
# Parse Tools
#
Tmp = MiscellaneousFileXml()
self.DistP.Tools = Tmp.FromXml2(XmlNode(self.Pkg, '/DistributionPackage/Tools'), 'Tools')
#
# Parse MiscFiles
#
Tmp = MiscellaneousFileXml()
self.DistP.MiscellaneousFiles = \
Tmp.FromXml2(XmlNode(self.Pkg, \
'/DistributionPackage/MiscellaneousFiles'), \
'MiscellaneousFiles')
#
# Parse UserExtensions
#
for Item in XmlList(self.Pkg, '/DistributionPackage/UserExtensions'):
Tmp = UserExtensionsXml()
self.DistP.UserExtensions.append(Tmp.FromXml2(Item, 'UserExtensions'))
#
# Check Required Items for XML
#
self.ValidateDistributionPackage()
return self.DistP
def ToXml(self, DistP):
if self.DistP:
pass
if DistP is not None:
#
# Parse DistributionPackageHeader
#
Attrs = [['xmlns', 'http://www.uefi.org/2011/1.1'],
['xmlns:xsi', 'http://www.w3.org/2001/XMLSchema-instance'],
]
Root = CreateXmlElement('DistributionPackage', '', [], Attrs)
Tmp = DistributionPackageHeaderXml()
Root.appendChild(Tmp.ToXml(DistP.Header, 'DistributionHeader'))
#
# Parse each PackageSurfaceArea
#
for Package in DistP.PackageSurfaceArea.values():
Psa = PackageSurfaceAreaXml()
DomPackage = Psa.ToXml(Package)
Root.appendChild(DomPackage)
#
# Parse each ModuleSurfaceArea
#
for Module in DistP.ModuleSurfaceArea.values():
Msa = ModuleSurfaceAreaXml()
DomModule = Msa.ToXml(Module)
Root.appendChild(DomModule)
#
# Parse Tools
#
Tmp = MiscellaneousFileXml()
ToolNode = Tmp.ToXml2(DistP.Tools, 'Tools')
if ToolNode is not None:
Root.appendChild(ToolNode)
#
# Parse MiscFiles
#
Tmp = MiscellaneousFileXml()
MiscFileNode = Tmp.ToXml2(DistP.MiscellaneousFiles,
'MiscellaneousFiles')
if MiscFileNode is not None:
Root.appendChild(MiscFileNode)
XmlContent = Root.toprettyxml(indent=' ')
#
# Remove empty element
#
XmlContent = re.sub(r'[\s\r\n]*<[^<>=]*/>', '', XmlContent)
#
# Remove empty help text element
#
XmlContent = re.sub(r'[\s\r\n]*<HelpText Lang="en-US"/>', '',
XmlContent)
#
# Remove SupArchList="COMMON" or "common"
#
XmlContent = \
re.sub(r'[\s\r\n]*SupArchList[\s\r\n]*=[\s\r\n]*"[\s\r\n]*COMMON'
r'[\s\r\n]*"', '', XmlContent)
XmlContent = \
re.sub(r'[\s\r\n]*SupArchList[\s\r\n]*=[\s\r\n]*"[\s\r\n]*common'
r'[\s\r\n]*"', '', XmlContent)
#
# Remove <SupArchList> COMMON </SupArchList>
#
XmlContent = \
re.sub(r'[\s\r\n]*<SupArchList>[\s\r\n]*COMMON[\s\r\n]*'
r'</SupArchList>[\s\r\n]*', '', XmlContent)
#
# Remove <SupArchList> common </SupArchList>
#
XmlContent = \
re.sub(r'[\s\r\n]*<SupArchList>[\s\r\n]*'
r'common[\s\r\n]*</SupArchList>[\s\r\n]*', '', XmlContent)
#
# Remove SupModList="COMMON" or "common"
#
XmlContent = \
re.sub(r'[\s\r\n]*SupModList[\s\r\n]*=[\s\r\n]*"[\s\r\n]*COMMON'
r'[\s\r\n]*"', '', XmlContent)
XmlContent = \
re.sub(r'[\s\r\n]*SupModList[\s\r\n]*=[\s\r\n]*"[\s\r\n]*common'
r'[\s\r\n]*"', '', XmlContent)
return XmlContent
return ''
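#
# Illustrative sketch (added for clarity, not part of the original tool): the
# re.sub() calls at the end of ToXml() strip noise from the pretty-printed XML.
# Assuming XmlContent contained the fragments
#
#   <Abstract/>
#   <SupArchList> COMMON </SupArchList>
#   SupModList="common"
#
# the empty-element, SupArchList and SupModList substitutions above would remove
# all three, leaving only meaningful markup in the distribution package XML.
#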
## ValidateMS
#
# Check if any required item is missing in ModuleSurfaceArea
#
# @param Module: The ModuleSurfaceArea to be checked
# @param XmlTreeLevel: The top level of Module
#
def ValidateMS(Module, TopXmlTreeLevel):
ValidateMS1(Module, TopXmlTreeLevel)
ValidateMS2(Module, TopXmlTreeLevel)
ValidateMS3(Module, TopXmlTreeLevel)
## ValidateMS1
#
# Check if any required item is missing in ModuleSurfaceArea
#
# @param Module: The ModuleSurfaceArea to be checked
# @param XmlTreeLevel: The top level of Module
#
def ValidateMS1(Module, TopXmlTreeLevel):
#
# Check Guids -> GuidCName
#
XmlTreeLevel = TopXmlTreeLevel + ['Guids']
for Item in Module.GetGuidList():
if Item is None:
CheckDict = {'GuidCName':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
XmlTreeLevel = TopXmlTreeLevel + ['Guids', 'GuidCName']
for Item in Module.GetGuidList():
CheckDict = {'CName':Item.GetCName(),
'GuidType':Item.GetGuidTypeList(),
'Usage':Item.GetUsage()}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
if Item.GetVariableName():
Result = ConvertVariableName(Item.GetVariableName())
if Result is None:
Msg = "->".join(Node for Node in XmlTreeLevel)
ErrorMsg = ERR_XML_INVALID_VARIABLENAME % (Item.GetVariableName(), Item.GetCName(), Msg)
Logger.Error('\nUPT', PARSER_ERROR, ErrorMsg, RaiseError=True)
else:
Item.SetVariableName(Result)
#
# Check Protocols -> Protocol
#
XmlTreeLevel = TopXmlTreeLevel + ['Protocols']
for Item in Module.GetProtocolList():
if Item is None:
CheckDict = {'Protocol':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
XmlTreeLevel = TopXmlTreeLevel + ['Protocols', 'Protocol']
for Item in Module.GetProtocolList():
CheckDict = {'CName':Item.GetCName(),
'Usage':Item.GetUsage()}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
# Check PPIs -> Ppi
#
XmlTreeLevel = TopXmlTreeLevel + ['PPIs']
for Item in Module.GetPpiList():
if Item is None:
CheckDict = {'Ppi':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
XmlTreeLevel = TopXmlTreeLevel + ['PPIs', 'Ppi']
for Item in Module.GetPpiList():
CheckDict = {'CName':Item.GetCName(),
'Usage':Item.GetUsage()}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
# Check PcdCoded -> PcdEntry
#
XmlTreeLevel = TopXmlTreeLevel + ['PcdCoded']
for Item in Module.GetPcdList():
if Item is None:
CheckDict = {'PcdEntry':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
XmlTreeLevel = TopXmlTreeLevel + ['PcdCoded', 'PcdEntry']
for Item in Module.GetPcdList():
CheckDict = {'TokenSpaceGuidCname':Item.GetTokenSpaceGuidCName(),
'CName':Item.GetCName(),
'PcdUsage':Item.GetValidUsage(),
'PcdItemType':Item.GetItemType()}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
# Check Externs -> Extern
#
XmlTreeLevel = TopXmlTreeLevel + ['Externs']
for Item in Module.GetExternList():
if Item is None:
CheckDict = {'Extern':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
# If SupArchList is used to identify different EntryPoint, UnloadImage, Constructor/Destructor elements and
# that SupArchList does not match ModuleSurfaceArea.ModuleProperties:SupArchList, the tool must exit gracefully,
# informing the user that the EDK II Build system does not support different EntryPoint, UnloadImage,
# Constructor or Destructor elements based on Architecture type. Two SupArchList attributes are considered
# identical if they list the same CPU architectures in any order.
#
for Item in Module.GetExternList():
if len(Item.SupArchList) > 0:
if not IsEqualList(Item.SupArchList, Module.SupArchList):
Logger.Error('\nUPT',
PARSER_ERROR,
ERR_XML_INVALID_EXTERN_SUPARCHLIST % (str(Item.SupArchList), str(Module.SupArchList)),
RaiseError=True)
#
# Check DistributionPackage -> ModuleSurfaceArea -> UserExtensions
#
XmlTreeLevel = TopXmlTreeLevel + ['UserExtensions']
for Item in Module.GetUserExtensionList():
CheckDict = {'UserId':Item.GetUserID(), 'Identifier':Item.GetIdentifier()}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
# Check DistributionPackage -> ModuleSurfaceArea -> MiscellaneousFiles -> Filename
#
XmlTreeLevel = TopXmlTreeLevel + ['MiscellaneousFiles']
for Item in Module.GetMiscFileList():
if not Item.GetFileList():
CheckDict = {'Filename': '', }
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
for File in Item.GetFileList():
CheckDict = {'Filename': File.GetURI(), }
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
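#
# Illustrative behaviour (added, not part of the original source): the Extern
# SupArchList check in ValidateMS1() above treats two lists as identical when
# they name the same CPU architectures in any order. Assuming IsEqualList()
# implements that order-insensitive comparison:
#
#   IsEqualList(['IA32', 'X64'], ['X64', 'IA32'])   # True  -> no error reported
#   IsEqualList(['IA32'], ['IA32', 'X64'])          # False -> ERR_XML_INVALID_EXTERN_SUPARCHLIST
#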
## ValidateMS2
#
# Check if any required item is missing in ModuleSurfaceArea
#
# @param Module: The ModuleSurfaceArea to be checked
# @param XmlTreeLevel: The top level of Module
#
def ValidateMS2(Module, TopXmlTreeLevel):
#
# Check Header
#
XmlTreeLevel = TopXmlTreeLevel + ['Header']
CheckDict = Sdict()
CheckDict['Name'] = Module.GetName()
CheckDict['BaseName'] = Module.GetBaseName()
CheckDict['GUID'] = Module.GetGuid()
CheckDict['Version'] = Module.GetVersion()
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
# Check ModuleProperties
#
XmlTreeLevel = TopXmlTreeLevel + ['ModuleProperties']
CheckDict = {'ModuleType':Module.GetModuleType(),
'Path':Module.GetModulePath()}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
if not IsValidInstallPath(Module.GetModulePath()):
Logger.Error("UPT", FORMAT_INVALID, ERR_FILE_NAME_INVALIDE % Module.GetModulePath())
#
# Check ModuleProperties->BootMode
#
XmlTreeLevel = TopXmlTreeLevel + ['ModuleProperties'] + ['BootMode']
for Item in Module.GetBootModeList():
CheckDict = {'Usage':Item.GetUsage(),
'SupportedBootModes':Item.GetSupportedBootModes()}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
# Check ModuleProperties->Event
#
XmlTreeLevel = TopXmlTreeLevel + ['ModuleProperties'] + ['Event']
for Item in Module.GetEventList():
CheckDict = {'Usage':Item.GetUsage(),
'EventType':Item.GetEventType()}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
# Check ModuleProperties->Hob
#
XmlTreeLevel = TopXmlTreeLevel + ['ModuleProperties'] + ['HOB']
for Item in Module.GetHobList():
CheckDict = {'Usage':Item.GetUsage(),
'HobType':Item.GetHobType()}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
# The UDP Specification supports the module type of UEFI_RUNTIME_DRIVER, which is not present in the EDK II INF
# File Specification v. 1.23, so UPT must perform the following translation, which includes the generation of a
# [Depex] section.
#
if Module.ModuleType == "UEFI_RUNTIME_DRIVER":
Module.ModuleType = "DXE_RUNTIME_DRIVER"
DxeObj = DepexObject()
DxeObj.SetDepex("gEfiBdsArchProtocolGuid AND \ngEfiCpuArchProtocolGuid AND\n" + \
"gEfiMetronomeArchProtocolGuid AND \ngEfiMonotonicCounterArchProtocolGuid AND\n" + \
"gEfiRealTimeClockArchProtocolGuid AND \ngEfiResetArchProtocolGuid AND\n" + \
"gEfiRuntimeArchProtocolGuid AND \ngEfiSecurityArchProtocolGuid AND\n" + \
"gEfiTimerArchProtocolGuid AND \ngEfiVariableWriteArchProtocolGuid AND\n" + \
"gEfiVariableArchProtocolGuid AND \ngEfiWatchdogTimerArchProtocolGuid")
DxeObj.SetModuleType(['DXE_RUNTIME_DRIVER'])
Module.PeiDepex = []
Module.DxeDepex = []
Module.SmmDepex = []
Module.DxeDepex.append(DxeObj)
#
# Check LibraryClassDefinitions -> LibraryClass
#
XmlTreeLevel = TopXmlTreeLevel + ['LibraryClassDefinitions']
for Item in Module.GetLibraryClassList():
if Item is None:
CheckDict = {'LibraryClass':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
XmlTreeLevel = TopXmlTreeLevel + ['LibraryClassDefinitions', 'LibraryClass']
IsLibraryModule = False
LibrarySupModList = []
for Item in Module.GetLibraryClassList():
CheckDict = {'Keyword':Item.GetLibraryClass(),
'Usage':Item.GetUsage()}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
# If the LibraryClass:SupModList is not "UNDEFINED" the LIBRARY_CLASS entry must have the list
# appended using the format:
# LIBRARY_CLASS = <ClassName> ["|" <Edk2ModuleTypeList>]
#
# Edk2ModuleTypeList ::= <ModuleType> [" " <ModuleType>]{0,}
# <ModuleTypes> ::= {"BASE"} {"SEC"} {"PEI_CORE"} {"PEIM"}
# {"DXE_CORE"} {"DXE_DRIVER"} {"SMM_CORE"}
# {"DXE_SMM_DRIVER"} {"DXE_RUNTIME_DRIVER"}
# {"DXE_SAL_DRIVER"} {"UEFI_DRIVER"}
# {"UEFI_APPLICATION"} {"USER_DEFINED"}
#
if len(Item.SupModuleList) > 0:
for SupModule in Item.SupModuleList:
if not IsValidInfMoudleType(SupModule):
Logger.Error('\nUPT',
PARSER_ERROR,
ERR_XML_INVALID_LIB_SUPMODLIST % (Item.LibraryClass, str(SupModule)),
RaiseError=True)
if Item.Usage == 'PRODUCES' or Item.Usage == 'SOMETIMES_PRODUCES':
IsLibraryModule = True
LibrarySupModList = Item.SupModuleList
#
# For Library modules (indicated by a LIBRARY_CLASS statement in the [Defines] section)
# If the SupModList attribute of the CONSTRUCTOR or DESTRUCTOR element does not match the Supported Module
# Types listed after "LIBRARY_CLASS = <Keyword> |", the tool should gracefully exit with an error message
# stating that there is a conflict in the module types the CONSTRUCTOR/DESTRUCTOR is to be used with and
# the Module types this Library supports.
#
if IsLibraryModule:
for Item in Module.GetExternList():
if Item.Constructor or Item.Destructor:
if hasattr(Item, 'SupModList') and len(Item.SupModList) > 0 and \
not IsEqualList(Item.SupModList, LibrarySupModList):
Logger.Error('\nUPT',
PARSER_ERROR,
ERR_XML_INVALID_EXTERN_SUPMODLIST % (str(Item.SupModList), str(LibrarySupModList)),
RaiseError=True)
#
# If the module is not a library module, the MODULE_TYPE listed in the ModuleSurfaceArea.Header must match the
# SupModList attribute. If these conditions cannot be met, the tool must exit gracefully, informing the user
# that the EDK II Build system does not currently support the features required by this Module.
#
if not IsLibraryModule:
for Item in Module.GetExternList():
if hasattr(Item, 'SupModList') and len(Item.SupModList) > 0 and \
not IsEqualList(Item.SupModList, [Module.ModuleType]):
Logger.Error('\nUPT',
PARSER_ERROR,
ERR_XML_INVALID_EXTERN_SUPMODLIST_NOT_LIB % (str(Module.ModuleType), str(Item.SupModList)),
RaiseError=True)
#
# Check SourceFiles
#
XmlTreeLevel = TopXmlTreeLevel + ['SourceFiles']
for Item in Module.GetSourceFileList():
if Item is None:
CheckDict = {'Filename':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
XmlTreeLevel = TopXmlTreeLevel + ['SourceFiles']
for Item in Module.GetSourceFileList():
CheckDict = {'Filename':Item.GetSourceFile()}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
for ItemCount in range(len(Module.GetBinaryFileList())):
Item = Module.GetBinaryFileList()[ItemCount]
if Item and len(Item.FileNamList) > 0 and Item.FileNamList[0].FileType == 'FREEFORM':
Item.FileNamList[0].FileType = 'SUBTYPE_GUID'
Module.GetBinaryFileList()[ItemCount] = Item
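#
# Illustrative summary (added for clarity): the UEFI_RUNTIME_DRIVER branch in
# ValidateMS2() above rewrites the module instead of rejecting it. The effect on
# a module object is roughly:
#
#   Module.ModuleType                 'UEFI_RUNTIME_DRIVER' -> 'DXE_RUNTIME_DRIVER'
#   Module.PeiDepex, Module.SmmDepex  -> reset to []
#   Module.DxeDepex                   -> a single DepexObject ANDing the
#                                        architectural protocol GUIDs listed above
#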
## ValidateMS3
#
# Check if any required item is missing in ModuleSurfaceArea
#
# @param Module: The ModuleSurfaceArea to be checked
# @param XmlTreeLevel: The top level of Module
#
def ValidateMS3(Module, TopXmlTreeLevel):
#
# Check PackageDependencies -> Package
#
XmlTreeLevel = TopXmlTreeLevel + ['PackageDependencies']
for Item in Module.GetPackageDependencyList():
if Item is None:
CheckDict = {'Package':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
XmlTreeLevel = TopXmlTreeLevel + ['PackageDependencies', 'Package']
for Item in Module.GetPackageDependencyList():
CheckDict = {'GUID':Item.GetGuid()}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
# Check BinaryFiles -> BinaryFile
#
for Item in Module.GetBinaryFileList():
if Item is None:
XmlTreeLevel = TopXmlTreeLevel + ['BinaryFiles']
CheckDict = {'BinaryFile':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
if not Item.GetFileNameList():
XmlTreeLevel = TopXmlTreeLevel + ['BinaryFiles', 'BinaryFile']
CheckDict = {'Filename':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
XmlTreeLevel = TopXmlTreeLevel + ['BinaryFiles', 'BinaryFile']
for File in Item.GetFileNameList():
CheckDict = {'Filename':File.GetFilename(),
'FileType':File.GetFileType()}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
for AsBuilt in Item.GetAsBuiltList():
#
# Check LibInstance
#
if len(AsBuilt.LibraryInstancesList) == 1 and not AsBuilt.LibraryInstancesList[0]:
CheckDict = {'GUID':''}
XmlTreeLevel = TopXmlTreeLevel + ['BinaryFiles', 'BinaryFile', 'AsBuilt', 'LibraryInstances']
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
for LibItem in AsBuilt.LibraryInstancesList:
CheckDict = {'Guid':LibItem.Guid,
'Version':LibItem.Version}
XmlTreeLevel = TopXmlTreeLevel + ['BinaryFiles', 'BinaryFile', 'AsBuilt', 'LibraryInstances']
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
# Check PatchPcd
#
for PatchPcdItem in AsBuilt.PatchPcdList:
CheckDict = {'TokenSpaceGuidValue':PatchPcdItem.TokenSpaceGuidValue,
'PcdCName':PatchPcdItem.PcdCName,
'Token':PatchPcdItem.Token,
'DatumType':PatchPcdItem.DatumType,
'Value':PatchPcdItem.DefaultValue,
'Offset':PatchPcdItem.Offset}
XmlTreeLevel = TopXmlTreeLevel + ['BinaryFiles', 'BinaryFile', 'AsBuilt', 'PatchPcdValue']
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
# Check PcdError
#
for PcdErrorItem in PatchPcdItem.PcdErrorsList:
CheckDict = {'ErrorNumber':PcdErrorItem.ErrorNumber}
XmlTreeLevel = TopXmlTreeLevel + ['BinaryFiles', 'BinaryFile', 'AsBuilt',
'PatchPcdValue', 'PcdError']
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
# Check PcdEx
#
for PcdExItem in AsBuilt.PcdExValueList:
CheckDict = {'TokenSpaceGuidValue':PcdExItem.TokenSpaceGuidValue,
'Token':PcdExItem.Token,
'DatumType':PcdExItem.DatumType}
XmlTreeLevel = TopXmlTreeLevel + ['BinaryFiles', 'BinaryFile', 'AsBuilt', 'PcdExValue']
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
# Check PcdError
#
for PcdErrorItem in PcdExItem.PcdErrorsList:
CheckDict = {'ErrorNumber':PcdErrorItem.ErrorNumber}
XmlTreeLevel = TopXmlTreeLevel + ['BinaryFiles', 'BinaryFile', 'AsBuilt',
'PcdExValue', 'PcdError']
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
# Check SmmDepex
#
XmlTreeLevel = TopXmlTreeLevel + ['SmmDepex']
for Item in Module.GetSmmDepex():
CheckDict = {'Expression':Item.GetDepex()}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
# Check PeiDepex
#
XmlTreeLevel = TopXmlTreeLevel + ['PeiDepex']
for Item in Module.GetPeiDepex():
CheckDict = {'Expression':Item.GetDepex()}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
# Check DxeDepex
#
XmlTreeLevel = TopXmlTreeLevel + ['DxeDepex']
for Item in Module.GetDxeDepex():
CheckDict = {'Expression':Item.GetDepex()}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
# Check <UserExtensions>
#
XmlTreeLevel = TopXmlTreeLevel + ['UserExtensions']
for Item in Module.GetUserExtensionList():
CheckDict = {'UserId':Item.GetUserID(), 'Identifier':Item.GetIdentifier()}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
## ValidatePS1
#
# ValidatePS1
#
def ValidatePS1(Package):
#
# Check DistributionPackage -> PackageSurfaceArea -> Header
#
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'Header']
CheckDict = Sdict()
CheckDict['Name'] = Package.GetName()
CheckDict['BaseName'] = Package.GetBaseName()
CheckDict['GUID'] = Package.GetGuid()
CheckDict['Version'] = Package.GetVersion()
CheckDict['PackagePath'] = Package.GetPackagePath()
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
if not IsValidInstallPath(Package.GetPackagePath()):
Logger.Error("UPT", FORMAT_INVALID, ERR_FILE_NAME_INVALIDE % Package.GetPackagePath())
#
# Check DistributionPackage -> PackageSurfaceArea -> ClonedFrom
#
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'ClonedFrom']
for Item in Package.GetClonedFromList():
if Item is None:
CheckDict = Sdict()
CheckDict['GUID'] = ''
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
CheckDict = Sdict()
CheckDict['GUID'] = Item.GetPackageGuid()
CheckDict['Version'] = Item.GetPackageVersion()
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
# Check DistributionPackage -> PackageSurfaceArea -> LibraryClassDeclarations -> LibraryClass
#
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'LibraryClassDeclarations']
for Item in Package.GetLibraryClassList():
if Item is None:
CheckDict = {'LibraryClass':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'LibraryClassDeclarations', 'LibraryClass']
for Item in Package.GetLibraryClassList():
CheckDict = {'Keyword':Item.GetLibraryClass(),
'HeaderFile':Item.GetIncludeHeader()}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
# Check DistributionPackage -> PackageSurfaceArea -> IndustryStandardIncludes -> IndustryStandardHeader
#
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'IndustryStandardIncludes']
for Item in Package.GetStandardIncludeFileList():
if Item is None:
CheckDict = {'IndustryStandardHeader':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'IndustryStandardIncludes', 'IndustryStandardHeader']
for Item in Package.GetStandardIncludeFileList():
CheckDict = {'HeaderFile':Item.GetFilePath()}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
# Check DistributionPackage -> PackageSurfaceArea -> PackageIncludes -> PackageHeader
#
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'PackageIncludes']
for Item in Package.GetPackageIncludeFileList():
if Item is None:
CheckDict = {'PackageHeader':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'PackageIncludes', 'PackageHeader']
for Item in Package.GetPackageIncludeFileList():
CheckDict = {'HeaderFile':Item.GetFilePath()}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
## ValidatePS2
#
# ValidatePS2
#
def ValidatePS2(Package):
#
# Check DistributionPackage -> PackageSurfaceArea -> Modules -> ModuleSurfaceArea
#
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'Modules', 'ModuleSurfaceArea']
for Item in Package.GetModuleDict().values():
ValidateMS(Item, XmlTreeLevel)
#
# Check DistributionPackage -> PackageSurfaceArea -> GuidDeclarations Entry
#
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'GuidDeclarations']
for Item in Package.GetGuidList():
if Item is None:
CheckDict = {'Entry':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'GuidDeclarations', 'Entry']
for Item in Package.GetGuidList():
CheckDict = {'CName':Item.GetCName(),
'GuidValue':Item.GetGuid()}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
# Check DistributionPackage -> PackageSurfaceArea -> ProtocolDeclarations -> Entry
#
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'ProtocolDeclarations']
for Item in Package.GetProtocolList():
if Item is None:
CheckDict = {'Entry':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'ProtocolDeclarations', 'Entry']
for Item in Package.GetProtocolList():
CheckDict = {'CName':Item.GetCName(),
'GuidValue':Item.GetGuid()}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
# Check DistributionPackage -> PackageSurfaceArea -> PpiDeclarations -> Entry
#
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'PpiDeclarations']
for Item in Package.GetPpiList():
if Item is None:
CheckDict = {'Entry':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'PpiDeclarations', 'Entry']
for Item in Package.GetPpiList():
CheckDict = {'CName':Item.GetCName(),
'GuidValue':Item.GetGuid()}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
# Check DistributionPackage -> PackageSurfaceArea -> PcdDeclarations -> Entry
#
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'PcdDeclarations']
for Item in Package.GetPcdList():
if Item is None:
CheckDict = {'PcdEntry':''}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'PcdDeclarations', 'PcdEntry']
for Item in Package.GetPcdList():
CheckDict = {'TokenSpaceGuidCname':Item.GetTokenSpaceGuidCName(),
'Token':Item.GetToken(),
'CName':Item.GetCName(),
'DatumType':Item.GetDatumType(),
'ValidUsage':Item.GetValidUsage(),
'DefaultValue':Item.GetDefaultValue()}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
# Check DistributionPackage -> PackageSurfaceArea -> UserExtensions
#
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'UserExtensions']
for Item in Package.GetUserExtensionList():
CheckDict = {'UserId':Item.GetUserID(), 'Identifier':Item.GetIdentifier()}
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
#
# Check DistributionPackage -> PackageSurfaceArea -> MiscellaneousFiles -> Filename
#
XmlTreeLevel = ['DistributionPackage', 'PackageSurfaceArea', 'MiscellaneousFiles']
for Item in Package.GetMiscFileList():
if not Item.GetFileList():
CheckDict = {'Filename': '', }
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
for File in Item.GetFileList():
CheckDict = {'Filename': File.GetURI(), }
IsRequiredItemListNull(CheckDict, XmlTreeLevel)
## ValidatePackageSurfaceArea
#
# Check if any required item is missing in PackageSurfaceArea
#
# @param Package: The PackageSurfaceArea to be checked
#
def ValidatePackageSurfaceArea(Package):
ValidatePS1(Package)
ValidatePS2(Package)
| edk2-master | BaseTools/Source/Python/UPT/Xml/XmlParser.py |
## @file
# This file is for converting a package information data file to an XML file.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
IniToXml
'''
import os.path
import re
from time import strftime
from time import localtime
import Logger.Log as Logger
from Logger.ToolError import UPT_INI_PARSE_ERROR
from Logger.ToolError import FILE_NOT_FOUND
from Library.Xml.XmlRoutines import CreateXmlElement
from Library.DataType import TAB_VALUE_SPLIT
from Library.DataType import TAB_EQUAL_SPLIT
from Library.DataType import TAB_SECTION_START
from Library.DataType import TAB_SECTION_END
from Logger import StringTable as ST
from Library.StringUtils import ConvertSpecialChar
from Library.ParserValidate import IsValidPath
from Library import GlobalData
## log error:
#
# @param error: error
# @param File: File
# @param Line: Line
#
def IniParseError(Error, File, Line):
Logger.Error("UPT", UPT_INI_PARSE_ERROR, File=File,
Line=Line, ExtraData=Error)
## __ValidatePath
#
# @param Path: Path to be checked
#
def __ValidatePath(Path, Root):
Path = Path.strip()
if os.path.isabs(Path) or not IsValidPath(Path, Root):
return False, ST.ERR_FILELIST_LOCATION % (Root, Path)
return True, ''
## ValidateMiscFile
#
# @param Filename: File to be checked
#
def ValidateMiscFile(Filename):
Root = GlobalData.gWORKSPACE
return __ValidatePath(Filename, Root)
## ValidateToolsFile
#
# @param Filename: File to be checked
#
def ValidateToolsFile(Filename):
Valid, Cause = False, ''
if not Valid and 'EDK_TOOLS_PATH' in os.environ:
Valid, Cause = __ValidatePath(Filename, os.environ['EDK_TOOLS_PATH'])
if not Valid:
Valid, Cause = __ValidatePath(Filename, GlobalData.gWORKSPACE)
return Valid, Cause
## ParseFileList
#
# @param Line: Line
# @param Map: Map
# @param CurrentKey: CurrentKey
# @param PathFunc: Path validate function
#
def ParseFileList(Line, Map, CurrentKey, PathFunc):
FileList = ["", {}]
TokenList = Line.split(TAB_VALUE_SPLIT)
if len(TokenList) > 0:
Path = TokenList[0].strip().replace('\\', '/')
if not Path:
return False, ST.ERR_WRONG_FILELIST_FORMAT
Valid, Cause = PathFunc(Path)
if not Valid:
return Valid, Cause
FileList[0] = TokenList[0].strip()
for Token in TokenList[1:]:
Attr = Token.split(TAB_EQUAL_SPLIT)
if len(Attr) != 2 or not Attr[0].strip() or not Attr[1].strip():
return False, ST.ERR_WRONG_FILELIST_FORMAT
Key = Attr[0].strip()
Val = Attr[1].strip()
if Key not in ['OS', 'Executable']:
return False, ST.ERR_UNKNOWN_FILELIST_ATTR % Key
if Key == 'OS' and Val not in ["Win32", "Win64", "Linux32",
"Linux64", "OS/X32", "OS/X64",
"GenericWin", "GenericNix"]:
return False, ST.ERR_FILELIST_ATTR % 'OS'
elif Key == 'Executable' and Val not in ['true', 'false']:
return False, ST.ERR_FILELIST_ATTR % 'Executable'
FileList[1][Key] = Val
Map[CurrentKey].append(FileList)
return True, ''
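#
# Illustrative example (added, not part of the original tool): ParseFileList()
# handles one file entry per line, split on TAB_VALUE_SPLIT with optional
# attributes split on TAB_EQUAL_SPLIT. Assuming those delimiters are '|' and '='
# and a hypothetical tool script path, an entry such as
#
#   Tools/MyTool.py | OS = GenericNix | Executable = false
#
# would be appended to Map['FileList'] as
# ['Tools/MyTool.py', {'OS': 'GenericNix', 'Executable': 'false'}].
#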
## Create header XML file
#
# @param DistMap: DistMap
# @param Root: Root
#
def CreateHeaderXml(DistMap, Root):
Element1 = CreateXmlElement('Name', DistMap['Name'],
[], [['BaseName', DistMap['BaseName']]])
Element2 = CreateXmlElement('GUID', DistMap['GUID'],
[], [['Version', DistMap['Version']]])
AttributeList = [['ReadOnly', DistMap['ReadOnly']],
['RePackage', DistMap['RePackage']]]
NodeList = [Element1,
Element2,
['Vendor', DistMap['Vendor']],
['Date', DistMap['Date']],
['Copyright', DistMap['Copyright']],
['License', DistMap['License']],
['Abstract', DistMap['Abstract']],
['Description', DistMap['Description']],
['Signature', DistMap['Signature']],
['XmlSpecification', DistMap['XmlSpecification']],
]
Root.appendChild(CreateXmlElement('DistributionHeader', '',
NodeList, AttributeList))
## Create tools XML file
#
# @param Map: Map
# @param Root: Root
# @param Tag: Tag
#
def CreateToolsXml(Map, Root, Tag):
#
# Check if all elements in this section are empty
#
for Key in Map:
if len(Map[Key]) > 0:
break
else:
return
NodeList = [['Name', Map['Name']],
['Copyright', Map['Copyright']],
['License', Map['License']],
['Abstract', Map['Abstract']],
['Description', Map['Description']],
]
HeaderNode = CreateXmlElement('Header', '', NodeList, [])
NodeList = [HeaderNode]
for File in Map['FileList']:
AttrList = []
for Key in File[1]:
AttrList.append([Key, File[1][Key]])
NodeList.append(CreateXmlElement('Filename', File[0], [], AttrList))
Root.appendChild(CreateXmlElement(Tag, '', NodeList, []))
## ValidateValues
#
# @param Key: Key
# @param Value: Value
# @param SectionName: SectionName
#
def ValidateValues(Key, Value, SectionName):
if SectionName == 'DistributionHeader':
Valid, Cause = ValidateRegValues(Key, Value)
if not Valid:
return Valid, Cause
Valid = __ValidateDistHeader(Key, Value)
if not Valid:
return Valid, ST.ERR_VALUE_INVALID % (Key, SectionName)
else:
Valid = __ValidateOtherHeader(Key, Value)
if not Valid:
return Valid, ST.ERR_VALUE_INVALID % (Key, SectionName)
return True, ''
## ValidateRegValues
#
# @param Key: Key
# @param Value: Value
#
def ValidateRegValues(Key, Value):
ValidateMap = {
'ReadOnly' :
('true|false', ST.ERR_BOOLEAN_VALUE % (Key, Value)),
'RePackage' :
('true|false', ST.ERR_BOOLEAN_VALUE % (Key, Value)),
'GUID' :
('[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}'
'-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}',
ST.ERR_GUID_VALUE % Value),
'Version' : (r'[0-9]+(\.[0-9]+)?', ST.ERR_VERSION_VALUE % \
(Key, Value)),
'XmlSpecification' : (r'1\.1', ST.ERR_VERSION_XMLSPEC % Value)
}
if Key not in ValidateMap:
return True, ''
Elem = ValidateMap[Key]
Match = re.compile(Elem[0]).match(Value)
if Match and Match.start() == 0 and Match.end() == len(Value):
return True, ''
return False, Elem[1]
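#
# Illustrative behaviour (added for clarity): ValidateRegValues() only accepts a
# value when its pattern matches the whole string, for example:
#
#   ValidateRegValues('Version', '1.1')        # (True, '')
#   ValidateRegValues('Version', '1.1-beta')   # (False, ERR_VERSION_VALUE message)
#   ValidateRegValues('Vendor', 'anything')    # (True, ''), key has no pattern here
#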
## __ValidateDistHeaderName
#
# @param Name: Name
#
def __ValidateDistHeaderName(Name):
if len(Name) < 1:
return False
for Char in Name:
if ord(Char) < 0x20 or ord(Char) >= 0x7f:
return False
return True
## __ValidateDistHeaderBaseName
#
# @param BaseName: BaseName
#
def __ValidateDistHeaderBaseName(BaseName):
if not BaseName:
return False
# if CheckLen and len(BaseName) < 2:
# return False
if not BaseName[0].isalnum() and BaseName[0] != '_':
return False
for Char in BaseName[1:]:
if not Char.isalnum() and Char not in '-_':
return False
return True
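#
# Illustrative behaviour (added for clarity): __ValidateDistHeaderBaseName()
# accepts names starting with an alphanumeric character or '_' and continuing
# with alphanumerics, '-' or '_':
#
#   __ValidateDistHeaderBaseName('My_Package-1')   # True
#   __ValidateDistHeaderBaseName('1Package')       # True, digits are alphanumeric
#   __ValidateDistHeaderBaseName('-Package')       # False, leading '-'
#   __ValidateDistHeaderBaseName('My Package')     # False, space not allowed
#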
## __ValidateDistHeaderAbstract
#
# @param Abstract: Abstract
#
def __ValidateDistHeaderAbstract(Abstract):
return '\t' not in Abstract and len(Abstract.splitlines()) == 1
## __ValidateOtherHeaderAbstract
#
# @param Abstract: Abstract
#
def __ValidateOtherHeaderAbstract(Abstract):
return __ValidateDistHeaderAbstract(Abstract)
## __ValidateDistHeader
#
# @param Key: Key
# @param Value: Value
#
def __ValidateDistHeader(Key, Value):
ValidateMap = {
'Name' : __ValidateDistHeaderName,
'BaseName' : __ValidateDistHeaderBaseName,
'Abstract' : __ValidateDistHeaderAbstract,
'Vendor' : __ValidateDistHeaderAbstract
}
return not (Value and Key in ValidateMap and not ValidateMap[Key](Value))
## __ValidateOtherHeader
#
# @param Key: Key
# @param Value: Value
#
def __ValidateOtherHeader(Key, Value):
ValidateMap = {
'Name' : __ValidateDistHeaderName,
'Abstract' : __ValidateOtherHeaderAbstract
}
return not (Value and Key in ValidateMap and not ValidateMap[Key](Value))
## Convert ini file to xml file
#
# @param IniFile
#
def IniToXml(IniFile):
if not os.path.exists(IniFile):
Logger.Error("UPT", FILE_NOT_FOUND, ST.ERR_TEMPLATE_NOTFOUND % IniFile)
DistMap = {'ReadOnly' : '', 'RePackage' : '', 'Name' : '',
'BaseName' : '', 'GUID' : '', 'Version' : '', 'Vendor' : '',
'Date' : '', 'Copyright' : '', 'License' : '', 'Abstract' : '',
'Description' : '', 'Signature' : '', 'XmlSpecification' : ''
}
ToolsMap = {'Name' : '', 'Copyright' : '', 'License' : '',
'Abstract' : '', 'Description' : '', 'FileList' : []}
#
# Only FileList is a list: [['file1', {}], ['file2', {}], ...]
#
MiscMap = {'Name' : '', 'Copyright' : '', 'License' : '',
'Abstract' : '', 'Description' : '', 'FileList' : []}
SectionMap = {
'DistributionHeader' : DistMap,
'ToolsHeader' : ToolsMap,
'MiscellaneousFilesHeader' : MiscMap
}
PathValidator = {
'ToolsHeader' : ValidateToolsFile,
'MiscellaneousFilesHeader' : ValidateMiscFile
}
ParsedSection = []
SectionName = ''
CurrentKey = ''
PreMap = None
Map = None
FileContent = ConvertSpecialChar(open(IniFile, 'r').readlines())
LastIndex = 0
for Index in range(0, len(FileContent)):
LastIndex = Index
Line = FileContent[Index].strip()
if Line == '' or Line.startswith(';'):
continue
if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END:
CurrentKey = ''
SectionName = Line[1:-1].strip()
if SectionName not in SectionMap:
IniParseError(ST.ERR_SECTION_NAME_INVALID % SectionName,
IniFile, Index+1)
if SectionName in ParsedSection:
IniParseError(ST.ERR_SECTION_REDEFINE % SectionName,
IniFile, Index+1)
else:
ParsedSection.append(SectionName)
Map = SectionMap[SectionName]
continue
if not Map:
IniParseError(ST.ERR_SECTION_NAME_NONE, IniFile, Index+1)
TokenList = Line.split(TAB_EQUAL_SPLIT, 1)
TempKey = TokenList[0].strip()
#
# Value spans multiple lines, or the same keyword appears more than one time
#
if len(TokenList) < 2 or TempKey not in Map:
if CurrentKey == '':
IniParseError(ST.ERR_KEYWORD_INVALID % TempKey,
IniFile, Index+1)
elif CurrentKey == 'FileList':
#
# Special for FileList
#
Valid, Cause = ParseFileList(Line, Map, CurrentKey,
PathValidator[SectionName])
if not Valid:
IniParseError(Cause, IniFile, Index+1)
else:
#
# Multiple lines for one key such as license
# Or if string on the left side of '=' is not a keyword
#
Map[CurrentKey] = ''.join([Map[CurrentKey], '\n', Line])
Valid, Cause = ValidateValues(CurrentKey,
Map[CurrentKey], SectionName)
if not Valid:
IniParseError(Cause, IniFile, Index+1)
continue
if (TokenList[1].strip() == ''):
IniParseError(ST.ERR_EMPTY_VALUE, IniFile, Index+1)
#
# A keyword found
#
CurrentKey = TempKey
if Map[CurrentKey]:
IniParseError(ST.ERR_KEYWORD_REDEFINE % CurrentKey,
IniFile, Index+1)
if id(Map) != id(PreMap) and Map['Copyright']:
PreMap = Map
Copyright = Map['Copyright'].lower()
Pos = Copyright.find('copyright')
if Pos == -1:
IniParseError(ST.ERR_COPYRIGHT_CONTENT, IniFile, Index)
if not Copyright[Pos + len('copyright'):].lstrip(' ').startswith('('):
IniParseError(ST.ERR_COPYRIGHT_CONTENT, IniFile, Index)
if CurrentKey == 'FileList':
Valid, Cause = ParseFileList(TokenList[1], Map, CurrentKey,
PathValidator[SectionName])
if not Valid:
IniParseError(Cause, IniFile, Index+1)
else:
Map[CurrentKey] = TokenList[1].strip()
Valid, Cause = ValidateValues(CurrentKey,
Map[CurrentKey], SectionName)
if not Valid:
IniParseError(Cause, IniFile, Index+1)
if id(Map) != id(PreMap) and Map['Copyright'] and 'copyright' not in Map['Copyright'].lower():
IniParseError(ST.ERR_COPYRIGHT_CONTENT, IniFile, LastIndex)
#
# Check mandatory keys
#
CheckMdtKeys(DistMap, IniFile, LastIndex,
(('ToolsHeader', ToolsMap), ('MiscellaneousFilesHeader', MiscMap))
)
return CreateXml(DistMap, ToolsMap, MiscMap, IniFile)
## CheckMdtKeys
#
# @param MdtDistKeys: All mandatory keys
# @param DistMap: Dist content
# @param IniFile: Ini file
# @param LastIndex: Last index of Ini file
# @param Maps: Tools and Misc section name and map. (('section_name', map),*)
#
def CheckMdtKeys(DistMap, IniFile, LastIndex, Maps):
MdtDistKeys = ['Name', 'GUID', 'Version', 'Vendor', 'Copyright', 'License', 'Abstract', 'XmlSpecification']
for Key in MdtDistKeys:
if Key not in DistMap or DistMap[Key] == '':
IniParseError(ST.ERR_KEYWORD_MANDATORY % Key, IniFile, LastIndex+1)
if '.' not in DistMap['Version']:
DistMap['Version'] = DistMap['Version'] + '.0'
DistMap['Date'] = str(strftime("%Y-%m-%dT%H:%M:%S", localtime()))
#
# Check Tools Surface Area according to UPT Spec
# <Tools> {0,}
# <Header> ... </Header> {0,1}
# <Filename> ... </Filename> {1,}
# </Tools>
# <Header>
# <Name> xs:normalizedString </Name> {1}
# <Copyright> xs:string </Copyright> {0,1}
# <License> xs:string </License> {0,1}
# <Abstract> xs:normalizedString </Abstract> {0,1}
# <Description> xs:string </Description> {0,1}
# </Header>
#
for Item in Maps:
Map = Item[1]
NonEmptyKey = 0
for Key in Map:
if Map[Key]:
NonEmptyKey += 1
if NonEmptyKey > 0 and not Map['FileList']:
IniParseError(ST.ERR_KEYWORD_MANDATORY % (Item[0] + '.FileList'), IniFile, LastIndex+1)
if NonEmptyKey > 0 and not Map['Name']:
IniParseError(ST.ERR_KEYWORD_MANDATORY % (Item[0] + '.Name'), IniFile, LastIndex+1)
## CreateXml
#
# @param DistMap: Dist Content
# @param ToolsMap: Tools Content
# @param MiscMap: Misc Content
# @param IniFile: Ini File
#
def CreateXml(DistMap, ToolsMap, MiscMap, IniFile):
Attrs = [['xmlns', 'http://www.uefi.org/2011/1.1'],
['xmlns:xsi', 'http://www.w3.org/2001/XMLSchema-instance'],
]
Root = CreateXmlElement('DistributionPackage', '', [], Attrs)
CreateHeaderXml(DistMap, Root)
CreateToolsXml(ToolsMap, Root, 'Tools')
CreateToolsXml(MiscMap, Root, 'MiscellaneousFiles')
FileAndExt = IniFile.rsplit('.', 1)
if len(FileAndExt) > 1:
FileName = FileAndExt[0] + '.xml'
else:
FileName = IniFile + '.xml'
File = open(FileName, 'w')
try:
File.write(Root.toprettyxml(indent = ' '))
finally:
File.close()
return FileName
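#
# Usage sketch (added for illustration; the file name and field values are
# hypothetical): IniToXml() expects an ini-style template whose
# [DistributionHeader] section carries the mandatory keys checked by
# CheckMdtKeys(), and it writes the generated XML next to the input file:
#
#   [DistributionHeader]
#   Name = MyDistribution
#   GUID = 12345678-1234-1234-1234-123456789abc
#   Version = 1.0
#   Vendor = Example Vendor
#   Copyright = Copyright (c) 2018, Example Vendor.
#   License = BSD-2-Clause-Patent
#   Abstract = Example distribution package
#   XmlSpecification = 1.1
#
#   XmlFile = IniToXml('MyDistribution.ini')   # -> 'MyDistribution.xml'
#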
| edk2-master | BaseTools/Source/Python/UPT/Xml/IniToXml.py |
## @file
# This file is used to parse the PackageSurfaceArea of a .PKG file
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
PackageSurfaceAreaXml
'''
from xml.dom import minidom
from Library.StringUtils import GetStringOfList
from Library.Xml.XmlRoutines import XmlElement
from Library.Xml.XmlRoutines import XmlNode
from Library.Xml.XmlRoutines import XmlList
from Library.Xml.XmlRoutines import CreateXmlElement
from Object.POM.CommonObject import IncludeObject
from Object.POM.CommonObject import TextObject
from Object.POM.PackageObject import PackageObject
from Xml.CommonXml import ClonedFromXml
from Xml.CommonXml import PackageHeaderXml
from Xml.CommonXml import HelpTextXml
from Xml.CommonXml import CommonDefinesXml
from Xml.CommonXml import LibraryClassXml
from Xml.CommonXml import UserExtensionsXml
from Xml.CommonXml import MiscellaneousFileXml
from Xml.GuidProtocolPpiXml import GuidXml
from Xml.GuidProtocolPpiXml import ProtocolXml
from Xml.GuidProtocolPpiXml import PpiXml
from Xml.ModuleSurfaceAreaXml import ModuleSurfaceAreaXml
from Xml.PcdXml import PcdEntryXml
##
# IndustryStandardHeaderXml
#
class IndustryStandardHeaderXml(object):
def __init__(self):
self.HeaderFile = ''
self.HelpText = []
def FromXml(self, Item, Key):
self.HeaderFile = XmlElement(Item, '%s/HeaderFile' % Key)
for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
HelpTextObj = HelpTextXml()
HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
self.HelpText.append(HelpTextObj)
Include = IncludeObject()
Include.SetFilePath(self.HeaderFile)
HelpTxt = TextObject()
HelpTxt.SetString(self.HelpText)
Include.SetHelpText(HelpTxt)
return Include
def ToXml(self, IndustryStandardHeader, Key):
if self.HeaderFile:
pass
AttributeList = []
NodeList = [['HeaderFile', IndustryStandardHeader.GetFilePath()]]
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
return Root
def __str__(self):
Str = "HeaderFile = %s" % (self.HeaderFile)
for Item in self.HelpText:
Str = Str + "\n\t" + str(Item)
return Str
##
# PackageIncludeHeaderXml
#
class PackageIncludeHeaderXml(object):
def __init__(self):
self.HeaderFile = ''
self.CommonDefines = CommonDefinesXml()
self.HelpText = []
def FromXml(self, Item, Key):
self.HeaderFile = XmlElement(Item, '%s/HeaderFile' % Key)
self.CommonDefines.FromXml(XmlNode(Item, '%s/HeaderFile' % Key), 'HeaderFile')
for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
HelpTextObj = HelpTextXml()
HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
self.HelpText.append(HelpTextObj)
Include = IncludeObject()
Include.SetFilePath(self.HeaderFile)
Include.SetSupArchList(self.CommonDefines.SupArchList)
HelpTxt = TextObject()
HelpTxt.SetString(self.HelpText)
Include.SetHelpText(HelpTxt)
return Include
def ToXml(self, PackageIncludeHeader, Key):
if self.HeaderFile:
pass
AttributeList = [['SupArchList', GetStringOfList(PackageIncludeHeader.GetSupArchList())], \
['SupModList', GetStringOfList(PackageIncludeHeader.GetSupModuleList())], ]
HeaderFileNode = CreateXmlElement('HeaderFile', PackageIncludeHeader.FilePath, [], AttributeList)
NodeList = [HeaderFileNode]
for Item in PackageIncludeHeader.GetHelpTextList():
Tmp = HelpTextXml()
NodeList.append(Tmp.ToXml(Item))
Root = CreateXmlElement('%s' % Key, '', NodeList, [])
return Root
def __str__(self):
Str = "HeaderFile = %s\n\t%s" % (self.HeaderFile, self.CommonDefines)
for Item in self.HelpText:
Str = Str + "\n\t" + str(Item)
return Str
##
# PcdCheckXml
#
class PcdCheckXml(object):
def __init__(self):
self.PcdCheck = ''
def FromXml(self, Item, Key):
if Key:
pass
self.PcdCheck = XmlElement(Item, 'PcdCheck')
return self.PcdCheck
def ToXml(self, PcdCheck, Key):
if self.PcdCheck:
pass
Root = CreateXmlElement('%s' % Key, PcdCheck, [], [])
return Root
def __str__(self):
return "PcdCheck = %s" % (self.PcdCheck)
##
# PackageSurfaceAreaXml
#
class PackageSurfaceAreaXml(object):
def __init__(self):
self.Package = None
def FromXml(self, Item, Key):
if Key:
pass
#
# Create a package object
#
Package = PackageObject()
#
# Header
#
Tmp = PackageHeaderXml()
Tmp.FromXml(XmlNode(Item, '/PackageSurfaceArea/Header'), 'Header', Package)
#
# ClonedFrom
#
Tmp = ClonedFromXml()
if XmlNode(Item, '/PackageSurfaceArea/ClonedFrom'):
ClonedFrom = Tmp.FromXml(XmlNode(Item, '/PackageSurfaceArea/ClonedFrom'), 'ClonedFrom')
Package.SetClonedFromList([ClonedFrom])
#
# LibraryClass
#
for SubItem in XmlList(Item, '/PackageSurfaceArea/LibraryClassDeclarations/LibraryClass'):
Tmp = LibraryClassXml()
LibraryClass = Tmp.FromXml(SubItem, 'LibraryClass')
Package.SetLibraryClassList(Package.GetLibraryClassList() + [LibraryClass])
if XmlList(Item, '/PackageSurfaceArea/LibraryClassDeclarations') and \
not XmlList(Item, '/PackageSurfaceArea/LibraryClassDeclarations/LibraryClass'):
Package.SetLibraryClassList([None])
#
# IndustryStandardHeader
#
for SubItem in XmlList(Item, '/PackageSurfaceArea/IndustryStandardIncludes/IndustryStandardHeader'):
Tmp = IndustryStandardHeaderXml()
Include = Tmp.FromXml(SubItem, 'IndustryStandardHeader')
Package.SetStandardIncludeFileList(Package.GetStandardIncludeFileList() + [Include])
if XmlList(Item, '/PackageSurfaceArea/IndustryStandardIncludes') and \
not XmlList(Item, '/PackageSurfaceArea/IndustryStandardIncludes/IndustryStandardHeader'):
Package.SetStandardIncludeFileList([None])
#
# PackageHeader
#
for SubItem in XmlList(Item, '/PackageSurfaceArea/PackageIncludes/PackageHeader'):
Tmp = PackageIncludeHeaderXml()
Include = Tmp.FromXml(SubItem, 'PackageHeader')
Package.SetPackageIncludeFileList(Package.GetPackageIncludeFileList() + [Include])
if XmlList(Item, '/PackageSurfaceArea/PackageIncludes') and not \
XmlList(Item, '/PackageSurfaceArea/PackageIncludes/PackageHeader'):
Package.SetPackageIncludeFileList([None])
#
# Guid
#
for SubItem in XmlList(Item, '/PackageSurfaceArea/GuidDeclarations/Entry'):
Tmp = GuidXml('Package')
GuidProtocolPpi = Tmp.FromXml(SubItem, 'Entry')
Package.SetGuidList(Package.GetGuidList() + [GuidProtocolPpi])
if XmlList(Item, '/PackageSurfaceArea/GuidDeclarations') and not \
XmlList(Item, '/PackageSurfaceArea/GuidDeclarations/Entry'):
Package.SetGuidList([None])
#
# Protocol
#
for SubItem in XmlList(Item, '/PackageSurfaceArea/ProtocolDeclarations/Entry'):
Tmp = ProtocolXml('Package')
GuidProtocolPpi = Tmp.FromXml(SubItem, 'Entry')
Package.SetProtocolList(Package.GetProtocolList() + [GuidProtocolPpi])
if XmlList(Item, '/PackageSurfaceArea/ProtocolDeclarations') and not \
XmlList(Item, '/PackageSurfaceArea/ProtocolDeclarations/Entry'):
Package.SetProtocolList([None])
#
# Ppi
#
for SubItem in XmlList(Item, '/PackageSurfaceArea/PpiDeclarations/Entry'):
Tmp = PpiXml('Package')
GuidProtocolPpi = Tmp.FromXml(SubItem, 'Entry')
Package.SetPpiList(Package.GetPpiList() + [GuidProtocolPpi])
if XmlList(Item, '/PackageSurfaceArea/PpiDeclarations') and not \
XmlList(Item, '/PackageSurfaceArea/PpiDeclarations/Entry'):
Package.SetPpiList([None])
#
# PcdEntry
#
for SubItem in XmlList(Item, '/PackageSurfaceArea/PcdDeclarations/PcdEntry'):
Tmp = PcdEntryXml()
PcdEntry = Tmp.FromXml2(SubItem, 'PcdEntry')
Package.SetPcdList(Package.GetPcdList() + [PcdEntry])
#
# Get PcdErrorCommentDict from PcdError in PcdEntry Node
#
for PcdErrorObj in PcdEntry.GetPcdErrorsList():
PcdErrorMessageList = PcdErrorObj.GetErrorMessageList()
if PcdErrorMessageList:
Package.PcdErrorCommentDict[(PcdEntry.GetTokenSpaceGuidCName(), PcdErrorObj.GetErrorNumber())] = \
PcdErrorMessageList
if XmlList(Item, '/PackageSurfaceArea/PcdDeclarations') and not \
XmlList(Item, '/PackageSurfaceArea/PcdDeclarations/PcdEntry'):
Package.SetPcdList([None])
#
# PcdCheck
#
for SubItem in XmlList(Item, '/PackageSurfaceArea/PcdRelationshipChecks/PcdCheck'):
Tmp = PcdCheckXml()
PcdCheck = Tmp.FromXml(SubItem, 'PcdCheck')
Package.PcdChecks.append(PcdCheck)
#
# Modules
#
for SubItem in XmlList(Item, '/PackageSurfaceArea/Modules/ModuleSurfaceArea'):
Tmp = ModuleSurfaceAreaXml()
Module = Tmp.FromXml(SubItem, 'ModuleSurfaceArea')
ModuleDictKey = (Module.GetGuid(), Module.GetVersion(), Module.GetName(), Module.GetModulePath())
Package.ModuleDict[ModuleDictKey] = Module
#
# MiscellaneousFile
#
Tmp = MiscellaneousFileXml()
MiscFileList = Tmp.FromXml(XmlNode(Item, '/PackageSurfaceArea/MiscellaneousFiles'), 'MiscellaneousFiles')
if MiscFileList:
Package.SetMiscFileList([MiscFileList])
else:
Package.SetMiscFileList([])
#
# UserExtensions
#
for Item in XmlList(Item, '/PackageSurfaceArea/UserExtensions'):
Tmp = UserExtensionsXml()
UserExtension = Tmp.FromXml(Item, 'UserExtensions')
Package.UserExtensionList.append(UserExtension)
self.Package = Package
return self.Package
def ToXml(self, Package):
if self.Package:
pass
#
# Create PackageSurfaceArea node
#
DomPackage = minidom.Document().createElement('PackageSurfaceArea')
#
# Header
#
Tmp = PackageHeaderXml()
DomPackage.appendChild(Tmp.ToXml(Package, 'Header'))
#
# ClonedFrom
#
Tmp = ClonedFromXml()
if Package.GetClonedFromList() != []:
DomPackage.appendChild(Tmp.ToXml(Package.GetClonedFromList()[0], 'ClonedFrom'))
#
# LibraryClass
#
LibraryClassNode = CreateXmlElement('LibraryClassDeclarations', '', [], [])
for LibraryClass in Package.GetLibraryClassList():
Tmp = LibraryClassXml()
LibraryClassNode.appendChild(Tmp.ToXml(LibraryClass, 'LibraryClass'))
DomPackage.appendChild(LibraryClassNode)
#
# IndustryStandardHeader
#
IndustryStandardHeaderNode = CreateXmlElement('IndustryStandardIncludes', '', [], [])
for Include in Package.GetStandardIncludeFileList():
Tmp = IndustryStandardHeaderXml()
IndustryStandardHeaderNode.appendChild(Tmp.ToXml(Include, 'IndustryStandardHeader'))
DomPackage.appendChild(IndustryStandardHeaderNode)
#
# PackageHeader
#
PackageIncludeHeaderNode = CreateXmlElement('PackageIncludes', '', [], [])
for Include in Package.GetPackageIncludeFileList():
Tmp = PackageIncludeHeaderXml()
PackageIncludeHeaderNode.appendChild(Tmp.ToXml(Include, 'PackageHeader'))
DomPackage.appendChild(PackageIncludeHeaderNode)
ModuleNode = CreateXmlElement('Modules', '', [], [])
for Module in Package.GetModuleDict().values():
Tmp = ModuleSurfaceAreaXml()
ModuleNode.appendChild(Tmp.ToXml(Module))
DomPackage.appendChild(ModuleNode)
#
# Guid
#
GuidProtocolPpiNode = CreateXmlElement('GuidDeclarations', '', [], [])
for GuidProtocolPpi in Package.GetGuidList():
Tmp = GuidXml('Package')
GuidProtocolPpiNode.appendChild(Tmp.ToXml\
(GuidProtocolPpi, 'Entry'))
DomPackage.appendChild(GuidProtocolPpiNode)
#
# Protocol
#
GuidProtocolPpiNode = \
CreateXmlElement('ProtocolDeclarations', '', [], [])
for GuidProtocolPpi in Package.GetProtocolList():
Tmp = ProtocolXml('Package')
GuidProtocolPpiNode.appendChild\
(Tmp.ToXml(GuidProtocolPpi, 'Entry'))
DomPackage.appendChild(GuidProtocolPpiNode)
#
# Ppi
#
GuidProtocolPpiNode = CreateXmlElement('PpiDeclarations', '', [], [])
for GuidProtocolPpi in Package.GetPpiList():
Tmp = PpiXml('Package')
GuidProtocolPpiNode.appendChild\
(Tmp.ToXml(GuidProtocolPpi, 'Entry'))
DomPackage.appendChild(GuidProtocolPpiNode)
#
# PcdEntry
#
PcdEntryNode = CreateXmlElement('PcdDeclarations', '', [], [])
for PcdEntry in Package.GetPcdList():
Tmp = PcdEntryXml()
PcdEntryNode.appendChild(Tmp.ToXml2(PcdEntry, 'PcdEntry'))
DomPackage.appendChild(PcdEntryNode)
#
# MiscellaneousFile
#
Tmp = MiscellaneousFileXml()
if Package.GetMiscFileList():
DomPackage.appendChild(Tmp.ToXml(Package.GetMiscFileList()[0], 'MiscellaneousFiles'))
#
# UserExtensions
#
if Package.GetUserExtensionList():
for UserExtension in Package.GetUserExtensionList():
Tmp = UserExtensionsXml()
DomPackage.appendChild(Tmp.ToXml(UserExtension, 'UserExtensions'))
return DomPackage
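#
# Usage sketch (added for illustration): PackageSurfaceAreaXml is driven the same
# way DistributionPackageXml in XmlParser.py drives it -- FromXml() consumes a
# <PackageSurfaceArea> node and ToXml() rebuilds one from a PackageObject.
# Assuming Pkg is a document returned by XmlParseFile():
#
#   for Item in XmlList(Pkg, '/DistributionPackage/PackageSurfaceArea'):
#       Package = PackageSurfaceAreaXml().FromXml(Item, 'PackageSurfaceArea')
#       DomNode = PackageSurfaceAreaXml().ToXml(Package)   # minidom element
#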
| edk2-master | BaseTools/Source/Python/UPT/Xml/PackageSurfaceAreaXml.py |
## @file
# Python 'Object' package initialization file.
#
# This file is required to make Python interpreter treat the directory
# as containing package.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
Object
'''
| edk2-master | BaseTools/Source/Python/UPT/Object/__init__.py |
## @file
# This file is used to define common items of class object
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
'''
Common Object
'''
from Library.DataType import TAB_LANGUAGE_EN_US
## HelpTextObject
#
# @param object: Inherited from object class
#
class HelpTextObject(object):
def __init__(self):
self.HelpText = TextObject()
def SetHelpText(self, HelpText):
self.HelpText = HelpText
def GetHelpText(self):
return self.HelpText
## HelpTextListObject
#
# @param object: Inherited from object class
#
class HelpTextListObject(object):
def __init__(self):
self.HelpTextList = []
def SetHelpTextList(self, HelpTextList):
self.HelpTextList = HelpTextList
def GetHelpTextList(self):
return self.HelpTextList
## PromptListObject
#
# @param object: Inherited from object class
#
class PromptListObject(object):
def __init__(self):
self.PromptList = []
def SetPromptList(self, PromptList):
self.PromptList = PromptList
def GetPromptList(self):
return self.PromptList
## CommonPropertiesObject
#
# This class defines common attributes used in Module/Platform/Package files
#
# @param object: Inherited from object class
# @param Usage: Input value for Usage, default is []
# @param FeatureFlag: Input value for FeatureFlag, default is ''
# @param SupArchList: Input value for SupArchList, default is []
# @param HelpText: Input value for HelpText, default is ''
# @param HelpTextList: Input value for HelpTextList, default is []
#
class CommonPropertiesObject(HelpTextObject, HelpTextListObject):
def __init__(self):
self.Usage = []
self.FeatureFlag = ''
self.SupArchList = []
self.GuidValue = ''
HelpTextObject.__init__(self)
HelpTextListObject.__init__(self)
def SetUsage(self, Usage):
self.Usage = Usage
def GetUsage(self):
return self.Usage
def SetFeatureFlag(self, FeatureFlag):
self.FeatureFlag = FeatureFlag
def GetFeatureFlag(self):
return self.FeatureFlag
def SetSupArchList(self, SupArchList):
self.SupArchList = SupArchList
def GetSupArchList(self):
return self.SupArchList
def SetGuidValue(self, GuidValue):
self.GuidValue = GuidValue
def GetGuidValue(self):
return self.GuidValue
## CommonHeaderObject
#
# This class defined common header items used in Module/Platform/Package files
#
# @param object: Inherited from object class
#
class CommonHeaderObject(object):
def __init__(self):
self.AbstractList = []
self.DescriptionList = []
self.CopyrightList = []
self.LicenseList = []
def SetAbstract(self, Abstract):
if isinstance(Abstract, list):
self.AbstractList = Abstract
else:
self.AbstractList.append(Abstract)
def GetAbstract(self):
return self.AbstractList
def SetDescription(self, Description):
if isinstance(Description, list):
self.DescriptionList = Description
else:
self.DescriptionList.append(Description)
def GetDescription(self):
return self.DescriptionList
def SetCopyright(self, Copyright):
if isinstance(Copyright, list):
self.CopyrightList = Copyright
else:
self.CopyrightList.append(Copyright)
def GetCopyright(self):
return self.CopyrightList
def SetLicense(self, License):
if isinstance(License, list):
self.LicenseList = License
else:
self.LicenseList.append(License)
def GetLicense(self):
return self.LicenseList
## BinaryHeaderObject
#
# This class defined Binary header items used in Module/Platform/Package files
#
# @param object: Inherited from object class
#
class BinaryHeaderObject(object):
def __init__(self):
self.BinaryHeaderAbstractList = []
self.BinaryHeaderDescriptionList = []
self.BinaryHeaderCopyrightList = []
self.BinaryHeaderLicenseList = []
def SetBinaryHeaderAbstract(self, Abstract):
if isinstance(Abstract, list) and Abstract:
self.BinaryHeaderAbstractList = Abstract
elif isinstance(Abstract, tuple) and Abstract[1]:
self.BinaryHeaderAbstractList.append(Abstract)
def GetBinaryHeaderAbstract(self):
return self.BinaryHeaderAbstractList
def SetBinaryHeaderDescription(self, Description):
if isinstance(Description, list) and Description:
self.BinaryHeaderDescriptionList = Description
elif isinstance(Description, tuple) and Description[1]:
self.BinaryHeaderDescriptionList.append(Description)
def GetBinaryHeaderDescription(self):
return self.BinaryHeaderDescriptionList
def SetBinaryHeaderCopyright(self, Copyright):
if isinstance(Copyright, list) and Copyright:
self.BinaryHeaderCopyrightList = Copyright
elif isinstance(Copyright, tuple) and Copyright[1]:
self.BinaryHeaderCopyrightList.append(Copyright)
def GetBinaryHeaderCopyright(self):
return self.BinaryHeaderCopyrightList
def SetBinaryHeaderLicense(self, License):
if isinstance(License, list) and License:
self.BinaryHeaderLicenseList = License
elif isinstance(License, tuple) and License[1]:
self.BinaryHeaderLicenseList.append(License)
def GetBinaryHeaderLicense(self):
return self.BinaryHeaderLicenseList
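##
# Illustrative usage sketch for the header objects above: the CommonHeaderObject
# setters replace the whole list when given a list and append when given a
# single value, while the BinaryHeaderObject setters only keep tuples whose
# second element is non-empty, matching the (Language, String) pairs used for
# binary headers. The literal values below are example assumptions.
#
#   Header = CommonHeaderObject()
#   Header.SetAbstract(('en-US', 'Example package abstract'))    # appended
#   Header.SetAbstract([('en-US', 'Replaces the whole list')])   # list replaces
#
#   BinHeader = BinaryHeaderObject()
#   BinHeader.SetBinaryHeaderAbstract(('en-US', 'Binary abstract'))   # kept
#   BinHeader.SetBinaryHeaderAbstract(('en-US', ''))                  # dropped
#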
## ClonedRecordObject
#
# This class defined ClonedRecord items used in Module/Platform/Package files
#
# @param object: Inherited from object class
#
class ClonedRecordObject(object):
def __init__(self):
self.IdNum = 0
self.FarGuid = ''
self.PackageGuid = ''
self.PackageVersion = ''
self.ModuleGuid = ''
self.ModuleVersion = ''
def SetId(self, IdNo):
self.IdNum = IdNo
def GetId(self):
return self.IdNum
def SetFarGuid(self, FarGuid):
self.FarGuid = FarGuid
def GetFarGuid(self):
return self.FarGuid
def SetPackageGuid(self, PackageGuid):
self.PackageGuid = PackageGuid
def GetPackageGuid(self):
return self.PackageGuid
def SetPackageVersion(self, PackageVersion):
self.PackageVersion = PackageVersion
def GetPackageVersion(self):
return self.PackageVersion
def SetModuleGuid(self, ModuleGuid):
self.ModuleGuid = ModuleGuid
def GetModuleGuid(self):
return self.ModuleGuid
def SetModuleVersion(self, ModuleVersion):
self.ModuleVersion = ModuleVersion
def GetModuleVersion(self):
return self.ModuleVersion
## TextObject
#
# This class defined Text item used in PKG file
#
# @param object: Inherited from object class
#
class TextObject(object):
def __init__(self):
self.Lang = TAB_LANGUAGE_EN_US
self.String = ''
def SetLang(self, Lang):
self.Lang = Lang
def GetLang(self):
return self.Lang
def SetString(self, String):
self.String = String
def GetString(self):
return self.String
## FileNameObject
#
# This class defined File item used in module, for binary files
#
# @param CommonPropertiesObject: Inherited from CommonPropertiesObject class
#
class FileNameObject(CommonPropertiesObject):
def __init__(self):
self.FileType = ''
self.Filename = ''
CommonPropertiesObject.__init__(self)
def SetFileType(self, FileType):
self.FileType = FileType
def GetFileType(self):
return self.FileType
def SetFilename(self, Filename):
self.Filename = Filename
def GetFilename(self):
return self.Filename
## FileObject
#
# This class defined File item used in PKG file
#
# @param object: Inherited from object class
#
class FileObject(object):
def __init__(self):
self.Executable = ''
self.Uri = ''
self.OsType = ''
def SetExecutable(self, Executable):
self.Executable = Executable
def GetExecutable(self):
return self.Executable
def SetURI(self, URI):
self.Uri = URI
def GetURI(self):
return self.Uri
def SetOS(self, OsType):
self.OsType = OsType
def GetOS(self):
return self.OsType
##
# MiscFileObject is used for xml
#
# @param CommonHeaderObject: Inherited from CommonHeaderObject class
#
class MiscFileObject(CommonHeaderObject):
def __init__(self):
self.Name = ''
self.FileList = []
CommonHeaderObject.__init__(self)
def SetName(self, Name):
self.Name = Name
def GetName(self):
return self.Name
def SetFileList(self, FileList):
self.FileList = FileList
def GetFileList(self):
return self.FileList
##
# ToolsObject
#
class ToolsObject(MiscFileObject):
pass
## GuidVersionObject
#
# This class defined GUID/Version items used in PKG file
#
# @param object: Inherited from object class
#
class GuidVersionObject(object):
def __init__(self):
self.Guid = ''
self.Version = ''
def SetGuid(self, Guid):
self.Guid = Guid
def GetGuid(self):
return self.Guid
def SetVersion(self, Version):
self.Version = Version
def GetVersion(self):
return self.Version
## IdentificationObject
#
# This class defined Identification items used in Module/Platform/Package files
#
# @param object: Inherited from object class
#
class IdentificationObject(GuidVersionObject):
def __init__(self):
self.Name = ''
self.BaseName = ''
self.FileName = ''
self.FullPath = ''
self.RelaPath = ''
self.PackagePath = ''
self.ModulePath = ''
self.CombinePath = ''
GuidVersionObject.__init__(self)
def SetName(self, Name):
self.Name = Name
def GetName(self):
return self.Name
def SetBaseName(self, BaseName):
self.BaseName = BaseName
def GetBaseName(self):
return self.BaseName
def SetFileName(self, FileName):
self.FileName = FileName
def GetFileName(self):
return self.FileName
def SetFullPath(self, FullPath):
self.FullPath = FullPath
def GetFullPath(self):
return self.FullPath
def SetRelaPath(self, RelaPath):
self.RelaPath = RelaPath
def GetRelaPath(self):
return self.RelaPath
def SetPackagePath(self, PackagePath):
self.PackagePath = PackagePath
def GetPackagePath(self):
return self.PackagePath
def SetModulePath(self, ModulePath):
self.ModulePath = ModulePath
def GetModulePath(self):
return self.ModulePath
def SetCombinePath(self, CombinePath):
self.CombinePath = CombinePath
def GetCombinePath(self):
return self.CombinePath
## GuidProtocolPpiCommonObject
#
# This class defined Guid, Protocol and Ppi like items used in
# Module/Platform/Package files
#
# @param CommonPropertiesObject: Inherited from CommonPropertiesObject class
#
class GuidProtocolPpiCommonObject(CommonPropertiesObject):
def __init__(self):
self.Name = ''
self.CName = ''
self.Guid = ''
self.SupModuleList = []
CommonPropertiesObject.__init__(self)
def SetName(self, Name):
self.Name = Name
def GetName(self):
return self.Name
def SetCName(self, CName):
self.CName = CName
def GetCName(self):
return self.CName
def SetGuid(self, Guid):
self.Guid = Guid
def GetGuid(self):
return self.Guid
def SetSupModuleList(self, SupModuleList):
self.SupModuleList = SupModuleList
def GetSupModuleList(self):
return self.SupModuleList
## GuidObject
#
# This class defined Guid item used in Module/Platform/Package files
#
# @param GuidProtocolPpiCommonObject: GuidProtocolPpiCommonObject
#
class GuidObject(GuidProtocolPpiCommonObject):
def __init__(self):
self.VariableName = ''
self.GuidTypeList = []
GuidProtocolPpiCommonObject.__init__(self)
def SetVariableName(self, VariableName):
self.VariableName = VariableName
def GetVariableName(self):
return self.VariableName
def SetGuidTypeList(self, GuidTypeList):
self.GuidTypeList = GuidTypeList
def GetGuidTypeList(self):
return self.GuidTypeList
## ProtocolObject
#
# This class defined Protocol item used in Module/Platform/Package files
#
# @param GuidProtocolPpiCommonObject: Inherited from
# GuidProtocolPpiCommonObject
#
class ProtocolObject(GuidProtocolPpiCommonObject):
def __init__(self):
self.Notify = False
GuidProtocolPpiCommonObject.__init__(self)
def SetNotify(self, Notify):
self.Notify = Notify
def GetNotify(self):
return self.Notify
## PpiObject
#
# This class defined Ppi item used in Module/Platform/Package files
#
# @param GuidProtocolPpiCommonObject: Inherited from
# GuidProtocolPpiCommonObject
#
class PpiObject(GuidProtocolPpiCommonObject):
def __init__(self):
self.Notify = False
GuidProtocolPpiCommonObject.__init__(self)
def SetNotify(self, Notify):
self.Notify = Notify
def GetNotify(self):
return self.Notify
## DefineClass
#
# This class defines the DEFINE items used in Module/Platform/Package files
#
# @param object: Inherited from object class
#
class DefineClass(object):
def __init__(self):
self.Define = {}
## UserExtensionObject
#
# @param object: Inherited from object class
#
class UserExtensionObject(object):
def __init__(self):
self.UserID = ''
self.Identifier = ''
self.BinaryAbstractList = []
self.BinaryDescriptionList = []
self.BinaryCopyrightList = []
self.BinaryLicenseList = []
self.UniLangDefsList = []
#
# { Statement : Arch , ... }
#
self.DefinesDict = {}
#
# { Arch : Statement , ... }
#
self.BuildOptionDict = {}
self.IncludesDict = {}
self.SourcesDict = {}
self.BinariesDict = {}
#
# UserExtension statement from meta-data file [UserExtension] section
#
self.Statement = ''
self.SupArchList = []
def SetStatement(self, Statement):
self.Statement = Statement
def GetStatement(self):
return self.Statement
def SetSupArchList(self, ArchList):
self.SupArchList = ArchList
def GetSupArchList(self):
return self.SupArchList
def SetUserID(self, UserID):
self.UserID = UserID
def GetUserID(self):
return self.UserID
def SetIdentifier(self, Identifier):
self.Identifier = Identifier
def GetIdentifier(self):
return self.Identifier
def SetUniLangDefsList(self, UniLangDefsList):
self.UniLangDefsList = UniLangDefsList
def GetUniLangDefsList(self):
return self.UniLangDefsList
def SetBinaryAbstract(self, BinaryAbstractList):
self.BinaryAbstractList = BinaryAbstractList
def GetBinaryAbstract(self, Lang=None):
if Lang:
for (Key, Value) in self.BinaryAbstractList:
if Key == Lang:
return Value
return None
else:
return self.BinaryAbstractList
def SetBinaryDescription(self, BinaryDescriptionList):
self.BinaryDescriptionList = BinaryDescriptionList
def GetBinaryDescription(self, Lang=None):
if Lang:
for (Key, Value) in self.BinaryDescriptionList:
if Key == Lang:
return Value
return None
else:
return self.BinaryDescriptionList
def SetBinaryCopyright(self, BinaryCopyrightList):
self.BinaryCopyrightList = BinaryCopyrightList
def GetBinaryCopyright(self, Lang=None):
if Lang:
for (Key, Value) in self.BinaryCopyrightList:
if Key == Lang:
return Value
return None
else:
return self.BinaryCopyrightList
def SetBinaryLicense(self, BinaryLicenseList):
self.BinaryLicenseList = BinaryLicenseList
def GetBinaryLicense(self, Lang=None):
if Lang:
for (Key, Value) in self.BinaryLicenseList:
if Key == Lang:
return Value
return None
else:
return self.BinaryLicenseList
def SetDefinesDict(self, DefinesDict):
self.DefinesDict = DefinesDict
def GetDefinesDict(self):
return self.DefinesDict
def SetBuildOptionDict(self, BuildOptionDict):
self.BuildOptionDict = BuildOptionDict
def GetBuildOptionDict(self):
return self.BuildOptionDict
def SetIncludesDict(self, IncludesDict):
self.IncludesDict = IncludesDict
def GetIncludesDict(self):
return self.IncludesDict
def SetSourcesDict(self, SourcesDict):
self.SourcesDict = SourcesDict
def GetSourcesDict(self):
return self.SourcesDict
def SetBinariesDict(self, BinariesDict):
self.BinariesDict = BinariesDict
def GetBinariesDict(self):
return self.BinariesDict
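##
# Illustrative usage sketch for the language-aware getters above: the binary
# abstract/description/copyright/license lists hold (Language, String) pairs,
# so passing Lang performs a simple linear lookup. The language tag and the
# strings below are example assumptions.
#
#   UserExt = UserExtensionObject()
#   UserExt.SetBinaryAbstract([('en-US', 'Example binary abstract')])
#   UserExt.GetBinaryAbstract('en-US')   # -> 'Example binary abstract'
#   UserExt.GetBinaryAbstract('fr-FR')   # -> None, no matching language
#   UserExt.GetBinaryAbstract()          # -> the full (Language, String) list
#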
## LibraryClassObject
#
# This class defined Library item used in Module/Platform/Package files
#
# @param CommonPropertiesObject: Inherited from CommonPropertiesObject class
#
class LibraryClassObject(CommonPropertiesObject):
def __init__(self):
self.LibraryClass = ''
self.IncludeHeader = ''
self.SupModuleList = []
self.RecommendedInstance = GuidVersionObject()
CommonPropertiesObject.__init__(self)
def SetLibraryClass(self, LibraryClass):
self.LibraryClass = LibraryClass
def GetLibraryClass(self):
return self.LibraryClass
def SetSupModuleList(self, SupModuleList):
self.SupModuleList = SupModuleList
def GetSupModuleList(self):
return self.SupModuleList
def SetIncludeHeader(self, IncludeHeader):
self.IncludeHeader = IncludeHeader
def GetIncludeHeader(self):
return self.IncludeHeader
def SetRecommendedInstance(self, RecommendedInstance):
self.RecommendedInstance = RecommendedInstance
def GetRecommendedInstance(self):
return self.RecommendedInstance
## PcdErrorObject
#
# @param object: Inherited from object class
#
class PcdErrorObject(object):
def __init__(self):
self.ValidValue = ''
self.ValidValueLang = ''
self.ValidValueRange = ''
self.Expression = ''
self.ErrorNumber = ''
self.ErrorMessageList = []
self.TokenSpaceGuidCName = ''
self.CName = ''
self.FileLine = ''
self.LineNum = 0
def SetValidValue(self, ValidValue):
self.ValidValue = ValidValue
def GetValidValue(self):
return self.ValidValue
def SetValidValueLang(self, ValidValueLang):
self.ValidValueLang = ValidValueLang
def GetValidValueLang(self):
return self.ValidValueLang
def SetValidValueRange(self, ValidValueRange):
self.ValidValueRange = ValidValueRange
def GetValidValueRange(self):
return self.ValidValueRange
def SetExpression(self, Expression):
self.Expression = Expression
def GetExpression(self):
return self.Expression
def SetErrorNumber(self, ErrorNumber):
self.ErrorNumber = ErrorNumber
def GetErrorNumber(self):
return self.ErrorNumber
def SetErrorMessageList(self, ErrorMessageList):
self.ErrorMessageList = ErrorMessageList
def GetErrorMessageList(self):
return self.ErrorMessageList
def SetTokenSpaceGuidCName(self, TokenSpaceGuidCName):
self.TokenSpaceGuidCName = TokenSpaceGuidCName
def GetTokenSpaceGuidCName(self):
return self.TokenSpaceGuidCName
def SetCName(self, CName):
self.CName = CName
def GetCName(self):
return self.CName
def SetFileLine(self, FileLine):
self.FileLine = FileLine
def GetFileLine(self):
return self.FileLine
def SetLineNum(self, LineNum):
self.LineNum = LineNum
def GetLineNum(self):
return self.LineNum
## IncludeObject
#
# This class defined Include item used in Module/Platform/Package files
#
# @param CommonPropertiesObject: Inherited from CommonPropertiesObject class
#
class IncludeObject(CommonPropertiesObject):
def __init__(self):
self.FilePath = ''
self.ModuleType = ''
self.SupModuleList = []
self.Comment = ''
CommonPropertiesObject.__init__(self)
def SetFilePath(self, FilePath):
self.FilePath = FilePath
def GetFilePath(self):
return self.FilePath
def SetModuleType(self, ModuleType):
self.ModuleType = ModuleType
def GetModuleType(self):
return self.ModuleType
def SetSupModuleList(self, SupModuleList):
self.SupModuleList = SupModuleList
def GetSupModuleList(self):
return self.SupModuleList
def SetComment(self, Comment):
self.Comment = Comment
def GetComment(self):
return self.Comment
## PcdObject
#
# This class defined Pcd item used in Module/Platform/Package files
#
# @param CName: Input value for CName, default is ''
# @param Token: Input value for Token, default is ''
# @param TokenSpaceGuidCName: Input value for TokenSpaceGuidCName, default is
# ''
# @param DatumType: Input value for DatumType, default is ''
# @param MaxDatumSize: Input value for MaxDatumSize, default is ''
# @param DefaultValue: Input value for DefaultValue, default is ''
# @param ItemType: Input value for ItemType, default is ''
# @param ValidUsage: Input value for ValidUsage, default is ''
# @param PcdErrorsList: Input value for PcdErrorsList, default is []
# @param SupModuleList: Input value for SupModuleList, default is []
#
class PcdObject(CommonPropertiesObject, HelpTextListObject, PromptListObject):
def __init__(self):
self.PcdCName = ''
self.CName = ''
self.Token = ''
self.TokenSpaceGuidCName = ''
self.TokenSpaceGuidValue = ''
self.DatumType = ''
self.MaxDatumSize = ''
self.DefaultValue = ''
self.Offset = ''
self.ValidUsage = ''
self.ItemType = ''
self.PcdErrorsList = []
self.SupModuleList = []
CommonPropertiesObject.__init__(self)
HelpTextListObject.__init__(self)
PromptListObject.__init__(self)
def SetPcdCName(self, PcdCName):
self.PcdCName = PcdCName
def GetPcdCName(self):
return self.PcdCName
def SetCName(self, CName):
self.CName = CName
def GetCName(self):
return self.CName
def SetToken(self, Token):
self.Token = Token
def GetOffset(self):
return self.Offset
def SetOffset(self, Offset):
self.Offset = Offset
def GetToken(self):
return self.Token
def SetTokenSpaceGuidCName(self, TokenSpaceGuidCName):
self.TokenSpaceGuidCName = TokenSpaceGuidCName
def GetTokenSpaceGuidCName(self):
return self.TokenSpaceGuidCName
def SetTokenSpaceGuidValue(self, TokenSpaceGuidValue):
self.TokenSpaceGuidValue = TokenSpaceGuidValue
def GetTokenSpaceGuidValue(self):
return self.TokenSpaceGuidValue
def SetDatumType(self, DatumType):
self.DatumType = DatumType
def GetDatumType(self):
return self.DatumType
def SetMaxDatumSize(self, MaxDatumSize):
self.MaxDatumSize = MaxDatumSize
def GetMaxDatumSize(self):
return self.MaxDatumSize
def SetDefaultValue(self, DefaultValue):
self.DefaultValue = DefaultValue
def GetDefaultValue(self):
return self.DefaultValue
def SetValidUsage(self, ValidUsage):
self.ValidUsage = ValidUsage
def GetValidUsage(self):
return self.ValidUsage
def SetPcdErrorsList(self, PcdErrorsList):
self.PcdErrorsList = PcdErrorsList
def GetPcdErrorsList(self):
return self.PcdErrorsList
def SetItemType(self, ItemType):
self.ItemType = ItemType
def GetItemType(self):
return self.ItemType
def SetSupModuleList(self, SupModuleList):
self.SupModuleList = SupModuleList
def GetSupModuleList(self):
return self.SupModuleList
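##
# Minimal usage sketch: PcdObject fields are populated through the setters
# rather than constructor arguments, and PCD error information travels as a
# list of PcdErrorObject instances. The token values and names below are
# example assumptions only.
#
if __name__ == '__main__':
    ExamplePcdError = PcdErrorObject()
    ExamplePcdError.SetErrorNumber('0x80000001')
    ExamplePcdError.SetErrorMessageList(['Value must be below 0x100'])
    ExamplePcd = PcdObject()
    ExamplePcd.SetCName('PcdExampleTimeout')
    ExamplePcd.SetTokenSpaceGuidCName('gExampleTokenSpaceGuid')
    ExamplePcd.SetToken('0x00000001')
    ExamplePcd.SetDatumType('UINT32')
    ExamplePcd.SetDefaultValue('0x10')
    ExamplePcd.SetItemType('FixedAtBuild')
    ExamplePcd.SetPcdErrorsList([ExamplePcdError])
    print(ExamplePcd.GetCName(), ExamplePcd.GetDefaultValue())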
| edk2-master | BaseTools/Source/Python/UPT/Object/POM/CommonObject.py |
## @file
# Python 'Object' package initialization file.
#
# This file is required to make Python interpreter treat the directory
# as containing package.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
POM
'''
| edk2-master | BaseTools/Source/Python/UPT/Object/POM/__init__.py |
## @file
# This file is used to define a class object to describe a module
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
'''
ModuleObject
'''
##
# Import Modules
#
from Object.POM.CommonObject import CommonPropertiesObject
from Object.POM.CommonObject import IdentificationObject
from Object.POM.CommonObject import CommonHeaderObject
from Object.POM.CommonObject import BinaryHeaderObject
from Object.POM.CommonObject import HelpTextListObject
from Object.POM.CommonObject import GuidVersionObject
##
# BootModeObject
#
class BootModeObject(CommonPropertiesObject, HelpTextListObject):
def __init__(self):
self.SupportedBootModes = ''
CommonPropertiesObject.__init__(self)
HelpTextListObject.__init__(self)
def SetSupportedBootModes(self, SupportedBootModes):
self.SupportedBootModes = SupportedBootModes
def GetSupportedBootModes(self):
return self.SupportedBootModes
##
# EventObject
#
class EventObject(CommonPropertiesObject, HelpTextListObject):
def __init__(self):
self.EventType = ''
CommonPropertiesObject.__init__(self)
HelpTextListObject.__init__(self)
def SetEventType(self, EventType):
self.EventType = EventType
def GetEventType(self):
return self.EventType
##
# HobObject
#
class HobObject(CommonPropertiesObject, HelpTextListObject):
def __init__(self):
self.HobType = ''
CommonPropertiesObject.__init__(self)
HelpTextListObject.__init__(self)
def SetHobType(self, HobType):
self.HobType = HobType
def GetHobType(self):
return self.HobType
##
# SpecObject
#
class SpecObject(object):
def __init__(self):
self.Spec = ''
self.Version = ''
def SetSpec(self, Spec):
self.Spec = Spec
def GetSpec(self):
return self.Spec
def SetVersion(self, Version):
self.Version = Version
def GetVersion(self):
return self.Version
## ModuleHeaderObject
#
# This class defined header items used in Module file
#
class ModuleHeaderObject(IdentificationObject, CommonHeaderObject, BinaryHeaderObject):
def __init__(self):
self.IsLibrary = False
self.IsLibraryModList = []
self.ModuleType = ''
self.BinaryModule = False
self.PcdIsDriver = ''
self.PiSpecificationVersion = ''
self.UefiSpecificationVersion = ''
self.UNIFlag = False
self.ModuleUniFile = ''
#
# SpecObject
#
self.SpecList = []
#
# BootModeObject
#
self.BootModeList = []
#
# EventObject
#
self.EventList = []
#
# HobObject
#
self.HobList = []
#
# LibraryClassObject
#
self.LibraryClassList = []
self.SupArchList = []
IdentificationObject.__init__(self)
CommonHeaderObject.__init__(self)
BinaryHeaderObject.__init__(self)
def SetIsLibrary(self, IsLibrary):
self.IsLibrary = IsLibrary
def GetIsLibrary(self):
return self.IsLibrary
def SetIsLibraryModList(self, IsLibraryModList):
self.IsLibraryModList = IsLibraryModList
def GetIsLibraryModList(self):
return self.IsLibraryModList
def SetModuleType(self, ModuleType):
self.ModuleType = ModuleType
def GetModuleType(self):
return self.ModuleType
def SetBinaryModule(self, BinaryModule):
self.BinaryModule = BinaryModule
def GetBinaryModule(self):
return self.BinaryModule
def SetPcdIsDriver(self, PcdIsDriver):
self.PcdIsDriver = PcdIsDriver
def GetPcdIsDriver(self):
return self.PcdIsDriver
def SetPiSpecificationVersion(self, PiSpecificationVersion):
self.PiSpecificationVersion = PiSpecificationVersion
def GetPiSpecificationVersion(self):
return self.PiSpecificationVersion
def SetUefiSpecificationVersion(self, UefiSpecificationVersion):
self.UefiSpecificationVersion = UefiSpecificationVersion
def GetUefiSpecificationVersion(self):
return self.UefiSpecificationVersion
def SetSpecList(self, SpecList):
self.SpecList = SpecList
def GetSpecList(self):
return self.SpecList
def SetBootModeList(self, BootModeList):
self.BootModeList = BootModeList
def GetBootModeList(self):
return self.BootModeList
def SetEventList(self, EventList):
self.EventList = EventList
def GetEventList(self):
return self.EventList
def SetHobList(self, HobList):
self.HobList = HobList
def GetHobList(self):
return self.HobList
def SetLibraryClassList(self, LibraryClassList):
self.LibraryClassList = LibraryClassList
def GetLibraryClassList(self):
return self.LibraryClassList
def SetSupArchList(self, SupArchList):
self.SupArchList = SupArchList
def GetSupArchList(self):
return self.SupArchList
def SetModuleUniFile(self, ModuleUniFile):
self.ModuleUniFile = ModuleUniFile
def GetModuleUniFile(self):
return self.ModuleUniFile
##
# SourceFileObject
#
class SourceFileObject(CommonPropertiesObject):
def __init__(self):
CommonPropertiesObject.__init__(self)
self.SourceFile = ''
self.TagName = ''
self.ToolCode = ''
self.Family = ''
self.FileType = ''
def SetSourceFile(self, SourceFile):
self.SourceFile = SourceFile
def GetSourceFile(self):
return self.SourceFile
def SetTagName(self, TagName):
self.TagName = TagName
def GetTagName(self):
return self.TagName
def SetToolCode(self, ToolCode):
self.ToolCode = ToolCode
def GetToolCode(self):
return self.ToolCode
def SetFamily(self, Family):
self.Family = Family
def GetFamily(self):
return self.Family
def SetFileType(self, FileType):
self.FileType = FileType
def GetFileType(self):
return self.FileType
##
# BinaryFileObject
#
class BinaryFileObject(CommonPropertiesObject):
def __init__(self):
self.FileNamList = []
self.AsBuiltList = []
CommonPropertiesObject.__init__(self)
def SetFileNameList(self, FileNamList):
self.FileNamList = FileNamList
def GetFileNameList(self):
return self.FileNamList
def SetAsBuiltList(self, AsBuiltList):
self.AsBuiltList = AsBuiltList
def GetAsBuiltList(self):
return self.AsBuiltList
##
# AsBuildLibraryClassObject
#
class AsBuildLibraryClassObject(object):
def __init__(self):
self.LibGuid = ''
self.LibVersion = ''
self.SupArchList = []
def SetLibGuid(self, LibGuid):
self.LibGuid = LibGuid
def GetLibGuid(self):
return self.LibGuid
def SetLibVersion(self, LibVersion):
self.LibVersion = LibVersion
def GetLibVersion(self):
return self.LibVersion
def SetSupArchList(self, SupArchList):
self.SupArchList = SupArchList
def GetSupArchList(self):
return self.SupArchList
##
# AsBuiltObject
#
class AsBuiltObject(object):
def __init__(self):
#
# list of PcdObject
#
self.PatchPcdList = []
#
# list of PcdObject
#
self.PcdExValueList = []
#
# list of GuidVersionObject
#
self.LibraryInstancesList = []
#
# List of BinaryBuildFlag object
#
self.BinaryBuildFlagList = []
def SetPatchPcdList(self, PatchPcdList):
self.PatchPcdList = PatchPcdList
def GetPatchPcdList(self):
return self.PatchPcdList
def SetPcdExList(self, PcdExValueList):
self.PcdExValueList = PcdExValueList
def GetPcdExList(self):
return self.PcdExValueList
def SetLibraryInstancesList(self, LibraryInstancesList):
self.LibraryInstancesList = LibraryInstancesList
def GetLibraryInstancesList(self):
return self.LibraryInstancesList
def SetBuildFlagsList(self, BinaryBuildFlagList):
self.BinaryBuildFlagList = BinaryBuildFlagList
def GetBuildFlagsList(self):
return self.BinaryBuildFlagList
##
# BinaryBuildFlag, this object will include those fields that are not
# covered by the UPT Spec BinaryFile field
#
class BinaryBuildFlagObject(object):
def __init__(self):
self.Target = ''
self.TagName = ''
self.Family = ''
self.AsBuiltOptionFlags = ''
def SetTarget(self, Target):
self.Target = Target
def GetTarget(self):
return self.Target
def SetTagName(self, TagName):
self.TagName = TagName
def GetTagName(self):
return self.TagName
def SetFamily(self, Family):
self.Family = Family
def GetFamily(self):
return self.Family
def SetAsBuiltOptionFlags(self, AsBuiltOptionFlags):
self.AsBuiltOptionFlags = AsBuiltOptionFlags
def GetAsBuiltOptionFlags(self):
return self.AsBuiltOptionFlags
##
# ExternObject
#
class ExternObject(CommonPropertiesObject):
def __init__(self):
self.EntryPoint = ''
self.UnloadImage = ''
self.Constructor = ''
self.Destructor = ''
self.SupModList = []
CommonPropertiesObject.__init__(self)
def SetEntryPoint(self, EntryPoint):
self.EntryPoint = EntryPoint
def GetEntryPoint(self):
return self.EntryPoint
def SetUnloadImage(self, UnloadImage):
self.UnloadImage = UnloadImage
def GetUnloadImage(self):
return self.UnloadImage
def SetConstructor(self, Constructor):
self.Constructor = Constructor
def GetConstructor(self):
return self.Constructor
def SetDestructor(self, Destructor):
self.Destructor = Destructor
def GetDestructor(self):
return self.Destructor
def SetSupModList(self, SupModList):
self.SupModList = SupModList
def GetSupModList(self):
return self.SupModList
##
# DepexObject
#
class DepexObject(CommonPropertiesObject):
def __init__(self):
self.Depex = ''
self.ModuelType = ''
CommonPropertiesObject.__init__(self)
def SetDepex(self, Depex):
self.Depex = Depex
def GetDepex(self):
return self.Depex
def SetModuleType(self, ModuleType):
self.ModuelType = ModuleType
def GetModuleType(self):
return self.ModuelType
##
# PackageDependencyObject
#
class PackageDependencyObject(GuidVersionObject, CommonPropertiesObject):
def __init__(self):
self.Package = ''
self.PackageFilePath = ''
GuidVersionObject.__init__(self)
CommonPropertiesObject.__init__(self)
def SetPackageFilePath(self, PackageFilePath):
self.PackageFilePath = PackageFilePath
def GetPackageFilePath(self):
return self.PackageFilePath
def SetPackage(self, Package):
self.Package = Package
def GetPackage(self):
return self.Package
##
# BuildOptionObject
#
class BuildOptionObject(CommonPropertiesObject):
def __init__(self):
CommonPropertiesObject.__init__(self)
self.BuildOption = ''
def SetBuildOption(self, BuildOption):
self.BuildOption = BuildOption
def GetBuildOption(self):
return self.BuildOption
##
# ModuleObject
#
class ModuleObject(ModuleHeaderObject):
def __init__(self):
#
# {Arch : ModuleHeaderObject}
#
self.HeaderDict = {}
#
# LibraryClassObject
#
self.LibraryClassList = []
#
# SourceFileObject
#
self.SourceFileList = []
#
# BinaryFileObject
#
self.BinaryFileList = []
#
# PackageDependencyObject
#
self.PackageDependencyList = []
#
# DepexObject
#
self.PeiDepex = []
#
# DepexObject
#
self.DxeDepex = []
#
# DepexObject
#
self.SmmDepex = []
#
# ProtocolObject
#
self.ProtocolList = []
#
# PpiObject
#
self.PpiList = []
#
# GuidObject
#
self.GuidList = []
#
# PcdObject
#
self.PcdList = []
#
# ExternObject
#
self.ExternList = []
#
# BuildOptionObject
#
self.BuildOptionList = []
#
# UserExtensionObject
#
self.UserExtensionList = []
#
# MiscFileObject
#
self.MiscFileList = []
#
# ClonedFromObject
#
self.ClonedFrom = None
ModuleHeaderObject.__init__(self)
def SetHeaderDict(self, HeaderDict):
self.HeaderDict = HeaderDict
def GetHeaderDict(self):
return self.HeaderDict
def SetLibraryClassList(self, LibraryClassList):
self.LibraryClassList = LibraryClassList
def GetLibraryClassList(self):
return self.LibraryClassList
def SetSourceFileList(self, SourceFileList):
self.SourceFileList = SourceFileList
def GetSourceFileList(self):
return self.SourceFileList
def SetBinaryFileList(self, BinaryFileList):
self.BinaryFileList = BinaryFileList
def GetBinaryFileList(self):
return self.BinaryFileList
def SetPackageDependencyList(self, PackageDependencyList):
self.PackageDependencyList = PackageDependencyList
def GetPackageDependencyList(self):
return self.PackageDependencyList
def SetPeiDepex(self, PeiDepex):
self.PeiDepex = PeiDepex
def GetPeiDepex(self):
return self.PeiDepex
def SetDxeDepex(self, DxeDepex):
self.DxeDepex = DxeDepex
def GetDxeDepex(self):
return self.DxeDepex
def SetSmmDepex(self, SmmDepex):
self.SmmDepex = SmmDepex
def GetSmmDepex(self):
return self.SmmDepex
def SetPpiList(self, PpiList):
self.PpiList = PpiList
def GetPpiList(self):
return self.PpiList
def SetProtocolList(self, ProtocolList):
self.ProtocolList = ProtocolList
def GetProtocolList(self):
return self.ProtocolList
def SetPcdList(self, PcdList):
self.PcdList = PcdList
def GetPcdList(self):
return self.PcdList
def SetGuidList(self, GuidList):
self.GuidList = GuidList
def GetGuidList(self):
return self.GuidList
def SetExternList(self, ExternList):
self.ExternList = ExternList
def GetExternList(self):
return self.ExternList
def SetBuildOptionList(self, BuildOptionList):
self.BuildOptionList = BuildOptionList
def GetBuildOptionList(self):
return self.BuildOptionList
def SetUserExtensionList(self, UserExtensionList):
self.UserExtensionList = UserExtensionList
def GetUserExtensionList(self):
return self.UserExtensionList
def SetMiscFileList(self, MiscFileList):
self.MiscFileList = MiscFileList
def GetMiscFileList(self):
return self.MiscFileList
def SetClonedFrom(self, ClonedFrom):
self.ClonedFrom = ClonedFrom
def GetClonedFrom(self):
return self.ClonedFrom
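##
# Minimal usage sketch: a ModuleObject is typically filled in by the INF
# parsing code, but the same setters can be driven directly. The file name,
# GUID and module type below are example assumptions only.
#
if __name__ == '__main__':
    ExampleSource = SourceFileObject()
    ExampleSource.SetSourceFile('ExampleDriver.c')
    ExampleSource.SetFamily('MSFT')
    ExampleModule = ModuleObject()
    ExampleModule.SetBaseName('ExampleDriver')
    ExampleModule.SetGuid('11111111-2222-3333-4444-555555555555')
    ExampleModule.SetVersion('1.0')
    ExampleModule.SetModuleType('DXE_DRIVER')
    ExampleModule.SetSourceFileList([ExampleSource])
    print(ExampleModule.GetBaseName(), ExampleModule.GetModuleType())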
| edk2-master | BaseTools/Source/Python/UPT/Object/POM/ModuleObject.py |
## @file
# This file is used to define a class object to describe a package
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
'''
PackageObject
'''
##
# Import Modules
#
from Object.POM.CommonObject import CommonPropertiesObject
from Object.POM.CommonObject import IdentificationObject
from Object.POM.CommonObject import CommonHeaderObject
from Object.POM.CommonObject import BinaryHeaderObject
from Library.Misc import Sdict
## StandardIncludeFileObject
#
class StandardIncludeFileObject(CommonPropertiesObject):
def __init__(self):
CommonPropertiesObject.__init__(self)
self.IncludeFile = ''
def SetIncludeFile(self, IncludeFile):
self.IncludeFile = IncludeFile
def GetIncludeFile(self):
return self.IncludeFile
## PackageIncludeFileObject
#
class PackageIncludeFileObject(StandardIncludeFileObject):
pass
##
# PackageObject
#
class PackageObject(IdentificationObject, CommonHeaderObject, BinaryHeaderObject):
def __init__(self):
IdentificationObject.__init__(self)
CommonHeaderObject.__init__(self)
BinaryHeaderObject.__init__(self)
#
# LibraryClassObject
#
self.LibraryClassList = []
#
# FileObject
#
self.IncludePathList = []
#
# StandardIncludeFileObject
#
self.StandardIncludeFileList = []
#
# PackageIncludeFileObject
#
self.PackageIncludeFileList = []
#
# Include and Arch List, item is (IncludePath, SupArchList-List of Arch), used during install package
#
self.IncludeArchList = []
#
# ProtocolObject
#
self.ProtocolList = []
#
# PpiObject
#
self.PpiList = []
#
# GuidObject
#
self.GuidList = []
#
# (PcdObject, PcdErrorObject)
#
self.PcdList = []
#
        # {(PcdTokenSpaceGuidCName, PcdErrorNumber): PcdErrorMessageList}
#
self.PcdErrorCommentDict = {}
#
# UserExtensionObject
#
self.UserExtensionList = []
#
# MiscFileObject
#
self.MiscFileList = []
self.ModuleDict = Sdict()
#
# ClonedRecordObject
#
self.ClonedFromList = []
#
# string object
#
self.ModuleFileList = []
self.PcdChecks = []
self.UNIFlag = False
def SetLibraryClassList(self, LibraryClassList):
self.LibraryClassList = LibraryClassList
def GetLibraryClassList(self):
return self.LibraryClassList
def SetIncludePathList(self, IncludePathList):
self.IncludePathList = IncludePathList
def GetIncludePathList(self):
return self.IncludePathList
def SetIncludeArchList(self, IncludeArchList):
self.IncludeArchList = IncludeArchList
def GetIncludeArchList(self):
return self.IncludeArchList
def SetStandardIncludeFileList(self, StandardIncludeFileList):
self.StandardIncludeFileList = StandardIncludeFileList
def GetStandardIncludeFileList(self):
return self.StandardIncludeFileList
def SetPackageIncludeFileList(self, PackageIncludeFileList):
self.PackageIncludeFileList = PackageIncludeFileList
def GetPackageIncludeFileList(self):
return self.PackageIncludeFileList
def SetProtocolList(self, ProtocolList):
self.ProtocolList = ProtocolList
def GetProtocolList(self):
return self.ProtocolList
def SetPpiList(self, PpiList):
self.PpiList = PpiList
def GetPpiList(self):
return self.PpiList
def SetGuidList(self, GuidList):
self.GuidList = GuidList
def GetGuidList(self):
return self.GuidList
def SetPcdList(self, PcdList):
self.PcdList = PcdList
def GetPcdList(self):
return self.PcdList
def SetUserExtensionList(self, UserExtensionList):
self.UserExtensionList = UserExtensionList
def GetUserExtensionList(self):
return self.UserExtensionList
def SetMiscFileList(self, MiscFileList):
self.MiscFileList = MiscFileList
def GetMiscFileList(self):
return self.MiscFileList
def SetModuleDict(self, ModuleDict):
self.ModuleDict = ModuleDict
def GetModuleDict(self):
return self.ModuleDict
def SetClonedFromList(self, ClonedFromList):
self.ClonedFromList = ClonedFromList
def GetClonedFromList(self):
return self.ClonedFromList
def SetModuleFileList(self, ModuleFileList):
self.ModuleFileList = ModuleFileList
def GetModuleFileList(self):
return self.ModuleFileList
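##
# Minimal usage sketch: PackageObject aggregates the declaration lists that the
# DEC parsing code produces. The GUID C name and value below are example
# assumptions only.
#
if __name__ == '__main__':
    from Object.POM.CommonObject import GuidObject
    ExampleGuid = GuidObject()
    ExampleGuid.SetCName('gExamplePackageTokenSpaceGuid')
    ExampleGuid.SetGuid('aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee')
    ExamplePackage = PackageObject()
    ExamplePackage.SetName('ExamplePkg')
    ExamplePackage.SetGuidList([ExampleGuid])
    print(ExamplePackage.GetName(), len(ExamplePackage.GetGuidList()))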
| edk2-master | BaseTools/Source/Python/UPT/Object/POM/PackageObject.py |
## @file
# This file is used to define class objects for DEC files. They will be consumed by
# DecParser
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
'''
DecObject
'''
## Import modules
#
import os.path
from Library.Misc import Sdict
from Library.DataType import TAB_GUIDS
from Library.DataType import TAB_PPIS
from Library.DataType import TAB_PROTOCOLS
from Library.DataType import TAB_DEC_DEFINES
from Library.DataType import TAB_INCLUDES
from Library.DataType import TAB_LIBRARY_CLASSES
from Library.DataType import TAB_USER_EXTENSIONS
from Library.DataType import TAB_PCDS
from Library.DataType import TAB_ARCH_COMMON
## _DecComments
#
# Base class for all data objects which have head and tail comments
#
class _DecComments:
##constructor
#
def __init__(self):
self._HeadComment = []
self._TailComment = []
## GetComments
#
def GetComments(self):
return self._HeadComment, self._TailComment
## GetHeadComment
#
def GetHeadComment(self):
return self._HeadComment
## SetHeadComment
#
# @param Comment: comment content
#
def SetHeadComment(self, Comment):
self._HeadComment = Comment
## GetTailComment
#
def GetTailComment(self):
return self._TailComment
## SetTailComment
#
# @param Comment: comment content
#
def SetTailComment(self, Comment):
self._TailComment = Comment
## _DecBaseObject
#
# Base class that hold common info
#
class _DecBaseObject(_DecComments):
def __init__(self, PkgFullName):
_DecComments.__init__(self)
#
# Key is combined with (Arch, SectionType)
# Default is common
#
self.ValueDict = Sdict()
self._PkgFullName = PkgFullName
self._PackagePath, self._FileName = os.path.split(PkgFullName)
self._SecName = ''
## GetSectionName
#
def GetSectionName(self):
return self._SecName
## GetPackagePath
#
def GetPackagePath(self):
return self._PackagePath
## GetPackageFile
#
def GetPackageFile(self):
return self._FileName
## GetPackageFullName
#
def GetPackageFullName(self):
return self._PkgFullName
## AddItem
# Add sub-item to current object, sub-class should override it if needed
#
# @param Item: Sub-item to be added
# @param Scope: A list store section name and arch info
#
def AddItem(self, Item, Scope):
if not Scope:
return
if not Item:
return
ArchModule = []
for Ele in Scope:
if Ele[1] in self.ValueDict:
self.ValueDict[Ele[1]].append(Item)
else:
self.ValueDict[Ele[1]] = [Item]
ArchModule.append(Ele[1])
Item.ArchAndModuleType = ArchModule
## _GetItemByArch
# Helper class used by sub-class
# @param Arch: arch
#
def _GetItemByArch(self, Arch):
Arch = Arch.upper()
if Arch not in self.ValueDict:
return []
return self.ValueDict[Arch]
## _GetAllItems
# Get all items, union all arches, items in returned list are unique
#
def _GetAllItems(self):
Retlst = []
for Arch in self.ValueDict:
for Item in self.ValueDict[Arch]:
if Item not in Retlst:
Retlst.append(Item)
return Retlst
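##
# Illustrative note on AddItem() above: for the plain DEC sections the Scope
# argument is a list of pairs whose second element keys ValueDict; in the DEC
# parser that element is the arch, so an entry declared for several arches
# lands in each arch bucket. The section and arch names below are example
# assumptions.
#
#   IncObj = DecIncludeObject('MdePkg/MdePkg.dec')
#   Item = DecIncludeItemObject('Include', 'MdePkg')
#   IncObj.AddItem(Item, [('INCLUDES.IA32', 'IA32'), ('INCLUDES.X64', 'X64')])
#   IncObj.GetIncludes('IA32')   # -> [Item]
#   IncObj.GetAllIncludes()      # -> [Item], deduplicated across arches
#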
## _DecItemBaseObject
#
# Module type and arch the item belongs to
#
class _DecItemBaseObject(_DecComments):
def __init__(self):
_DecComments.__init__(self)
#
# Item's arch, if PCD, also include PCD type
#
self.ArchAndModuleType = []
## GetArchList
#
def GetArchList(self):
ArchSet = set()
for Arch in self.ArchAndModuleType:
ArchSet.add(Arch)
return list(ArchSet)
## DecDefineObject
#
# Class to hold define section information
#
class DecDefineObject(_DecBaseObject):
def __init__(self, PkgFullName):
_DecBaseObject.__init__(self, PkgFullName)
self._SecName = TAB_DEC_DEFINES.upper()
self._DecSpec = ''
self._PkgName = ''
self._PkgGuid = ''
self._PkgVersion = ''
self._PkgUniFile = ''
## GetPackageSpecification
#
def GetPackageSpecification(self):
return self._DecSpec
def SetPackageSpecification(self, DecSpec):
self._DecSpec = DecSpec
## GetPackageName
#
def GetPackageName(self):
return self._PkgName
def SetPackageName(self, PkgName):
self._PkgName = PkgName
## GetPackageGuid
#
def GetPackageGuid(self):
return self._PkgGuid
def SetPackageGuid(self, PkgGuid):
self._PkgGuid = PkgGuid
## GetPackageVersion
#
def GetPackageVersion(self):
return self._PkgVersion
def SetPackageVersion(self, PkgVersion):
self._PkgVersion = PkgVersion
## GetPackageUniFile
#
def GetPackageUniFile(self):
return self._PkgUniFile
def SetPackageUniFile(self, PkgUniFile):
self._PkgUniFile = PkgUniFile
## GetDefines
#
def GetDefines(self):
return self._GetItemByArch(TAB_ARCH_COMMON)
## GetAllDefines
#
def GetAllDefines(self):
return self._GetAllItems()
## DecDefineItemObject
#
# Each item of define section
#
class DecDefineItemObject(_DecItemBaseObject):
def __init__(self):
_DecItemBaseObject.__init__(self)
self.Key = ''
self.Value = ''
## __hash__
#
def __hash__(self):
return hash(self.Key + self.Value)
## __eq__
#
def __eq__(self, Other):
return id(self) == id(Other)
## __str__
#
def __str__(self):
return str(self.ArchAndModuleType) + '\n' + self.Key + \
' = ' + self.Value
## DecIncludeObject
#
# Class to hold include section info
#
class DecIncludeObject(_DecBaseObject):
def __init__(self, PkgFullName):
_DecBaseObject.__init__(self, PkgFullName)
self._SecName = TAB_INCLUDES.upper()
## GetIncludes
#
def GetIncludes(self, Arch=TAB_ARCH_COMMON):
return self._GetItemByArch(Arch)
## GetAllIncludes
#
def GetAllIncludes(self):
return self._GetAllItems()
## DecIncludeItemObject
#
# Item of include section
#
class DecIncludeItemObject(_DecItemBaseObject):
def __init__(self, File, Root):
self.File = File
self.Root = Root
_DecItemBaseObject.__init__(self)
## __hash__
#
def __hash__(self):
return hash(self.File)
## __eq__
#
def __eq__(self, Other):
return id(self) == id(Other)
## __str__
#
def __str__(self):
return self.File
## DecLibraryclassObject
#
# Class to hold library class section info
#
class DecLibraryclassObject(_DecBaseObject):
def __init__(self, PkgFullName):
_DecBaseObject.__init__(self, PkgFullName)
self._PackagePath, self._FileName = os.path.split(PkgFullName)
self._SecName = TAB_LIBRARY_CLASSES.upper()
## GetLibraryclasses
#
def GetLibraryclasses(self, Arch=TAB_ARCH_COMMON):
return self._GetItemByArch(Arch)
## GetAllLibraryclasses
#
def GetAllLibraryclasses(self):
return self._GetAllItems()
## DecLibraryclassItemObject
# Item of library class section
#
class DecLibraryclassItemObject(_DecItemBaseObject):
def __init__(self, Libraryclass, File, Root):
_DecItemBaseObject.__init__(self)
self.File = File
self.Root = Root
self.Libraryclass = Libraryclass
## __hash__
#
def __hash__(self):
return hash(self.Libraryclass + self.File)
## __eq__
#
def __eq__(self, Other):
return id(self) == id(Other)
## __str__
#
def __str__(self):
return self.Libraryclass + '|' + self.File
## DecPcdObject
# Class to hold PCD section
#
class DecPcdObject(_DecBaseObject):
def __init__(self, PkgFullName):
_DecBaseObject.__init__(self, PkgFullName)
self._SecName = TAB_PCDS.upper()
## AddItem
#
# Diff from base class
#
# @param Item: Item
# @param Scope: Scope
#
def AddItem(self, Item, Scope):
if not Scope:
return
if not Item:
return
ArchModule = []
for Type, Arch in Scope:
if (Type, Arch) in self.ValueDict:
self.ValueDict[Type, Arch].append(Item)
else:
self.ValueDict[Type, Arch] = [Item]
ArchModule.append([Type, Arch])
Item.ArchAndModuleType = ArchModule
## GetPcds
#
# @param PcdType: PcdType
# @param Arch: Arch
#
def GetPcds(self, PcdType, Arch=TAB_ARCH_COMMON):
PcdType = PcdType.upper()
Arch = Arch.upper()
if (PcdType, Arch) not in self.ValueDict:
return []
return self.ValueDict[PcdType, Arch]
## GetPcdsByType
#
# @param PcdType: PcdType
#
def GetPcdsByType(self, PcdType):
PcdType = PcdType.upper()
Retlst = []
for TypeInDict, Arch in self.ValueDict:
if TypeInDict != PcdType:
continue
for Item in self.ValueDict[PcdType, Arch]:
if Item not in Retlst:
Retlst.append(Item)
return Retlst
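##
# Illustrative note: unlike the other DEC section objects, DecPcdObject keys
# its ValueDict with (PcdType, Arch) tuples, so the Scope entries handed to
# AddItem() are (Type, Arch) pairs. The type and arch values below are example
# assumptions.
#
#   PcdSec = DecPcdObject('MdePkg/MdePkg.dec')
#   Item = DecPcdItemObject('gExampleGuid', 'PcdExample', '0x1', 'UINT32', '0x1')
#   PcdSec.AddItem(Item, [('PCDSFIXEDATBUILD', 'COMMON')])
#   PcdSec.GetPcds('PcdsFixedAtBuild')         # -> [Item]
#   PcdSec.GetPcdsByType('PcdsFixedAtBuild')   # -> [Item]
#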
## DecPcdItemObject
#
# Item of PCD section
#
# @param _DecItemBaseObject: _DecItemBaseObject object
#
class DecPcdItemObject(_DecItemBaseObject):
def __init__(self, Guid, Name, Value, DatumType,
Token, MaxDatumSize=''):
_DecItemBaseObject.__init__(self)
self.TokenCName = Name
self.TokenSpaceGuidCName = Guid
self.DatumType = DatumType
self.DefaultValue = Value
self.TokenValue = Token
self.MaxDatumSize = MaxDatumSize
## __hash__
#
def __hash__(self):
return hash(self.TokenSpaceGuidCName + self.TokenCName)
## __eq__
#
def __eq__(self, Other):
return id(self) == id(Other)
## GetArchListOfType
#
# @param PcdType: PcdType
#
def GetArchListOfType(self, PcdType):
ItemSet = set()
PcdType = PcdType.upper()
for Type, Arch in self.ArchAndModuleType:
if Type != PcdType:
continue
ItemSet.add(Arch)
return list(ItemSet)
## DecGuidObjectBase
#
# Base class for PPI, Protocol, and GUID.
# Holds the same data but exposes differently named methods for clarity in sub-classes
#
# @param _DecBaseObject: Dec Base Object
#
class DecGuidObjectBase(_DecBaseObject):
def __init__(self, PkgFullName):
_DecBaseObject.__init__(self, PkgFullName)
## GetGuidStyleItems
#
# @param Arch: Arch
#
def GetGuidStyleItems(self, Arch=TAB_ARCH_COMMON):
return self._GetItemByArch(Arch)
## GetGuidStyleAllItems
#
def GetGuidStyleAllItems(self):
return self._GetAllItems()
## DecGuidItemObject
#
# Item of GUID, PPI and Protocol section
#
# @param _DecItemBaseObject: Dec Item Base Object
#
class DecGuidItemObject(_DecItemBaseObject):
def __init__(self, CName, GuidCValue, GuidString):
_DecItemBaseObject.__init__(self)
self.GuidCName = CName
self.GuidCValue = GuidCValue
self.GuidString = GuidString
## __hash__
#
def __hash__(self):
return hash(self.GuidCName)
## __eq__
#
def __eq__(self, Other):
return id(self) == id(Other)
## __str__
#
def __str__(self):
return self.GuidCName + ' = ' + self.GuidCValue
## DecGuidObject
#
# Class for GUID section
#
# @param DecGuidObjectBase: Dec Guid Object Base
#
class DecGuidObject(DecGuidObjectBase):
def __init__(self, PkgFullName):
DecGuidObjectBase.__init__(self, PkgFullName)
self._SecName = TAB_GUIDS.upper()
## GetGuids
#
# @param Arch: Arch
#
def GetGuids(self, Arch=TAB_ARCH_COMMON):
return self._GetItemByArch(Arch)
## GetAllGuids
#
def GetAllGuids(self):
return self._GetAllItems()
## DecPpiObject
#
# Class for PPI section
#
# @param DecGuidObjectBase: Dec Guid Object Base
#
class DecPpiObject(DecGuidObjectBase):
def __init__(self, PkgFullName):
DecGuidObjectBase.__init__(self, PkgFullName)
self._SecName = TAB_PPIS.upper()
## GetPpis
#
# @param Arch: Arch
#
def GetPpis(self, Arch=TAB_ARCH_COMMON):
return self._GetItemByArch(Arch)
## GetAllPpis
#
def GetAllPpis(self):
return self._GetAllItems()
## DecProtocolObject
#
# Class for protocol section
#
# @param DecGuidObjectBase: Dec Guid Object Base
#
class DecProtocolObject(DecGuidObjectBase):
def __init__(self, PkgFullName):
DecGuidObjectBase.__init__(self, PkgFullName)
self._SecName = TAB_PROTOCOLS.upper()
## GetProtocols
#
# @param Arch: Arch
#
def GetProtocols(self, Arch=TAB_ARCH_COMMON):
return self._GetItemByArch(Arch)
## GetAllProtocols
#
def GetAllProtocols(self):
return self._GetAllItems()
## DecUserExtensionObject
#
# Class for user extension section
#
# @param _DecBaseObject: Dec Guid Object Base
#
class DecUserExtensionObject(_DecBaseObject):
def __init__(self, PkgFullName):
_DecBaseObject.__init__(self, PkgFullName)
self._SecName = TAB_USER_EXTENSIONS.upper()
self.ItemList = []
    ## AddItem
#
# @param Item: Item
# @param Scope: Scope
#
def AddItem(self, Item, Scope):
if not Scope:
pass
if not Item:
return
self.ItemList.append(Item)
## GetAllUserExtensions
#
def GetAllUserExtensions(self):
return self.ItemList
## DecUserExtensionItemObject
# Item for user extension section
#
# @param _DecItemBaseObject: Dec Item Base Object
#
class DecUserExtensionItemObject(_DecItemBaseObject):
def __init__(self):
_DecItemBaseObject.__init__(self)
self.UserString = ''
self.UserId = ''
self.IdString = ''
| edk2-master | BaseTools/Source/Python/UPT/Object/Parser/DecObject.py |
## @file
# This file is used to define class objects of INF file [Guids] section.
# It will be consumed by InfParser.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
'''
InfGuidObject
'''
from Library.ParserValidate import IsValidCVariableName
from Library.CommentParsing import ParseComment
from Library.ExpressionValidate import IsValidFeatureFlagExp
from Library.Misc import Sdict
from Library import DataType as DT
import Logger.Log as Logger
from Logger import ToolError
from Logger import StringTable as ST
class InfGuidItemCommentContent():
def __init__(self):
#
# ## SOMETIMES_CONSUMES ## Variable:L"MemoryTypeInformation"
# TailString.
#
#
# SOMETIMES_CONSUMES
#
self.UsageItem = ''
#
# Variable
#
self.GuidTypeItem = ''
#
# MemoryTypeInformation
#
self.VariableNameItem = ''
#
# TailString
#
self.HelpStringItem = ''
def SetUsageItem(self, UsageItem):
self.UsageItem = UsageItem
def GetUsageItem(self):
return self.UsageItem
def SetGuidTypeItem(self, GuidTypeItem):
self.GuidTypeItem = GuidTypeItem
def GetGuidTypeItem(self):
return self.GuidTypeItem
def SetVariableNameItem(self, VariableNameItem):
self.VariableNameItem = VariableNameItem
def GetVariableNameItem(self):
return self.VariableNameItem
def SetHelpStringItem(self, HelpStringItem):
self.HelpStringItem = HelpStringItem
def GetHelpStringItem(self):
return self.HelpStringItem
class InfGuidItem():
def __init__(self):
self.Name = ''
self.FeatureFlagExp = ''
#
# A list contain instance of InfGuidItemCommentContent
#
self.CommentList = []
self.SupArchList = []
def SetName(self, Name):
self.Name = Name
def GetName(self):
return self.Name
def SetFeatureFlagExp(self, FeatureFlagExp):
self.FeatureFlagExp = FeatureFlagExp
def GetFeatureFlagExp(self):
return self.FeatureFlagExp
def SetCommentList(self, CommentList):
self.CommentList = CommentList
def GetCommentList(self):
return self.CommentList
def SetSupArchList(self, SupArchList):
self.SupArchList = SupArchList
def GetSupArchList(self):
return self.SupArchList
## ParseComment
#
# ParseComment
#
def ParseGuidComment(CommentsList, InfGuidItemObj):
#
# Get/Set Usage and HelpString
#
if CommentsList is not None and len(CommentsList) != 0 :
CommentInsList = []
PreUsage = None
PreGuidType = None
PreHelpText = ''
BlockFlag = -1
Count = 0
for CommentItem in CommentsList:
Count = Count + 1
CommentItemUsage, \
CommentItemGuidType, \
CommentItemVarString, \
CommentItemHelpText = \
ParseComment(CommentItem,
DT.ALL_USAGE_TOKENS,
DT.GUID_TYPE_TOKENS,
[],
True)
if CommentItemHelpText is None:
CommentItemHelpText = ''
if Count == len(CommentsList) and CommentItemUsage == CommentItemGuidType == DT.ITEM_UNDEFINED:
CommentItemHelpText = DT.END_OF_LINE
if Count == len(CommentsList):
if BlockFlag == 1 or BlockFlag == 2:
if CommentItemUsage == CommentItemGuidType == DT.ITEM_UNDEFINED:
BlockFlag = 4
else:
BlockFlag = 3
if BlockFlag == -1:
BlockFlag = 4
if BlockFlag == -1 or BlockFlag == 1 or BlockFlag == 2:
if CommentItemUsage == CommentItemGuidType == DT.ITEM_UNDEFINED:
if BlockFlag == -1:
BlockFlag = 1
elif BlockFlag == 1:
BlockFlag = 2
else:
if BlockFlag == 1 or BlockFlag == 2:
BlockFlag = 3
elif BlockFlag == -1:
BlockFlag = 4
#
# Combine two comment line if they are generic comment
#
if CommentItemUsage == CommentItemGuidType == PreUsage == PreGuidType == DT.ITEM_UNDEFINED:
CommentItemHelpText = PreHelpText + DT.END_OF_LINE + CommentItemHelpText
PreHelpText = CommentItemHelpText
if BlockFlag == 4:
CommentItemIns = InfGuidItemCommentContent()
CommentItemIns.SetUsageItem(CommentItemUsage)
CommentItemIns.SetGuidTypeItem(CommentItemGuidType)
CommentItemIns.SetVariableNameItem(CommentItemVarString)
if CommentItemHelpText == '' or CommentItemHelpText.endswith(DT.END_OF_LINE):
CommentItemHelpText = CommentItemHelpText.strip(DT.END_OF_LINE)
CommentItemIns.SetHelpStringItem(CommentItemHelpText)
CommentInsList.append(CommentItemIns)
BlockFlag = -1
PreUsage = None
PreGuidType = None
PreHelpText = ''
elif BlockFlag == 3:
#
# Add previous help string
#
CommentItemIns = InfGuidItemCommentContent()
CommentItemIns.SetUsageItem(DT.ITEM_UNDEFINED)
CommentItemIns.SetGuidTypeItem(DT.ITEM_UNDEFINED)
if PreHelpText == '' or PreHelpText.endswith(DT.END_OF_LINE):
PreHelpText = PreHelpText.strip(DT.END_OF_LINE)
CommentItemIns.SetHelpStringItem(PreHelpText)
CommentInsList.append(CommentItemIns)
#
# Add Current help string
#
CommentItemIns = InfGuidItemCommentContent()
CommentItemIns.SetUsageItem(CommentItemUsage)
CommentItemIns.SetGuidTypeItem(CommentItemGuidType)
CommentItemIns.SetVariableNameItem(CommentItemVarString)
if CommentItemHelpText == '' or CommentItemHelpText.endswith(DT.END_OF_LINE):
CommentItemHelpText = CommentItemHelpText.strip(DT.END_OF_LINE)
CommentItemIns.SetHelpStringItem(CommentItemHelpText)
CommentInsList.append(CommentItemIns)
BlockFlag = -1
PreUsage = None
PreGuidType = None
PreHelpText = ''
else:
PreUsage = CommentItemUsage
PreGuidType = CommentItemGuidType
PreHelpText = CommentItemHelpText
InfGuidItemObj.SetCommentList(CommentInsList)
else:
#
# Still need to set the USAGE/GUIDTYPE to undefined.
#
CommentItemIns = InfGuidItemCommentContent()
CommentItemIns.SetUsageItem(DT.ITEM_UNDEFINED)
CommentItemIns.SetGuidTypeItem(DT.ITEM_UNDEFINED)
InfGuidItemObj.SetCommentList([CommentItemIns])
return InfGuidItemObj
## InfGuidObject
#
# InfGuidObject
#
class InfGuidObject():
def __init__(self):
self.Guids = Sdict()
#
# Macro defined in this section should be only used in this section.
#
self.Macros = {}
def SetGuid(self, GuidList, Arch = None):
__SupportArchList = []
for ArchItem in Arch:
#
# Validate Arch
#
if (ArchItem == '' or ArchItem is None):
ArchItem = 'COMMON'
__SupportArchList.append(ArchItem)
for Item in GuidList:
#
# Get Comment content of this protocol
#
CommentsList = None
if len(Item) == 3:
CommentsList = Item[1]
CurrentLineOfItem = Item[2]
Item = Item[0]
InfGuidItemObj = InfGuidItem()
if len(Item) >= 1 and len(Item) <= 2:
#
                # Only GuidName contained
#
if not IsValidCVariableName(Item[0]):
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_INVALID_CNAME%(Item[0]),
File=CurrentLineOfItem[2],
Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
if (Item[0] != ''):
InfGuidItemObj.SetName(Item[0])
else:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_CNAME_MISSING,
File=CurrentLineOfItem[2],
Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
if len(Item) == 2:
#
# Contained CName and Feature Flag Express
# <statements> ::= <CName> ["|" <FeatureFlagExpress>]
# For GUID entry.
#
if Item[1].strip() == '':
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_MISSING,
File=CurrentLineOfItem[2],
Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
#
# Validate Feature Flag Express
#
FeatureFlagRtv = IsValidFeatureFlagExp(Item[1].strip())
if not FeatureFlagRtv[0]:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID%(FeatureFlagRtv[1]),
File=CurrentLineOfItem[2],
Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
InfGuidItemObj.SetFeatureFlagExp(Item[1])
if len(Item) != 1 and len(Item) != 2:
#
# Invalid format of GUID statement
#
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_GUID_PPI_PROTOCOL_SECTION_CONTENT_ERROR,
File=CurrentLineOfItem[2],
Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
InfGuidItemObj = ParseGuidComment(CommentsList, InfGuidItemObj)
InfGuidItemObj.SetSupArchList(__SupportArchList)
#
# Determine GUID name duplicate. Follow below rule:
#
# A GUID must not be duplicated within a [Guids] section.
# A GUID may appear in multiple architectural [Guids]
# sections. A GUID listed in an architectural [Guids]
# section must not be listed in the common architectural
# [Guids] section.
#
# NOTE: This check does not report an error currently.
#
for Item in self.Guids:
if Item.GetName() == InfGuidItemObj.GetName():
ItemSupArchList = Item.GetSupArchList()
for ItemArch in ItemSupArchList:
for GuidItemObjArch in __SupportArchList:
if ItemArch == GuidItemObjArch:
#
# ST.ERR_INF_PARSER_ITEM_DUPLICATE
#
pass
if ItemArch.upper() == 'COMMON' or GuidItemObjArch.upper() == 'COMMON':
#
# ST.ERR_INF_PARSER_ITEM_DUPLICATE_COMMON
#
pass
if (InfGuidItemObj) in self.Guids:
GuidList = self.Guids[InfGuidItemObj]
GuidList.append(InfGuidItemObj)
self.Guids[InfGuidItemObj] = GuidList
else:
GuidList = []
GuidList.append(InfGuidItemObj)
self.Guids[InfGuidItemObj] = GuidList
return True
def GetGuid(self):
return self.Guids
| edk2-master | BaseTools/Source/Python/UPT/Object/Parser/InfGuidObject.py |
## @file
# This file is used to define common class objects for INF file.
# It will be consumed by InfParser
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
'''
InfCommonObject
'''
## InfLineCommentObject
#
# Comment Object for any line in the INF file
#
# #
# # HeaderComment
# #
# Line # TailComment
#
class InfLineCommentObject():
def __init__(self):
self.HeaderComments = ''
self.TailComments = ''
def SetHeaderComments(self, HeaderComments):
self.HeaderComments = HeaderComments
def GetHeaderComments(self):
return self.HeaderComments
def SetTailComments(self, TailComments):
self.TailComments = TailComments
def GetTailComments(self):
return self.TailComments
## CurrentLine
#
class CurrentLine():
def __init__(self):
self.LineNo = ''
self.LineString = ''
self.FileName = ''
## SetLineNo
#
# @param LineNo: LineNo
#
def SetLineNo(self, LineNo):
self.LineNo = LineNo
## GetLineNo
#
def GetLineNo(self):
return self.LineNo
## SetLineString
#
# @param LineString: Line String content
#
def SetLineString(self, LineString):
self.LineString = LineString
## GetLineString
#
def GetLineString(self):
return self.LineString
## SetFileName
#
# @param FileName: File Name
#
def SetFileName(self, FileName):
self.FileName = FileName
## GetFileName
#
def GetFileName(self):
return self.FileName
##
# Inf Section common data
#
class InfSectionCommonDef():
def __init__(self):
#
# #
# # HeaderComments at here
# #
# [xxSection] TailComments at here
# data
#
self.HeaderComments = ''
self.TailComments = ''
#
# The support arch list of this section
#
self.SupArchList = []
#
# Store all section content
# Key is supported Arch
#
self.AllContent = {}
## SetHeaderComments
#
# @param HeaderComments: HeaderComments
#
def SetHeaderComments(self, HeaderComments):
self.HeaderComments = HeaderComments
## GetHeaderComments
#
def GetHeaderComments(self):
return self.HeaderComments
## SetTailComments
#
# @param TailComments: TailComments
#
def SetTailComments(self, TailComments):
self.TailComments = TailComments
## GetTailComments
#
def GetTailComments(self):
return self.TailComments
## SetSupArchList
#
# @param Arch: Arch
#
def SetSupArchList(self, Arch):
if Arch not in self.SupArchList:
self.SupArchList.append(Arch)
## GetSupArchList
#
def GetSupArchList(self):
return self.SupArchList
## SetAllContent
#
# @param ArchList: ArchList
# @param Content: Content
#
def SetAllContent(self, Content):
self.AllContent = Content
## GetAllContent
#
def GetAllContent(self):
return self.AllContent
| edk2-master | BaseTools/Source/Python/UPT/Object/Parser/InfCommonObject.py |
## @file
# This file is used to define class objects of INF file [Sources] section.
# It will be consumed by InfParser.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
'''
InfSourcesObject
'''
import os
from Logger import StringTable as ST
from Logger import ToolError
import Logger.Log as Logger
from Library import GlobalData
from Library.Misc import Sdict
from Library.ExpressionValidate import IsValidFeatureFlagExp
from Object.Parser.InfCommonObject import InfSectionCommonDef
from Library.Misc import ValidFile
from Library.ParserValidate import IsValidFamily
from Library.ParserValidate import IsValidPath
## __GenSourceInstance
#
#
def GenSourceInstance(Item, CurrentLineOfItem, ItemObj):
IsValidFileFlag = False
if len(Item) < 6 and len(Item) >= 1:
#
# File | Family | TagName | ToolCode | FeatureFlagExpr
#
if len(Item) == 5:
#
# Validate Feature Flag Express
#
if Item[4].strip() == '':
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_MISSING,
File=CurrentLineOfItem[2],
Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
#
# Validate FFE
#
FeatureFlagRtv = IsValidFeatureFlagExp(Item[4].strip())
if not FeatureFlagRtv[0]:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID%(FeatureFlagRtv[1]),
File=CurrentLineOfItem[2],
Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
ItemObj.SetFeatureFlagExp(Item[4])
if len(Item) >= 4:
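#
# A non-empty ToolCode is not permitted for source entries (see the
# error below); only the empty field is accepted and stored.
#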
if Item[3].strip() == '':
ItemObj.SetToolCode(Item[3])
else:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_TOOLCODE_NOT_PERMITTED%(Item[3]),
File=CurrentLineOfItem[2],
Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
if len(Item) >= 3:
if Item[2].strip() == '':
ItemObj.SetTagName(Item[2])
else:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_TAGNAME_NOT_PERMITTED%(Item[2]),
File=CurrentLineOfItem[2],
Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
if len(Item) >= 2:
if IsValidFamily(Item[1].strip()):
#
# To align with UDP specification. "*" is not permitted in UDP specification
#
if Item[1].strip() == "*":
Item[1] = ""
ItemObj.SetFamily(Item[1])
else:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_SOURCE_SECTION_FAMILY_INVALID%(Item[1]),
File=CurrentLineOfItem[2],
Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
if len(Item) >= 1:
#
# Validate file name exist.
#
FullFileName = os.path.normpath(os.path.realpath(os.path.join(GlobalData.gINF_MODULE_DIR, Item[0])))
if not (ValidFile(FullFileName) or ValidFile(Item[0])):
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_FILELIST_EXIST%(Item[0]),
File=CurrentLineOfItem[2],
Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
#
# Validate file exist/format.
#
if IsValidPath(Item[0], GlobalData.gINF_MODULE_DIR):
IsValidFileFlag = True
else:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID%(Item[0]),
File=CurrentLineOfItem[2],
Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
return False
if IsValidFileFlag:
ItemObj.SetSourceFileName(Item[0])
else:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_SOURCES_SECTION_CONTENT_ERROR,
File=CurrentLineOfItem[2],
Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
return ItemObj
## InfSourcesItemObject()
#
#
class InfSourcesItemObject():
def __init__(self, \
SourceFileName = '', \
Family = '', \
TagName = '', \
ToolCode = '', \
FeatureFlagExp = ''):
self.SourceFileName = SourceFileName
self.Family = Family
self.TagName = TagName
self.ToolCode = ToolCode
self.FeatureFlagExp = FeatureFlagExp
self.HeaderString = ''
self.TailString = ''
self.SupArchList = []
def SetSourceFileName(self, SourceFilename):
self.SourceFileName = SourceFilename
def GetSourceFileName(self):
return self.SourceFileName
def SetFamily(self, Family):
self.Family = Family
def GetFamily(self):
return self.Family
def SetTagName(self, TagName):
self.TagName = TagName
def GetTagName(self):
return self.TagName
def SetToolCode(self, ToolCode):
self.ToolCode = ToolCode
def GetToolCode(self):
return self.ToolCode
def SetFeatureFlagExp(self, FeatureFlagExp):
self.FeatureFlagExp = FeatureFlagExp
def GetFeatureFlagExp(self):
return self.FeatureFlagExp
def SetHeaderString(self, HeaderString):
self.HeaderString = HeaderString
def GetHeaderString(self):
return self.HeaderString
def SetTailString(self, TailString):
self.TailString = TailString
def GetTailString(self):
return self.TailString
def SetSupArchList(self, SupArchList):
self.SupArchList = SupArchList
def GetSupArchList(self):
return self.SupArchList
##
#
#
#
class InfSourcesObject(InfSectionCommonDef):
def __init__(self):
self.Sources = Sdict()
InfSectionCommonDef.__init__(self)
def SetSources(self, SourceList, Arch = None):
__SupArchList = []
for ArchItem in Arch:
#
# Validate Arch
#
if (ArchItem == '' or ArchItem is None):
ArchItem = 'COMMON'
__SupArchList.append(ArchItem)
for Item in SourceList:
ItemObj = InfSourcesItemObject()
CurrentLineOfItem = Item[2]
Item = Item[0]
ItemObj = GenSourceInstance(Item, CurrentLineOfItem, ItemObj)
ItemObj.SetSupArchList(__SupArchList)
if (ItemObj) in self.Sources:
SourceContent = self.Sources[ItemObj]
SourceContent.append(ItemObj)
self.Sources[ItemObj] = SourceContent
else:
SourceContent = []
SourceContent.append(ItemObj)
self.Sources[ItemObj] = SourceContent
return True
def GetSources(self):
return self.Sources
| edk2-master | BaseTools/Source/Python/UPT/Object/Parser/InfSoucesObject.py |
## @file
# This file is used to define class objects of INF file [Pcds] section.
# It will be consumed by InfParser.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
'''
InfPcdObject
'''
import os
import re
from Logger import StringTable as ST
from Logger import ToolError
import Logger.Log as Logger
from Library import GlobalData
from Library import DataType as DT
from Library.Misc import Sdict
from Library.Misc import GetHelpStringByRemoveHashKey
from Library.ParserValidate import IsValidPcdType
from Library.ParserValidate import IsValidCVariableName
from Library.ParserValidate import IsValidPcdValue
from Library.ParserValidate import IsValidArch
from Library.CommentParsing import ParseComment
from Library.StringUtils import GetSplitValueList
from Library.StringUtils import IsHexDigitUINT32
from Library.ExpressionValidate import IsValidFeatureFlagExp
from Parser.InfAsBuiltProcess import GetPackageListInfo
from Parser.DecParser import Dec
from Object.Parser.InfPackagesObject import InfPackageItem
def ValidateArch(ArchItem, PcdTypeItem1, LineNo, SupArchDict, SupArchList):
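#
# FeaturePcd sections keep a flat SupArchList; every other PCD section
# type records its (possibly space-separated) arch list per section type
# in SupArchDict.
#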
#
# Validate Arch
#
if (ArchItem == '' or ArchItem is None):
ArchItem = 'COMMON'
if PcdTypeItem1.upper() != DT.TAB_INF_FEATURE_PCD.upper():
ArchList = GetSplitValueList(ArchItem, ' ')
for ArchItemNew in ArchList:
if not IsValidArch(ArchItemNew):
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID % (ArchItemNew),
File=GlobalData.gINF_MODULE_NAME,
Line=LineNo,
ExtraData=ArchItemNew)
SupArchDict[PcdTypeItem1] = ArchList
else:
SupArchList.append(ArchItem)
return SupArchList, SupArchDict
def ParsePcdComment(CommentList, PcdTypeItem, PcdItemObj):
CommentInsList = []
PreUsage = None
PreHelpText = ''
BlockFlag = -1
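#
# BlockFlag tracks the state of the current comment block:
# -1 no block collected yet, 1/2 one or more generic (usage-undefined)
# lines collected, 3 a usage comment follows collected generic lines
# (both are flushed), 4 the block is complete and flushed as one item.
#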
FFEHelpText = ''
CommentItemHelpText = ''
Count = 0
for CommentItem in CommentList:
Count = Count + 1
CommentItemUsage, CommentType, CommentString, CommentItemHelpText = ParseComment(CommentItem,
DT.ALL_USAGE_TOKENS,
{},
[],
False)
if CommentType and CommentString:
pass
if PcdTypeItem == 'FeaturePcd':
CommentItemUsage = DT.USAGE_ITEM_CONSUMES
if CommentItemHelpText is None:
CommentItemHelpText = ''
if Count == 1:
FFEHelpText = CommentItemHelpText
else:
FFEHelpText = FFEHelpText + DT.END_OF_LINE + CommentItemHelpText
if Count == len(CommentList):
CommentItemHelpText = FFEHelpText
BlockFlag = 4
else:
continue
if CommentItemHelpText is None:
CommentItemHelpText = ''
if Count == len(CommentList) and CommentItemUsage == DT.ITEM_UNDEFINED:
CommentItemHelpText = DT.END_OF_LINE
if Count == len(CommentList) and (BlockFlag == 1 or BlockFlag == 2):
if CommentItemUsage == DT.ITEM_UNDEFINED:
BlockFlag = 4
else:
BlockFlag = 3
elif BlockFlag == -1 and Count == len(CommentList):
BlockFlag = 4
if BlockFlag == -1 or BlockFlag == 1 or BlockFlag == 2:
if CommentItemUsage == DT.ITEM_UNDEFINED:
if BlockFlag == -1:
BlockFlag = 1
elif BlockFlag == 1:
BlockFlag = 2
else:
if BlockFlag == 1 or BlockFlag == 2:
BlockFlag = 3
elif BlockFlag == -1:
BlockFlag = 4
#
# Combine two comment lines if they are both generic comments
#
if CommentItemUsage == PreUsage == DT.ITEM_UNDEFINED:
CommentItemHelpText = PreHelpText + DT.END_OF_LINE + CommentItemHelpText
PreHelpText = CommentItemHelpText
if BlockFlag == 4:
CommentItemIns = InfPcdItemCommentContent()
CommentItemIns.SetUsageItem(CommentItemUsage)
CommentItemIns.SetHelpStringItem(CommentItemHelpText)
CommentInsList.append(CommentItemIns)
BlockFlag = -1
PreUsage = None
PreHelpText = ''
elif BlockFlag == 3:
#
# Add previous help string
#
CommentItemIns = InfPcdItemCommentContent()
CommentItemIns.SetUsageItem(DT.ITEM_UNDEFINED)
if PreHelpText == '' or PreHelpText.endswith(DT.END_OF_LINE):
PreHelpText += DT.END_OF_LINE
CommentItemIns.SetHelpStringItem(PreHelpText)
CommentInsList.append(CommentItemIns)
#
# Add Current help string
#
CommentItemIns = InfPcdItemCommentContent()
CommentItemIns.SetUsageItem(CommentItemUsage)
CommentItemIns.SetHelpStringItem(CommentItemHelpText)
CommentInsList.append(CommentItemIns)
BlockFlag = -1
PreUsage = None
PreHelpText = ''
else:
PreUsage = CommentItemUsage
PreHelpText = CommentItemHelpText
PcdItemObj.SetHelpStringList(CommentInsList)
return PcdItemObj
class InfPcdItemCommentContent():
def __init__(self):
#
# ## SOMETIMES_CONSUMES ## HelpString
#
self.UsageItem = ''
#
# Help String
#
self.HelpStringItem = ''
def SetUsageItem(self, UsageItem):
self.UsageItem = UsageItem
def GetUsageItem(self):
return self.UsageItem
def SetHelpStringItem(self, HelpStringItem):
self.HelpStringItem = HelpStringItem
def GetHelpStringItem(self):
return self.HelpStringItem
## InfPcdItem
#
# This class defined Pcd item used in Module files
#
# @param CName: Input value for CName, default is ''
# @param Token: Input value for Token, default is ''
# @param TokenSpaceGuidCName: Input value for TokenSpaceGuidCName, default
# is ''
# @param DatumType: Input value for DatumType, default is ''
# @param MaxDatumSize: Input value for MaxDatumSize, default is ''
# @param DefaultValue: Input value for DefaultValue, default is ''
# @param ItemType: Input value for ItemType, default is ''
# @param ValidUsage: Input value for ValidUsage, default is []
# @param SkuInfoList: Input value for SkuInfoList, default is {}
# @param SupModuleList: Input value for SupModuleList, default is []
#
class InfPcdItem():
def __init__(self):
self.CName = ''
self.Token = ''
self.TokenSpaceGuidCName = ''
self.TokenSpaceGuidValue = ''
self.DatumType = ''
self.MaxDatumSize = ''
self.DefaultValue = ''
self.Offset = ''
self.ValidUsage = ''
self.ItemType = ''
self.SupModuleList = []
self.HelpStringList = []
self.FeatureFlagExp = ''
self.SupArchList = []
self.PcdErrorsList = []
def SetCName(self, CName):
self.CName = CName
def GetCName(self):
return self.CName
def SetToken(self, Token):
self.Token = Token
def GetToken(self):
return self.Token
def SetTokenSpaceGuidCName(self, TokenSpaceGuidCName):
self.TokenSpaceGuidCName = TokenSpaceGuidCName
def GetTokenSpaceGuidCName(self):
return self.TokenSpaceGuidCName
def SetTokenSpaceGuidValue(self, TokenSpaceGuidValue):
self.TokenSpaceGuidValue = TokenSpaceGuidValue
def GetTokenSpaceGuidValue(self):
return self.TokenSpaceGuidValue
def SetDatumType(self, DatumType):
self.DatumType = DatumType
def GetDatumType(self):
return self.DatumType
def SetMaxDatumSize(self, MaxDatumSize):
self.MaxDatumSize = MaxDatumSize
def GetMaxDatumSize(self):
return self.MaxDatumSize
def SetDefaultValue(self, DefaultValue):
self.DefaultValue = DefaultValue
def GetDefaultValue(self):
return self.DefaultValue
def SetPcdErrorsList(self, PcdErrorsList):
self.PcdErrorsList = PcdErrorsList
def GetPcdErrorsList(self):
return self.PcdErrorsList
def SetItemType(self, ItemType):
self.ItemType = ItemType
def GetItemType(self):
return self.ItemType
def SetSupModuleList(self, SupModuleList):
self.SupModuleList = SupModuleList
def GetSupModuleList(self):
return self.SupModuleList
def SetHelpStringList(self, HelpStringList):
self.HelpStringList = HelpStringList
def GetHelpStringList(self):
return self.HelpStringList
def SetFeatureFlagExp(self, FeatureFlagExp):
self.FeatureFlagExp = FeatureFlagExp
def GetFeatureFlagExp(self):
return self.FeatureFlagExp
def SetSupportArchList(self, ArchList):
self.SupArchList = ArchList
def GetSupportArchList(self):
return self.SupArchList
def SetOffset(self, Offset):
self.Offset = Offset
def GetOffset(self):
return self.Offset
def SetValidUsage(self, ValidUsage):
self.ValidUsage = ValidUsage
def GetValidUsage(self):
return self.ValidUsage
##
#
#
#
class InfPcdObject():
def __init__(self, FileName):
self.Pcds = Sdict()
self.FileName = FileName
def SetPcds(self, PcdContent, KeysList=None, PackageInfo=None):
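#
# As-built (binary) INF files are routed to SetAsBuildPcds, which
# resolves token and datum type from the declaring DEC packages;
# source INF PCD entries are validated field by field below.
#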
if GlobalData.gIS_BINARY_INF:
self.SetAsBuildPcds(PcdContent, KeysList, PackageInfo)
return True
#
# Validate Arch
#
SupArchList = []
SupArchDict = {}
PcdTypeItem = ''
for (PcdTypeItem1, ArchItem, LineNo) in KeysList:
SupArchList, SupArchDict = ValidateArch(ArchItem, PcdTypeItem1, LineNo, SupArchDict, SupArchList)
#
# Validate PcdType
#
if (PcdTypeItem1 == '' or PcdTypeItem1 is None):
return False
else:
if not IsValidPcdType(PcdTypeItem1):
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_PCD_SECTION_TYPE_ERROR % (DT.PCD_USAGE_TYPE_LIST_OF_MODULE),
File=GlobalData.gINF_MODULE_NAME,
Line=LineNo,
ExtraData=PcdTypeItem1)
return False
PcdTypeItem = PcdTypeItem1
for PcdItem in PcdContent:
PcdItemObj = InfPcdItem()
CommentList = PcdItem[1]
CurrentLineOfPcdItem = PcdItem[2]
PcdItem = PcdItem[0]
if CommentList is not None and len(CommentList) != 0:
PcdItemObj = ParsePcdComment(CommentList, PcdTypeItem, PcdItemObj)
else:
CommentItemIns = InfPcdItemCommentContent()
CommentItemIns.SetUsageItem(DT.ITEM_UNDEFINED)
PcdItemObj.SetHelpStringList([CommentItemIns])
if len(PcdItem) >= 1 and len(PcdItem) <= 3:
PcdItemObj = SetPcdName(PcdItem, CurrentLineOfPcdItem, PcdItemObj)
if len(PcdItem) >= 2 and len(PcdItem) <= 3:
#
# Contain PcdName and Value, validate value.
#
if IsValidPcdValue(PcdItem[1]) or PcdItem[1].strip() == "":
PcdItemObj.SetDefaultValue(PcdItem[1])
else:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_PCD_VALUE_INVALID,
File=CurrentLineOfPcdItem[2],
Line=CurrentLineOfPcdItem[1],
ExtraData=PcdItem[1])
if len(PcdItem) == 3:
#
# Contains PcdName, value, and a FeatureFlag expression
#
#
# Validate Feature Flag Express
#
if PcdItem[2].strip() == '':
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_MISSING,
File=CurrentLineOfPcdItem[2],
Line=CurrentLineOfPcdItem[1],
ExtraData=CurrentLineOfPcdItem[0])
#
# Validate FFE
#
FeatureFlagRtv = IsValidFeatureFlagExp(PcdItem[2].strip())
if not FeatureFlagRtv[0]:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID % (FeatureFlagRtv[1]),
File=CurrentLineOfPcdItem[2],
Line=CurrentLineOfPcdItem[1],
ExtraData=CurrentLineOfPcdItem[0])
PcdItemObj.SetFeatureFlagExp(PcdItem[2])
if len(PcdItem) < 1 or len(PcdItem) > 3:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_PCD_SECTION_CONTENT_ERROR,
File=CurrentLineOfPcdItem[2],
Line=CurrentLineOfPcdItem[1],
ExtraData=CurrentLineOfPcdItem[0])
return False
if PcdTypeItem.upper() != DT.TAB_INF_FEATURE_PCD.upper():
PcdItemObj.SetSupportArchList(SupArchDict[PcdTypeItem])
else:
PcdItemObj.SetSupportArchList(SupArchList)
if (PcdTypeItem, PcdItemObj) in self.Pcds:
PcdsList = self.Pcds[PcdTypeItem, PcdItemObj]
PcdsList.append(PcdItemObj)
self.Pcds[PcdTypeItem, PcdItemObj] = PcdsList
else:
PcdsList = []
PcdsList.append(PcdItemObj)
self.Pcds[PcdTypeItem, PcdItemObj] = PcdsList
return True
def SetAsBuildPcds(self, PcdContent, KeysList=None, PackageInfo=None):
for PcdItem in PcdContent:
PcdItemObj = InfPcdItem()
CommentList = PcdItem[1]
CurrentLineOfPcdItem = PcdItem[2]
PcdItem = PcdItem[0]
CommentString = ''
for CommentLine in CommentList:
CommentString = GetHelpStringByRemoveHashKey(CommentLine)
CommentItemIns = InfPcdItemCommentContent()
CommentItemIns.SetHelpStringItem(CommentString)
CommentItemIns.SetUsageItem(CommentString)
PcdItemObj.SetHelpStringList(PcdItemObj.GetHelpStringList() + [CommentItemIns])
if PcdItemObj.GetValidUsage():
PcdItemObj.SetValidUsage(PcdItemObj.GetValidUsage() + DT.TAB_VALUE_SPLIT + CommentString)
else:
PcdItemObj.SetValidUsage(CommentString)
PcdItemObj.SetItemType(KeysList[0][0])
#
# Set PcdTokenSpaceCName and CName
#
PcdItemObj = SetPcdName(PcdItem, CurrentLineOfPcdItem, PcdItemObj)
#
# Set Value/DatumType/OffSet/Token
#
PcdItemObj = SetValueDatumTypeMaxSizeToken(PcdItem,
CurrentLineOfPcdItem,
PcdItemObj,
KeysList[0][1],
PackageInfo)
PcdTypeItem = KeysList[0][0]
if (PcdTypeItem, PcdItemObj) in self.Pcds:
PcdsList = self.Pcds[PcdTypeItem, PcdItemObj]
PcdsList.append(PcdItemObj)
self.Pcds[PcdTypeItem, PcdItemObj] = PcdsList
else:
PcdsList = []
PcdsList.append(PcdItemObj)
self.Pcds[PcdTypeItem, PcdItemObj] = PcdsList
def GetPcds(self):
return self.Pcds
def ParserPcdInfoInDec(String):
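# String is expected to be a DEC PCD declaration flattened with '|'
# separators (name|value|datum type|token); only DatumType and Token
# are returned.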
ValueList = GetSplitValueList(String, DT.TAB_VALUE_SPLIT, 3)
#
# DatumType, Token
#
return ValueList[2], ValueList[3]
def SetValueDatumTypeMaxSizeToken(PcdItem, CurrentLineOfPcdItem, PcdItemObj, Arch, PackageInfo=None):
#
# Package information has not been generated yet, so we need to parse the INF file to get it.
#
if not PackageInfo:
PackageInfo = []
InfFileName = CurrentLineOfPcdItem[2]
PackageInfoList = GetPackageListInfo(InfFileName, GlobalData.gWORKSPACE, -1)
for PackageInfoListItem in PackageInfoList:
PackageInfoIns = InfPackageItem()
PackageInfoIns.SetPackageName(PackageInfoListItem)
PackageInfo.append(PackageInfoIns)
PcdInfoInDecHasFound = False
for PackageItem in PackageInfo:
if PcdInfoInDecHasFound:
break
PackageName = PackageItem.PackageName
#
# Open DEC file to get information
#
FullFileName = os.path.normpath(os.path.realpath(os.path.join(GlobalData.gWORKSPACE, PackageName)))
DecParser = None
if FullFileName not in GlobalData.gPackageDict:
DecParser = Dec(FullFileName)
GlobalData.gPackageDict[FullFileName] = DecParser
else:
DecParser = GlobalData.gPackageDict[FullFileName]
#
# Find PCD information.
#
DecPcdsDict = DecParser.GetPcdSectionObject().ValueDict
for Key in DecPcdsDict.keys():
if (Key[0] == 'PCDSDYNAMICEX' and PcdItemObj.GetItemType() == 'PcdEx') and \
(Key[1] == 'COMMON' or Key[1] == Arch):
for PcdInDec in DecPcdsDict[Key]:
if PcdInDec.TokenCName == PcdItemObj.CName and \
PcdInDec.TokenSpaceGuidCName == PcdItemObj.TokenSpaceGuidCName:
PcdItemObj.SetToken(PcdInDec.TokenValue)
PcdItemObj.SetDatumType(PcdInDec.DatumType)
PcdItemObj.SetSupportArchList([Arch])
PcdItemObj.SetDefaultValue(PcdInDec.DefaultValue)
if (Key[0] == 'PCDSPATCHABLEINMODULE' and PcdItemObj.GetItemType() == 'PatchPcd') and \
(Key[1] == 'COMMON' or Key[1] == Arch):
for PcdInDec in DecPcdsDict[Key]:
if PcdInDec.TokenCName == PcdItemObj.CName and \
PcdInDec.TokenSpaceGuidCName == PcdItemObj.TokenSpaceGuidCName:
PcdItemObj.SetToken(PcdInDec.TokenValue)
PcdItemObj.SetDatumType(PcdInDec.DatumType)
PcdItemObj.SetSupportArchList([Arch])
if PcdItemObj.GetDatumType() == 'VOID*':
if len(PcdItem) > 1:
PcdItemObj.SetMaxDatumSize('%s' % (len(GetSplitValueList(PcdItem[1], DT.TAB_COMMA_SPLIT))))
DecGuidsDict = DecParser.GetGuidSectionObject().ValueDict
for Key in DecGuidsDict.keys():
if Key == 'COMMON' or Key == Arch:
for GuidInDec in DecGuidsDict[Key]:
if GuidInDec.GuidCName == PcdItemObj.TokenSpaceGuidCName:
PcdItemObj.SetTokenSpaceGuidValue(GuidInDec.GuidString)
if PcdItemObj.GetItemType().upper() == DT.TAB_INF_PATCH_PCD.upper():
#
# Validate Value.
#
# convert the value from a decimal 0 to a formatted hex value.
if PcdItem[1] == "0":
DatumType = PcdItemObj.GetDatumType()
if DatumType == "UINT8":
PcdItem[1] = "0x00"
if DatumType == "UINT16":
PcdItem[1] = "0x0000"
if DatumType == "UINT32":
PcdItem[1] = "0x00000000"
if DatumType == "UINT64":
PcdItem[1] = "0x0000000000000000"
if ValidatePcdValueOnDatumType(PcdItem[1], PcdItemObj.GetDatumType()):
PcdItemObj.SetDefaultValue(PcdItem[1])
else:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_ASBUILD_PCD_VALUE_INVALID % ("\"" + PcdItem[1] + "\"", "\"" +
PcdItemObj.GetDatumType() + "\""),
File=CurrentLineOfPcdItem[2],
Line=CurrentLineOfPcdItem[1],
ExtraData=CurrentLineOfPcdItem[0])
#
# validate offset
#
if PcdItemObj.GetItemType().upper() == DT.TAB_INF_PATCH_PCD.upper():
if not IsHexDigitUINT32(PcdItem[2]):
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_ASBUILD_PCD_OFFSET_FORMAT_INVALID % ("\"" + PcdItem[2] + "\""),
File=CurrentLineOfPcdItem[2],
Line=CurrentLineOfPcdItem[1],
ExtraData=CurrentLineOfPcdItem[0])
PcdItemObj.SetOffset(PcdItem[2])
if PcdItemObj.GetToken() == '' or PcdItemObj.GetDatumType() == '':
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_ASBUILD_PCD_DECLARITION_MISS % ("\"" + PcdItem[0] + "\""),
File=CurrentLineOfPcdItem[2],
Line=CurrentLineOfPcdItem[1],
ExtraData=CurrentLineOfPcdItem[0])
return PcdItemObj
def ValidatePcdValueOnDatumType(Value, Type):
Value = Value.strip()
#
# The BOOLEAN type only allows 0x00 or 0x01 as a value, per the INF spec
#
if Type == 'BOOLEAN':
if not (Value == '0x00' or Value == '0x01'):
return False
elif Type == 'VOID*':
if not Value.startswith("{"):
return False
if not Value.endswith("}"):
return False
#
# Strip "{" at head and "}" at tail.
#
Value = Value[1:-1]
ValueList = GetSplitValueList(Value, DT.TAB_COMMA_SPLIT)
ReIsValidHexByte = re.compile("^0x[0-9a-f]{1,2}$", re.IGNORECASE)
for ValueItem in ValueList:
if not ReIsValidHexByte.match(ValueItem):
return False
elif Type == 'UINT8' or Type == 'UINT16' or Type == 'UINT32' or Type == 'UINT64':
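# Integer values must be full-width, zero-padded hex strings:
# exactly 2 hex digits for UINT8, 4 for UINT16, 8 for UINT32, 16 for UINT64.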
ReIsValidUint8z = re.compile('^0[x|X][a-fA-F0-9]{2}$')
ReIsValidUint16z = re.compile('^0[x|X][a-fA-F0-9]{4}$')
ReIsValidUint32z = re.compile('^0[x|X][a-fA-F0-9]{8}$')
ReIsValidUint64z = re.compile('^0[x|X][a-fA-F0-9]{16}$')
if not ReIsValidUint8z.match(Value) and Type == 'UINT8':
return False
elif not ReIsValidUint16z.match(Value) and Type == 'UINT16':
return False
elif not ReIsValidUint32z.match(Value) and Type == 'UINT32':
return False
elif not ReIsValidUint64z.match(Value) and Type == 'UINT64':
return False
else:
#
# Since we assume the DEC file is always correct, we should never get here.
#
pass
return True
def SetPcdName(PcdItem, CurrentLineOfPcdItem, PcdItemObj):
#
# Only PCD Name specified
# <PcdName> ::= <TokenSpaceGuidCName> "." <TokenCName>
#
PcdId = GetSplitValueList(PcdItem[0], DT.TAB_SPLIT)
if len(PcdId) != 2:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_PCD_NAME_FORMAT_ERROR,
File=CurrentLineOfPcdItem[2],
Line=CurrentLineOfPcdItem[1],
ExtraData=CurrentLineOfPcdItem[0])
else:
#
# Validate PcdTokenSpaceGuidCName
#
if not IsValidCVariableName(PcdId[0]):
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_PCD_CVAR_GUID,
File=CurrentLineOfPcdItem[2],
Line=CurrentLineOfPcdItem[1],
ExtraData=PcdId[0])
if not IsValidCVariableName(PcdId[1]):
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_PCD_CVAR_PCDCNAME,
File=CurrentLineOfPcdItem[2],
Line=CurrentLineOfPcdItem[1],
ExtraData=PcdId[1])
PcdItemObj.SetTokenSpaceGuidCName(PcdId[0])
PcdItemObj.SetCName(PcdId[1])
return PcdItemObj
| edk2-master | BaseTools/Source/Python/UPT/Object/Parser/InfPcdObject.py |
## @file
# This file is used to define class objects of [Defines] section for INF file.
# It will be consumed by InfParser
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
'''
InfDefineObject
'''
import os
import re
from Logger import StringTable as ST
from Logger import ToolError
from Library import GlobalData
from Library import DataType as DT
from Library.StringUtils import GetSplitValueList
from Library.Misc import CheckGuidRegFormat
from Library.Misc import Sdict
from Library.Misc import ConvPathFromAbsToRel
from Library.Misc import ValidateUNIFilePath
from Library.ExpressionValidate import IsValidFeatureFlagExp
from Library.ParserValidate import IsValidWord
from Library.ParserValidate import IsValidInfMoudleType
from Library.ParserValidate import IsValidHex
from Library.ParserValidate import IsValidHexVersion
from Library.ParserValidate import IsValidDecVersion
from Library.ParserValidate import IsValidCVariableName
from Library.ParserValidate import IsValidBoolType
from Library.ParserValidate import IsValidPath
from Library.ParserValidate import IsValidFamily
from Library.ParserValidate import IsValidIdentifier
from Library.ParserValidate import IsValidDecVersionVal
from Object.Parser.InfCommonObject import InfLineCommentObject
from Object.Parser.InfCommonObject import CurrentLine
from Object.Parser.InfCommonObject import InfSectionCommonDef
from Object.Parser.InfMisc import ErrorInInf
from Object.Parser.InfDefineCommonObject import InfDefineLibraryItem
from Object.Parser.InfDefineCommonObject import InfDefineEntryPointItem
from Object.Parser.InfDefineCommonObject import InfDefineUnloadImageItem
from Object.Parser.InfDefineCommonObject import InfDefineConstructorItem
from Object.Parser.InfDefineCommonObject import InfDefineDestructorItem
class InfDefSectionOptionRomInfo():
def __init__(self):
self.PciVendorId = None
self.PciDeviceId = None
self.PciClassCode = None
self.PciRevision = None
self.PciCompress = None
self.CurrentLine = ['', -1, '']
def SetPciVendorId(self, PciVendorId, Comments):
#
# Value has been set before.
#
if self.PciVendorId is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_PCI_VENDOR_ID),
LineInfo=self.CurrentLine)
return False
#
# The PciVendorId should be hex string.
#
if (IsValidHex(PciVendorId)):
self.PciVendorId = InfDefMember()
self.PciVendorId.SetValue(PciVendorId)
self.PciVendorId.Comments = Comments
return True
else:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(PciVendorId),
LineInfo=self.CurrentLine)
return False
def GetPciVendorId(self):
return self.PciVendorId
def SetPciDeviceId(self, PciDeviceId, Comments):
#
# Value has been set before.
#
if self.PciDeviceId is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_PCI_DEVICE_ID),
LineInfo=self.CurrentLine)
return False
#
# The PciDeviceId should be hex string.
#
if (IsValidHex(PciDeviceId)):
self.PciDeviceId = InfDefMember()
self.PciDeviceId.SetValue(PciDeviceId)
self.PciDeviceId.Comments = Comments
return True
else:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(PciDeviceId),
LineInfo=self.CurrentLine)
return False
def GetPciDeviceId(self):
return self.PciDeviceId
def SetPciClassCode(self, PciClassCode, Comments):
#
# Value has been set before.
#
if self.PciClassCode is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_PCI_CLASS_CODE),
LineInfo=self.CurrentLine)
return False
#
# The PciClassCode should be 4 bytes hex string.
#
if (IsValidHex(PciClassCode)):
self.PciClassCode = InfDefMember()
self.PciClassCode.SetValue(PciClassCode)
self.PciClassCode.Comments = Comments
return True
else:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%\
(PciClassCode),
LineInfo=self.CurrentLine)
return False
def GetPciClassCode(self):
return self.PciClassCode
def SetPciRevision(self, PciRevision, Comments):
#
# Value has been set before.
#
if self.PciRevision is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_PCI_REVISION),
LineInfo=self.CurrentLine)
return False
#
# The PciRevision should be 4 bytes hex string.
#
if (IsValidHex(PciRevision)):
self.PciRevision = InfDefMember()
self.PciRevision.SetValue(PciRevision)
self.PciRevision.Comments = Comments
return True
else:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(PciRevision),
LineInfo=self.CurrentLine)
return False
def GetPciRevision(self):
return self.PciRevision
def SetPciCompress(self, PciCompress, Comments):
#
# Value has been set before.
#
if self.PciCompress is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_PCI_COMPRESS),
LineInfo=self.CurrentLine)
return False
#
# The PciCompress should be 'TRUE' or 'FALSE'.
#
if (PciCompress == 'TRUE' or PciCompress == 'FALSE'):
self.PciCompress = InfDefMember()
self.PciCompress.SetValue(PciCompress)
self.PciCompress.Comments = Comments
return True
else:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(PciCompress),
LineInfo=self.CurrentLine)
return False
def GetPciCompress(self):
return self.PciCompress
##
# INF [Define] section Object
#
class InfDefSection(InfDefSectionOptionRomInfo):
def __init__(self):
self.BaseName = None
self.FileGuid = None
self.ModuleType = None
self.ModuleUniFileName = None
self.InfVersion = None
self.EdkReleaseVersion = None
self.UefiSpecificationVersion = None
self.PiSpecificationVersion = None
self.LibraryClass = []
self.Package = None
self.VersionString = None
self.PcdIsDriver = None
self.EntryPoint = []
self.UnloadImages = []
self.Constructor = []
self.Destructor = []
self.Shadow = None
self.CustomMakefile = []
self.Specification = []
self.UefiHiiResourceSection = None
self.DpxSource = []
self.CurrentLine = ['', -1, '']
InfDefSectionOptionRomInfo.__init__(self)
## SetHeadComment
#
# @param BaseName: BaseName
#
def SetBaseName(self, BaseName, Comments):
#
# Value has been set before.
#
if self.BaseName is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_BASE_NAME),
LineInfo=self.CurrentLine)
return False
if not (BaseName == '' or BaseName is None):
if IsValidWord(BaseName) and not BaseName.startswith("_"):
self.BaseName = InfDefMember()
self.BaseName.SetValue(BaseName)
self.BaseName.Comments = Comments
return True
else:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_NAME_INVALID%(BaseName),
LineInfo=self.CurrentLine)
return False
## GetBaseName
#
def GetBaseName(self):
return self.BaseName
## SetFileGuid
#
# @param FileGuid: FileGuid
#
def SetFileGuid(self, FileGuid, Comments):
#
# Value has been set before.
#
if self.FileGuid is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
%(DT.TAB_INF_DEFINES_FILE_GUID),
LineInfo=self.CurrentLine)
return False
#
# Do verification of GUID content/format
#
if (CheckGuidRegFormat(FileGuid)):
self.FileGuid = InfDefMember()
self.FileGuid.SetValue(FileGuid)
self.FileGuid.Comments = Comments
return True
else:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_GUID_INVALID%(FileGuid),
LineInfo=self.CurrentLine)
return False
## GetFileGuid
#
def GetFileGuid(self):
return self.FileGuid
## SetModuleType
#
# @param ModuleType: ModuleType
#
def SetModuleType(self, ModuleType, Comments):
#
# Value has been set before.
#
if self.ModuleType is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
%(DT.TAB_INF_DEFINES_MODULE_TYPE),
LineInfo=self.CurrentLine)
return False
#
# Valid Module Type or not
#
if (IsValidInfMoudleType(ModuleType)):
self.ModuleType = InfDefMember()
self.ModuleType.SetValue(ModuleType)
self.ModuleType.CurrentLine = CurrentLine()
self.ModuleType.CurrentLine.SetLineNo(self.CurrentLine[1])
self.ModuleType.CurrentLine.SetLineString(self.CurrentLine[2])
self.ModuleType.CurrentLine.SetFileName(self.CurrentLine[0])
self.ModuleType.Comments = Comments
return True
else:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_MODULETYPE_INVALID%\
(ModuleType),
LineInfo=self.CurrentLine)
return False
## GetModuleType
#
def GetModuleType(self):
return self.ModuleType
## SetModuleUniFileName
#
# @param ModuleUniFileName: ModuleUniFileName
#
def SetModuleUniFileName(self, ModuleUniFileName, Comments):
if Comments:
pass
if self.ModuleUniFileName is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_MODULE_UNI_FILE),
LineInfo=self.CurrentLine)
self.ModuleUniFileName = ModuleUniFileName
## GetModuleType
#
def GetModuleUniFileName(self):
return self.ModuleUniFileName
## SetInfVersion
#
# @param InfVersion: InfVersion
#
def SetInfVersion(self, InfVersion, Comments):
#
# Value has been set before.
#
if self.InfVersion is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
%(DT.TAB_INF_DEFINES_INF_VERSION),
LineInfo=self.CurrentLine)
return False
#
# The InfVersion should be 4 bytes hex string.
#
if (IsValidHex(InfVersion)):
if (InfVersion < '0x00010005'):
ErrorInInf(ST.ERR_INF_PARSER_NOT_SUPPORT_EDKI_INF,
ErrorCode=ToolError.EDK1_INF_ERROR,
LineInfo=self.CurrentLine)
elif IsValidDecVersionVal(InfVersion):
if (int(InfVersion) < 65541):
ErrorInInf(ST.ERR_INF_PARSER_NOT_SUPPORT_EDKI_INF,
ErrorCode=ToolError.EDK1_INF_ERROR,
LineInfo=self.CurrentLine)
else:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(InfVersion),
LineInfo=self.CurrentLine)
return False
self.InfVersion = InfDefMember()
self.InfVersion.SetValue(InfVersion)
self.InfVersion.Comments = Comments
return True
## GetInfVersion
#
def GetInfVersion(self):
return self.InfVersion
## SetEdkReleaseVersion
#
# @param EdkReleaseVersion: EdkReleaseVersion
#
def SetEdkReleaseVersion(self, EdkReleaseVersion, Comments):
#
# Value has been set before.
#
if self.EdkReleaseVersion is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
%(DT.TAB_INF_DEFINES_EDK_RELEASE_VERSION),
LineInfo=self.CurrentLine)
return False
#
# The EdkReleaseVersion should be a 4-byte hex version string or a decimal version.
#
if IsValidHexVersion(EdkReleaseVersion) or \
IsValidDecVersionVal(EdkReleaseVersion):
self.EdkReleaseVersion = InfDefMember()
self.EdkReleaseVersion.SetValue(EdkReleaseVersion)
self.EdkReleaseVersion.Comments = Comments
return True
else:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID\
%(EdkReleaseVersion),
LineInfo=self.CurrentLine)
return False
## GetEdkReleaseVersion
#
def GetEdkReleaseVersion(self):
return self.EdkReleaseVersion
## SetUefiSpecificationVersion
#
# @param UefiSpecificationVersion: UefiSpecificationVersion
#
def SetUefiSpecificationVersion(self, UefiSpecificationVersion, Comments):
#
# Value has been set before.
#
if self.UefiSpecificationVersion is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
%(DT.TAB_INF_DEFINES_UEFI_SPECIFICATION_VERSION),
LineInfo=self.CurrentLine)
return False
#
# The UefiSpecificationVersion should be a 4-byte hex version string or a decimal version.
#
if IsValidHexVersion(UefiSpecificationVersion) or \
IsValidDecVersionVal(UefiSpecificationVersion):
self.UefiSpecificationVersion = InfDefMember()
self.UefiSpecificationVersion.SetValue(UefiSpecificationVersion)
self.UefiSpecificationVersion.Comments = Comments
return True
else:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID\
%(UefiSpecificationVersion),
LineInfo=self.CurrentLine)
return False
## GetUefiSpecificationVersion
#
def GetUefiSpecificationVersion(self):
return self.UefiSpecificationVersion
## SetPiSpecificationVersion
#
# @param PiSpecificationVersion: PiSpecificationVersion
#
def SetPiSpecificationVersion(self, PiSpecificationVersion, Comments):
#
# Value has been set before.
#
if self.PiSpecificationVersion is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
%(DT.TAB_INF_DEFINES_PI_SPECIFICATION_VERSION),
LineInfo=self.CurrentLine)
return False
#
# The PiSpecificationVersion should be a 4-byte hex version string or a decimal version.
#
if IsValidHexVersion(PiSpecificationVersion) or \
IsValidDecVersionVal(PiSpecificationVersion):
self.PiSpecificationVersion = InfDefMember()
self.PiSpecificationVersion.SetValue(PiSpecificationVersion)
self.PiSpecificationVersion.Comments = Comments
return True
else:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID\
%(PiSpecificationVersion),
LineInfo=self.CurrentLine)
return False
## GetPiSpecificationVersion
#
def GetPiSpecificationVersion(self):
return self.PiSpecificationVersion
## SetLibraryClass
#
# @param LibraryClass: LibraryClass
#
def SetLibraryClass(self, LibraryClass, Comments):
ValueList = GetSplitValueList(LibraryClass)
Name = ValueList[0]
if IsValidWord(Name):
InfDefineLibraryItemObj = InfDefineLibraryItem()
InfDefineLibraryItemObj.SetLibraryName(Name)
InfDefineLibraryItemObj.Comments = Comments
if len(ValueList) == 2:
Type = ValueList[1]
TypeList = GetSplitValueList(Type, ' ')
TypeList = [Type for Type in TypeList if Type != '']
for Item in TypeList:
if Item not in DT.MODULE_LIST:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(Item),
LineInfo=self.CurrentLine)
return False
InfDefineLibraryItemObj.SetTypes(TypeList)
self.LibraryClass.append(InfDefineLibraryItemObj)
else:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(Name),
LineInfo=self.CurrentLine)
return False
return True
def GetLibraryClass(self):
return self.LibraryClass
def SetVersionString(self, VersionString, Comments):
#
# Value has been set before.
#
if self.VersionString is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
%(DT.TAB_INF_DEFINES_VERSION_STRING),
LineInfo=self.CurrentLine)
return False
if not IsValidDecVersion(VersionString):
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID\
%(VersionString),
LineInfo=self.CurrentLine)
self.VersionString = InfDefMember()
self.VersionString.SetValue(VersionString)
self.VersionString.Comments = Comments
return True
def GetVersionString(self):
return self.VersionString
def SetPcdIsDriver(self, PcdIsDriver, Comments):
#
# Value has been set before.
#
if self.PcdIsDriver is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND\
%(DT.TAB_INF_DEFINES_PCD_IS_DRIVER),
LineInfo=self.CurrentLine)
return False
if PcdIsDriver == 'PEI_PCD_DRIVER' or PcdIsDriver == 'DXE_PCD_DRIVER':
self.PcdIsDriver = InfDefMember()
self.PcdIsDriver.SetValue(PcdIsDriver)
self.PcdIsDriver.Comments = Comments
return True
else:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(PcdIsDriver),
LineInfo=self.CurrentLine)
return False
def GetPcdIsDriver(self):
return self.PcdIsDriver
#
# SetEntryPoint
#
def SetEntryPoint(self, EntryPoint, Comments):
#
# It can be a list
#
ValueList = []
TokenList = GetSplitValueList(EntryPoint, DT.TAB_VALUE_SPLIT)
ValueList[0:len(TokenList)] = TokenList
InfDefineEntryPointItemObj = InfDefineEntryPointItem()
if not IsValidCVariableName(ValueList[0]):
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%\
(ValueList[0]),
LineInfo=self.CurrentLine)
InfDefineEntryPointItemObj.SetCName(ValueList[0])
if len(ValueList) == 2:
if ValueList[1].strip() == '':
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%\
(ValueList[1]),
LineInfo=self.CurrentLine)
#
# Validate FFE
#
FeatureFlagRtv = IsValidFeatureFlagExp(ValueList[1].strip())
if not FeatureFlagRtv[0]:
ErrorInInf(ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID%\
(FeatureFlagRtv[1]),
LineInfo=self.CurrentLine)
InfDefineEntryPointItemObj.SetFeatureFlagExp(ValueList[1])
if len(ValueList) > 2:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(EntryPoint),
LineInfo=self.CurrentLine)
InfDefineEntryPointItemObj.Comments = Comments
self.EntryPoint.append(InfDefineEntryPointItemObj)
def GetEntryPoint(self):
return self.EntryPoint
#
# SetUnloadImages
#
def SetUnloadImages(self, UnloadImages, Comments):
#
# It can be a list
#
ValueList = []
TokenList = GetSplitValueList(UnloadImages, DT.TAB_VALUE_SPLIT)
ValueList[0:len(TokenList)] = TokenList
InfDefineUnloadImageItemObj = InfDefineUnloadImageItem()
if not IsValidCVariableName(ValueList[0]):
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(ValueList[0]),
LineInfo=self.CurrentLine)
InfDefineUnloadImageItemObj.SetCName(ValueList[0])
if len(ValueList) == 2:
if ValueList[1].strip() == '':
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(ValueList[1]),
LineInfo=self.CurrentLine)
#
# Validate FFE
#
FeatureFlagRtv = IsValidFeatureFlagExp(ValueList[1].strip())
if not FeatureFlagRtv[0]:
ErrorInInf(ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID%(FeatureFlagRtv[1]),
LineInfo=self.CurrentLine)
InfDefineUnloadImageItemObj.SetFeatureFlagExp(ValueList[1])
if len(ValueList) > 2:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(UnloadImages),
LineInfo=self.CurrentLine)
InfDefineUnloadImageItemObj.Comments = Comments
self.UnloadImages.append(InfDefineUnloadImageItemObj)
def GetUnloadImages(self):
return self.UnloadImages
#
# SetConstructor
#
def SetConstructor(self, Constructor, Comments):
#
# It can be a list
#
ValueList = []
TokenList = GetSplitValueList(Constructor, DT.TAB_VALUE_SPLIT)
ValueList[0:len(TokenList)] = TokenList
InfDefineConstructorItemObj = InfDefineConstructorItem()
if not IsValidCVariableName(ValueList[0]):
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(ValueList[0]),
LineInfo=self.CurrentLine)
InfDefineConstructorItemObj.SetCName(ValueList[0])
if len(ValueList) >= 2:
ModList = GetSplitValueList(ValueList[1], ' ')
if ValueList[1].strip() == '':
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(ValueList[1]),
LineInfo=self.CurrentLine)
for ModItem in ModList:
if ModItem not in DT.MODULE_LIST:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_MODULETYPE_INVALID%(ModItem),
LineInfo=self.CurrentLine)
InfDefineConstructorItemObj.SetSupModList(ModList)
if len(ValueList) == 3:
if ValueList[2].strip() == '':
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(ValueList[2]),
LineInfo=self.CurrentLine)
#
# Validate FFE
#
FeatureFlagRtv = IsValidFeatureFlagExp(ValueList[2].strip())
if not FeatureFlagRtv[0]:
ErrorInInf(ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID%(FeatureFlagRtv[1]),
LineInfo=self.CurrentLine)
InfDefineConstructorItemObj.SetFeatureFlagExp(ValueList[2])
if len(ValueList) > 3:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(Constructor),
LineInfo=self.CurrentLine)
InfDefineConstructorItemObj.Comments = Comments
self.Constructor.append(InfDefineConstructorItemObj)
def GetConstructor(self):
return self.Constructor
#
# SetDestructor
#
def SetDestructor(self, Destructor, Comments):
#
# It can be a list and only 1 set to TRUE
#
ValueList = []
TokenList = GetSplitValueList(Destructor, DT.TAB_VALUE_SPLIT)
ValueList[0:len(TokenList)] = TokenList
InfDefineDestructorItemObj = InfDefineDestructorItem()
if not IsValidCVariableName(ValueList[0]):
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(ValueList[0]),
LineInfo=self.CurrentLine)
InfDefineDestructorItemObj.SetCName(ValueList[0])
if len(ValueList) >= 2:
ModList = GetSplitValueList(ValueList[1].strip(), ' ')
if ValueList[1].strip() == '':
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(ValueList[1]),
LineInfo=self.CurrentLine)
for ModItem in ModList:
if ModItem not in DT.MODULE_LIST:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_MODULETYPE_INVALID%(ModItem),
LineInfo=self.CurrentLine)
InfDefineDestructorItemObj.SetSupModList(ModList)
if len(ValueList) == 3:
if ValueList[2].strip() == '':
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(ValueList[2]),
LineInfo=self.CurrentLine)
#
# Validate FFE
#
FeatureFlagRtv = IsValidFeatureFlagExp(ValueList[2].strip())
if not FeatureFlagRtv[0]:
ErrorInInf(ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID%(FeatureFlagRtv[1]),
LineInfo=self.CurrentLine)
InfDefineDestructorItemObj.SetFeatureFlagExp(ValueList[2])
if len(ValueList) > 3:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(Destructor),
LineInfo=self.CurrentLine)
InfDefineDestructorItemObj.Comments = Comments
self.Destructor.append(InfDefineDestructorItemObj)
def GetDestructor(self):
return self.Destructor
def SetShadow(self, Shadow, Comments):
#
# Value has been set before.
#
if self.Shadow is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND%(DT.TAB_INF_DEFINES_SHADOW),
LineInfo=self.CurrentLine)
return False
if (IsValidBoolType(Shadow)):
self.Shadow = InfDefMember()
self.Shadow.SetValue(Shadow)
self.Shadow.Comments = Comments
return True
else:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(Shadow),
LineInfo=self.CurrentLine)
return False
def GetShadow(self):
return self.Shadow
#
# <Family> ::= {"MSFT"} {"GCC"}
# <CustomMake> ::= [<Family> "|"] <Filename>
#
def SetCustomMakefile(self, CustomMakefile, Comments):
if not (CustomMakefile == '' or CustomMakefile is None):
ValueList = GetSplitValueList(CustomMakefile)
if len(ValueList) == 1:
FileName = ValueList[0]
Family = ''
else:
Family = ValueList[0]
FileName = ValueList[1]
Family = Family.strip()
if Family != '':
if not IsValidFamily(Family):
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(Family),
LineInfo=self.CurrentLine)
return False
#
# The MakefileName specified file should exist
#
IsValidFileFlag = False
ModulePath = os.path.split(self.CurrentLine[0])[0]
if IsValidPath(FileName, ModulePath):
IsValidFileFlag = True
else:
ErrorInInf(ST.ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID%(FileName),
LineInfo=self.CurrentLine)
return False
if IsValidFileFlag:
FileName = ConvPathFromAbsToRel(FileName, GlobalData.gINF_MODULE_DIR)
self.CustomMakefile.append((Family, FileName, Comments))
IsValidFileFlag = False
return True
else:
return False
def GetCustomMakefile(self):
return self.CustomMakefile
#
# ["SPEC" <Spec> <EOL>]*{0,}
# <Spec> ::= <Word> "=" <VersionVal>
# <VersionVal> ::= {<HexVersion>} {<DecVersion>}
# <HexNumber> ::= "0x" [<HexDigit>]{1,}
# <DecVersion> ::= (0-9){1,} ["." (0-9){1,2}]
#
def SetSpecification(self, Specification, Comments):
#
# Validate the value of Specification
#
__ValueList = []
TokenList = GetSplitValueList(Specification, DT.TAB_EQUAL_SPLIT, 1)
__ValueList[0:len(TokenList)] = TokenList
if len(__ValueList) != 2:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_NO_NAME + ' Or ' + ST.ERR_INF_PARSER_DEFINE_ITEM_NO_VALUE,
LineInfo=self.CurrentLine)
Name = __ValueList[0].strip()
Version = __ValueList[1].strip()
if IsValidIdentifier(Name):
if IsValidDecVersion(Version):
self.Specification.append((Name, Version, Comments))
return True
else:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(Version),
LineInfo=self.CurrentLine)
return False
else:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(Name),
LineInfo=self.CurrentLine)
return False
return True
def GetSpecification(self):
return self.Specification
#
# [<UefiHiiResource> <EOL>]{0,1}
# <UefiHiiResource> ::= "UEFI_HII_RESOURCE_SECTION" "=" <BoolType>
#
def SetUefiHiiResourceSection(self, UefiHiiResourceSection, Comments):
#
# Value has been set before.
#
if self.UefiHiiResourceSection is not None:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_ITEM_MORE_THAN_ONE_FOUND
%(DT.TAB_INF_DEFINES_UEFI_HII_RESOURCE_SECTION),
LineInfo=self.CurrentLine)
return False
if not (UefiHiiResourceSection == '' or UefiHiiResourceSection is None):
if (IsValidBoolType(UefiHiiResourceSection)):
self.UefiHiiResourceSection = InfDefMember()
self.UefiHiiResourceSection.SetValue(UefiHiiResourceSection)
self.UefiHiiResourceSection.Comments = Comments
return True
else:
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(UefiHiiResourceSection),
LineInfo=self.CurrentLine)
return False
else:
return False
def GetUefiHiiResourceSection(self):
return self.UefiHiiResourceSection
def SetDpxSource(self, DpxSource, Comments):
#
# The MakefileName specified file should exist
#
IsValidFileFlag = False
ModulePath = os.path.split(self.CurrentLine[0])[0]
if IsValidPath(DpxSource, ModulePath):
IsValidFileFlag = True
else:
ErrorInInf(ST.ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID%(DpxSource),
LineInfo=self.CurrentLine)
return False
if IsValidFileFlag:
DpxSource = ConvPathFromAbsToRel(DpxSource,
GlobalData.gINF_MODULE_DIR)
self.DpxSource.append((DpxSource, Comments))
IsValidFileFlag = False
return True
def GetDpxSource(self):
return self.DpxSource
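#
# Map each [Defines] section keyword to the InfDefSection setter that
# validates and stores its value.
#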
gFUNCTION_MAPPING_FOR_DEFINE_SECTION = {
#
# Required Fields
#
DT.TAB_INF_DEFINES_BASE_NAME : InfDefSection.SetBaseName,
DT.TAB_INF_DEFINES_FILE_GUID : InfDefSection.SetFileGuid,
DT.TAB_INF_DEFINES_MODULE_TYPE : InfDefSection.SetModuleType,
#
# Required by EDKII style INF file
#
DT.TAB_INF_DEFINES_INF_VERSION : InfDefSection.SetInfVersion,
#
# Optional Fields
#
DT.TAB_INF_DEFINES_MODULE_UNI_FILE : InfDefSection.SetModuleUniFileName,
DT.TAB_INF_DEFINES_EDK_RELEASE_VERSION : InfDefSection.SetEdkReleaseVersion,
DT.TAB_INF_DEFINES_UEFI_SPECIFICATION_VERSION : InfDefSection.SetUefiSpecificationVersion,
DT.TAB_INF_DEFINES_PI_SPECIFICATION_VERSION : InfDefSection.SetPiSpecificationVersion,
DT.TAB_INF_DEFINES_LIBRARY_CLASS : InfDefSection.SetLibraryClass,
DT.TAB_INF_DEFINES_VERSION_STRING : InfDefSection.SetVersionString,
DT.TAB_INF_DEFINES_PCD_IS_DRIVER : InfDefSection.SetPcdIsDriver,
DT.TAB_INF_DEFINES_ENTRY_POINT : InfDefSection.SetEntryPoint,
DT.TAB_INF_DEFINES_UNLOAD_IMAGE : InfDefSection.SetUnloadImages,
DT.TAB_INF_DEFINES_CONSTRUCTOR : InfDefSection.SetConstructor,
DT.TAB_INF_DEFINES_DESTRUCTOR : InfDefSection.SetDestructor,
DT.TAB_INF_DEFINES_SHADOW : InfDefSection.SetShadow,
DT.TAB_INF_DEFINES_PCI_VENDOR_ID : InfDefSection.SetPciVendorId,
DT.TAB_INF_DEFINES_PCI_DEVICE_ID : InfDefSection.SetPciDeviceId,
DT.TAB_INF_DEFINES_PCI_CLASS_CODE : InfDefSection.SetPciClassCode,
DT.TAB_INF_DEFINES_PCI_REVISION : InfDefSection.SetPciRevision,
DT.TAB_INF_DEFINES_PCI_COMPRESS : InfDefSection.SetPciCompress,
DT.TAB_INF_DEFINES_CUSTOM_MAKEFILE : InfDefSection.SetCustomMakefile,
DT.TAB_INF_DEFINES_SPEC : InfDefSection.SetSpecification,
DT.TAB_INF_DEFINES_UEFI_HII_RESOURCE_SECTION : InfDefSection.SetUefiHiiResourceSection,
DT.TAB_INF_DEFINES_DPX_SOURCE : InfDefSection.SetDpxSource
}
## InfDefMember
#
#
class InfDefMember():
def __init__(self, Name='', Value=''):
self.Comments = InfLineCommentObject()
self.Name = Name
self.Value = Value
self.CurrentLine = CurrentLine()
def GetName(self):
return self.Name
def SetName(self, Name):
self.Name = Name
def GetValue(self):
return self.Value
def SetValue(self, Value):
self.Value = Value
## InfDefObject
#
#
class InfDefObject(InfSectionCommonDef):
def __init__(self):
self.Defines = Sdict()
InfSectionCommonDef.__init__(self)
def SetDefines(self, DefineContent, Arch = None):
#
# Validate Arch
#
HasFoundInfVersionFalg = False
LineInfo = ['', -1, '']
ArchListString = ' '.join(Arch)
#
# Parse Define items.
#
for InfDefMemberObj in DefineContent:
ProcessFunc = None
Name = InfDefMemberObj.GetName()
Value = InfDefMemberObj.GetValue()
if Name == DT.TAB_INF_DEFINES_MODULE_UNI_FILE:
ValidateUNIFilePath(Value)
Value = os.path.join(os.path.dirname(InfDefMemberObj.CurrentLine.FileName), Value)
if not os.path.isfile(Value) or not os.path.exists(Value):
LineInfo[0] = InfDefMemberObj.CurrentLine.GetFileName()
LineInfo[1] = InfDefMemberObj.CurrentLine.GetLineNo()
LineInfo[2] = InfDefMemberObj.CurrentLine.GetLineString()
ErrorInInf(ST.ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID%(Name),
LineInfo=LineInfo)
InfLineCommentObj = InfLineCommentObject()
InfLineCommentObj.SetHeaderComments(InfDefMemberObj.Comments.GetHeaderComments())
InfLineCommentObj.SetTailComments(InfDefMemberObj.Comments.GetTailComments())
if Name == 'COMPONENT_TYPE':
ErrorInInf(ST.ERR_INF_PARSER_NOT_SUPPORT_EDKI_INF,
ErrorCode=ToolError.EDK1_INF_ERROR,
RaiseError=True)
if Name == DT.TAB_INF_DEFINES_INF_VERSION:
HasFoundInfVersionFalg = True
if not (Name == '' or Name is None):
#
# Process "SPEC" Keyword definition.
#
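# A define of the form 'SPEC <Word> = <Version>' is parsed into
# Name == 'SPEC <Word>' and Value == '<Version>'; rebuild it as
# Name 'SPEC' and Value '<Word> = <Version>' so SetSpecification
# can split on '=' again.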
ReName = re.compile(r"SPEC ", re.DOTALL)
if ReName.match(Name):
SpecValue = Name[Name.find("SPEC") + len("SPEC"):].strip()
Name = "SPEC"
Value = SpecValue + " = " + Value
if ArchListString in self.Defines:
DefineList = self.Defines[ArchListString]
LineInfo[0] = InfDefMemberObj.CurrentLine.GetFileName()
LineInfo[1] = InfDefMemberObj.CurrentLine.GetLineNo()
LineInfo[2] = InfDefMemberObj.CurrentLine.GetLineString()
DefineList.CurrentLine = LineInfo
#
# Found the process function from mapping table.
#
if Name not in gFUNCTION_MAPPING_FOR_DEFINE_SECTION.keys():
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_SECTION_KEYWORD_INVALID%(Name),
LineInfo=LineInfo)
else:
ProcessFunc = gFUNCTION_MAPPING_FOR_DEFINE_SECTION[Name]
if (ProcessFunc is not None):
ProcessFunc(DefineList, Value, InfLineCommentObj)
self.Defines[ArchListString] = DefineList
else:
DefineList = InfDefSection()
LineInfo[0] = InfDefMemberObj.CurrentLine.GetFileName()
LineInfo[1] = InfDefMemberObj.CurrentLine.GetLineNo()
LineInfo[2] = InfDefMemberObj.CurrentLine.GetLineString()
DefineList.CurrentLine = LineInfo
#
# Found the process function from mapping table.
#
if Name not in gFUNCTION_MAPPING_FOR_DEFINE_SECTION.keys():
ErrorInInf(ST.ERR_INF_PARSER_DEFINE_SECTION_KEYWORD_INVALID%(Name),
LineInfo=LineInfo)
#
# Found the process function from mapping table.
#
else:
ProcessFunc = gFUNCTION_MAPPING_FOR_DEFINE_SECTION[Name]
if (ProcessFunc is not None):
ProcessFunc(DefineList, Value, InfLineCommentObj)
self.Defines[ArchListString] = DefineList
#
# After set, check whether INF_VERSION defined.
#
if not HasFoundInfVersionFalg:
ErrorInInf(ST.ERR_INF_PARSER_NOT_SUPPORT_EDKI_INF,
ErrorCode=ToolError.EDK1_INF_ERROR,
RaiseError=True)
return True
def GetDefines(self):
return self.Defines
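#
# Minimal usage sketch (illustrative; DefineMemberList stands for the list of
# InfDefMember objects that the INF parser normally builds):
#
#   DefObj = InfDefObject()
#   DefObj.SetDefines(DefineMemberList, Arch=['COMMON'])
#   AllDefines = DefObj.GetDefines()
#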
| edk2-master | BaseTools/Source/Python/UPT/Object/Parser/InfDefineObject.py |
## @file
# This file is used to define class objects of INF file [Depex] section.
# It will be consumed by InfParser.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
'''
InfDepexObject
'''
from Library import DataType as DT
from Library import GlobalData
import Logger.Log as Logger
from Logger import ToolError
from Logger import StringTable as ST
from Object.Parser.InfCommonObject import InfSectionCommonDef
from Library.ParserValidate import IsValidArch
class InfDepexContentItem():
def __init__(self):
self.SectionType = ''
self.SectionString = ''
def SetSectionType(self, SectionType):
self.SectionType = SectionType
def GetSectionType(self):
return self.SectionType
def SetSectionString(self, SectionString):
self.SectionString = SectionString
def GetSectionString(self):
return self.SectionString
class InfDepexItem():
def __init__(self):
self.DepexContent = ''
self.ModuleType = ''
self.SupArch = ''
self.HelpString = ''
self.FeatureFlagExp = ''
self.InfDepexContentItemList = []
def SetFeatureFlagExp(self, FeatureFlagExp):
self.FeatureFlagExp = FeatureFlagExp
def GetFeatureFlagExp(self):
return self.FeatureFlagExp
def SetSupArch(self, Arch):
self.SupArch = Arch
def GetSupArch(self):
return self.SupArch
def SetHelpString(self, HelpString):
self.HelpString = HelpString
def GetHelpString(self):
return self.HelpString
def SetModuleType(self, Type):
self.ModuleType = Type
def GetModuleType(self):
return self.ModuleType
def SetDepexConent(self, Content):
self.DepexContent = Content
def GetDepexContent(self):
return self.DepexContent
def SetInfDepexContentItemList(self, InfDepexContentItemList):
self.InfDepexContentItemList = InfDepexContentItemList
def GetInfDepexContentItemList(self):
return self.InfDepexContentItemList
## InfDepexObject
#
#
#
class InfDepexObject(InfSectionCommonDef):
def __init__(self):
self.Depex = []
self.AllContent = ''
self.SectionContent = ''
InfSectionCommonDef.__init__(self)
def SetDepex(self, DepexContent, KeyList=None, CommentList=None):
for KeyItem in KeyList:
Arch = KeyItem[0]
ModuleType = KeyItem[1]
InfDepexItemIns = InfDepexItem()
#
# Validate Arch
#
if IsValidArch(Arch.strip().upper()):
InfDepexItemIns.SetSupArch(Arch)
else:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_DEFINE_NAME_INVALID % (Arch),
File=GlobalData.gINF_MODULE_NAME,
Line=KeyItem[2])
#
# Validate Module Type
#
if ModuleType and ModuleType != 'COMMON':
if ModuleType in DT.VALID_DEPEX_MODULE_TYPE_LIST:
InfDepexItemIns.SetModuleType(ModuleType)
else:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_DEPEX_SECTION_MODULE_TYPE_ERROR % (ModuleType),
File=GlobalData.gINF_MODULE_NAME,
Line=KeyItem[2])
#
            # Parse the content of the [Depex] section.
#
DepexString = ''
HelpString = ''
#
# Get Depex Expression
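            # For example (illustrative), [Depex] lines such as
            #   gEfiPcdProtocolGuid AND        # in-line comment
            #   gEfiHiiDatabaseProtocolGuid
            # are reduced below to the expression text only: anything after the
            # '#' comment split is dropped and the remaining lines are joined
            # with DT.END_OF_LINE.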
#
for Line in DepexContent:
LineContent = Line[0].strip()
if LineContent.find(DT.TAB_COMMENT_SPLIT) > -1:
LineContent = LineContent[:LineContent.find(DT.TAB_COMMENT_SPLIT)]
if LineContent:
DepexString = DepexString + LineContent + DT.END_OF_LINE
continue
if DepexString.endswith(DT.END_OF_LINE):
DepexString = DepexString[:-1]
if not DepexString.strip():
continue
#
# Get Help Text
#
for HelpLine in CommentList:
HelpString = HelpString + HelpLine + DT.END_OF_LINE
if HelpString.endswith(DT.END_OF_LINE):
HelpString = HelpString[:-1]
InfDepexItemIns.SetDepexConent(DepexString)
InfDepexItemIns.SetHelpString(HelpString)
self.Depex.append(InfDepexItemIns)
return True
def GetDepex(self):
return self.Depex
def GetAllContent(self):
return self.AllContent
| edk2-master | BaseTools/Source/Python/UPT/Object/Parser/InfDepexObject.py |
## @file
# Python 'Object' package initialization file.
#
# This file is required to make Python interpreter treat the directory
# as containing package.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
PARSER
'''
| edk2-master | BaseTools/Source/Python/UPT/Object/Parser/__init__.py |
## @file
# This file is used to define class objects of INF file [Ppis] section.
# It will be consumed by InfParser.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
'''
InfPpiObject
'''
from Library.ParserValidate import IsValidCVariableName
from Library.CommentParsing import ParseComment
from Library.ExpressionValidate import IsValidFeatureFlagExp
from Library.Misc import Sdict
from Library import DataType as DT
import Logger.Log as Logger
from Logger import ToolError
from Logger import StringTable as ST
def ParsePpiComment(CommentsList, InfPpiItemObj):
PreNotify = None
PreUsage = None
PreHelpText = ''
BlockFlag = -1
CommentInsList = []
Count = 0
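    #
    # BlockFlag records the running comment-block state (as inferred from the
    # transitions below):
    #   -1 : no pending comment lines
    #    1 : a single generic (usage/notify undefined) comment line has been seen
    #    2 : two or more generic comment lines have been seen in a row
    #    3 : a generic block is followed by a tagged line; two comment items are
    #        appended to CommentInsList
    #    4 : the current line is emitted as a single comment item
    #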
for CommentItem in CommentsList:
Count = Count + 1
CommentItemUsage, \
CommentItemNotify, \
CommentItemString, \
CommentItemHelpText = \
ParseComment(CommentItem,
DT.ALL_USAGE_TOKENS,
DT.PPI_NOTIFY_TOKENS,
['PPI'],
False)
#
# To avoid PyLint error
#
if CommentItemString:
pass
if CommentItemHelpText is None:
CommentItemHelpText = ''
if Count == len(CommentsList) and CommentItemUsage == CommentItemNotify == DT.ITEM_UNDEFINED:
CommentItemHelpText = DT.END_OF_LINE
#
# For the Last comment Item, set BlockFlag.
#
if Count == len(CommentsList):
if BlockFlag == 1 or BlockFlag == 2:
if CommentItemUsage == CommentItemNotify == DT.ITEM_UNDEFINED:
BlockFlag = 4
else:
BlockFlag = 3
elif BlockFlag == -1:
BlockFlag = 4
#
# Comment USAGE and NOTIFY information are "UNDEFINED"
#
if BlockFlag == -1 or BlockFlag == 1 or BlockFlag == 2:
if CommentItemUsage == CommentItemNotify == DT.ITEM_UNDEFINED:
if BlockFlag == -1:
BlockFlag = 1
elif BlockFlag == 1:
BlockFlag = 2
else:
if BlockFlag == 1 or BlockFlag == 2:
BlockFlag = 3
#
                # The item has Usage or Notify information and this is the first time the information appears
#
elif BlockFlag == -1:
BlockFlag = 4
#
        # Combine two comment lines if they are both generic comments
#
if CommentItemUsage == CommentItemNotify == PreUsage == PreNotify == DT.ITEM_UNDEFINED:
CommentItemHelpText = PreHelpText + DT.END_OF_LINE + CommentItemHelpText
#
        # Store this information; the next line may still need the combine operation.
#
PreHelpText = CommentItemHelpText
if BlockFlag == 4:
CommentItemIns = InfPpiItemCommentContent()
CommentItemIns.SetUsage(CommentItemUsage)
CommentItemIns.SetNotify(CommentItemNotify)
CommentItemIns.SetHelpStringItem(CommentItemHelpText)
CommentInsList.append(CommentItemIns)
BlockFlag = -1
PreUsage = None
PreNotify = None
PreHelpText = ''
elif BlockFlag == 3:
#
# Add previous help string
#
CommentItemIns = InfPpiItemCommentContent()
CommentItemIns.SetUsage(DT.ITEM_UNDEFINED)
CommentItemIns.SetNotify(DT.ITEM_UNDEFINED)
if PreHelpText == '' or PreHelpText.endswith(DT.END_OF_LINE):
PreHelpText += DT.END_OF_LINE
CommentItemIns.SetHelpStringItem(PreHelpText)
CommentInsList.append(CommentItemIns)
#
# Add Current help string
#
CommentItemIns = InfPpiItemCommentContent()
CommentItemIns.SetUsage(CommentItemUsage)
CommentItemIns.SetNotify(CommentItemNotify)
CommentItemIns.SetHelpStringItem(CommentItemHelpText)
CommentInsList.append(CommentItemIns)
BlockFlag = -1
PreUsage = None
PreNotify = None
PreHelpText = ''
else:
PreUsage = CommentItemUsage
PreNotify = CommentItemNotify
PreHelpText = CommentItemHelpText
InfPpiItemObj.SetCommentList(CommentInsList)
return InfPpiItemObj
class InfPpiItemCommentContent():
def __init__(self):
#
# ## SOMETIMES_CONSUMES ## HelpString
#
self.UsageItem = ''
#
# Help String
#
self.HelpStringItem = ''
self.Notify = ''
self.CommentList = []
def SetUsage(self, UsageItem):
self.UsageItem = UsageItem
def GetUsage(self):
return self.UsageItem
def SetNotify(self, Notify):
if Notify != DT.ITEM_UNDEFINED:
self.Notify = 'true'
def GetNotify(self):
return self.Notify
def SetHelpStringItem(self, HelpStringItem):
self.HelpStringItem = HelpStringItem
def GetHelpStringItem(self):
return self.HelpStringItem
class InfPpiItem():
def __init__(self):
self.Name = ''
self.FeatureFlagExp = ''
self.SupArchList = []
self.CommentList = []
def SetName(self, Name):
self.Name = Name
def GetName(self):
return self.Name
def SetSupArchList(self, SupArchList):
self.SupArchList = SupArchList
def GetSupArchList(self):
return self.SupArchList
def SetCommentList(self, CommentList):
self.CommentList = CommentList
def GetCommentList(self):
return self.CommentList
def SetFeatureFlagExp(self, FeatureFlagExp):
self.FeatureFlagExp = FeatureFlagExp
def GetFeatureFlagExp(self):
return self.FeatureFlagExp
##
#
#
#
class InfPpiObject():
def __init__(self):
self.Ppis = Sdict()
#
# Macro defined in this section should be only used in this section.
#
self.Macros = {}
def SetPpi(self, PpiList, Arch = None):
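        #
        # Each PpiList entry is expected (judging from the handling below) to be
        # a tuple of (TokenList, CommentsList, CurrentLine), where TokenList is
        # [<CName>] or [<CName>, <FeatureFlagExpression>]. Empty Arch entries
        # default to 'COMMON'.
        #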
__SupArchList = []
for ArchItem in Arch:
#
# Validate Arch
#
if (ArchItem == '' or ArchItem is None):
ArchItem = 'COMMON'
__SupArchList.append(ArchItem)
for Item in PpiList:
#
            # Get the comment content of this PPI entry
#
CommentsList = None
if len(Item) == 3:
CommentsList = Item[1]
CurrentLineOfItem = Item[2]
Item = Item[0]
InfPpiItemObj = InfPpiItem()
if len(Item) >= 1 and len(Item) <= 2:
#
# Only CName contained
#
if not IsValidCVariableName(Item[0]):
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_INVALID_CNAME%(Item[0]),
File=CurrentLineOfItem[2],
Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
if (Item[0] != ''):
InfPpiItemObj.SetName(Item[0])
else:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_CNAME_MISSING,
File=CurrentLineOfItem[2],
Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
#
# Have FeatureFlag information
#
if len(Item) == 2:
#
# Contained CName and Feature Flag Express
# <statements> ::= <CName> ["|" <FeatureFlagExpress>]
# Item[1] should not be empty
#
if Item[1].strip() == '':
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_MISSING,
File=CurrentLineOfItem[2],
Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
#
# Validate Feature Flag Express for PPI entry
# Item[1] contain FFE information
#
FeatureFlagRtv = IsValidFeatureFlagExp(Item[1].strip())
if not FeatureFlagRtv[0]:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID%(FeatureFlagRtv[1]),
File=CurrentLineOfItem[2],
Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
InfPpiItemObj.SetFeatureFlagExp(Item[1])
if len(Item) != 1 and len(Item) != 2:
#
# Invalid format of Ppi statement
#
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_GUID_PPI_PROTOCOL_SECTION_CONTENT_ERROR,
File=CurrentLineOfItem[2],
Line=CurrentLineOfItem[1],
ExtraData=CurrentLineOfItem[0])
#
# Get/Set Usage and HelpString for PPI entry
#
if CommentsList is not None and len(CommentsList) != 0:
InfPpiItemObj = ParsePpiComment(CommentsList, InfPpiItemObj)
else:
CommentItemIns = InfPpiItemCommentContent()
CommentItemIns.SetUsage(DT.ITEM_UNDEFINED)
CommentItemIns.SetNotify(DT.ITEM_UNDEFINED)
InfPpiItemObj.SetCommentList([CommentItemIns])
InfPpiItemObj.SetSupArchList(__SupArchList)
#
            # Determine whether the PPI name is duplicated. Follow the rule below:
#
# A PPI must not be duplicated within a [Ppis] section.
# A PPI may appear in multiple architectural [Ppis]
# sections. A PPI listed in an architectural [Ppis]
# section must not be listed in the common architectural
# [Ppis] section.
#
# NOTE: This check will not report error now.
#
for Item in self.Ppis:
if Item.GetName() == InfPpiItemObj.GetName():
ItemSupArchList = Item.GetSupArchList()
for ItemArch in ItemSupArchList:
for PpiItemObjArch in __SupArchList:
if ItemArch == PpiItemObjArch:
#
# ST.ERR_INF_PARSER_ITEM_DUPLICATE
#
pass
if ItemArch.upper() == 'COMMON' or PpiItemObjArch.upper() == 'COMMON':
#
# ST.ERR_INF_PARSER_ITEM_DUPLICATE_COMMON
#
pass
if (InfPpiItemObj) in self.Ppis:
PpiList = self.Ppis[InfPpiItemObj]
PpiList.append(InfPpiItemObj)
self.Ppis[InfPpiItemObj] = PpiList
else:
PpiList = []
PpiList.append(InfPpiItemObj)
self.Ppis[InfPpiItemObj] = PpiList
return True
def GetPpi(self):
return self.Ppis
| edk2-master | BaseTools/Source/Python/UPT/Object/Parser/InfPpiObject.py |
## @file
# This file is used to define class objects for INF file miscellaneous items,
# including BootMode/HOB/Event and others. It will be consumed by InfParser.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
'''
InfMisc
'''
import Logger.Log as Logger
from Logger import ToolError
from Library import DataType as DT
from Object.Parser.InfCommonObject import InfSectionCommonDef
from Library.Misc import Sdict
##
# BootModeObject
#
class InfBootModeObject():
def __init__(self):
self.SupportedBootModes = ''
self.HelpString = ''
self.Usage = ''
def SetSupportedBootModes(self, SupportedBootModes):
self.SupportedBootModes = SupportedBootModes
def GetSupportedBootModes(self):
return self.SupportedBootModes
def SetHelpString(self, HelpString):
self.HelpString = HelpString
def GetHelpString(self):
return self.HelpString
def SetUsage(self, Usage):
self.Usage = Usage
def GetUsage(self):
return self.Usage
##
# EventObject
#
class InfEventObject():
def __init__(self):
self.EventType = ''
self.HelpString = ''
self.Usage = ''
def SetEventType(self, EventType):
self.EventType = EventType
def GetEventType(self):
return self.EventType
def SetHelpString(self, HelpString):
self.HelpString = HelpString
def GetHelpString(self):
return self.HelpString
def SetUsage(self, Usage):
self.Usage = Usage
def GetUsage(self):
return self.Usage
##
# HobObject
#
class InfHobObject():
def __init__(self):
self.HobType = ''
self.Usage = ''
self.SupArchList = []
self.HelpString = ''
def SetHobType(self, HobType):
self.HobType = HobType
def GetHobType(self):
return self.HobType
def SetUsage(self, Usage):
self.Usage = Usage
def GetUsage(self):
return self.Usage
def SetSupArchList(self, ArchList):
self.SupArchList = ArchList
def GetSupArchList(self):
return self.SupArchList
def SetHelpString(self, HelpString):
self.HelpString = HelpString
def GetHelpString(self):
return self.HelpString
##
# InfSpecialCommentObject
#
class InfSpecialCommentObject(InfSectionCommonDef):
def __init__(self):
self.SpecialComments = Sdict()
InfSectionCommonDef.__init__(self)
def SetSpecialComments(self, SepcialSectionList = None, Type = ''):
if Type == DT.TYPE_HOB_SECTION or \
Type == DT.TYPE_EVENT_SECTION or \
Type == DT.TYPE_BOOTMODE_SECTION:
for Item in SepcialSectionList:
if Type in self.SpecialComments:
ObjList = self.SpecialComments[Type]
ObjList.append(Item)
self.SpecialComments[Type] = ObjList
else:
ObjList = []
ObjList.append(Item)
self.SpecialComments[Type] = ObjList
return True
def GetSpecialComments(self):
return self.SpecialComments
## ErrorInInf
#
# An encapsulate of Error for INF parser.
#
def ErrorInInf(Message=None, ErrorCode=None, LineInfo=None, RaiseError=True):
if ErrorCode is None:
ErrorCode = ToolError.FORMAT_INVALID
if LineInfo is None:
LineInfo = ['', -1, '']
Logger.Error("InfParser",
ErrorCode,
Message=Message,
File=LineInfo[0],
Line=LineInfo[1],
ExtraData=LineInfo[2],
RaiseError=RaiseError)
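#
# Typical call (illustrative; mirrors how callers such as InfDefObject.SetDefines
# in InfDefineObject.py report a malformed line together with its location):
#
#   ErrorInInf(ST.ERR_INF_PARSER_DEFINE_SECTION_KEYWORD_INVALID % (Name),
#              LineInfo=[FileName, LineNo, LineString])
#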
| edk2-master | BaseTools/Source/Python/UPT/Object/Parser/InfMisc.py |
## @file
# This file is used to define class objects of INF file [Binaries] section.
# It will be consumed by InfParser.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
'''
InfBinaryObject
'''
import os
from copy import deepcopy
from Library import DataType as DT
from Library import GlobalData
import Logger.Log as Logger
from Logger import ToolError
from Logger import StringTable as ST
from Library.Misc import Sdict
from Object.Parser.InfCommonObject import InfSectionCommonDef
from Object.Parser.InfCommonObject import CurrentLine
from Library.Misc import ConvPathFromAbsToRel
from Library.ExpressionValidate import IsValidFeatureFlagExp
from Library.Misc import ValidFile
from Library.ParserValidate import IsValidPath
class InfBianryItem():
def __init__(self):
self.FileName = ''
self.Target = ''
self.FeatureFlagExp = ''
self.HelpString = ''
self.Type = ''
self.SupArchList = []
def SetFileName(self, FileName):
self.FileName = FileName
def GetFileName(self):
return self.FileName
def SetTarget(self, Target):
self.Target = Target
def GetTarget(self):
return self.Target
def SetFeatureFlagExp(self, FeatureFlagExp):
self.FeatureFlagExp = FeatureFlagExp
def GetFeatureFlagExp(self):
return self.FeatureFlagExp
def SetHelpString(self, HelpString):
self.HelpString = HelpString
def GetHelpString(self):
return self.HelpString
def SetType(self, Type):
self.Type = Type
def GetType(self):
return self.Type
def SetSupArchList(self, SupArchList):
self.SupArchList = SupArchList
def GetSupArchList(self):
return self.SupArchList
class InfBianryVerItem(InfBianryItem, CurrentLine):
def __init__(self):
InfBianryItem.__init__(self)
CurrentLine.__init__(self)
self.VerTypeName = ''
def SetVerTypeName(self, VerTypeName):
self.VerTypeName = VerTypeName
def GetVerTypeName(self):
return self.VerTypeName
class InfBianryUiItem(InfBianryItem, CurrentLine):
def __init__(self):
InfBianryItem.__init__(self)
CurrentLine.__init__(self)
self.UiTypeName = ''
def SetUiTypeName(self, UiTypeName):
self.UiTypeName = UiTypeName
def GetVerTypeName(self):
return self.UiTypeName
class InfBianryCommonItem(InfBianryItem, CurrentLine):
def __init__(self):
self.CommonType = ''
self.TagName = ''
self.Family = ''
self.GuidValue = ''
InfBianryItem.__init__(self)
CurrentLine.__init__(self)
def SetCommonType(self, CommonType):
self.CommonType = CommonType
def GetCommonType(self):
return self.CommonType
def SetTagName(self, TagName):
self.TagName = TagName
def GetTagName(self):
return self.TagName
def SetFamily(self, Family):
self.Family = Family
def GetFamily(self):
return self.Family
def SetGuidValue(self, GuidValue):
self.GuidValue = GuidValue
def GetGuidValue(self):
return self.GuidValue
##
#
#
#
class InfBinariesObject(InfSectionCommonDef):
def __init__(self):
self.Binaries = Sdict()
#
# Macro defined in this section should be only used in this section.
#
self.Macros = {}
InfSectionCommonDef.__init__(self)
## CheckVer
#
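    # Validate 'VER' entries of a [Binaries] section. Each item in Ver is
    # expected (judging from the unpacking below) to be a tuple of
    # (ContentList, Comment, CurrentLine), where ContentList has the form
    # ['VER', <FileName>[, <Target>[, <FeatureFlagExpression>]]].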
#
def CheckVer(self, Ver, __SupArchList):
#
# Check Ver
#
for VerItem in Ver:
IsValidFileFlag = False
VerContent = VerItem[0]
VerComment = VerItem[1]
VerCurrentLine = VerItem[2]
GlobalData.gINF_CURRENT_LINE = VerCurrentLine
InfBianryVerItemObj = None
#
            # Should not be less than 2 elements
#
if len(VerContent) < 2:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_BINARY_ITEM_FORMAT_INVALID % (VerContent[0], 2),
File=VerCurrentLine.GetFileName(),
Line=VerCurrentLine.GetLineNo(),
ExtraData=VerCurrentLine.GetLineString())
return False
if len(VerContent) > 4:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_BINARY_ITEM_FORMAT_INVALID_MAX % (VerContent[0], 4),
File=VerCurrentLine.GetFileName(),
Line=VerCurrentLine.GetLineNo(),
ExtraData=VerCurrentLine.GetLineString())
return False
if len(VerContent) >= 2:
#
# Create a Ver Object.
#
InfBianryVerItemObj = InfBianryVerItem()
if VerContent[0] != DT.BINARY_FILE_TYPE_VER:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_BINARY_VER_TYPE % DT.BINARY_FILE_TYPE_VER,
File=VerCurrentLine.GetFileName(),
Line=VerCurrentLine.GetLineNo(),
ExtraData=VerCurrentLine.GetLineString())
InfBianryVerItemObj.SetVerTypeName(VerContent[0])
InfBianryVerItemObj.SetType(VerContent[0])
#
                # Verify whether the file exists
#
FullFileName = os.path.normpath(os.path.realpath(os.path.join(GlobalData.gINF_MODULE_DIR,
VerContent[1])))
if not (ValidFile(FullFileName) or ValidFile(VerContent[1])):
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_BINARY_ITEM_FILE_NOT_EXIST % (VerContent[1]),
File=VerCurrentLine.GetFileName(),
Line=VerCurrentLine.GetLineNo(),
ExtraData=VerCurrentLine.GetLineString())
#
# Validate file exist/format.
#
if IsValidPath(VerContent[1], GlobalData.gINF_MODULE_DIR):
IsValidFileFlag = True
else:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID % (VerContent[1]),
File=VerCurrentLine.GetFileName(),
Line=VerCurrentLine.GetLineNo(),
ExtraData=VerCurrentLine.GetLineString())
return False
if IsValidFileFlag:
                    VerContent[1] = ConvPathFromAbsToRel(VerContent[1],
                                                         GlobalData.gINF_MODULE_DIR)
                    InfBianryVerItemObj.SetFileName(VerContent[1])
if len(VerContent) >= 3:
#
# Add Target information
#
InfBianryVerItemObj.SetTarget(VerContent[2])
if len(VerContent) == 4:
if VerContent[3].strip() == '':
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_MISSING,
File=VerCurrentLine.GetFileName(),
Line=VerCurrentLine.GetLineNo(),
ExtraData=VerCurrentLine.GetLineString())
#
# Validate Feature Flag Express
#
FeatureFlagRtv = IsValidFeatureFlagExp(VerContent[3].\
strip())
if not FeatureFlagRtv[0]:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID % (FeatureFlagRtv[1]),
File=VerCurrentLine.GetFileName(),
Line=VerCurrentLine.GetLineNo(),
ExtraData=VerCurrentLine.GetLineString())
InfBianryVerItemObj.SetFeatureFlagExp(VerContent[3])
InfBianryVerItemObj.SetSupArchList(__SupArchList)
#
                # Determine whether the binary file name is duplicated. Follow the rule below:
#
# A binary filename must not be duplicated within
# a [Binaries] section. A binary filename may appear in
# multiple architectural [Binaries] sections. A binary
# filename listed in an architectural [Binaries] section
# must not be listed in the common architectural
# [Binaries] section.
#
# NOTE: This check will not report error now.
#
for Item in self.Binaries:
if Item.GetFileName() == InfBianryVerItemObj.GetFileName():
ItemSupArchList = Item.GetSupArchList()
for ItemArch in ItemSupArchList:
for VerItemObjArch in __SupArchList:
if ItemArch == VerItemObjArch:
#
# ST.ERR_INF_PARSER_ITEM_DUPLICATE
#
pass
if ItemArch.upper() == 'COMMON' or VerItemObjArch.upper() == 'COMMON':
#
# ERR_INF_PARSER_ITEM_DUPLICATE_COMMON
#
pass
if InfBianryVerItemObj is not None:
if (InfBianryVerItemObj) in self.Binaries:
BinariesList = self.Binaries[InfBianryVerItemObj]
BinariesList.append((InfBianryVerItemObj, VerComment))
self.Binaries[InfBianryVerItemObj] = BinariesList
else:
BinariesList = []
BinariesList.append((InfBianryVerItemObj, VerComment))
self.Binaries[InfBianryVerItemObj] = BinariesList
## ParseCommonBinary
#
# ParseCommonBinary
#
def ParseCommonBinary(self, CommonBinary, __SupArchList):
#
# Check common binary definitions
# Type | FileName | Target | Family | TagName | FeatureFlagExp
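        # For example (illustrative; MyDriver.efi is a placeholder, and the
        # section parser is assumed to have split the fields on '|'), a line like
        #   PE32|MyDriver.efi|DEBUG|MSFT
        # reaches this method as the token list ['PE32', 'MyDriver.efi', 'DEBUG', 'MSFT'].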
#
for Item in CommonBinary:
IsValidFileFlag = False
ItemContent = Item[0]
ItemComment = Item[1]
CurrentLineOfItem = Item[2]
GlobalData.gINF_CURRENT_LINE = CurrentLineOfItem
InfBianryCommonItemObj = None
if ItemContent[0] == 'SUBTYPE_GUID':
if len(ItemContent) < 3:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_BINARY_ITEM_FORMAT_INVALID % (ItemContent[0], 3),
File=CurrentLineOfItem.GetFileName(),
Line=CurrentLineOfItem.GetLineNo(),
ExtraData=CurrentLineOfItem.GetLineString())
return False
else:
if len(ItemContent) < 2:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_BINARY_ITEM_FORMAT_INVALID % (ItemContent[0], 2),
File=CurrentLineOfItem.GetFileName(),
Line=CurrentLineOfItem.GetLineNo(),
ExtraData=CurrentLineOfItem.GetLineString())
return False
if len(ItemContent) > 7:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_BINARY_ITEM_FORMAT_INVALID_MAX % (ItemContent[0], 7),
File=CurrentLineOfItem.GetFileName(),
Line=CurrentLineOfItem.GetLineNo(),
ExtraData=CurrentLineOfItem.GetLineString())
return False
if len(ItemContent) >= 2:
#
# Create a Common Object.
#
InfBianryCommonItemObj = InfBianryCommonItem()
#
# Convert Binary type.
#
BinaryFileType = ItemContent[0].strip()
if BinaryFileType == 'RAW' or BinaryFileType == 'ACPI' or BinaryFileType == 'ASL':
BinaryFileType = 'BIN'
if BinaryFileType not in DT.BINARY_FILE_TYPE_LIST:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_BINARY_ITEM_INVALID_FILETYPE % \
(DT.BINARY_FILE_TYPE_LIST.__str__()),
File=CurrentLineOfItem.GetFileName(),
Line=CurrentLineOfItem.GetLineNo(),
ExtraData=CurrentLineOfItem.GetLineString())
if BinaryFileType == 'SUBTYPE_GUID':
BinaryFileType = 'FREEFORM'
if BinaryFileType == 'LIB' or BinaryFileType == 'UEFI_APP':
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_BINARY_ITEM_INVALID_FILETYPE % \
(DT.BINARY_FILE_TYPE_LIST.__str__()),
File=CurrentLineOfItem.GetFileName(),
Line=CurrentLineOfItem.GetLineNo(),
ExtraData=CurrentLineOfItem.GetLineString())
InfBianryCommonItemObj.SetType(BinaryFileType)
InfBianryCommonItemObj.SetCommonType(ItemContent[0])
FileName = ''
if BinaryFileType == 'FREEFORM':
InfBianryCommonItemObj.SetGuidValue(ItemContent[1])
if len(ItemContent) >= 3:
FileName = ItemContent[2]
else:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_BINARY_ITEM_FILENAME_NOT_EXIST,
File=CurrentLineOfItem.GetFileName(),
Line=CurrentLineOfItem.GetLineNo(),
ExtraData=CurrentLineOfItem.GetLineString())
else:
FileName = ItemContent[1]
#
                # Verify whether the file exists
#
FullFileName = os.path.normpath(os.path.realpath(os.path.join(GlobalData.gINF_MODULE_DIR,
FileName)))
if not (ValidFile(FullFileName) or ValidFile(FileName)):
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_BINARY_ITEM_FILE_NOT_EXIST % (FileName),
File=CurrentLineOfItem.GetFileName(),
Line=CurrentLineOfItem.GetLineNo(),
ExtraData=CurrentLineOfItem.GetLineString())
#
# Validate file exist/format.
#
if IsValidPath(FileName, GlobalData.gINF_MODULE_DIR):
IsValidFileFlag = True
else:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID % (FileName),
File=CurrentLineOfItem.GetFileName(),
Line=CurrentLineOfItem.GetLineNo(),
ExtraData=CurrentLineOfItem.GetLineString())
return False
if IsValidFileFlag:
                    FileName = ConvPathFromAbsToRel(FileName, GlobalData.gINF_MODULE_DIR)
                    InfBianryCommonItemObj.SetFileName(FileName)
if len(ItemContent) >= 3:
#
# Add Target information
#
if BinaryFileType != 'FREEFORM':
InfBianryCommonItemObj.SetTarget(ItemContent[2])
if len(ItemContent) >= 4:
#
# Add Family information
#
if BinaryFileType != 'FREEFORM':
InfBianryCommonItemObj.SetFamily(ItemContent[3])
else:
InfBianryCommonItemObj.SetTarget(ItemContent[3])
if len(ItemContent) >= 5:
#
# TagName entries are build system specific. If there
# is content in the entry, the tool must exit
# gracefully with an error message that indicates build
# system specific content cannot be distributed using
# the UDP
#
if BinaryFileType != 'FREEFORM':
if ItemContent[4].strip() != '':
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_TAGNAME_NOT_PERMITTED % (ItemContent[4]),
File=CurrentLineOfItem.GetFileName(),
Line=CurrentLineOfItem.GetLineNo(),
ExtraData=CurrentLineOfItem.GetLineString())
else:
InfBianryCommonItemObj.SetFamily(ItemContent[4])
if len(ItemContent) >= 6:
#
# Add FeatureFlagExp
#
if BinaryFileType != 'FREEFORM':
if ItemContent[5].strip() == '':
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_MISSING,
File=CurrentLineOfItem.GetFileName(),
Line=CurrentLineOfItem.GetLineNo(),
ExtraData=CurrentLineOfItem.GetLineString())
#
# Validate Feature Flag Express
#
FeatureFlagRtv = IsValidFeatureFlagExp(ItemContent[5].strip())
if not FeatureFlagRtv[0]:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID % (FeatureFlagRtv[1]),
File=CurrentLineOfItem.GetFileName(),
Line=CurrentLineOfItem.GetLineNo(),
ExtraData=CurrentLineOfItem.GetLineString())
InfBianryCommonItemObj.SetFeatureFlagExp(ItemContent[5])
else:
if ItemContent[5].strip() != '':
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_TAGNAME_NOT_PERMITTED % (ItemContent[5]),
File=CurrentLineOfItem.GetFileName(),
Line=CurrentLineOfItem.GetLineNo(),
ExtraData=CurrentLineOfItem.GetLineString())
if len(ItemContent) == 7:
if ItemContent[6].strip() == '':
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_MISSING,
File=CurrentLineOfItem.GetFileName(),
Line=CurrentLineOfItem.GetLineNo(),
ExtraData=CurrentLineOfItem.GetLineString())
#
# Validate Feature Flag Express
#
FeatureFlagRtv = IsValidFeatureFlagExp(ItemContent[6].strip())
if not FeatureFlagRtv[0]:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID % (FeatureFlagRtv[1]),
File=CurrentLineOfItem.GetFileName(),
Line=CurrentLineOfItem.GetLineNo(),
ExtraData=CurrentLineOfItem.GetLineString())
InfBianryCommonItemObj.SetFeatureFlagExp(ItemContent[6])
InfBianryCommonItemObj.SetSupArchList(__SupArchList)
#
                # Determine whether the binary file name is duplicated. Follow the rule below:
#
# A binary filename must not be duplicated within
# a [Binaries] section. A binary filename may appear in
# multiple architectural [Binaries] sections. A binary
# filename listed in an architectural [Binaries] section
# must not be listed in the common architectural
# [Binaries] section.
#
# NOTE: This check will not report error now.
#
# for Item in self.Binaries:
# if Item.GetFileName() == InfBianryCommonItemObj.GetFileName():
# ItemSupArchList = Item.GetSupArchList()
# for ItemArch in ItemSupArchList:
# for ComItemObjArch in __SupArchList:
# if ItemArch == ComItemObjArch:
# #
# # ST.ERR_INF_PARSER_ITEM_DUPLICATE
# #
# pass
#
# if ItemArch.upper() == 'COMMON' or ComItemObjArch.upper() == 'COMMON':
# #
# # ERR_INF_PARSER_ITEM_DUPLICATE_COMMON
# #
# pass
if InfBianryCommonItemObj is not None:
if (InfBianryCommonItemObj) in self.Binaries:
BinariesList = self.Binaries[InfBianryCommonItemObj]
BinariesList.append((InfBianryCommonItemObj, ItemComment))
self.Binaries[InfBianryCommonItemObj] = BinariesList
else:
BinariesList = []
BinariesList.append((InfBianryCommonItemObj, ItemComment))
self.Binaries[InfBianryCommonItemObj] = BinariesList
def SetBinary(self, UiInf=None, Ver=None, CommonBinary=None, ArchList=None):
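        #
        # UiInf, Ver and CommonBinary are lists of parsed [Binaries] entries; each
        # entry is expected (judging from the unpacking below) to be a tuple of
        # (ContentList, Comment, CurrentLine). Empty ArchList entries default to
        # 'COMMON'.
        #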
__SupArchList = []
for ArchItem in ArchList:
#
# Validate Arch
#
if (ArchItem == '' or ArchItem is None):
ArchItem = 'COMMON'
__SupArchList.append(ArchItem)
if UiInf is not None:
if len(UiInf) > 0:
#
# Check UI
#
for UiItem in UiInf:
IsValidFileFlag = False
InfBianryUiItemObj = None
UiContent = UiItem[0]
UiComment = UiItem[1]
UiCurrentLine = UiItem[2]
GlobalData.gINF_CURRENT_LINE = deepcopy(UiItem[2])
#
                    # Should not be less than 2 elements
#
if len(UiContent) < 2:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_BINARY_ITEM_FORMAT_INVALID % (UiContent[0], 2),
File=UiCurrentLine.GetFileName(),
Line=UiCurrentLine.GetLineNo(),
ExtraData=UiCurrentLine.GetLineString())
return False
if len(UiContent) > 4:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_BINARY_ITEM_FORMAT_INVALID_MAX % (UiContent[0], 4),
File=UiCurrentLine.GetFileName(),
Line=UiCurrentLine.GetLineNo(),
ExtraData=UiCurrentLine.GetLineString())
return False
if len(UiContent) >= 2:
#
# Create an Ui Object.
#
InfBianryUiItemObj = InfBianryUiItem()
if UiContent[0] != 'UI':
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_BINARY_VER_TYPE % ('UI'),
File=UiCurrentLine.GetFileName(),
Line=UiCurrentLine.GetLineNo(),
ExtraData=UiCurrentLine.GetLineString())
InfBianryUiItemObj.SetUiTypeName(UiContent[0])
InfBianryUiItemObj.SetType(UiContent[0])
#
                        # Verify whether the file exists
#
FullFileName = os.path.normpath(os.path.realpath(os.path.join(GlobalData.gINF_MODULE_DIR,
UiContent[1])))
if not (ValidFile(FullFileName) or ValidFile(UiContent[1])):
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_BINARY_ITEM_FILE_NOT_EXIST % (UiContent[1]),
File=UiCurrentLine.GetFileName(),
Line=UiCurrentLine.GetLineNo(),
ExtraData=UiCurrentLine.GetLineString())
#
# Validate file exist/format.
#
if IsValidPath(UiContent[1], GlobalData.gINF_MODULE_DIR):
IsValidFileFlag = True
else:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID % (UiContent[1]),
File=UiCurrentLine.GetFileName(),
Line=UiCurrentLine.GetLineNo(),
ExtraData=UiCurrentLine.GetLineString())
return False
if IsValidFileFlag:
                            UiContent[1] = ConvPathFromAbsToRel(UiContent[1], GlobalData.gINF_MODULE_DIR)
                            InfBianryUiItemObj.SetFileName(UiContent[1])
if len(UiContent) >= 3:
#
# Add Target information
#
InfBianryUiItemObj.SetTarget(UiContent[2])
if len(UiContent) == 4:
if UiContent[3].strip() == '':
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_MISSING,
File=UiCurrentLine.GetFileName(),
Line=UiCurrentLine.GetLineNo(),
ExtraData=UiCurrentLine.GetLineString())
#
# Validate Feature Flag Express
#
FeatureFlagRtv = IsValidFeatureFlagExp(UiContent[3].strip())
if not FeatureFlagRtv[0]:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID % (FeatureFlagRtv[1]),
File=UiCurrentLine.GetFileName(),
Line=UiCurrentLine.GetLineNo(),
ExtraData=UiCurrentLine.GetLineString())
InfBianryUiItemObj.SetFeatureFlagExp(UiContent[3])
InfBianryUiItemObj.SetSupArchList(__SupArchList)
#
                        # Determine whether the binary file name is duplicated. Follow the rule below:
#
# A binary filename must not be duplicated within
# a [Binaries] section. A binary filename may appear in
# multiple architectural [Binaries] sections. A binary
# filename listed in an architectural [Binaries] section
# must not be listed in the common architectural
# [Binaries] section.
#
# NOTE: This check will not report error now.
#
# for Item in self.Binaries:
# if Item.GetFileName() == InfBianryUiItemObj.GetFileName():
# ItemSupArchList = Item.GetSupArchList()
# for ItemArch in ItemSupArchList:
# for UiItemObjArch in __SupArchList:
# if ItemArch == UiItemObjArch:
# #
# # ST.ERR_INF_PARSER_ITEM_DUPLICATE
# #
# pass
# if ItemArch.upper() == 'COMMON' or UiItemObjArch.upper() == 'COMMON':
# #
# # ERR_INF_PARSER_ITEM_DUPLICATE_COMMON
# #
# pass
if InfBianryUiItemObj is not None:
if (InfBianryUiItemObj) in self.Binaries:
BinariesList = self.Binaries[InfBianryUiItemObj]
BinariesList.append((InfBianryUiItemObj, UiComment))
self.Binaries[InfBianryUiItemObj] = BinariesList
else:
BinariesList = []
BinariesList.append((InfBianryUiItemObj, UiComment))
self.Binaries[InfBianryUiItemObj] = BinariesList
if Ver is not None and len(Ver) > 0:
self.CheckVer(Ver, __SupArchList)
if CommonBinary and len(CommonBinary) > 0:
self.ParseCommonBinary(CommonBinary, __SupArchList)
return True
def GetBinary(self):
return self.Binaries
| edk2-master | BaseTools/Source/Python/UPT/Object/Parser/InfBinaryObject.py |
## @file
# This file is used to define class objects of INF file [Packages] section.
# It will be consumed by InfParser.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
'''
InfPackageObject
'''
from Logger import StringTable as ST
from Logger import ToolError
import Logger.Log as Logger
from Library import GlobalData
from Library.Misc import Sdict
from Library.ParserValidate import IsValidPath
from Library.ExpressionValidate import IsValidFeatureFlagExp
class InfPackageItem():
def __init__(self,
PackageName = '',
FeatureFlagExp = '',
HelpString = ''):
self.PackageName = PackageName
self.FeatureFlagExp = FeatureFlagExp
self.HelpString = HelpString
self.SupArchList = []
def SetPackageName(self, PackageName):
self.PackageName = PackageName
def GetPackageName(self):
return self.PackageName
def SetFeatureFlagExp(self, FeatureFlagExp):
self.FeatureFlagExp = FeatureFlagExp
def GetFeatureFlagExp(self):
return self.FeatureFlagExp
def SetHelpString(self, HelpString):
self.HelpString = HelpString
def GetHelpString(self):
return self.HelpString
def SetSupArchList(self, SupArchList):
self.SupArchList = SupArchList
def GetSupArchList(self):
return self.SupArchList
## INF package section
#
#
#
class InfPackageObject():
def __init__(self):
self.Packages = Sdict()
#
# Macro defined in this section should be only used in this section.
#
self.Macros = {}
def SetPackages(self, PackageData, Arch = None):
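        #
        # Each PackageData entry is expected (judging from the unpacking below) to
        # be a tuple of (TokenList, HelpStringObject, CurrentLine), where TokenList
        # is [<PackageFileName>] or [<PackageFileName>, <FeatureFlagExpression>].
        #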
IsValidFileFlag = False
SupArchList = []
for ArchItem in Arch:
#
# Validate Arch
#
if (ArchItem == '' or ArchItem is None):
ArchItem = 'COMMON'
SupArchList.append(ArchItem)
for PackageItem in PackageData:
PackageItemObj = InfPackageItem()
HelpStringObj = PackageItem[1]
CurrentLineOfPackItem = PackageItem[2]
PackageItem = PackageItem[0]
if HelpStringObj is not None:
HelpString = HelpStringObj.HeaderComments + HelpStringObj.TailComments
PackageItemObj.SetHelpString(HelpString)
if len(PackageItem) >= 1:
#
# Validate file exist/format.
#
if IsValidPath(PackageItem[0], ''):
IsValidFileFlag = True
elif IsValidPath(PackageItem[0], GlobalData.gINF_MODULE_DIR):
IsValidFileFlag = True
elif IsValidPath(PackageItem[0], GlobalData.gWORKSPACE):
IsValidFileFlag = True
else:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID%(PackageItem[0]),
File=CurrentLineOfPackItem[2],
Line=CurrentLineOfPackItem[1],
ExtraData=CurrentLineOfPackItem[0])
return False
if IsValidFileFlag:
PackageItemObj.SetPackageName(PackageItem[0])
if len(PackageItem) == 2:
#
# Validate Feature Flag Express
#
if PackageItem[1].strip() == '':
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_MISSING,
File=CurrentLineOfPackItem[2],
Line=CurrentLineOfPackItem[1],
ExtraData=CurrentLineOfPackItem[0])
#
# Validate FFE
#
FeatureFlagRtv = IsValidFeatureFlagExp(PackageItem[1].strip())
if not FeatureFlagRtv[0]:
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID%(FeatureFlagRtv[1]),
File=CurrentLineOfPackItem[2],
Line=CurrentLineOfPackItem[1],
ExtraData=CurrentLineOfPackItem[0])
PackageItemObj.SetFeatureFlagExp(PackageItem[1].strip())
if len(PackageItem) > 2:
#
# Invalid format of Package statement
#
Logger.Error("InfParser",
ToolError.FORMAT_INVALID,
ST.ERR_INF_PARSER_PACKAGE_SECTION_CONTENT_ERROR,
File=CurrentLineOfPackItem[2],
Line=CurrentLineOfPackItem[1],
ExtraData=CurrentLineOfPackItem[0])
PackageItemObj.SetSupArchList(SupArchList)
#
            # Determine whether the package file name is duplicated. Follow the rule below:
#
# A package filename must not be duplicated within a [Packages]
# section. Package filenames may appear in multiple architectural
# [Packages] sections. A package filename listed in an
# architectural [Packages] section must not be listed in the common
# architectural [Packages] section.
#
# NOTE: This check will not report error now.
#
for Item in self.Packages:
if Item.GetPackageName() == PackageItemObj.GetPackageName():
ItemSupArchList = Item.GetSupArchList()
for ItemArch in ItemSupArchList:
for PackageItemObjArch in SupArchList:
if ItemArch == PackageItemObjArch:
#
# ST.ERR_INF_PARSER_ITEM_DUPLICATE
#
pass
if ItemArch.upper() == 'COMMON' or PackageItemObjArch.upper() == 'COMMON':
#
# ST.ERR_INF_PARSER_ITEM_DUPLICATE_COMMON
#
pass
if (PackageItemObj) in self.Packages:
PackageList = self.Packages[PackageItemObj]
PackageList.append(PackageItemObj)
self.Packages[PackageItemObj] = PackageList
else:
PackageList = []
PackageList.append(PackageItemObj)
self.Packages[PackageItemObj] = PackageList
return True
def GetPackages(self, Arch = None):
if Arch is None:
return self.Packages
| edk2-master | BaseTools/Source/Python/UPT/Object/Parser/InfPackagesObject.py |