Bob,

Given that this is a new feature that needs a lot of review and discussion, can it be moved over to the edk2-basetools project? Since the Python BaseTools RFC was accepted months ago and the CI system was updated to use those artifacts (instead of the source), that is where BaseTools/Source/Python should be enhanced going forward.



A few other comments.

There are some common types already supported in Python (EFI_TIME, variables, authenticated variables) here:

https://github.com/tianocore/edk2-pytool-library/blob/5b2dbd7b315743caa626c1a4657c642d491ea8c3/edk2toollib/uefi/authenticated_variables_structure_support.py

https://github.com/tianocore/edk2-pytool-library/blob/5b2dbd7b315743caa626c1a4657c642d491ea8c3/edk2toollib/uefi/edk2/variable_format.py

I also don't understand the reason for doing all of this in ctypes, essentially writing C code in Python. We have found that developing in native Python, and converting to/from a binary layout only at the point of binary serialization, is a much better experience.

Thanks
Sean




On 8/13/2021 4:45 AM, Bob Feng wrote:
REF: https://bugzilla.tianocore.org/show_bug.cgi?id=3562

Create a new build option to enable vfrcompile to generate Json
format EFI variable information file and read it to generate
the EFI variable default value binary file.

Signed-off-by: Bob Feng <bob.c.f...@intel.com>
Cc: Liming Gao <gaolim...@byosoft.com.cn>
Cc: Yuwei Chen <yuwei.c...@intel.com>
---
  BaseTools/Source/Python/AutoGen/DataPipe.py   |   2 +
  .../Source/Python/AutoGen/GenDefaultVar.py    | 498 ++++++++++++++++++
  .../Source/Python/AutoGen/ModuleAutoGen.py    |   9 +
  .../Python/AutoGen/ModuleAutoGenHelper.py     |   4 +
  BaseTools/Source/Python/Common/GlobalData.py  |   5 +
  BaseTools/Source/Python/build/build.py        |  18 +
  BaseTools/Source/Python/build/buildoptions.py |   1 +
  7 files changed, 537 insertions(+)
  create mode 100644 BaseTools/Source/Python/AutoGen/GenDefaultVar.py

diff --git a/BaseTools/Source/Python/AutoGen/DataPipe.py 
b/BaseTools/Source/Python/AutoGen/DataPipe.py
index 86ac2b928d9c..fa0c36b98f21 100755
--- a/BaseTools/Source/Python/AutoGen/DataPipe.py
+++ b/BaseTools/Source/Python/AutoGen/DataPipe.py
@@ -165,5 +165,7 @@ class MemoryDataPipe(DataPipe):
          self.DataContainer = {"BinCacheSource":GlobalData.gBinCacheSource}

          self.DataContainer = {"BinCacheDest":GlobalData.gBinCacheDest}

          self.DataContainer = 
{"EnableGenfdsMultiThread":GlobalData.gEnableGenfdsMultiThread}

+

+        self.DataContainer = {"GenDefaultVarBin": GlobalData.gGenDefaultVarBin}

diff --git a/BaseTools/Source/Python/AutoGen/GenDefaultVar.py 
b/BaseTools/Source/Python/AutoGen/GenDefaultVar.py
new file mode 100644
index 000000000000..b82cce18ed26
--- /dev/null
+++ b/BaseTools/Source/Python/AutoGen/GenDefaultVar.py
@@ -0,0 +1,498 @@
+import json

+from ctypes import *

+import re

+import copy

+from struct import unpack

+import os

+

class GUID(Structure):
    """ctypes mirror of EFI_GUID: one 32-bit, two 16-bit fields and 8 bytes."""
    _fields_ = [
        ('Guid1',            c_uint32),
        ('Guid2',            c_uint16),
        ('Guid3',            c_uint16),
        ('Guid4',            ARRAY(c_uint8, 8)),
    ]

    def from_list(self, listformat):
        """Populate from an 11-element list: [u32, u16, u16, byte0..byte7]."""
        self.Guid1 = listformat[0]
        self.Guid2 = listformat[1]
        self.Guid3 = listformat[2]
        for i in range(8):
            self.Guid4[i] = listformat[i + 3]

    def __eq__(self, otherguid):
        # Bug fix: the original defined __cmp__ (never invoked by Python 3)
        # and returned 1 for ANY GUID operand before comparing a single
        # field, so two different GUIDs compared as equal.  Implement real
        # field-wise equality instead.
        if not isinstance(otherguid, GUID):
            return NotImplemented
        return (self.Guid1 == otherguid.Guid1
                and self.Guid2 == otherguid.Guid2
                and self.Guid3 == otherguid.Guid3
                and all(self.Guid4[i] == otherguid.Guid4[i] for i in range(8)))

    # Defining __eq__ would otherwise clear __hash__; keep the default
    # identity hash so existing instances stay usable as before.
    __hash__ = Structure.__hash__

+

+

class TIME(Structure):
    """ctypes mirror of EFI_TIME (the UEFI timestamp layout)."""
    _fields_ = [
        ('Year',             c_uint16),
        ('Month',            c_uint8),
        ('Day',              c_uint8),
        ('Hour',             c_uint8),
        ('Minute',           c_uint8),
        ('Second',           c_uint8),
        ('Pad1',             c_uint8),
        ('Nanosecond',       c_uint32),
        ('TimeZone',         c_uint16),
        ('Daylight',         c_uint8),
        ('Pad2',             c_uint8),
    ]

    def __init__(self):
        super().__init__()
        # Zero every field explicitly; this matches the ctypes default and
        # the original per-field assignments.
        for field_name, _field_type in self._fields_:
            setattr(self, field_name, 0)

+

+

# Signature GUIDs that identify the variable-store format: plain variables
# vs. authenticated variables (values match the edk2 C definitions).
EFI_VARIABLE_GUID = [0xddcf3616, 0x3275, 0x4164,
                     0x98, 0xb6, 0xfe, 0x85, 0x70, 0x7f, 0xfe, 0x7d]
EFI_AUTHENTICATED_VARIABLE_GUID = [
    0xaaf32c78, 0x947b, 0x439a, 0xa1, 0x80, 0x2e, 0x14, 0x4e, 0xc3, 0x77, 0x92]

# Pre-built GUID instances used when emitting VARIABLE_STORE_HEADER.
AuthVarGuid = GUID()
AuthVarGuid.from_list(EFI_AUTHENTICATED_VARIABLE_GUID)
VarGuid = GUID()
VarGuid.from_list(EFI_VARIABLE_GUID)

# Variable Store Header Format.
VARIABLE_STORE_FORMATTED = 0x5a
# Variable Store Header State.
VARIABLE_STORE_HEALTHY = 0xfe

+

+

class VARIABLE_STORE_HEADER(Structure):
    # Header at the front of a variable store region: Signature selects the
    # store flavour (AuthVarGuid / VarGuid) and Size covers the whole store
    # including this header.
    _fields_ = [
        ('Signature',                GUID),
        ('Size',                     c_uint32),
        ('Format',                   c_uint8),
        ('State',                    c_uint8),
        ('Reserved',                 c_uint16),
        ('Reserved1',                c_uint32),
    ]

+

+

+# Variable data start flag.

+VARIABLE_DATA = 0x55AA

+

+# Variable State flags.

+VAR_IN_DELETED_TRANSITION = 0xfe

+VAR_DELETED = 0xfd

+VAR_HEADER_VALID_ONLY = 0x7f

+VAR_ADDED = 0x3f

+

+

class VARIABLE_HEADER(Structure):
    # Header of one (non-authenticated) variable record; StartId holds
    # VARIABLE_DATA and State one of the VAR_* flags above.  NameSize and
    # DataSize give the byte lengths of the name/data that follow.
    _fields_ = [
        ('StartId',                  c_uint16),
        ('State',                    c_uint8),
        ('Reserved',                 c_uint8),
        ('Attributes',               c_uint32),
        ('NameSize',                 c_uint32),
        ('DataSize',                 c_uint32),
        ('VendorGuid',               GUID),
    ]

+

+

class AUTHENTICATED_VARIABLE_HEADER(Structure):
    """Header of one authenticated variable record (used by Variable.pack)."""
    # Bug fix: ctypes only honours _pack_ when it is assigned BEFORE
    # _fields_; the original set it after _fields_, where it has no effect.
    _pack_ = 1
    _fields_ = [
        ('StartId',                  c_uint16),
        ('State',                    c_uint8),
        ('Reserved',                 c_uint8),
        ('Attributes',               c_uint32),
        ('MonotonicCount',           c_uint64),
        ('TimeStamp',                TIME),
        ('PubKeyIndex',              c_uint32),
        ('NameSize',                 c_uint32),
        ('DataSize',                 c_uint32),
        ('VendorGuid',               GUID),
    ]

+

+

+# Alignment of Variable Data Header in Variable Store region.

+HEADER_ALIGNMENT = 4

+

+

class DEFAULT_INFO(Structure):
    # Identifies which default store / board a DEFAULT_DATA image belongs to.
    _fields_ = [
        ('DefaultId',                c_uint16),
        ('BoardId',                  c_uint16),
    ]

+

+

class DEFAULT_DATA(Structure):
    # Prefix of the default variable binary; HeaderSize is sizeof(DEFAULT_DATA).
    _fields_ = [
        ('HeaderSize',               c_uint16),
        ('DefaultInfo',              DEFAULT_INFO),
    ]

+

class DELTA_DATA(Structure):
    """One byte-level difference against the default store: (offset, value)."""
    # Bug fix: _pack_ must be assigned BEFORE _fields_ for ctypes to honour
    # it.  The original set it afterwards, so the record serialised as 4
    # padded bytes instead of the intended 3-byte packed layout.
    _pack_ = 1
    _fields_ = [
        ('Offset', c_uint16),
        ('Value', c_uint8),
    ]

+

+array_re = re.compile(

+    "(?P<mType>[a-z_A-Z][a-z_A-Z0-9]*)\[(?P<mSize>[1-9][0-9]*)\]")

+

+

class VarField():
    """One leaf field of a variable default value: offset, value and size."""

    # Byte width -> UEFI integer type name.
    _SIZE_TO_TYPE = {1: "UINT8", 2: "UINT16", 4: "UINT32", 8: "UINT64"}

    def __init__(self):
        self.Offset = 0
        self.Value = 0
        self.Size = 0

    @property
    def Type(self):
        # Any unrecognised width falls back to UINT8, exactly as the
        # original if-chain did.
        return self._SIZE_TO_TYPE.get(self.Size, "UINT8")

+

+

# Sizes in bytes of the basic VFR/UEFI scalar types.
BASIC_TYPE = {
    "BOOLEAN": 1,
    "UINT8": 1,
    "UINT16": 2,
    "UINT32": 4,
    "UINT64": 8
}


class CStruct():
    """Expands a JSON C-struct type description into flat leaf fields.

    typedefs maps a struct type name to a list of member dicts, each with
    'Name', 'Type' and 'Offset' keys (as emitted in the vfrcompile
    --variable JSON output).
    """

    # Recognises array member types such as "UINT8[4]".
    _ARRAY_RE = re.compile(
        r"(?P<mType>[a-z_A-Z][a-z_A-Z0-9]*)\[(?P<mSize>[1-9][0-9]*)\]")

    def __init__(self, typedefs):
        self.TypeDefs = typedefs
        # ExpandTypes() consumes the stack destructively; deep-copy so
        # TypeDefs stays intact for CalStuctSize().
        self.TypeStack = copy.deepcopy(typedefs)
        self.finalDefs = {}

    def CalStuctSize(self, sType):
        """Return the size in bytes of a basic, array or struct type name."""
        if sType in BASIC_TYPE:
            return BASIC_TYPE[sType]

        rt = 0
        ma = self._ARRAY_RE.match(sType)
        if ma:
            rt += int(ma.group('mSize')) * self.CalStuctSize(ma.group('mType'))
        else:
            for subType in self.TypeDefs[sType]:
                rt += self.CalStuctSize(subType['Type'])
        return rt

    def expend(self, fielditem):
        """Expand one member into its immediate children (array elements or
        sub-struct members), with offsets made absolute."""
        fieldname = fielditem['Name']
        fieldType = fielditem['Type']
        fieldOffset = fielditem['Offset']

        ma = self._ARRAY_RE.match(fieldType)
        if ma:
            mType = ma.group('mType')
            esize = self.CalStuctSize(mType)  # hoisted out of the loop
            return [{"Name": "%s[%d]" % (fieldname, i),
                     "Type": mType,
                     "Offset": fieldOffset + i * esize}
                    for i in range(int(ma.group('mSize')))]
        return [{"Name": "%s.%s" % (fieldname, item['Name']),
                 "Type": item['Type'],
                 "Offset": fieldOffset + item['Offset']}
                for item in self.TypeDefs[fieldType]]

    def ExpandTypes(self):
        """Flatten every type in TypeStack down to basic-typed leaves.

        The result is cached in finalDefs on the first call.
        """
        if not self.finalDefs:
            for datatype in self.TypeStack:
                result = []
                mTypeStack = self.TypeStack[datatype]
                while len(mTypeStack) > 0:
                    item = mTypeStack.pop()
                    # Bug fix: the original tested self.BASIC_TYPE, an
                    # attribute that does not exist (AttributeError at
                    # runtime); BASIC_TYPE is a module-level table.
                    if item['Type'] in BASIC_TYPE:
                        result.append(item)
                    elif item['Type'] == '(null)':
                        # Anonymous/invalid member emitted by the compiler.
                        continue
                    else:
                        for expand_item in self.expend(item):
                            mTypeStack.append(expand_item)
                self.finalDefs[datatype] = result
            # (the original had a stray no-op "self.finalDefs" expression here)
        return self.finalDefs

+

def Get_Occupied_Size(FileLength, alignment):
    """Round FileLength up to the next multiple of alignment."""
    remainder = FileLength % alignment
    if remainder:
        return FileLength + (alignment - remainder)
    return FileLength

+

def Occupied_Size(buffer, alignment):
    """Return buffer padded with NUL bytes to a multiple of alignment."""
    shortfall = -len(buffer) % alignment
    if shortfall:
        buffer += b'\0' * shortfall
    return buffer

+

def PackStruct(cStruct):
    """Serialise a ctypes structure into its raw in-memory byte image."""
    return string_at(addressof(cStruct), sizeof(cStruct))

+

def calculate_delta(default, theother):
    """Return [offset, value] pairs where theother differs from default.

    When the two buffers differ in length no delta can be expressed, so an
    empty list is returned (same contract as the original).
    """
    if len(default) != len(theother):
        return []
    return [[i, b] for i, (a, b) in enumerate(zip(default, theother)) if a != b]

+

class Variable():
    # One NV variable assembled from the VFR compiler's JSON description.
    # serial() builds the per-default-store value bytes and the deltas
    # against default store 0; pack() wraps each value in an
    # AUTHENTICATED_VARIABLE_HEADER record.
    def __init__(self):
        self.mAlign = 1       # alignment from DataStructAttribute
        self.mTotalSize = 1   # total size in bytes of the backing struct
        self.mValue = {}  # {defaultstore: value}
        self.mBin = {}    # {defaultstore: packed variable record bytes}
        self.fields = {}  # {defaultstore: fields (list of VarField)}
        self.delta = {}   # {defaultid: [[offset, value], ...] vs. store 0}
        self.attributes = 0
        self.mType = ''
        self.guid = ''    # C-style "{0x..,0x..,{0x..,..}}" registry text
        self.mName = ''
        self.cDefs = None  # shared CStruct definitions, set by the loader

    @property
    def GuidArray(self):
        # Parse the C-style GUID text into the 11-integer list layout that
        # GUID.from_list() expects (u32, u16, u16, 8 bytes).
        guid_array = []
        guid = self.guid.strip().strip("{").strip("}")
        for item in guid.split(","):
            field = item.strip().strip("{").strip("}")
            guid_array.append(int(field,16))
        return guid_array

    def update_delta_offset(self,base):
        # Rebase every recorded delta offset by `base` (the variable's byte
        # position inside the assembled store).
        for default_id in self.delta:
            for delta_list in self.delta[default_id]:
                delta_list[0] += base

    def pack(self):
        # Wrap each default store's value in an authenticated variable
        # record: header + UTF-16LE name (NUL terminated) + data, with the
        # name+data portion padded to 4 bytes.
        for defaultid in self.mValue:
            var_value = self.mValue[defaultid]
            auth_var = AUTHENTICATED_VARIABLE_HEADER()
            auth_var.StartId = VARIABLE_DATA
            auth_var.State = VAR_ADDED
            auth_var.Reserved = 0x00
            # NOTE(review): Attributes is hard-coded to 0x00000007
            # (NV+BS+RT); self.attributes is parsed but never applied here
            # — confirm this is intended.
            auth_var.Attributes = 0x00000007
            auth_var.MonotonicCount = 0x0
            auth_var.TimeStamp = TIME()
            auth_var.PubKeyIndex = 0x0
            var_name_buffer = self.mName.encode('utf-16le') + b'\0\0'
            auth_var.NameSize = len(var_name_buffer)
            auth_var.DataSize = len(var_value)
            vendor_guid = GUID()
            vendor_guid.from_list(self.GuidArray)
            auth_var.VendorGuid = vendor_guid

            self.mBin[defaultid] = PackStruct(auth_var) + Occupied_Size(var_name_buffer + var_value, 4)

    def TypeCheck(self,data_type, data_size):
        # True when data_size matches the declared width of data_type.
        # NOTE(review): raises KeyError for a type not in BASIC_TYPE —
        # presumably the JSON only ever emits basic types here; verify.
        if BASIC_TYPE[data_type] == data_size:
            return True
        return False

    def ValueToBytes(self,data_type,data_value,data_size):
        # Convert a hex-string value into little-endian bytes of the given
        # basic type via a ctypes round-trip.
        rt = b''
        # A size/type mismatch is only reported, not rejected; the value is
        # still serialised with the declared type's width below.
        if not self.TypeCheck(data_type, data_size):
            print(data_type,data_value,data_size)

        if data_type == "BOOLEAN" or data_type == 'UINT8':
            p = cast(pointer(c_uint8(int(data_value,16))), POINTER(c_char * 1))
            rt = p.contents.raw
        elif data_type == 'UINT16':
            p = cast(pointer(c_uint16(int(data_value,16))), POINTER(c_char * 2))
            rt = p.contents.raw
        elif data_type == 'UINT32':
            p = cast(pointer(c_uint32(int(data_value,16))), POINTER(c_char * 4))
            rt = p.contents.raw
        elif data_type == 'UINT64':
            p = cast(pointer(c_uint64(int(data_value,16))), POINTER(c_char * 8))
            rt = p.contents.raw

        return rt

    def serial(self):
        # Build each default store's value byte-by-byte: known fields are
        # serialised at their offsets, every uncovered byte is zero-filled.
        for defaultstore in self.fields:
            vValue = b''
            vfields = {vf.Offset: vf for vf in self.fields[defaultstore]}
            i = 0
            while i < self.mTotalSize:
                if i in vfields:
                    vfield = vfields[i]
                    vValue += self.ValueToBytes(vfield.Type, vfield.Value,vfield.Size)
                    i += vfield.Size
                else:
                    vValue += self.ValueToBytes('UINT8','0x00',1)
                    i += 1

            self.mValue[defaultstore] = vValue
        # NOTE(review): assumes default store 0 is always present; a
        # variable without it raises KeyError here — confirm the JSON
        # guarantees store 0.
        standard_default = self.mValue[0]

        # Record byte-level deltas of every other default store against
        # the standard (id 0) value.
        for defaultid in self.mValue:
            if defaultid == 0:
                continue
            others_default = self.mValue[defaultid]

            self.delta.setdefault(defaultid, []).extend(calculate_delta(
                standard_default, others_default))

+

class DefaultVariableGenerator():
    """Builds default-variable binaries from vfrcompile --variable JSON.

    LoadNvVariableInfo() parses the JSON descriptions into Variable
    objects; PackDefaultData() emits the default (id 0) store image and
    PackDeltaData() the per-default-id delta blobs.
    """

    def __init__(self):
        # Fully serialised Variable objects, in load order.
        self.NvVarInfo = []

    def LoadNvVariableInfo(self, VarInfoFilelist):
        """Parse every JSON file in VarInfoFilelist and populate NvVarInfo."""
        VarDataDict = {}
        DataStruct = {}
        VarDefine = {}
        VarAttributes = {}
        for VarInfoFile in VarInfoFilelist:
            with open(VarInfoFile.strip(), "r") as fd:
                data = json.load(fd)

            # Robustness fix: tolerate files missing any of these sections
            # (the original passed None to dict.update / iterated None).
            DataStruct.update(data.get("DataStruct") or {})
            Data = data.get("Data") or []
            VarDefine.update(data.get("VarDefine") or {})
            VarAttributes.update(data.get("DataStructAttribute") or {})

            for vardata in Data:
                if vardata['VendorGuid'] == 'NA':
                    continue
                VarDataDict.setdefault(
                    (vardata['VendorGuid'], vardata["VarName"]), []).append(vardata)

        cStructDefs = CStruct(DataStruct)
        for guid, varname in VarDataDict:
            v = Variable()
            v.guid = guid
            vardef = VarDefine.get(varname)
            if vardef is None:
                # The define may be keyed by variable name or by its C
                # type; fall back to a search on the Type field, and skip
                # the variable entirely when neither matches.
                for var in VarDefine:
                    if VarDefine[var]['Type'] == varname:
                        vardef = VarDefine[var]
                        break
                else:
                    continue
            v.attributes = vardef['Attributes']
            v.mType = vardef['Type']
            v.mAlign = VarAttributes[v.mType]['Alignment']
            v.mTotalSize = VarAttributes[v.mType]['TotalSize']
            v.Struct = DataStruct[v.mType]
            v.mName = varname
            v.cDefs = cStructDefs
            for fieldinfo in VarDataDict.get((guid, varname), []):
                vf = VarField()
                vf.Offset = fieldinfo['Offset']
                vf.Value = fieldinfo['Value']
                vf.Size = fieldinfo['Size']
                v.fields.setdefault(
                    int(fieldinfo['DefaultStore'], 10), []).append(vf)
            v.serial()
            v.pack()
            self.NvVarInfo.append(v)

    def PackDeltaData(self):
        """Return {default_id: packed DELTA_DATA records} for non-zero ids."""
        default_id_set = set()
        for v in self.NvVarInfo:
            default_id_set |= set(v.mBin.keys())

        # Bug fix: the original called remove(0), which raises KeyError
        # when no variable carries the standard default store; discard()
        # is a safe no-op in that case.
        default_id_set.discard(0)
        delta_buff_set = {}
        for defaultid in default_id_set:
            delta_buff = b''
            for v in self.NvVarInfo:
                for delta in v.delta.get(defaultid, []):
                    delta_data = DELTA_DATA()
                    delta_data.Offset, delta_data.Value = delta
                    delta_buff += PackStruct(delta_data)
            delta_buff_set[defaultid] = delta_buff

        return delta_buff_set

    def PackDefaultData(self):
        """Return the default (id 0) variable store image as bytes."""
        default_data_header = DEFAULT_DATA()
        default_data_header.HeaderSize = sizeof(DEFAULT_DATA)
        default_data_header.DefaultInfo.DefaultId = 0x0
        default_data_header.DefaultInfo.BoardId = 0x0
        default_data_header_buffer = PackStruct(default_data_header)

        variable_store = VARIABLE_STORE_HEADER()
        variable_store.Signature = AuthVarGuid

        # Store size covers both headers (4-byte aligned) plus every
        # 4-byte-aligned variable record.
        variable_store_size = Get_Occupied_Size(
            sizeof(DEFAULT_DATA) + sizeof(VARIABLE_STORE_HEADER), 4)
        for v in self.NvVarInfo:
            variable_store_size += Get_Occupied_Size(len(v.mBin[0]), 4)

        variable_store.Size = variable_store_size
        variable_store.Format = VARIABLE_STORE_FORMATTED
        variable_store.State = VARIABLE_STORE_HEALTHY
        variable_store.Reserved = 0x0
        # Bug fix: the original assigned Reserved2, which is not a declared
        # field (the struct member is Reserved1); ctypes silently created a
        # stray Python attribute instead of touching the header.
        variable_store.Reserved1 = 0x0

        variable_storage_header_buffer = PackStruct(variable_store)

        variable_data = b''
        v_offset = 0
        for v in self.NvVarInfo:
            # Rebase each variable's delta offsets onto its position in
            # the store so DELTA_DATA.Offset is store-relative.
            v.update_delta_offset(v_offset)
            variable_data += Occupied_Size(v.mBin[0], 4)
            v_offset += Get_Occupied_Size(len(v.mBin[0]), 4)

        final_buff = Occupied_Size(
            default_data_header_buffer + variable_storage_header_buffer,
            4) + variable_data

        return final_buff

    def generate(self, jsonlistfile, output_folder):
        """Read the JSON file list and write default.bin plus per-id
        defaultdelta_<id>.bin files into output_folder.

        Best effort: failures are reported to stdout, not raised, so the
        surrounding build continues (same contract as the original).
        """
        if not os.path.exists(jsonlistfile):
            return
        if not os.path.exists(output_folder):
            os.makedirs(output_folder)
        try:
            with open(jsonlistfile, "r") as fd:
                filelist = fd.readlines()
            genVar = DefaultVariableGenerator()
            genVar.LoadNvVariableInfo(filelist)
            with open(os.path.join(output_folder, "default.bin"), "wb") as fd:
                fd.write(genVar.PackDefaultData())

            delta_set = genVar.PackDeltaData()
            for default_id in delta_set:
                with open(os.path.join(output_folder,
                                       "defaultdelta_%s.bin" % default_id),
                          "wb") as fd:
                    fd.write(delta_set[default_id])
        # Bug fix: the original bare "except:" also swallowed
        # KeyboardInterrupt/SystemExit and discarded the failure reason;
        # keep the best-effort contract but report what went wrong.
        except Exception as e:
            print("generate varbin file failed: %s" % e)

+

+

+

diff --git a/BaseTools/Source/Python/AutoGen/ModuleAutoGen.py 
b/BaseTools/Source/Python/AutoGen/ModuleAutoGen.py
index d70b0d7ae828..0daf3352f91b 100755
--- a/BaseTools/Source/Python/AutoGen/ModuleAutoGen.py
+++ b/BaseTools/Source/Python/AutoGen/ModuleAutoGen.py
@@ -433,10 +433,19 @@ class ModuleAutoGen(AutoGen):
      ## Return the directory to store auto-gened source files of the module

      @cached_property

      def DebugDir(self):

          return _MakeDir((self.BuildDir, "DEBUG"))

+

+    @cached_property

+    def DefaultVarJsonFiles(self):

+        rt = []

+        for SrcFile in self.SourceFileList:

+            if SrcFile.Ext.lower() == '.vfr':

+                
rt.append(os.path.join(self.DebugDir,os.path.join(os.path.dirname(SrcFile.File), 
"{}_var.json".format(SrcFile.BaseName))))

+        return rt

+

      ## Return the path of custom file

      @cached_property

      def CustomMakefile(self):

          RetVal = {}

          for Type in self.Module.CustomMakefile:

diff --git a/BaseTools/Source/Python/AutoGen/ModuleAutoGenHelper.py 
b/BaseTools/Source/Python/AutoGen/ModuleAutoGenHelper.py
index 036fdac3d7df..b46d041f58ab 100644
--- a/BaseTools/Source/Python/AutoGen/ModuleAutoGenHelper.py
+++ b/BaseTools/Source/Python/AutoGen/ModuleAutoGenHelper.py
@@ -644,10 +644,14 @@ class PlatformInfo(AutoGenInfo):
                              if Attr != 'PATH':

                                  BuildOptions[ExpandedTool][Attr] += " " + 
mws.handleWsMacro(Value)

                              else:

                                  BuildOptions[ExpandedTool][Attr] = 
mws.handleWsMacro(Value)

+        if self.DataPipe.Get("GenDefaultVarBin"):

+            if BuildOptions.get('VFR',{}).get('FLAGS'):

+                BuildOptions['VFR']['FLAGS'] += " " + "--variable"

+

          return BuildOptions, BuildRuleOrder

      def ApplyLibraryInstance(self,module):

          alldeps = self.DataPipe.Get("DEPS")

          if alldeps is None:

diff --git a/BaseTools/Source/Python/Common/GlobalData.py 
b/BaseTools/Source/Python/Common/GlobalData.py
index 61ab3f7e24cd..c68ca8fbb3f7 100755
--- a/BaseTools/Source/Python/Common/GlobalData.py
+++ b/BaseTools/Source/Python/Common/GlobalData.py
@@ -88,10 +88,15 @@ gIgnoreSource = False
  #

  gFdfParser = None

  BuildOptionPcd = []

+#

+# Build flag for generate default variable binary file

+#

+gGenDefaultVarBin = False

+

  #

  # Mixed PCD name dict

  #

  MixedPcd = {}

diff --git a/BaseTools/Source/Python/build/build.py 
b/BaseTools/Source/Python/build/build.py
index 02b489892422..2f6fc6b20faf 100755
--- a/BaseTools/Source/Python/build/build.py
+++ b/BaseTools/Source/Python/build/build.py
@@ -750,10 +750,11 @@ class Build():
          GlobalData.gUseHashCache = BuildOptions.UseHashCache

          GlobalData.gBinCacheDest   = BuildOptions.BinCacheDest

          GlobalData.gBinCacheSource = BuildOptions.BinCacheSource

          GlobalData.gEnableGenfdsMultiThread = not 
BuildOptions.NoGenfdsMultiThread

          GlobalData.gDisableIncludePathCheck = 
BuildOptions.DisableIncludePathCheck

+        GlobalData.gGenDefaultVarBin = BuildOptions.GenDefaultVarBin

          if GlobalData.gBinCacheDest and not GlobalData.gUseHashCache:

              EdkLogger.error("build", OPTION_NOT_SUPPORTED, 
ExtraData="--binary-destination must be used together with --hash.")

          if GlobalData.gBinCacheSource and not GlobalData.gUseHashCache:

@@ -1459,10 +1460,14 @@ class Build():
              self.BuildModules = []

              return True

          # genfds

          if Target == 'fds':

+            if GlobalData.gGenDefaultVarBin:

+                from AutoGen.GenDefaultVar import DefaultVariableGenerator

+                variable_info_filelist = 
os.path.join(AutoGenObject.BuildDir,"variable_info_filelist.txt")

+                
DefaultVariableGenerator().generate(variable_info_filelist,AutoGenObject.FvDir)

              if GenFdsApi(AutoGenObject.GenFdsCommandDict, self.Db):

                  EdkLogger.error("build", COMMAND_FAILURE)

              Threshold = self.GetFreeSizeThreshold()

              if Threshold:

                  self.CheckFreeSizeThreshold(Threshold, AutoGenObject.FvDir)

@@ -2247,10 +2252,19 @@ class Build():
          AutoGenIdFile = 
os.path.join(GlobalData.gConfDirectory,".AutoGenIdFile.txt")

          with open(AutoGenIdFile,"w") as fw:

              fw.write("Arch=%s\n" % "|".join((Wa.ArchList)))

              fw.write("BuildDir=%s\n" % Wa.BuildDir)

              fw.write("PlatformGuid=%s\n" % str(Wa.AutoGenObjectList[0].Guid))

+        variable_info_filelist = 
os.path.join(Wa.BuildDir,"variable_info_filelist.txt")

+        vfr_var_json = []

+        if GlobalData.gGenDefaultVarBin:

+            for ma in self.AllModules:

+                vfr_var_json.extend(ma.DefaultVarJsonFiles)

+            SaveFileOnChange(variable_info_filelist, "\n".join(vfr_var_json), 
False)

+        else:

+            if os.path.exists(variable_info_filelist):

+                os.remove(variable_info_filelist)

          if GlobalData.gBinCacheSource:

              BuildModules.extend(self.MakeCacheMiss)

          elif GlobalData.gUseHashCache and not GlobalData.gBinCacheDest:

              BuildModules.extend(self.PreMakeCacheMiss)

@@ -2359,10 +2373,14 @@ class Build():
                      if self.Fdf:

                          #

                          # Generate FD image if there's a FDF file found

                          #

+                        if GlobalData.gGenDefaultVarBin:

+                            from AutoGen.GenDefaultVar import 
DefaultVariableGenerator

+                            variable_info_filelist = 
os.path.join(Wa.BuildDir,"variable_info_filelist.txt")

+                            
DefaultVariableGenerator().generate(variable_info_filelist,Wa.FvDir)

                          GenFdsStart = time.time()

                          if GenFdsApi(Wa.GenFdsCommandDict, self.Db):

                              EdkLogger.error("build", COMMAND_FAILURE)

                          Threshold = self.GetFreeSizeThreshold()

                          if Threshold:

diff --git a/BaseTools/Source/Python/build/buildoptions.py 
b/BaseTools/Source/Python/build/buildoptions.py
index 39d92cff209d..6886ba7f8eb6 100644
--- a/BaseTools/Source/Python/build/buildoptions.py
+++ b/BaseTools/Source/Python/build/buildoptions.py
@@ -99,7 +99,8 @@ class MyOptionParser():
          Parser.add_option("--hash", action="store_true", dest="UseHashCache", 
default=False, help="Enable hash-based caching during build process.")

          Parser.add_option("--binary-destination", action="store", type="string", 
dest="BinCacheDest", help="Generate a cache of binary files in the specified directory.")

          Parser.add_option("--binary-source", action="store", type="string", 
dest="BinCacheSource", help="Consume a cache of binary files from the specified directory.")

          Parser.add_option("--genfds-multi-thread", action="store_true", 
dest="GenfdsMultiThread", default=True, help="Enable GenFds multi thread to generate ffs file.")

          Parser.add_option("--no-genfds-multi-thread", action="store_true", 
dest="NoGenfdsMultiThread", default=False, help="Disable GenFds multi thread to generate ffs file.")

+        Parser.add_option("--gen-default-variable-bin", action="store_true", 
dest="GenDefaultVarBin", default=False, help="Generate default variable binary file.")

          Parser.add_option("--disable-include-path-check", action="store_true", 
dest="DisableIncludePathCheck", default=False, help="Disable the include path check for outside of 
package.")

          self.BuildOption, self.BuildTarget = Parser.parse_args()



-=-=-=-=-=-=-=-=-=-=-=-
Groups.io Links: You receive all messages sent to this group.
View/Reply Online (#79290): https://edk2.groups.io/g/devel/message/79290
Mute This Topic: https://groups.io/mt/84861462/21656
Group Owner: devel+ow...@edk2.groups.io
Unsubscribe: https://edk2.groups.io/g/devel/unsub [arch...@mail-archive.com]
-=-=-=-=-=-=-=-=-=-=-=-


Reply via email to