summaryrefslogtreecommitdiffstats
path: root/IntelFsp2Pkg/Tools/ConfigEditor
diff options
context:
space:
mode:
Diffstat (limited to 'IntelFsp2Pkg/Tools/ConfigEditor')
-rw-r--r--IntelFsp2Pkg/Tools/ConfigEditor/CommonUtility.py504
-rw-r--r--IntelFsp2Pkg/Tools/ConfigEditor/ConfigEditor.py1499
-rw-r--r--IntelFsp2Pkg/Tools/ConfigEditor/GenYamlCfg.py2252
-rw-r--r--IntelFsp2Pkg/Tools/ConfigEditor/SingleSign.py324
4 files changed, 4579 insertions, 0 deletions
diff --git a/IntelFsp2Pkg/Tools/ConfigEditor/CommonUtility.py b/IntelFsp2Pkg/Tools/ConfigEditor/CommonUtility.py
new file mode 100644
index 0000000000..1229279116
--- /dev/null
+++ b/IntelFsp2Pkg/Tools/ConfigEditor/CommonUtility.py
@@ -0,0 +1,504 @@
+#!/usr/bin/env python
+# @ CommonUtility.py
+# Common utility script
+#
+# Copyright (c) 2016 - 2021, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+##
+
+import os
+import sys
+import shutil
+import subprocess
+import string
+from ctypes import ARRAY, c_char, c_uint16, c_uint32, \
+ c_uint8, Structure, sizeof
+from importlib.machinery import SourceFileLoader
+from SingleSign import single_sign_gen_pub_key
+
+
+# Key types defined should match with cryptolib.h
# Key types defined should match with cryptolib.h
PUB_KEY_TYPE = {
    "RSA": 1,
    "ECC": 2,
    "DSA": 3,
    }

# Signing type schemes defined should match with cryptolib.h
SIGN_TYPE_SCHEME = {
    "RSA_PKCS1": 1,
    "RSA_PSS": 2,
    "ECC": 3,
    "DSA": 4,
    }

# Hash values defined should match with cryptolib.h
HASH_TYPE_VALUE = {
    "SHA2_256": 1,
    "SHA2_384": 2,
    "SHA2_512": 3,
    "SM3_256": 4,
    }

# Reverse lookup: numeric hash id -> hash name string
HASH_VAL_STRING = dict(map(reversed, HASH_TYPE_VALUE.items()))

# Maps plain hash names AND combined key+hash auth types onto the same
# numeric hash identifiers used in the binary headers.
AUTH_TYPE_HASH_VALUE = {
    "SHA2_256": 1,
    "SHA2_384": 2,
    "SHA2_512": 3,
    "SM3_256": 4,
    "RSA2048SHA256": 1,
    "RSA3072SHA384": 2,
    }

# Digest length in bytes for each supported hash algorithm
HASH_DIGEST_SIZE = {
    "SHA2_256": 32,
    "SHA2_384": 48,
    "SHA2_512": 64,
    "SM3_256": 32,
    }
+
+
class PUB_KEY_HDR (Structure):
    """Binary header that wraps raw public-key bytes ('PUBK' blob)."""
    _pack_ = 1
    _fields_ = [
        ('Identifier', ARRAY(c_char, 4)), # signature ('P', 'U', 'B', 'K')
        ('KeySize', c_uint16),            # Length of Public Key
        ('KeyType', c_uint8),             # RSA or ECC
        ('Reserved', ARRAY(c_uint8, 1)),
        ('KeyData', ARRAY(c_uint8, 0)),   # variable-length key bytes follow
        ]

    def __init__(self):
        self.Identifier = b'PUBK'
+
+
class SIGNATURE_HDR (Structure):
    """Binary header that wraps a raw signature blob ('SIGN')."""
    _pack_ = 1
    _fields_ = [
        ('Identifier', ARRAY(c_char, 4)),  # signature 'SIGN'
        ('SigSize', c_uint16),             # length of the signature data
        ('SigType', c_uint8),              # value from SIGN_TYPE_SCHEME
        ('HashAlg', c_uint8),              # value from HASH_TYPE_VALUE
        ('Signature', ARRAY(c_uint8, 0)),  # variable-length signature follows
        ]

    def __init__(self):
        self.Identifier = b'SIGN'
+
+
class LZ_HEADER(Structure):
    """Header prepended to (de)compressed blobs; signature selects algorithm."""
    _pack_ = 1
    _fields_ = [
        ('signature', ARRAY(c_char, 4)),  # b'LZDM' / b'LZ4 ' / b'LZMA'
        ('compressed_len', c_uint32),     # payload length following the header
        ('length', c_uint32),             # original (uncompressed) length
        ('version', c_uint16),
        ('svn', c_uint8),                 # presumably security version number — confirm
        ('attribute', c_uint8)
    ]
    # Signature -> algorithm name as used by compress()/decompress()
    _compress_alg = {
        b'LZDM': 'Dummy',
        b'LZ4 ': 'Lz4',
        b'LZMA': 'Lzma',
    }
+
+
def print_bytes(data, indent=0, offset=0, show_ascii=False):
    """Print a hex dump of 'data', 16 bytes per line.

    :param data:       any bytes-like object
    :param indent:     number of leading spaces per output line
    :param offset:     value added to the printed line offsets
    :param show_ascii: when True, append a printable-ASCII column
    """
    bytes_per_line = 16
    printable = ' ' + string.ascii_letters + string.digits + string.punctuation
    # Hex column is padded to 3 chars per byte ('XX ').
    str_fmt = '{:s}{:04x}: {:%ds} {:s}' % (bytes_per_line * 3)
    data_array = bytearray(data)
    for idx in range(0, len(data_array), bytes_per_line):
        hex_str = ' '.join(
            '%02X' % val for val in data_array[idx:idx + bytes_per_line])
        asc_str = ''.join('%c' % (val if (chr(val) in printable) else '.')
                          for val in data_array[idx:idx + bytes_per_line])
        print(str_fmt.format(
            indent * ' ',
            offset + idx, hex_str,
            ' ' + asc_str if show_ascii else ''))
+
+
def get_bits_from_bytes(bytes, start, length):
    """Extract 'length' bits at bit offset 'start' from a little-endian
    byte sequence and return them as an unsigned integer."""
    if length == 0:
        return 0
    first_byte = start // 8
    last_byte = (start + length - 1) // 8
    # Assemble the covering bytes into one integer, then shift and mask.
    word = int.from_bytes(bytes[first_byte:last_byte + 1], 'little')
    return (word >> (start & 7)) & ((1 << length) - 1)
+
+
def set_bits_to_bytes(bytes, start, length, bvalue):
    """Store the low 'length' bits of 'bvalue' at bit offset 'start' in a
    little-endian mutable byte sequence (in place)."""
    if length == 0:
        return
    first = start // 8
    last = (start + length - 1) // 8
    shift = start & 7
    mask = (1 << length) - 1
    # Read the covering bytes, splice the new field in, write them back.
    word = int.from_bytes(bytes[first:last + 1], 'little')
    word = (word & ~(mask << shift)) | ((bvalue & mask) << shift)
    bytes[first:last + 1] = bytearray(
        word.to_bytes(last + 1 - first, 'little'))
+
+
def value_to_bytes(value, length):
    """Serialize an integer into 'length' little-endian bytes."""
    return value.to_bytes(length, byteorder='little')
+
+
def bytes_to_value(bytes):
    """Interpret a byte sequence as a little-endian unsigned integer."""
    return int.from_bytes(bytes, byteorder='little')
+
+
def value_to_bytearray(value, length):
    """Serialize an integer into a mutable little-endian bytearray of
    'length' bytes.

    The original carried an unreachable duplicate 'return' statement (the
    intervening comment line kept it inside this function's body); that
    dead code has been removed.
    """
    return bytearray(value.to_bytes(length, 'little'))
+
+
def get_aligned_value(value, alignment=4):
    """Round 'value' up to the next multiple of 'alignment'.

    Raises Exception when 'alignment' is not a power of two.
    """
    # A power of two equals 1 shifted by (bit_length - 1).
    if (1 << (alignment.bit_length() - 1)) != alignment:
        raise Exception(
            'Alignment (0x%x) should to be power of 2 !' % alignment)
    return (value + alignment - 1) & ~(alignment - 1)
+
+
def get_padding_length(data_len, alignment=4):
    """Number of pad bytes needed to round 'data_len' up to 'alignment'."""
    return get_aligned_value(data_len, alignment) - data_len
+
+
def get_file_data(file, mode='rb'):
    """Return the entire contents of 'file' (binary by default).

    Uses a context manager so the handle is closed deterministically
    (the original leaked the file object).
    """
    with open(file, mode) as fd:
        return fd.read()
+
+
def gen_file_from_object(file, object):
    """Write the binary blob 'object' to 'file', truncating any old content.

    Uses a context manager so the handle is closed deterministically
    (the original leaked the file object).
    """
    with open(file, 'wb') as fd:
        fd.write(object)
+
+
def gen_file_with_size(file, size):
    """Create 'file' filled with 'size' 0xFF bytes (flash-erase pattern).

    Uses a context manager so the handle is closed deterministically
    (the original leaked the file object).
    """
    with open(file, 'wb') as fd:
        fd.write(b'\xFF' * size)
+
+
def check_files_exist(base_name_list, dir='', ext=''):
    """Return True when every name in 'base_name_list', suffixed with
    'ext', exists under directory 'dir'."""
    return all(
        os.path.exists(os.path.join(dir, name + ext))
        for name in base_name_list)
+
+
def load_source(name, filepath):
    """Import the Python file at 'filepath' as a module named 'name' and
    return the module object."""
    return SourceFileLoader(name, filepath).load_module()
+
+
def get_openssl_path():
    """Locate the openssl executable.

    On Windows, seed OPENSSL_PATH / OPENSSL_CONF from the conventional
    C:\\Openssl install locations when unset, then build the .exe path.
    Elsewhere, rely on a PATH lookup (may return None).
    """
    if os.name != 'nt':
        # Get openssl path for Linux cases
        return shutil.which('openssl')

    if 'OPENSSL_PATH' not in os.environ:
        default_dir = "C:\\Openssl\\bin\\"
        os.environ['OPENSSL_PATH'] = (
            default_dir if os.path.exists(default_dir) else "C:\\Openssl\\")
    if 'OPENSSL_CONF' not in os.environ:
        default_cfg = "C:\\Openssl\\openssl.cfg"
        if os.path.exists(default_cfg):
            os.environ['OPENSSL_CONF'] = default_cfg
    return os.path.join(os.environ.get('OPENSSL_PATH', ''), 'openssl.exe')
+
+
def run_process(arg_list, print_cmd=False, capture_out=False):
    """Run an external command given as an argument list.

    Returns the captured stdout (decoded) when 'capture_out' is True,
    otherwise ''.  On failure the command line is printed (unless it was
    already echoed) and either sys.exit(1) is called (non-zero exit code)
    or the underlying exception is re-raised.
    """
    sys.stdout.flush()
    # On Windows, append '.exe' when the suffix-less program file exists.
    if os.name == 'nt' and os.path.splitext(arg_list[0])[1] == '' and \
            os.path.exists(arg_list[0] + '.exe'):
        arg_list[0] += '.exe'
    if print_cmd:
        print(' '.join(arg_list))

    output = ''
    error = None
    failed = False
    try:
        if capture_out:
            output = subprocess.check_output(arg_list).decode()
        else:
            failed = bool(subprocess.call(arg_list))
    except Exception as ex:
        failed = True
        error = ex

    if failed:
        if not print_cmd:
            print('Error in running process:\n %s' % ' '.join(arg_list))
        if error is None:
            sys.exit(1)
        raise error

    return output
+
+
+# Adjust hash type algorithm based on Public key file
+def adjust_hash_type(pub_key_file):
+ key_type = get_key_type(pub_key_file)
+ if key_type == 'RSA2048':
+ hash_type = 'SHA2_256'
+ elif key_type == 'RSA3072':
+ hash_type = 'SHA2_384'
+ else:
+ hash_type = None
+
+ return hash_type
+
+
def rsa_sign_file(
        priv_key, pub_key, hash_type, sign_scheme,
        in_file, out_file, inc_dat=False, inc_key=False):
    """Assemble a signed image in 'out_file'.

    Resulting layout: [payload from in_file (if inc_dat)] + SIGNATURE_HDR +
    signature bytes + [PUB_KEY_HDR-wrapped public key (if inc_key)].

    NOTE(review): nothing in this function generates a signature —
    'out_file' is read as if it already contains the raw signature, and
    the commented-out single_sign_file() line below suggests the signing
    step is missing here (or performed by the caller) — confirm.
    """

    bins = bytearray()
    if inc_dat:
        bins.extend(get_file_data(in_file))


# def single_sign_file(priv_key, hash_type, sign_scheme, in_file, out_file):

    out_data = get_file_data(out_file)

    # Describe the signature with a SIGN header (type/alg per cryptolib.h).
    sign = SIGNATURE_HDR()
    sign.SigSize = len(out_data)
    sign.SigType = SIGN_TYPE_SCHEME[sign_scheme]
    sign.HashAlg = HASH_TYPE_VALUE[hash_type]

    bins.extend(bytearray(sign) + out_data)
    if inc_key:
        key = gen_pub_key(priv_key, pub_key)
        bins.extend(key)

    # Only rewrite the file when something was actually prepended/appended.
    if len(bins) != len(out_data):
        gen_file_from_object(out_file, bins)
+
+
def get_key_type(in_key):
    """Return a key-type string such as 'RSA2048' for a key file or key id.

    'in_key' may be a PUB_KEY_HDR binary file, another key file, or a key
    id; inputs without the 'PUBK' magic are wrapped via gen_pub_key() first.
    """

    # Check in_key is file or key Id
    if not os.path.exists(in_key):
        key = bytearray(gen_pub_key(in_key))
    else:
        # Check for public key in binary format.
        key = bytearray(get_file_data(in_key))

    pub_key_hdr = PUB_KEY_HDR.from_buffer(key)
    if pub_key_hdr.Identifier != b'PUBK':
        # Not already wrapped: generate the wrapped public key from in_key.
        pub_key = gen_pub_key(in_key)
        pub_key_hdr = PUB_KEY_HDR.from_buffer(pub_key)

    # Reverse-map the numeric KeyType back to its name ('RSA'/'ECC'/'DSA').
    key_type = next(
        (key for key,
         value in PUB_KEY_TYPE.items() if value == pub_key_hdr.KeyType))
    # NOTE(review): 'KeySize - 4' implies the stored blob carries a 4-byte
    # prefix (presumably the exponent) before the modulus — confirm.
    return '%s%d' % (key_type, (pub_key_hdr.KeySize - 4) * 8)
+
+
def get_auth_hash_type(key_type, sign_scheme):
    """Map a (key type, signing scheme) pair to its (auth type, hash type)
    string pair; unsupported combinations yield ('', '')."""
    lookup = {
        ('RSA2048', 'RSA_PKCS1'): ('RSA2048_PKCS1_SHA2_256', 'SHA2_256'),
        ('RSA3072', 'RSA_PKCS1'): ('RSA3072_PKCS1_SHA2_384', 'SHA2_384'),
        ('RSA2048', 'RSA_PSS'): ('RSA2048_PSS_SHA2_256', 'SHA2_256'),
        ('RSA3072', 'RSA_PSS'): ('RSA3072_PSS_SHA2_384', 'SHA2_384'),
    }
    return lookup.get((key_type, sign_scheme), ('', ''))
+
+
# def single_sign_gen_pub_key(in_key, pub_key_file=None):


def gen_pub_key(in_key, pub_key=None):
    """Build a PUB_KEY_HDR-wrapped public key for 'in_key'.

    The raw key bytes come from SingleSign.single_sign_gen_pub_key().
    When 'pub_key' is a path, the wrapped key is also written there.
    Returns the wrapped key as a bytearray.
    """

    keydata = single_sign_gen_pub_key(in_key, pub_key)

    publickey = PUB_KEY_HDR()
    publickey.KeySize = len(keydata)
    # NOTE(review): KeyType is hard-coded to RSA even though PUB_KEY_TYPE
    # also defines ECC/DSA — confirm only RSA keys are expected here.
    publickey.KeyType = PUB_KEY_TYPE['RSA']

    key = bytearray(publickey) + keydata

    if pub_key:
        gen_file_from_object(pub_key, key)

    return key
+
+
def decompress(in_file, out_file, tool_dir=''):
    """Decompress an LZ_HEADER-prefixed 'in_file' into 'out_file'.

    Dummy ('LZDM') payloads are copied as-is.  Lz4/Lzma payloads are fed
    to the external <Alg>Compress tool found in 'tool_dir'; for Lz4 a
    fallback to the python 'lz4' package is attempted when the tool is
    unavailable.  Raises on a missing input file or unknown signature.
    """
    if not os.path.isfile(in_file):
        raise Exception("Invalid input file '%s' !" % in_file)

    # Remove the Lz Header
    fi = open(in_file, 'rb')
    di = bytearray(fi.read())
    fi.close()

    lz_hdr = LZ_HEADER.from_buffer(di)
    offset = sizeof(lz_hdr)
    if lz_hdr.signature == b"LZDM" or lz_hdr.compressed_len == 0:
        # Dummy compression (or empty payload): data after the header is raw.
        fo = open(out_file, 'wb')
        fo.write(di[offset:offset + lz_hdr.compressed_len])
        fo.close()
        return

    temp = os.path.splitext(out_file)[0] + '.tmp'
    if lz_hdr.signature == b"LZMA":
        alg = "Lzma"
    elif lz_hdr.signature == b"LZ4 ":
        alg = "Lz4"
    else:
        raise Exception("Unsupported compression '%s' !" % lz_hdr.signature)

    # Write the raw compressed payload to a temp file for the external tool.
    fo = open(temp, 'wb')
    fo.write(di[offset:offset + lz_hdr.compressed_len])
    fo.close()

    compress_tool = "%sCompress" % alg
    if alg == "Lz4":
        try:
            cmdline = [
                os.path.join(tool_dir, compress_tool),
                "-d",
                "-o", out_file,
                temp]
            run_process(cmdline, False, True)
        except Exception:
            # External tool failed/missing: fall back to the python module.
            msg_string = "Could not find/use CompressLz4 tool, " \
                "trying with python lz4..."
            print(msg_string)
            try:
                import lz4.block
                if lz4.VERSION != '3.1.1':
                    msg_string = "Recommended lz4 module version " \
                        "is '3.1.1'," + lz4.VERSION \
                        + " is currently installed."
                    print(msg_string)
            except ImportError:
                msg_string = "Could not import lz4, use " \
                    "'python -m pip install lz4==3.1.1' " \
                    "to install it."
                print(msg_string)
                exit(1)
            decompress_data = lz4.block.decompress(get_file_data(temp))
            with open(out_file, "wb") as lz4bin:
                lz4bin.write(decompress_data)
    else:
        cmdline = [
            os.path.join(tool_dir, compress_tool),
            "-d",
            "-o", out_file,
            temp]
        run_process(cmdline, False, True)
    os.remove(temp)
+
+
def compress(in_file, alg, svn=0, out_path='', tool_dir=''):
    """Compress 'in_file' with 'alg' and prepend an LZ_HEADER.

    'alg' is one of 'Lzma', 'Tiano', 'Lz4', 'Dummy' (plain copy).  Output
    goes to 'out_path' (file or directory) or next to the input with a
    '.lz' suffix.  Returns the output file path.
    """
    if not os.path.isfile(in_file):
        raise Exception("Invalid input file '%s' !" % in_file)

    basename, ext = os.path.splitext(os.path.basename(in_file))
    if out_path:
        if os.path.isdir(out_path):
            out_file = os.path.join(out_path, basename + '.lz')
        else:
            out_file = os.path.join(out_path)
    else:
        out_file = os.path.splitext(in_file)[0] + '.lz'

    if alg == "Lzma":
        sig = "LZMA"
    elif alg == "Tiano":
        sig = "LZUF"
    elif alg == "Lz4":
        sig = "LZ4 "
    elif alg == "Dummy":
        sig = "LZDM"
    else:
        raise Exception("Unsupported compression '%s' !" % alg)

    in_len = os.path.getsize(in_file)
    if in_len > 0:
        # NOTE(review): 'LZUF' (Tiano) has no branch below, so for that
        # signature 'compress_data' stays unset and a NameError follows —
        # confirm whether Tiano compression is actually unused.
        compress_tool = "%sCompress" % alg
        if sig == "LZDM":
            # Dummy: store the payload uncompressed.
            shutil.copy(in_file, out_file)
            compress_data = get_file_data(out_file)
        elif sig == "LZ4 ":
            try:
                cmdline = [
                    os.path.join(tool_dir, compress_tool),
                    "-e",
                    "-o", out_file,
                    in_file]
                run_process(cmdline, False, True)
                compress_data = get_file_data(out_file)
            except Exception:
                # External tool failed/missing: fall back to python lz4.
                msg_string = "Could not find/use CompressLz4 tool, " \
                    "trying with python lz4..."
                print(msg_string)
                try:
                    import lz4.block
                    if lz4.VERSION != '3.1.1':
                        msg_string = "Recommended lz4 module version " \
                            "is '3.1.1', " + lz4.VERSION \
                            + " is currently installed."
                        print(msg_string)
                except ImportError:
                    msg_string = "Could not import lz4, use " \
                        "'python -m pip install lz4==3.1.1' " \
                        "to install it."
                    print(msg_string)
                    exit(1)
                compress_data = lz4.block.compress(
                    get_file_data(in_file),
                    mode='high_compression')
        elif sig == "LZMA":
            cmdline = [
                os.path.join(tool_dir, compress_tool),
                "-e",
                "-o", out_file,
                in_file]
            run_process(cmdline, False, True)
            compress_data = get_file_data(out_file)
    else:
        # Empty input: emit only the header with a zero-length payload.
        compress_data = bytearray()

    lz_hdr = LZ_HEADER()
    lz_hdr.signature = sig.encode()
    lz_hdr.svn = svn
    lz_hdr.compressed_len = len(compress_data)
    lz_hdr.length = os.path.getsize(in_file)
    data = bytearray()
    data.extend(lz_hdr)
    data.extend(compress_data)
    gen_file_from_object(out_file, data)

    return out_file
diff --git a/IntelFsp2Pkg/Tools/ConfigEditor/ConfigEditor.py b/IntelFsp2Pkg/Tools/ConfigEditor/ConfigEditor.py
new file mode 100644
index 0000000000..a7f79bbc96
--- /dev/null
+++ b/IntelFsp2Pkg/Tools/ConfigEditor/ConfigEditor.py
@@ -0,0 +1,1499 @@
+# @ ConfigEditor.py
+#
+# Copyright(c) 2018 - 2021, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+##
+
+import os
+import sys
+import marshal
+import tkinter
+import tkinter.ttk as ttk
+import tkinter.messagebox as messagebox
+import tkinter.filedialog as filedialog
+
+from pathlib import Path
+from GenYamlCfg import CGenYamlCfg, bytes_to_value, \
+ bytes_to_bracket_str, value_to_bytes, array_str_to_value
+from ctypes import sizeof, Structure, ARRAY, c_uint8, c_uint64, c_char, \
+ c_uint32, c_uint16
+from functools import reduce
+
+sys.path.insert(0, '..')
+from FspDscBsf2Yaml import bsf_to_dsc, dsc_to_yaml # noqa
+
+
+sys.dont_write_bytecode = True
+
+
class create_tool_tip(object):
    '''
    create a tooltip for a given widget
    '''
    # Class-level default; instances write their own copy on first show.
    # Suppresses opening a second tooltip while one is already displayed.
    in_progress = False

    def __init__(self, widget, text=''):
        self.top_win = None
        self.widget = widget
        self.text = text
        self.widget.bind("<Enter>", self.enter)
        self.widget.bind("<Leave>", self.leave)

    def enter(self, event=None):
        """Show the tooltip near the pointer (for Treeview widgets, only
        when the cursor is over the header area)."""
        if self.in_progress:
            return
        if self.widget.winfo_class() == 'Treeview':
            # Only show help when cursor is on row header.
            rowid = self.widget.identify_row(event.y)
            if rowid != '':
                return
        else:
            x, y, cx, cy = self.widget.bbox("insert")

        cursor = self.widget.winfo_pointerxy()
        x = self.widget.winfo_rootx() + 35
        y = self.widget.winfo_rooty() + 20
        # Nudge downward so the window is not directly under the cursor.
        if cursor[1] > y and cursor[1] < y + 20:
            y += 20

        # creates a toplevel window
        self.top_win = tkinter.Toplevel(self.widget)
        # Leaves only the label and removes the app window
        self.top_win.wm_overrideredirect(True)
        self.top_win.wm_geometry("+%d+%d" % (x, y))
        label = tkinter.Message(self.top_win,
                                text=self.text,
                                justify='left',
                                background='bisque',
                                relief='solid',
                                borderwidth=1,
                                font=("times", "10", "normal"))
        label.pack(ipadx=1)
        self.in_progress = True

    def leave(self, event=None):
        """Destroy the tooltip window when the pointer leaves the widget."""
        if self.top_win:
            self.top_win.destroy()
            self.in_progress = False
+
+
class validating_entry(tkinter.Entry):
    """In-place hex editor overlaid on a single custom_table cell."""

    def __init__(self, master, **kw):
        tkinter.Entry.__init__(*(self, master), **kw)
        self.parent = master
        self.old_value = ''
        self.last_value = ''
        self.variable = tkinter.StringVar()
        # Re-validate on every keystroke.
        self.variable.trace("w", self.callback)
        self.config(textvariable=self.variable)
        self.config({"background": "#c0c0c0"})
        self.bind("<Return>", self.move_next)
        self.bind("<Tab>", self.move_next)
        self.bind("<Escape>", self.cancel)
        for each in ['BackSpace', 'Delete']:
            self.bind("<%s>" % each, self.ignore)
        self.display(None)

    def ignore(self, even):
        # Swallow BackSpace/Delete so the cell keeps its fixed hex width.
        return "break"

    def move_next(self, event):
        """Commit the value and jump to the next editable cell (Return/Tab)."""
        if self.row < 0:
            return
        row, col = self.row, self.col
        txt, row_id, col_id = self.parent.get_next_cell(row, col)
        self.display(txt, row_id, col_id)
        return "break"

    def cancel(self, event):
        """Restore the original value and hide the editor (Escape)."""
        self.variable.set(self.old_value)
        self.display(None)

    def display(self, txt, row_id='', col_id=''):
        """Show the editor over cell (row_id, col_id) with text 'txt', or
        hide it when 'txt' is None."""
        if txt is None:
            self.row = -1
            self.col = -1
            self.place_forget()
        else:
            # row_id like 'I001': drop the lead char, parse hex, 1-based.
            row = int('0x' + row_id[1:], 0) - 1
            col = int(col_id[1:]) - 1
            self.row = row
            self.col = col
            self.old_value = txt
            self.last_value = txt
            x, y, width, height = self.parent.bbox(row_id, col)
            self.place(x=x, y=y, w=width)
            self.variable.set(txt)
            self.focus_set()
            self.icursor(0)

    def callback(self, *Args):
        # Keystroke hook: push validated values into the table cell.
        cur_val = self.variable.get()
        new_val = self.validate(cur_val)
        if new_val is not None and self.row >= 0:
            self.last_value = new_val
            self.parent.set_cell(self.row, self.col, new_val)
        self.variable.set(self.last_value)

    def validate(self, value):
        """Return 'value' normalized to a fixed-width hex string, or None
        when it is not valid hexadecimal."""
        if len(value) > 0:
            try:
                int(value, 16)
            except Exception:
                return None

        # Normalize the cell format
        self.update()
        cell_width = self.winfo_width()
        max_len = custom_table.to_byte_length(cell_width) * 2
        cur_pos = self.index("insert")
        if cur_pos == max_len + 1:
            # Typed past capacity at the end: keep the most recent digits.
            value = value[-max_len:]
        else:
            value = value[:max_len]
        if value == '':
            value = '0'
        fmt = '%%0%dX' % max_len
        return fmt % int(value, 16)
+
+
class custom_table(ttk.Treeview):
    """Hex-editor grid for a binary buffer, built on a ttk.Treeview.

    Column headers are 'Name:byte_len' strings; column 0 shows the row
    offset and doubles as a 'LOAD' button to import a .bin file.
    """
    _Padding = 20      # extra pixels per cell beyond the hex digits
    _Char_width = 6    # assumed pixel width of one hex digit

    def __init__(self, parent, col_hdr, bins):
        cols = len(col_hdr)

        col_byte_len = []
        for col in range(cols):  # Columns
            col_byte_len.append(int(col_hdr[col].split(':')[1]))

        byte_len = sum(col_byte_len)
        rows = (len(bins) + byte_len - 1) // byte_len

        self.rows = rows
        self.cols = cols
        self.col_byte_len = col_byte_len
        self.col_hdr = col_hdr

        self.size = len(bins)
        self.last_dir = ''

        style = ttk.Style()
        style.configure("Custom.Treeview.Heading",
                        font=('calibri', 10, 'bold'),
                        foreground="blue")
        ttk.Treeview.__init__(self, parent, height=rows,
                              columns=[''] + col_hdr, show='headings',
                              style="Custom.Treeview",
                              selectmode='none')
        self.bind("<Button-1>", self.click)
        self.bind("<FocusOut>", self.focus_out)
        self.entry = validating_entry(self, width=4, justify=tkinter.CENTER)

        self.heading(0, text='LOAD')
        self.column(0, width=60, stretch=0, anchor=tkinter.CENTER)

        for col in range(cols):  # Columns
            text = col_hdr[col].split(':')[0]
            byte_len = int(col_hdr[col].split(':')[1])
            self.heading(col+1, text=text)
            self.column(col+1, width=self.to_cell_width(byte_len),
                        stretch=0, anchor=tkinter.CENTER)
        idx = 0
        for row in range(rows):  # Rows
            # NOTE(review): 'text' is assigned but never used — leftover?
            text = '%04X' % (row * len(col_hdr))
            vals = ['%04X:' % (cols * row)]
            for col in range(cols):  # Columns
                if idx >= len(bins):
                    break
                byte_len = int(col_hdr[col].split(':')[1])
                value = bytes_to_value(bins[idx:idx+byte_len])
                hex = ("%%0%dX" % (byte_len * 2)) % value
                vals.append(hex)
                idx += byte_len
            self.insert('', 'end', values=tuple(vals))
            if idx >= len(bins):
                break

    @staticmethod
    def to_cell_width(byte_len):
        # Pixel width needed to display byte_len bytes as hex digits.
        return byte_len * 2 * custom_table._Char_width + custom_table._Padding

    @staticmethod
    def to_byte_length(cell_width):
        # Inverse of to_cell_width: bytes that fit in a cell of that width.
        return(cell_width - custom_table._Padding) \
            // (2 * custom_table._Char_width)

    def focus_out(self, event):
        # Hide the inline editor when the table loses focus.
        self.entry.display(None)

    def refresh_bin(self, bins):
        """Reload every table cell from the byte buffer 'bins'."""
        if not bins:
            return

        # Reload binary into widget
        bin_len = len(bins)
        for row in range(self.rows):
            iid = self.get_children()[row]
            for col in range(self.cols):
                idx = row * sum(self.col_byte_len) + \
                    sum(self.col_byte_len[:col])
                byte_len = self.col_byte_len[col]
                if idx + byte_len <= self.size:
                    byte_len = int(self.col_hdr[col].split(':')[1])
                    if idx + byte_len > bin_len:
                        val = 0
                    else:
                        val = bytes_to_value(bins[idx:idx+byte_len])
                    hex_val = ("%%0%dX" % (byte_len * 2)) % val
                    self.set(iid, col + 1, hex_val)

    def get_cell(self, row, col):
        # Text currently shown at (row, col); col 0 is the offset column.
        iid = self.get_children()[row]
        txt = self.item(iid, 'values')[col]
        return txt

    def get_next_cell(self, row, col):
        """Return (text, row_id, col_id) of the cell following (row, col),
        wrapping to the first cell after the last data byte."""
        rows = self.get_children()
        col += 1
        if col > self.cols:
            col = 1
            row += 1
        cnt = row * sum(self.col_byte_len) + sum(self.col_byte_len[:col])
        if cnt > self.size:
            # Reached the last cell, so roll back to beginning
            row = 0
            col = 1

        txt = self.get_cell(row, col)
        row_id = rows[row]
        col_id = '#%d' % (col + 1)
        return(txt, row_id, col_id)

    def set_cell(self, row, col, val):
        # Write 'val' into the Treeview cell at (row, col).
        iid = self.get_children()[row]
        self.set(iid, col, val)

    def load_bin(self):
        """Prompt for a .bin file and return its bytes, truncated/zero-padded
        to self.size; returns None when the dialog is cancelled."""
        # Load binary from file
        path = filedialog.askopenfilename(
            initialdir=self.last_dir,
            title="Load binary file",
            filetypes=(("Binary files", "*.bin"), (
                "binary files", "*.bin")))
        if path:
            self.last_dir = os.path.dirname(path)
            fd = open(path, 'rb')
            bins = bytearray(fd.read())[:self.size]
            fd.close()
            bins.extend(b'\x00' * (self.size - len(bins)))
            return bins

        return None

    def click(self, event):
        """Mouse handler: LOAD header imports a file; data cells open the
        inline hex editor."""
        row_id = self.identify_row(event.y)
        col_id = self.identify_column(event.x)
        if row_id == '' and col_id == '#1':
            # Clicked on "LOAD" cell
            bins = self.load_bin()
            self.refresh_bin(bins)
            return

        if col_id == '#1':
            # Clicked on column 1(Offset column)
            return

        item = self.identify('item', event.x, event.y)
        if not item or not col_id:
            # Not clicked on valid cell
            return

        # Clicked cell
        row = int('0x' + row_id[1:], 0) - 1
        col = int(col_id[1:]) - 1
        if row * self.cols + col > self.size:
            return

        vals = self.item(item, 'values')
        if col < len(vals):
            txt = self.item(item, 'values')[col]
            self.entry.display(txt, row_id, col_id)

    def get(self):
        """Serialize the displayed cells back into a bytearray (little-endian
        per column, masked to each column's byte width)."""
        bins = bytearray()
        row_ids = self.get_children()
        for row_id in row_ids:
            row = int('0x' + row_id[1:], 0) - 1
            for col in range(self.cols):
                idx = row * sum(self.col_byte_len) + \
                    sum(self.col_byte_len[:col])
                byte_len = self.col_byte_len[col]
                if idx + byte_len > self.size:
                    break
                hex = self.item(row_id, 'values')[col + 1]
                values = value_to_bytes(int(hex, 16)
                                        & ((1 << byte_len * 8) - 1), byte_len)
                bins.extend(values)
        return bins
+
+
class c_uint24(Structure):
    """Little-Endian 24-bit Unsigned Integer"""
    _pack_ = 1
    _fields_ = [('Data', (c_uint8 * 3))]

    def __init__(self, val=0):
        self.set_value(val)

    def __str__(self, indent=0):
        return '0x%.6x' % self.value

    def __int__(self):
        return self.get_value()

    def set_value(self, val):
        # Store the low 24 bits, least-significant byte first.
        self.Data[0:3] = list((val & 0xFFFFFF).to_bytes(3, 'little'))

    def get_value(self):
        # Reassemble the three stored bytes little-endian.
        return int.from_bytes(bytes(self.Data[0:3]), 'little')

    value = property(get_value, set_value)
+
+
class EFI_FIRMWARE_VOLUME_HEADER(Structure):
    """Firmware volume header; Signature is checked against b'_FVH'."""
    _fields_ = [
        ('ZeroVector', ARRAY(c_uint8, 16)),
        ('FileSystemGuid', ARRAY(c_uint8, 16)),
        ('FvLength', c_uint64),           # total volume size including header
        ('Signature', ARRAY(c_char, 4)),  # b'_FVH'
        ('Attributes', c_uint32),
        ('HeaderLength', c_uint16),
        ('Checksum', c_uint16),
        ('ExtHeaderOffset', c_uint16),    # 0 when no extended header present
        ('Reserved', c_uint8),
        ('Revision', c_uint8)
    ]
+
+
class EFI_FIRMWARE_VOLUME_EXT_HEADER(Structure):
    """Optional FV extended header located at ExtHeaderOffset."""
    _fields_ = [
        ('FvName', ARRAY(c_uint8, 16)),  # volume GUID
        ('ExtHeaderSize', c_uint32)
    ]
+
+
class EFI_FFS_INTEGRITY_CHECK(Structure):
    """Two-byte header/file checksum pair embedded in EFI_FFS_FILE_HEADER."""
    _fields_ = [
        ('Header', c_uint8),
        ('File', c_uint8)
    ]
+
+
class EFI_FFS_FILE_HEADER(Structure):
    """Header of one FFS file inside a firmware volume."""
    _fields_ = [
        ('Name', ARRAY(c_uint8, 16)),  # file GUID; all-0xFF marks padding
        ('IntegrityCheck', EFI_FFS_INTEGRITY_CHECK),
        ('Type', c_uint8),
        ('Attributes', c_uint8),
        ('Size', c_uint24),            # 24-bit total file size incl. header
        ('State', c_uint8)
    ]
+
+
class EFI_COMMON_SECTION_HEADER(Structure):
    """Common header of a firmware file section (24-bit size + type)."""
    _fields_ = [
        ('Size', c_uint24),  # total section size including this header
        ('Type', c_uint8)    # one of EFI_SECTION_TYPE
    ]
+
+
class EFI_SECTION_TYPE:
    """Enumeration of all valid firmware file section types."""
    # Values presumably follow the UEFI PI section-type encodings — confirm.
    ALL = 0x00
    COMPRESSION = 0x01
    GUID_DEFINED = 0x02
    DISPOSABLE = 0x03
    PE32 = 0x10
    PIC = 0x11
    TE = 0x12
    DXE_DEPEX = 0x13
    VERSION = 0x14
    USER_INTERFACE = 0x15
    COMPATIBILITY16 = 0x16
    FIRMWARE_VOLUME_IMAGE = 0x17
    FREEFORM_SUBTYPE_GUID = 0x18
    RAW = 0x19
    PEI_DEPEX = 0x1b
    SMM_DEPEX = 0x1c
+
+
class FSP_COMMON_HEADER(Structure):
    """Generic 'signature + length' header used to walk FSP sub-headers."""
    _fields_ = [
        ('Signature', ARRAY(c_char, 4)),
        ('HeaderLength', c_uint32)
    ]
+
+
class FSP_INFORMATION_HEADER(Structure):
    """FSP information header; Signature is checked against b'FSPH'."""
    _fields_ = [
        ('Signature', ARRAY(c_char, 4)),       # b'FSPH'
        ('HeaderLength', c_uint32),
        ('Reserved1', c_uint16),
        ('SpecVersion', c_uint8),
        ('HeaderRevision', c_uint8),
        ('ImageRevision', c_uint32),
        ('ImageId', ARRAY(c_char, 8)),
        ('ImageSize', c_uint32),
        ('ImageBase', c_uint32),
        ('ImageAttribute', c_uint16),
        ('ComponentAttribute', c_uint16),      # bits 12..15 encode FSP type
        ('CfgRegionOffset', c_uint32),
        ('CfgRegionSize', c_uint32),
        ('Reserved2', c_uint32),
        ('TempRamInitEntryOffset', c_uint32),
        ('Reserved3', c_uint32),
        ('NotifyPhaseEntryOffset', c_uint32),
        ('FspMemoryInitEntryOffset', c_uint32),
        ('TempRamExitEntryOffset', c_uint32),
        ('FspSiliconInitEntryOffset', c_uint32)
    ]
+
+
class FSP_EXTENDED_HEADER(Structure):
    """FSP extended header following the information header."""
    _fields_ = [
        ('Signature', ARRAY(c_char, 4)),
        ('HeaderLength', c_uint32),
        ('Revision', c_uint8),
        ('Reserved', c_uint8),
        ('FspProducerId', ARRAY(c_char, 6)),
        ('FspProducerRevision', c_uint32),
        ('FspProducerDataSize', c_uint32)
    ]
+
+
class FSP_PATCH_TABLE(Structure):
    """Header of the FSP patch table ('FSPP'); PatchEntryNum c_uint32
    patch entries follow this structure."""
    _fields_ = [
        ('Signature', ARRAY(c_char, 4)),  # b'FSPP'
        ('HeaderLength', c_uint16),
        ('HeaderRevision', c_uint8),
        ('Reserved', c_uint8),
        ('PatchEntryNum', c_uint32)
    ]
+
+
class Section:
    """A firmware file section: parsed common header plus raw bytes."""
    def __init__(self, offset, secdata):
        self.SecHdr = EFI_COMMON_SECTION_HEADER.from_buffer(secdata, 0)
        self.SecData = secdata[0:int(self.SecHdr.Size)]
        self.Offset = offset  # offset of this section within its FFS file
+
+
def AlignPtr(offset, alignment=8):
    """Round 'offset' up to the next multiple of 'alignment' (power of 2)."""
    mask = alignment - 1
    return (offset + mask) & ~mask
+
+
def Bytes2Val(bytes):
    """Fold a little-endian byte sequence into an unsigned integer."""
    return reduce(lambda acc, b: acc * 256 + b, reversed(bytes))
+
+
def Val2Bytes(value, blen):
    """Split an integer into a little-endian list of 'blen' byte values."""
    out = []
    for _ in range(blen):
        out.append(value & 0xff)
        value >>= 8
    return out
+
+
class FirmwareFile:
    """One FFS file from a firmware volume, with its parsed sections."""
    def __init__(self, offset, filedata):
        self.FfsHdr = EFI_FFS_FILE_HEADER.from_buffer(filedata, 0)
        self.FfsData = filedata[0:int(self.FfsHdr.Size)]
        self.Offset = offset  # offset of this file within its volume
        self.SecList = []

    def ParseFfs(self):
        """Populate self.SecList with the sections of this FFS file."""
        ffssize = len(self.FfsData)
        offset = sizeof(self.FfsHdr)
        # NOTE(review): FfsHdr.Name is a c_uint8 array and can never equal
        # the str '\xff' * 16, so this guard is always True and padding
        # files are parsed too; bytes(self.FfsHdr.Name) == b'\xff' * 16
        # was probably intended — confirm.
        if self.FfsHdr.Name != '\xff' * 16:
            while offset < (ffssize - sizeof(EFI_COMMON_SECTION_HEADER)):
                sechdr = EFI_COMMON_SECTION_HEADER.from_buffer(
                    self.FfsData, offset)
                sec = Section(
                    offset, self.FfsData[offset:offset + int(sechdr.Size)])
                self.SecList.append(sec)
                offset += int(sechdr.Size)
                # Sections are 4-byte aligned within the file.
                offset = AlignPtr(offset, 4)
+
+
class FirmwareVolume:
    """A firmware volume: header, optional extended header, FFS file list."""
    def __init__(self, offset, fvdata):
        self.FvHdr = EFI_FIRMWARE_VOLUME_HEADER.from_buffer(fvdata, 0)
        self.FvData = fvdata[0: self.FvHdr.FvLength]
        self.Offset = offset  # offset of this volume within the device image
        if self.FvHdr.ExtHeaderOffset > 0:
            self.FvExtHdr = EFI_FIRMWARE_VOLUME_EXT_HEADER.from_buffer(
                self.FvData, self.FvHdr.ExtHeaderOffset)
        else:
            self.FvExtHdr = None
        self.FfsList = []

    def ParseFv(self):
        """Populate self.FfsList with the FFS files found in this volume."""
        fvsize = len(self.FvData)
        # Data starts after the (extended) header, 8-byte aligned.
        if self.FvExtHdr:
            offset = self.FvHdr.ExtHeaderOffset + self.FvExtHdr.ExtHeaderSize
        else:
            offset = self.FvHdr.HeaderLength
        offset = AlignPtr(offset)
        while offset < (fvsize - sizeof(EFI_FFS_FILE_HEADER)):
            ffshdr = EFI_FFS_FILE_HEADER.from_buffer(self.FvData, offset)
            # NOTE(review): ffshdr.Name is a c_uint8 array and never equals
            # the str '\xff' * 16, so this free-space check can never
            # trigger; bytes(ffshdr.Name) == b'\xff' * 16 was probably
            # intended — confirm.
            if (ffshdr.Name == '\xff' * 16) and \
                    (int(ffshdr.Size) == 0xFFFFFF):
                offset = fvsize
            else:
                ffs = FirmwareFile(
                    offset, self.FvData[offset:offset + int(ffshdr.Size)])
                ffs.ParseFfs()
                self.FfsList.append(ffs)
                offset += int(ffshdr.Size)
                offset = AlignPtr(offset)
+
+
class FspImage:
    """One FSP component: its info header, owning FV indices and patches."""
    def __init__(self, offset, fih, fihoff, patch):
        self.Fih = fih
        self.FihOffset = fihoff
        self.Offset = offset
        self.FvIdxList = []
        # Component type letter decoded from bits 12..15 of
        # ComponentAttribute (presumably T/M/S/O = FSP-T/M/S/OEM — confirm).
        self.Type = "XTMSXXXXOXXXXXXX"[(fih.ComponentAttribute >> 12) & 0x0F]
        self.PatchList = patch
        # Always patch offset 0x1C in the info header (the ImageBase field).
        self.PatchList.append(fihoff + 0x1C)

    def AppendFv(self, FvIdx):
        # Record an FV index as belonging to this FSP component.
        self.FvIdxList.append(FvIdx)

    def Patch(self, delta, fdbin):
        """Add 'delta' to every in-range 32-bit patch location in 'fdbin'.

        Returns (count, applied), excluding the ImageBase entry that
        __init__ appended to PatchList.
        """
        count = 0
        applied = 0
        for idx, patch in enumerate(self.PatchList):
            ptype = (patch >> 24) & 0x0F
            if ptype not in [0x00, 0x0F]:
                raise Exception('ERROR: Invalid patch type %d !' % ptype)
            if patch & 0x80000000:
                # Top bit set: offset is encoded relative to the image end.
                patch = self.Fih.ImageSize - (0x1000000 - (patch & 0xFFFFFF))
            else:
                patch = patch & 0xFFFFFF
            if (patch < self.Fih.ImageSize) and \
                    (patch + sizeof(c_uint32) <= self.Fih.ImageSize):
                offset = patch + self.Offset
                value = Bytes2Val(fdbin[offset:offset+sizeof(c_uint32)])
                value += delta
                fdbin[offset:offset+sizeof(c_uint32)] = Val2Bytes(
                    value, sizeof(c_uint32))
                applied += 1
            count += 1
        # Don't count the FSP base address patch entry appended at the end
        if count != 0:
            count -= 1
            applied -= 1
        return (count, applied)
+
+
+class FirmwareDevice:
    def __init__(self, offset, FdData):
        """Hold raw firmware-device bytes plus parsed FV/FSP bookkeeping."""
        self.FvList = []
        self.FspList = []
        self.FspExtList = []
        self.FihList = []
        self.BuildList = []
        self.OutputText = ""
        # NOTE(review): the 'offset' argument is ignored and Offset is
        # always forced to 0 — confirm callers never pass a real offset.
        self.Offset = 0
        self.FdData = FdData
+
    def ParseFd(self):
        """Split self.FdData into FirmwareVolume objects (self.FvList).

        Raises when a region does not start with the '_FVH' signature.
        """
        offset = 0
        fdsize = len(self.FdData)
        self.FvList = []
        while offset < (fdsize - sizeof(EFI_FIRMWARE_VOLUME_HEADER)):
            fvh = EFI_FIRMWARE_VOLUME_HEADER.from_buffer(self.FdData, offset)
            if b'_FVH' != fvh.Signature:
                raise Exception("ERROR: Invalid FV header !")
            fv = FirmwareVolume(
                offset, self.FdData[offset:offset + fvh.FvLength])
            fv.ParseFv()
            self.FvList.append(fv)
            offset += fv.FvHdr.FvLength
+
    def CheckFsp(self):
        """Verify all discovered FSP components share the same ImageId and
        ImageRevision; raise otherwise."""
        if len(self.FspList) == 0:
            return

        fih = None
        for fsp in self.FspList:
            if not fih:
                # First component becomes the reference.
                fih = fsp.Fih
            else:
                newfih = fsp.Fih
                if (newfih.ImageId != fih.ImageId) or \
                        (newfih.ImageRevision != fih.ImageRevision):
                    raise Exception(
                        "ERROR: Inconsistent FSP ImageId or "
                        "ImageRevision detected !")
+
    def ParseFsp(self):
        """Walk self.FvList and build FspImage objects into self.FspList.

        The first FV of a component carries the FSP_INFORMATION_HEADER in
        its first RAW section; following FVs are appended to the same
        component until ImageSize bytes have been consumed (flen).
        """
        flen = 0
        for idx, fv in enumerate(self.FvList):
            # Check if this FV contains FSP header
            if flen == 0:
                if len(fv.FfsList) == 0:
                    continue
                ffs = fv.FfsList[0]
                if len(ffs.SecList) == 0:
                    continue
                sec = ffs.SecList[0]
                if sec.SecHdr.Type != EFI_SECTION_TYPE.RAW:
                    continue
                fihoffset = ffs.Offset + sec.Offset + sizeof(sec.SecHdr)
                fspoffset = fv.Offset
                offset = fspoffset + fihoffset
                fih = FSP_INFORMATION_HEADER.from_buffer(self.FdData, offset)
                self.FihList.append(fih)
                if b'FSPH' != fih.Signature:
                    continue

                offset += fih.HeaderLength

                offset = AlignPtr(offset, 2)
                Extfih = FSP_EXTENDED_HEADER.from_buffer(self.FdData, offset)
                self.FspExtList.append(Extfih)
                offset = AlignPtr(offset, 4)
                plist = []
                while True:
                    # Walk sub-headers until the patch table ('FSPP').
                    fch = FSP_COMMON_HEADER.from_buffer(self.FdData, offset)
                    if b'FSPP' != fch.Signature:
                        offset += fch.HeaderLength
                        offset = AlignPtr(offset, 4)
                    else:
                        fspp = FSP_PATCH_TABLE.from_buffer(
                            self.FdData, offset)
                        offset += sizeof(fspp)
                        # Build-description string starts 32 bytes past the
                        # patch table and runs up to the first 0xFF byte.
                        start_offset = offset + 32
                        end_offset = offset + 32
                        while True:
                            end_offset += 1
                            if(self.FdData[
                                    end_offset: end_offset + 1] == b'\xff'):
                                break
                        self.BuildList.append(
                            self.FdData[start_offset:end_offset])
                        pdata = (c_uint32 * fspp.PatchEntryNum).from_buffer(
                            self.FdData, offset)
                        plist = list(pdata)
                        break

                fsp = FspImage(fspoffset, fih, fihoffset, plist)
                fsp.AppendFv(idx)
                self.FspList.append(fsp)
                # Remaining bytes of this component beyond the first FV.
                flen = fsp.Fih.ImageSize - fv.FvHdr.FvLength
            else:
                # Continuation FV of the current component.
                fsp.AppendFv(idx)
                flen -= fv.FvHdr.FvLength
                if flen < 0:
                    raise Exception("ERROR: Incorrect FV size in image !")
        self.CheckFsp()
+
    def OutputFsp(self):
        """Display a scrollable Tk window summarizing every FSP header found.

        Builds a text report from self.FihList / self.FspExtList /
        self.BuildList (populated by ParseFsp) and shows it with a
        copy-to-clipboard button.
        NOTE(review): assumes self.OutputText was initialized elsewhere
        (it is read before first append) — confirm against the class ctor.
        """
        def copy_text_to_clipboard():
            # Replace the clipboard content with the full generated report.
            window.clipboard_clear()
            window.clipboard_append(self.OutputText)

        window = tkinter.Tk()
        window.title("Fsp Headers")
        window.resizable(0, 0)
        # Window Size
        window.geometry("300x400+350+150")
        frame = tkinter.Frame(window)
        frame.pack(side=tkinter.BOTTOM)
        # Vertical (y) Scroll Bar
        scroll = tkinter.Scrollbar(window)
        scroll.pack(side=tkinter.RIGHT, fill=tkinter.Y)
        text = tkinter.Text(window,
                            wrap=tkinter.NONE, yscrollcommand=scroll.set)
        i = 0
        self.OutputText = self.OutputText + "Fsp Header Details \n\n"
        # One report section per discovered FSP information header.
        while i < len(self.FihList):
            try:
                # BuildList entry is the build description string, if any.
                self.OutputText += str(self.BuildList[i].decode()) + "\n"
            except Exception:
                self.OutputText += "No description found\n"
            self.OutputText += "FSP Header :\n "
            self.OutputText += "Signature : " + \
                str(self.FihList[i].Signature.decode('utf-8')) + "\n "
            self.OutputText += "Header Length : " + \
                str(hex(self.FihList[i].HeaderLength)) + "\n "
            self.OutputText += "Header Revision : " + \
                str(hex(self.FihList[i].HeaderRevision)) + "\n "
            self.OutputText += "Spec Version : " + \
                str(hex(self.FihList[i].SpecVersion)) + "\n "
            self.OutputText += "Image Revision : " + \
                str(hex(self.FihList[i].ImageRevision)) + "\n "
            self.OutputText += "Image Id : " + \
                str(self.FihList[i].ImageId.decode('utf-8')) + "\n "
            self.OutputText += "Image Size : " + \
                str(hex(self.FihList[i].ImageSize)) + "\n "
            self.OutputText += "Image Base : " + \
                str(hex(self.FihList[i].ImageBase)) + "\n "
            self.OutputText += "Image Attribute : " + \
                str(hex(self.FihList[i].ImageAttribute)) + "\n "
            self.OutputText += "Cfg Region Offset : " + \
                str(hex(self.FihList[i].CfgRegionOffset)) + "\n "
            self.OutputText += "Cfg Region Size : " + \
                str(hex(self.FihList[i].CfgRegionSize)) + "\n "
            # Reserved2 holds the API entry count in this header layout.
            self.OutputText += "API Entry Num : " + \
                str(hex(self.FihList[i].Reserved2)) + "\n "
            self.OutputText += "Temp Ram Init Entry : " + \
                str(hex(self.FihList[i].TempRamInitEntryOffset)) + "\n "
            self.OutputText += "FSP Init Entry : " + \
                str(hex(self.FihList[i].Reserved3)) + "\n "
            self.OutputText += "Notify Phase Entry : " + \
                str(hex(self.FihList[i].NotifyPhaseEntryOffset)) + "\n "
            self.OutputText += "Fsp Memory Init Entry : " + \
                str(hex(self.FihList[i].FspMemoryInitEntryOffset)) + "\n "
            self.OutputText += "Temp Ram Exit Entry : " + \
                str(hex(self.FihList[i].TempRamExitEntryOffset)) + "\n "
            self.OutputText += "Fsp Silicon Init Entry : " + \
                str(hex(self.FihList[i].FspSiliconInitEntryOffset)) + "\n\n"
            self.OutputText += "FSP Extended Header:\n "
            self.OutputText += "Signature : " + \
                str(self.FspExtList[i].Signature.decode('utf-8')) + "\n "
            self.OutputText += "Header Length : " + \
                str(hex(self.FspExtList[i].HeaderLength)) + "\n "
            self.OutputText += "Header Revision : " + \
                str(hex(self.FspExtList[i].Revision)) + "\n "
            self.OutputText += "Fsp Producer Id : " + \
                str(self.FspExtList[i].FspProducerId.decode('utf-8')) + "\n "
            self.OutputText += "FspProducerRevision : " + \
                str(hex(self.FspExtList[i].FspProducerRevision)) + "\n\n"
            i += 1
        text.insert(tkinter.INSERT, self.OutputText)
        text.pack()
        # Configure the scrollbars
        scroll.config(command=text.yview)
        copy_button = tkinter.Button(
            window, text="Copy to Clipboard", command=copy_text_to_clipboard)
        copy_button.pack(in_=frame, side=tkinter.LEFT, padx=20, pady=10)
        exit_button = tkinter.Button(
            window, text="Close", command=window.destroy)
        exit_button.pack(in_=frame, side=tkinter.RIGHT, padx=20, pady=10)
        # Blocks until the report window is closed.
        window.mainloop()
+
+
class state:
    """Tiny mutable flag holder.

    Used to remember whether the mouse pointer is currently inside the
    left tree pane or the right config pane, so scroll events can be
    routed to exactly one of them.
    """

    def __init__(self):
        # Flag starts out cleared.
        self.state = False

    def set(self, value):
        """Store *value* as the current flag."""
        self.state = value

    def get(self):
        """Return the most recently stored flag value."""
        return self.state
+
+
class application(tkinter.Frame):
    """Main GUI controller for the FSP configuration editor.

    Presents a two-pane window: a Treeview of configuration pages on the
    left and the selected page's option widgets on the right.  Knows how
    to load/save YAML, BSF, delta (.dlt) and binary configuration data
    through a CGenYamlCfg object (self.cfg_data_obj).
    """

    def __init__(self, master=None):
        root = master

        # When True, value changes are echoed to stdout.
        self.debug = True
        self.mode = 'FSP'
        self.last_dir = '.'
        self.page_id = ''
        self.page_list = {}
        # Maps id(widget) -> config item path (see set_object_name).
        self.conf_list = {}
        self.cfg_data_obj = None
        self.org_cfg_data_bin = None
        # Track which pane the pointer is in, for scroll routing.
        self.in_left = state()
        self.in_right = state()

        # Check if current directory contains a file with a .yaml extension
        # if not default self.last_dir to a Platform directory where it is
        # easier to locate *BoardPkg\CfgData\*Def.yaml files
        self.last_dir = '.'
        if not any(fname.endswith('.yaml') for fname in os.listdir('.')):
            platform_path = Path(os.path.realpath(__file__)).parents[2].\
                joinpath('Platform')
            if platform_path.exists():
                self.last_dir = platform_path

        tkinter.Frame.__init__(self, master, borderwidth=2)

        # Menu labels; indexes are referenced below when building the menu
        # and in load_cfg_file when re-enabling entries.
        self.menu_string = [
            'Save Config Data to Binary', 'Load Config Data from Binary',
            'Show Binary Information',
            'Load Config Changes from Delta File',
            'Save Config Changes to Delta File',
            'Save Full Config Data to Delta File',
            'Open Config BSF file'
        ]

        root.geometry("1200x800")

        paned = ttk.Panedwindow(root, orient=tkinter.HORIZONTAL)
        paned.pack(fill=tkinter.BOTH, expand=True, padx=(4, 4))

        status = tkinter.Label(master, text="", bd=1, relief=tkinter.SUNKEN,
                               anchor=tkinter.W)
        status.pack(side=tkinter.BOTTOM, fill=tkinter.X)

        frame_left = ttk.Frame(paned, height=800, relief="groove")

        self.left = ttk.Treeview(frame_left, show="tree")

        # Set up tree HScroller
        pady = (10, 10)
        self.tree_scroll = ttk.Scrollbar(frame_left,
                                         orient="vertical",
                                         command=self.left.yview)
        self.left.configure(yscrollcommand=self.tree_scroll.set)
        self.left.bind("<<TreeviewSelect>>", self.on_config_page_select_change)
        self.left.bind("<Enter>", lambda e: self.in_left.set(True))
        self.left.bind("<Leave>", lambda e: self.in_left.set(False))
        self.left.bind("<MouseWheel>", self.on_tree_scroll)

        self.left.pack(side='left',
                       fill=tkinter.BOTH,
                       expand=True,
                       padx=(5, 0),
                       pady=pady)
        self.tree_scroll.pack(side='right', fill=tkinter.Y,
                              pady=pady, padx=(0, 5))

        frame_right = ttk.Frame(paned, relief="groove")
        self.frame_right = frame_right

        # The right pane is a canvas hosting a frame so it can scroll.
        self.conf_canvas = tkinter.Canvas(frame_right, highlightthickness=0)
        self.page_scroll = ttk.Scrollbar(frame_right,
                                         orient="vertical",
                                         command=self.conf_canvas.yview)
        self.right_grid = ttk.Frame(self.conf_canvas)
        self.conf_canvas.configure(yscrollcommand=self.page_scroll.set)
        self.conf_canvas.pack(side='left',
                              fill=tkinter.BOTH,
                              expand=True,
                              pady=pady,
                              padx=(5, 0))
        self.page_scroll.pack(side='right', fill=tkinter.Y,
                              pady=pady, padx=(0, 5))
        self.conf_canvas.create_window(0, 0, window=self.right_grid,
                                       anchor='nw')
        self.conf_canvas.bind('<Enter>', lambda e: self.in_right.set(True))
        self.conf_canvas.bind('<Leave>', lambda e: self.in_right.set(False))
        self.conf_canvas.bind("<Configure>", self.on_canvas_configure)
        self.conf_canvas.bind_all("<MouseWheel>", self.on_page_scroll)

        paned.add(frame_left, weight=2)
        paned.add(frame_right, weight=10)

        style = ttk.Style()
        style.layout("Treeview", [('Treeview.treearea', {'sticky': 'nswe'})])

        menubar = tkinter.Menu(root)
        file_menu = tkinter.Menu(menubar, tearoff=0)
        file_menu.add_command(label="Open Config YAML file",
                              command=self.load_from_yaml)
        file_menu.add_command(label=self.menu_string[6],
                              command=self.load_from_bsf_file)
        file_menu.add_command(label=self.menu_string[2],
                              command=self.load_from_fd)
        # Data-dependent entries start disabled until a config is loaded.
        file_menu.add_command(label=self.menu_string[0],
                              command=self.save_to_bin,
                              state='disabled')
        file_menu.add_command(label=self.menu_string[1],
                              command=self.load_from_bin,
                              state='disabled')
        file_menu.add_command(label=self.menu_string[3],
                              command=self.load_from_delta,
                              state='disabled')
        file_menu.add_command(label=self.menu_string[4],
                              command=self.save_to_delta,
                              state='disabled')
        file_menu.add_command(label=self.menu_string[5],
                              command=self.save_full_to_delta,
                              state='disabled')
        file_menu.add_command(label="About", command=self.about)
        menubar.add_cascade(label="File", menu=file_menu)
        self.file_menu = file_menu

        root.config(menu=menubar)

        # Optional command-line arguments: argv[1] = YAML/PKL config,
        # argv[2] = delta or binary file to apply on top of it.
        if len(sys.argv) > 1:
            path = sys.argv[1]
            if not path.endswith('.yaml') and not path.endswith('.pkl'):
                messagebox.showerror('LOADING ERROR',
                                     "Unsupported file '%s' !" % path)
                return
            else:
                self.load_cfg_file(path)

        if len(sys.argv) > 2:
            path = sys.argv[2]
            if path.endswith('.dlt'):
                self.load_delta_file(path)
            elif path.endswith('.bin'):
                self.load_bin_file(path)
            else:
                messagebox.showerror('LOADING ERROR',
                                     "Unsupported file '%s' !" % path)
                return

    def set_object_name(self, widget, name):
        """Associate *widget* with a config item path for later lookup."""
        self.conf_list[id(widget)] = name

    def get_object_name(self, widget):
        """Return the config item path registered for *widget*, or None."""
        if id(widget) in self.conf_list:
            return self.conf_list[id(widget)]
        else:
            return None

    def limit_entry_size(self, variable, limit):
        """Truncate a StringVar's content to at most *limit* characters."""
        value = variable.get()
        if len(value) > limit:
            variable.set(value[:limit])

    def on_canvas_configure(self, event):
        # Keep the config grid at least as wide as the visible canvas.
        self.right_grid.grid_columnconfigure(0, minsize=event.width)

    def on_tree_scroll(self, event):
        if not self.in_left.get() and self.in_right.get():
            # This prevents scroll event from being handled by both left and
            # right frame at the same time.
            self.on_page_scroll(event)
            return 'break'

    def on_page_scroll(self, event):
        if self.in_right.get():
            # Only scroll when it is in active area
            # NOTE(review): 'min'/'max' shadow the builtins here.
            min, max = self.page_scroll.get()
            if not((min == 0.0) and (max == 1.0)):
                self.conf_canvas.yview_scroll(-1 * int(event.delta / 120),
                                              'units')

    def update_visibility_for_widget(self, widget, args):
        """Show/gray/hide *widget* according to its item's condition.

        Returns True when the widget remains visible.
        """
        visible = True
        item = self.get_config_data_item_from_widget(widget, True)
        if item is None:
            return visible
        elif not item:
            return visible

        result = 1
        if item['condition']:
            result = self.evaluate_condition(item)
            if result == 2:
                # Gray
                widget.configure(state='disabled')
            elif result == 0:
                # Hide
                visible = False
                widget.grid_remove()
            else:
                # Show
                widget.grid()
                widget.configure(state='normal')

        return visible

    def update_widgets_visibility_on_page(self):
        self.walk_widgets_in_layout(self.right_grid,
                                    self.update_visibility_for_widget)

    def combo_select_changed(self, event):
        self.update_config_data_from_widget(event.widget, None)
        self.update_widgets_visibility_on_page()

    def edit_num_finished(self, event):
        """Validate an EditNum entry on focus-out against its (min, max).

        Out-of-range or malformed input is silently discarded and the
        widget text is reset to the stored item value.
        """
        widget = event.widget
        item = self.get_config_data_item_from_widget(widget)
        if not item:
            return
        parts = item['type'].split(',')
        if len(parts) > 3:
            # Type string looks like 'EditNum, HEX, (min, max)'.
            min = parts[2].lstrip()[1:]
            max = parts[3].rstrip()[:-1]
            min_val = array_str_to_value(min)
            max_val = array_str_to_value(max)
            text = widget.get()
            if ',' in text:
                text = '{ %s }' % text
            try:
                value = array_str_to_value(text)
                if value < min_val or value > max_val:
                    raise Exception('Invalid input!')
                self.set_config_item_value(item, text)
            except Exception:
                pass

            # Re-display the (possibly unchanged) canonical value.
            text = item['value'].strip('{').strip('}').strip()
            widget.delete(0, tkinter.END)
            widget.insert(0, text)

        self.update_widgets_visibility_on_page()

    def update_page_scroll_bar(self):
        # Update scrollbar
        self.frame_right.update()
        self.conf_canvas.config(scrollregion=self.conf_canvas.bbox("all"))

    def on_config_page_select_change(self, event):
        # Commit edits on the page being left, then render the new page.
        self.update_config_data_on_page()
        sel = self.left.selection()
        if len(sel) > 0:
            page_id = sel[0]
            self.build_config_data_page(page_id)
            self.update_widgets_visibility_on_page()
            self.update_page_scroll_bar()

    def walk_widgets_in_layout(self, parent, callback_function, args=None):
        """Invoke callback_function(widget, args) on each direct child."""
        for widget in parent.winfo_children():
            callback_function(widget, args)

    def clear_widgets_inLayout(self, parent=None):
        """Destroy all widgets on a page and drop their path registrations."""
        if parent is None:
            parent = self.right_grid

        for widget in parent.winfo_children():
            widget.destroy()

        parent.grid_forget()
        self.conf_list.clear()

    def build_config_page_tree(self, cfg_page, parent):
        """Recursively populate the left Treeview from the page tree."""
        for page in cfg_page['child']:
            page_id = next(iter(page))
            # Put CFG items into related page list
            self.page_list[page_id] = self.cfg_data_obj.get_cfg_list(page_id)
            self.page_list[page_id].sort(key=lambda x: x['order'])
            page_name = self.cfg_data_obj.get_page_title(page_id)
            child = self.left.insert(
                parent, 'end',
                iid=page_id, text=page_name,
                value=0)
            if len(page[page_id]) > 0:
                self.build_config_page_tree(page[page_id], child)

    def is_config_data_loaded(self):
        return True if len(self.page_list) else False

    def set_current_config_page(self, page_id):
        self.page_id = page_id

    def get_current_config_page(self):
        return self.page_id

    def get_current_config_data(self):
        page_id = self.get_current_config_page()
        if page_id in self.page_list:
            return self.page_list[page_id]
        else:
            return []

    # NOTE(review): class-level dict, shared by all instances; holds
    # path -> offending value for options that failed validation.
    invalid_values = {}

    def build_config_data_page(self, page_id):
        """Render all config items of *page_id* into the right pane."""
        self.clear_widgets_inLayout()
        self.set_current_config_page(page_id)
        disp_list = []
        for item in self.get_current_config_data():
            disp_list.append(item)
        row = 0
        disp_list.sort(key=lambda x: x['order'])
        for item in disp_list:
            self.add_config_item(item, row)
            row += 2
        if self.invalid_values:
            # NOTE(review): 'contails' looks like a typo for 'contains' in
            # this user-facing message (left as-is: runtime string).
            string = 'The following contails invalid options/values \n\n'
            for i in self.invalid_values:
                string += i + ": " + str(self.invalid_values[i]) + "\n"
            reply = messagebox.showwarning('Warning!', string)
            if reply == 'ok':
                self.invalid_values.clear()

    # Class-level default; set to '1.X' or '2.X' by load_config_data.
    fsp_version = ''

    def load_config_data(self, file_name):
        """Build a CGenYamlCfg object from a .pkl or .yaml file.

        Also detects and records the FSP spec version.
        Raises on unsupported extensions or YAML load failure.
        """
        gen_cfg_data = CGenYamlCfg()
        if file_name.endswith('.pkl'):
            with open(file_name, "rb") as pkl_file:
                gen_cfg_data.__dict__ = marshal.load(pkl_file)
                gen_cfg_data.prepare_marshal(False)
        elif file_name.endswith('.yaml'):
            if gen_cfg_data.load_yaml(file_name) != 0:
                raise Exception(gen_cfg_data.get_last_error())
        else:
            raise Exception('Unsupported file "%s" !' % file_name)
        # checking fsp version
        if gen_cfg_data.detect_fsp():
            self.fsp_version = '2.X'
        else:
            self.fsp_version = '1.X'
        return gen_cfg_data

    def about(self):
        """Show the About dialog with centered version text."""
        msg = 'Configuration Editor\n--------------------------------\n \
Version 0.8\n2021'
        lines = msg.split('\n')
        width = 30
        text = []
        for line in lines:
            text.append(line.center(width, ' '))
        messagebox.showinfo('Config Editor', '\n'.join(text))

    def update_last_dir(self, path):
        self.last_dir = os.path.dirname(path)

    def get_open_file_name(self, ftype):
        """Prompt for a file of *ftype*; returns its path or None.

        For 'bin' a confirmation question is asked first because loading
        a binary discards current edits.
        """
        if self.is_config_data_loaded():
            if ftype == 'dlt':
                question = ''
            elif ftype == 'bin':
                question = 'All configuration will be reloaded from BIN file, \
continue ?'
            elif ftype == 'yaml':
                question = ''
            elif ftype == 'bsf':
                question = ''
            else:
                raise Exception('Unsupported file type !')
            if question:
                reply = messagebox.askquestion('', question, icon='warning')
                if reply == 'no':
                    return None

        if ftype == 'yaml':
            if self.mode == 'FSP':
                file_type = 'YAML'
                file_ext = 'yaml'
            else:
                file_type = 'YAML or PKL'
                file_ext = 'pkl *.yaml'
        else:
            file_type = ftype.upper()
            file_ext = ftype

        path = filedialog.askopenfilename(
            initialdir=self.last_dir,
            title="Load file",
            filetypes=(("%s files" % file_type, "*.%s" % file_ext), (
                "all files", "*.*")))
        if path:
            self.update_last_dir(path)
            return path
        else:
            return None

    def load_from_delta(self):
        path = self.get_open_file_name('dlt')
        if not path:
            return
        self.load_delta_file(path)

    def load_delta_file(self, path):
        # Re-baseline from the original binary before applying the delta.
        self.reload_config_data_from_bin(self.org_cfg_data_bin)
        try:
            self.cfg_data_obj.override_default_value(path)
        except Exception as e:
            messagebox.showerror('LOADING ERROR', str(e))
            return
        self.update_last_dir(path)
        self.refresh_config_data_page()

    def load_from_bin(self):
        path = filedialog.askopenfilename(
            initialdir=self.last_dir,
            title="Load file",
            filetypes={("Binaries", "*.fv *.fd *.bin *.rom")})
        if not path:
            return
        self.load_bin_file(path)

    def load_bin_file(self, path):
        """Load configuration values from a raw binary file."""
        with open(path, 'rb') as fd:
            bin_data = bytearray(fd.read())
        if len(bin_data) < len(self.org_cfg_data_bin):
            # NOTE(review): showerror is called with a single argument, so
            # this text becomes the dialog *title*, not the message body.
            messagebox.showerror('Binary file size is smaller than what \
YAML requires !')
            return

        try:
            self.reload_config_data_from_bin(bin_data)
        except Exception as e:
            messagebox.showerror('LOADING ERROR', str(e))
            return

    def load_from_bsf_file(self):
        path = self.get_open_file_name('bsf')
        if not path:
            return
        self.load_bsf_file(path)

    def load_bsf_file(self, path):
        # Convert BSF -> DSC -> YAML next to the input file, then load it.
        bsf_file = path
        dsc_file = os.path.splitext(bsf_file)[0] + '.dsc'
        yaml_file = os.path.splitext(bsf_file)[0] + '.yaml'
        bsf_to_dsc(bsf_file, dsc_file)
        dsc_to_yaml(dsc_file, yaml_file)

        self.load_cfg_file(yaml_file)
        return

    def load_from_fd(self):
        path = filedialog.askopenfilename(
            initialdir=self.last_dir,
            title="Load file",
            filetypes={("Binaries", "*.fv *.fd *.bin *.rom")})
        if not path:
            return
        self.load_fd_file(path)

    def load_fd_file(self, path):
        """Parse a firmware device image and pop up its FSP header report."""
        with open(path, 'rb') as fd:
            bin_data = bytearray(fd.read())

        fd = FirmwareDevice(0, bin_data)
        fd.ParseFd()
        fd.ParseFsp()
        fd.OutputFsp()

    def load_cfg_file(self, path):
        """Load a YAML/PKL config file and rebuild the whole UI state."""
        # Save current values in widget and clear database
        self.clear_widgets_inLayout()
        self.left.delete(*self.left.get_children())

        self.cfg_data_obj = self.load_config_data(path)

        self.update_last_dir(path)
        self.org_cfg_data_bin = self.cfg_data_obj.generate_binary_array()
        self.build_config_page_tree(self.cfg_data_obj.get_cfg_page()['root'],
                                    '')

        msg_string = 'Click YES if it is FULL FSP '\
            + self.fsp_version + ' Binary'
        reply = messagebox.askquestion('Form', msg_string)
        if reply == 'yes':
            self.load_from_bin()

        # A config is loaded now; enable all data-dependent menu entries.
        for menu in self.menu_string:
            self.file_menu.entryconfig(menu, state="normal")

        return 0

    def load_from_yaml(self):
        path = self.get_open_file_name('yaml')
        if not path:
            return

        self.load_cfg_file(path)

    def get_save_file_name(self, extension):
        path = filedialog.asksaveasfilename(
            initialdir=self.last_dir,
            title="Save file",
            defaultextension=extension)
        if path:
            self.last_dir = os.path.dirname(path)
            return path
        else:
            return None

    def save_delta_file(self, full=False):
        """Write a .dlt file; *full* dumps all values, not just changes."""
        path = self.get_save_file_name(".dlt")
        if not path:
            return

        self.update_config_data_on_page()
        new_data = self.cfg_data_obj.generate_binary_array()
        self.cfg_data_obj.generate_delta_file_from_bin(path,
                                                       self.org_cfg_data_bin,
                                                       new_data, full)

    def save_to_delta(self):
        self.save_delta_file()

    def save_full_to_delta(self):
        self.save_delta_file(True)

    def save_to_bin(self):
        path = self.get_save_file_name(".bin")
        if not path:
            return

        self.update_config_data_on_page()
        bins = self.cfg_data_obj.save_current_to_bin()

        with open(path, 'wb') as fd:
            fd.write(bins)

    def refresh_config_data_page(self):
        self.clear_widgets_inLayout()
        self.on_config_page_select_change(None)

    def reload_config_data_from_bin(self, bin_dat):
        self.cfg_data_obj.load_default_from_bin(bin_dat)
        self.refresh_config_data_page()

    def set_config_item_value(self, item, value_str):
        """Normalize *value_str* per the item's type and store it.

        EditText values are truncated to the item's byte length; other
        types are reformatted via the config object, falling back to the
        previous value on failure.
        """
        itype = item['type'].split(',')[0]
        if itype == "Table":
            new_value = value_str
        elif itype == "EditText":
            length = (self.cfg_data_obj.get_cfg_item_length(item) + 7) // 8
            new_value = value_str[:length]
            if item['value'].startswith("'"):
                new_value = "'%s'" % new_value
        else:
            try:
                new_value = self.cfg_data_obj.reformat_value_str(
                    value_str,
                    self.cfg_data_obj.get_cfg_item_length(item),
                    item['value'])
            except Exception:
                print("WARNING: Failed to format value string '%s' for '%s' !"
                      % (value_str, item['path']))
                new_value = item['value']

        if item['value'] != new_value:
            if self.debug:
                print('Update %s from %s to %s !'
                      % (item['cname'], item['value'], new_value))
            item['value'] = new_value

    def get_config_data_item_from_widget(self, widget, label=False):
        """Map a widget back to its config item via the registered path.

        When *label* is True, 'LABEL_'-prefixed registrations (name labels)
        resolve to their item as well; otherwise they yield None.
        """
        name = self.get_object_name(widget)
        if not name or not len(self.page_list):
            return None

        if name.startswith('LABEL_'):
            if label:
                path = name[6:]
            else:
                return None
        else:
            path = name
        item = self.cfg_data_obj.get_item_by_path(path)
        return item

    def update_config_data_from_widget(self, widget, args):
        """Read the current widget state back into its config item."""
        item = self.get_config_data_item_from_widget(widget)
        if item is None:
            return
        elif not item:
            if isinstance(widget, tkinter.Label):
                return
            raise Exception('Failed to find "%s" !' %
                            self.get_object_name(widget))

        itype = item['type'].split(',')[0]
        if itype == "Combo":
            opt_list = self.cfg_data_obj.get_cfg_item_options(item)
            tmp_list = [opt[0] for opt in opt_list]
            idx = widget.current()
            if idx != -1:
                self.set_config_item_value(item, tmp_list[idx])
        elif itype in ["EditNum", "EditText"]:
            self.set_config_item_value(item, widget.get())
        elif itype in ["Table"]:
            new_value = bytes_to_bracket_str(widget.get())
            self.set_config_item_value(item, new_value)

    def evaluate_condition(self, item):
        # Invalid conditions default to 1 (visible) rather than hiding items.
        try:
            result = self.cfg_data_obj.evaluate_condition(item)
        except Exception:
            print("WARNING: Condition '%s' is invalid for '%s' !"
                  % (item['condition'], item['path']))
            result = 1
        return result

    def add_config_item(self, item, row):
        """Create the label + editor widget pair for one config item.

        Widget kind is chosen from the item type: Combo, EditNum/EditText,
        or Table; unknown types are reported and recorded as invalid.
        """
        parent = self.right_grid

        name = tkinter.Label(parent, text=item['name'], anchor="w")

        parts = item['type'].split(',')
        itype = parts[0]
        widget = None

        if itype == "Combo":
            # Build
            opt_list = self.cfg_data_obj.get_cfg_item_options(item)
            current_value = self.cfg_data_obj.get_cfg_item_value(item, False)
            option_list = []
            current = None

            for idx, option in enumerate(opt_list):
                option_str = option[0]
                try:
                    option_value = self.cfg_data_obj.get_value(
                        option_str,
                        len(option_str), False)
                except Exception:
                    option_value = 0
                    print('WARNING: Option "%s" has invalid format for "%s" !'
                          % (option_str, item['path']))
                if option_value == current_value:
                    current = idx
                option_list.append(option[1])

            widget = ttk.Combobox(parent, value=option_list, state="readonly")
            widget.bind("<<ComboboxSelected>>", self.combo_select_changed)
            widget.unbind_class("TCombobox", "<MouseWheel>")

            if current is None:
                print('WARNING: Value "%s" is an invalid option for "%s" !' %
                      (current_value, item['path']))
                self.invalid_values[item['path']] = current_value
            else:
                widget.current(current)

        elif itype in ["EditNum", "EditText"]:
            txt_val = tkinter.StringVar()
            widget = tkinter.Entry(parent, textvariable=txt_val)
            value = item['value'].strip("'")
            if itype in ["EditText"]:
                # Live-limit text length to the item's byte size.
                txt_val.trace(
                    'w',
                    lambda *args: self.limit_entry_size
                    (txt_val, (self.cfg_data_obj.get_cfg_item_length(item)
                               + 7) // 8))
            elif itype in ["EditNum"]:
                value = item['value'].strip("{").strip("}").strip()
                widget.bind("<FocusOut>", self.edit_num_finished)
            txt_val.set(value)

        elif itype in ["Table"]:
            bins = self.cfg_data_obj.get_cfg_item_value(item, True)
            col_hdr = item['option'].split(',')
            widget = custom_table(parent, col_hdr, bins)

        else:
            if itype and itype not in ["Reserved"]:
                print("WARNING: Type '%s' is invalid for '%s' !" %
                      (itype, item['path']))
                self.invalid_values[item['path']] = itype

        if widget:
            create_tool_tip(widget, item['help'])
            self.set_object_name(name, 'LABEL_' + item['path'])
            self.set_object_name(widget, item['path'])
            name.grid(row=row, column=0, padx=10, pady=5, sticky="nsew")
            widget.grid(row=row + 1, rowspan=1, column=0,
                        padx=10, pady=5, sticky="nsew")

    def update_config_data_on_page(self):
        self.walk_widgets_in_layout(self.right_grid,
                                    self.update_config_data_from_widget)
+
+
if __name__ == '__main__':
    # Launch the config editor GUI; blocks in the Tk event loop until the
    # main window is closed.
    root = tkinter.Tk()
    app = application(master=root)
    root.title("Config Editor")
    root.mainloop()
diff --git a/IntelFsp2Pkg/Tools/ConfigEditor/GenYamlCfg.py b/IntelFsp2Pkg/Tools/ConfigEditor/GenYamlCfg.py
new file mode 100644
index 0000000000..25fd9c547e
--- /dev/null
+++ b/IntelFsp2Pkg/Tools/ConfigEditor/GenYamlCfg.py
@@ -0,0 +1,2252 @@
+# @ GenYamlCfg.py
+#
+# Copyright (c) 2020 - 2021, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+#
+
+import os
+import sys
+import re
+import marshal
+import string
+import operator as op
+import ast
+import tkinter.messagebox as messagebox
+
+from datetime import date
+from collections import OrderedDict
+from CommonUtility import value_to_bytearray, value_to_bytes, \
+ bytes_to_value, get_bits_from_bytes, set_bits_to_bytes
+
# Generated file copyright header
# Template for the banner written at the top of generated files; the '%s'
# placeholder receives a file-type description and '%4d' the current year
# (both filled in by get_copyright_header).
__copyright_tmp__ = """/** @file

  Platform Configuration %s File.

  Copyright (c) %4d, Intel Corporation. All rights reserved.<BR>
  SPDX-License-Identifier: BSD-2-Clause-Patent

  This file is automatically generated. Please do NOT modify !!!

**/
"""
+
+
def get_copyright_header(file_type, allow_modify=False):
    """Return the generated-file copyright banner for *file_type*.

    file_type is one of 'yaml', 'dlt', 'inc' or 'h'; YAML/delta files get
    each banner line prefixed with '#'.  When allow_modify is True the
    "do NOT modify" notice is dropped.
    """
    file_description = {
        'yaml': 'Boot Setting',
        'dlt': 'Delta',
        'inc': 'C Binary Blob',
        'h': 'C Struct Header'
    }
    comment_char = '#' if file_type in ('yaml', 'dlt') else ''
    lines = __copyright_tmp__.split('\n')
    if allow_modify:
        lines = [line for line in lines if 'Please do NOT modify' not in line]
    decorated = [comment_char + line for line in lines]
    # Drop the trailing newline artifact of the template, keep exactly one.
    copyright_hdr = '\n'.join(decorated)[:-1] + '\n'
    return copyright_hdr % (file_description[file_type], date.today().year)
+
+
def check_quote(text):
    """Return True when *text* starts and ends with the same quote character.

    Fix: the original indexed text[0] unconditionally and raised
    IndexError for an empty string; an empty string is now simply
    reported as unquoted.  (A single quote character still counts as
    quoted, matching the original behavior.)
    """
    if not text:
        return False
    return text[0] == text[-1] and text[0] in ("'", '"')
+
+
def strip_quote(text):
    """Strip one pair of surrounding quotes from the trimmed text.

    Returns the unquoted inner text when the trimmed input is quoted,
    otherwise the original (untrimmed) input unchanged.
    """
    trimmed = text.strip()
    return trimmed[1:-1] if check_quote(trimmed) else text
+
+
def strip_delimiter(text, delim):
    """Strip a matching delimiter pair (e.g. '{}') from the trimmed text.

    Returns the inner text when the trimmed input begins with delim[0]
    and ends with delim[-1]; otherwise returns the original input.
    """
    trimmed = text.strip()
    if trimmed and trimmed[0] == delim[0] and trimmed[-1] == delim[-1]:
        return trimmed[1:-1]
    return text
+
+
def bytes_to_bracket_str(bytes):
    """Format an iterable of byte values as a C-style '{ 0x01, 0x02 }' list.

    NOTE: the parameter shadows the builtin ``bytes``; the name is kept
    for interface compatibility with existing callers.
    """
    hex_items = ['0x%02x' % value for value in bytes]
    return '{ ' + ', '.join(hex_items) + ' }'
+
+
def array_str_to_value(val_str):
    """Convert a byte-list string like '{ 0x01, 0x02 }' into an integer.

    The list is interpreted little-endian (first element is the lowest
    byte).  Plain numeric strings such as '0x10' or '16' also work.
    """
    text = strip_quote(strip_delimiter(val_str.strip(), '{}'))
    result = 0
    # Walk the elements highest-byte first and shift them in.
    for part in reversed(text.split(',')):
        result = (result << 8) | int(part.strip(), 0)
    return result
+
+
def write_lines(lines, file):
    """Write the text column of *lines* to *file*.

    Each entry of *lines* is a tuple whose first element is the text to
    emit; remaining elements are ignored.
    Fix: the file is now opened with a context manager so the handle is
    closed even if the write raises.
    """
    with open(file, "w") as fo:
        fo.write(''.join(x[0] for x in lines))
+
+
def read_lines(file):
    """Return all lines of *file* as a list.

    When the given path does not exist, falls back to the bare filename
    in the current working directory (so includes resolve when the tool
    is run from the YAML directory).
    Fix: the file handle is now closed via a context manager even when
    readlines() raises.
    """
    if not os.path.exists(file):
        test_file = os.path.basename(file)
        if os.path.exists(test_file):
            file = test_file
    with open(file, 'r') as fi:
        return fi.readlines()
+
+
def expand_file_value(path, value_str):
    """Expand a '{ FILE: name1, name2 }' value string into file contents.

    Each referenced file is resolved relative to *path* and its raw bytes
    are concatenated into the returned bytearray.  A value string that
    does not match the FILE pattern yields an empty bytearray.
    Fixes: the opened files are now closed via context managers (the
    original leaked every handle), and a leftover debug print that dumped
    the entire binary payload to stdout has been removed.
    """
    result = bytearray()
    match = re.match("\\{\\s*FILE:(.+)\\}", value_str)
    if match:
        file_list = match.group(1).split(',')
        for file in file_list:
            file = file.strip()
            bin_path = os.path.join(path, file)
            with open(bin_path, 'rb') as bin_file:
                result.extend(bytearray(bin_file.read()))
    return result
+
+
class ExpressionEval(ast.NodeVisitor):
    """Safe evaluator for the small expression language used in conditions.

    Supports arithmetic/bitwise/comparison operators, ``and``/``or``,
    unary ``-``/``~``, variable lookup through either a dict namespace or
    a callable, and the helpers ``ternary(cond, a, b)``, ``offset('x')``
    and ``length('x')`` (the latter two require a callable variable
    resolver).  Anything else raises ValueError.
    """

    # ast operator node -> implementation.  '/' deliberately maps to
    # floor division: config expressions are integer-only.
    operators = {
        ast.Add: op.add,
        ast.Sub: op.sub,
        ast.Mult: op.mul,
        ast.Div: op.floordiv,
        ast.Mod: op.mod,
        ast.Eq: op.eq,
        ast.NotEq: op.ne,
        ast.Gt: op.gt,
        ast.Lt: op.lt,
        ast.GtE: op.ge,
        ast.LtE: op.le,
        ast.BitXor: op.xor,
        ast.BitAnd: op.and_,
        ast.BitOr: op.or_,
        ast.Invert: op.invert,
        ast.USub: op.neg
    }

    def __init__(self):
        self._debug = False
        self._expression = ''
        self._namespace = {}
        self._get_variable = None

    def eval(self, expr, vars=None):
        """Evaluate *expr* and return the result.

        *vars* may be a dict namespace, a callable ``f(name)`` used to
        resolve variables, or None for an empty namespace.
        Fix: the default was a shared mutable ``{}``; it is now a None
        sentinel (behavior for all existing call forms is unchanged).
        """
        self._expression = expr
        if vars is None:
            vars = {}
        if type(vars) is dict:
            self._namespace = vars
            self._get_variable = None
        else:
            self._namespace = {}
            self._get_variable = vars
        node = ast.parse(self._expression, mode='eval')
        result = self.visit(node.body)
        if self._debug:
            print('EVAL [ %s ] = %s' % (expr, str(result)))
        return result

    def visit_Constant(self, node):
        # Fix: Python 3.8+ parses literals as ast.Constant; without this
        # handler NodeVisitor falls back to the deprecated visit_Num /
        # visit_NameConstant shims, which are scheduled for removal.
        return node.value

    def visit_Name(self, node):
        if self._get_variable is not None:
            return self._get_variable(node.id)
        else:
            return self._namespace[node.id]

    def visit_Num(self, node):
        # Legacy path for Python < 3.8 parse trees.
        return node.n

    def visit_NameConstant(self, node):
        # Legacy path for Python < 3.8 parse trees (True/False/None).
        return node.value

    def visit_BoolOp(self, node):
        # Short-circuit like Python, but always return a bool.
        result = False
        if isinstance(node.op, ast.And):
            for value in node.values:
                result = self.visit(value)
                if not result:
                    break
        elif isinstance(node.op, ast.Or):
            for value in node.values:
                result = self.visit(value)
                if result:
                    break
        return True if result else False

    def visit_UnaryOp(self, node):
        val = self.visit(node.operand)
        return ExpressionEval.operators[type(node.op)](val)

    def visit_BinOp(self, node):
        lhs = self.visit(node.left)
        rhs = self.visit(node.right)
        return ExpressionEval.operators[type(node.op)](lhs, rhs)

    def visit_Compare(self, node):
        # Chained comparisons (a < b < c) short-circuit on first failure.
        right = self.visit(node.left)
        result = True
        for operation, comp in zip(node.ops, node.comparators):
            if not result:
                break
            left = right
            right = self.visit(comp)
            result = ExpressionEval.operators[type(operation)](left, right)
        return result

    def visit_Call(self, node):
        if node.func.id in ['ternary']:
            condition = self.visit(node.args[0])
            val_true = self.visit(node.args[1])
            val_false = self.visit(node.args[2])
            return val_true if condition else val_false
        elif node.func.id in ['offset', 'length']:
            if self._get_variable is not None:
                return self._get_variable(node.args[0].s, node.func.id)
            else:
                raise ValueError("Unsupported function: " + repr(node))

    def generic_visit(self, node):
        raise ValueError("malformed node or string: " + repr(node))
+
+
+class CFG_YAML():
+ TEMPLATE = 'template'
+ CONFIGS = 'configs'
+ VARIABLE = 'variable'
+
+ def __init__(self):
+ self.log_line = False
+ self.allow_template = False
+ self.cfg_tree = None
+ self.tmp_tree = None
+ self.var_dict = None
+ self.def_dict = {}
+ self.yaml_path = ''
+ self.lines = []
+ self.full_lines = []
+ self.index = 0
+ self.re_expand = re.compile(
+ r'(.+:\s+|\s*\-\s*)!expand\s+\{\s*(\w+_TMPL)\s*:\s*\[(.+)]\s*\}')
+ self.re_include = re.compile(r'(.+:\s+|\s*\-\s*)!include\s+(.+)')
+
+ @staticmethod
+ def count_indent(line):
+ return next((i for i, c in enumerate(line) if not c.isspace()),
+ len(line))
+
+ @staticmethod
+ def substitue_args(text, arg_dict):
+ for arg in arg_dict:
+ text = text.replace('$' + arg, arg_dict[arg])
+ return text
+
+ @staticmethod
+ def dprint(*args):
+ pass
+
    def process_include(self, line, insert=True):
        """Expand an '!include <file>' directive into inline YAML lines.

        The included file's content is re-indented to match the directive's
        position; a '- ' list prefix keeps the same level, a 'key:' prefix
        indents by two.  A leading '>' in the included file appends its
        content on the directive's own line.  When *insert* is True the
        result is pushed onto the front of the pending-line queue; the
        expanded lines are returned either way.
        Raises when the line doesn't match the directive syntax or the
        include target cannot be found.
        """
        match = self.re_include.match(line)
        if not match:
            raise Exception("Invalid !include format '%s' !" % line.strip())

        prefix = match.group(1)
        include = match.group(2)
        if prefix.strip() == '-':
            prefix = ''
            adjust = 0
        else:
            adjust = 2

        include = strip_quote(include)
        # Target indentation for the included content.
        request = CFG_YAML.count_indent(line) + adjust

        if self.log_line:
            # remove the include line itself
            del self.full_lines[-1]

        inc_path = os.path.join(self.yaml_path, include)
        if not os.path.exists(inc_path):
            # try relative path to project root
            try_path = os.path.join(os.path.dirname(os.path.realpath(__file__)
                                                    ), "../..", include)
            if os.path.exists(try_path):
                inc_path = try_path
            else:
                raise Exception("ERROR: Cannot open file '%s'." % inc_path)

        lines = read_lines(inc_path)
        current = 0
        same_line = False
        # Find the first significant line; it defines the source indent.
        for idx, each in enumerate(lines):
            start = each.lstrip()
            if start == '' or start[0] == '#':
                continue

            if start[0] == '>':
                # append the content directly at the same line
                same_line = True

            start = idx
            current = CFG_YAML.count_indent(each)
            break

        lines = lines[start+1:] if same_line else lines[start:]
        leading = ''
        if same_line:
            request = len(prefix)
            leading = '>'

        # Re-anchor every included line from 'current' to 'request' columns.
        lines = [prefix + '%s\n' % leading] + [' ' * request +
                                               i[current:] for i in lines]
        if insert:
            self.lines = lines + self.lines

        return lines
+
+ def process_expand(self, line):
+ match = self.re_expand.match(line)
+ if not match:
+ raise Exception("Invalid !expand format '%s' !" % line.strip())
+ lines = []
+ prefix = match.group(1)
+ temp_name = match.group(2)
+ args = match.group(3)
+
+ if prefix.strip() == '-':
+ indent = 0
+ else:
+ indent = 2
+ lines = self.process_expand_template(temp_name, prefix, args, indent)
+ self.lines = lines + self.lines
+
    def process_expand_template(self, temp_name, prefix, args, indent=2):
        # expand text with arg substitution
        """Instantiate template *temp_name* with comma-separated *args*.

        Positional arguments substitute '$(1)', '$(2)', ... placeholders;
        module-level defines from self.def_dict are substituted first via
        DefTemplate.  Returns a list of newline-terminated lines indented
        relative to *prefix* by *indent* columns (indent=0 omits the
        prefix line itself).
        """
        if temp_name not in self.tmp_tree:
            raise Exception("Could not find template '%s' !" % temp_name)
        parts = args.split(',')
        parts = [i.strip() for i in parts]
        num = len(parts)
        # Build {'(1)': arg1, '(2)': arg2, ...} for $-substitution.
        arg_dict = dict(zip(['(%d)' % (i + 1) for i in range(num)], parts))
        str_data = self.tmp_tree[temp_name]
        text = DefTemplate(str_data).safe_substitute(self.def_dict)
        text = CFG_YAML.substitue_args(text, arg_dict)
        target = CFG_YAML.count_indent(prefix) + indent
        current = CFG_YAML.count_indent(text)
        padding = target * ' '
        if indent == 0:
            leading = []
        else:
            leading = [prefix + '\n']
        # Shift each template line from 'current' to 'target' columns.
        text = leading + [(padding + i + '\n')[current:]
                          for i in text.splitlines()]
        return text
+
+ def load_file(self, yaml_file):
+ self.index = 0
+ self.lines = read_lines(yaml_file)
+
def peek_line(self):
    """Return the next queued line without consuming it, or None."""
    return self.lines[0] if self.lines else None
+
def put_line(self, line):
    """Push *line* back to the head of the queue; when logging is on,
    drop the matching entry appended by get_line()."""
    self.lines[0:0] = [line]
    if self.log_line:
        self.full_lines.pop()
+
def get_line(self):
    """Consume and return the next queued line (None when exhausted).

    When log_line is enabled the stripped line is recorded in
    full_lines for later re-emission.
    """
    if not self.lines:
        return None
    line = self.lines.pop(0)
    if self.log_line:
        self.full_lines.append(line.rstrip())
    return line
+
def get_multiple_line(self, indent):
    """Collect the body of a multi-line ('>') value.

    Consumes every following line indented deeper than *indent* and
    returns them concatenated.  Blank lines inside the block are
    consumed but not appended to the result.
    """
    text = ''
    newind = indent + 1
    while True:
        line = self.peek_line()
        if line is None:
            break
        sline = line.strip()
        if sline != '':
            newind = CFG_YAML.count_indent(line)
            if newind <= indent:
                # back at (or above) the parent level: block is done
                break
        self.get_line()
        if sline != '':
            text = text + line
    return text
+
def traverse_cfg_tree(self, handler):
    """Walk self.cfg_tree depth-first, calling handler(key, node,
    level) for every OrderedDict child node (level starts at 1)."""
    def _walk(node, depth=0):
        # config structure
        for name in node:
            if type(node[name]) is OrderedDict:
                handler(name, node[name], depth + 1)
                _walk(node[name], depth + 1)
    _walk(self.cfg_tree)
+
def count(self):
    """Return the total number of dict nodes in the config tree."""
    total = [0]

    def _bump(name, cfgs, level):
        total[0] += 1

    self.traverse_cfg_tree(_bump)
    return total[0]
+
def parse(self, parent_name='', curr=None, level=0):
    """Recursively parse queued YAML lines into an OrderedDict tree.

    Handles multi-line '>' values, '!include' / '!expand' directives,
    '$ACTION' virtual nodes (renamed for uniqueness) and duplicate-key
    detection.  Returns the OrderedDict for the current nesting level.
    """
    child = None
    last_indent = None
    key = ''
    temp_chk = {}

    while True:
        line = self.get_line()
        if line is None:
            break

        curr_line = line.strip()
        if curr_line == '' or curr_line[0] == '#':
            continue

        indent = CFG_YAML.count_indent(line)
        if last_indent is None:
            last_indent = indent

        if indent != last_indent:
            # outside of current block, put the line back to queue
            self.put_line(' ' * indent + curr_line)

        if curr_line.endswith(': >'):
            # multiline marker
            old_count = len(self.full_lines)
            line = self.get_multiple_line(indent)
            if self.log_line and not self.allow_template \
                    and '!include ' in line:
                # expand include in template
                new_lines = []
                lines = line.splitlines()
                for idx, each in enumerate(lines):
                    if '!include ' in each:
                        new_line = ''.join(self.process_include(each,
                                                                False))
                        new_lines.append(new_line)
                    else:
                        new_lines.append(each)
                self.full_lines = self.full_lines[:old_count] + new_lines
            curr_line = curr_line + line

        if indent > last_indent:
            # child nodes
            if child is None:
                raise Exception('Unexpected format at line: %s'
                                % (curr_line))

            level += 1
            self.parse(key, child, level)
            level -= 1
            # after the child block, re-synchronize on the next line
            line = self.peek_line()
            if line is not None:
                curr_line = line.strip()
                indent = CFG_YAML.count_indent(line)
                if indent >= last_indent:
                    # consume the line
                    self.get_line()
            else:
                # end of file
                indent = -1

        if curr is None:
            curr = OrderedDict()

        if indent < last_indent:
            return curr

        marker1 = curr_line[0]
        marker2 = curr_line[-1]
        # a leading '-' marks a YAML list item; skip it for the key
        start = 1 if marker1 == '-' else 0
        pos = curr_line.find(': ')
        if pos > 0:
            # 'key: value' leaf (or directive) line
            child = None
            key = curr_line[start:pos].strip()
            if curr_line[pos + 2] == '>':
                curr[key] = curr_line[pos + 3:]
            else:
                # XXXX: !include / !expand
                if '!include ' in curr_line:
                    self.process_include(line)
                elif '!expand ' in curr_line:
                    if self.allow_template and not self.log_line:
                        self.process_expand(line)
                else:
                    value_str = curr_line[pos + 2:].strip()
                    curr[key] = value_str
                    if self.log_line and value_str[0] == '{':
                        # expand {FILE: xxxx} format in the log line
                        if value_str[1:].rstrip().startswith('FILE:'):
                            value_bytes = expand_file_value(
                                self.yaml_path, value_str)
                            value_str = bytes_to_bracket_str(value_bytes)
                            self.full_lines[-1] = line[
                                :indent] + curr_line[:pos + 2] + value_str

        elif marker2 == ':':
            # 'key:' opens a nested dictionary
            child = OrderedDict()
            key = curr_line[start:-1].strip()
            if key == '$ACTION':
                # special virtual nodes, rename to ensure unique key
                key = '$ACTION_%04X' % self.index
                self.index += 1
            if key in curr:
                if key not in temp_chk:
                    # check for duplicated keys at same level
                    temp_chk[key] = 1
                else:
                    raise Exception("Duplicated item '%s:%s' found !"
                                    % (parent_name, key))

            curr[key] = child
            # remember the special 'variable' and 'template' sections
            if self.var_dict is None and key == CFG_YAML.VARIABLE:
                self.var_dict = child
            if self.tmp_tree is None and key == CFG_YAML.TEMPLATE:
                self.tmp_tree = child
            if self.var_dict:
                for each in self.var_dict:
                    txt = self.var_dict[each]
                    if type(txt) is str:
                        self.def_dict['(%s)' % each] = txt
            if self.tmp_tree and key == CFG_YAML.CONFIGS:
                # apply template for the main configs
                self.allow_template = True
        else:
            child = None
            # - !include cfg_opt.yaml
            if '!include ' in curr_line:
                self.process_include(line)

    return curr
+
def load_yaml(self, opt_file):
    """Parse *opt_file* and return its configuration subtree; the
    template subtree is kept on the instance for later expansion."""
    self.var_dict = None
    self.yaml_path = os.path.dirname(opt_file)
    self.load_file(opt_file)
    tree = self.parse()
    self.tmp_tree = tree[CFG_YAML.TEMPLATE]
    self.cfg_tree = tree[CFG_YAML.CONFIGS]
    return self.cfg_tree
+
def expand_yaml(self, opt_file):
    """Return *opt_file* fully expanded (includes/templates resolved)
    as one newline-joined text blob, clearing the line log after."""
    self.log_line = True
    self.load_yaml(opt_file)
    self.log_line = False
    joined = '\n'.join(self.full_lines)
    self.full_lines = []
    return joined
+
+
class DefTemplate(string.Template):
    """string.Template variant whose identifiers may additionally be
    wrapped in parentheses, e.g. ${(1)} / ${(NAME)} as used by the
    !expand argument slots and variable definitions."""
    idpattern = r'\([_A-Z][_A-Z0-9]*\)|[_A-Z][_A-Z0-9]*'
+
+
class CGenYamlCfg:
    # Key under which a struct node stores its own offset/length metadata.
    STRUCT = '$STRUCT'
    # Length-suffix -> width in bits (b=bit, B=byte, W=word, D=dword, Q=qword).
    bits_width = {'b': 1, 'B': 8, 'W': 16, 'D': 32, 'Q': 64}
    # Built-in shorthand for a Combo option's choice list.
    builtin_option = {'$EN_DIS': [('0', 'Disable'), ('1', 'Enable')]}
    # Struct names excluded from generated headers (glob/regex patterns).
    exclude_struct = ['FSP_UPD_HEADER', 'FSPT_ARCH_UPD',
                      'FSPM_ARCH_UPD', 'FSPS_ARCH_UPD',
                      'GPIO_GPP_*', 'GPIO_CFG_DATA',
                      'GpioConfPad*', 'GpioPinConfig',
                      'BOOT_OPTION*', 'PLATFORMID_CFG_DATA', '\\w+_Half[01]']
    # Structs always emitted even when matched by exclude_struct.
    include_tag = ['GPIO_CFG_DATA']
    # Attributes recognized on a leaf config item in the YAML.
    keyword_set = set(['name', 'type', 'option', 'help', 'length',
                       'value', 'order', 'struct', 'condition'])
def __init__(self):
    """Create an empty generator and reset all per-file state."""
    self._debug = False
    self._macro_dict = {}
    self._mode = ''
    self.initialize()
+
def initialize(self):
    """Reset every field derived from a loaded YAML/binary."""
    self._old_bin = None
    self._cfg_tree = {}
    self._tmp_tree = {}
    self._cfg_list = []
    self._var_dict = {}
    self._def_dict = {}
    self._cfg_page = {'root': {'title': '', 'child': []}}
    self._cur_page = ''
    self._yaml_path = ''
+
@staticmethod
def deep_convert_dict(layer):
    """Recursively convert OrderedDict nodes into marshal-friendly
    lists of single-entry plain dicts (see prepare_marshal)."""
    if not isinstance(layer, OrderedDict):
        return layer
    converted = []
    for key, val in layer.items():
        converted.append({key: CGenYamlCfg.deep_convert_dict(val)})
    return converted
+
@staticmethod
def deep_convert_list(layer):
    """Inverse of deep_convert_dict: rebuild OrderedDict trees from
    lists of single-entry dicts; non-list values pass through."""
    if not isinstance(layer, list):
        return layer
    od = OrderedDict({})
    for entry in layer:
        if isinstance(entry, dict):
            key = next(iter(entry))
            od[key] = CGenYamlCfg.deep_convert_list(entry[key])
    return od
+
@staticmethod
def expand_include_files(file_path, cur_dir=''):
    """Read *file_path*, recursively inlining '!include' lines.

    Returns a list of (line, source_path, line_number) tuples so that
    later error reports can point at the originating file.

    NOTE(review): *cur_dir* is rebound inside the loop when an include
    is resolved via the tool-relative fallback; later includes in the
    same file are then searched relative to the new directory — confirm
    this is intentional.
    """
    if cur_dir == '':
        cur_dir = os.path.dirname(file_path)
        file_path = os.path.basename(file_path)

    input_file_path = os.path.join(cur_dir, file_path)
    file = open(input_file_path, "r")
    lines = file.readlines()
    file.close()
    new_lines = []
    for line_num, line in enumerate(lines):
        match = re.match("^!include\\s*(.+)?$", line.strip())
        if match:
            inc_path = match.group(1)
            tmp_path = os.path.join(cur_dir, inc_path)
            org_path = tmp_path
            if not os.path.exists(tmp_path):
                # fall back to a path relative to the tool location
                cur_dir = os.path.join(os.path.dirname
                                       (os.path.realpath(__file__)
                                        ), "..", "..")
                tmp_path = os.path.join(cur_dir, inc_path)
            if not os.path.exists(tmp_path):
                raise Exception("ERROR: Cannot open include\
 file '%s'." % org_path)
            else:
                # banner lines make the expansion traceable in output
                new_lines.append(('# Included from file: %s\n' % inc_path,
                                  tmp_path, 0))
                new_lines.append(('# %s\n' % ('=' * 80), tmp_path, 0))
                new_lines.extend(CGenYamlCfg.expand_include_files
                                 (inc_path, cur_dir))
        else:
            new_lines.append((line, input_file_path, line_num))

    return new_lines
+
+ @staticmethod
+ def format_struct_field_name(input, count=0):
+ name = ''
+ cap = True
+ if '_' in input:
+ input = input.lower()
+ for each in input:
+ if each == '_':
+ cap = True
+ continue
+ elif cap:
+ each = each.upper()
+ cap = False
+ name = name + each
+
+ if count > 1:
+ name = '%s[%d]' % (name, count)
+
+ return name
+
def get_mode(self):
    """Return the current generator mode string ('' or 'FSP')."""
    return self._mode
+
def set_mode(self, mode):
    """Record the generator mode string (e.g. 'FSP')."""
    self._mode = mode
+
def get_last_error(self):
    """Compatibility hook; this implementation never records errors."""
    return ''
+
def get_variable(self, var, attr='value'):
    """Resolve *var* against the variable dict first, then fall back
    to a config-item lookup returning the requested *attr*
    ('value', 'offset' or 'length')."""
    if var in self._var_dict:
        return self._var_dict[var]

    item = self.locate_cfg_item(var, False)
    if item is None:
        raise ValueError("Cannot find variable '%s' !" % var)

    if item:
        if 'indx' in item:
            item = self.get_item_by_index(item['indx'])
        if attr == 'offset':
            var = item['offset']
        elif attr == 'length':
            var = item['length']
        elif attr == 'value':
            var = self.get_cfg_item_value(item)
        else:
            raise ValueError("Unsupported variable attribute '%s' !" %
                             attr)
    return var
+
def eval(self, expr):
    """Evaluate expression *expr*.

    Any $(VAR) or $VAR references are first replaced with their hex
    values via get_variable(); the rewritten expression is then handed
    to ExpressionEval (which can still resolve remaining names).
    """
    def _handler(pattern):
        # group(1) matched the $(NAME) form, group(2) the bare $NAME form
        if pattern.group(1):
            target = 1
        else:
            target = 2
        result = self.get_variable(pattern.group(target))
        if result is None:
            raise ValueError('Unknown variable $(%s) !' %
                             pattern.group(target))
        return hex(result)

    expr_eval = ExpressionEval()
    if '$' in expr:
        # replace known variable first
        expr = re.sub(r'\$\(([_a-zA-Z][\w\.]*)\)|\$([_a-zA-Z][\w\.]*)',
                      _handler, expr)
    return expr_eval.eval(expr, self.get_variable)
+
def parse_macros(self, macro_def_str):
    """Populate self._macro_dict from '-D' style macro arguments.

    Accepts both the fused '-DNAME=VAL' form and the split
    '-D', 'NAME=VAL' form; a bare name defines an empty value.
    Returns 0 when at least one macro was recognized, else 1.
    """
    # ['-DABC=1', '-D', 'CFG_DEBUG=1', '-D', 'CFG_OUTDIR=Build']
    self._macro_dict = {}
    pending = False
    for token in macro_def_str:
        if token.startswith('-D'):
            pending = True
            if len(token) > 2:
                token = token[2:]
            else:
                continue
        if not pending:
            continue
        pending = False
        found = re.match("(\\w+)=(.+)", token)
        if found:
            self._macro_dict[found.group(1)] = found.group(2)
        else:
            found = re.match("(\\w+)", token)
            if found:
                self._macro_dict[found.group(1)] = ''
    error = 0 if self._macro_dict else 1
    if self._debug:
        print("INFO : Macro dictionary:")
        for each in self._macro_dict:
            print(" $(%s) = [ %s ]"
                  % (each, self._macro_dict[each]))
    return error
+
def get_cfg_list(self, page_id=None):
    """Return every config item, or only the named items belonging
    to *page_id* when one is given."""
    if page_id is None:
        # return full list
        return self._cfg_list
    # build a new list for items under a page ID
    return [i for i in self._cfg_list
            if i['cname'] and (i['page'] == page_id)]
+
def get_cfg_page(self):
    """Return the page-layout tree (rooted at 'root')."""
    return self._cfg_page
+
def get_cfg_item_length(self, item):
    """Return the item's length in bits."""
    return item['length']
+
def get_cfg_item_value(self, item, array=False):
    """Decode an item's value string into an int, or a bytearray
    when *array* is True."""
    return self.get_value(item['value'], item['length'], array)
+
def format_value_to_str(self, value, bit_length, old_value=''):
    """Format integer *value* in the same style as *old_value*.

    The style is inferred from old_value's leading characters:
    '0x' -> hex, a quote -> string, '{' -> byte list; otherwise a
    decimal / hex / byte-list form is chosen by the field size.
    """
    # value is always int
    length = (bit_length + 7) // 8
    fmt = ''
    if old_value.startswith('0x'):
        fmt = '0x'
    elif old_value and (old_value[0] in ['"', "'", '{']):
        fmt = old_value[0]
    else:
        fmt = ''

    bvalue = value_to_bytearray(value, length)
    if fmt in ['"', "'"]:
        # string style: strip NUL padding and re-quote
        svalue = bvalue.rstrip(b'\x00').decode()
        value_str = fmt + svalue + fmt
    elif fmt == "{":
        value_str = '{ ' + ', '.join(['0x%02x' % i for i in bvalue]) + ' }'
    elif fmt == '0x':
        hex_len = length * 2
        # keep zero padding only when the old value was fully padded
        if len(old_value) == hex_len + 2:
            fstr = '0x%%0%dx' % hex_len
        else:
            fstr = '0x%x'
        value_str = fstr % value
    else:
        if length <= 2:
            value_str = '%d' % value
        elif length <= 8:
            value_str = '0x%x' % value
        else:
            value_str = '{ ' + ', '.join(['0x%02x' % i for i in
                                          bvalue]) + ' }'
    return value_str
+
def reformat_value_str(self, value_str, bit_length, old_value=None):
    """Re-emit *value_str* in canonical form, keeping the formatting
    style of *old_value* (or of value_str itself by default)."""
    parsed = self.parse_value(value_str, bit_length, False)
    template = value_str if old_value is None else old_value
    return self.format_value_to_str(parsed, bit_length, template)
+
def get_value(self, value_str, bit_length, array=True):
    """Convert a quoted-string or '{..}' byte-list value into a
    bytearray (array=True) or an integer (array=False)."""
    value_str = value_str.strip()
    quoted = (value_str[0] == "'" and value_str[-1] == "'") or \
             (value_str[0] == '"' and value_str[-1] == '"')
    if quoted:
        # string payload: raw bytes of the text, NUL when empty
        bvalue = bytearray(value_str[1:-1].encode())
        if len(bvalue) == 0:
            bvalue = bytearray(b'\x00')
        return bvalue if array else bytes_to_value(bvalue)
    # numeric or '{a, b, ...}' little-endian byte list payload
    if value_str[0] in '{':
        value_str = value_str[1:-1].strip()
    value = 0
    for part in value_str.split(',')[::-1]:
        value = (value << 8) | int(part.strip(), 0)
    if array:
        return value_to_bytearray(value, (bit_length + 7) // 8)
    return value
+
def parse_value(self, value_str, bit_length, array=True):
    """Parse a YAML value expression into bytes (or an integer).

    Supports quoted strings, '{...}' lists whose elements may be
    numbers, quoted strings or VALUE:COUNT<width> bit-field entries,
    and plain arithmetic expressions evaluated via self.eval().

    :param value_str:  textual value from the YAML/delta file
    :param bit_length: total size of the target field in bits
    :param array:      return a bytearray when True, else an integer
    :raises SystemExit: on malformed lists, misaligned bit fields or
                        values that overflow the field
    """
    length = (bit_length + 7) // 8
    if check_quote(value_str):
        value_str = bytes_to_bracket_str(value_str[1:-1].encode())
    elif (',' in value_str) and (value_str[0] != '{'):
        # bare comma list -> normalize to '{ ... }'
        value_str = '{ %s }' % value_str
    if value_str[0] == '{':
        result = expand_file_value(self._yaml_path, value_str)
        if len(result) == 0:
            bin_list = value_str[1:-1].split(',')
            value = 0
            bit_len = 0
            unit_len = 1
            for idx, element in enumerate(bin_list):
                each = element.strip()
                if len(each) == 0:
                    continue

                in_bit_field = False
                if each[0] in "'" + '"':
                    # quoted string element -> its raw UTF-8 bytes
                    each_value = bytearray(each[1:-1], 'utf-8')
                elif ':' in each:
                    # VALUE:COUNT<width> bit-field element
                    match = re.match("^(.+):(\\d+)([b|B|W|D|Q])$", each)
                    if match is None:
                        raise SystemExit("Exception: Invalid value "
                                         "list format '%s' !" % each)
                    if match.group(1) == '0' and match.group(2) == '0':
                        # '0:0<width>' sets the default element width
                        unit_len = CGenYamlCfg.bits_width[match.group(3)
                                                          ] // 8
                    cur_bit_len = int(match.group(2)
                                      ) * CGenYamlCfg.bits_width[
                                          match.group(3)]
                    value += ((self.eval(match.group(1)) & (
                        1 << cur_bit_len) - 1)) << bit_len
                    bit_len += cur_bit_len
                    each_value = bytearray()
                    if idx + 1 < len(bin_list):
                        in_bit_field = True
                else:
                    try:
                        each_value = value_to_bytearray(
                            self.eval(each.strip()), unit_len)
                    except Exception:
                        # BUGFIX: the original used '%d' % each with a
                        # string operand, raising TypeError instead of
                        # the intended SystemExit.
                        raise SystemExit("Exception: Value %s cannot "
                                         "fit into %s bytes !"
                                         % (each, unit_len))

                if not in_bit_field:
                    if bit_len > 0:
                        # flush any accumulated bit-field bytes first
                        if bit_len % 8 != 0:
                            raise SystemExit("Exception: Invalid bit "
                                             "field alignment '%s' !"
                                             % value_str)
                        result.extend(value_to_bytes(value, bit_len // 8))
                        value = 0
                        bit_len = 0

                    result.extend(each_value)

    elif check_quote(value_str):
        result = bytearray(value_str[1:-1], 'utf-8')  # Excluding quotes
    else:
        result = value_to_bytearray(self.eval(value_str), length)

    # zero-pad up to the field size; overflow is fatal
    if len(result) < length:
        result.extend(b'\x00' * (length - len(result)))
    elif len(result) > length:
        raise SystemExit("Exception: Value '%s' is too big to fit "
                         "into %d bytes !" % (value_str, length))

    if array:
        return result
    return bytes_to_value(result)
+
def get_cfg_item_options(self, item):
    """Return the [(value, label), ...] choice list for a 'Combo'
    item; non-Combo items yield an empty list."""
    choices = []
    if item['type'] == "Combo":
        if item['option'] in CGenYamlCfg.builtin_option:
            # built-in shorthand such as '$EN_DIS'
            for op_val, op_str in CGenYamlCfg.builtin_option[
                    item['option']]:
                choices.append((op_val, op_str))
        else:
            for option in item['option'].split(','):
                option = option.strip()
                try:
                    (op_val, op_str) = option.split(':')
                except Exception:
                    raise SystemExit("Exception: Invalide \
option format '%s' !" % option)
                choices.append((op_val, op_str))
    return choices
+
def get_page_title(self, page_id, top=None):
    """Depth-first search of the page tree for *page_id*; return its
    title or None when the page does not exist."""
    if top is None:
        top = self.get_cfg_page()['root']
    for node in top['child']:
        key = next(iter(node))
        if key == page_id:
            return node[key]['title']
        found = self.get_page_title(page_id, node[key])
        if found is not None:
            return found
    return None
+
def print_pages(self, top=None, level=0):
    """Print the page hierarchy as an indented tree (debug aid)."""
    if top is None:
        top = self.get_cfg_page()['root']
    for node in top['child']:
        page_id = next(iter(node))
        print('%s%s: %s' % (' ' * level, page_id, node[page_id]['title']))
        self.print_pages(node[page_id], level + 1)
+
def get_item_by_index(self, index):
    """Return the config item at *index* of the flat item list."""
    return self._cfg_list[index]
+
def get_item_by_path(self, path):
    """Resolve a dotted *path* to its config item, or None when the
    path does not name an item."""
    node = self.locate_cfg_item(path)
    return self.get_item_by_index(node['indx']) if node else None
+
def locate_cfg_path(self, item):
    """Return the list of keys leading to node *item* (identity match)
    inside the config tree, or None when it is not present."""
    trail = []

    def _search(node):
        # config structure
        if item is node:
            return trail
        for key in node:
            if type(node[key]) is OrderedDict:
                trail.append(key)
                hit = _search(node[key])
                if hit:
                    return hit
                trail.pop()
        return None

    return _search(self._cfg_tree)
+
def locate_cfg_item(self, path, allow_exp=True):
    """Walk the dotted *path* through the config tree.

    Raises when a path component is missing, or returns None instead
    when *allow_exp* is False.
    """
    node = self._cfg_tree
    walked = []
    for part in path.split('.'):
        walked.append(part)
        node = node.get(part, None)
        if node is None:
            if allow_exp:
                raise Exception('Not a valid CFG config option path: %s'
                                % '.'.join(walked))
            return None
    return node
+
def traverse_cfg_tree(self, handler, top=None):
    """Depth-first walk over *top* (whole tree by default), calling
    handler(key, node, level) for each OrderedDict child node."""
    def _walk(node, depth=0):
        # config structure
        for name in node:
            if type(node[name]) is OrderedDict:
                handler(name, node[name], depth + 1)
                _walk(node[name], depth + 1)

    _walk(self._cfg_tree if top is None else top)
+
def print_cfgs(self, root=None, short=True, print_level=256):
    """Print every config item with offset/length/value (debug aid).

    :param short:       truncate long values to 'head ... tail'
    :param print_level: suppress nodes deeper than this level
    NOTE(review): *root* is accepted but unused; the traversal always
    starts at the full tree — confirm whether that is intended.
    """
    def _print_cfgs(name, cfgs, level):

        if 'indx' in cfgs:
            act_cfg = self.get_item_by_index(cfgs['indx'])
        else:
            # struct node: synthesize a pseudo item from $STRUCT info
            offset = 0
            length = 0
            value = ''
            if CGenYamlCfg.STRUCT in cfgs:
                cfg = cfgs[CGenYamlCfg.STRUCT]
                offset = int(cfg['offset'])
                length = int(cfg['length'])
                if 'value' in cfg:
                    value = cfg['value']
            if length == 0:
                return
            act_cfg = dict({'value': value, 'offset': offset,
                            'length': length})
        value = act_cfg['value']
        bit_len = act_cfg['length']
        offset = (act_cfg['offset'] + 7) // 8
        if value != '':
            try:
                value = self.reformat_value_str(act_cfg['value'],
                                                act_cfg['length'])
            except Exception:
                # keep the raw text when it cannot be reformatted
                value = act_cfg['value']
        length = bit_len // 8
        # NOTE: '' * 4 is just '' — harmless since %-6s pads below
        bit_len = '(%db)' % bit_len if bit_len % 8 else '' * 4
        if level <= print_level:
            if short and len(value) > 40:
                value = '%s ... %s' % (value[:20], value[-20:])
            print('%04X:%04X%-6s %s%s : %s' % (offset, length, bit_len,
                                               ' ' * level, name, value))

    self.traverse_cfg_tree(_print_cfgs)
+
def build_var_dict(self):
    """Populate _var_dict with _LENGTH_*/_OFFSET_* byte values for
    the top two levels of the tree, plus the total _LENGTH_."""
    var_dict = {}

    def _collect(name, cfgs, level):
        if level <= 2 and CGenYamlCfg.STRUCT in cfgs:
            info = cfgs[CGenYamlCfg.STRUCT]
            var_dict['_LENGTH_%s_' % name] = info['length'] // 8
            var_dict['_OFFSET_%s_' % name] = info['offset'] // 8

    self._var_dict = var_dict
    self.traverse_cfg_tree(_collect)
    self._var_dict['_LENGTH_'] = self._cfg_tree[CGenYamlCfg.STRUCT][
        'length'] // 8
    return 0
+
def add_cfg_page(self, child, parent, title=''):
    """Insert page *child* (with *title*) under *parent* in the page
    tree; return True when the parent was found, else False."""
    def _insert(node):
        key = next(iter(node))
        if parent == key:
            node[key]['child'].append({child: {'title': title,
                                               'child': []}})
            return True
        for sub in node[key]['child']:
            if _insert(sub):
                return True
        return False

    return _insert(self._cfg_page)
+
def set_cur_page(self, page_str):
    """Set the active page; *page_str* may also define new pages.

    Accepts comma-separated entries of the form 'id' or
    'id:parent:"Title"'.  A three-part entry inserts a new page under
    its parent ('root' when the parent field is empty).  The last
    entry processed becomes the current page.
    """
    if not page_str:
        return

    if ',' in page_str:
        page_list = page_str.split(',')
    else:
        page_list = [page_str]
    for page_str in page_list:
        parts = page_str.split(':')
        if len(parts) in [1, 3]:
            page = parts[0].strip()
            if len(parts) == 3:
                # it is a new page definition, add it into tree
                parent = parts[1] if parts[1] else 'root'
                parent = parent.strip()
                if parts[2][0] == '"' and parts[2][-1] == '"':
                    parts[2] = parts[2][1:-1]

                if not self.add_cfg_page(page, parent, parts[2]):
                    raise SystemExit("Error: Cannot find parent page \
'%s'!" % parent)
        else:
            raise SystemExit("Error: Invalid page format '%s' !"
                             % page_str)
        self._cur_page = page
+
def extend_variable(self, line):
    """Substitute $(...) definitions from _def_dict into *line*,
    resolving up to two levels of nested references."""
    # replace all variables
    if line == '':
        return line
    expanded = line
    for _ in range(2):
        previous = expanded
        expanded = DefTemplate(previous).safe_substitute(self._def_dict)
        if expanded == previous:
            break
    return expanded
+
def reformat_number_per_type(self, itype, value):
    """Normalize a numeric string to the HEX/DEC base requested by an
    'EditNum' item type; strings and byte lists pass through."""
    if check_quote(value) or value.startswith('{'):
        return value
    parts = itype.split(',')
    num_fmt = ''
    if len(parts) > 3 and parts[0] == 'EditNum':
        num_fmt = parts[1].strip()
    if num_fmt == 'HEX' and not value.startswith('0x'):
        return '0x%X' % int(value, 10)
    if num_fmt == 'DEC' and value.startswith('0x'):
        return '%d' % int(value, 16)
    return value
+
def add_cfg_item(self, name, item, offset, path):
    """Validate a leaf YAML node and append it to _cfg_list.

    :param name:   cname of the item ('$...' virtual nodes are skipped)
    :param item:   attribute dict parsed from the YAML
    :param offset: running bit offset of the item
    :param path:   list of keys leading to this node
    :return: the item's length in bits (0 for virtual nodes)
    :raises Exception: on unknown attributes, bad length/alignment or
                       an invalid config name
    """
    self.set_cur_page(item.get('page', ''))

    if name[0] == '$':
        # skip all virtual node
        return 0

    if not set(item).issubset(CGenYamlCfg.keyword_set):
        for each in list(item):
            if each not in CGenYamlCfg.keyword_set:
                raise Exception("Invalid attribute '%s' for '%s'!" %
                                (each, '.'.join(path)))

    length = item.get('length', 0)
    if type(length) is str:
        # textual length: '<count><unit>' (bits/bytes/words/dwords/qwords)
        match = re.match("^(\\d+)([b|B|W|D|Q])([B|W|D|Q]?)\\s*$", length)
        if match:
            unit_len = CGenYamlCfg.bits_width[match.group(2)]
            length = int(match.group(1), 10) * unit_len
        else:
            try:
                length = int(length, 0) * 8
            except Exception:
                raise Exception("Invalid length field '%s' for '%s' !" %
                                (length, '.'.join(path)))

        if offset % 8 > 0:
            raise Exception("Invalid alignment for field '%s' for \
'%s' !" % (name, '.'.join(path)))
    else:
        # define is length in bytes
        length = length * 8

    if not name.isidentifier():
        raise Exception("Invalid config name '%s' for '%s' !" %
                        (name, '.'.join(path)))

    itype = str(item.get('type', 'Reserved'))
    value = str(item.get('value', ''))
    if value:
        # normalize bare comma lists and numeric bases
        if not (check_quote(value) or value.startswith('{')):
            if ',' in value:
                value = '{ %s }' % value
            else:
                value = self.reformat_number_per_type(itype, value)

    help = str(item.get('help', ''))
    if '\n' in help:
        help = ' '.join([i.strip() for i in help.splitlines()])

    option = str(item.get('option', ''))
    if '\n' in option:
        option = ' '.join([i.strip() for i in option.splitlines()])

    # extend variables for value and condition
    condition = str(item.get('condition', ''))
    if condition:
        condition = self.extend_variable(condition)
    value = self.extend_variable(value)

    order = str(item.get('order', ''))
    if order:
        if '.' in order:
            # only the major part of 'major.minor' is used
            (major, minor) = order.split('.')
            order = int(major, 16)
        else:
            order = int(order, 16)
    else:
        order = offset

    cfg_item = dict()
    cfg_item['length'] = length
    cfg_item['offset'] = offset
    cfg_item['value'] = value
    cfg_item['type'] = itype
    cfg_item['cname'] = str(name)
    cfg_item['name'] = str(item.get('name', ''))
    cfg_item['help'] = help
    cfg_item['option'] = option
    cfg_item['page'] = self._cur_page
    cfg_item['order'] = order
    cfg_item['path'] = '.'.join(path)
    cfg_item['condition'] = condition
    if 'struct' in item:
        cfg_item['struct'] = item['struct']
    self._cfg_list.append(cfg_item)

    # back-reference from the tree node to the flat list entry
    item['indx'] = len(self._cfg_list) - 1

    # remove used info for reducing pkl size
    item.pop('option', None)
    item.pop('condition', None)
    item.pop('help', None)
    item.pop('name', None)
    item.pop('page', None)

    return length
+
def build_cfg_list(self, cfg_name='', top=None, path=None, info=None):
    """Flatten the config tree into _cfg_list, assigning bit offsets
    and attaching a $STRUCT metadata node to every struct subtree.

    BUGFIX: the original used mutable default arguments (path=[],
    info={'offset': 0}); shared state could leak across top-level
    calls.  None sentinels with fresh objects are used instead — the
    signature stays backward compatible for explicit callers.

    :raises SystemExit: when a struct's total bit length is not
                        byte-aligned
    """
    if path is None:
        path = []
    if top is None:
        top = self._cfg_tree
        info = {'offset': 0}
    elif info is None:
        info = {'offset': 0}

    start = info['offset']
    is_leaf = True
    for key in top:
        path.append(key)
        if type(top[key]) is OrderedDict:
            is_leaf = False
            self.build_cfg_list(key, top[key], path, info)
        path.pop()

    if is_leaf:
        length = self.add_cfg_item(cfg_name, top, info['offset'], path)
        info['offset'] += length
    elif cfg_name == '' or (cfg_name and cfg_name[0] != '$'):
        # check first element for struct
        first = next(iter(top))
        struct_str = CGenYamlCfg.STRUCT
        if first != struct_str:
            struct_node = OrderedDict({})
            top[struct_str] = struct_node
            top.move_to_end(struct_str, False)
        else:
            struct_node = top[struct_str]
        struct_node['offset'] = start
        struct_node['length'] = info['offset'] - start
        if struct_node['length'] % 8 != 0:
            raise SystemExit("Error: Bits length not aligned for %s !" %
                             str(path))
+
def get_field_value(self, top=None):
    """Serialize the subtree *top* (whole tree by default) into bytes.

    Every leaf item's value is packed at the item's bit offset
    relative to the subtree's own $STRUCT offset.
    """
    def _get_field_value(name, cfgs, level):
        if 'indx' in cfgs:
            act_cfg = self.get_item_by_index(cfgs['indx'])
            if act_cfg['length'] == 0:
                return
            value = self.get_value(act_cfg['value'], act_cfg['length'],
                                   False)
            set_bits_to_bytes(result, act_cfg['offset'] -
                              struct_info['offset'], act_cfg['length'],
                              value)

    if top is None:
        top = self._cfg_tree
    struct_info = top[CGenYamlCfg.STRUCT]
    # result buffer sized from the struct's total bit length
    result = bytearray((struct_info['length'] + 7) // 8)
    self.traverse_cfg_tree(_get_field_value, top)
    return result
+
def set_field_value(self, top, value_bytes, force=False):
    """Load *value_bytes* into the item(s) under *top*.

    When *top* is a single item its value is set directly; otherwise
    the bits are distributed across all leaf items of the struct.
    Existing non-empty values are only overwritten when *force* is
    True.
    """
    def _set_field_value(name, cfgs, level):
        if 'indx' not in cfgs:
            return
        act_cfg = self.get_item_by_index(cfgs['indx'])
        if force or act_cfg['value'] == '':
            value = get_bits_from_bytes(full_bytes,
                                        act_cfg['offset'] -
                                        struct_info['offset'],
                                        act_cfg['length'])
            act_val = act_cfg['value']
            if act_val == '':
                act_val = '%d' % value
            # keep the item's numeric base (HEX/DEC) when reformatting
            act_val = self.reformat_number_per_type(act_cfg
                                                    ['type'],
                                                    act_val)
            act_cfg['value'] = self.format_value_to_str(
                value, act_cfg['length'], act_val)

    if 'indx' in top:
        # it is config option
        value = bytes_to_value(value_bytes)
        act_cfg = self.get_item_by_index(top['indx'])
        act_cfg['value'] = self.format_value_to_str(
            value, act_cfg['length'], act_cfg['value'])
    else:
        # it is structure
        struct_info = top[CGenYamlCfg.STRUCT]
        length = struct_info['length'] // 8
        full_bytes = bytearray(value_bytes[:length])
        if len(full_bytes) < length:
            # zero-pad short input up to the struct size
            full_bytes.extend(bytearray(length - len(value_bytes)))
        self.traverse_cfg_tree(_set_field_value, top)
+
def update_def_value(self):
    """Normalize every default value after the tree has been built.

    Leaf values are reformatted canonically; structs carrying an
    aggregate 'value' are expanded down into their member fields.
    """
    def _update_def_value(name, cfgs, level):
        if 'indx' in cfgs:
            act_cfg = self.get_item_by_index(cfgs['indx'])
            if act_cfg['value'] != '' and act_cfg['length'] > 0:
                try:
                    act_cfg['value'] = self.reformat_value_str(
                        act_cfg['value'], act_cfg['length'])
                except Exception:
                    raise Exception("Invalid value expression '%s' \
for '%s' !" % (act_cfg['value'], act_cfg['path']))
        else:
            if CGenYamlCfg.STRUCT in cfgs and 'value' in \
                    cfgs[CGenYamlCfg.STRUCT]:
                # struct-level default: push it down into the fields
                curr = cfgs[CGenYamlCfg.STRUCT]
                value_bytes = self.get_value(curr['value'],
                                             curr['length'], True)
                self.set_field_value(cfgs, value_bytes)

    self.traverse_cfg_tree(_update_def_value, self._cfg_tree)
+
def evaluate_condition(self, item):
    """Evaluate an item's 'condition' expression to an integer
    (non-zero means the item is visible/active)."""
    return self.parse_value(item['condition'], 1, False)
+
def detect_fsp(self):
    """Detect an FSP layout: exactly three config segments whose
    signatures end in UPD_T, UPD_M and UPD_S respectively.

    Switches the generator mode to 'FSP' when matched and returns
    the detection result.
    """
    segs = self.get_cfg_segment()
    is_fsp = len(segs) == 3 and all(
        seg[0].endswith('UPD_%s' % 'TMS'[idx])
        for idx, seg in enumerate(segs))
    if is_fsp:
        self.set_mode('FSP')
    return is_fsp
+
def get_cfg_segment(self):
    """Split the config space into named segments.

    A '$ACTION' node carrying a 'find' key starts a new segment at the
    byte offset of the next item.  Returns a list of
    (signature, offset, length) tuples covering the whole space.
    """
    def _get_cfg_segment(name, cfgs, level):
        if 'indx' not in cfgs:
            if name.startswith('$ACTION_'):
                if 'find' in cfgs:
                    # remember the signature for the next real item
                    find[0] = cfgs['find']
        else:
            if find[0]:
                act_cfg = self.get_item_by_index(cfgs['indx'])
                segments.append([find[0], act_cfg['offset'] // 8, 0])
                find[0] = ''
        return

    find = ['']
    segments = []
    self.traverse_cfg_tree(_get_cfg_segment, self._cfg_tree)
    cfg_len = self._cfg_tree[CGenYamlCfg.STRUCT]['length'] // 8
    if len(segments) == 0:
        # no signatures defined: one anonymous segment covers all
        segments.append(['', 0, cfg_len])

    # sentinel end marker used to compute each segment's length
    segments.append(['', cfg_len, 0])
    cfg_segs = []
    for idx, each in enumerate(segments[:-1]):
        cfg_segs.append((each[0], each[1],
                         segments[idx+1][1] - each[1]))

    return cfg_segs
+
def get_bin_segment(self, bin_data):
    """Locate each config segment inside *bin_data*.

    Returns a list of [name, offset, length] entries.

    BUGFIX: the anonymous (whole-file) segment was detected with
    'key == 0', comparing bytes against an int — always False — so an
    empty signature fell through to bytes.find(b'') and produced a
    bogus multiple-match warning plus a wrong segment length.  The
    emptiness test is used instead.

    :raises Exception: when a segment signature is not found
    """
    cfg_segs = self.get_cfg_segment()
    bin_segs = []
    for seg in cfg_segs:
        key = seg[0].encode()
        if not key:
            # no signature: the segment spans the whole binary
            bin_segs.append([seg[0], 0, len(bin_data)])
            break
        pos = bin_data.find(key)
        if pos < 0:
            raise Exception("Could not find '%s' in binary !"
                            % seg[0])
        # ensure no other match for the key
        next_pos = bin_data.find(key, pos + len(seg[0]))
        if next_pos >= 0:
            if key == b'$SKLFSP$' or key == b'$BSWFSP$':
                string = ('Warning: Multiple matches for %s in '
                          'binary!\n\nA workaround applied to such '
                          'FSP 1.x binary to use second'
                          ' match instead of first match!' % key)
                messagebox.showwarning('Warning!', string)
                pos = next_pos
            else:
                print("Warning: Multiple matches for '%s' "
                      "in binary, the 1st instance will be used !"
                      % seg[0])
        bin_segs.append([seg[0], pos, seg[2]])

    return bin_segs
+
def extract_cfg_from_bin(self, bin_data):
    """Concatenate all config segments located in *bin_data*."""
    # get cfg bin length
    blob = bytearray()
    for name, off, size in self.get_bin_segment(bin_data):
        blob += bin_data[off:off + size]
    return blob
+
def save_current_to_bin(self):
    """Patch the current config values back into the binary that was
    originally loaded; return the patched image, or just the config
    blob when no binary has been loaded."""
    cfg_bins = self.generate_binary_array()
    if self._old_bin is None:
        return cfg_bins

    patched = bytearray(self._old_bin)
    cfg_off = 0
    for name, pos, size in self.get_bin_segment(self._old_bin):
        patched[pos:pos + size] = cfg_bins[cfg_off:cfg_off + size]
        cfg_off += size
    print('Patched the loaded binary successfully !')

    return patched
+
def load_default_from_bin(self, bin_data):
    """Remember *bin_data* as the loaded binary and force-load its
    config values into the tree; return the extracted config blob."""
    self._old_bin = bin_data
    blob = self.extract_cfg_from_bin(bin_data)
    self.set_field_value(self._cfg_tree, blob, True)
    return blob
+
def generate_binary_array(self, path=''):
    """Serialize the subtree at dotted *path* ('' = whole tree) into
    a bytearray; raise on an unknown path."""
    if path == '':
        return self.get_field_value(None)
    top = self.locate_cfg_item(path)
    if not top:
        raise Exception("Invalid configuration path '%s' !"
                        % path)
    return self.get_field_value(top)
+
def generate_binary(self, bin_file_name, path=''):
    """Write the serialized config (optionally only the subtree at
    dotted *path*) to *bin_file_name*; return 0.

    Rewritten with a context manager so the file handle is closed
    even when serialization raises (the original leaked it).
    """
    with open(bin_file_name, "wb") as bin_file:
        bin_file.write(self.generate_binary_array(path))
    return 0
+
def write_delta_file(self, out_file, platform_id, out_lines):
    """Write a .dlt delta file: copyright header, an optional
    platform-ID banner, then one 'path | value' line per entry.

    Rewritten with a context manager so the file handle is always
    closed (the original leaked it on a write error).
    """
    with open(out_file, "w") as dlt_fd:
        dlt_fd.write("%s\n" % get_copyright_header('dlt', True))
        if platform_id is not None:
            dlt_fd.write('#\n')
            dlt_fd.write('# Delta configuration values for '
                         'platform ID 0x%04X\n'
                         % platform_id)
            dlt_fd.write('#\n\n')
        for line in out_lines:
            dlt_fd.write('%s\n' % line)
+
def override_default_value(self, dlt_file):
    """Apply a .dlt delta file ('path | value' lines) on top of the
    current default values.

    :raises Exception: on malformed lines, unknown paths, or when
        PLATFORMID_CFG_DATA.PlatformId is absent from the delta file
    :return: 0 on success
    """
    error = 0
    dlt_lines = CGenYamlCfg.expand_include_files(dlt_file)

    platform_id = None
    for line, file_path, line_num in dlt_lines:
        line = line.strip()
        # skip blanks and comments
        if not line or line.startswith('#'):
            continue
        match = re.match("\\s*([\\w\\.]+)\\s*\\|\\s*(.+)", line)
        if not match:
            raise Exception("Unrecognized line '%s' "
                            "(File:'%s' Line:%d) !"
                            % (line, file_path, line_num + 1))

        path = match.group(1)
        value_str = match.group(2)
        top = self.locate_cfg_item(path)
        if not top:
            raise Exception(
                "Invalid configuration '%s' (File:'%s' Line:%d) !" %
                (path, file_path, line_num + 1))

        if 'indx' in top:
            act_cfg = self.get_item_by_index(top['indx'])
            bit_len = act_cfg['length']
        else:
            struct_info = top[CGenYamlCfg.STRUCT]
            bit_len = struct_info['length']

        value_bytes = self.parse_value(value_str, bit_len)
        self.set_field_value(top, value_bytes, True)

        if path == 'PLATFORMID_CFG_DATA.PlatformId':
            platform_id = value_str

    if platform_id is None:
        raise Exception(
            "PLATFORMID_CFG_DATA.PlatformId is missing "
            "in file '%s' !" %
            (dlt_file))

    return error
+
def generate_delta_file_from_bin(self, delta_file, old_data,
                                 new_data, full=False):
    """Write a delta file of the items whose value differs between
    *old_data* and *new_data* (every non-Reserved item when *full*).

    For non-FSP data a PlatformId line is always emitted first.
    Returns 0.
    """
    new_data = self.load_default_from_bin(new_data)
    lines = []
    platform_id = None
    def_platform_id = 0

    for item in self._cfg_list:
        if not full and (item['type'] in ['Reserved']):
            continue
        old_val = get_bits_from_bytes(old_data, item['offset'],
                                      item['length'])
        new_val = get_bits_from_bytes(new_data, item['offset'],
                                      item['length'])

        full_name = item['path']
        if 'PLATFORMID_CFG_DATA.PlatformId' == full_name:
            def_platform_id = old_val
        if new_val != old_val or full:
            val_str = self.reformat_value_str(item['value'],
                                              item['length'])
            text = '%-40s | %s' % (full_name, val_str)
            lines.append(text)

    if self.get_mode() != 'FSP':
        # NOTE(review): platform_id is always None at this point, so
        # the fallback to def_platform_id always triggers — confirm.
        if platform_id is None or def_platform_id == platform_id:
            platform_id = def_platform_id
            print("WARNING: 'PlatformId' configuration is "
                  "same as default %d!" % platform_id)

        lines.insert(0, '%-40s | %s\n\n' %
                     ('PLATFORMID_CFG_DATA.PlatformId',
                      '0x%04X' % platform_id))
    else:
        platform_id = None

    self.write_delta_file(delta_file, platform_id, lines)

    return 0
+
+ def generate_delta_file(self, delta_file, bin_file, bin_file2, full=False):
+ fd = open(bin_file, 'rb')
+ new_data = self.extract_cfg_from_bin(bytearray(fd.read()))
+ fd.close()
+
+ if bin_file2 == '':
+ old_data = self.generate_binary_array()
+ else:
+ old_data = new_data
+ fd = open(bin_file2, 'rb')
+ new_data = self.extract_cfg_from_bin(bytearray(fd.read()))
+ fd.close()
+
+ return self.generate_delta_file_from_bin(delta_file,
+ old_data, new_data, full)
+
+ def prepare_marshal(self, is_save):
+ if is_save:
+ # Ordered dict is not marshallable, convert to list
+ self._cfg_tree = CGenYamlCfg.deep_convert_dict(self._cfg_tree)
+ else:
+ # Revert it back
+ self._cfg_tree = CGenYamlCfg.deep_convert_list(self._cfg_tree)
+
+ def generate_yml_file(self, in_file, out_file):
+ cfg_yaml = CFG_YAML()
+ text = cfg_yaml.expand_yaml(in_file)
+ yml_fd = open(out_file, "w")
+ yml_fd.write(text)
+ yml_fd.close()
+ return 0
+
    def write_cfg_header_file(self, hdr_file_name, tag_mode,
                              tag_dict, struct_list):
        """Emit CDATA tag #defines and struct typedefs into a header.

        tag_mode    -- selection mode: after masking bit7, mode 0 keeps
                       tags below 0x100 and mode 1 keeps tags at or
                       above 0x100
        tag_dict    -- struct name -> tag value mapping
        struct_list -- struct descriptors produced by create_header_file
        """
        lines = []
        lines.append('\n\n')
        if self.get_mode() == 'FSP':
            lines.append('#include <FspUpd.h>\n')

        # Bit7 is a caller-side FSP marker; only bits 0-6 select tags.
        tag_mode = tag_mode & 0x7F
        tag_list = sorted(list(tag_dict.items()), key=lambda x: x[1])
        for tagname, tagval in tag_list:
            if (tag_mode == 0 and tagval >= 0x100) or \
               (tag_mode == 1 and tagval < 0x100):
                continue
            # Strip the '_CFG_DATA' style suffix (last 9 chars) from
            # the struct name to form the tag macro name.
            lines.append('#define %-30s 0x%03X\n' % (
                'CDATA_%s_TAG' % tagname[:-9], tagval))
        lines.append('\n\n')

        # NOTE(review): name_dict is populated but never read here;
        # duplicate suppression appears to happen in create_struct.
        name_dict = {}
        new_dict = {}
        for each in struct_list:
            if (tag_mode == 0 and each['tag'] >= 0x100) or \
               (tag_mode == 1 and each['tag'] < 0x100):
                continue
            new_dict[each['name']] = (each['alias'], each['count'])
            if each['alias'] not in name_dict:
                name_dict[each['alias']] = 1
            lines.extend(self.create_struct(each['alias'],
                                            each['node'], new_dict))
        lines.append('#pragma pack()\n\n')

        self.write_header_file(lines, hdr_file_name)
+
+ def write_header_file(self, txt_body, file_name, type='h'):
+ file_name_def = os.path.basename(file_name).replace('.', '_')
+ file_name_def = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', file_name_def)
+ file_name_def = re.sub('([a-z0-9])([A-Z])', r'\1_\2',
+ file_name_def).upper()
+
+ lines = []
+ lines.append("%s\n" % get_copyright_header(type))
+ lines.append("#ifndef __%s__\n" % file_name_def)
+ lines.append("#define __%s__\n\n" % file_name_def)
+ if type == 'h':
+ lines.append("#pragma pack(1)\n\n")
+ lines.extend(txt_body)
+ if type == 'h':
+ lines.append("#pragma pack()\n\n")
+ lines.append("#endif\n")
+
+ # Don't rewrite if the contents are the same
+ create = True
+ if os.path.exists(file_name):
+ hdr_file = open(file_name, "r")
+ org_txt = hdr_file.read()
+ hdr_file.close()
+
+ new_txt = ''.join(lines)
+ if org_txt == new_txt:
+ create = False
+
+ if create:
+ hdr_file = open(file_name, "w")
+ hdr_file.write(''.join(lines))
+ hdr_file.close()
+
+ def generate_data_inc_file(self, dat_inc_file_name, bin_file=None):
+ # Put a prefix GUID before CFGDATA so that it can be located later on
+ prefix = b'\xa7\xbd\x7f\x73\x20\x1e\x46\xd6\
+xbe\x8f\x64\x12\x05\x8d\x0a\xa8'
+ if bin_file:
+ fin = open(bin_file, 'rb')
+ bin_dat = prefix + bytearray(fin.read())
+ fin.close()
+ else:
+ bin_dat = prefix + self.generate_binary_array()
+
+ file_name = os.path.basename(dat_inc_file_name).upper()
+ file_name = file_name.replace('.', '_')
+
+ txt_lines = []
+
+ txt_lines.append("UINT8 mConfigDataBlob[%d] = {\n" % len(bin_dat))
+ count = 0
+ line = [' ']
+ for each in bin_dat:
+ line.append('0x%02X, ' % each)
+ count = count + 1
+ if (count & 0x0F) == 0:
+ line.append('\n')
+ txt_lines.append(''.join(line))
+ line = [' ']
+ if len(line) > 1:
+ txt_lines.append(''.join(line) + '\n')
+
+ txt_lines.append("};\n\n")
+ self.write_header_file(txt_lines, dat_inc_file_name, 'inc')
+
+ return 0
+
+ def get_struct_array_info(self, input):
+ parts = input.split(':')
+ if len(parts) > 1:
+ var = parts[1]
+ input = parts[0]
+ else:
+ var = ''
+ array_str = input.split('[')
+ name = array_str[0]
+ if len(array_str) > 1:
+ num_str = ''.join(c for c in array_str[-1] if c.isdigit())
+ num_str = '1000' if len(num_str) == 0 else num_str
+ array_num = int(num_str)
+ else:
+ array_num = 0
+ return name, array_num, var
+
    def process_multilines(self, string, max_char_length):
        """Wrap *string* into indented lines of at most *max_char_length*.

        Explicit literal '\\n' escape sequences inside the text always
        force a break; strings longer than the limit are additionally
        broken at spaces.  Each output line is prefixed with two spaces
        and terminated with a newline.
        """
        multilines = ''
        string_length = len(string)
        current_string_start = 0
        string_offset = 0
        # Despite the name, this holds a list of break offsets.
        break_line_dict = []
        if len(string) <= max_char_length:
            # Short string: only honor explicit '\n' escape sequences.
            while (string_offset < string_length):
                if string_offset >= 1:
                    if string[string_offset - 1] == '\\' and string[
                            string_offset] == 'n':
                        break_line_dict.append(string_offset + 1)
                string_offset += 1
            if break_line_dict != []:
                for each in break_line_dict:
                    multilines += "  %s\n" % string[
                        current_string_start:each].lstrip()
                    current_string_start = each
                # Emit whatever remains after the last break.
                if string_length - current_string_start > 0:
                    multilines += "  %s\n" % string[
                        current_string_start:].lstrip()
            else:
                multilines = "  %s\n" % string
        else:
            # Long string: break at spaces near the width limit, but
            # avoid leaving a very short (<= 10 chars) final fragment.
            new_line_start = 0
            new_line_count = 0
            found_space_char = False
            while (string_offset < string_length):
                if string_offset >= 1:
                    if new_line_count >= max_char_length - 1:
                        if string[string_offset] == ' ' and \
                                string_length - string_offset > 10:
                            break_line_dict.append(new_line_start
                                                   + new_line_count)
                            new_line_start = new_line_start + new_line_count
                            new_line_count = 0
                            found_space_char = True
                        elif string_offset == string_length - 1 and \
                                found_space_char is False:
                            # No space found anywhere: sentinel break.
                            break_line_dict.append(0)
                    # Explicit '\n' escapes still force a break.
                    if string[string_offset - 1] == '\\' and string[
                            string_offset] == 'n':
                        break_line_dict.append(string_offset + 1)
                        new_line_start = string_offset + 1
                        new_line_count = 0
                string_offset += 1
                new_line_count += 1
            if break_line_dict != []:
                break_line_dict.sort()
                for each in break_line_dict:
                    if each > 0:
                        multilines += "  %s\n" % string[
                            current_string_start:each].lstrip()
                        current_string_start = each
                if string_length - current_string_start > 0:
                    multilines += "  %s\n" % \
                        string[current_string_start:].lstrip()
        return multilines
+
    def create_field(self, item, name, length, offset, struct,
                     bsf_name, help, option, bits_length=None):
        """Format one C struct member declaration with an offset comment.

        item        -- config item dict (or None for struct members)
        name        -- member name
        length      -- byte length of the member
        offset      -- byte offset (None renders as '????')
        struct      -- explicit C type override ('' for none); may
                       carry trailing '*' for pointer members
        bsf_name    -- display name used in the comment
        help/option -- currently unused (kept for interface stability)
        bits_length -- bit-field width, or None for a plain member

        Returns the formatted declaration text; '\\n' alone for dummy
        or zero-width members.
        """
        pos_name = 28
        name_line = ''
        # help_line = ''
        # option_line = ''

        if length == 0 and name == 'dummy':
            return '\n'

        if bits_length == 0:
            return '\n'

        # Sizes 1/2/4/8 map to native UINTn; anything else becomes a
        # UINT8 array.
        is_array = False
        if length in [1, 2, 4, 8]:
            type = "UINT%d" % (length * 8)
        else:
            is_array = True
            type = "UINT8"

        # A '{...}' default value forces byte-array representation.
        if item and item['value'].startswith('{'):
            type = "UINT8"
            is_array = True

        if struct != '':
            # Trailing '*' on the type moves to the member name.
            struct_base = struct.rstrip('*')
            name = '*' * (len(struct) - len(struct_base)) + name
            struct = struct_base
            type = struct
            if struct in ['UINT8', 'UINT16', 'UINT32', 'UINT64']:
                is_array = True
                unit = int(type[4:]) // 8
                # Element count rather than byte count for typed arrays.
                length = length / unit
            else:
                is_array = False

        if is_array:
            name = name + '[%d]' % length

        # Pad the type so member names line up in a column.
        if len(type) < pos_name:
            space1 = pos_name - len(type)
        else:
            space1 = 1

        if bsf_name != '':
            name_line = " %s\n" % bsf_name
        else:
            name_line = "N/A\n"

        # if help != '':
        #     help_line = self.process_multilines(help, 80)

        # if option != '':
        #     option_line = self.process_multilines(option, 80)

        if offset is None:
            offset_str = '????'
        else:
            offset_str = '0x%04X' % offset

        if bits_length is None:
            bits_length = ''
        else:
            bits_length = ' : %d' % bits_length

        # return "\n/** %s%s%s**/\n  %s%s%s%s;\n" % (name_line, help_line,
        # option_line, type, ' ' * space1, name, bits_length)
        return "\n  /* Offset %s: %s */\n  %s%s%s%s;\n" % (
            offset_str, name_line.strip(), type, ' ' * space1,
            name, bits_length)
+
    def create_struct(self, cname, top, struct_dict):
        """Emit a 'typedef struct { ... } cname;' block for node *top*.

        struct_dict maps struct names to alias/count information used
        to collapse repeated members into arrays.  Returns the header
        text as a list of strings (empty when already emitted).
        """
        index = 0
        last = ''
        lines = []
        off_base = -1

        if cname in struct_dict:
            # NOTE(review): values stored in struct_dict by the caller
            # are 2-tuples, so index [2] looks suspicious -- confirm
            # which dict shape is expected here.
            if struct_dict[cname][2]:
                return []
        lines.append('\ntypedef struct {\n')
        for field in top:
            # '$'-prefixed keys are metadata, not struct members.
            if field[0] == '$':
                continue

            index += 1

            t_item = top[field]
            if 'indx' not in t_item:
                # Nested struct member (no leaf item index).
                if CGenYamlCfg.STRUCT not in top[field]:
                    continue

                if struct_dict[field][1] == 0:
                    continue

                append = True
                struct_info = top[field][CGenYamlCfg.STRUCT]

                if 'struct' in struct_info:
                    struct, array_num, var = self.get_struct_array_info(
                        struct_info['struct'])
                    if array_num > 0:
                        # Consecutive members of the same struct type
                        # collapse into a single array member.
                        if last == struct:
                            append = False
                        last = struct
                        if var == '':
                            var = field

                        field = CGenYamlCfg.format_struct_field_name(
                            var, struct_dict[field][1])
                else:
                    struct = struct_dict[field][0]
                    field = CGenYamlCfg.format_struct_field_name(
                        field, struct_dict[field][1])

                if append:
                    offset = t_item['$STRUCT']['offset'] // 8
                    if off_base == -1:
                        off_base = offset
                    line = self.create_field(None, field, 0, 0, struct,
                                             '', '', '')
                    lines.append('  %s' % line)
                    last = struct
                continue

            item = self.get_item_by_index(t_item['indx'])
            # CfgHeader/CondValue are internal metadata, not members.
            if item['cname'] == 'CfgHeader' and index == 1 or \
               (item['cname'] == 'CondValue' and index == 2):
                continue

            bit_length = None
            length = (item['length'] + 7) // 8
            # A 'Nb' length marks a bit field; an optional B/W/D/Q
            # suffix selects the storage unit (default 4 bytes).
            match = re.match("^(\\d+)([b|B|W|D|Q])([B|W|D|Q]?)",
                             t_item['length'])
            if match and match.group(2) == 'b':
                bit_length = int(match.group(1))
                if match.group(3) != '':
                    length = CGenYamlCfg.bits_width[match.group(3)] // 8
                else:
                    length = 4
            offset = item['offset'] // 8
            if off_base == -1:
                off_base = offset
            struct = item.get('struct', '')
            name = field
            prompt = item['name']
            help = item['help']
            option = item['option']
            line = self.create_field(item, name, length, offset, struct,
                                     prompt, help, option, bit_length)
            lines.append('  %s' % line)
            last = struct

        lines.append('\n} %s;\n\n' % cname)

        return lines
+
+ def write_fsp_sig_header_file(self, hdr_file_name):
+ hdr_fd = open(hdr_file_name, 'w')
+ hdr_fd.write("%s\n" % get_copyright_header('h'))
+ hdr_fd.write("#ifndef __FSPUPD_H__\n"
+ "#define __FSPUPD_H__\n\n"
+ "#include <FspEas.h>\n\n"
+ "#pragma pack(1)\n\n")
+ lines = []
+ for fsp_comp in 'TMS':
+ top = self.locate_cfg_item('FSP%s_UPD' % fsp_comp)
+ if not top:
+ raise Exception('Could not find FSP UPD definition !')
+ bins = self.get_field_value(top)
+ lines.append("#define FSP%s_UPD_SIGNATURE"
+ " 0x%016X /* '%s' */\n\n"
+ % (fsp_comp, bytes_to_value(bins[:8]),
+ bins[:8].decode()))
+ hdr_fd.write(''.join(lines))
+ hdr_fd.write("#pragma pack()\n\n"
+ "#endif\n")
+ hdr_fd.close()
+
    def create_header_file(self, hdr_file_name, com_hdr_file_name='', path=''):
        """Generate the platform (and optional common) C header files.

        hdr_file_name     -- platform header output path
        com_hdr_file_name -- optional common header output path
        path              -- config scope ('' for the whole tree;
                             'FSP_SIG' emits only the FspUpd.h stub)

        Returns 0 on success.
        """

        def _build_header_struct(name, cfgs, level):
            # Tree visitor: collect CFGDATA tag ids and struct nodes.
            if CGenYamlCfg.STRUCT in cfgs:
                if 'CfgHeader' in cfgs:
                    # collect CFGDATA TAG IDs
                    cfghdr = self.get_item_by_index(cfgs['CfgHeader']['indx'])
                    tag_val = array_str_to_value(cfghdr['value']) >> 20
                    tag_dict[name] = tag_val
                    if level == 1:
                        tag_curr[0] = tag_val
                struct_dict[name] = (level, tag_curr[0], cfgs)
        if path == 'FSP_SIG':
            self.write_fsp_sig_header_file(hdr_file_name)
            return
        tag_curr = [0]
        tag_dict = {}
        struct_dict = {}

        if path == '':
            top = None
        else:
            top = self.locate_cfg_item(path)
            if not top:
                raise Exception("Invalid configuration path '%s' !" % path)
        _build_header_struct(path, top, 0)
        self.traverse_cfg_tree(_build_header_struct, top)

        # hdr_mode selects which tag range goes into the platform
        # header; bit7 flags FSP UPD naming for write_cfg_header_file.
        if tag_curr[0] == 0:
            hdr_mode = 2
        else:
            hdr_mode = 1

        if re.match('FSP[TMS]_UPD', path):
            hdr_mode |= 0x80

        # filter out the items to be built for tags and structures
        struct_list = []
        for each in struct_dict:
            match = False
            for check in CGenYamlCfg.exclude_struct:
                if re.match(check, each):
                    match = True
                    if each in tag_dict:
                        if each not in CGenYamlCfg.include_tag:
                            del tag_dict[each]
                    break
            if not match:
                struct_list.append({'name': each, 'alias': '', 'count': 0,
                                    'level': struct_dict[each][0],
                                    'tag': struct_dict[each][1],
                                    'node': struct_dict[each][2]})

        # sort by level so that the bottom level struct
        # will be build first to satisfy dependencies
        struct_list = sorted(struct_list, key=lambda x: x['level'],
                             reverse=True)

        # Convert XXX_[0-9]+ to XXX as an array hint
        for each in struct_list:
            cfgs = each['node']
            if 'struct' in cfgs['$STRUCT']:
                each['alias'], array_num, var = self.get_struct_array_info(
                    cfgs['$STRUCT']['struct'])
            else:
                match = re.match('(\\w+)(_\\d+)', each['name'])
                if match:
                    each['alias'] = match.group(1)
                else:
                    each['alias'] = each['name']

        # count items for array build
        for idx, each in enumerate(struct_list):
            if idx > 0:
                last_struct = struct_list[idx-1]['node']['$STRUCT']
                curr_struct = each['node']['$STRUCT']
                # Same alias, same size, and contiguous offsets: fold
                # this entry into the previous run's array count.
                if struct_list[idx-1]['alias'] == each['alias'] and \
                        curr_struct['length'] == last_struct['length'] and \
                        curr_struct['offset'] == last_struct['offset'] + \
                        last_struct['length']:
                    for idx2 in range(idx-1, -1, -1):
                        if struct_list[idx2]['count'] > 0:
                            struct_list[idx2]['count'] += 1
                            break
                    continue
            each['count'] = 1

        # generate common header
        if com_hdr_file_name:
            self.write_cfg_header_file(com_hdr_file_name, 0, tag_dict,
                                       struct_list)

        # generate platform header
        self.write_cfg_header_file(hdr_file_name, hdr_mode, tag_dict,
                                   struct_list)

        return 0
+
    def load_yaml(self, cfg_file):
        """Load a YAML config file and rebuild all derived state.

        Parses *cfg_file* through CFG_YAML (resolving includes and
        defines), then rebuilds the flat item list, variable dictionary
        and evaluated default values -- in that order, since each step
        depends on the previous one.  Returns 0.
        """
        cfg_yaml = CFG_YAML()
        self.initialize()
        self._cfg_tree = cfg_yaml.load_yaml(cfg_file)
        self._def_dict = cfg_yaml.def_dict
        # Remember the YAML directory so relative includes resolve.
        self._yaml_path = os.path.dirname(cfg_file)
        self.build_cfg_list()
        self.build_var_dict()
        self.update_def_value()
        return 0
+
+
def usage():
    """Print the command-line usage summary for GenYamlCfg."""
    usage_lines = [
        "GenYamlCfg Version 0.50",
        "Usage:",
        "    GenYamlCfg  GENINC  BinFile   IncOutFile "
        "   [-D Macros]",
        "    GenYamlCfg  GENPKL  YamlFile  PklOutFile "
        "   [-D Macros]",
        "    GenYamlCfg  GENBIN  YamlFile[;DltFile]   BinOutFile "
        "   [-D Macros]",
        "    GenYamlCfg  GENDLT  YamlFile[;BinFile]   DltOutFile "
        "   [-D Macros]",
        "    GenYamlCfg  GENYML  YamlFile YamlOutFile"
        "   [-D Macros]",
        "    GenYamlCfg  GENHDR  YamlFile  HdrOutFile "
        "   [-D Macros]",
        ]
    print('\n'.join(usage_lines))
+
+
def main():
    """Command-line entry point: dispatch GenYamlCfg sub-commands.

    See usage() for the supported commands and their arguments.
    Returns 0 on success, 1 on insufficient arguments; raises
    Exception on invalid input.
    """
    # Parse the options and args
    argc = len(sys.argv)
    if argc < 4:
        usage()
        return 1

    gen_cfg_data = CGenYamlCfg()
    command = sys.argv[1].upper()
    out_file = sys.argv[3]
    # Optional macro overrides follow the three fixed arguments.
    if argc >= 5 and gen_cfg_data.parse_macros(sys.argv[4:]) != 0:
        raise Exception("ERROR: Macro parsing failed !")

    # Second argument may be 'YamlFile[;DltFile[;BinFile]]'.
    file_list = sys.argv[2].split(';')
    if len(file_list) >= 2:
        yml_file = file_list[0]
        dlt_file = file_list[1]
    elif len(file_list) == 1:
        yml_file = file_list[0]
        dlt_file = ''
    else:
        raise Exception("ERROR: Invalid parameter '%s' !" % sys.argv[2])
    # 'File@Scope' limits processing to a subtree of the config.
    yml_scope = ''
    if '@' in yml_file:
        parts = yml_file.split('@')
        yml_file = parts[0]
        yml_scope = parts[1]

    if command == "GENDLT" and yml_file.endswith('.dlt'):
        # It needs to expand an existing DLT file
        dlt_file = yml_file
        lines = gen_cfg_data.expand_include_files(dlt_file)
        write_lines(lines, out_file)
        return 0

    if command == "GENYML":
        if not yml_file.lower().endswith('.yaml'):
            raise Exception('Only YAML file is supported !')
        gen_cfg_data.generate_yml_file(yml_file, out_file)
        return 0

    bin_file = ''
    if (yml_file.lower().endswith('.bin')) and (command == "GENINC"):
        # It is binary file
        bin_file = yml_file
        yml_file = ''

    if bin_file:
        gen_cfg_data.generate_data_inc_file(out_file, bin_file)
        return 0

    cfg_bin_file = ''
    cfg_bin_file2 = ''
    if dlt_file:
        if command == "GENDLT":
            # For GENDLT the second file is a binary, not a DLT.
            cfg_bin_file = dlt_file
            dlt_file = ''
            if len(file_list) >= 3:
                cfg_bin_file2 = file_list[2]

    if yml_file.lower().endswith('.pkl'):
        # Reload a previously marshaled configuration database.
        with open(yml_file, "rb") as pkl_file:
            gen_cfg_data.__dict__ = marshal.load(pkl_file)
        gen_cfg_data.prepare_marshal(False)

        # Override macro definition again for Pickle file
        if argc >= 5:
            gen_cfg_data.parse_macros(sys.argv[4:])
    else:
        gen_cfg_data.load_yaml(yml_file)
        if command == 'GENPKL':
            gen_cfg_data.prepare_marshal(True)
            with open(out_file, "wb") as pkl_file:
                marshal.dump(gen_cfg_data.__dict__, pkl_file)
            json_file = os.path.splitext(out_file)[0] + '.json'
            path_list = []
            cfgs = {'_cfg_page': gen_cfg_data._cfg_page,
                    '_cfg_list': gen_cfg_data._cfg_list,
                    '_path_list': path_list}
            # optimize to reduce size
            path = None
            for each in cfgs['_cfg_list']:
                new_path = each['path'][:-len(each['cname'])-1]
                if path != new_path:
                    path = new_path
                    each['path'] = path
                    path_list.append(path)
                else:
                    del each['path']
                if each['order'] == each['offset']:
                    del each['order']
                del each['offset']

                # value is just used to indicate display type
                value = each['value']
                if value.startswith('0x'):
                    hex_len = ((each['length'] + 7) // 8) * 2
                    if len(value) == hex_len:
                        value = 'x%d' % hex_len
                    else:
                        value = 'x'
                    each['value'] = value
                elif value and value[0] in ['"', "'", '{']:
                    each['value'] = value[0]
                else:
                    del each['value']

            # Write the reduced database once it is fully built.
            with open(json_file, 'w') as fo:
                fo.write(repr(cfgs))
            return 0

    if dlt_file:
        gen_cfg_data.override_default_value(dlt_file)

    gen_cfg_data.detect_fsp()

    if command == "GENBIN":
        if len(file_list) == 3:
            old_data = gen_cfg_data.generate_binary_array()
            with open(file_list[2], 'rb') as fi:
                new_data = bytearray(fi.read())
            if len(new_data) != len(old_data):
                raise Exception("Binary file '%s' length does not match, \
ignored !" % file_list[2])
            else:
                gen_cfg_data.load_default_from_bin(new_data)
                gen_cfg_data.override_default_value(dlt_file)

        gen_cfg_data.generate_binary(out_file, yml_scope)

    elif command == "GENDLT":
        full = 'FULL' in gen_cfg_data._macro_dict
        gen_cfg_data.generate_delta_file(out_file, cfg_bin_file,
                                         cfg_bin_file2, full)

    elif command == "GENHDR":
        # Output spec is 'platform.h[;common.h]'.
        out_files = out_file.split(';')
        brd_out_file = out_files[0].strip()
        if len(out_files) > 1:
            com_out_file = out_files[1].strip()
        else:
            com_out_file = ''
        gen_cfg_data.create_header_file(brd_out_file, com_out_file, yml_scope)

    elif command == "GENINC":
        gen_cfg_data.generate_data_inc_file(out_file)

    elif command == "DEBUG":
        gen_cfg_data.print_cfgs()

    else:
        raise Exception("Unsupported command '%s' !" % command)

    return 0
+
+
# Script entry point: propagate main()'s return code as exit status.
if __name__ == '__main__':
    sys.exit(main())
diff --git a/IntelFsp2Pkg/Tools/ConfigEditor/SingleSign.py b/IntelFsp2Pkg/Tools/ConfigEditor/SingleSign.py
new file mode 100644
index 0000000000..7e008aa68a
--- /dev/null
+++ b/IntelFsp2Pkg/Tools/ConfigEditor/SingleSign.py
@@ -0,0 +1,324 @@
+#!/usr/bin/env python
+# @ SingleSign.py
+# Single signing script
+#
+# Copyright (c) 2020 - 2021, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+##
+
+import os
+import sys
+import re
+import shutil
+import subprocess
+
# Mapping from symbolic signing KEY_ID names to the test key file
# names resolved under the SBL_KEY_DIR key-store directory.
SIGNING_KEY = {
    # Key Id                    | Key File Name start |
    # =================================================================
    # KEY_ID_MASTER is used for signing Slimboot Key Hash Manifest
    # container (KEYH Component)
    "KEY_ID_MASTER_RSA2048": "MasterTestKey_Priv_RSA2048.pem",
    "KEY_ID_MASTER_RSA3072": "MasterTestKey_Priv_RSA3072.pem",

    # KEY_ID_CFGDATA is used for signing external Config data blob
    "KEY_ID_CFGDATA_RSA2048": "ConfigTestKey_Priv_RSA2048.pem",
    "KEY_ID_CFGDATA_RSA3072": "ConfigTestKey_Priv_RSA3072.pem",

    # KEY_ID_FIRMWAREUPDATE is used for signing capsule firmware update image
    "KEY_ID_FIRMWAREUPDATE_RSA2048": "FirmwareUpdateTestKey_Priv_RSA2048.pem",
    "KEY_ID_FIRMWAREUPDATE_RSA3072": "FirmwareUpdateTestKey_Priv_RSA3072.pem",

    # KEY_ID_CONTAINER is used for signing container header with mono signature
    "KEY_ID_CONTAINER_RSA2048": "ContainerTestKey_Priv_RSA2048.pem",
    "KEY_ID_CONTAINER_RSA3072": "ContainerTestKey_Priv_RSA3072.pem",

    # KEY_ID_CONTAINER_COMP is used for signing container components
    "KEY_ID_CONTAINER_COMP_RSA2048": "ContainerCompTestKey_Priv_RSA2048.pem",
    "KEY_ID_CONTAINER_COMP_RSA3072": "ContainerCompTestKey_Priv_RSA3072.pem",

    # KEY_ID_OS1_PUBLIC, KEY_ID_OS2_PUBLIC are used for referencing
    # Boot OS public keys
    "KEY_ID_OS1_PUBLIC_RSA2048": "OS1_TestKey_Pub_RSA2048.pem",
    "KEY_ID_OS1_PUBLIC_RSA3072": "OS1_TestKey_Pub_RSA3072.pem",

    "KEY_ID_OS2_PUBLIC_RSA2048": "OS2_TestKey_Pub_RSA2048.pem",
    "KEY_ID_OS2_PUBLIC_RSA3072": "OS2_TestKey_Pub_RSA3072.pem",

    }
+
# User guidance printed when the SBL key store is missing or invalid.
# The original text carried stray quote/period artifacts from line
# wrapping; this is the cleaned-up message.
MESSAGE_SBL_KEY_DIR = """!!! PRE-REQUISITE: Path to SBL_KEY_DIR has
to be set with SBL KEYS DIRECTORY !!!
!!! Generate keys using GenerateKeys.py available in
BootloaderCorePkg/Tools directory !!!
!!! Run $python BootloaderCorePkg/Tools/GenerateKeys.py \
-k $PATH_TO_SBL_KEY_DIR !!!
!!! Set SBL_KEY_DIR environ with path to SBL KEYS DIR !!!
!!! Windows $set SBL_KEY_DIR=$PATH_TO_SBL_KEY_DIR !!!
!!! Linux $export SBL_KEY_DIR=$PATH_TO_SBL_KEY_DIR !!!
"""
+
+
def get_openssl_path():
    """Return the path of the openssl executable.

    On Windows the OPENSSL_PATH / OPENSSL_CONF environment variables
    are populated from the conventional C:\\Openssl install location
    when not already set; elsewhere PATH is searched directly.
    """
    if os.name != 'nt':
        # Get openssl path for Linux cases
        return shutil.which('openssl')

    env = os.environ
    if 'OPENSSL_PATH' not in env:
        default_dir = "C:\\Openssl\\bin\\"
        env['OPENSSL_PATH'] = (default_dir
                               if os.path.exists(default_dir)
                               else "C:\\Openssl\\")
    if 'OPENSSL_CONF' not in env:
        openssl_cfg = "C:\\Openssl\\openssl.cfg"
        if os.path.exists(openssl_cfg):
            env['OPENSSL_CONF'] = openssl_cfg
    return os.path.join(env.get('OPENSSL_PATH', ''), 'openssl.exe')
+
+
def run_process(arg_list, print_cmd=False, capture_out=False):
    """Run an external command.

    arg_list    -- command and its arguments as a list
    print_cmd   -- echo the command line before running
    capture_out -- capture and return stdout instead of inheriting it

    On a non-zero exit the interpreter exits with status 1; when the
    process could not be launched at all, the underlying exception is
    re-raised.  Returns the captured output ('' unless capture_out).
    """
    sys.stdout.flush()
    if print_cmd:
        print(' '.join(arg_list))

    failure = None
    status = 0
    output = ''
    try:
        if capture_out:
            output = subprocess.check_output(arg_list).decode()
        else:
            status = subprocess.call(arg_list)
    except Exception as ex:
        status = 1
        failure = ex

    if status:
        if not print_cmd:
            print('Error in running process:\n  %s' % ' '.join(arg_list))
        if failure is None:
            sys.exit(1)
        raise failure

    return output
+
+
def check_file_pem_format(priv_key):
    """Return True when the key file name carries a '.pem' extension."""
    extension = os.path.splitext(os.path.basename(priv_key))[1]
    return extension == ".pem"
+
+
def get_key_id(priv_key):
    """Return the KEY_ID-style base name of *priv_key*, or None.

    A path is reduced to its base name first; a name is treated as a
    key id only when it starts with 'KEY_ID'.
    """
    key_name = os.path.basename(priv_key)
    return key_name if key_name.startswith('KEY_ID') else None
+
+
def get_sbl_key_dir():
    """Return the SBL key directory named by the SBL_KEY_DIR env var.

    Raises Exception (with setup guidance appended) when the variable
    is unset or points at a non-existent path.
    """
    if 'SBL_KEY_DIR' not in os.environ:
        exception_string = "ERROR: SBL_KEY_DIR is not defined." \
                           " Set SBL_KEY_DIR with SBL Keys directory!!\n"
        raise Exception(exception_string + MESSAGE_SBL_KEY_DIR)

    sbl_key_dir = os.environ.get('SBL_KEY_DIR')
    if os.path.exists(sbl_key_dir):
        return sbl_key_dir
    raise Exception("ERROR:SBL_KEY_DIR set " + sbl_key_dir
                    + " is not valid."
                    " Set the correct SBL_KEY_DIR path !!\n"
                    + MESSAGE_SBL_KEY_DIR)
+
+
def get_key_from_store(in_key):
    """Resolve *in_key* to an actual key file path.

    *in_key* may be: an existing file path (returned as-is), a KEY_ID
    listed in SIGNING_KEY, or a bare '.pem' file name; the latter two
    are resolved relative to the SBL_KEY_DIR key store.

    Raises Exception when the key id is unknown, the name is not a
    valid key reference, or the resolved file does not exist.
    """
    # A direct, existing path wins over any key-store lookup.
    if os.path.exists(in_key):
        return in_key

    # Get Slimboot key dir path
    sbl_key_dir = get_sbl_key_dir()

    # Extract if in_key is key_id
    key_id = get_key_id(in_key)
    if key_id is not None:
        if key_id in SIGNING_KEY:
            # Generate key file name from key id
            priv_key_file = SIGNING_KEY[key_id]
        else:
            raise Exception("KEY_ID " + key_id
                            + " is not found in supported KEY IDs!!")
    elif check_file_pem_format(in_key):
        # A plain .pem file name relative to the key store.
        priv_key_file = in_key
    else:
        raise Exception('key provided %s is not valid!' % in_key)

    priv_key = os.path.join(sbl_key_dir, priv_key_file)

    # The resolved file must actually exist in the key store.
    if not os.path.isfile(priv_key):
        raise Exception("!!! ERROR: Key file corresponding to "
                        + in_key + " does not exist in SBL key "
                        "directory at " + sbl_key_dir + " !!! \n"
                        + MESSAGE_SBL_KEY_DIR)

    return priv_key
+
+#
+# Sign an file using openssl
+#
+# priv_key [Input] Key Id or Path to Private key
+# hash_type [Input] Signing hash
+# sign_scheme[Input] Sign/padding scheme
+# in_file [Input] Input file to be signed
+# out_file [Input/Output] Signed data file
+#
+
+
def single_sign_file(priv_key, hash_type, sign_scheme, in_file, out_file):
    """Sign a file using openssl.

    priv_key    -- key id or path to the private key
    hash_type   -- signing hash: SHA2_256 / SHA2_384 / SHA2_512
    sign_scheme -- padding scheme: RSA_PKCS1 or RSA_PSS
    in_file     -- input file to be signed
    out_file    -- output file receiving the raw signature

    Intermediate '<out_file>.hash[.tmp]' files are left next to the
    output.  Raises Exception on unparsable or wrong-size digests.
    """
    _hash_type_string = {
        "SHA2_256": 'sha256',
        "SHA2_384": 'sha384',
        "SHA2_512": 'sha512',
        }

    _hash_digest_Size = {
        # Hash_string : Hash_Size
        "SHA2_256": 32,
        "SHA2_384": 48,
        "SHA2_512": 64,
        "SM3_256": 32,
        }

    _sign_scheme_string = {
        "RSA_PKCS1": 'pkcs1',
        "RSA_PSS": 'pss',
        }

    priv_key = get_key_from_store(priv_key)

    # Temporary files to store hash generated
    hash_file_tmp = out_file + '.hash.tmp'
    hash_file = out_file + '.hash'

    # Generate hash using openssl dgst in hex format
    cmdargs = [get_openssl_path(),
               'dgst',
               '-' + _hash_type_string[hash_type],
               '-out', hash_file_tmp, in_file]
    run_process(cmdargs)

    # dgst output has the form '...= <hex digest>'; pull out the hex
    # part and convert it to raw bytes for pkeyutl.
    with open(hash_file_tmp, 'r') as fin:
        hashdata = fin.read()

    try:
        hashdata = hashdata.rsplit('=', 1)[1].strip()
    except IndexError:
        raise Exception('Hash Data not found for signing!')

    if len(hashdata) != (_hash_digest_Size[hash_type] * 2):
        raise Exception('Hash Data size does not match the hash type!')

    with open(hash_file, 'wb') as fout:
        fout.write(bytearray.fromhex(hashdata))

    print("Key used for Signing %s !!" % priv_key)

    # Sign the raw digest using openssl pkeyutl.
    cmdargs = [get_openssl_path(),
               'pkeyutl', '-sign', '-in', hash_file,
               '-inkey', priv_key, '-out', out_file,
               '-pkeyopt', 'digest:%s' % _hash_type_string[hash_type],
               '-pkeyopt', 'rsa_padding_mode:%s' %
               _sign_scheme_string[sign_scheme]]
    run_process(cmdargs)
+
+#
+# Extract public key using openssl
+#
+# in_key [Input] Private key or public key in pem format
+# pub_key_file [Input/Output] Public Key to a file
+#
+# return keydata (mod, exp) in bin format
+#
+
+
def single_sign_gen_pub_key(in_key, pub_key_file=None):
    """Extract the RSA public key (modulus + exponent) via openssl.

    in_key       -- private or public key in PEM format (or a KEY_ID
                    resolvable through the key store)
    pub_key_file -- optional path to also receive the key text

    Returns the key data as a bytearray: modulus followed by a 4-byte
    big-endian exponent.  Raises Exception for unrecognized key files
    or unparsable openssl output.
    """
    in_key = get_key_from_store(in_key)

    # Expect key to be in PEM format
    is_prv_key = False
    cmdline = [get_openssl_path(), 'rsa', '-pubout', '-text', '-noout',
               '-in', '%s' % in_key]
    # Check if it is public key or private key
    with open(in_key, 'r') as fkey:
        text = fkey.read()
    if '-BEGIN RSA PRIVATE KEY-' in text:
        is_prv_key = True
    elif '-BEGIN PUBLIC KEY-' in text:
        cmdline.extend(['-pubin'])
    else:
        raise Exception('Unknown key format "%s" !' % in_key)

    if pub_key_file:
        cmdline.extend(['-out', '%s' % pub_key_file])
        capture = False
    else:
        capture = True

    output = run_process(cmdline, capture_out=capture)
    if not capture:
        # openssl wrote to the file instead of stdout; read it back.
        with open(pub_key_file, 'r') as fpub:
            output = fpub.read()
    data = output.replace('\r', '').replace('\n', '').replace(' ', '')

    # Extract the modulus and exponent from the textual key dump.
    if is_prv_key:
        match = re.search('modulus(.*)publicExponent:\\s+(\\d+)\\s+', data)
    else:
        match = re.search('Modulus(?:.*?):(.*)Exponent:\\s+(\\d+)\\s+', data)
    if not match:
        raise Exception('Public key not found!')
    modulus = match.group(1).replace(':', '')
    exponent = int(match.group(2))

    mod = bytearray.fromhex(modulus)
    # Remove the '00' from the front if the MSB is 1
    if mod[0] == 0 and (mod[1] & 0x80):
        mod = mod[1:]
    exp = bytearray.fromhex('{:08x}'.format(exponent))

    keydata = mod + exp

    return keydata