Copied latest chip data from PRD project
Change-Id: I7fcaff1fd30b725abe8905df76d91a73e4572c08
Signed-off-by: Zane Shelley <zshelle@us.ibm.com>
diff --git a/chip_data/pyprd/__init__.py b/chip_data/pyprd/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/chip_data/pyprd/__init__.py
diff --git a/chip_data/pyprd/chip_data/__init__.py b/chip_data/pyprd/chip_data/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/chip_data/pyprd/chip_data/__init__.py
diff --git a/chip_data/pyprd/chip_data/binary.py b/chip_data/pyprd/chip_data/binary.py
new file mode 100644
index 0000000..5041b4a
--- /dev/null
+++ b/chip_data/pyprd/chip_data/binary.py
@@ -0,0 +1,304 @@
+from pyprd.chip_data import chip_data as cd
+from pyprd.util.hash import hash_string
+from pyprd.util.model_ec import supported as supported_model_ec
+
+# -----------------------------------------------------------------------------
+# Generic tools to convert to data bytes
+
+
+def _bin(num_bytes: int, value: int) -> bytes:
+ """Converts integers to bytes in big endian format."""
+ return value.to_bytes(num_bytes, "big")
+
+
+def _hash(num_bytes: int, string: str) -> bytes:
+ """Hashes a string and converts to bytes."""
+ return _bin(num_bytes, hash_string(num_bytes, string))
+
+
+def _num(num_bytes: int, iterable: iter) -> bytes:
+ """Gets the length of an iterable and converts to bytes."""
+ return _bin(num_bytes, len(iterable))
+
+
+# -----------------------------------------------------------------------------
+# General fields with specific byte sizes
+
+
+def _model_ec(value: int) -> bytes:
+ return _bin(4, value)
+
+
+def _version(value: int) -> bytes:
+ return _bin(1, value)
+
+
+def _node_name(name: str) -> bytes:
+ return _hash(2, name)
+
+
+def _num_nodes(iterable: iter) -> bytes:
+ return _num(2, iterable)
+
+
+def _reg_name(name: str) -> bytes:
+ return _hash(3, name)
+
+
+def _num_regs(iterable: iter) -> bytes:
+ return _num(3, iterable)
+
+
+def _inst(value: int) -> bytes:
+ return _bin(1, value)
+
+
+def _num_inst(iterable: iter) -> bytes:
+ return _num(1, iterable)
+
+
+def _pos(value: int) -> bytes:
+ return _bin(1, value)
+
+
+def _reg_type(value: str) -> bytes:
+ m = {"SCOM": 1, "IDSCOM": 2}
+ return _bin(1, m[value])
+
+
+def _reg_flags(reg_access: str) -> bytes:
+ m = {"RW": 0xC0, "RO": 0x80, "WO": 0x40}
+ return _bin(1, m[reg_access])
+
+
+def _reg_addr(reg_type: str, reg_addr: int) -> bytes:
+ m = {"SCOM": 4, "IDSCOM": 8}
+ return _bin(m[reg_type], reg_addr)
+
+
+def _reg_val(reg_type: str, reg_val: int) -> bytes:
+ m = {"SCOM": 8, "IDSCOM": 8}
+ return _bin(m[reg_type], reg_val)
+
+
+def _attn_type(value: str) -> bytes:
+ m = {"CS": 1, "UCS": 2, "RE": 3, "SPA": 4, "HA": 5}
+ return _bin(1, m[value])
+
+
+def _num_attn_types(iterable: iter) -> bytes:
+ return _num(1, iterable)
+
+
+# -----------------------------------------------------------------------------
+# Isolation node capture register support
+
+
+def _cap_regs(
+ node_inst: int,
+ node_cap_groups: list,
+ node_bits: dict,
+ base_cap_groups: dict,
+) -> list:
+ out = []
+
+ for group in node_cap_groups:
+ for reg in base_cap_groups[group.group_name]:
+ out.append(
+ _reg_name(reg.reg_name)
+ + _inst(reg.reg_inst[group.group_inst[node_inst]])
+ + _pos(0xFF)
+ )
+
+ for pos, bit in node_bits.items():
+ for group in bit.capture_groups:
+ for reg in base_cap_groups[group.group_name]:
+ out.append(
+ _reg_name(reg.reg_name)
+ + _inst(reg.reg_inst[group.group_inst[node_inst]])
+ + _pos(pos)
+ )
+
+ return out
+
+
+def _num_cap_regs(iterable: iter) -> bytes:
+ return _num(1, iterable)
+
+
+# -----------------------------------------------------------------------------
+# Isolation node rule and expression support
+
+
+def _expr_type(value: int) -> bytes:
+ return _bin(1, value)
+
+
+def _num_exprs(iterable: iter) -> bytes:
+ return _num(1, iterable)
+
+
+def _shift_value(value: int) -> bytes:
+ return _bin(1, value)
+
+
+def _expr(node_inst: int, expr: object) -> bytes:
+ data = bytes()
+
+ if "reg" == expr.expr_type:
+ data += _expr_type(0x01)
+ data += _reg_name(expr.reg_name)
+ data += _inst(expr.reg_inst[node_inst] if expr.reg_inst else node_inst)
+
+ elif "int" == expr.expr_type:
+ data += _expr_type(0x02)
+ data += _reg_val(expr.reg_type, expr.int_value)
+
+ elif "and" == expr.expr_type:
+ data += _expr_type(0x10)
+ data += _num_exprs(expr.exprs)
+ for e in expr.exprs:
+ data += _expr(node_inst, e)
+
+ elif "or" == expr.expr_type:
+ data += _expr_type(0x11)
+ data += _num_exprs(expr.exprs)
+ for e in expr.exprs:
+ data += _expr(node_inst, e)
+
+ elif "not" == expr.expr_type:
+ data += _expr_type(0x12)
+ data += _expr(node_inst, expr.expr)
+
+ elif "lshift" == expr.expr_type:
+ data += _expr_type(0x13)
+ data += _shift_value(expr.shift_value)
+ data += _expr(node_inst, expr.expr)
+
+ elif "rshift" == expr.expr_type:
+ data += _expr_type(0x14)
+ data += _shift_value(expr.shift_value)
+ data += _expr(node_inst, expr.expr)
+
+ return data
+
+
+def _rules(node_inst: int, rules: list) -> list:
+ out = []
+
+ for rule in rules:
+ if node_inst in rule.node_inst:
+ for attn_type in rule.attn_type:
+ out.append(_attn_type(attn_type) + _expr(node_inst, rule.expr))
+
+ return out
+
+
+def _num_rules(iterable: iter) -> bytes:
+ return _num(1, iterable)
+
+
+# -----------------------------------------------------------------------------
+# Isolation node child node support
+
+
+def _child_nodes(node_inst: int, bits: dict) -> list:
+ out = []
+
+ for pos, bit in bits.items():
+ if bit.child_node:
+ name = bit.child_node["name"]
+
+            # The child node instance map is optional. If not present, the
+            # child instance matches the node instance.
+ inst = node_inst
+ if "inst" in bit.child_node:
+ inst = bit.child_node["inst"][node_inst]
+
+ out.append(_pos(pos) + _node_name(name) + _inst(inst))
+
+ return out
+
+
+def _num_child_nodes(iterable: iter) -> bytes:
+ return _num(1, iterable)
+
+
+# -----------------------------------------------------------------------------
+# Public functions
+
+
+def binary_encode(model_ec: str, base: cd.Base, fp: object):
+ """
+ Encodes a Chip Data object into binary and writes to the given file
+ pointer. Note that the Chip Data Base object could describe more than one
+ model/EC, but the Chip Data binary files only target one model/EC.
+ Therefore, the user must specify which model/EC to target.
+ """
+ assert model_ec in supported_model_ec
+ assert model_ec in base.model_ec
+
+ data = bytes()
+
+ # Header information.
+ data += "CHIPDATA".encode()
+ data += _model_ec(supported_model_ec[model_ec].id)
+ data += _version(2)
+
+ # Register information.
+ data += "REGS".encode()
+ data += _num_regs(base.registers)
+
+ for reg_name, register in sorted(base.registers.items()):
+ data += _reg_name(reg_name)
+ data += _reg_type(register.reg_type)
+ data += _reg_flags(register.access)
+ data += _num_inst(register.instances)
+
+ for reg_inst, reg_addr in sorted(register.instances.items()):
+ data += _inst(reg_inst)
+ data += _reg_addr(register.reg_type, reg_addr)
+
+ # Isolation node information.
+ data += "NODE".encode()
+ data += _num_nodes(base.isolation_nodes)
+
+ for node_name, iso_node in sorted(base.isolation_nodes.items()):
+ data += _node_name(node_name)
+ data += _reg_type(iso_node.reg_type)
+ data += _num_inst(iso_node.instances)
+
+ for node_inst in sorted(iso_node.instances):
+ cap_regs = _cap_regs(
+ node_inst,
+ iso_node.capture_groups,
+ iso_node.bits,
+ base.capture_groups,
+ )
+ rules = _rules(node_inst, iso_node.rules)
+ child_nodes = _child_nodes(node_inst, iso_node.bits)
+
+ data += _inst(node_inst)
+ data += _num_cap_regs(cap_regs)
+ data += _num_rules(rules)
+ data += _num_child_nodes(child_nodes)
+
+ for cap_reg in cap_regs:
+ data += cap_reg
+
+ for rule in rules:
+ data += rule
+
+ for child_node in child_nodes:
+ data += child_node
+
+    # Root node information.
+ data += "ROOT".encode()
+ data += _num_attn_types(base.root_nodes)
+
+ for attn_type, root_node in sorted(base.root_nodes.items()):
+ data += _attn_type(attn_type)
+ data += _node_name(root_node.name)
+ data += _inst(root_node.inst)
+
+ fp.write(data)
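+
+
+# A minimal usage sketch (file names here are hypothetical): decode a Chip
+# Data JSON file into a Base object, then encode the "P10_20" subset as a
+# Chip Data binary file.
+#
+#     from pyprd.chip_data.binary import binary_encode
+#     from pyprd.chip_data.json import json_decode
+#
+#     with open("p10.json", "r") as fp:
+#         base = json_decode(fp)
+#
+#     with open("chip_data_p10_20.cdb", "wb") as fp:
+#         binary_encode("P10_20", base, fp)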
diff --git a/chip_data/pyprd/chip_data/chip_data.py b/chip_data/pyprd/chip_data/chip_data.py
new file mode 100644
index 0000000..efaa7cd
--- /dev/null
+++ b/chip_data/pyprd/chip_data/chip_data.py
@@ -0,0 +1,337 @@
+import copy
+import re
+
+
+def _check_version(version: int) -> int:
+ assert 1 <= version and version <= 1, "Unsupported format version: " + str(
+ version
+ )
+
+ return version
+
+
+def _check_model_ec(model_ec: list) -> list:
+ supported = [
+ "P10_10",
+ "P10_20",
+ "EXPLORER_11",
+ "EXPLORER_20",
+ "ODYSSEY_10",
+ ]
+
+ for v in model_ec:
+        assert v in supported, "Unsupported model/EC level: " + v
+
+ return model_ec
+
+
+def _check_access(access: str) -> str:
+ supported = ["RW", "RO", "WO"]
+ assert access in supported, "Unsupported access type: " + access
+
+ return access
+
+
+def _check_attn_type(attn_type: [str, list]) -> [str, list]:
+ supported = ["CS", "RE", "SPA", "UCS", "HA"]
+
+ t = type(attn_type)
+
+ if t is str:
+ assert attn_type in supported, (
+ "Unsupported attention type: " + attn_type
+ )
+
+ elif t is list:
+ for v in attn_type:
+ _check_attn_type(v)
+
+ else:
+ raise ValueError("Invalid object type: " + t)
+
+ return attn_type
+
+
+def _check_reg_type(reg_type: str) -> str:
+ supported = ["SCOM", "IDSCOM"]
+ assert reg_type in supported, "Unsupported register type: " + reg_type
+
+ return reg_type
+
+
+def _check_expr_type(expr_type: str) -> str:
+ supported = ["reg", "int", "and", "or", "not", "lshift", "rshift"]
+ assert expr_type in supported, "Unsupported expression type: " + expr_type
+
+ return expr_type
+
+
+def _check_name(name: str) -> str:
+ assert re.fullmatch("\\w+", name), "Invalid name value: " + name
+
+ return name
+
+
+def _check_instance(instance: [int, list, dict]) -> [int, list, dict]:
+ t = type(instance)
+
+ if t is int:
+ assert 0 <= instance, "Invalid instance value: " + str(instance)
+
+ elif t is list:
+ for v in instance:
+ _check_instance(v)
+
+ elif t is dict:
+ for k, v in instance.items():
+ _check_instance(k)
+ _check_instance(v)
+
+ else:
+ raise ValueError("Invalid object type: " + t)
+
+ return instance
+
+
+def _check_integer(integer: int) -> int:
+ assert 0 <= integer, "Invalid integer value: " + str(integer)
+
+ return integer
+
+
+def _get_range(r: str) -> list:
+ assert re.fullmatch("[0-9]+(:[0-9]+)?", r), "Invalid range: " + r
+
+ s = [int(i) for i in r.split(":")]
+ if 2 == len(s):
+ s = list(range(s[0], s[1] + 1))
+
+ return s
+
+
+class Register:
+ def __init__(self, reg_type: str = "SCOM", access: str = "RW"):
+ self.reg_type = _check_reg_type(reg_type)
+ self.access = _check_access(access)
+ self.instances = {}
+
+ def addInstance(self, instance: int, address: int):
+ assert (
+ _check_instance(instance) not in self.instances
+ ), "Duplicate instance added to register: " + str(instance)
+
+ self.instances[instance] = _check_integer(address)
+
+
+class RuleExpression:
+ def __init__(self, expr_type: str):
+ self.expr_type = _check_expr_type(expr_type)
+
+
+class ExprReg(RuleExpression):
+ def __init__(self, reg_name: str, reg_inst: dict = {}):
+ super().__init__("reg")
+ self.reg_name = _check_name(reg_name)
+ self.reg_inst = _check_instance(reg_inst)
+
+
+class ExprInt(RuleExpression):
+ def __init__(self, reg_type: str, int_value: int):
+ super().__init__("int")
+ self.reg_type = _check_reg_type(reg_type)
+ self.int_value = _check_integer(int_value)
+
+
+class ExprAnd(RuleExpression):
+ def __init__(self, exprs: list):
+ super().__init__("and")
+ self.exprs = exprs
+
+
+class ExprOr(RuleExpression):
+ def __init__(self, exprs: list):
+ super().__init__("or")
+ self.exprs = exprs
+
+
+class ExprNot(RuleExpression):
+ def __init__(self, expr: object):
+ super().__init__("not")
+ self.expr = expr
+
+
+class ExprLeftShift(RuleExpression):
+ def __init__(self, expr: object, shift_value: int):
+ super().__init__("lshift")
+ self.expr = expr
+ self.shift_value = _check_integer(shift_value)
+
+
+class ExprRightShift(RuleExpression):
+ def __init__(self, expr: object, shift_value: int):
+ super().__init__("rshift")
+ self.expr = expr
+ self.shift_value = _check_integer(shift_value)
+
+
+class IsolationRule:
+ def __init__(self, attn_type: list, node_inst: list, expr: object):
+ self.attn_type = _check_attn_type(attn_type)
+ self.node_inst = _check_instance(node_inst)
+ self.expr = expr
+
+
+class CaptureGroup:
+ def __init__(self, group_name: str, group_inst: dict):
+ self.group_name = _check_name(group_name)
+ self.group_inst = _check_instance(group_inst)
+
+
+class IsolationBit:
+ def __init__(self, desc: str):
+ self.desc = desc
+ self.child_node = {}
+ self.capture_groups = []
+
+ def addChildNode(self, name: str, inst: dict = {}):
+ assert not self.child_node, (
+ "Multiple child nodes given: "
+ + self.child_node["name"]
+ + " and "
+ + name
+ )
+
+ self.child_node = {"name": _check_name(name)}
+
+ if inst:
+ self.child_node["inst"] = _check_instance(inst)
+
+ def addCaptureGroup(self, group: CaptureGroup):
+ self.capture_groups.append(group)
+
+
+class IsolationNode:
+ def __init__(self, reg_type: str = "SCOM"):
+ self.reg_type = _check_reg_type(reg_type)
+ self.instances = []
+ self.rules = []
+ self.bits = {}
+ self.capture_groups = []
+
+ def addRule(self, rule: IsolationRule):
+ self.rules.append(rule)
+
+        # Keep a running list of node instances from each rule added to this
+ # isolation node.
+ self.instances = sorted(
+ list(set().union(self.instances, rule.node_inst))
+ )
+
+ def addBit(self, position: str, bit: IsolationBit):
+ positions = _get_range(position)
+ assert not any([p in self.bits.keys() for p in positions]), (
+ "Duplicate bit in isolation node: " + position
+ )
+
+ for p in positions:
+ self.bits[p] = bit
+
+ def addCaptureGroup(self, group: CaptureGroup):
+ self.capture_groups.append(group)
+
+
+class RootNode:
+ def __init__(self, name: str, inst: int):
+ self.name = _check_name(name)
+ self.inst = _check_instance(inst)
+
+
+class CaptureRegister:
+ def __init__(self, reg_name: str, reg_inst: dict):
+ self.reg_name = _check_name(reg_name)
+ self.reg_inst = _check_instance(reg_inst)
+
+
+class Base:
+ def __init__(self, version: int, model_ec: list):
+ self.version = _check_version(version)
+ self.model_ec = _check_model_ec(model_ec)
+ self.registers = {}
+ self.isolation_nodes = {}
+ self.root_nodes = {}
+ self.capture_groups = {}
+
+ def addRegister(self, reg_name: str, register: Register):
+ assert _check_name(reg_name) not in self.registers, (
+ "Duplicate register in base: " + reg_name
+ )
+
+ self.registers[reg_name] = register
+
+ def addIsolationNode(self, node_name: str, iso_node: IsolationNode):
+ assert _check_name(node_name) not in self.isolation_nodes, (
+ "Duplicate isolation node in base: " + node_name
+ )
+
+ self.isolation_nodes[node_name] = iso_node
+
+ def addRootNode(self, attn_type: str, root_node: RootNode):
+ assert _check_attn_type(attn_type) not in self.root_nodes, (
+ "Duplicate root node in base: " + attn_type
+ )
+
+ self.root_nodes[attn_type] = root_node
+
+ def addCaptureRegister(self, group_name: str, cap_reg: CaptureRegister):
+ if _check_name(group_name) not in self.capture_groups:
+ self.capture_groups[group_name] = []
+
+ self.capture_groups[group_name].append(cap_reg)
+
+ def split(self) -> list:
+ """
+ Returns a list of new Base objects for each model/EC defined by this
+        Base object. The data is copied, so this object will not be modified.
+ """
+
+ new_bases = list()
+
+ for model_ec in self.model_ec:
+ new_base = Base(self.version, [model_ec])
+
+ # autopep8: off
+ new_base.registers = copy.deepcopy(self.registers)
+ new_base.isolation_nodes = copy.deepcopy(self.isolation_nodes)
+ new_base.root_nodes = copy.deepcopy(self.root_nodes)
+ new_base.capture_groups = copy.deepcopy(self.capture_groups)
+ # autopep8: on
+
+ new_bases.append(new_base)
+
+ return new_bases
+
+ def merge(self, that_base: object) -> None:
+ """
+ Takes data from the given Base object and merges it into this object.
+ """
+
+ assert set(self.model_ec) == set(
+ that_base.model_ec
+ ), "Cannot merge Base objects with different model_ec values"
+
+ assert (
+ self.version == that_base.version
+ ), "Cannot merge Base objects with different versions"
+
+ for reg_name, register in that_base.registers.items():
+ self.addRegister(reg_name, register)
+
+ for node_name, iso_node in that_base.isolation_nodes.items():
+ self.addIsolationNode(node_name, iso_node)
+
+ for attn_type, root_node in that_base.root_nodes.items():
+ self.addRootNode(attn_type, root_node)
+
+ for group_name, cap_regs in that_base.capture_groups.items():
+ for cap_reg in cap_regs:
+ self.addCaptureRegister(group_name, cap_reg)
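+
+
+# A small illustrative sketch (register names are hypothetical): build a Base
+# covering two model/EC levels, split it into per-model/EC copies, and merge
+# data from another Base that describes the same model/EC levels.
+#
+#     from pyprd.chip_data.chip_data import Base, Register
+#
+#     base = Base(1, ["P10_10", "P10_20"])
+#     base.addRegister("SOME_FIR", Register("SCOM", "RW"))
+#
+#     per_model_ec = base.split()  # two new Base objects, one per model/EC
+#
+#     other = Base(1, ["P10_10", "P10_20"])
+#     other.addRegister("OTHER_FIR", Register("SCOM", "RO"))
+#     base.merge(other)  # base now also contains OTHER_FIR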
diff --git a/chip_data/pyprd/chip_data/json.py b/chip_data/pyprd/chip_data/json.py
new file mode 100644
index 0000000..67a3e06
--- /dev/null
+++ b/chip_data/pyprd/chip_data/json.py
@@ -0,0 +1,344 @@
+import json
+
+from pyprd.chip_data import chip_data as cd
+
+# Encoder support --------------------------------------------------------------
+# This extends the json.JSONEncoder class so that we can use a lot of the
+# built-in json support.
+
+
+def _get_addr_str(reg_type: str, address: int) -> str:
+ fmt = {
+ "SCOM": "0x{:08X}", # 4-byte address
+ "IDSCOM": "0x{:016X}", # 8-byte address
+ }
+
+ return fmt[reg_type].format(address) # throws exception if key not found
+
+
+def _get_value_str(reg_type: str, value: int) -> str:
+ fmt = {
+ "SCOM": "0x{:016X}", # 8-byte value
+ "IDSCOM": "0x{:016X}", # 8-byte value
+ }
+
+ return fmt[reg_type].format(value)
+
+
+class _ChipDataEncoder(json.JSONEncoder):
+ def default(self, o: object) -> dict:
+ if isinstance(o, cd.Base):
+ j = {
+ "version": o.version,
+ "model_ec": o.model_ec,
+ }
+
+ if o.registers:
+ j["registers"] = o.registers
+
+ if o.isolation_nodes:
+ j["isolation_nodes"] = o.isolation_nodes
+
+ if o.root_nodes:
+ j["root_nodes"] = o.root_nodes
+
+ if o.capture_groups:
+ j["capture_groups"] = o.capture_groups
+
+ return j
+
+ if isinstance(o, cd.Register):
+ j = {}
+
+ if "SCOM" != o.reg_type: # Don't add default to save space.
+ j["reg_type"] = o.reg_type
+
+ if "RW" != o.access: # Don't add default to save space.
+ j["access"] = o.access
+
+ j["instances"] = dict(
+ zip(
+ o.instances.keys(),
+ map(
+ lambda v: _get_addr_str(o.reg_type, v),
+ o.instances.values(),
+ ),
+ )
+ )
+
+ return j
+
+ if isinstance(o, cd.IsolationNode):
+ j = {}
+
+ if "SCOM" != o.reg_type: # Don't add default to save space.
+ j["reg_type"] = o.reg_type
+
+ j["instances"] = o.instances
+ j["rules"] = o.rules
+ j["bits"] = o.bits
+
+ if o.capture_groups:
+ j["capture_groups"] = o.capture_groups
+
+ return j
+
+ if isinstance(o, cd.IsolationRule):
+ return {
+ "attn_type": o.attn_type,
+ "node_inst": o.node_inst,
+ "expr": o.expr,
+ }
+
+ if isinstance(o, cd.ExprReg):
+ j = {
+ "expr_type": o.expr_type,
+ "reg_name": o.reg_name,
+ }
+
+ if o.reg_inst:
+ j["reg_inst"]: o.reg_inst
+
+ return j
+
+ if isinstance(o, cd.ExprInt):
+ return {
+ "expr_type": o.expr_type,
+ "int_value": _get_value_str(o.reg_type, o.int_value),
+ }
+
+ if isinstance(o, cd.ExprAnd):
+ return {
+ "expr_type": o.expr_type,
+ "exprs": o.exprs,
+ }
+
+ if isinstance(o, cd.ExprOr):
+ return {
+ "expr_type": o.expr_type,
+ "exprs": o.exprs,
+ }
+
+ if isinstance(o, cd.ExprNot):
+ return {
+ "expr_type": o.expr_type,
+ "expr": o.expr,
+ }
+
+ if isinstance(o, cd.ExprLeftShift):
+ return {
+ "expr_type": o.expr_type,
+ "expr": o.expr,
+ "shift_value": o.shift_value,
+ }
+
+ if isinstance(o, cd.ExprRightShift):
+ return {
+ "expr_type": o.expr_type,
+ "expr": o.expr,
+ "shift_value": o.shift_value,
+ }
+
+ if isinstance(o, cd.IsolationBit):
+ j = {"desc": o.desc}
+
+ if o.child_node:
+ j["child_node"] = {"name": o.child_node["name"]}
+
+ if o.child_node["inst"]:
+ j["child_node"]["inst"] = o.child_node["inst"]
+
+ if o.capture_groups:
+ j["capture_groups"] = o.capture_groups
+
+ return j
+
+ if isinstance(o, cd.RootNode):
+ return {
+ "name": o.name,
+ "inst": o.inst,
+ }
+
+ if isinstance(o, cd.CaptureGroup):
+ return {
+ "group_name": o.group_name,
+ "group_inst": o.group_inst,
+ }
+
+ if isinstance(o, cd.CaptureRegister):
+ return {
+ "reg_name": o.reg_name,
+ "reg_inst": o.reg_inst,
+ }
+
+ # Call the default method for other types
+ return json.JSONEncoder.default(self, o)
+
+
+# Decoder support --------------------------------------------------------------
+# Unfortunately, we cannot extend json.JSONDecoder like we did with the
+# encoder, because a custom decoder is applied to all JSON objects
+# in the string/file. Since the Chip Data JSON has a complex design with many
+# different JSON object formats, we can't use this approach without adding a
+# property to each object indicating what type of object it is. That would end
+# up bloating the JSON and is not desired. Instead, we'll create a series of
+# decoder functions. Then the decoder helper at the bottom of the file will use
+# the default decoder and apply these decoder functions to the returned
+# dictionary. This is not as efficient, but will be the approach for now.
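+#
+# For contrast, a decoder hook (a sketch of the rejected approach, not used
+# here) would look roughly like the following. The hook fires for every nested
+# JSON object, so it cannot tell a Register dict from an IsolationBit dict
+# without an added type tag in each object:
+#
+#     def _object_hook(d: dict) -> object:
+#         ...  # would need a "type" property in each object to dispatch on
+#
+#     base = json.load(fp, object_hook=_object_hook)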
+
+
+def _decodeInstanceMap(d: dict) -> dict:
+ # Need to convert the keys to integers.
+ return dict(zip(map(lambda k: int(k), d.keys()), d.values()))
+
+
+def _decodeRegister(d: dict) -> cd.Register:
+ reg_type = d["reg_type"] if "reg_type" in d else "SCOM"
+ access = d["access"] if "access" in d else "RW"
+
+ register = cd.Register(reg_type, access)
+
+ for k, v in d["instances"].items():
+ register.addInstance(int(k), int(v, 16))
+
+ return register
+
+
+def _decodeRuleExpression(reg_type: str, d: dict) -> object:
+ expr_type = d["expr_type"]
+
+ if "reg" == expr_type:
+ reg_inst = _decodeInstanceMap(d["reg_inst"]) if "reg_inst" in d else {}
+ return cd.ExprReg(d["reg_name"], reg_inst)
+
+ if "int" == expr_type:
+ return cd.ExprInt(reg_type, int(d["int_value"], 16))
+
+ if "and" == expr_type:
+ return cd.ExprAnd(
+ list(map(lambda e: _decodeRuleExpression(reg_type, e), d["exprs"]))
+ )
+
+ if "or" == expr_type:
+ return cd.ExprOr(
+ list(map(lambda e: _decodeRuleExpression(reg_type, e), d["exprs"]))
+ )
+
+ if "not" == expr_type:
+ return cd.ExprNot(_decodeRuleExpression(reg_type, d["expr"]))
+
+ if "lshift" == expr_type:
+ return cd.ExprLeftShift(
+ _decodeRuleExpression(reg_type, d["expr"]), d["shift_value"]
+ )
+
+ if "rshift" == expr_type:
+ return cd.ExprRightShift(
+ _decodeRuleExpression(reg_type, d["expr"]), d["shift_value"]
+ )
+
+
+def _decodeIsolationRule(reg_type: str, d: dict) -> cd.IsolationRule:
+ return cd.IsolationRule(
+ d["attn_type"],
+ d["node_inst"],
+ _decodeRuleExpression(reg_type, d["expr"]),
+ )
+
+
+def _decodeCaptureGroup(d: dict) -> cd.CaptureGroup:
+ return cd.CaptureGroup(
+ d["group_name"], _decodeInstanceMap(d["group_inst"])
+ )
+
+
+def _decodeIsolationBit(d: dict) -> cd.IsolationBit:
+ bit = cd.IsolationBit(d["desc"])
+
+ if "child_node" in d:
+ if "inst" in d["child_node"]:
+ inst = _decodeInstanceMap(d["child_node"]["inst"])
+ else:
+ inst = {}
+
+ bit.addChildNode(d["child_node"]["name"], inst)
+
+ if "capture_groups" in d:
+ for e in d["capture_groups"]:
+ bit.addCaptureGroup(_decodeCaptureGroup(e))
+
+ return bit
+
+
+def _decodeIsolationNode(d: dict) -> cd.IsolationNode:
+ reg_type = d["reg_type"] if "reg_type" in d else "SCOM"
+
+ node = cd.IsolationNode(reg_type)
+
+ # Don't need to parse `instances` because that will be recreated in the
+ # `addRule()` function.
+
+ for e in d["rules"]:
+ node.addRule(_decodeIsolationRule(reg_type, e))
+
+ for k, v in d["bits"].items():
+ node.addBit(k, _decodeIsolationBit(v))
+
+ if "capture_groups" in d:
+ for e in d["capture_groups"]:
+ node.addCaptureGroup(_decodeCaptureGroup(e))
+
+ return node
+
+
+def _decodeRootNode(d: dict) -> cd.RootNode:
+ return cd.RootNode(d["name"], d["inst"])
+
+
+def _decodeCaptureRegister(d: dict) -> cd.CaptureRegister:
+ return cd.CaptureRegister(d["reg_name"], _decodeInstanceMap(d["reg_inst"]))
+
+
+def _decodeBase(d: dict) -> cd.Base:
+ base = cd.Base(d["version"], d["model_ec"])
+
+ if "registers" in d:
+ for k, v in d["registers"].items():
+ base.addRegister(k, _decodeRegister(v))
+
+ if "isolation_nodes" in d:
+ for k, v in d["isolation_nodes"].items():
+ base.addIsolationNode(k, _decodeIsolationNode(v))
+
+ if "root_nodes" in d:
+ for k, v in d["root_nodes"].items():
+ base.addRootNode(k, _decodeRootNode(v))
+
+ if "capture_groups" in d:
+ for k, v in d["capture_groups"].items():
+ for e in v:
+ base.addCaptureRegister(k, _decodeCaptureRegister(e))
+
+ return base
+
+
+# ------------------------------------------------------------------------------
+
+
+def json_encode(obj: object, fp: object, indent=None):
+ json.dump(obj, fp, cls=_ChipDataEncoder, indent=indent)
+ fp.write("\n")
+
+
+def json_encodes(obj: object, indent=None) -> str:
+    return json.dumps(obj, cls=_ChipDataEncoder, indent=indent)
+
+
+def json_decode(fp: object) -> object:
+ # See note above for why we didn't extend the json.JSONDecoder class
+ return _decodeBase(json.load(fp))
+
+
+def json_decodes(s: str) -> object:
+ # See note above for why we didn't extend the json.JSONDecoder class
+ return _decodeBase(json.loads(s))
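+
+
+# A minimal round-trip sketch (file names are hypothetical): decode a Chip
+# Data JSON file into a Base object and write it back out with indentation.
+#
+#     with open("p10.json", "r") as fp:
+#         base = json_decode(fp)
+#
+#     with open("p10_formatted.json", "w") as fp:
+#         json_encode(base, fp, indent=4)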
diff --git a/chip_data/pyprd/chip_data/peltool.py b/chip_data/pyprd/chip_data/peltool.py
new file mode 100644
index 0000000..89724d7
--- /dev/null
+++ b/chip_data/pyprd/chip_data/peltool.py
@@ -0,0 +1,77 @@
+import json
+from collections import namedtuple
+
+from pyprd.chip_data import chip_data as cd
+from pyprd.util.hash import hash_string_format
+from pyprd.util.model_ec import supported as supported_model_ec
+
+AttnType = namedtuple("AttnType", "id desc")
+
+_attn_types = {
+ "CS": AttnType(1, "checkstop"),
+ "UCS": AttnType(2, "unit checkstop"),
+ "RE": AttnType(3, "recoverable"),
+ "SPA": AttnType(4, "special attention"),
+}
+
+
+def _format_addr(reg_type: str, reg_addr: int) -> str:
+ m = {"SCOM": 4, "IDSCOM": 8}
+ return "0x{:0{}x}".format(reg_addr, m[reg_type] * 2)
+
+
+# -----------------------------------------------------------------------------
+# Public functions
+
+
+def peltool_encode(model_ec: str, base: cd.Base, fp: object):
+ """
+ Pulls the necessary information from the Chip Data object and writes the
+ eBMC PEL parser (peltool) JSON data to the given file pointer. Note that
+ the Chip Data Base object could describe more than one model/EC, but the
+ JSON data only targets one model/EC. Therefore, the user must specify which
+ model/EC to target.
+ """
+ assert model_ec in supported_model_ec
+ assert model_ec in base.model_ec
+
+ data = {
+ "model_ec": {
+ "desc": supported_model_ec[model_ec].desc,
+ "id": "{:08x}".format(supported_model_ec[model_ec].id),
+ "type": supported_model_ec[model_ec].type,
+ },
+ "attn_types": {},
+ "registers": {},
+ "signatures": {},
+ }
+
+ for attn_type in base.root_nodes.keys():
+ key, desc = _attn_types[attn_type]
+
+ assert key not in data["attn_types"]
+
+ data["attn_types"][key] = desc
+
+ for reg_name, register in base.registers.items():
+ key = hash_string_format(3, reg_name)
+
+ assert key not in data["registers"]
+
+ reg = data["registers"][key] = [reg_name, {}]
+
+ for reg_inst, reg_addr in register.instances.items():
+ reg[1][reg_inst] = _format_addr(register.reg_type, reg_addr)
+
+ for node_name, iso_node in base.isolation_nodes.items():
+ key = hash_string_format(2, node_name)
+
+ assert key not in data["signatures"]
+
+ node = data["signatures"][key] = [node_name, {}]
+
+ for pos, bit in iso_node.bits.items():
+ node[1][pos] = bit.desc
+
+ json.dump(data, fp, indent=4, sort_keys=True)
+ fp.write("\n")
diff --git a/chip_data/pyprd/util/__init__.py b/chip_data/pyprd/util/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/chip_data/pyprd/util/__init__.py
diff --git a/chip_data/pyprd/util/hash.py b/chip_data/pyprd/util/hash.py
new file mode 100644
index 0000000..a807d0b
--- /dev/null
+++ b/chip_data/pyprd/util/hash.py
@@ -0,0 +1,42 @@
+def hash_string(num_bytes: int, string: str) -> int:
+ """
+ Converts a string into an integer hash value. This is primarily used to
+ convert register and isolation node names from the Chip and RAS Data into
+ integer values to save space in the data.
+ """
+
+ # This hash is a simple "n*s[0] + (n-1)*s[1] + ... + s[n-1]" algorithm,
+    # where s[i] is a chunk from the input string, num_bytes in length.
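+    #
+    # For example (illustration only), hash_string(2, "ABCD") combines the
+    # 2-byte chunks 0x4142 ("AB") and 0x4344 ("CD"). sumA becomes 0x4142,
+    # then 0x8486; sumB becomes 0x4142, then 0xC5C8; the masked result is
+    # 0xC5C8, which matches 2*0x4142 + 0x4344.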
+
+ # Currently only supporting 1:8 byte hashes
+ assert 1 <= num_bytes and num_bytes <= 8
+
+ # Start hashing each chunk
+ sumA = 0
+ sumB = 0
+
+ # Iterate one chunk at a time
+ for i in range(0, len(string), num_bytes):
+ # Combine each chunk into a single integer value. If we reach the end
+ # of the string, pad with null characters.
+ chunk = 0
+ for j in range(0, num_bytes):
+ chunk <<= 8
+ chunk |= ord(string[i + j]) if (i + j < len(string)) else ord("\0")
+
+ # Apply the simple hash
+ sumA += chunk
+ sumB += sumA
+
+ # Mask off everything except the target number of bytes.
+ mask = 0xFFFFFFFFFFFFFFFF
+ sumB &= mask >> ((8 - num_bytes) * 8)
+
+ return sumB
+
+
+def hash_string_format(num_bytes: int, string: str) -> str:
+ """
+ Returns a formatted hex string of the given string's hash value.
+ """
+ return "{0:0{1}x}".format(hash_string(num_bytes, string), num_bytes * 2)
diff --git a/chip_data/pyprd/util/model_ec.py b/chip_data/pyprd/util/model_ec.py
new file mode 100644
index 0000000..20b3954
--- /dev/null
+++ b/chip_data/pyprd/util/model_ec.py
@@ -0,0 +1,11 @@
+from collections import namedtuple
+
+ModelEc = namedtuple("ModelEc", "id type desc")
+
+supported = {
+ "EXPLORER_11": ModelEc(0x60D20011, "ocmb", "Explorer DD1.1"),
+ "EXPLORER_20": ModelEc(0x60D20020, "ocmb", "Explorer DD2.0"),
+ "ODYSSEY_10": ModelEc(0x60C00010, "ocmb", "Odyssey DD1.0"),
+ "P10_10": ModelEc(0x20DA0010, "proc", "P10 1.0"),
+ "P10_20": ModelEc(0x20DA0020, "proc", "P10 2.0"),
+}