Build chip data from new JSON format
The chip data XML format has been deprecated and the new JSON format
will be used going forward.
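
For reference, a local build of the chip data binaries might look
something like the following (illustrative only; it assumes the
chip_config option is already defined for the project and accepts the
chip families referenced in chip_data/meson.build):

    meson setup build -Dchip_config=p10,odyssey
    ninja -C build
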
Signed-off-by: Zane Shelley <zshelle@us.ibm.com>
Change-Id: I2f44ae7bb921c9ab38c2664ab5cc2a9d6e2fd66a
diff --git a/chip_data/.gitignore b/chip_data/.gitignore
new file mode 100644
index 0000000..796b96d
--- /dev/null
+++ b/chip_data/.gitignore
@@ -0,0 +1 @@
+/build
diff --git a/chip_data/json_list.sh b/chip_data/json_list.sh
new file mode 100755
index 0000000..e59b127
--- /dev/null
+++ b/chip_data/json_list.sh
@@ -0,0 +1,14 @@
+#!/bin/sh
+
+# Verify input.
+if [ ! -d "$1" ]; then
+ echo "Invalid directory: $1" 1>&2
+ exit 1
+fi
+
+# Simply list out all of the JSON files in the given directory.
+for i in "$1"/*.json; do
+    if [ -f "$i" ]; then
+        echo "$i"
+    fi
+done
diff --git a/chip_data/meson.build b/chip_data/meson.build
new file mode 100644
index 0000000..1741447
--- /dev/null
+++ b/chip_data/meson.build
@@ -0,0 +1,51 @@
+#-------------------------------------------------------------------------------
+# Chip Data Files
+#-------------------------------------------------------------------------------
+
+build_cdb = find_program('parse_chip_data.py')
+
+# The key for each entry in this dictionary is a subdirectory containing the
+# JSON data for a chip model. The value for each entry contains the list of
+# output files expected for that chip model. Note that the script generates
+# all of the output files whenever it is run, regardless of what is listed
+# here. However, this list must be kept in sync with the expected output so
+# that meson will know to rerun the script when an output file has changed or
+# is missing.
+cdb_files = {}
+
+chip_config = get_option('chip_config')
+
+if 'p10' in chip_config
+    cdb_files += {'p10_10' : ['chip_data_p10_10.cdb']}
+    cdb_files += {'p10_20' : ['chip_data_p10_20.cdb']}
+endif
+
+if 'explorer' in chip_config
+    cdb_files += {'explorer' : ['chip_data_explorer_11.cdb',
+                                'chip_data_explorer_20.cdb']}
+endif
+
+if 'odyssey' in chip_config
+    cdb_files += {'odyssey' : ['chip_data_odyssey_10.cdb']}
+endif
+
+foreach chip_dir, out_files : cdb_files
+
+    source_dir = meson.current_source_dir() + '/' + chip_dir
+    build_dir = meson.current_build_dir()
+
+    # Get all JSON files in the chip directory. This is a bit of a workaround
+    # because meson does not allow wildcards.
+    json_list = run_command('json_list.sh', source_dir)
+    in_files = json_list.stdout().strip().split('\n')
+
+    custom_target('build_cdb_' + chip_dir, build_by_default : true,
+                  input : in_files, output : out_files,
+                  command : [ build_cdb, 'bin', source_dir, build_dir ],
+                  install : true,
+                  install_dir : join_paths(get_option('prefix'),
+                                           get_option('datadir'),
+                                           meson.project_name()))
+
+endforeach
+
diff --git a/chip_data/pyprd/chip_data/peltool.py b/chip_data/pyprd/chip_data/peltool.py
index 89724d7..814d17c 100644
--- a/chip_data/pyprd/chip_data/peltool.py
+++ b/chip_data/pyprd/chip_data/peltool.py
@@ -8,10 +8,11 @@
 AttnType = namedtuple("AttnType", "id desc")
 _attn_types = {
-    "CS": AttnType(1, "checkstop"),
+    "CS": AttnType(1, "chip checkstop"),
     "UCS": AttnType(2, "unit checkstop"),
     "RE": AttnType(3, "recoverable"),
     "SPA": AttnType(4, "special attention"),
+    "HA": AttnType(5, "host attention"),
 }
diff --git a/chip_data/pyprd/util/model_ec.py b/chip_data/pyprd/util/model_ec.py
index 20b3954..e7ee5f7 100644
--- a/chip_data/pyprd/util/model_ec.py
+++ b/chip_data/pyprd/util/model_ec.py
@@ -3,9 +3,9 @@
 ModelEc = namedtuple("ModelEc", "id type desc")
 supported = {
-    "EXPLORER_11": ModelEc(0x60D20011, "ocmb", "Explorer DD1.1"),
-    "EXPLORER_20": ModelEc(0x60D20020, "ocmb", "Explorer DD2.0"),
-    "ODYSSEY_10": ModelEc(0x60C00010, "ocmb", "Odyssey DD1.0"),
+    "EXPLORER_11": ModelEc(0x60D20011, "ocmb", "Explorer 1.1"),
+    "EXPLORER_20": ModelEc(0x60D20020, "ocmb", "Explorer 2.0"),
+    "ODYSSEY_10": ModelEc(0x60C00010, "ocmb", "Odyssey 1.0"),
     "P10_10": ModelEc(0x20DA0010, "proc", "P10 1.0"),
     "P10_20": ModelEc(0x20DA0020, "proc", "P10 2.0"),
 }
diff --git a/chip_data/setup.py b/chip_data/setup.py
new file mode 100644
index 0000000..408c88a
--- /dev/null
+++ b/chip_data/setup.py
@@ -0,0 +1,56 @@
+import os
+
+from parse_chip_data import gen_peltool_json
+from setuptools import setup
+from setuptools.command.build_py import build_py
+
+# Typically in files like this we'd use find_packages() to traverse directories
+# for any static packages. However, we are trying to add data to a package that
+# will actually exist in another repository. Therefore, we have to explicitly
+# list out the package name, directory, and data information.
+
+# We are building data for the following module:
+package_name = "pel.hwdiags"
+
+# Since we are not using find_packages() we have to provide a package directory,
+# but in this case nothing exists because there are no static package
+# directories. Therefore, we will just use the empty string.
+package_dir = ""
+
+# Split the package data directory into its components.
+data_dir_components = [*package_name.split("."), "data"]
+
+# It is important to note that '/' must be used as the path separator, even on
+# Windows. Setuptools will automatically convert the slashes where appropriate.
+package_data_glob = "/".join(data_dir_components)
+
+
+# This is a custom build class that is used to dynamically build the data files.
+class my_build_py(build_py):
+    def run(self):
+        if not self.dry_run:  # honor --dry-run flag
+            # Make sure the build directory for the data exists.
+            # Yes, os.path.join() is necessary in this case, which is different
+            # from what is stated above regarding package_data_glob.
+            data_dir = os.path.join(self.build_lib, *data_dir_components)
+            self.mkpath(data_dir)
+
+            # Generate the PEL parser data JSON from the Chip Data JSON.
+            # TODO: The list of data file directories will need to be
+            #       configurable via the package config in the bitbake recipes.
+            for chip in ("p10_10", "p10_20", "explorer", "odyssey"):
+                gen_peltool_json(chip, data_dir)
+
+        # Call the superclass run() to ensure everything else builds.
+        super().run()
+
+
+setup(
+    name="openpower-hw-diags-pel-parser-data",
+    version=os.getenv("PELTOOL_VERSION", "1.0"),
+    classifiers=["License :: OSI Approved :: Apache Software License"],
+    cmdclass={"build_py": my_build_py},  # register custom build class
+    packages=[package_name],
+    package_dir={package_name: package_dir},
+    package_data={package_name: [package_data_glob]},
+)
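
Note: the setuptools packaging above is normally driven by the bitbake
recipes mentioned in the TODO, but a rough sketch of exercising it by
hand could look like this (version value is illustrative; the generated
JSON should land under build/lib/pel/hwdiags/data):

    cd chip_data
    PELTOOL_VERSION=1.0 python3 setup.py build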