black: re-format

black and isort are enabled in openbmc-build-scripts for Python files
to enforce consistent formatting.  Re-run the formatters across the
whole repository.
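
For reference, the result can be reproduced locally with something like
the following (a sketch assuming isort's black-compatible profile; the
canonical invocation and options live in openbmc-build-scripts):

    # assumed commands, not the exact CI invocation
    isort --profile black .
    black .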

Change-Id: I0acc99c570b2a51383f46c62a04367e50f7c42b9
Signed-off-by: Patrick Williams <patrick@stwcx.xyz>
diff --git a/tools/fw-update/pldm_fwup_pkg_creator.py b/tools/fw-update/pldm_fwup_pkg_creator.py
index 7b57cca..ad60bbf 100755
--- a/tools/fw-update/pldm_fwup_pkg_creator.py
+++ b/tools/fw-update/pldm_fwup_pkg_creator.py
@@ -4,30 +4,34 @@
 
 import argparse
 import binascii
-from datetime import datetime
 import json
+import math
 import os
 import struct
 import sys
+from datetime import datetime
 
-import math
 from bitarray import bitarray
 from bitarray.util import ba2int
 
-string_types = dict([
-    ("Unknown", 0),
-    ("ASCII", 1),
-    ("UTF8", 2),
-    ("UTF16", 3),
-    ("UTF16LE", 4),
-    ("UTF16BE", 5)])
+string_types = dict(
+    [
+        ("Unknown", 0),
+        ("ASCII", 1),
+        ("UTF8", 2),
+        ("UTF16", 3),
+        ("UTF16LE", 4),
+        ("UTF16BE", 5),
+    ]
+)
 
 initial_descriptor_type_name_length = {
     0x0000: ["PCI Vendor ID", 2],
     0x0001: ["IANA Enterprise ID", 4],
     0x0002: ["UUID", 16],
     0x0003: ["PnP Vendor ID", 3],
-    0x0004: ["ACPI Vendor ID", 4]}
+    0x0004: ["ACPI Vendor ID", 4],
+}
 
 descriptor_type_name_length = {
     0x0000: ["PCI Vendor ID", 2],
@@ -40,7 +44,8 @@
     0x0102: ["PCI Subsystem ID", 2],
     0x0103: ["PCI Revision ID", 1],
     0x0104: ["PnP Product Identifier", 4],
-    0x0105: ["ACPI Product Identifier", 4]}
+    0x0105: ["ACPI Product Identifier", 4],
+}
 
 
 def check_string_length(string):
@@ -50,20 +55,20 @@
 
 
 def write_pkg_release_date_time(pldm_fw_up_pkg, release_date_time):
-    '''
+    """
     Write the timestamp into the package header. The timestamp is formatted as
     series of 13 bytes defined in DSP0240 specification.
 
         Parameters:
             pldm_fw_up_pkg: PLDM FW update package
             release_date_time: Package Release Date Time
-    '''
+    """
     time = release_date_time.time()
     date = release_date_time.date()
-    us_bytes = time.microsecond.to_bytes(3, byteorder='little')
+    us_bytes = time.microsecond.to_bytes(3, byteorder="little")
     pldm_fw_up_pkg.write(
         struct.pack(
-            '<hBBBBBBBBHB',
+            "<hBBBBBBBBHB",
             0,
             us_bytes[0],
             us_bytes[1],
@@ -74,34 +79,39 @@
             date.day,
             date.month,
             date.year,
-            0))
+            0,
+        )
+    )
 
 
 def write_package_version_string(pldm_fw_up_pkg, metadata):
-    '''
+    """
     Write PackageVersionStringType, PackageVersionStringLength and
     PackageVersionString to the package header.
 
         Parameters:
             pldm_fw_up_pkg: PLDM FW update package
             metadata: metadata about PLDM FW update package
-    '''
+    """
     # Hardcoded string type to ASCII
     string_type = string_types["ASCII"]
-    package_version_string = \
-        metadata["PackageHeaderInformation"]["PackageVersionString"]
+    package_version_string = metadata["PackageHeaderInformation"][
+        "PackageVersionString"
+    ]
     check_string_length(package_version_string)
-    format_string = '<BB' + str(len(package_version_string)) + 's'
+    format_string = "<BB" + str(len(package_version_string)) + "s"
     pldm_fw_up_pkg.write(
         struct.pack(
             format_string,
             string_type,
             len(package_version_string),
-            package_version_string.encode('ascii')))
+            package_version_string.encode("ascii"),
+        )
+    )
 
 
 def write_component_bitmap_bit_length(pldm_fw_up_pkg, metadata):
-    '''
+    """
     ComponentBitmapBitLength in the package header indicates the number of bits
     that will be used represent the bitmap in the ApplicableComponents field
     for a matching device. The value shall be a multiple of 8 and be large
@@ -116,7 +126,7 @@
             ComponentBitmapBitLength: number of bits that will be used
             represent the bitmap in the ApplicableComponents field for a
             matching device
-    '''
+    """
     # The script supports upto 32 components now
     max_components = 32
     bitmap_multiple = 8
@@ -124,14 +134,15 @@
     num_components = len(metadata["ComponentImageInformationArea"])
     if num_components > max_components:
         sys.exit("ERROR: only upto 32 components supported now")
-    component_bitmap_bit_length = bitmap_multiple * \
-        math.ceil(num_components/bitmap_multiple)
-    pldm_fw_up_pkg.write(struct.pack('<H', int(component_bitmap_bit_length)))
+    component_bitmap_bit_length = bitmap_multiple * math.ceil(
+        num_components / bitmap_multiple
+    )
+    pldm_fw_up_pkg.write(struct.pack("<H", int(component_bitmap_bit_length)))
     return component_bitmap_bit_length
 
 
 def write_pkg_header_info(pldm_fw_up_pkg, metadata):
-    '''
+    """
     ComponentBitmapBitLength in the package header indicates the number of bits
     that will be used represent the bitmap in the ApplicableComponents field
     for a matching device. The value shall be a multiple of 8 and be large
@@ -146,37 +157,38 @@
             ComponentBitmapBitLength: number of bits that will be used
             represent the bitmap in the ApplicableComponents field for a
             matching device
-    '''
+    """
     uuid = metadata["PackageHeaderInformation"]["PackageHeaderIdentifier"]
     package_header_identifier = bytearray.fromhex(uuid)
     pldm_fw_up_pkg.write(package_header_identifier)
 
-    package_header_format_revision = \
-        metadata["PackageHeaderInformation"]["PackageHeaderFormatVersion"]
+    package_header_format_revision = metadata["PackageHeaderInformation"][
+        "PackageHeaderFormatVersion"
+    ]
     # Size will be computed and updated subsequently
     package_header_size = 0
     pldm_fw_up_pkg.write(
-        struct.pack(
-            '<BH',
-            package_header_format_revision,
-            package_header_size))
+        struct.pack("<BH", package_header_format_revision, package_header_size)
+    )
 
     try:
         release_date_time = datetime.strptime(
             metadata["PackageHeaderInformation"]["PackageReleaseDateTime"],
-            "%d/%m/%Y %H:%M:%S")
+            "%d/%m/%Y %H:%M:%S",
+        )
         write_pkg_release_date_time(pldm_fw_up_pkg, release_date_time)
     except KeyError:
         write_pkg_release_date_time(pldm_fw_up_pkg, datetime.now())
 
     component_bitmap_bit_length = write_component_bitmap_bit_length(
-        pldm_fw_up_pkg, metadata)
+        pldm_fw_up_pkg, metadata
+    )
     write_package_version_string(pldm_fw_up_pkg, metadata)
     return component_bitmap_bit_length
 
 
 def get_applicable_components(device, components, component_bitmap_bit_length):
-    '''
+    """
     This function figures out the components applicable for the device and sets
     the ApplicableComponents bitfield accordingly.
 
@@ -187,10 +199,11 @@
 
         Returns:
             The ApplicableComponents bitfield
-    '''
+    """
     applicable_components_list = device["ApplicableComponents"]
-    applicable_components = bitarray(component_bitmap_bit_length,
-                                     endian='little')
+    applicable_components = bitarray(
+        component_bitmap_bit_length, endian="little"
+    )
     applicable_components.setall(0)
     for component in components:
         if component["ComponentIdentifier"] in applicable_components_list:
@@ -199,7 +212,7 @@
 
 
 def prepare_record_descriptors(descriptors):
-    '''
+    """
     This function processes the Descriptors and prepares the RecordDescriptors
     section of the the firmware device ID record.
 
@@ -208,7 +221,7 @@
 
         Returns:
             RecordDescriptors, DescriptorCount
-    '''
+    """
     record_descriptors = bytearray()
     vendor_defined_desc_type = 65535
     vendor_desc_title_str_type_len = 1
@@ -216,59 +229,73 @@
     descriptor_count = 0
 
     for descriptor in descriptors:
-
         descriptor_type = descriptor["DescriptorType"]
         if descriptor_count == 0:
-            if initial_descriptor_type_name_length.get(descriptor_type) \
-                    is None:
+            if (
+                initial_descriptor_type_name_length.get(descriptor_type)
+                is None
+            ):
                 sys.exit("ERROR: Initial descriptor type not supported")
         else:
-            if descriptor_type_name_length.get(descriptor_type) is None and \
-                    descriptor_type != vendor_defined_desc_type:
+            if (
+                descriptor_type_name_length.get(descriptor_type) is None
+                and descriptor_type != vendor_defined_desc_type
+            ):
                 sys.exit("ERROR: Descriptor type not supported")
 
         if descriptor_type == vendor_defined_desc_type:
-            vendor_desc_title_str = \
-                descriptor["VendorDefinedDescriptorTitleString"]
+            vendor_desc_title_str = descriptor[
+                "VendorDefinedDescriptorTitleString"
+            ]
             vendor_desc_data = descriptor["VendorDefinedDescriptorData"]
             check_string_length(vendor_desc_title_str)
             vendor_desc_title_str_type = string_types["ASCII"]
-            descriptor_length = vendor_desc_title_str_type_len + \
-                vendor_desc_title_str_len_len + len(vendor_desc_title_str) + \
-                len(bytearray.fromhex(vendor_desc_data))
-            format_string = '<HHBB' + str(len(vendor_desc_title_str)) + 's'
-            record_descriptors.extend(struct.pack(
-                format_string,
-                descriptor_type,
-                descriptor_length,
-                vendor_desc_title_str_type,
-                len(vendor_desc_title_str),
-                vendor_desc_title_str.encode('ascii')))
+            descriptor_length = (
+                vendor_desc_title_str_type_len
+                + vendor_desc_title_str_len_len
+                + len(vendor_desc_title_str)
+                + len(bytearray.fromhex(vendor_desc_data))
+            )
+            format_string = "<HHBB" + str(len(vendor_desc_title_str)) + "s"
+            record_descriptors.extend(
+                struct.pack(
+                    format_string,
+                    descriptor_type,
+                    descriptor_length,
+                    vendor_desc_title_str_type,
+                    len(vendor_desc_title_str),
+                    vendor_desc_title_str.encode("ascii"),
+                )
+            )
             record_descriptors.extend(bytearray.fromhex(vendor_desc_data))
             descriptor_count += 1
         else:
             descriptor_type = descriptor["DescriptorType"]
             descriptor_data = descriptor["DescriptorData"]
             descriptor_length = len(bytearray.fromhex(descriptor_data))
-            if descriptor_length != \
-                    descriptor_type_name_length.get(descriptor_type)[1]:
-                err_string = "ERROR: Descriptor type - " + \
-                    descriptor_type_name_length.get(descriptor_type)[0] + \
-                    " length is incorrect"
+            if (
+                descriptor_length
+                != descriptor_type_name_length.get(descriptor_type)[1]
+            ):
+                err_string = (
+                    "ERROR: Descriptor type - "
+                    + descriptor_type_name_length.get(descriptor_type)[0]
+                    + " length is incorrect"
+                )
                 sys.exit(err_string)
-            format_string = '<HH'
-            record_descriptors.extend(struct.pack(
-                format_string,
-                descriptor_type,
-                descriptor_length))
+            format_string = "<HH"
+            record_descriptors.extend(
+                struct.pack(format_string, descriptor_type, descriptor_length)
+            )
             record_descriptors.extend(bytearray.fromhex(descriptor_data))
             descriptor_count += 1
     return record_descriptors, descriptor_count
 
 
-def write_fw_device_identification_area(pldm_fw_up_pkg, metadata,
-                                        component_bitmap_bit_length):
-    '''
+def write_fw_device_identification_area(
+    pldm_fw_up_pkg, metadata, component_bitmap_bit_length
+):
+    """
     Write firmware device ID records into the PLDM package header
 
     This function writes the DeviceIDRecordCount and the
@@ -280,18 +307,19 @@
             pldm_fw_up_pkg: PLDM FW update package
             metadata: metadata about PLDM FW update package
             component_bitmap_bit_length: length of the ComponentBitmapBitLength
-    '''
+    """
     # The spec limits the number of firmware device ID records to 255
     max_device_id_record_count = 255
     devices = metadata["FirmwareDeviceIdentificationArea"]
     device_id_record_count = len(devices)
     if device_id_record_count > max_device_id_record_count:
         sys.exit(
-            "ERROR: there can be only upto 255 entries in the \
-                FirmwareDeviceIdentificationArea section")
+            "ERROR: there can be only upto 255 entries in the                "
+            " FirmwareDeviceIdentificationArea section"
+        )
 
     # DeviceIDRecordCount
-    pldm_fw_up_pkg.write(struct.pack('<B', device_id_record_count))
+    pldm_fw_up_pkg.write(struct.pack("<B", device_id_record_count))
 
     for device in devices:
         # RecordLength size
@@ -301,7 +329,7 @@
         record_length += 1
 
         # DeviceUpdateOptionFlags
-        device_update_option_flags = bitarray(32, endian='little')
+        device_update_option_flags = bitarray(32, endian="little")
         device_update_option_flags.setall(0)
         # Continue component updates after failure
         supported_device_update_option_flags = [0]
@@ -316,8 +344,9 @@
         record_length += 1
 
         # ComponentImageSetVersionStringLength
-        component_image_set_version_string = \
-            device["ComponentImageSetVersionString"]
+        component_image_set_version_string = device[
+            "ComponentImageSetVersionString"
+        ]
         check_string_length(component_image_set_version_string)
         record_length += len(component_image_set_version_string)
         record_length += 1
@@ -329,23 +358,28 @@
 
         # ApplicableComponents
         components = metadata["ComponentImageInformationArea"]
-        applicable_components = \
-            get_applicable_components(device,
-                                      components,
-                                      component_bitmap_bit_length)
-        applicable_components_bitfield_length = \
-            round(len(applicable_components)/8)
+        applicable_components = get_applicable_components(
+            device, components, component_bitmap_bit_length
+        )
+        applicable_components_bitfield_length = round(
+            len(applicable_components) / 8
+        )
         record_length += applicable_components_bitfield_length
 
         # RecordDescriptors
         descriptors = device["Descriptors"]
-        record_descriptors, descriptor_count = \
-            prepare_record_descriptors(descriptors)
+        record_descriptors, descriptor_count = prepare_record_descriptors(
+            descriptors
+        )
         record_length += len(record_descriptors)
 
-        format_string = '<HBIBBH' + \
-            str(applicable_components_bitfield_length) + 's' + \
-            str(len(component_image_set_version_string)) + 's'
+        format_string = (
+            "<HBIBBH"
+            + str(applicable_components_bitfield_length)
+            + "s"
+            + str(len(component_image_set_version_string))
+            + "s"
+        )
         pldm_fw_up_pkg.write(
             struct.pack(
                 format_string,
@@ -356,12 +390,14 @@
                 len(component_image_set_version_string),
                 fw_device_pkg_data_length,
                 applicable_components.tobytes(),
-                component_image_set_version_string.encode('ascii')))
+                component_image_set_version_string.encode("ascii"),
+            )
+        )
         pldm_fw_up_pkg.write(record_descriptors)
 
 
 def write_component_image_info_area(pldm_fw_up_pkg, metadata, image_files):
-    '''
+    """
     Write component image information area into the PLDM package header
 
     This function writes the ComponentImageCount and the
@@ -374,10 +410,10 @@
         pldm_fw_up_pkg: PLDM FW update package
         metadata: metadata about PLDM FW update package
         image_files: component images
-    '''
+    """
     components = metadata["ComponentImageInformationArea"]
     # ComponentImageCount
-    pldm_fw_up_pkg.write(struct.pack('<H', len(components)))
+    pldm_fw_up_pkg.write(struct.pack("<H", len(components)))
     component_location_offsets = []
     # ComponentLocationOffset position in individual component image
     # information
@@ -386,44 +422,47 @@
     for component in components:
         # Record the location of the ComponentLocationOffset to be updated
         # after appending images to the firmware update package
-        component_location_offsets.append(pldm_fw_up_pkg.tell() +
-                                          component_location_offset_pos)
+        component_location_offsets.append(
+            pldm_fw_up_pkg.tell() + component_location_offset_pos
+        )
 
         # ComponentClassification
         component_classification = component["ComponentClassification"]
         if component_classification < 0 or component_classification > 0xFFFF:
             sys.exit(
-                "ERROR: ComponentClassification should be [0x0000 - 0xFFFF]")
+                "ERROR: ComponentClassification should be [0x0000 - 0xFFFF]"
+            )
 
         # ComponentIdentifier
         component_identifier = component["ComponentIdentifier"]
         if component_identifier < 0 or component_identifier > 0xFFFF:
-            sys.exit(
-                "ERROR: ComponentIdentifier should be [0x0000 - 0xFFFF]")
+            sys.exit("ERROR: ComponentIdentifier should be [0x0000 - 0xFFFF]")
 
         # ComponentComparisonStamp not supported
         component_comparison_stamp = 0xFFFFFFFF
 
         # ComponentOptions
-        component_options = bitarray(16, endian='little')
+        component_options = bitarray(16, endian="little")
         component_options.setall(0)
         supported_component_options = [0, 1, 2]
         for option in component["ComponentOptions"]:
             if option not in supported_component_options:
                 sys.exit(
-                    "ERROR: unsupported ComponentOption in\
-                    ComponentImageInformationArea section")
+                    "ERROR: unsupported ComponentOption in                   "
+                    " ComponentImageInformationArea section"
+                )
             component_options[option] = 1
 
         # RequestedComponentActivationMethod
-        requested_component_activation_method = bitarray(16, endian='little')
+        requested_component_activation_method = bitarray(16, endian="little")
         requested_component_activation_method.setall(0)
         supported_requested_component_activation_method = [0, 1, 2, 3, 4, 5]
         for option in component["RequestedComponentActivationMethod"]:
             if option not in supported_requested_component_activation_method:
                 sys.exit(
-                    "ERROR: unsupported RequestedComponent\
-                        ActivationMethod entry")
+                    "ERROR: unsupported RequestedComponent                    "
+                    "    ActivationMethod entry"
+                )
             requested_component_activation_method[option] = 1
 
         # ComponentLocationOffset
@@ -437,7 +476,7 @@
         component_version_string = component["ComponentVersionString"]
         check_string_length(component_version_string)
 
-        format_string = '<HHIHHIIBB' + str(len(component_version_string)) + 's'
+        format_string = "<HHIHHIIBB" + str(len(component_version_string)) + "s"
         pldm_fw_up_pkg.write(
             struct.pack(
                 format_string,
@@ -450,7 +489,9 @@
                 component_size,
                 component_version_string_type,
                 len(component_version_string),
-                component_version_string.encode('ascii')))
+                component_version_string.encode("ascii"),
+            )
+        )
 
     index = 0
     pkg_header_checksum_size = 4
@@ -459,55 +500,53 @@
     for offset in component_location_offsets:
         file_size = os.stat(image_files[index]).st_size
         pldm_fw_up_pkg.seek(offset)
-        pldm_fw_up_pkg.write(
-            struct.pack(
-                '<II', start_offset, file_size))
+        pldm_fw_up_pkg.write(struct.pack("<II", start_offset, file_size))
         start_offset += file_size
         index += 1
     pldm_fw_up_pkg.seek(0, os.SEEK_END)
 
 
 def write_pkg_header_checksum(pldm_fw_up_pkg):
-    '''
+    """
     Write PackageHeaderChecksum into the PLDM package header.
 
         Parameters:
             pldm_fw_up_pkg: PLDM FW update package
-    '''
+    """
     pldm_fw_up_pkg.seek(0)
     package_header_checksum = binascii.crc32(pldm_fw_up_pkg.read())
     pldm_fw_up_pkg.seek(0, os.SEEK_END)
-    pldm_fw_up_pkg.write(struct.pack('<I', package_header_checksum))
+    pldm_fw_up_pkg.write(struct.pack("<I", package_header_checksum))
 
 
 def update_pkg_header_size(pldm_fw_up_pkg):
-    '''
+    """
     Update PackageHeader in the PLDM package header. The package header size
     which is the count of all bytes in the PLDM package header structure is
     calculated once the package header contents is complete.
 
         Parameters:
             pldm_fw_up_pkg: PLDM FW update package
-    '''
+    """
     pkg_header_checksum_size = 4
     file_size = pldm_fw_up_pkg.tell() + pkg_header_checksum_size
     pkg_header_size_offset = 17
     # Seek past PackageHeaderIdentifier and PackageHeaderFormatRevision
     pldm_fw_up_pkg.seek(pkg_header_size_offset)
-    pldm_fw_up_pkg.write(struct.pack('<H', file_size))
+    pldm_fw_up_pkg.write(struct.pack("<H", file_size))
     pldm_fw_up_pkg.seek(0, os.SEEK_END)
 
 
 def append_component_images(pldm_fw_up_pkg, image_files):
-    '''
+    """
     Append the component images to the firmware update package.
 
         Parameters:
             pldm_fw_up_pkg: PLDM FW update package
             image_files: component images
-    '''
+    """
     for image in image_files:
-        with open(image, 'rb') as file:
+        with open(image, "rb") as file:
             for line in file:
                 pldm_fw_up_pkg.write(line)
 
@@ -515,13 +554,18 @@
 def main():
     """Create PLDM FW update (DSP0267) package based on a JSON metadata file"""
     parser = argparse.ArgumentParser()
-    parser.add_argument("pldmfwuppkgname",
-                        help="Name of the PLDM FW update package")
+    parser.add_argument(
+        "pldmfwuppkgname", help="Name of the PLDM FW update package"
+    )
     parser.add_argument("metadatafile", help="Path of metadata JSON file")
     parser.add_argument(
-        "images", nargs='+',
-        help="One or more firmware image paths, in the same order as\
-            ComponentImageInformationArea entries")
+        "images",
+        nargs="+",
+        help=(
+            "One or more firmware image paths, in the same order as           "
+            " ComponentImageInformationArea entries"
+        ),
+    )
 
     args = parser.parse_args()
     image_files = args.images
@@ -533,18 +577,22 @@
 
     # Validate the number of component images
     if len(image_files) != len(metadata["ComponentImageInformationArea"]):
-        sys.exit("ERROR: number of images passed != number of entries \
-            in ComponentImageInformationArea")
+        sys.exit(
+            "ERROR: number of images passed != number of entries            "
+            " in ComponentImageInformationArea"
+        )
 
     try:
-        with open(args.pldmfwuppkgname, 'w+b') as pldm_fw_up_pkg:
-            component_bitmap_bit_length = write_pkg_header_info(pldm_fw_up_pkg,
-                                                                metadata)
-            write_fw_device_identification_area(pldm_fw_up_pkg,
-                                                metadata,
-                                                component_bitmap_bit_length)
-            write_component_image_info_area(pldm_fw_up_pkg, metadata,
-                                            image_files)
+        with open(args.pldmfwuppkgname, "w+b") as pldm_fw_up_pkg:
+            component_bitmap_bit_length = write_pkg_header_info(
+                pldm_fw_up_pkg, metadata
+            )
+            write_fw_device_identification_area(
+                pldm_fw_up_pkg, metadata, component_bitmap_bit_length
+            )
+            write_component_image_info_area(
+                pldm_fw_up_pkg, metadata, image_files
+            )
             update_pkg_header_size(pldm_fw_up_pkg)
             write_pkg_header_checksum(pldm_fw_up_pkg)
             append_component_images(pldm_fw_up_pkg, image_files)
diff --git a/tools/visualize-pdr/pldm_visualise_pdrs.py b/tools/visualize-pdr/pldm_visualise_pdrs.py
index 70e7a2b..4c90183 100755
--- a/tools/visualize-pdr/pldm_visualise_pdrs.py
+++ b/tools/visualize-pdr/pldm_visualise_pdrs.py
@@ -3,45 +3,46 @@
 """Tool to visualize PLDM PDR's"""
 
 import argparse
-import json
 import hashlib
-import sys
-from datetime import datetime
-import paramiko
-from graphviz import Digraph
-from tabulate import tabulate
+import json
 import os
 import shlex
 import shutil
 import subprocess
+import sys
+from datetime import datetime
+
+import paramiko
+from graphviz import Digraph
+from tabulate import tabulate
 
 
 class Process:
-    """ Interface definition for interacting with a process created by an
-        Executor."""
+    """Interface definition for interacting with a process created by an
+    Executor."""
 
     def __init__(self, stdout, stderr):
-        """ Construct a Process object.  Process object clients can read the
-            process stdout and stderr with os.read(), and can wait for the
-            process to exit.
+        """Construct a Process object.  Process object clients can read the
+        process stdout and stderr with os.read(), and can wait for the
+        process to exit.
 
-            Parameters:
-                stdout: os.read()able stream representing stdout
-                stderr: os.read()able stream representing stderr
+        Parameters:
+            stdout: os.read()able stream representing stdout
+            stderr: os.read()able stream representing stderr
         """
 
         self.stdout = stdout
         self.stderr = stderr
 
     def wait(self):
-        """ Wait for the process to finish, and return its exit status."""
+        """Wait for the process to finish, and return its exit status."""
 
         raise NotImplementedError
 
 
 class Executor:
-    """ Interface definition for interacting with executors.  An executor is an
-        object that can run a program."""
+    """Interface definition for interacting with executors.  An executor is an
+    object that can run a program."""
 
     def exec_command(self, cmd):
         raise NotImplementedError
@@ -51,8 +52,8 @@
 
 
 class ParamikoProcess(Process):
-    """ Concrete implementation of the Process interface that adapts Paramiko
-        interfaces to the Process interface requirements."""
+    """Concrete implementation of the Process interface that adapts Paramiko
+    interfaces to the Process interface requirements."""
 
     def __init__(self, stdout, stderr):
         super(ParamikoProcess, self).__init__(stdout, stderr)
@@ -62,25 +63,26 @@
 
 
 class ParamikoExecutor(Executor):
-    """ Concrete implementation of the Executor interface that uses
-        Paramiko to connect to a remote BMC to run the program."""
+    """Concrete implementation of the Executor interface that uses
+    Paramiko to connect to a remote BMC to run the program."""
 
     def __init__(self, hostname, uname, passwd, port, **kw):
-        """ This function is responsible for connecting to the BMC via
-            ssh and returning an executor object.
+        """This function is responsible for connecting to the BMC via
+        ssh and returning an executor object.
 
-            Parameters:
-                hostname: hostname/IP address of BMC
-                uname: ssh username of BMC
-                passwd: ssh password of BMC
-                port: ssh port of BMC
+        Parameters:
+            hostname: hostname/IP address of BMC
+            uname: ssh username of BMC
+            passwd: ssh password of BMC
+            port: ssh port of BMC
         """
 
         super(ParamikoExecutor, self).__init__()
         self.client = paramiko.SSHClient()
         self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
         self.client.connect(
-            hostname, username=uname, password=passwd, port=port, **kw)
+            hostname, username=uname, password=passwd, port=port, **kw
+        )
 
     def exec_command(self, cmd):
         _, stdout, stderr = self.client.exec_command(cmd)
@@ -108,19 +110,19 @@
         args = shlex.split(cmd)
         args[0] = shutil.which(args[0])
         p = subprocess.Popen(
-            args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+            args, stdout=subprocess.PIPE, stderr=subprocess.PIPE
+        )
         return SubprocessProcess(p)
 
 
 def prepare_summary_report(state_sensor_pdr, state_effecter_pdr):
+    """This function is responsible to parse the state sensor pdr
+    and the state effecter pdr dictionaries and creating the
+    summary table.
 
-    """ This function is responsible to parse the state sensor pdr
-        and the state effecter pdr dictionaries and creating the
-        summary table.
-
-        Parameters:
-            state_sensor_pdr: list of state sensor pdrs
-            state_effecter_pdr: list of state effecter pdrs
+    Parameters:
+        state_sensor_pdr: list of state sensor pdrs
+        state_effecter_pdr: list of state effecter pdrs
 
     """
 
@@ -129,12 +131,17 @@
     summary_table.append(headers)
     for value in state_sensor_pdr.values():
         summary_record = []
-        sensor_possible_states = ''
+        sensor_possible_states = ""
         for sensor_state in value["possibleStates[0]"]:
-            sensor_possible_states += sensor_state+"\n"
-        summary_record.extend([value["sensorID"], value["entityType"],
-                               value["stateSetID[0]"],
-                               sensor_possible_states])
+            sensor_possible_states += sensor_state + "\n"
+        summary_record.extend(
+            [
+                value["sensorID"],
+                value["entityType"],
+                value["stateSetID[0]"],
+                sensor_possible_states,
+            ]
+        )
         summary_table.append(summary_record)
     print("Created at : ", datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
     print(tabulate(summary_table, tablefmt="fancy_grid", headers="firstrow"))
@@ -144,72 +151,107 @@
     summary_table.append(headers)
     for value in state_effecter_pdr.values():
         summary_record = []
-        effecter_possible_states = ''
+        effecter_possible_states = ""
         for state in value["possibleStates[0]"]:
-            effecter_possible_states += state+"\n"
-        summary_record.extend([value["effecterID"], value["entityType"],
-                               value["stateSetID[0]"],
-                               effecter_possible_states])
+            effecter_possible_states += state + "\n"
+        summary_record.extend(
+            [
+                value["effecterID"],
+                value["entityType"],
+                value["stateSetID[0]"],
+                effecter_possible_states,
+            ]
+        )
         summary_table.append(summary_record)
     print(tabulate(summary_table, tablefmt="fancy_grid", headers="firstrow"))
 
 
 def draw_entity_associations(pdr, counter):
+    """This function is responsible to create a picture that captures
+    the entity association hierarchy based on the entity association
+    PDR's received from the BMC.
 
-    """ This function is responsible to create a picture that captures
-        the entity association hierarchy based on the entity association
-        PDR's received from the BMC.
-
-        Parameters:
-            pdr: list of entity association PDR's
-            counter: variable to capture the count of PDR's to unflatten
-                     the tree
+    Parameters:
+        pdr: list of entity association PDR's
+        counter: variable to capture the count of PDR's to unflatten
+                 the tree
 
     """
 
-    dot = Digraph('entity_hierarchy', node_attr={'color': 'lightblue1',
-                                                 'style': 'filled'})
-    dot.attr(label=r'\n\nEntity Relation Diagram < ' +
-             str(datetime.now().strftime("%Y-%m-%d %H:%M:%S"))+'>\n')
-    dot.attr(fontsize='20')
+    dot = Digraph(
+        "entity_hierarchy",
+        node_attr={"color": "lightblue1", "style": "filled"},
+    )
+    dot.attr(
+        label=r"\n\nEntity Relation Diagram < "
+        + str(datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
+        + ">\n"
+    )
+    dot.attr(fontsize="20")
     edge_list = []
     for value in pdr.values():
-        parentnode = str(value["containerEntityType"]) + \
-                     str(value["containerEntityInstanceNumber"])
-        dot.node(hashlib.md5((parentnode +
-                              str(value["containerEntityContainerID"]))
-                             .encode()).hexdigest(), parentnode)
+        parentnode = str(value["containerEntityType"]) + str(
+            value["containerEntityInstanceNumber"]
+        )
+        dot.node(
+            hashlib.md5(
+                (
+                    parentnode + str(value["containerEntityContainerID"])
+                ).encode()
+            ).hexdigest(),
+            parentnode,
+        )
 
-        for i in range(1, value["containedEntityCount"]+1):
-            childnode = str(value[f"containedEntityType[{i}]"]) + \
-                        str(value[f"containedEntityInstanceNumber[{i}]"])
+        for i in range(1, value["containedEntityCount"] + 1):
+            childnode = str(value[f"containedEntityType[{i}]"]) + str(
+                value[f"containedEntityInstanceNumber[{i}]"]
+            )
             cid = str(value[f"containedEntityContainerID[{i}]"])
-            dot.node(hashlib.md5((childnode + cid)
-                                 .encode()).hexdigest(), childnode)
+            dot.node(
+                hashlib.md5((childnode + cid).encode()).hexdigest(), childnode
+            )
 
-            if [hashlib.md5((parentnode +
-                            str(value["containerEntityContainerID"]))
-                            .encode()).hexdigest(),
-               hashlib.md5((childnode + cid)
-                           .encode()).hexdigest()] not in edge_list:
-                edge_list.append([hashlib.md5((parentnode +
-                                  str(value["containerEntityContainerID"]))
-                                              .encode()).hexdigest(),
-                                  hashlib.md5((childnode + cid)
-                                              .encode()).hexdigest()])
-                dot.edge(hashlib.md5((parentnode +
-                                      str(value["containerEntityContainerID"]))
-                                     .encode()).hexdigest(),
-                         hashlib.md5((childnode + cid).encode()).hexdigest())
-    unflattentree = dot.unflatten(stagger=(round(counter/3)))
-    unflattentree.render(filename='entity_association_' +
-                         str(datetime.now().strftime("%Y-%m-%d_%H-%M-%S")),
-                         view=False, cleanup=True, format='pdf')
+            if [
+                hashlib.md5(
+                    (
+                        parentnode + str(value["containerEntityContainerID"])
+                    ).encode()
+                ).hexdigest(),
+                hashlib.md5((childnode + cid).encode()).hexdigest(),
+            ] not in edge_list:
+                edge_list.append(
+                    [
+                        hashlib.md5(
+                            (
+                                parentnode
+                                + str(value["containerEntityContainerID"])
+                            ).encode()
+                        ).hexdigest(),
+                        hashlib.md5((childnode + cid).encode()).hexdigest(),
+                    ]
+                )
+                dot.edge(
+                    hashlib.md5(
+                        (
+                            parentnode
+                            + str(value["containerEntityContainerID"])
+                        ).encode()
+                    ).hexdigest(),
+                    hashlib.md5((childnode + cid).encode()).hexdigest(),
+                )
+    unflattentree = dot.unflatten(stagger=(round(counter / 3)))
+    unflattentree.render(
+        filename="entity_association_"
+        + str(datetime.now().strftime("%Y-%m-%d_%H-%M-%S")),
+        view=False,
+        cleanup=True,
+        format="pdf",
+    )
 
 
 class PLDMToolError(Exception):
-    """ Exception class intended to be used to hold pldmtool invocation failure
-        information such as exit status and stderr.
+    """Exception class intended to be used to hold pldmtool invocation failure
+    information such as exit status and stderr.
 
     """
 
@@ -224,25 +266,25 @@
 
 
 def process_pldmtool_output(process):
-    """ Ensure pldmtool runs without error and if it does fail, detect that and
-        show the pldmtool exit status and it's stderr.
+    """Ensure pldmtool runs without error and if it does fail, detect that and
+    show the pldmtool exit status and it's stderr.
 
-        A simpler implementation would just wait for the pldmtool exit status
-        prior to attempting to decode it's stdout.  Instead, optimize for the
-        no error case and allow the json decoder to consume pldmtool stdout as
-        soon as it is available (in parallel).  This results in the following
-        error scenarios:
-            - pldmtool fails and the decoder fails
-              Ignore the decoder fail and throw PLDMToolError.
-            - pldmtool fails and the decoder doesn't fail
-              Throw PLDMToolError.
-            - pldmtool doesn't fail and the decoder does fail
-              This is a pldmtool bug - re-throw the decoder error.
+    A simpler implementation would just wait for the pldmtool exit status
+    prior to attempting to decode it's stdout.  Instead, optimize for the
+    no error case and allow the json decoder to consume pldmtool stdout as
+    soon as it is available (in parallel).  This results in the following
+    error scenarios:
+        - pldmtool fails and the decoder fails
+          Ignore the decoder fail and throw PLDMToolError.
+        - pldmtool fails and the decoder doesn't fail
+          Throw PLDMToolError.
+        - pldmtool doesn't fail and the decoder does fail
+          This is a pldmtool bug - re-throw the decoder error.
 
-        Parameters:
-            process: A Process object providing process control functions like
-                     wait, and access functions such as reading stdout and
-                     stderr.
+    Parameters:
+        process: A Process object providing process control functions like
+                 wait, and access functions such as reading stdout and
+                 stderr.
 
     """
 
@@ -269,15 +311,15 @@
 
 
 def get_pdrs_one_at_a_time(executor):
-    """ Using pldmtool, generate (record handle, PDR) tuples for each record in
-        the PDR repository.
+    """Using pldmtool, generate (record handle, PDR) tuples for each record in
+    the PDR repository.
 
-        Parameters:
-            executor: executor object for running pldmtool
+    Parameters:
+        executor: executor object for running pldmtool
 
     """
 
-    command_fmt = 'pldmtool platform getpdr -d {}'
+    command_fmt = "pldmtool platform getpdr -d {}"
     record_handle = 0
     while True:
         process = executor.exec_command(command_fmt.format(str(record_handle)))
@@ -289,19 +331,19 @@
 
 
 def get_all_pdrs_at_once(executor):
-    """ Using pldmtool, generate (record handle, PDR) tuples for each record in
-        the PDR repository.  Use pldmtool platform getpdr --all.
+    """Using pldmtool, generate (record handle, PDR) tuples for each record in
+    the PDR repository.  Use pldmtool platform getpdr --all.
 
-        Parameters:
-            executor: executor object for running pldmtool
+    Parameters:
+        executor: executor object for running pldmtool
 
     """
 
-    process = executor.exec_command('pldmtool platform getpdr -a')
+    process = executor.exec_command("pldmtool platform getpdr -a")
     all_pdrs = process_pldmtool_output(process)
 
     # Explicitly request record 0 to find out what the real first record is.
-    process = executor.exec_command('pldmtool platform getpdr -d 0')
+    process = executor.exec_command("pldmtool platform getpdr -d 0")
     pdr_0 = process_pldmtool_output(process)
     record_handle = pdr_0["recordHandle"]
 
@@ -313,17 +355,18 @@
                 if record_handle == 0:
                     return
         raise RuntimeError(
-            "Dangling reference to record {}".format(record_handle))
+            "Dangling reference to record {}".format(record_handle)
+        )
 
 
 def get_pdrs(executor):
-    """ Using pldmtool, generate (record handle, PDR) tuples for each record in
-        the PDR repository.  Use pldmtool platform getpdr --all or fallback on
-        getting them one at a time if pldmtool doesn't support the --all
-        option.
+    """Using pldmtool, generate (record handle, PDR) tuples for each record in
+    the PDR repository.  Use pldmtool platform getpdr --all or fallback on
+    getting them one at a time if pldmtool doesn't support the --all
+    option.
 
-        Parameters:
-            executor: executor object for running pldmtool
+    Parameters:
+        executor: executor object for running pldmtool
 
     """
     try:
@@ -344,13 +387,12 @@
 
 
 def fetch_pdrs_from_bmc(executor):
+    """This is the core function that would fire the getPDR pldmtool command
+    and it then agreegates the data received from all the calls into the
+    respective dictionaries based on the PDR Type.
 
-    """ This is the core function that would fire the getPDR pldmtool command
-        and it then agreegates the data received from all the calls into the
-        respective dictionaries based on the PDR Type.
-
-        Parameters:
-            executor: executor object for running pldmtool
+    Parameters:
+        executor: executor object for running pldmtool
 
     """
 
@@ -364,7 +406,8 @@
     for handle_number, my_dic in get_pdrs(executor):
         if sys.stdout.isatty():
             sys.stdout.write(
-                "Fetching PDR's from BMC : %8d\r" % (handle_number))
+                "Fetching PDR's from BMC : %8d\r" % (handle_number)
+            )
             sys.stdout.flush()
         if my_dic["PDRType"] == "Entity Association PDR":
             entity_association_pdr[handle_number] = my_dic
@@ -380,33 +423,42 @@
             numeric_pdr[handle_number] = my_dic
     executor.close()
 
-    total_pdrs = len(entity_association_pdr.keys()) + len(tl_pdr.keys()) + \
-        len(state_effecter_pdr.keys()) + len(numeric_pdr.keys()) + \
-        len(state_sensor_pdr.keys()) + len(fru_record_set_pdr.keys())
-    print("\nSuccessfully fetched " + str(total_pdrs) + " PDR\'s")
+    total_pdrs = (
+        len(entity_association_pdr.keys())
+        + len(tl_pdr.keys())
+        + len(state_effecter_pdr.keys())
+        + len(numeric_pdr.keys())
+        + len(state_sensor_pdr.keys())
+        + len(fru_record_set_pdr.keys())
+    )
+    print("\nSuccessfully fetched " + str(total_pdrs) + " PDR's")
     print("Number of FRU Record PDR's : ", len(fru_record_set_pdr.keys()))
     print("Number of TerminusLocator PDR's : ", len(tl_pdr.keys()))
     print("Number of State Sensor PDR's : ", len(state_sensor_pdr.keys()))
     print("Number of State Effecter PDR's : ", len(state_effecter_pdr.keys()))
     print("Number of Numeric Effecter PDR's : ", len(numeric_pdr.keys()))
-    print("Number of Entity Association PDR's : ",
-          len(entity_association_pdr.keys()))
-    return (entity_association_pdr, state_sensor_pdr,
-            state_effecter_pdr, len(fru_record_set_pdr.keys()))
+    print(
+        "Number of Entity Association PDR's : ",
+        len(entity_association_pdr.keys()),
+    )
+    return (
+        entity_association_pdr,
+        state_sensor_pdr,
+        state_effecter_pdr,
+        len(fru_record_set_pdr.keys()),
+    )
 
 
 def main():
+    """Create a summary table capturing the information of all the PDR's
+    from the BMC & also create a diagram that captures the entity
+    association hierarchy."""
 
-    """ Create a summary table capturing the information of all the PDR's
-        from the BMC & also create a diagram that captures the entity
-        association hierarchy."""
-
-    parser = argparse.ArgumentParser(prog='pldm_visualise_pdrs.py')
-    parser.add_argument('--bmc', type=str, help="BMC IPAddress/BMC Hostname")
-    parser.add_argument('--user', type=str, help="BMC username")
-    parser.add_argument('--password', type=str, help="BMC Password")
-    parser.add_argument('--port', type=int, help="BMC SSH port",
-                        default=22)
+    parser = argparse.ArgumentParser(prog="pldm_visualise_pdrs.py")
+    parser.add_argument("--bmc", type=str, help="BMC IPAddress/BMC Hostname")
+    parser.add_argument("--user", type=str, help="BMC username")
+    parser.add_argument("--password", type=str, help="BMC Password")
+    parser.add_argument("--port", type=int, help="BMC SSH port", default=22)
     args = parser.parse_args()
 
     extra_cfg = {}
@@ -417,26 +469,34 @@
                 ssh_config.parse(f)
                 host_config = ssh_config.lookup(args.bmc)
                 if host_config:
-                    if 'hostname' in host_config:
-                        args.bmc = host_config['hostname']
-                    if 'user' in host_config and args.user is None:
-                        args.user = host_config['user']
-                    if 'proxycommand' in host_config:
-                        extra_cfg['sock'] = paramiko.ProxyCommand(
-                            host_config['proxycommand'])
+                    if "hostname" in host_config:
+                        args.bmc = host_config["hostname"]
+                    if "user" in host_config and args.user is None:
+                        args.user = host_config["user"]
+                    if "proxycommand" in host_config:
+                        extra_cfg["sock"] = paramiko.ProxyCommand(
+                            host_config["proxycommand"]
+                        )
         except FileNotFoundError:
             pass
 
         executor = ParamikoExecutor(
-            args.bmc, args.user, args.password, args.port, **extra_cfg)
-    elif shutil.which('pldmtool'):
+            args.bmc, args.user, args.password, args.port, **extra_cfg
+        )
+    elif shutil.which("pldmtool"):
         executor = SubprocessExecutor()
     else:
-        sys.exit("Can't find any PDRs: specify remote BMC with --bmc or "
-                 "install pldmtool.")
+        sys.exit(
+            "Can't find any PDRs: specify remote BMC with --bmc or "
+            "install pldmtool."
+        )
 
-    association_pdr, state_sensor_pdr, state_effecter_pdr, counter = \
-        fetch_pdrs_from_bmc(executor)
+    (
+        association_pdr,
+        state_sensor_pdr,
+        state_effecter_pdr,
+        counter,
+    ) = fetch_pdrs_from_bmc(executor)
     draw_entity_associations(association_pdr, counter)
     prepare_summary_report(state_sensor_pdr, state_effecter_pdr)