Revert "black: re-format"

This reverts commit 5731818de0ce446ceaafc7e75ae39da1b69942ae.
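
In practice this restores the pre-black style throughout
ffdc/ffdc_collector.py: single-quoted strings, the previous import
ordering, hanging-indent parameter lists, and long logger/command
strings left unwrapped. As a small illustration, taken from the
plugin_error_dict hunk below, the revert turns the first form back
into the second:

    # black output being reverted
    plugin_error_dict = {
        "exit_on_error": False,
        "continue_on_error": False,
    }

    # pre-black style restored by this revert
    plugin_error_dict = {
        'exit_on_error': False,
        'continue_on_error': False,
    }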

Signed-off-by: George Keishing <gkeishin@in.ibm.com>
Change-Id: Ie61cdc8c7f2825b0d9d66be87a6a3a058de2b372
diff --git a/ffdc/ffdc_collector.py b/ffdc/ffdc_collector.py
index 34f6d89..9afd0ad 100644
--- a/ffdc/ffdc_collector.py
+++ b/ffdc/ffdc_collector.py
@@ -4,19 +4,19 @@
 See class prolog below for details.
 """
 
-import json
-import logging
-import os
-import platform
-import re
-import subprocess
-import sys
-import time
-from errno import EACCES, EPERM
-
-import yaml
 from ssh_utility import SSHRemoteclient
 from telnet_utility import TelnetRemoteclient
+from errno import EACCES, EPERM
+
+import os
+import re
+import sys
+import yaml
+import json
+import time
+import logging
+import platform
+import subprocess
 
 script_dir = os.path.dirname(os.path.abspath(__file__))
 sys.path.append(script_dir)
@@ -42,11 +42,11 @@
        - arg1
        - arg2
 """
-plugin_dir = __file__.split(__file__.split("/")[-1])[0] + "/plugins"
+plugin_dir = __file__.split(__file__.split("/")[-1])[0] + '/plugins'
 sys.path.append(plugin_dir)
 try:
     for module in os.listdir(plugin_dir):
-        if module == "__init__.py" or module[-3:] != ".py":
+        if module == '__init__.py' or module[-3:] != '.py':
             continue
         plugin_module = "plugins." + module[:-3]
         # To access the module plugin.<module name>.<function>
@@ -98,35 +98,34 @@
 global_plugin_type_list = []
 
 # Path where logs are to be stored or written.
-global_log_store_path = ""
+global_log_store_path = ''
 
 # Plugin error state defaults.
 plugin_error_dict = {
-    "exit_on_error": False,
-    "continue_on_error": False,
+    'exit_on_error': False,
+    'continue_on_error': False,
 }
 
 
 class ffdc_collector:
+
     r"""
     Execute commands from configuration file to collect log files.
     Fetch and store generated files at the specified location.
 
     """
 
-    def __init__(
-        self,
-        hostname,
-        username,
-        password,
-        ffdc_config,
-        location,
-        remote_type,
-        remote_protocol,
-        env_vars,
-        econfig,
-        log_level,
-    ):
+    def __init__(self,
+                 hostname,
+                 username,
+                 password,
+                 ffdc_config,
+                 location,
+                 remote_type,
+                 remote_protocol,
+                 env_vars,
+                 econfig,
+                 log_level):
         r"""
         Description of argument(s):
 
@@ -156,7 +155,7 @@
         self.env_vars = env_vars
         self.econfig = econfig
         self.start_time = 0
-        self.elapsed_time = ""
+        self.elapsed_time = ''
         self.logger = None
 
         # Set prefix values for scp files and directory.
@@ -174,7 +173,7 @@
 
         if self.verify_script_env():
             # Load default or user define YAML configuration file.
-            with open(self.ffdc_config, "r") as file:
+            with open(self.ffdc_config, 'r') as file:
                 try:
                     self.ffdc_actions = yaml.load(file, Loader=yaml.SafeLoader)
                 except yaml.YAMLError as e:
@@ -183,9 +182,7 @@
 
             if self.target_type not in self.ffdc_actions.keys():
                 self.logger.error(
-                    "\n\tERROR: %s is not listed in %s.\n\n"
-                    % (self.target_type, self.ffdc_config)
-                )
+                    "\n\tERROR: %s is not listed in %s.\n\n" % (self.target_type, self.ffdc_config))
                 sys.exit(-1)
         else:
             sys.exit(-1)
@@ -196,62 +193,43 @@
         self.load_env()
 
     def verify_script_env(self):
+
         # Import to log version
         import click
         import paramiko
 
         run_env_ok = True
 
-        redfishtool_version = (
-            self.run_tool_cmd("redfishtool -V").split(" ")[2].strip("\n")
-        )
-        ipmitool_version = self.run_tool_cmd("ipmitool -V").split(" ")[2]
+        redfishtool_version = self.run_tool_cmd('redfishtool -V').split(' ')[2].strip('\n')
+        ipmitool_version = self.run_tool_cmd('ipmitool -V').split(' ')[2]
 
         self.logger.info("\n\t---- Script host environment ----")
-        self.logger.info(
-            "\t{:<10}  {:<10}".format("Script hostname", os.uname()[1])
-        )
-        self.logger.info(
-            "\t{:<10}  {:<10}".format("Script host os", platform.platform())
-        )
-        self.logger.info(
-            "\t{:<10}  {:>10}".format("Python", platform.python_version())
-        )
-        self.logger.info("\t{:<10}  {:>10}".format("PyYAML", yaml.__version__))
-        self.logger.info("\t{:<10}  {:>10}".format("click", click.__version__))
-        self.logger.info(
-            "\t{:<10}  {:>10}".format("paramiko", paramiko.__version__)
-        )
-        self.logger.info(
-            "\t{:<10}  {:>9}".format("redfishtool", redfishtool_version)
-        )
-        self.logger.info(
-            "\t{:<10}  {:>12}".format("ipmitool", ipmitool_version)
-        )
+        self.logger.info("\t{:<10}  {:<10}".format('Script hostname', os.uname()[1]))
+        self.logger.info("\t{:<10}  {:<10}".format('Script host os', platform.platform()))
+        self.logger.info("\t{:<10}  {:>10}".format('Python', platform.python_version()))
+        self.logger.info("\t{:<10}  {:>10}".format('PyYAML', yaml.__version__))
+        self.logger.info("\t{:<10}  {:>10}".format('click', click.__version__))
+        self.logger.info("\t{:<10}  {:>10}".format('paramiko', paramiko.__version__))
+        self.logger.info("\t{:<10}  {:>9}".format('redfishtool', redfishtool_version))
+        self.logger.info("\t{:<10}  {:>12}".format('ipmitool', ipmitool_version))
 
-        if eval(yaml.__version__.replace(".", ",")) < (5, 3, 0):
-            self.logger.error(
-                "\n\tERROR: Python or python packages do not meet minimum"
-                " version requirement."
-            )
-            self.logger.error(
-                "\tERROR: PyYAML version 5.3.0 or higher is needed.\n"
-            )
+        if eval(yaml.__version__.replace('.', ',')) < (5, 3, 0):
+            self.logger.error("\n\tERROR: Python or python packages do not meet minimum version requirement.")
+            self.logger.error("\tERROR: PyYAML version 5.3.0 or higher is needed.\n")
             run_env_ok = False
 
         self.logger.info("\t---- End script host environment ----")
         return run_env_ok
 
-    def script_logging(self, log_level_attr):
+    def script_logging(self,
+                       log_level_attr):
         r"""
         Create logger
 
         """
         self.logger = logging.getLogger()
         self.logger.setLevel(log_level_attr)
-        log_file_handler = logging.FileHandler(
-            self.ffdc_dir_path + "collector.log"
-        )
+        log_file_handler = logging.FileHandler(self.ffdc_dir_path + "collector.log")
 
         stdout_handler = logging.StreamHandler(sys.stdout)
         self.logger.addHandler(log_file_handler)
@@ -267,15 +245,11 @@
         """
         response = os.system("ping -c 1 %s  2>&1 >/dev/null" % self.hostname)
         if response == 0:
-            self.logger.info(
-                "\n\t[Check] %s is ping-able.\t\t [OK]" % self.hostname
-            )
+            self.logger.info("\n\t[Check] %s is ping-able.\t\t [OK]" % self.hostname)
             return True
         else:
             self.logger.error(
-                "\n\tERROR: %s is not ping-able. FFDC collection aborted.\n"
-                % self.hostname
-            )
+                "\n\tERROR: %s is not ping-able. FFDC collection aborted.\n" % self.hostname)
             sys.exit(-1)
 
     def collect_ffdc(self):
@@ -284,9 +258,7 @@
 
         """
 
-        self.logger.info(
-            "\n\t---- Start communicating with %s ----" % self.hostname
-        )
+        self.logger.info("\n\t---- Start communicating with %s ----" % self.hostname)
         self.start_time = time.time()
 
         # Find the list of target and protocol supported.
@@ -298,40 +270,24 @@
                 continue
 
             for k, v in config_dict[target_type].items():
-                if (
-                    config_dict[target_type][k]["PROTOCOL"][0]
-                    not in check_protocol_list
-                ):
-                    check_protocol_list.append(
-                        config_dict[target_type][k]["PROTOCOL"][0]
-                    )
+                if config_dict[target_type][k]['PROTOCOL'][0] not in check_protocol_list:
+                    check_protocol_list.append(config_dict[target_type][k]['PROTOCOL'][0])
 
-        self.logger.info(
-            "\n\t %s protocol type: %s"
-            % (self.target_type, check_protocol_list)
-        )
+        self.logger.info("\n\t %s protocol type: %s" % (self.target_type, check_protocol_list))
 
         verified_working_protocol = self.verify_protocol(check_protocol_list)
 
         if verified_working_protocol:
-            self.logger.info(
-                "\n\t---- Completed protocol pre-requisite check ----\n"
-            )
+            self.logger.info("\n\t---- Completed protocol pre-requisite check ----\n")
 
         # Verify top level directory exists for storage
         self.validate_local_store(self.location)
 
-        if (self.remote_protocol not in verified_working_protocol) and (
-            self.remote_protocol != "ALL"
-        ):
-            self.logger.info(
-                "\n\tWorking protocol list: %s" % verified_working_protocol
-            )
+        if ((self.remote_protocol not in verified_working_protocol) and (self.remote_protocol != 'ALL')):
+            self.logger.info("\n\tWorking protocol list: %s" % verified_working_protocol)
             self.logger.error(
-                "\tERROR: Requested protocol %s is not in working protocol"
-                " list.\n"
-                % self.remote_protocol
-            )
+                '\tERROR: Requested protocol %s is not in working protocol list.\n'
+                % self.remote_protocol)
             sys.exit(-1)
         else:
             self.generate_ffdc(verified_working_protocol)
@@ -342,15 +298,12 @@
 
         """
 
-        self.ssh_remoteclient = SSHRemoteclient(
-            self.hostname, self.username, self.password
-        )
+        self.ssh_remoteclient = SSHRemoteclient(self.hostname,
+                                                self.username,
+                                                self.password)
 
         if self.ssh_remoteclient.ssh_remoteclient_login():
-            self.logger.info(
-                "\n\t[Check] %s SSH connection established.\t [OK]"
-                % self.hostname
-            )
+            self.logger.info("\n\t[Check] %s SSH connection established.\t [OK]" % self.hostname)
 
             # Check scp connection.
             # If scp connection fails,
@@ -358,30 +311,21 @@
             self.ssh_remoteclient.scp_connection()
             return True
         else:
-            self.logger.info(
-                "\n\t[Check] %s SSH connection.\t [NOT AVAILABLE]"
-                % self.hostname
-            )
+            self.logger.info("\n\t[Check] %s SSH connection.\t [NOT AVAILABLE]" % self.hostname)
             return False
 
     def telnet_to_target_system(self):
         r"""
         Open a telnet connection to targeted system.
         """
-        self.telnet_remoteclient = TelnetRemoteclient(
-            self.hostname, self.username, self.password
-        )
+        self.telnet_remoteclient = TelnetRemoteclient(self.hostname,
+                                                      self.username,
+                                                      self.password)
         if self.telnet_remoteclient.tn_remoteclient_login():
-            self.logger.info(
-                "\n\t[Check] %s Telnet connection established.\t [OK]"
-                % self.hostname
-            )
+            self.logger.info("\n\t[Check] %s Telnet connection established.\t [OK]" % self.hostname)
             return True
         else:
-            self.logger.info(
-                "\n\t[Check] %s Telnet connection.\t [NOT AVAILABLE]"
-                % self.hostname
-            )
+            self.logger.info("\n\t[Check] %s Telnet connection.\t [NOT AVAILABLE]" % self.hostname)
             return False
 
     def generate_ffdc(self, working_protocol_list):
@@ -392,12 +336,8 @@
         working_protocol_list    list of confirmed working protocols to connect to remote host.
         """
 
-        self.logger.info(
-            "\n\t---- Executing commands on " + self.hostname + " ----"
-        )
-        self.logger.info(
-            "\n\tWorking protocol list: %s" % working_protocol_list
-        )
+        self.logger.info("\n\t---- Executing commands on " + self.hostname + " ----")
+        self.logger.info("\n\tWorking protocol list: %s" % working_protocol_list)
 
         config_dict = self.ffdc_actions
         for target_type in config_dict.keys():
@@ -405,47 +345,40 @@
                 continue
 
             self.logger.info("\n\tFFDC Path: %s " % self.ffdc_dir_path)
-            global_plugin_dict["global_log_store_path"] = self.ffdc_dir_path
+            global_plugin_dict['global_log_store_path'] = self.ffdc_dir_path
             self.logger.info("\tSystem Type: %s" % target_type)
             for k, v in config_dict[target_type].items():
-                if (
-                    self.remote_protocol not in working_protocol_list
-                    and self.remote_protocol != "ALL"
-                ):
+
+                if self.remote_protocol not in working_protocol_list \
+                        and self.remote_protocol != 'ALL':
                     continue
 
-                protocol = config_dict[target_type][k]["PROTOCOL"][0]
+                protocol = config_dict[target_type][k]['PROTOCOL'][0]
 
                 if protocol not in working_protocol_list:
                     continue
 
                 if protocol in working_protocol_list:
-                    if protocol == "SSH" or protocol == "SCP":
+                    if protocol == 'SSH' or protocol == 'SCP':
                         self.protocol_ssh(protocol, target_type, k)
-                    elif protocol == "TELNET":
+                    elif protocol == 'TELNET':
                         self.protocol_telnet(target_type, k)
-                    elif (
-                        protocol == "REDFISH"
-                        or protocol == "IPMI"
-                        or protocol == "SHELL"
-                    ):
+                    elif protocol == 'REDFISH' or protocol == 'IPMI' or protocol == 'SHELL':
                         self.protocol_execute(protocol, target_type, k)
                 else:
-                    self.logger.error(
-                        "\n\tERROR: %s is not available for %s."
-                        % (protocol, self.hostname)
-                    )
+                    self.logger.error("\n\tERROR: %s is not available for %s." % (protocol, self.hostname))
 
         # Close network connection after collecting all files
-        self.elapsed_time = time.strftime(
-            "%H:%M:%S", time.gmtime(time.time() - self.start_time)
-        )
+        self.elapsed_time = time.strftime("%H:%M:%S", time.gmtime(time.time() - self.start_time))
         if self.ssh_remoteclient:
             self.ssh_remoteclient.ssh_remoteclient_disconnect()
         if self.telnet_remoteclient:
             self.telnet_remoteclient.tn_remoteclient_disconnect()
 
-    def protocol_ssh(self, protocol, target_type, sub_type):
+    def protocol_ssh(self,
+                     protocol,
+                     target_type,
+                     sub_type):
         r"""
         Perform actions using SSH and SCP protocols.
 
@@ -455,50 +388,39 @@
         sub_type            Group type of commands.
         """
 
-        if protocol == "SCP":
+        if protocol == 'SCP':
             self.group_copy(self.ffdc_actions[target_type][sub_type])
         else:
-            self.collect_and_copy_ffdc(
-                self.ffdc_actions[target_type][sub_type]
-            )
+            self.collect_and_copy_ffdc(self.ffdc_actions[target_type][sub_type])
 
-    def protocol_telnet(self, target_type, sub_type):
+    def protocol_telnet(self,
+                        target_type,
+                        sub_type):
         r"""
         Perform actions using telnet protocol.
         Description of argument(s):
         target_type          OS Type of remote host.
         """
-        self.logger.info(
-            "\n\t[Run] Executing commands on %s using %s"
-            % (self.hostname, "TELNET")
-        )
+        self.logger.info("\n\t[Run] Executing commands on %s using %s" % (self.hostname, 'TELNET'))
         telnet_files_saved = []
         progress_counter = 0
-        list_of_commands = self.ffdc_actions[target_type][sub_type]["COMMANDS"]
+        list_of_commands = self.ffdc_actions[target_type][sub_type]['COMMANDS']
         for index, each_cmd in enumerate(list_of_commands, start=0):
             command_txt, command_timeout = self.unpack_command(each_cmd)
-            result = self.telnet_remoteclient.execute_command(
-                command_txt, command_timeout
-            )
+            result = self.telnet_remoteclient.execute_command(command_txt, command_timeout)
             if result:
                 try:
-                    targ_file = self.ffdc_actions[target_type][sub_type][
-                        "FILES"
-                    ][index]
+                    targ_file = self.ffdc_actions[target_type][sub_type]['FILES'][index]
                 except IndexError:
                     targ_file = command_txt
                     self.logger.warning(
-                        "\n\t[WARN] Missing filename to store data from"
-                        " telnet %s." % each_cmd
-                    )
-                    self.logger.warning(
-                        "\t[WARN] Data will be stored in %s." % targ_file
-                    )
-                targ_file_with_path = (
-                    self.ffdc_dir_path + self.ffdc_prefix + targ_file
-                )
+                        "\n\t[WARN] Missing filename to store data from telnet %s." % each_cmd)
+                    self.logger.warning("\t[WARN] Data will be stored in %s." % targ_file)
+                targ_file_with_path = (self.ffdc_dir_path
+                                       + self.ffdc_prefix
+                                       + targ_file)
                 # Creates a new file
-                with open(targ_file_with_path, "w") as fp:
+                with open(targ_file_with_path, 'w') as fp:
                     fp.write(result)
                     fp.close
                     telnet_files_saved.append(targ_file)
@@ -508,7 +430,10 @@
         for file in telnet_files_saved:
             self.logger.info("\n\t\tSuccessfully save file " + file + ".")
 
-    def protocol_execute(self, protocol, target_type, sub_type):
+    def protocol_execute(self,
+                         protocol,
+                         target_type,
+                         sub_type):
         r"""
         Perform actions for a given protocol.
 
@@ -518,36 +443,27 @@
         sub_type            Group type of commands.
         """
 
-        self.logger.info(
-            "\n\t[Run] Executing commands to %s using %s"
-            % (self.hostname, protocol)
-        )
+        self.logger.info("\n\t[Run] Executing commands to %s using %s" % (self.hostname, protocol))
         executed_files_saved = []
         progress_counter = 0
-        list_of_cmd = self.get_command_list(
-            self.ffdc_actions[target_type][sub_type]
-        )
+        list_of_cmd = self.get_command_list(self.ffdc_actions[target_type][sub_type])
         for index, each_cmd in enumerate(list_of_cmd, start=0):
             plugin_call = False
             if isinstance(each_cmd, dict):
-                if "plugin" in each_cmd:
+                if 'plugin' in each_cmd:
                     # If the error is set and plugin explicitly
                     # requested to skip execution on error..
-                    if plugin_error_dict[
-                        "exit_on_error"
-                    ] and self.plugin_error_check(each_cmd["plugin"]):
-                        self.logger.info(
-                            "\n\t[PLUGIN-ERROR] exit_on_error: %s"
-                            % plugin_error_dict["exit_on_error"]
-                        )
-                        self.logger.info(
-                            "\t[PLUGIN-SKIP] %s" % each_cmd["plugin"][0]
-                        )
+                    if plugin_error_dict['exit_on_error'] and \
+                            self.plugin_error_check(each_cmd['plugin']):
+                        self.logger.info("\n\t[PLUGIN-ERROR] exit_on_error: %s" %
+                                         plugin_error_dict['exit_on_error'])
+                        self.logger.info("\t[PLUGIN-SKIP] %s" %
+                                         each_cmd['plugin'][0])
                         continue
                     plugin_call = True
                     # call the plugin
                     self.logger.info("\n\t[PLUGIN-START]")
-                    result = self.execute_plugin_block(each_cmd["plugin"])
+                    result = self.execute_plugin_block(each_cmd['plugin'])
                     self.logger.info("\t[PLUGIN-END]\n")
             else:
                 each_cmd = self.yaml_env_and_plugin_vars_populate(each_cmd)
@@ -556,31 +472,23 @@
                 result = self.run_tool_cmd(each_cmd)
             if result:
                 try:
-                    file_name = self.get_file_list(
-                        self.ffdc_actions[target_type][sub_type]
-                    )[index]
+                    file_name = self.get_file_list(self.ffdc_actions[target_type][sub_type])[index]
                     # If file is specified as None.
                     if file_name == "None":
                         continue
-                    targ_file = self.yaml_env_and_plugin_vars_populate(
-                        file_name
-                    )
+                    targ_file = self.yaml_env_and_plugin_vars_populate(file_name)
                 except IndexError:
-                    targ_file = each_cmd.split("/")[-1]
+                    targ_file = each_cmd.split('/')[-1]
                     self.logger.warning(
-                        "\n\t[WARN] Missing filename to store data from %s."
-                        % each_cmd
-                    )
-                    self.logger.warning(
-                        "\t[WARN] Data will be stored in %s." % targ_file
-                    )
+                        "\n\t[WARN] Missing filename to store data from %s." % each_cmd)
+                    self.logger.warning("\t[WARN] Data will be stored in %s." % targ_file)
 
-                targ_file_with_path = (
-                    self.ffdc_dir_path + self.ffdc_prefix + targ_file
-                )
+                targ_file_with_path = (self.ffdc_dir_path
+                                       + self.ffdc_prefix
+                                       + targ_file)
 
                 # Creates a new file
-                with open(targ_file_with_path, "w") as fp:
+                with open(targ_file_with_path, 'w') as fp:
                     if isinstance(result, dict):
                         fp.write(json.dumps(result))
                     else:
@@ -596,9 +504,9 @@
         for file in executed_files_saved:
             self.logger.info("\n\t\tSuccessfully save file " + file + ".")
 
-    def collect_and_copy_ffdc(
-        self, ffdc_actions_for_target_type, form_filename=False
-    ):
+    def collect_and_copy_ffdc(self,
+                              ffdc_actions_for_target_type,
+                              form_filename=False):
         r"""
         Send commands in ffdc_config file to targeted system.
 
@@ -608,32 +516,21 @@
         """
 
         # Executing commands, if any
-        self.ssh_execute_ffdc_commands(
-            ffdc_actions_for_target_type, form_filename
-        )
+        self.ssh_execute_ffdc_commands(ffdc_actions_for_target_type,
+                                       form_filename)
 
         # Copying files
         if self.ssh_remoteclient.scpclient:
-            self.logger.info(
-                "\n\n\tCopying FFDC files from remote system %s.\n"
-                % self.hostname
-            )
+            self.logger.info("\n\n\tCopying FFDC files from remote system %s.\n" % self.hostname)
 
             # Retrieving files from target system
             list_of_files = self.get_file_list(ffdc_actions_for_target_type)
-            self.scp_ffdc(
-                self.ffdc_dir_path,
-                self.ffdc_prefix,
-                form_filename,
-                list_of_files,
-            )
+            self.scp_ffdc(self.ffdc_dir_path, self.ffdc_prefix, form_filename, list_of_files)
         else:
-            self.logger.info(
-                "\n\n\tSkip copying FFDC files from remote system %s.\n"
-                % self.hostname
-            )
+            self.logger.info("\n\n\tSkip copying FFDC files from remote system %s.\n" % self.hostname)
 
-    def get_command_list(self, ffdc_actions_for_target_type):
+    def get_command_list(self,
+                         ffdc_actions_for_target_type):
         r"""
         Fetch list of commands from configuration file
 
@@ -641,12 +538,13 @@
         ffdc_actions_for_target_type    commands and files for the selected remote host type.
         """
         try:
-            list_of_commands = ffdc_actions_for_target_type["COMMANDS"]
+            list_of_commands = ffdc_actions_for_target_type['COMMANDS']
         except KeyError:
             list_of_commands = []
         return list_of_commands
 
-    def get_file_list(self, ffdc_actions_for_target_type):
+    def get_file_list(self,
+                      ffdc_actions_for_target_type):
         r"""
         Fetch list of commands from configuration file
 
@@ -654,12 +552,13 @@
         ffdc_actions_for_target_type    commands and files for the selected remote host type.
         """
         try:
-            list_of_files = ffdc_actions_for_target_type["FILES"]
+            list_of_files = ffdc_actions_for_target_type['FILES']
         except KeyError:
             list_of_files = []
         return list_of_files
 
-    def unpack_command(self, command):
+    def unpack_command(self,
+                       command):
         r"""
         Unpack command from config file
 
@@ -676,9 +575,9 @@
 
         return command_txt, command_timeout
 
-    def ssh_execute_ffdc_commands(
-        self, ffdc_actions_for_target_type, form_filename=False
-    ):
+    def ssh_execute_ffdc_commands(self,
+                                  ffdc_actions_for_target_type,
+                                  form_filename=False):
         r"""
         Send commands in ffdc_config file to targeted system.
 
@@ -686,10 +585,8 @@
         ffdc_actions_for_target_type    commands and files for the selected remote host type.
         form_filename                    if true, pre-pend self.target_type to filename
         """
-        self.logger.info(
-            "\n\t[Run] Executing commands on %s using %s"
-            % (self.hostname, ffdc_actions_for_target_type["PROTOCOL"][0])
-        )
+        self.logger.info("\n\t[Run] Executing commands on %s using %s"
+                         % (self.hostname, ffdc_actions_for_target_type['PROTOCOL'][0]))
 
         list_of_commands = self.get_command_list(ffdc_actions_for_target_type)
         # If command list is empty, returns
@@ -703,19 +600,12 @@
             if form_filename:
                 command_txt = str(command_txt % self.target_type)
 
-            (
-                cmd_exit_code,
-                err,
-                response,
-            ) = self.ssh_remoteclient.execute_command(
-                command_txt, command_timeout
-            )
+            cmd_exit_code, err, response = \
+                self.ssh_remoteclient.execute_command(command_txt, command_timeout)
 
             if cmd_exit_code:
                 self.logger.warning(
-                    "\n\t\t[WARN] %s exits with code %s."
-                    % (command_txt, str(cmd_exit_code))
-                )
+                    "\n\t\t[WARN] %s exits with code %s." % (command_txt, str(cmd_exit_code)))
                 self.logger.warning("\t\t[WARN] %s " % err)
 
             progress_counter += 1
@@ -723,7 +613,8 @@
 
         self.logger.info("\n\t[Run] Commands execution completed.\t\t [OK]")
 
-    def group_copy(self, ffdc_actions_for_target_type):
+    def group_copy(self,
+                   ffdc_actions_for_target_type):
         r"""
         scp group of files (wild card) from remote host.
 
@@ -732,14 +623,9 @@
         """
 
         if self.ssh_remoteclient.scpclient:
-            self.logger.info(
-                "\n\tCopying files from remote system %s via SCP.\n"
-                % self.hostname
-            )
+            self.logger.info("\n\tCopying files from remote system %s via SCP.\n" % self.hostname)
 
-            list_of_commands = self.get_command_list(
-                ffdc_actions_for_target_type
-            )
+            list_of_commands = self.get_command_list(ffdc_actions_for_target_type)
             # If command list is empty, returns
             if not list_of_commands:
                 return
@@ -751,42 +637,29 @@
                     self.logger.error("\t\tInvalid command %s" % command)
                     continue
 
-                (
-                    cmd_exit_code,
-                    err,
-                    response,
-                ) = self.ssh_remoteclient.execute_command(command)
+                cmd_exit_code, err, response = \
+                    self.ssh_remoteclient.execute_command(command)
 
                 # If file does not exist, code take no action.
                 # cmd_exit_code is ignored for this scenario.
                 if response:
-                    scp_result = self.ssh_remoteclient.scp_file_from_remote(
-                        response.split("\n"), self.ffdc_dir_path
-                    )
+                    scp_result = \
+                        self.ssh_remoteclient.scp_file_from_remote(response.split('\n'),
+                                                                   self.ffdc_dir_path)
                     if scp_result:
-                        self.logger.info(
-                            "\t\tSuccessfully copied from "
-                            + self.hostname
-                            + ":"
-                            + command
-                        )
+                        self.logger.info("\t\tSuccessfully copied from " + self.hostname + ':' + command)
                 else:
                     self.logger.info("\t\t%s has no result" % command)
 
         else:
-            self.logger.info(
-                "\n\n\tSkip copying files from remote system %s.\n"
-                % self.hostname
-            )
+            self.logger.info("\n\n\tSkip copying files from remote system %s.\n" % self.hostname)
 
-    def scp_ffdc(
-        self,
-        targ_dir_path,
-        targ_file_prefix,
-        form_filename,
-        file_list=None,
-        quiet=None,
-    ):
+    def scp_ffdc(self,
+                 targ_dir_path,
+                 targ_file_prefix,
+                 form_filename,
+                 file_list=None,
+                 quiet=None):
         r"""
         SCP all files in file_dict to the indicated directory on the local system.
 
@@ -803,37 +676,21 @@
             if form_filename:
                 filename = str(filename % self.target_type)
             source_file_path = filename
-            targ_file_path = (
-                targ_dir_path + targ_file_prefix + filename.split("/")[-1]
-            )
+            targ_file_path = targ_dir_path + targ_file_prefix + filename.split('/')[-1]
 
             # If source file name contains wild card, copy filename as is.
-            if "*" in source_file_path:
-                scp_result = self.ssh_remoteclient.scp_file_from_remote(
-                    source_file_path, self.ffdc_dir_path
-                )
+            if '*' in source_file_path:
+                scp_result = self.ssh_remoteclient.scp_file_from_remote(source_file_path, self.ffdc_dir_path)
             else:
-                scp_result = self.ssh_remoteclient.scp_file_from_remote(
-                    source_file_path, targ_file_path
-                )
+                scp_result = self.ssh_remoteclient.scp_file_from_remote(source_file_path, targ_file_path)
 
             if not quiet:
                 if scp_result:
                     self.logger.info(
-                        "\t\tSuccessfully copied from "
-                        + self.hostname
-                        + ":"
-                        + source_file_path
-                        + ".\n"
-                    )
+                        "\t\tSuccessfully copied from " + self.hostname + ':' + source_file_path + ".\n")
                 else:
                     self.logger.info(
-                        "\t\tFail to copy from "
-                        + self.hostname
-                        + ":"
-                        + source_file_path
-                        + ".\n"
-                    )
+                        "\t\tFail to copy from " + self.hostname + ':' + source_file_path + ".\n")
             else:
                 progress_counter += 1
                 self.print_progress(progress_counter)
@@ -852,9 +709,7 @@
         """
 
         timestr = time.strftime("%Y%m%d-%H%M%S")
-        self.ffdc_dir_path = (
-            self.location + "/" + self.hostname + "_" + timestr + "/"
-        )
+        self.ffdc_dir_path = self.location + "/" + self.hostname + "_" + timestr + "/"
         self.ffdc_prefix = timestr + "_"
         self.validate_local_store(self.ffdc_dir_path)
 
@@ -878,14 +733,10 @@
                 # PermissionError
                 if e.errno == EPERM or e.errno == EACCES:
                     self.logger.error(
-                        "\tERROR: os.makedirs %s failed with"
-                        " PermissionError.\n" % dir_path
-                    )
+                        '\tERROR: os.makedirs %s failed with PermissionError.\n' % dir_path)
                 else:
                     self.logger.error(
-                        "\tERROR: os.makedirs %s failed with %s.\n"
-                        % (dir_path, e.strerror)
-                    )
+                        '\tERROR: os.makedirs %s failed with %s.\n' % (dir_path, e.strerror))
                 sys.exit(-1)
 
     def print_progress(self, progress):
@@ -899,47 +750,34 @@
 
         sys.stdout.write("\r\t" + "+" * progress)
         sys.stdout.flush()
-        time.sleep(0.1)
+        time.sleep(.1)
 
     def verify_redfish(self):
         r"""
         Verify remote host has redfish service active
 
         """
-        redfish_parm = (
-            "redfishtool -r "
-            + self.hostname
-            + " -S Always raw GET /redfish/v1/"
-        )
-        return self.run_tool_cmd(redfish_parm, True)
+        redfish_parm = 'redfishtool -r ' \
+                       + self.hostname + ' -S Always raw GET /redfish/v1/'
+        return (self.run_tool_cmd(redfish_parm, True))
 
     def verify_ipmi(self):
         r"""
         Verify remote host has IPMI LAN service active
 
         """
-        if self.target_type == "OPENBMC":
-            ipmi_parm = (
-                "ipmitool -I lanplus -C 17  -U "
-                + self.username
-                + " -P "
-                + self.password
-                + " -H "
-                + self.hostname
-                + " power status"
-            )
+        if self.target_type == 'OPENBMC':
+            ipmi_parm = 'ipmitool -I lanplus -C 17  -U ' + self.username + ' -P ' \
+                + self.password + ' -H ' + self.hostname + ' power status'
         else:
-            ipmi_parm = (
-                "ipmitool -I lanplus  -P "
-                + self.password
-                + " -H "
-                + self.hostname
-                + " power status"
-            )
+            ipmi_parm = 'ipmitool -I lanplus  -P ' \
+                + self.password + ' -H ' + self.hostname + ' power status'
 
-        return self.run_tool_cmd(ipmi_parm, True)
+        return (self.run_tool_cmd(ipmi_parm, True))
 
-    def run_tool_cmd(self, parms_string, quiet=False):
+    def run_tool_cmd(self,
+                     parms_string,
+                     quiet=False):
         r"""
         Run CLI standard tool or scripts.
 
@@ -948,17 +786,15 @@
         quiet                do not print tool error message if True
         """
 
-        result = subprocess.run(
-            [parms_string],
-            stdout=subprocess.PIPE,
-            stderr=subprocess.PIPE,
-            shell=True,
-            universal_newlines=True,
-        )
+        result = subprocess.run([parms_string],
+                                stdout=subprocess.PIPE,
+                                stderr=subprocess.PIPE,
+                                shell=True,
+                                universal_newlines=True)
 
         if result.stderr and not quiet:
-            self.logger.error("\n\t\tERROR with %s " % parms_string)
-            self.logger.error("\t\t" + result.stderr)
+            self.logger.error('\n\t\tERROR with %s ' % parms_string)
+            self.logger.error('\t\t' + result.stderr)
 
         return result.stdout
 
@@ -975,53 +811,37 @@
             tmp_list.append("SHELL")
 
         for protocol in protocol_list:
-            if self.remote_protocol != "ALL":
+            if self.remote_protocol != 'ALL':
                 if self.remote_protocol != protocol:
                     continue
 
             # Only check SSH/SCP once for both protocols
-            if (
-                protocol == "SSH"
-                or protocol == "SCP"
-                and protocol not in tmp_list
-            ):
+            if protocol == 'SSH' or protocol == 'SCP' and protocol not in tmp_list:
                 if self.ssh_to_target_system():
                     # Add only what user asked.
-                    if self.remote_protocol != "ALL":
+                    if self.remote_protocol != 'ALL':
                         tmp_list.append(self.remote_protocol)
                     else:
-                        tmp_list.append("SSH")
-                        tmp_list.append("SCP")
+                        tmp_list.append('SSH')
+                        tmp_list.append('SCP')
 
-            if protocol == "TELNET":
+            if protocol == 'TELNET':
                 if self.telnet_to_target_system():
                     tmp_list.append(protocol)
 
-            if protocol == "REDFISH":
+            if protocol == 'REDFISH':
                 if self.verify_redfish():
                     tmp_list.append(protocol)
-                    self.logger.info(
-                        "\n\t[Check] %s Redfish Service.\t\t [OK]"
-                        % self.hostname
-                    )
+                    self.logger.info("\n\t[Check] %s Redfish Service.\t\t [OK]" % self.hostname)
                 else:
-                    self.logger.info(
-                        "\n\t[Check] %s Redfish Service.\t\t [NOT AVAILABLE]"
-                        % self.hostname
-                    )
+                    self.logger.info("\n\t[Check] %s Redfish Service.\t\t [NOT AVAILABLE]" % self.hostname)
 
-            if protocol == "IPMI":
+            if protocol == 'IPMI':
                 if self.verify_ipmi():
                     tmp_list.append(protocol)
-                    self.logger.info(
-                        "\n\t[Check] %s IPMI LAN Service.\t\t [OK]"
-                        % self.hostname
-                    )
+                    self.logger.info("\n\t[Check] %s IPMI LAN Service.\t\t [OK]" % self.hostname)
                 else:
-                    self.logger.info(
-                        "\n\t[Check] %s IPMI LAN Service.\t\t [NOT AVAILABLE]"
-                        % self.hostname
-                    )
+                    self.logger.info("\n\t[Check] %s IPMI LAN Service.\t\t [NOT AVAILABLE]" % self.hostname)
 
         return tmp_list
 
@@ -1034,14 +854,14 @@
         # Example YAML:
         # -COMMANDS:
         #    - my_command ${hostname}  ${username}   ${password}
-        os.environ["hostname"] = self.hostname
-        os.environ["username"] = self.username
-        os.environ["password"] = self.password
+        os.environ['hostname'] = self.hostname
+        os.environ['username'] = self.username
+        os.environ['password'] = self.password
 
         # Append default Env.
-        self.env_dict["hostname"] = self.hostname
-        self.env_dict["username"] = self.username
-        self.env_dict["password"] = self.password
+        self.env_dict['hostname'] = self.hostname
+        self.env_dict['username'] = self.username
+        self.env_dict['password'] = self.password
 
         try:
             tmp_env_dict = {}
@@ -1053,14 +873,14 @@
                     self.env_dict[key] = str(value)
 
             if self.econfig:
-                with open(self.econfig, "r") as file:
+                with open(self.econfig, 'r') as file:
                     try:
                         tmp_env_dict = yaml.load(file, Loader=yaml.SafeLoader)
                     except yaml.YAMLError as e:
                         self.logger.error(e)
                         sys.exit(-1)
                 # Export ENV vars.
-                for key, value in tmp_env_dict["env_params"].items():
+                for key, value in tmp_env_dict['env_params'].items():
                     os.environ[key] = str(value)
                     self.env_dict[key] = str(value)
         except json.decoder.JSONDecodeError as e:
@@ -1073,9 +893,8 @@
             if k.lower().find("password") != -1:
                 hidden_text = []
                 hidden_text.append(v)
-                password_regex = (
-                    "(" + "|".join([re.escape(x) for x in hidden_text]) + ")"
-                )
+                password_regex = '(' +\
+                    '|'.join([re.escape(x) for x in hidden_text]) + ')'
                 mask_dict[k] = re.sub(password_regex, "********", v)
 
         self.logger.info(json.dumps(mask_dict, indent=8, sort_keys=False))
@@ -1095,18 +914,16 @@
             self.logger.debug("\tCall func: %s" % eval_string)
             result = eval(eval_string)
             self.logger.info("\treturn: %s" % str(result))
-        except (
-            ValueError,
-            SyntaxError,
-            NameError,
-            AttributeError,
-            TypeError,
-        ) as e:
+        except (ValueError,
+                SyntaxError,
+                NameError,
+                AttributeError,
+                TypeError) as e:
             self.logger.error("\tERROR: execute_python_eval: %s" % e)
             # Set the plugin error state.
-            plugin_error_dict["exit_on_error"] = True
+            plugin_error_dict['exit_on_error'] = True
             self.logger.info("\treturn: PLUGIN_EVAL_ERROR")
-            return "PLUGIN_EVAL_ERROR"
+            return 'PLUGIN_EVAL_ERROR'
 
         return result
 
@@ -1139,18 +956,18 @@
                 - arg2
         """
         try:
-            idx = self.key_index_list_dict("plugin_name", plugin_cmd_list)
-            plugin_name = plugin_cmd_list[idx]["plugin_name"]
+            idx = self.key_index_list_dict('plugin_name', plugin_cmd_list)
+            plugin_name = plugin_cmd_list[idx]['plugin_name']
             # Equal separator means plugin function returns result.
-            if " = " in plugin_name:
+            if ' = ' in plugin_name:
                 # Ex. ['result', 'plugin.foo_func.my_func']
-                plugin_name_args = plugin_name.split(" = ")
+                plugin_name_args = plugin_name.split(' = ')
                 # plugin func return data.
                 for arg in plugin_name_args:
                     if arg == plugin_name_args[-1]:
                         plugin_name = arg
                     else:
-                        plugin_resp = arg.split(",")
+                        plugin_resp = arg.split(',')
                         # ['result1','result2']
                         for x in plugin_resp:
                             global_plugin_list.append(x)
@@ -1158,9 +975,9 @@
 
             # Walk the plugin args ['arg1,'arg2']
             # If the YAML plugin statement 'plugin_args' is not declared.
-            if any("plugin_args" in d for d in plugin_cmd_list):
-                idx = self.key_index_list_dict("plugin_args", plugin_cmd_list)
-                plugin_args = plugin_cmd_list[idx]["plugin_args"]
+            if any('plugin_args' in d for d in plugin_cmd_list):
+                idx = self.key_index_list_dict('plugin_args', plugin_cmd_list)
+                plugin_args = plugin_cmd_list[idx]['plugin_args']
                 if plugin_args:
                     plugin_args = self.yaml_args_populate(plugin_args)
                 else:
@@ -1172,52 +989,43 @@
             # "arg1","arg2","argn"  string as params for function.
             parm_args_str = self.yaml_args_string(plugin_args)
             if parm_args_str:
-                plugin_func = plugin_name + "(" + parm_args_str + ")"
+                plugin_func = plugin_name + '(' + parm_args_str + ')'
             else:
-                plugin_func = plugin_name + "()"
+                plugin_func = plugin_name + '()'
 
             # Execute plugin function.
             if global_plugin_dict:
                 resp = self.execute_python_eval(plugin_func)
                 # Update plugin vars dict if there is any.
-                if resp != "PLUGIN_EVAL_ERROR":
+                if resp != 'PLUGIN_EVAL_ERROR':
                     self.response_args_data(resp)
             else:
                 resp = self.execute_python_eval(plugin_func)
         except Exception as e:
             # Set the plugin error state.
-            plugin_error_dict["exit_on_error"] = True
+            plugin_error_dict['exit_on_error'] = True
             self.logger.error("\tERROR: execute_plugin_block: %s" % e)
             pass
 
         # There is a real error executing the plugin function.
-        if resp == "PLUGIN_EVAL_ERROR":
+        if resp == 'PLUGIN_EVAL_ERROR':
             return resp
 
         # Check if plugin_expects_return (int, string, list,dict etc)
-        if any("plugin_expects_return" in d for d in plugin_cmd_list):
-            idx = self.key_index_list_dict(
-                "plugin_expects_return", plugin_cmd_list
-            )
-            plugin_expects = plugin_cmd_list[idx]["plugin_expects_return"]
+        if any('plugin_expects_return' in d for d in plugin_cmd_list):
+            idx = self.key_index_list_dict('plugin_expects_return', plugin_cmd_list)
+            plugin_expects = plugin_cmd_list[idx]['plugin_expects_return']
             if plugin_expects:
                 if resp:
-                    if (
-                        self.plugin_expect_type(plugin_expects, resp)
-                        == "INVALID"
-                    ):
+                    if self.plugin_expect_type(plugin_expects, resp) == 'INVALID':
                         self.logger.error("\tWARN: Plugin error check skipped")
                     elif not self.plugin_expect_type(plugin_expects, resp):
-                        self.logger.error(
-                            "\tERROR: Plugin expects return data: %s"
-                            % plugin_expects
-                        )
-                        plugin_error_dict["exit_on_error"] = True
+                        self.logger.error("\tERROR: Plugin expects return data: %s"
+                                          % plugin_expects)
+                        plugin_error_dict['exit_on_error'] = True
                 elif not resp:
-                    self.logger.error(
-                        "\tERROR: Plugin func failed to return data"
-                    )
-                    plugin_error_dict["exit_on_error"] = True
+                    self.logger.error("\tERROR: Plugin func failed to return data")
+                    plugin_error_dict['exit_on_error'] = True
 
         return resp
 
@@ -1231,26 +1039,26 @@
         resp_data = ""
 
         # There is nothing to update the plugin response.
-        if len(global_plugin_list) == 0 or plugin_resp == "None":
+        if len(global_plugin_list) == 0 or plugin_resp == 'None':
             return
 
         if isinstance(plugin_resp, str):
-            resp_data = plugin_resp.strip("\r\n\t")
+            resp_data = plugin_resp.strip('\r\n\t')
             resp_list.append(resp_data)
         elif isinstance(plugin_resp, bytes):
-            resp_data = str(plugin_resp, "UTF-8").strip("\r\n\t")
+            resp_data = str(plugin_resp, 'UTF-8').strip('\r\n\t')
             resp_list.append(resp_data)
         elif isinstance(plugin_resp, tuple):
             if len(global_plugin_list) == 1:
                 resp_list.append(plugin_resp)
             else:
                 resp_list = list(plugin_resp)
-                resp_list = [x.strip("\r\n\t") for x in resp_list]
+                resp_list = [x.strip('\r\n\t') for x in resp_list]
         elif isinstance(plugin_resp, list):
             if len(global_plugin_list) == 1:
-                resp_list.append([x.strip("\r\n\t") for x in plugin_resp])
+                resp_list.append([x.strip('\r\n\t') for x in plugin_resp])
             else:
-                resp_list = [x.strip("\r\n\t") for x in plugin_resp]
+                resp_list = [x.strip('\r\n\t') for x in plugin_resp]
         elif isinstance(plugin_resp, int) or isinstance(plugin_resp, float):
             resp_list.append(plugin_resp)
 
@@ -1278,7 +1086,7 @@
 
         plugin_args            arg list ['arg1','arg2,'argn']
         """
-        args_str = ""
+        args_str = ''
         for args in plugin_args:
             if args:
                 if isinstance(args, (int, float)):
@@ -1286,7 +1094,7 @@
                 elif args in global_plugin_type_list:
                     args_str += str(global_plugin_dict[args])
                 else:
-                    args_str += '"' + str(args.strip("\r\n\t")) + '"'
+                    args_str += '"' + str(args.strip('\r\n\t')) + '"'
             # Skip last list element.
             if args != plugin_args[-1]:
                 args_str += ","
@@ -1339,11 +1147,11 @@
         try:
             # Example, list of matching env vars ['username', 'password', 'hostname']
             # Extra escape \ for special symbols. '\$\{([^\}]+)\}' works good.
-            var_name_regex = "\\$\\{([^\\}]+)\\}"
+            var_name_regex = '\\$\\{([^\\}]+)\\}'
             env_var_names_list = re.findall(var_name_regex, yaml_arg_str)
             for var in env_var_names_list:
                 env_var = os.environ[var]
-                env_replace = "${" + var + "}"
+                env_replace = '${' + var + '}'
                 yaml_arg_str = yaml_arg_str.replace(env_replace, env_var)
         except Exception as e:
             self.logger.error("\tERROR:yaml_env_vars_populate: %s" % e)
@@ -1366,14 +1174,10 @@
                         # in eval function call.
                         global_plugin_type_list.append(var)
                     else:
-                        yaml_arg_str = yaml_arg_str.replace(
-                            str(var), str(global_plugin_dict[var])
-                        )
+                        yaml_arg_str = yaml_arg_str.replace(str(var), str(global_plugin_dict[var]))
                 # Just a string like filename or command.
                 else:
-                    yaml_arg_str = yaml_arg_str.replace(
-                        str(var), str(global_plugin_dict[var])
-                    )
+                    yaml_arg_str = yaml_arg_str.replace(str(var), str(global_plugin_dict[var]))
         except (IndexError, ValueError) as e:
             self.logger.error("\tERROR: yaml_plugin_vars_populate: %s" % e)
             pass
@@ -1387,10 +1191,10 @@
         Description of argument(s):
         plugin_dict        Dictionary of plugin error.
         """
-        if any("plugin_error" in d for d in plugin_dict):
+        if any('plugin_error' in d for d in plugin_dict):
             for d in plugin_dict:
-                if "plugin_error" in d:
-                    value = d["plugin_error"]
+                if 'plugin_error' in d:
+                    value = d['plugin_error']
                     # Reference if the error is set or not by plugin.
                     return plugin_error_dict[value]
 
@@ -1410,18 +1214,18 @@
         r"""
         Plugin expect directive type check.
         """
-        if type == "int":
+        if type == 'int':
             return isinstance(data, int)
-        elif type == "float":
+        elif type == 'float':
             return isinstance(data, float)
-        elif type == "str":
+        elif type == 'str':
             return isinstance(data, str)
-        elif type == "list":
+        elif type == 'list':
             return isinstance(data, list)
-        elif type == "dict":
+        elif type == 'dict':
             return isinstance(data, dict)
-        elif type == "tuple":
+        elif type == 'tuple':
             return isinstance(data, tuple)
         else:
             self.logger.info("\tInvalid data type requested: %s" % type)
-            return "INVALID"
+            return 'INVALID'