meta-security: subtree update:b72cc7f87c..95fe86eb98

André Draszik (1):
      linux-yocto: update the bbappend to 5.x

Armin Kuster (36):
      README: add pull request option
      sssd: drop py2 support
      python3-fail2ban: update to latest
      Apparmor: fix some runtime depends
      linux-yocto-dev: remove "+"
      checksecurity: fix runtime issues
      buck-security: fix rdebends and minor style cleanup
      swtpm: fix configure error
      ecryptfs-utils: search nspr header files in ${STAGING_INCDIR}/nspr directory
      bastille: convert to py3
      tpm2-tools: update to 4.1.1
      tpm2-tcti-uefi: fix build issue for i386 machine
      tpm2-tss: update to 2.3.2
      ibmswtpm2: update to 1563
      python3-fail2ban: add 2-3 conversion changes
      google-authenticator-libpam: install module in pam location
      apparmor: update to tip
      clamav: add bison-native to depend
      meta-security-isafw: import layer from Intel
      isafw: fix to work against master
      layer.conf: add zeus
      README.md: update to new maintainer
      clamav-native: missed bison fix
      secuirty*-image: remove dead var and minor cleanup
      libtpm: fix build issue over pod2man
      sssd: python2 not supported
      libseccomp: update to 2.4.3
      lynis: add missing rdepends
      fail2ban: change hardcoded sysklogd to VIRTUAL-RUNTIME_base-utils-syslog
      chkrootkit: add rootkit recipe
      clamav: move to recipes-scanners
      checksec: move to recipe-scanners
      checksecurity: move to recipes-scanners
      buck-security: move to recipes-scanners
      arpwatch: add new recipe
      buck-security: fix runtime issue with missing per module

Bartosz Golaszewski (3):
      linux: drop the bbappend for linux v4.x series
      classes: provide a class for generating dm-verity meta-data images
      dm-verity: add a working example for BeagleBone Black

Haseeb Ashraf (1):
      samhain: dnmalloc hash fix for aarch64 and mips64

Jan Luebbe (2):
      apparmor: fix wrong executable permission on service file
      apparmor: update to 2.13.4

Jonatan Pålsson (10):
      README: Add meta-python to list of layer deps
      sssd: Add PACKAGECONFIG for python2
      sssd: Fix typo in PACKAGECONFIG. cyrpto -> crypto
      sssd: DEPEND on nss if nothing else is chosen
      sssd: Sort PACKAGECONFIG entries
      sssd: Add autofs PACKAGECONFIG
      sssd: Add sudo PACKAGECONFIG
      sssd: Add missing files to SYSTEMD_SERVICE
      sssd: Add missing DEPENDS on jansson
      sssd: Add infopipe PACKAGECONFIG

Kai Kang (1):
      sssd: fix for ldblibdir and systemd etc

Martin Jansa (1):
      layer.conf: update LAYERSERIES_COMPAT for dunfell

Mingli Yu (1):
      linux-yocto: update the bbappend to 5.x

Pierre-Jean Texier via Lists.Yoctoproject.Org (1):
      google-authenticator-libpam: upgrade 1.07 -> 1.08

Yi Zhao (5):
      samhain: fix build with new version attr
      scap-security-guide: fix xml parsing error when build remediation files
      scap-security-guide: pass the correct schema file path to openscap-native
      openscap-daemon: add missing runtime dependencies
      samhain-server: add volatile file for systemd

Change-Id: I3d4a4055cb9420e97d3eacf8436d9b048d34733f
Signed-off-by: Andrew Geissler <geissonator@yahoo.com>
diff --git a/meta-security/README b/meta-security/README
index c419d50..f223fee 100644
--- a/meta-security/README
+++ b/meta-security/README
@@ -24,6 +24,11 @@
   revision: HEAD
   prio: default
 
+  URI: git://git.openembedded.org/meta-openembedded/meta-python
+  branch: master
+  revision: HEAD
+  prio: default
+
   URI: git://git.openembedded.org/meta-openembedded/meta-networking
   branch: master
   revision: HEAD
@@ -64,6 +69,8 @@
 
 Now you can just do 'git send-email origin/master' to send all local patches.
 
+For pull requests, please use create-pull-request and send-pull-request.
+
 Maintainers:    Armin Kuster <akuster808@gmail.com>
 
 
diff --git a/meta-security/classes/dm-verity-img.bbclass b/meta-security/classes/dm-verity-img.bbclass
new file mode 100644
index 0000000..1c0e29b
--- /dev/null
+++ b/meta-security/classes/dm-verity-img.bbclass
@@ -0,0 +1,88 @@
+# SPDX-License-Identifier: MIT
+#
+# Copyright (C) 2020 BayLibre SAS
+# Author: Bartosz Golaszewski <bgolaszewski@baylibre.com>
+#
+# This bbclass allows creating of dm-verity protected partition images. It
+# generates a device image file with dm-verity hash data appended at the end
+# plus the corresponding .env file containing additional information needed
+# to mount the image such as the root hash in the form of shell variables. To
+# assure data integrity, the root hash must be stored in a trusted location
+# or cryptographically signed and verified.
+#
+# Usage:
+#     DM_VERITY_IMAGE = "core-image-full-cmdline" # or other image
+#     DM_VERITY_IMAGE_TYPE = "ext4" # or ext2, ext3 & btrfs
+#     IMAGE_CLASSES += "dm-verity-img"
+#
+# The resulting image can then be used to implement the device mapper block
+# integrity checking on the target device.
+
+# Process the output from veritysetup and generate the corresponding .env
+# file. The output from veritysetup is not very machine-friendly so we need to
+# convert it to some better format. Let's drop the first line (doesn't contain
+# any useful info) and feed the rest to a script.
+process_verity() {
+    local ENV="$OUTPUT.env"
+
+    # Each line contains a key and a value string delimited by ':'. Read the
+    # two parts into separate variables and process them separately. For the
+    # key part: convert the names to upper case and replace spaces with
+    # underscores to create correct shell variable names. For the value part:
+    # just trim all white-spaces.
+    IFS=":"
+    while read KEY VAL; do
+        echo -ne "$KEY" | tr '[:lower:]' '[:upper:]' | sed 's/ /_/g' >> $ENV
+        echo -ne "=" >> $ENV
+        echo "$VAL" | tr -d " \t" >> $ENV
+    done
+
+    # Add partition size
+    echo "DATA_SIZE=$SIZE" >> $ENV
+
+    ln -sf $ENV ${IMAGE_BASENAME}-${MACHINE}.$TYPE.verity.env
+}
+
+verity_setup() {
+    local TYPE=$1
+    local INPUT=${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.$TYPE
+    local SIZE=$(stat --printf="%s" $INPUT)
+    local OUTPUT=$INPUT.verity
+
+    cp -a $INPUT $OUTPUT
+
+    # Let's drop the first line of output (doesn't contain any useful info)
+    # and feed the rest to another function.
+    veritysetup --data-block-size=1024 --hash-offset=$SIZE format $OUTPUT $OUTPUT | tail -n +2 | process_verity
+}
+
+VERITY_TYPES = "ext2.verity ext3.verity ext4.verity btrfs.verity"
+IMAGE_TYPES += "${VERITY_TYPES}"
+CONVERSIONTYPES += "verity"
+CONVERSION_CMD_verity = "verity_setup ${type}"
+CONVERSION_DEPENDS_verity = "cryptsetup-native"
+
+python __anonymous() {
+    verity_image = d.getVar('DM_VERITY_IMAGE')
+    verity_type = d.getVar('DM_VERITY_IMAGE_TYPE')
+    image_fstypes = d.getVar('IMAGE_FSTYPES')
+    pn = d.getVar('PN')
+
+    if verity_image != pn:
+        return # This doesn't concern this image
+
+    if not verity_image or not verity_type:
+        bb.warn('dm-verity-img class inherited but not used')
+        return
+
+    if len(verity_type.split()) != 1:
+        bb.fatal('DM_VERITY_IMAGE_TYPE must contain exactly one type')
+
+    d.appendVar('IMAGE_FSTYPES', ' %s.verity' % verity_type)
+
+    # If we're using wic: we'll have to use partition images and not the rootfs
+    # source plugin so add the appropriate dependency.
+    if 'wic' in image_fstypes:
+        dep = ' %s:do_image_%s' % (pn, verity_type)
+        d.appendVarFlag('do_image_wic', 'depends', dep)
+}
diff --git a/meta-security/conf/layer.conf b/meta-security/conf/layer.conf
index 3e890e1..2c3bd96 100644
--- a/meta-security/conf/layer.conf
+++ b/meta-security/conf/layer.conf
@@ -9,6 +9,6 @@
 BBFILE_PATTERN_security = "^${LAYERDIR}/"
 BBFILE_PRIORITY_security = "8"
 
-LAYERSERIES_COMPAT_security = "zeus"
+LAYERSERIES_COMPAT_security = "dunfell"
 
 LAYERDEPENDS_security = "core openembedded-layer perl-layer networking-layer meta-python"
diff --git a/meta-security/meta-integrity/conf/layer.conf b/meta-security/meta-integrity/conf/layer.conf
index bfc9c6f..b4edac3 100644
--- a/meta-security/meta-integrity/conf/layer.conf
+++ b/meta-security/meta-integrity/conf/layer.conf
@@ -21,7 +21,7 @@
 # interactive shell is enough.
 OE_TERMINAL_EXPORTS += "INTEGRITY_BASE"
 
-LAYERSERIES_COMPAT_integrity = "zeus"
+LAYERSERIES_COMPAT_integrity = "dunfell"
 # ima-evm-utils depends on keyutils from meta-oe
 LAYERDEPENDS_integrity = "core openembedded-layer"
 
diff --git a/meta-security/meta-security-compliance/conf/layer.conf b/meta-security/meta-security-compliance/conf/layer.conf
index 8572a1f..965c837 100644
--- a/meta-security/meta-security-compliance/conf/layer.conf
+++ b/meta-security/meta-security-compliance/conf/layer.conf
@@ -8,7 +8,7 @@
 BBFILE_PATTERN_scanners-layer = "^${LAYERDIR}/"
 BBFILE_PRIORITY_scanners-layer = "10"
 
-LAYERSERIES_COMPAT_scanners-layer = "zeus"
+LAYERSERIES_COMPAT_scanners-layer = "dunfell"
 
 LAYERDEPENDS_scanners-layer = "core openembedded-layer meta-python"
 
diff --git a/meta-security/meta-security-compliance/recipes-auditors/lynis/lynis_2.7.5.bb b/meta-security/meta-security-compliance/recipes-auditors/lynis/lynis_2.7.5.bb
index 21e4517..245761c 100644
--- a/meta-security/meta-security-compliance/recipes-auditors/lynis/lynis_2.7.5.bb
+++ b/meta-security/meta-security-compliance/recipes-auditors/lynis/lynis_2.7.5.bb
@@ -38,4 +38,4 @@
 FILES_${PN} += "${sysconfdir}/developer.prf ${sysconfdir}/default.prf"
 FILES_${PN}-doc += "lynis.8 FAQ README CHANGELOG.md CONTRIBUTIONS.md CONTRIBUTORS.md" 
 
-RDEPENDS_${PN} += "procps"
+RDEPENDS_${PN} += "procps findutils"
diff --git a/meta-security/meta-security-compliance/recipes-openscap/openscap-daemon/openscap-daemon_0.1.10.bb b/meta-security/meta-security-compliance/recipes-openscap/openscap-daemon/openscap-daemon_0.1.10.bb
index ca6e030..a775021 100644
--- a/meta-security/meta-security-compliance/recipes-openscap/openscap-daemon/openscap-daemon_0.1.10.bb
+++ b/meta-security/meta-security-compliance/recipes-openscap/openscap-daemon/openscap-daemon_0.1.10.bb
@@ -17,4 +17,7 @@
 
 S = "${WORKDIR}/git"
 
-RDEPENDS_${PN} = "python"
+RDEPENDS_${PN} = "openscap scap-security-guide \
+                  python3-core python3-dbus \
+                  python3-pygobject \
+                 "
diff --git a/meta-security/meta-security-compliance/recipes-openscap/scap-security-guide/files/0001-Fix-XML-parsing-of-the-remediation-functions-file.patch b/meta-security/meta-security-compliance/recipes-openscap/scap-security-guide/files/0001-Fix-XML-parsing-of-the-remediation-functions-file.patch
new file mode 100644
index 0000000..c0b93e4
--- /dev/null
+++ b/meta-security/meta-security-compliance/recipes-openscap/scap-security-guide/files/0001-Fix-XML-parsing-of-the-remediation-functions-file.patch
@@ -0,0 +1,39 @@
+From 174293162e5840684d967e36840fc1f9f57c90be Mon Sep 17 00:00:00 2001
+From: =?UTF-8?q?Mat=C4=9Bj=20T=C3=BD=C4=8D?= <matyc@redhat.com>
+Date: Thu, 5 Dec 2019 15:02:05 +0100
+Subject: [PATCH] Fix XML "parsing" of the remediation functions file.
+
+A proper fix is not worth the effort, as we aim to kill shared Bash remediation
+with Jinja2 macros.
+
+Upstream-Status: Backport
+[https://github.com/ComplianceAsCode/content/commit/174293162e5840684d967e36840fc1f9f57c90be]
+
+Signed-off-by: Yi Zhao <yi.zhao@windriver.com>
+---
+ ssg/build_remediations.py | 8 ++++----
+ 1 file changed, 4 insertions(+), 4 deletions(-)
+
+diff --git a/ssg/build_remediations.py b/ssg/build_remediations.py
+index 7da807bd6..13e90f732 100644
+--- a/ssg/build_remediations.py
++++ b/ssg/build_remediations.py
+@@ -56,11 +56,11 @@ def get_available_functions(build_dir):
+     remediation_functions = []
+     with codecs.open(xmlfilepath, "r", encoding="utf-8") as xmlfile:
+         filestring = xmlfile.read()
+-        # This regex looks implementation dependent but we can rely on
+-        # ElementTree sorting XML attrs alphabetically. Hidden is guaranteed
+-        # to be the first attr and ID is guaranteed to be second.
++        # This regex looks implementation dependent but we can rely on the element attributes
++        # being present on one line.
++        # We can't rely on ElementTree sorting XML attrs in any way since Python 3.7.
+         remediation_functions = re.findall(
+-            r'<Value hidden=\"true\" id=\"function_(\S+)\"',
++            r'<Value.*id=\"function_(\S+)\"',
+             filestring, re.DOTALL
+         )
+ 
+-- 
+2.17.1
+
diff --git a/meta-security/meta-security-compliance/recipes-openscap/scap-security-guide/files/0002-Fixed-the-broken-fix-when-greedy-regex-ate-the-whole.patch b/meta-security/meta-security-compliance/recipes-openscap/scap-security-guide/files/0002-Fixed-the-broken-fix-when-greedy-regex-ate-the-whole.patch
new file mode 100644
index 0000000..f0c9909
--- /dev/null
+++ b/meta-security/meta-security-compliance/recipes-openscap/scap-security-guide/files/0002-Fixed-the-broken-fix-when-greedy-regex-ate-the-whole.patch
@@ -0,0 +1,35 @@
+From 28a35d63a0cc6b7beb51c77d93bb30778e6960cd Mon Sep 17 00:00:00 2001
+From: =?UTF-8?q?Mat=C4=9Bj=20T=C3=BD=C4=8D?= <matyc@redhat.com>
+Date: Mon, 9 Dec 2019 13:41:47 +0100
+Subject: [PATCH] Fixed the broken fix, when greedy regex ate the whole file.
+
+We want to match attributes in an XML element, not in the whole file.
+
+Upstream-Status: Backport
+[https://github.com/ComplianceAsCode/content/commit/28a35d63a0cc6b7beb51c77d93bb30778e6960cd]
+
+Signed-off-by: Yi Zhao <yi.zhao@windriver.com>
+---
+ ssg/build_remediations.py | 4 ++--
+ 1 file changed, 2 insertions(+), 2 deletions(-)
+
+diff --git a/ssg/build_remediations.py b/ssg/build_remediations.py
+index 13e90f732..edf31c0cf 100644
+--- a/ssg/build_remediations.py
++++ b/ssg/build_remediations.py
+@@ -57,10 +57,10 @@ def get_available_functions(build_dir):
+     with codecs.open(xmlfilepath, "r", encoding="utf-8") as xmlfile:
+         filestring = xmlfile.read()
+         # This regex looks implementation dependent but we can rely on the element attributes
+-        # being present on one line.
++        # being present. Beware, DOTALL means we go through the whole file at once.
+         # We can't rely on ElementTree sorting XML attrs in any way since Python 3.7.
+         remediation_functions = re.findall(
+-            r'<Value.*id=\"function_(\S+)\"',
++            r'<Value[^>]+id=\"function_(\S+)\"',
+             filestring, re.DOTALL
+         )
+ 
+-- 
+2.17.1
+
diff --git a/meta-security/meta-security-compliance/recipes-openscap/scap-security-guide/scap-security-guide.inc b/meta-security/meta-security-compliance/recipes-openscap/scap-security-guide/scap-security-guide.inc
index 3212310..66c2623 100644
--- a/meta-security/meta-security-compliance/recipes-openscap/scap-security-guide/scap-security-guide.inc
+++ b/meta-security/meta-security-compliance/recipes-openscap/scap-security-guide/scap-security-guide.inc
@@ -13,6 +13,9 @@
 inherit cmake pkgconfig python3native
 
 STAGING_OSCAP_BUILDDIR = "${TMPDIR}/work-shared/openscap/oscap-build-artifacts"
+export OSCAP_CPE_PATH="${STAGING_OSCAP_BUILDDIR}${datadir_native}/openscap/cpe"
+export OSCAP_SCHEMA_PATH="${STAGING_OSCAP_BUILDDIR}${datadir_native}/openscap/schemas"
+export OSCAP_XSLT_PATH="${STAGING_OSCAP_BUILDDIR}${datadir_native}/openscap/xsl"
 
 OECMAKE_GENERATOR = "Unix Makefiles"
 
diff --git a/meta-security/meta-security-compliance/recipes-openscap/scap-security-guide/scap-security-guide_git.bb b/meta-security/meta-security-compliance/recipes-openscap/scap-security-guide/scap-security-guide_git.bb
index d9238c0..f35d769 100644
--- a/meta-security/meta-security-compliance/recipes-openscap/scap-security-guide/scap-security-guide_git.bb
+++ b/meta-security/meta-security-compliance/recipes-openscap/scap-security-guide/scap-security-guide_git.bb
@@ -1,7 +1,10 @@
 SUMARRY = "SCAP content for various platforms, OE changes"
 
 SRCREV = "5fdfdcb2e95afbd86ace555beca5d20cbf1043ed"
-SRC_URI = "git://github.com/akuster/scap-security-guide.git;branch=oe-0.1.44;"
+SRC_URI = "git://github.com/akuster/scap-security-guide.git;branch=oe-0.1.44; \
+           file://0001-Fix-XML-parsing-of-the-remediation-functions-file.patch \
+           file://0002-Fixed-the-broken-fix-when-greedy-regex-ate-the-whole.patch \
+          "
 PV = "0.1.44+git${SRCPV}"
 
 require scap-security-guide.inc
diff --git a/meta-security/meta-security-isafw/.gitignore b/meta-security/meta-security-isafw/.gitignore
new file mode 100644
index 0000000..2f836aa
--- /dev/null
+++ b/meta-security/meta-security-isafw/.gitignore
@@ -0,0 +1,2 @@
+*~
+*.pyc
diff --git a/meta-security/meta-security-isafw/COPYING.MIT b/meta-security/meta-security-isafw/COPYING.MIT
new file mode 100644
index 0000000..fb950dc
--- /dev/null
+++ b/meta-security/meta-security-isafw/COPYING.MIT
@@ -0,0 +1,17 @@
+Permission is hereby granted, free of charge, to any person obtaining a copy 
+of this software and associated documentation files (the "Software"), to deal 
+in the Software without restriction, including without limitation the rights 
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 
+copies of the Software, and to permit persons to whom the Software is 
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in 
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 
+THE SOFTWARE.
diff --git a/meta-security/meta-security-isafw/README.md b/meta-security/meta-security-isafw/README.md
new file mode 100644
index 0000000..16041cb
--- /dev/null
+++ b/meta-security/meta-security-isafw/README.md
@@ -0,0 +1,92 @@
+**meta-security-isafw** is an OE layer that allows enabling the Image
+Security Analysis Framework (isafw) for your image builds. 
+
+The primary purpose of isafw is to provide an extensible 
+framework for analysing different security aspects of images 
+during the build process.
+
+The isafw project itself can be found at 
+    https://github.com/01org/isafw
+
+The framework supports a number of callbacks (such as 
+process_package(), process_filesystem(), and etc.) that are invoked 
+by the bitbake during different stages of package and image build. 
+These callbacks are then forwarded for processing to the available
+ISA FW plugins that have registered for these callbacks. 
+Plugins can do their own processing on each stage of the build 
+process and produce security reports. 
+
+Dependencies
+------------
+
+The **meta-security-isafw** layer depends on the Open Embedded
+core layer:
+
+    git://git.openembedded.org/openembedded-core
+
+
+Usage
+-----
+
+In order to enable the isafw during the image build, please add 
+the following line to your build/conf/local.conf file:
+
+```python
+INHERIT += "isafw"
+```
+
+Next you need to update your build/conf/bblayers.conf file with the
+location of meta-security-isafw layer on your filesystem along with
+any other layers needed. e.g.:
+
+```python
+BBLAYERS ?= " \
+  /OE/oe-core/meta \
+  /OE/meta-security/meta-security-isafw \
+  "
+```
+ 
+Also, some isafw plugins require network connection, so in case of a
+proxy setup please make sure to export http_proxy variable into your 
+environment.
+
+In order to produce image reports, you can execute image build 
+normally. For example:
+
+```shell
+bitbake core-image-minimal
+```
+
+If you are only interested to produce a report based on packages 
+and without building an image, please use:
+
+```shell
+bitbake -c analyse_sources_all core-image-minimal
+```
+
+
+Logs
+----
+
+All isafw plugins by default create their logs under the 
+${LOG_DIR}/isafw-report/ directory, where ${LOG_DIR} is a bitbake 
+default location for log files. If you wish to change this location, 
+please define ISAFW_REPORTDIR variable in your local.conf file. 
+
+Patches
+-------
+Send pull requests, patches, comments or questions to yocto@lists.yoctoproject.org
+
+When sending single patches, please use something like:
+'git send-email -1 --to yocto@lists.yoctoproject.org --subject-prefix=meta-security-isafw][PATCH'
+
+These values can be set as defaults for this repository:
+
+$ git config sendemail.to yocto@lists.yoctoproject.org
+$ git config format.subjectPrefix meta-security-isafw][PATCH
+
+Now you can just do 'git send-email origin/master' to send all local patches.
+
+For pull requests, please use create-pull-request and send-pull-request.
+
+Maintainers:    Armin Kuster <akuster808@gmail.com>
diff --git a/meta-security/meta-security-isafw/conf/layer.conf b/meta-security/meta-security-isafw/conf/layer.conf
new file mode 100644
index 0000000..63f990a
--- /dev/null
+++ b/meta-security/meta-security-isafw/conf/layer.conf
@@ -0,0 +1,17 @@
+# We have a conf and classes directory, add to BBPATH
+BBPATH .= ":${LAYERDIR}"
+
+# We have recipes-* directories, add to BBFILES
+BBFILES += "${LAYERDIR}/recipes-*/*/*.bb ${LAYERDIR}/recipes-*/*/*.bbappend"
+
+BBFILE_COLLECTIONS += "security-isafw"
+BBFILE_PATTERN_security-isafw = "^${LAYERDIR}/"
+BBFILE_PRIORITY_security-isafw = "6"
+
+# This should only be incremented on significant changes that will
+# cause compatibility issues with other layers
+LAYERVERSION_security-isafw = "1"
+
+LAYERDEPENDS_security-isafw = "core"
+
+LAYERSERIES_COMPAT_security-isafw = "dunfell"
diff --git a/meta-security/meta-security-isafw/lib/isafw/__init__.py b/meta-security/meta-security-isafw/lib/isafw/__init__.py
new file mode 100644
index 0000000..50527fb
--- /dev/null
+++ b/meta-security/meta-security-isafw/lib/isafw/__init__.py
@@ -0,0 +1,40 @@
+#
+# __init__.py - part of ISA FW
+#
+# Copyright (c) 2015 - 2016, Intel Corporation
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+#    * Redistributions of source code must retain the above copyright notice,
+#      this list of conditions and the following disclaimer.
+#    * Redistributions in binary form must reproduce the above copyright
+#      notice, this list of conditions and the following disclaimer in the
+#      documentation and/or other materials provided with the distribution.
+#    * Neither the name of Intel Corporation nor the names of its contributors
+#      may be used to endorse or promote products derived from this software
+#      without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""isafw
+
+Current Contents:
+
+* isafw.py - main class
+* plugins - ISA plugins
+* plugins/configs - configuration data for the plugins
+"""
+
+__all__ = [
+    'isafw',
+]
diff --git a/meta-security/meta-security-isafw/lib/isafw/isafw.py b/meta-security/meta-security-isafw/lib/isafw/isafw.py
new file mode 100644
index 0000000..a1a76b8
--- /dev/null
+++ b/meta-security/meta-security-isafw/lib/isafw/isafw.py
@@ -0,0 +1,158 @@
+#
+# isafw.py - Main classes for ISA FW
+#
+# Copyright (c) 2015 - 2016, Intel Corporation
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+#    * Redistributions of source code must retain the above copyright notice,
+#      this list of conditions and the following disclaimer.
+#    * Redistributions in binary form must reproduce the above copyright
+#      notice, this list of conditions and the following disclaimer in the
+#      documentation and/or other materials provided with the distribution.
+#    * Neither the name of Intel Corporation nor the names of its contributors
+#      may be used to endorse or promote products derived from this software
+#      without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from __future__ import absolute_import, print_function
+
+import sys
+import traceback
+try:
+    # absolute import
+    import isafw.isaplugins as isaplugins
+except ImportError:
+    # relative import when installing as separate modules
+    import isaplugins
+try:
+    from bb import error
+except ImportError:
+    error = print
+
+__all__ = [
+    'ISA_package',
+    'ISA_pkg_list',
+    'ISA_kernel',
+    'ISA_filesystem',
+    'ISA_config',
+    'ISA',
+]
+
+# classes for representing objects for ISA plugins
+
+# source package
+
+
+class ISA_package:
+    # pkg name                            (mandatory argument)
+    name = ""
+    # full version                        (mandatory argument)
+    version = ""
+    licenses = []                 # list of licences for all subpackages
+    aliases = []                  # list of alias names for packages if exist
+    source_files = []             # list of strings of source files
+    patch_files = []              # list of patch files to be applied
+    path_to_sources = ""          # path to the source files
+
+# package list
+
+
+class ISA_pkg_list:
+    # image name                            (mandatory argument)
+    img_name = ""
+    # path to the pkg list file             (mandatory argument)
+    path_to_list = ""
+
+# kernel
+
+
+class ISA_kernel:
+    # image name                          (mandatory argument)
+    img_name = ""
+    # path to the kernel config file      (mandatory argument)
+    path_to_config = ""
+
+# filesystem
+
+
+class ISA_filesystem:
+    # image name                          (mandatory argument)
+    img_name = ""
+    type = ""                     # filesystem type
+    # path to the fs location             (mandatory argument)
+    path_to_fs = ""
+
+# configuration of ISAFW
+# if both whitelist and blacklist are empty, all available plugins will be used
+# if whitelist has entries, then only whitelisted plugins will be used from a set of available plugins
+# if blacklist has entries, then the specified plugins won't be used even
+# if available and even if specified in whitelist
+
+
+class ISA_config:
+    plugin_whitelist = ""         # comma separated list of plugins to whitelist
+    plugin_blacklist = ""         # comma separated list of plugins to blacklist
+    cacert = None                 # If set, a CA certificate file that replaces the system default one
+    reportdir = ""                # location of produced reports
+    logdir = ""                   # location of produced logs
+    timestamp = ""                # timestamp of the build provided by build system
+    full_reports = False          # produce full reports for plugins, False by default
+    machine = ""                  # name of machine build is produced for
+    la_plugin_image_whitelist = ""# whitelist of images for violating license checks
+    la_plugin_image_blacklist = ""# blacklist of images for violating license checks
+    arch = ""                     # target architecture
+
+class ISA:
+    def call_plugins(self, methodname, *parameters, **keywords):
+        for name in isaplugins.__all__:
+            plugin = getattr(isaplugins, name)
+            method = getattr(plugin, methodname, None)
+            if not method:
+                # Not having init() is an error, everything else is optional.
+                if methodname == "init":
+                    error("No init() defined for plugin %s.\n"
+                          "Skipping this plugin." %
+                          (plugin.getPluginName()))
+                continue
+            if self.ISA_config.plugin_whitelist and plugin.getPluginName() not in self.ISA_config.plugin_whitelist:
+                continue
+            if self.ISA_config.plugin_blacklist and plugin.getPluginName() in self.ISA_config.plugin_blacklist:
+                continue
+            try:
+                method(*parameters, **keywords)
+            except:
+                error("Exception in plugin %s %s():\n%s" %
+                      (plugin.getPluginName(),
+                       methodname,
+                       traceback.format_exc()))
+
+    def __init__(self, ISA_config):
+        self.ISA_config = ISA_config
+        self.call_plugins("init", ISA_config)
+
+    def process_package(self, ISA_package):
+        self.call_plugins("process_package", ISA_package)
+
+    def process_pkg_list(self, ISA_pkg_list):
+        self.call_plugins("process_pkg_list", ISA_pkg_list)
+
+    def process_kernel(self, ISA_kernel):
+        self.call_plugins("process_kernel", ISA_kernel)
+
+    def process_filesystem(self, ISA_filesystem):
+        self.call_plugins("process_filesystem", ISA_filesystem)
+
+    def process_report(self):
+        self.call_plugins("process_report")
diff --git a/meta-security/meta-security-isafw/lib/isafw/isaplugins/ISA_cfa_plugin.py b/meta-security/meta-security-isafw/lib/isafw/isaplugins/ISA_cfa_plugin.py
new file mode 100644
index 0000000..daecba1
--- /dev/null
+++ b/meta-security/meta-security-isafw/lib/isafw/isaplugins/ISA_cfa_plugin.py
@@ -0,0 +1,392 @@
+#
+# ISA_cfa_plugin.py - Compile flag analyzer plugin, part of ISA FW
+# Main functionality is based on build_comp script from Clear linux project
+#
+# Copyright (c) 2015 - 2016, Intel Corporation
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+#    * Redistributions of source code must retain the above copyright notice,
+#      this list of conditions and the following disclaimer.
+#    * Redistributions in binary form must reproduce the above copyright
+#      notice, this list of conditions and the following disclaimer in the
+#      documentation and/or other materials provided with the distribution.
+#    * Neither the name of Intel Corporation nor the names of its contributors
+#      may be used to endorse or promote products derived from this software
+#      without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import subprocess
+import os
+import sys
+import re
+import copy
+try:
+    from lxml import etree
+except ImportError:
+    try:
+        import xml.etree.cElementTree as etree
+    except ImportError:
+        import xml.etree.ElementTree as etree
+
+
+CFChecker = None
+
+
class ISA_CFChecker():
    """Compile Flag Analyzer plugin.

    Walks every file of a filesystem image (in parallel, via a process
    pool) and records binaries that lack hardening: RELRO, stack canary,
    PIE, non-executable stack, group dropping around setuid/setgid, and
    MPX instrumentation.  Results are written as plain-text reports plus
    a JUnit-style XML report.
    """

    # Remains False when the required external tools are missing.
    initialized = False

    def __init__(self, ISA_config):
        """Set up report paths and verify the external tools are present.

        ISA_config must provide: logdir, reportdir, machine, timestamp,
        full_reports.
        """
        # Result lists are per-instance.  Previously they were class
        # attributes, so results accumulated across instances whenever the
        # plugin was re-initialized.
        self.no_relro = []
        self.partial_relro = []
        self.no_canary = []
        self.no_pie = []
        self.execstack = []
        self.execstack_not_defined = []
        self.nodrop_groups = []
        self.no_mpx = []
        self.logfile = ISA_config.logdir + "/isafw_cfalog"
        self.full_report_name = ISA_config.reportdir + "/cfa_full_report_" + \
            ISA_config.machine + "_" + ISA_config.timestamp
        self.problems_report_name = ISA_config.reportdir + \
            "/cfa_problems_report_" + ISA_config.machine + "_" + ISA_config.timestamp
        self.full_reports = ISA_config.full_reports
        self.ISA_filesystem = ""
        # check that checksec and other tools are installed
        tools_errors = _check_tools()
        if tools_errors:
            with open(self.logfile, 'w') as flog:
                flog.write(tools_errors)
                return
        self.initialized = True
        with open(self.logfile, 'w') as flog:
            flog.write("\nPlugin ISA_CFChecker initialized!\n")
        return

    def process_filesystem(self, ISA_filesystem):
        """Analyze every file of ISA_filesystem with a process pool and
        produce the reports.  Requires img_name and path_to_fs to be set.
        """
        self.ISA_filesystem = ISA_filesystem
        fs_path = self.ISA_filesystem.path_to_fs
        img_name = self.ISA_filesystem.img_name
        if (self.initialized):
            if (img_name and fs_path):
                with open(self.logfile, 'a') as flog:
                    flog.write("\n\nFilesystem path is: " + fs_path)
                if self.full_reports:
                    with open(self.full_report_name + "_" + img_name, 'w') as ffull_report:
                        ffull_report.write(
                            "Security-relevant flags for executables for image: " + img_name + '\n')
                        ffull_report.write("With rootfs location at " + fs_path + "\n\n")
                files = self.find_files(fs_path)
                # Imported lazily so merely loading the plugin stays cheap.
                import multiprocessing
                pool = multiprocessing.Pool()
                results = pool.imap(process_file_wrapper, files)
                pool.close()
                pool.join()
                self.process_results(results)
            else:
                with open(self.logfile, 'a') as flog:
                    flog.write(
                        "Mandatory arguments such as image name and path to the filesystem are not provided!\n")
                    flog.write("Not performing the call.\n")
        else:
            with open(self.logfile, 'a') as flog:
                flog.write("Plugin hasn't initialized! Not performing the call.\n")

    def process_results(self, results):
        """Sort per-file results into the problem lists and emit reports.

        Each result is [path, sec_flags, execstack, nodrop_groups, no_mpx,
        log] as produced by process_file().  Paths are reported relative to
        the rootfs by stripping fs_path.
        """
        fs_path = self.ISA_filesystem.path_to_fs
        for result in results:
            if not result:
                with open(self.logfile, 'a') as flog:
                    flog.write("\nError in returned result")
                continue
            with open(self.logfile, 'a') as flog:
                flog.write("\n\nFor file: " + str(result[0]) + "\nlog is: " + str(result[5]))
            if result[1]:
                with open(self.logfile, 'a') as flog:
                    flog.write("\n\nsec_field: " + str(result[1]))
                # 'in' works for both the list (success) and the error
                # string returned by get_security_flags().
                if "No RELRO" in result[1]:
                    self.no_relro.append(result[0].replace(fs_path, ""))
                elif "Partial RELRO" in result[1]:
                    self.partial_relro.append(result[0].replace(fs_path, ""))
                if "No canary found" in result[1]:
                    self.no_canary.append(result[0].replace(fs_path, ""))
                if "No PIE" in result[1]:
                    self.no_pie.append(result[0].replace(fs_path, ""))
            if result[2]:
                if result[2] == "execstack":
                    self.execstack.append(result[0].replace(fs_path, ""))
                elif result[2] == "not_defined":
                    self.execstack_not_defined.append(result[0].replace(fs_path, ""))
            # result[3] / result[4] are plain booleans from process_file().
            if result[3]:
                self.nodrop_groups.append(result[0].replace(fs_path, ""))
            if result[4]:
                self.no_mpx.append(result[0].replace(fs_path, ""))
            self.write_full_report(result)
        self.write_report()
        self.write_report_xml()

    def write_full_report(self, result):
        """Append one file's raw data to the full report (no-op unless
        full reports were requested)."""
        if not self.full_reports:
            return
        fs_path = self.ISA_filesystem.path_to_fs
        img_name = self.ISA_filesystem.img_name
        with open(self.full_report_name + "_" + img_name, 'a') as ffull_report:
            ffull_report.write('\nFile: ' + result[0].replace(fs_path, ""))
            ffull_report.write('\nsecurity flags: ' + str(result[1]))
            ffull_report.write('\nexecstack: ' + str(result[2]))
            ffull_report.write('\nnodrop_groups: ' + str(result[3]))
            ffull_report.write('\nno mpx: ' + str(result[4]))
            ffull_report.write('\n')

    def write_report(self):
        """Write the human-readable problems report, one section per
        hardening category, with pointers to background documentation."""
        fs_path = self.ISA_filesystem.path_to_fs
        img_name = self.ISA_filesystem.img_name
        with open(self.problems_report_name + "_" + img_name, 'w') as fproblems_report:
            fproblems_report.write("Report for image: " + img_name + '\n')
            fproblems_report.write("With rootfs location at " + fs_path + "\n\n")
            fproblems_report.write("Relocation Read-Only\n")
            fproblems_report.write("More information about RELRO and how to enable it:")
            fproblems_report.write(
                " http://tk-blog.blogspot.de/2009/02/relro-not-so-well-known-memory.html\n")
            fproblems_report.write("Files with no RELRO:\n")
            for item in self.no_relro:
                fproblems_report.write(item + '\n')
            fproblems_report.write("Files with partial RELRO:\n")
            for item in self.partial_relro:
                fproblems_report.write(item + '\n')
            fproblems_report.write("\n\nStack protection\n")
            fproblems_report.write(
                "More information about canary stack protection and how to enable it:")
            fproblems_report.write("https://lwn.net/Articles/584225/ \n")
            fproblems_report.write("Files with no canary:\n")
            for item in self.no_canary:
                fproblems_report.write(item + '\n')
            fproblems_report.write("\n\nPosition Independent Executable\n")
            fproblems_report.write("More information about PIE protection and how to enable it:")
            fproblems_report.write(
                "https://securityblog.redhat.com/2012/11/28/position-independent-executables-pie/\n")
            fproblems_report.write("Files with no PIE:\n")
            for item in self.no_pie:
                fproblems_report.write(item + '\n')
            fproblems_report.write("\n\nNon-executable stack\n")
            fproblems_report.write("Files with executable stack enabled:\n")
            for item in self.execstack:
                fproblems_report.write(item + '\n')
            fproblems_report.write("\n\nFiles with no ability to fetch executable stack status:\n")
            for item in self.execstack_not_defined:
                fproblems_report.write(item + '\n')
            # Typo fix: was "Grop initialization".
            fproblems_report.write("\n\nGroup initialization:\n")
            fproblems_report.write(
                "If using setuid/setgid calls in code, one must call initgroups or setgroups\n")
            fproblems_report.write(
                "Files that don't initialize groups while using setuid/setgid:\n")
            for item in self.nodrop_groups:
                fproblems_report.write(item + '\n')
            fproblems_report.write("\n\nMemory Protection Extensions\n")
            fproblems_report.write("More information about MPX protection and how to enable it:")
            fproblems_report.write(
                "https://software.intel.com/sites/default/files/managed/9d/f6/Intel_MPX_EnablingGuide.pdf\n")
            fproblems_report.write("Files that don't have MPX protection enabled:\n")
            for item in self.no_mpx:
                fproblems_report.write(item + '\n')

    def write_report_xml(self):
        """Write the problems as a JUnit-style XML file (one failed
        testcase per flagged file) next to the text report."""
        # One (classname, result-list) pair per hardening category; this
        # replaces eight near-identical copy-pasted loops.
        problem_classes = [
            ('files_with_no_RELRO', self.no_relro),
            ('files_with_partial_RELRO', self.partial_relro),
            ('files_with_no_canary', self.no_canary),
            ('files_with_no_PIE', self.no_pie),
            ('files_with_execstack', self.execstack),
            ('files_with_execstack_not_defined', self.execstack_not_defined),
            ('files_with_nodrop_groups', self.nodrop_groups),
            ('files_with_no_mpx', self.no_mpx),
        ]
        numTests = sum(len(items) for _, items in problem_classes)
        root = etree.Element('testsuite', name='ISA_CFChecker', tests=str(numTests))
        for classname, items in problem_classes:
            for item in items:
                tcase = etree.SubElement(root, 'testcase', classname=classname, name=item)
                etree.SubElement(tcase, 'failure', message=item, type='violation')
        tree = etree.ElementTree(root)
        output = self.problems_report_name + "_" + self.ISA_filesystem.img_name + '.xml'
        try:
            # pretty_print exists only in lxml; the stdlib ElementTree
            # fallback raises TypeError, hence the retry without it.
            tree.write(output, encoding='UTF-8', pretty_print=True, xml_declaration=True)
        except TypeError:
            tree.write(output, encoding='UTF-8', xml_declaration=True)

    def find_files(self, init_path):
        """Return the paths of all regular files beneath init_path."""
        list_of_files = []
        for (dirpath, dirnames, filenames) in os.walk(init_path):
            for f in filenames:
                list_of_files.append(dirpath + "/" + f)
        return list_of_files
+
+
+def _check_tools():
+
+    def _is_in_path(executable):
+        "Check for presence of executable in PATH"
+        for path in os.environ["PATH"].split(os.pathsep):
+            path = path.strip('"')
+            if (os.path.isfile(os.path.join(path, executable)) and
+                    os.access(os.path.join(path, executable), os.X_OK)):
+                return True
+        return False
+
+    tools = {
+        "checksec.sh": "Please install checksec from http://www.trapkit.de/tools/checksec.html\n",
+        "execstack": "Please install execstack from prelink package\n",
+        "readelf": "Please install binutils\n",
+        "objdump": "Please install binutils\n",
+    }
+    output = ""
+    for tool in tools:
+        if not _is_in_path(tool):
+            output += tools[tool]
+    return output
+
+
def get_info(tool, args, file_name):
    """Run ``tool args file_name`` and return its stdout decoded as UTF-8.

    Stderr is discarded; any failure (tool missing, non-zero exit) yields
    an empty string.  PSEUDO_UNLOAD=1 is set in a copy of the environment
    — presumably to escape bitbake's pseudo fakeroot wrapper; TODO confirm.
    """
    # A plain dict copy suffices for subprocess (the original used
    # copy.deepcopy on os.environ, which is needlessly heavy).
    env = dict(os.environ)
    env['PSEUDO_UNLOAD'] = "1"
    cmd = [tool, args, file_name]
    with open(os.devnull, 'wb') as devnull:
        try:
            # Narrowed from a bare except: only subprocess failures and
            # OS-level errors (e.g. tool not installed) mean "no info";
            # KeyboardInterrupt etc. now propagate.
            return subprocess.check_output(cmd, stderr=devnull, env=env).decode('utf-8')
        except (OSError, subprocess.SubprocessError):
            return ""
+
def get_security_flags(file_name):
    """Return checksec.sh's flag fields for file_name.

    On success: a list of strings (the second checksec output line split
    on runs of spaces, trailing field dropped).  On any failure: the
    string "Not able to fetch flags" — callers test membership with 'in',
    which works for both shapes.
    """
    env = dict(os.environ)
    env['PSEUDO_UNLOAD'] = "1"
    cmd = ['checksec.sh', '--file', file_name]
    try:
        result = subprocess.check_output(cmd, env=env).decode('utf-8').splitlines()[1]
    except (OSError, subprocess.SubprocessError, IndexError, UnicodeDecodeError):
        # Narrowed from a bare except; IndexError covers output with
        # fewer than two lines, which the original also swallowed.
        return "Not able to fetch flags"
    # remove ansi escape color sequences
    result = re.sub(r'\x1b[^m]*m', '', result)
    return re.split(r' {2,}', result)[:-1]
+
+
def _mime_type(path, env):
    """Return the mime type file(1) reports for path, or None on failure."""
    try:
        output = subprocess.check_output(['file', '--mime-type', path],
                                         env=env).decode('utf-8')
    except (OSError, subprocess.SubprocessError, UnicodeDecodeError):
        return None
    return output.split()[-1]


def process_file(file):
    """Collect hardening data for a single file.

    Returns [path, sec_flags, execstack, nodrop_groups, no_mpx, log]:
    path is the original argument, sec_flags comes from
    get_security_flags(), execstack is ""/"execstack"/"not_defined",
    nodrop_groups and no_mpx are booleans, and log accumulates
    diagnostics for the caller to write out.
    """
    log = "File from map " + file
    fun_results = [file, [], "", False, False, log]
    if not os.path.isfile(file):
        return fun_results
    env = dict(os.environ)
    env['PSEUDO_UNLOAD'] = "1"
    # getting file type (extracted helper removes the duplicated
    # subprocess call the original had for the symlink case)
    file_type = _mime_type(file, env)
    if file_type is None:
        fun_results[-1] += "\nNot able to decode mime type"
        return fun_results
    # looking for links: resolve and re-query the target
    if "symlink" in file_type:
        file = os.path.realpath(file)
        file_type = _mime_type(file, env)
        if file_type is None:
            fun_results[-1] += "\nNot able to decode mime type"
            return fun_results
    # checking security flags if applies
    if "application" not in file_type:
        return fun_results
    fun_results[-1] += "\nFile type: " + file_type
    # Skip containers/data formats that are "application/..." but not
    # native executables.
    if (("octet-stream" in file_type) or ("dosexec" in file_type) or
            ("archive" in file_type) or ("xml" in file_type) or
            ("gzip" in file_type) or ("postscript" in file_type) or
            ("pdf" in file_type)):
        return fun_results
    fun_results[1] = get_security_flags(file)
    # execstack -q prefixes the path with "X " (exec stack) or "? "
    # (status unknown).
    tmp = get_info("execstack", '-q', file)
    if tmp.startswith("X "):
        fun_results[2] = "execstack"
    elif tmp.startswith("? "):
        fun_results[2] = "not_defined"
    # Flag binaries that change uid AND gid without initializing groups.
    tmp = get_info("readelf", '-s', file)
    if ("setgid@GLIBC" in tmp) or ("setegid@GLIBC" in tmp) or ("setresgid@GLIBC" in tmp):
        if ("setuid@GLIBC" in tmp) or ("seteuid@GLIBC" in tmp) or ("setresuid@GLIBC" in tmp):
            if ("setgroups@GLIBC" not in tmp) and ("initgroups@GLIBC" not in tmp):
                fun_results[3] = True
    # No MPX bound-check instructions anywhere in the disassembly.
    tmp = get_info("objdump", '-d', file)
    if ("bndcu" not in tmp) and ("bndcl" not in tmp) and ("bndmov" not in tmp):
        fun_results[4] = True
    return fun_results
+
def process_file_wrapper(file):
    """Multiprocessing pool entry point: run process_file() and log any
    exception inside the worker before re-raising it to the pool."""
    # Ensures that exceptions get logged with the original backtrace.
    # Without this, they appear with a backtrace rooted in
    # the code which transfers back the result to process_results().
    try:
        return process_file(file)
    except:
        from isafw import isafw
        import traceback
        isafw.error('Internal error:\n%s' % traceback.format_exc())
        raise
+
+# ======== supported callbacks from ISA ============ #
+
+
def init(ISA_config):
    """Plugin entry point: create the module-wide checker instance."""
    global CFChecker
    CFChecker = ISA_CFChecker(ISA_config)
+
+
def getPluginName():
    """Return the name under which ISA registers this plugin."""
    return "ISA_CFChecker"
+
+
def process_filesystem(ISA_filesystem):
    """Plugin callback: delegate to the checker instance built by init()."""
    global CFChecker
    return CFChecker.process_filesystem(ISA_filesystem)
+
+# =================================================== #
diff --git a/meta-security/meta-security-isafw/lib/isafw/isaplugins/ISA_cve_plugin.py b/meta-security/meta-security-isafw/lib/isafw/isaplugins/ISA_cve_plugin.py
new file mode 100644
index 0000000..268aa45
--- /dev/null
+++ b/meta-security/meta-security-isafw/lib/isafw/isaplugins/ISA_cve_plugin.py
@@ -0,0 +1,217 @@
+#
+# ISA_cve_plugin.py - CVE checker plugin, part of ISA FW
+#
+# Copyright (c) 2015 - 2016, Intel Corporation
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+#    * Redistributions of source code must retain the above copyright notice,
+#      this list of conditions and the following disclaimer.
+#    * Redistributions in binary form must reproduce the above copyright
+#      notice, this list of conditions and the following disclaimer in the
+#      documentation and/or other materials provided with the distribution.
+#    * Neither the name of Intel Corporation nor the names of its contributors
+#      may be used to endorse or promote products derived from this software
+#      without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import subprocess
+import os, sys
+import re
+
+CVEChecker = None
+pkglist = "/cve_check_tool_pkglist"
+
+
class ISA_CVEChecker:
    """CVE checker plugin.

    Collects package name/version/patch tuples into a cve-check-tool
    "faux" input file and converts the tool's findings into HTML, CSV
    and JUnit-style XML reports.
    """

    # Remains False until __init__ completes; process_package() also
    # resets it when mandatory package data is missing.
    initialized = False

    def __init__(self, ISA_config):
        """Store report locations.  ISA_config must provide cacert,
        reportdir, timestamp, logdir and machine."""
        self.cacert = ISA_config.cacert
        self.reportdir = ISA_config.reportdir
        self.timestamp = ISA_config.timestamp
        self.logfile = ISA_config.logdir + "/isafw_cvelog"
        self.report_name = ISA_config.reportdir + "/cve_report_" + \
            ISA_config.machine + "_" + ISA_config.timestamp
        self.initialized = True
        with open(self.logfile, 'a') as flog:
            flog.write("\nPlugin ISA_CVEChecker initialized!\n")
        # NOTE(review): a check that cve-check-tool is installed was
        # intended here (the original had a dangling comment and unused
        # variable) but never implemented; failures surface later in
        # process_report_type() instead.

    def process_package(self, ISA_pkg):
        """Append one faux-format line (plus one per alias) for ISA_pkg
        to the per-run faux package list consumed by cve-check-tool."""
        if (self.initialized):
            if (ISA_pkg.name and ISA_pkg.version and ISA_pkg.patch_files):
                alias_pkgs_faux = []
                # need to compose faux format line for cve-check-tool
                cve_patch_info = self.process_patch_list(ISA_pkg.patch_files)
                pkgline_faux = ISA_pkg.name + "," + ISA_pkg.version + "," + cve_patch_info + ",\n"
                if ISA_pkg.aliases:
                    for a in ISA_pkg.aliases:
                        alias_pkgs_faux.append(
                            a + "," + ISA_pkg.version + "," + cve_patch_info + ",\n")
                pkglist_faux = pkglist + "_" + self.timestamp + ".faux"
                with open(self.reportdir + pkglist_faux, 'a') as fauxfile:
                    fauxfile.write(pkgline_faux)
                    for a in alias_pkgs_faux:
                        fauxfile.write(a)

                with open(self.logfile, 'a') as flog:
                    flog.write("\npkg info: " + pkgline_faux)
            else:
                self.initialized = False
                with open(self.logfile, 'a') as flog:
                    flog.write(
                        "Mandatory arguments such as pkg name, version and list of patches are not provided!\n")
                    flog.write("Not performing the call.\n")
        else:
            with open(self.logfile, 'a') as flog:
                flog.write(
                    "Plugin hasn't initialized! Not performing the call.\n")

    def process_report(self):
        """Run cve-check-tool over the faux list, write the HTML/CSV/XML
        reports and delete the faux list.  No-op when no packages were
        collected."""
        if not os.path.isfile(self.reportdir + pkglist + "_" + self.timestamp + ".faux"):
            return
        if (self.initialized):
            with open(self.logfile, 'a') as flog:
                flog.write("Creating report in HTML format.\n")
            result = self.process_report_type("html")

            with open(self.logfile, 'a') as flog:
                flog.write("Creating report in CSV format.\n")
            result = self.process_report_type("csv")

            pkglist_faux = pkglist + "_" + self.timestamp + ".faux"
            os.remove(self.reportdir + pkglist_faux)

            with open(self.logfile, 'a') as flog:
                flog.write("Creating report in XML format.\n")
            self.write_report_xml(result)

    def write_report_xml(self, result):
        """Convert the CSV report — or, when *result* is non-empty, the
        tool's error text — into a JUnit-style XML report."""
        try:
            from lxml import etree
        except ImportError:
            try:
                import xml.etree.cElementTree as etree
            except ImportError:
                import xml.etree.ElementTree as etree
        num_tests = 0
        root = etree.Element('testsuite', name='CVE_Plugin', tests='1')

        if result:
            # cve-check-tool itself failed; report that as one failure.
            num_tests = 1
            tcase = etree.SubElement(
                root, 'testcase', classname='ISA_CVEChecker', name="Error in cve-check-tool")
            etree.SubElement(tcase, 'failure', message=result, type='violation')
        else:
            with open(self.report_name + ".csv", 'r') as f:
                for line in f:
                    num_tests += 1
                    line = line.strip()
                    line_sp = line.split(',', 2)
                    # A third field starting with "CVE" marks a finding.
                    if (len(line_sp) >= 3) and (line_sp[2].startswith('CVE')):
                        tcase = etree.SubElement(
                            root, 'testcase', classname='ISA_CVEChecker', name=line.split(',', 1)[0])
                        etree.SubElement(
                            tcase, 'failure', message=line, type='violation')
                    else:
                        tcase = etree.SubElement(
                            root, 'testcase', classname='ISA_CVEChecker', name=line.split(',', 1)[0])

        root.set('tests', str(num_tests))
        tree = etree.ElementTree(root)
        output = self.report_name + '.xml'
        try:
            # pretty_print exists only in lxml; stdlib raises TypeError.
            tree.write(output, encoding='UTF-8',
                       pretty_print=True, xml_declaration=True)
        except TypeError:
            tree.write(output, encoding='UTF-8', xml_declaration=True)

    def process_report_type(self, rtype):
        """Invoke cve-check-tool once, writing self.report_name.<rtype>.

        Returns an empty string on success, or the tool's stderr/exit-code
        diagnostics on failure.
        """
        # now faux file is ready and we can process it
        args = ""
        tool_stderr_value = ""
        args += "cve-check-tool "
        if self.cacert:
            args += "--cacert '%s' " % self.cacert
        if rtype != "html":
            args += "-c "
            rtype = "csv"
        pkglist_faux = pkglist + "_" + self.timestamp + ".faux"
        args += "-a -t faux '" + self.reportdir + pkglist_faux + "'"
        with open(self.logfile, 'a') as flog:
            flog.write("Args: " + args)
        try:
            popen = subprocess.Popen(
                args, shell=True, env=os.environ, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            result = popen.communicate()
        except Exception:
            # Narrowed from a bare except so KeyboardInterrupt propagates.
            tool_stderr_value = "Error in executing cve-check-tool" + str(sys.exc_info())
            with open(self.logfile, 'a') as flog:
                flog.write("Error in executing cve-check-tool: " +
                           str(sys.exc_info()))
        else:
            stdout_value = result[0]
            tool_stderr_value = result[1].decode('utf-8')
            if not tool_stderr_value and popen.returncode == 0:
                report = self.report_name + "." + rtype
                with open(report, 'wb') as freport:
                    freport.write(stdout_value)
            else:
                tool_stderr_value = tool_stderr_value + \
                    "\ncve-check-tool terminated with exit code " + str(popen.returncode)
        return tool_stderr_value

    def process_patch_list(self, patch_files):
        """Extract CVE identifiers from patch file names.

        Returns a string like " CVE-2015-1234 CVE-2016-0777" built from
        every patch whose name embeds cve/CVE-<year>-<number>; names that
        do not parse are skipped.
        """
        patch_info = ""
        for patch in patch_files:
            patch1 = patch.partition("cve")
            if (patch1[0] == patch):
                # no cve substring, try CVE
                patch1 = patch.partition("CVE")
                if (patch1[0] == patch):
                    continue
            patchstripped = patch1[2].split('-')
            try:
                # Raw string fixes the invalid '\d' escape the original
                # relied on (a DeprecationWarning in modern Python).
                patch_info += " CVE-" + \
                    patchstripped[1] + "-" + re.findall(r'\d+', patchstripped[2])[0]
            except IndexError:
                # string parsing attempt failed, so just skip this patch
                continue
        return patch_info
+
+# ======== supported callbacks from ISA ============= #
+
+
def init(ISA_config):
    """Plugin entry point: create the module-wide checker instance."""
    global CVEChecker
    CVEChecker = ISA_CVEChecker(ISA_config)
+
+
def getPluginName():
    """Return the name under which ISA registers this plugin."""
    return "ISA_CVEChecker"
+
+
def process_package(ISA_pkg):
    """Plugin callback: delegate to the checker instance built by init()."""
    global CVEChecker
    return CVEChecker.process_package(ISA_pkg)
+
+
def process_report():
    """Plugin callback: delegate to the checker instance built by init()."""
    global CVEChecker
    return CVEChecker.process_report()
+
+# ==================================================== #
diff --git a/meta-security/meta-security-isafw/lib/isafw/isaplugins/ISA_fsa_plugin.py b/meta-security/meta-security-isafw/lib/isafw/isaplugins/ISA_fsa_plugin.py
new file mode 100644
index 0000000..0909756
--- /dev/null
+++ b/meta-security/meta-security-isafw/lib/isafw/isaplugins/ISA_fsa_plugin.py
@@ -0,0 +1,185 @@
+#
+# ISA_fsa_plugin.py - Filesystem analyser plugin, part of ISA FW
+#
+# Copyright (c) 2015 - 2016, Intel Corporation
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+#    * Redistributions of source code must retain the above copyright notice,
+#      this list of conditions and the following disclaimer.
+#    * Redistributions in binary form must reproduce the above copyright
+#      notice, this list of conditions and the following disclaimer in the
+#      documentation and/or other materials provided with the distribution.
+#    * Neither the name of Intel Corporation nor the names of its contributors
+#      may be used to endorse or promote products derived from this software
+#      without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+import os
+from stat import *
+try:
+    from lxml import etree
+except ImportError:
+    try:
+        import xml.etree.cElementTree as etree
+    except ImportError:
+        import xml.etree.ElementTree as etree
+
+
+FSAnalyzer = None
+
+
+class ISA_FSChecker():
+    initialized = False
+
+    def __init__(self, ISA_config):
+        self.logfile = ISA_config.logdir + "/isafw_fsalog"
+        self.full_report_name = ISA_config.reportdir + "/fsa_full_report_" + \
+            ISA_config.machine + "_" + ISA_config.timestamp
+        self.problems_report_name = ISA_config.reportdir + \
+            "/fsa_problems_report_" + ISA_config.machine + "_" + ISA_config.timestamp
+        self.full_reports = ISA_config.full_reports
+        self.initialized = True
+        self.setuid_files = []
+        self.setgid_files = []
+        self.ww_files = []
+        self.no_sticky_bit_ww_dirs = []
+        with open(self.logfile, 'w') as flog:
+            flog.write("\nPlugin ISA_FSChecker initialized!\n")
+
+    def process_filesystem(self, ISA_filesystem):
+        if (self.initialized):
+            if (ISA_filesystem.img_name and ISA_filesystem.path_to_fs):
+                with open(self.logfile, 'a') as flog:
+                    flog.write("Analyzing filesystem at: " + ISA_filesystem.path_to_fs +
+                               " for the image: " + ISA_filesystem.img_name + "\n")
+                self.files = self.find_fsobjects(ISA_filesystem.path_to_fs)
+                with open(self.logfile, 'a') as flog:
+                    flog.write("\nFilelist is: " + str(self.files))
+                if self.full_reports:
+                    with open(self.full_report_name + "_" + ISA_filesystem.img_name, 'w') as ffull_report:
+                        ffull_report.write(
+                            "Report for image: " + ISA_filesystem.img_name + '\n')
+                        ffull_report.write(
+                            "With rootfs location at " + ISA_filesystem.path_to_fs + "\n\n")
+                for f in self.files:
+                    st = os.lstat(f)
+                    i = f.replace(ISA_filesystem.path_to_fs, "")
+                    if self.full_reports:
+                        with open(self.full_report_name + "_" + ISA_filesystem.img_name, 'a') as ffull_report:
+                            ffull_report.write("File: " + i + ' mode: ' + str(oct(st.st_mode)) +
+                                               " uid: " + str(st.st_uid) + " gid: " + str(st.st_gid) + '\n')
+                    if ((st.st_mode & S_ISUID) == S_ISUID):
+                        self.setuid_files.append(i)
+                    if ((st.st_mode & S_ISGID) == S_ISGID):
+                        self.setgid_files.append(i)
+                    if ((st.st_mode & S_IWOTH) == S_IWOTH):
+                        if (((st.st_mode & S_IFDIR) == S_IFDIR) and ((st.st_mode & S_ISVTX) != S_ISVTX)):
+                            self.no_sticky_bit_ww_dirs.append(i)
+                        if (((st.st_mode & S_IFREG) == S_IFREG) and ((st.st_mode & S_IFLNK) != S_IFLNK)):
+                            self.ww_files.append(i)
+                self.write_problems_report(ISA_filesystem)
+                self.write_problems_report_xml(ISA_filesystem)
+            else:
+                with open(self.logfile, 'a') as flog:
+                    flog.write(
+                        "Mandatory arguments such as image name and path to the filesystem are not provided!\n")
+                    flog.write("Not performing the call.\n")
+        else:
+            with open(self.logfile, 'a') as flog:
+                flog.write(
+                    "Plugin hasn't initialized! Not performing the call.\n")
+
+    def write_problems_report(self, ISA_filesystem):
+        with open(self.problems_report_name + "_" + ISA_filesystem.img_name, 'w') as fproblems_report:
+            fproblems_report.write(
+                "Report for image: " + ISA_filesystem.img_name + '\n')
+            fproblems_report.write(
+                "With rootfs location at " + ISA_filesystem.path_to_fs + "\n\n")
+            fproblems_report.write("Files with SETUID bit set:\n")
+            for item in self.setuid_files:
+                fproblems_report.write(item + '\n')
+            fproblems_report.write("\n\nFiles with SETGID bit set:\n")
+            for item in self.setgid_files:
+                fproblems_report.write(item + '\n')
+            fproblems_report.write("\n\nWorld-writable files:\n")
+            for item in self.ww_files:
+                fproblems_report.write(item + '\n')
+            fproblems_report.write(
+                "\n\nWorld-writable dirs with no sticky bit:\n")
+            for item in self.no_sticky_bit_ww_dirs:
+                fproblems_report.write(item + '\n')
+
+    def write_problems_report_xml(self, ISA_filesystem):
+        num_tests = len(self.setuid_files) + len(self.setgid_files) + \
+            len(self.ww_files) + len(self.no_sticky_bit_ww_dirs)
+        root = etree.Element(
+            'testsuite', name='FSA_Plugin', tests=str(num_tests))
+        if self.setuid_files:
+            for item in self.setuid_files:
+                tcase1 = etree.SubElement(
+                    root, 'testcase', classname='Files_with_SETUID_bit_set', name=item)
+                etree.SubElement(
+                    tcase1, 'failure', message=item, type='violation')
+        if self.setgid_files:
+            for item in self.setgid_files:
+                tcase2 = etree.SubElement(
+                    root, 'testcase', classname='Files_with_SETGID_bit_set', name=item)
+                etree.SubElement(
+                    tcase2, 'failure', message=item, type='violation')
+        if self.ww_files:
+            for item in self.ww_files:
+                tcase3 = etree.SubElement(
+                    root, 'testcase', classname='World-writable_files', name=item)
+                etree.SubElement(
+                    tcase3, 'failure', message=item, type='violation')
+        if self.no_sticky_bit_ww_dirs:
+            for item in self.no_sticky_bit_ww_dirs:
+                tcase4 = etree.SubElement(
+                    root, 'testcase', classname='World-writable_dirs_with_no_sticky_bit', name=item)
+                etree.SubElement(
+                    tcase4, 'failure', message=item, type='violation')
+        tree = etree.ElementTree(root)
+        output = self.problems_report_name + "_" + ISA_filesystem.img_name + '.xml'
+        try:
+            tree.write(output, encoding='UTF-8',
+                       pretty_print=True, xml_declaration=True)
+        except TypeError:
+            tree.write(output, encoding='UTF-8', xml_declaration=True)
+
+    def find_fsobjects(self, init_path):
+        list_of_files = []
+        for (dirpath, dirnames, filenames) in os.walk(init_path):
+            if (dirpath != init_path):
+                list_of_files.append(str(dirpath)[:])
+            for f in filenames:
+                list_of_files.append(str(dirpath + "/" + f)[:])
+        return list_of_files
+
+# ======== supported callbacks from ISA ============= #
+
+
+def init(ISA_config):
+    global FSAnalyzer
+    FSAnalyzer = ISA_FSChecker(ISA_config)
+
+
+def getPluginName():
+    return "ISA_FSChecker"
+
+
+def process_filesystem(ISA_filesystem):
+    global FSAnalyzer
+    return FSAnalyzer.process_filesystem(ISA_filesystem)
+
+# ==================================================== #
diff --git a/meta-security/meta-security-isafw/lib/isafw/isaplugins/ISA_kca_plugin.py b/meta-security/meta-security-isafw/lib/isafw/isaplugins/ISA_kca_plugin.py
new file mode 100644
index 0000000..ba09819
--- /dev/null
+++ b/meta-security/meta-security-isafw/lib/isafw/isaplugins/ISA_kca_plugin.py
@@ -0,0 +1,323 @@
+#
+# ISA_kca_plugin.py - Kernel config options analyzer plugin, part of ISA FW
+#
+# Copyright (c) 2015 - 2016, Intel Corporation
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+#    * Redistributions of source code must retain the above copyright notice,
+#      this list of conditions and the following disclaimer.
+#    * Redistributions in binary form must reproduce the above copyright
+#      notice, this list of conditions and the following disclaimer in the
+#      documentation and/or other materials provided with the distribution.
+#    * Neither the name of Intel Corporation nor the names of its contributors
+#      may be used to endorse or promote products derived from this software
+#      without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+try:
+    from lxml import etree
+except ImportError:
+    try:
+        import xml.etree.cElementTree as etree
+    except ImportError:
+        import xml.etree.ElementTree as etree
+import importlib
+
+KCAnalyzer = None
+
+
+class ISA_KernelChecker():
+    initialized = False
+
+    def __init__(self, ISA_config):
+        self.logfile = ISA_config.logdir + "/isafw_kcalog"
+        self.full_report_name = ISA_config.reportdir + "/kca_full_report_" + \
+            ISA_config.machine + "_" + ISA_config.timestamp
+        self.problems_report_name = ISA_config.reportdir + \
+            "/kca_problems_report_" + ISA_config.machine + "_" + ISA_config.timestamp
+        self.full_reports = ISA_config.full_reports
+        self.initialized = True
+        self.arch = ISA_config.arch
+        with open(self.logfile, 'w') as flog:
+            flog.write("\nPlugin ISA_KernelChecker initialized!\n")
+
+    def append_recommendation(self, report, key, value):
+        report.write("Recommended value:\n")
+        report.write(key + ' : ' + str(value) + '\n')
+        comment = self.comments.get(key, '')
+        if comment != '':
+            report.write("Comment:\n")
+            report.write(comment + '\n')
+
+    def process_kernel(self, ISA_kernel):
+        if (self.initialized):
+            if (ISA_kernel.img_name and ISA_kernel.path_to_config):
+                # Merging common and arch configs
+                common_config_module = importlib.import_module('isafw.isaplugins.configs.kca.{}'.format('common'))
+                arch_config_module = importlib.import_module('isafw.isaplugins.configs.kca.{}'.format(self.arch))
+
+                for c in ["hardening_kco", "keys_kco", "security_kco", "integrity_kco",
+                          "hardening_kco_ref", "keys_kco_ref", "security_kco_ref", "integrity_kco_ref",
+                          "comments"]:
+                    setattr(self, c, merge_config(getattr(arch_config_module, c), getattr(common_config_module, c)))
+                with open(self.logfile, 'a') as flog:
+                    flog.write("Analyzing kernel config file at: " + ISA_kernel.path_to_config +
+                               " for the image: " + ISA_kernel.img_name + "\n")
+                with open(ISA_kernel.path_to_config, 'r') as fkernel_conf:
+                    for line in fkernel_conf:
+                        line = line.strip('\n')
+                        for key in self.hardening_kco:
+                            if key + '=' in line:
+                                self.hardening_kco[key] = line.split('=')[1]
+                        for key in self.keys_kco:
+                            if key + '=' in line:
+                                self.keys_kco[key] = line.split('=')[1]
+                        for key in self.security_kco:
+                            if key + '=' in line:
+                                self.security_kco[key] = line.split('=')[1]
+                        for key in self.integrity_kco:
+                            if key + '=' in line:
+                                self.integrity_kco[key] = line.split('=')[1]
+                with open(self.logfile, 'a') as flog:
+                    flog.write("\n\nhardening_kco values: " +
+                               str(self.hardening_kco))
+                    flog.write("\n\nkeys_kco values: " + str(self.keys_kco))
+                    flog.write("\n\nsecurity_kco values: " +
+                               str(self.security_kco))
+                    flog.write("\n\nintegrity_kco values: " +
+                               str(self.integrity_kco))
+                self.write_full_report(ISA_kernel)
+                self.write_problems_report(ISA_kernel)
+
+            else:
+                with open(self.logfile, 'a') as flog:
+                    flog.write(
+                        "Mandatory arguments such as image name and path to config are not provided!\n")
+                    flog.write("Not performing the call.\n")
+        else:
+            with open(self.logfile, 'a') as flog:
+                flog.write(
+                    "Plugin hasn't initialized! Not performing the call!\n")
+
+    def write_full_report(self, ISA_kernel):
+        if self.full_reports:
+            with open(self.full_report_name + "_" + ISA_kernel.img_name, 'w') as freport:
+                freport.write("Report for image: " +
+                              ISA_kernel.img_name + '\n')
+                freport.write("With the kernel conf at: " +
+                              ISA_kernel.path_to_config + '\n\n')
+                freport.write("Hardening options:\n")
+                for key in sorted(self.hardening_kco):
+                    freport.write(
+                        key + ' : ' + str(self.hardening_kco[key]) + '\n')
+                freport.write("\nKey-related options:\n")
+                for key in sorted(self.keys_kco):
+                    freport.write(key + ' : ' + str(self.keys_kco[key]) + '\n')
+                freport.write("\nSecurity options:\n")
+                for key in sorted(self.security_kco):
+                    freport.write(
+                        key + ' : ' + str(self.security_kco[key]) + '\n')
+                freport.write("\nIntegrity options:\n")
+                for key in sorted(self.integrity_kco):
+                    freport.write(
+                        key + ' : ' + str(self.integrity_kco[key]) + '\n')
+
+    def write_problems_report(self, ISA_kernel):
+        self.write_text_problems_report(ISA_kernel)
+        self.write_xml_problems_report(ISA_kernel)
+
+    def write_text_problems_report(self, ISA_kernel):
+        with open(self.problems_report_name + "_" + ISA_kernel.img_name, 'w') as freport:
+            freport.write("Report for image: " + ISA_kernel.img_name + '\n')
+            freport.write("With the kernel conf at: " +
+                          ISA_kernel.path_to_config + '\n\n')
+            freport.write("Hardening options that need improvement:\n")
+            for key in sorted(self.hardening_kco):
+                if (self.hardening_kco[key] != self.hardening_kco_ref[key]):
+                    valid = False
+                    if (key == "CONFIG_CMDLINE"):
+                        if (len(self.hardening_kco['CONFIG_CMDLINE']) > 0):
+                            valid = True
+                    if (key == "CONFIG_DEBUG_STRICT_USER_COPY_CHECKS"):
+                        if (self.hardening_kco['CONFIG_ARCH_HAS_DEBUG_STRICT_USER_COPY_CHECKS'] == 'y'):
+                            valid = True
+                    if (key == "CONFIG_RANDOMIZE_BASE_MAX_OFFSET"):
+                        options = self.hardening_kco_ref[key].split(',')
+                        for option in options:
+                            if (option == self.hardening_kco[key]):
+                                valid = True
+                                break
+                    if not valid:
+                        freport.write("\nActual value:\n")
+                        freport.write(
+                            key + ' : ' + str(self.hardening_kco[key]) + '\n')
+                        self.append_recommendation(freport, key, self.hardening_kco_ref[key])
+            freport.write("\nKey-related options that need improvement:\n")
+            for key in sorted(self.keys_kco):
+                if (self.keys_kco[key] != self.keys_kco_ref[key]):
+                    freport.write("\nActual value:\n")
+                    freport.write(key + ' : ' + str(self.keys_kco[key]) + '\n')
+                    self.append_recommendation(freport, key, self.keys_kco_ref[key])
+            freport.write("\nSecurity options that need improvement:\n")
+            for key in sorted(self.security_kco):
+                if (self.security_kco[key] != self.security_kco_ref[key]):
+                    valid = False
+                    if (key == "CONFIG_DEFAULT_SECURITY"):
+                        options = self.security_kco_ref[key].split(',')
+                        for option in options:
+                            if (option == self.security_kco[key]):
+                                valid = True
+                                break
+                    if ((key == "CONFIG_SECURITY_SELINUX") or
+                            (key == "CONFIG_SECURITY_SMACK") or
+                            (key == "CONFIG_SECURITY_APPARMOR") or
+                            (key == "CONFIG_SECURITY_TOMOYO")):
+                        if ((self.security_kco['CONFIG_SECURITY_SELINUX'] == 'y') or
+                                (self.security_kco['CONFIG_SECURITY_SMACK'] == 'y') or
+                                (self.security_kco['CONFIG_SECURITY_APPARMOR'] == 'y') or
+                                (self.security_kco['CONFIG_SECURITY_TOMOYO'] == 'y')):
+                            valid = True
+                    if not valid:
+                        freport.write("\nActual value:\n")
+                        freport.write(
+                            key + ' : ' + str(self.security_kco[key]) + '\n')
+                        self.append_recommendation(freport, key, self.security_kco_ref[key])
+            freport.write("\nIntegrity options that need improvement:\n")
+            for key in sorted(self.integrity_kco):
+                if (self.integrity_kco[key] != self.integrity_kco_ref[key]):
+                    valid = False
+                    if ((key == "CONFIG_IMA_DEFAULT_HASH_SHA1") or
+                            (key == "CONFIG_IMA_DEFAULT_HASH_SHA256") or
+                            (key == "CONFIG_IMA_DEFAULT_HASH_SHA512") or
+                            (key == "CONFIG_IMA_DEFAULT_HASH_WP512")):
+                        if ((self.integrity_kco['CONFIG_IMA_DEFAULT_HASH_SHA256'] == 'y') or
+                                (self.integrity_kco['CONFIG_IMA_DEFAULT_HASH_SHA512'] == 'y')):
+                            valid = True
+                    if not valid:
+                        freport.write("\nActual value:\n")
+                        freport.write(
+                            key + ' : ' + str(self.integrity_kco[key]) + '\n')
+                        self.append_recommendation(freport, key, self.integrity_kco_ref[key])
+
+    def write_xml_problems_report(self, ISA_kernel):
+        # write_problems_report_xml
+        num_tests = len(self.hardening_kco) + len(self.keys_kco) + \
+            len(self.security_kco) + len(self.integrity_kco)
+        root = etree.Element(
+            'testsuite', name='KCA_Plugin', tests=str(num_tests))
+        for key in sorted(self.hardening_kco):
+            tcase1 = etree.SubElement(
+                root, 'testcase', classname='Hardening options', name=key)
+            if (self.hardening_kco[key] != self.hardening_kco_ref[key]):
+                valid = False
+                if (key == "CONFIG_CMDLINE"):
+                    if (len(self.hardening_kco['CONFIG_CMDLINE']) > 0):
+                        valid = True
+                if (key == "CONFIG_DEBUG_STRICT_USER_COPY_CHECKS"):
+                    if (self.hardening_kco['CONFIG_ARCH_HAS_DEBUG_STRICT_USER_COPY_CHECKS'] == 'y'):
+                        valid = True
+                if (key == "CONFIG_RANDOMIZE_BASE_MAX_OFFSET"):
+                    options = self.hardening_kco_ref[key].split(',')
+                    for option in options:
+                        if (option == self.hardening_kco[key]):
+                            valid = True
+                            break
+                if not valid:
+                    msg1 = 'current=' + key + ' is ' + \
+                        str(self.hardening_kco[
+                            key]) + ', recommended=' + key + ' is ' + str(self.hardening_kco_ref[key])
+                    etree.SubElement(
+                        tcase1, 'failure', message=msg1, type='violation')
+        for key in sorted(self.keys_kco):
+            tcase2 = etree.SubElement(
+                root, 'testcase', classname='Key-related options', name=key)
+            if (self.keys_kco[key] != self.keys_kco_ref[key]):
+                msg2 = 'current=' + key + ' is ' + \
+                    str(self.keys_kco[key]) + ', recommended=' + \
+                    key + ' is ' + str(self.keys_kco_ref[key])
+                etree.SubElement(
+                    tcase2, 'failure', message=msg2, type='violation')
+        for key in sorted(self.security_kco):
+            tcase3 = etree.SubElement(
+                root, 'testcase', classname='Security options', name=key)
+            if (self.security_kco[key] != self.security_kco_ref[key]):
+                valid = False
+                if (key == "CONFIG_DEFAULT_SECURITY"):
+                    options = self.security_kco_ref[key].split(',')
+                    for option in options:
+                        if (option == self.security_kco[key]):
+                            valid = True
+                            break
+                if ((key == "CONFIG_SECURITY_SELINUX") or
+                        (key == "CONFIG_SECURITY_SMACK") or
+                        (key == "CONFIG_SECURITY_APPARMOR") or
+                        (key == "CONFIG_SECURITY_TOMOYO")):
+                    if ((self.security_kco['CONFIG_SECURITY_SELINUX'] == 'y') or
+                            (self.security_kco['CONFIG_SECURITY_SMACK'] == 'y') or
+                            (self.security_kco['CONFIG_SECURITY_APPARMOR'] == 'y') or
+                            (self.security_kco['CONFIG_SECURITY_TOMOYO'] == 'y')):
+                        valid = True
+                if not valid:
+                    msg3 = 'current=' + key + ' is ' + \
+                        str(self.security_kco[key]) + ', recommended=' + \
+                        key + ' is ' + str(self.security_kco_ref[key])
+                    etree.SubElement(
+                        tcase3, 'failure', message=msg3, type='violation')
+        for key in sorted(self.integrity_kco):
+            tcase4 = etree.SubElement(
+                root, 'testcase', classname='Integrity options', name=key)
+            if (self.integrity_kco[key] != self.integrity_kco_ref[key]):
+                valid = False
+                if ((key == "CONFIG_IMA_DEFAULT_HASH_SHA1") or
+                        (key == "CONFIG_IMA_DEFAULT_HASH_SHA256") or
+                        (key == "CONFIG_IMA_DEFAULT_HASH_SHA512") or
+                        (key == "CONFIG_IMA_DEFAULT_HASH_WP512")):
+                    if ((self.integrity_kco['CONFIG_IMA_DEFAULT_HASH_SHA256'] == 'y') or
+                            (self.integrity_kco['CONFIG_IMA_DEFAULT_HASH_SHA512'] == 'y')):
+                        valid = True
+                if not valid:
+                    msg4 = 'current=' + key + ' is ' + \
+                        str(self.integrity_kco[
+                            key]) + ', recommended=' + key + ' is ' + str(self.integrity_kco_ref[key])
+                    etree.SubElement(
+                        tcase4, 'failure', message=msg4, type='violation')
+        tree = etree.ElementTree(root)
+        output = self.problems_report_name + "_" + ISA_kernel.img_name + '.xml'
+        try:
+            tree.write(output, encoding='UTF-8',
+                       pretty_print=True, xml_declaration=True)
+        except TypeError:
+            tree.write(output, encoding='UTF-8', xml_declaration=True)
+
+
+def merge_config(arch_kco, common_kco):
+    merged = arch_kco.copy()
+    merged.update(common_kco)
+    return merged
+
+# ======== supported callbacks from ISA ============= #
+def init(ISA_config):
+    global KCAnalyzer
+    KCAnalyzer = ISA_KernelChecker(ISA_config)
+
+
+def getPluginName():
+    return "ISA_KernelChecker"
+
+
+def process_kernel(ISA_kernel):
+    global KCAnalyzer
+    return KCAnalyzer.process_kernel(ISA_kernel)
+# ==================================================== #
diff --git a/meta-security/meta-security-isafw/lib/isafw/isaplugins/ISA_la_plugin.py b/meta-security/meta-security-isafw/lib/isafw/isaplugins/ISA_la_plugin.py
new file mode 100644
index 0000000..20e7e26b
--- /dev/null
+++ b/meta-security/meta-security-isafw/lib/isafw/isaplugins/ISA_la_plugin.py
@@ -0,0 +1,273 @@
+#
+# ISA_la_plugin.py - License analyzer plugin, part of ISA FW
+# Functionality is based on similar scripts from Clear linux project
+#
+# Copyright (c) 2015 - 2016, Intel Corporation
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+#    * Redistributions of source code must retain the above copyright notice,
+#      this list of conditions and the following disclaimer.
+#    * Redistributions in binary form must reproduce the above copyright
+#      notice, this list of conditions and the following disclaimer in the
+#      documentation and/or other materials provided with the distribution.
+#    * Neither the name of Intel Corporation nor the names of its contributors
+#      may be used to endorse or promote products derived from this software
+#      without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import subprocess
+import os, sys
+
+LicenseChecker = None
+
+flicenses = "/configs/la/licenses"
+fapproved_non_osi = "/configs/la/approved-non-osi"
+fexceptions = "/configs/la/exceptions"
+funwanted = "/configs/la/violations"
+
+
+class ISA_LicenseChecker():
+    initialized = False
+    rpm_present = False
+
+    def __init__(self, ISA_config):
+        self.logfile = ISA_config.logdir + "/isafw_lalog"
+        self.unwanted = []
+        self.report_name = ISA_config.reportdir + "/la_problems_report_" + \
+            ISA_config.machine + "_" + ISA_config.timestamp
+        self.image_pkg_list = ISA_config.reportdir + "/pkglist"
+        self.image_pkgs = []
+        self.la_plugin_image_whitelist = ISA_config.la_plugin_image_whitelist
+        self.la_plugin_image_blacklist = ISA_config.la_plugin_image_blacklist
+        self.initialized = True
+        with open(self.logfile, 'a') as flog:
+            flog.write("\nPlugin ISA_LA initialized!\n")
+        # check that rpm is installed (supporting only rpm packages for now)
+        DEVNULL = open(os.devnull, 'wb')
+        rc = subprocess.call(["which", "rpm"], stdout=DEVNULL, stderr=DEVNULL)
+        DEVNULL.close()
+        if rc == 0:
+            self.rpm_present = True
+        else:
+            with open(self.logfile, 'a') as flog:
+                flog.write("rpm tool is missing! Licence info is expected from build system\n")
+
+    def process_package(self, ISA_pkg):
+        if (self.initialized):
+            if ISA_pkg.name:
+                if (not ISA_pkg.licenses):
+                    # need to determine licenses first
+                    # for this we need rpm tool to be present
+                    if (not self.rpm_present):
+                        with open(self.logfile, 'a') as flog:
+                            flog.write("rpm tool is missing and licence info is not provided. Cannot proceed.\n")
+                            return
+                    if (not ISA_pkg.source_files):
+                        if (not ISA_pkg.path_to_sources):
+                            self.initialized = False
+                            with open(self.logfile, 'a') as flog:
+                                flog.write(
+                                    "No path to sources or source file list is provided!")
+                                flog.write(
+                                    "\nNot able to determine licenses for package: " + ISA_pkg.name)
+                            return
+                        # need to build list of source files
+                        ISA_pkg.source_files = self.find_files(
+                            ISA_pkg.path_to_sources)
+                    for i in ISA_pkg.source_files:
+                        if (i.endswith(".spec")):  # supporting rpm only for now
+                            args = ("rpm", "-q", "--queryformat",
+                                    "%{LICENSE} ", "--specfile", i)
+                            try:
+                                popen = subprocess.Popen(
+                                    args, stdout=subprocess.PIPE)
+                                popen.wait()
+                                ISA_pkg.licenses = popen.stdout.read().split()
+                            except:
+                                self.initialized = False
+                                with open(self.logfile, 'a') as flog:
+                                    flog.write(
+                                        "Error in executing rpm query: " + str(sys.exc_info()))
+                                    flog.write(
+                                        "\nNot able to process package: " + ISA_pkg.name)
+                                return
+                for l in ISA_pkg.licenses:
+                    if (not self.check_license(l, flicenses) and
+                            not self.check_license(l, fapproved_non_osi) and
+                            not self.check_exceptions(ISA_pkg.name, l, fexceptions)):
+                        # log the package as not following correct license
+                        with open(self.report_name, 'a') as freport:
+                            freport.write(l + "\n")
+                    if (self.check_license(l, funwanted)):
+                        # log the package as having license that should not be
+                        # used
+                        with open(self.report_name + "_unwanted", 'a') as freport:
+                            freport.write(l + "\n")
+            else:
+                self.initialized = False
+                with open(self.logfile, 'a') as flog:
+                    flog.write(
+                        "Mandatory argument package name is not provided!\n")
+                    flog.write("Not performing the call.\n")
+        else:
+            with open(self.logfile, 'a') as flog:
+                flog.write(
+                    "Plugin hasn't initialized! Not performing the call.")
+
+    def process_report(self):
+        if (self.initialized):
+            with open(self.logfile, 'a') as flog:
+                flog.write("Creating report with violating licenses.\n")
+            self.process_pkg_list()
+            self.write_report_unwanted()
+            with open(self.logfile, 'a') as flog:
+                flog.write("Creating report in XML format.\n")
+            self.write_report_xml()
+
+    def process_pkg_list(self):
+        if os.path.isfile (self.image_pkg_list):
+            img_name = ""
+            with open(self.image_pkg_list, 'r') as finput:
+                for line in finput:
+                    line = line.strip()
+                    if not line:
+                        continue
+                    if line.startswith("Packages "):
+                        img_name = line.split()[3]
+                        with open(self.logfile, 'a') as flog:
+                            flog.write("img_name: " + img_name + "\n")
+                        continue
+                    package_info = line.split()
+                    pkg_name = package_info[0]
+                    orig_pkg_name = package_info[2]
+                    if (not self.image_pkgs) or ((pkg_name + " from " + img_name) not in self.image_pkgs):
+                        self.image_pkgs.append(pkg_name + " from " + img_name + " " + orig_pkg_name)
+
+    def write_report_xml(self):
+        try:
+            from lxml import etree
+        except ImportError:
+            try:
+                import xml.etree.cElementTree as etree
+            except ImportError:
+                import xml.etree.ElementTree as etree
+        num_tests = 0
+        root = etree.Element('testsuite', name='LA_Plugin', tests='2')
+        if os.path.isfile(self.report_name):
+            with open(self.report_name, 'r') as f:
+                class_name = "Non-approved-licenses"
+                for line in f:
+                    line = line.strip()
+                    if line == "":
+                        continue
+                    if line.startswith("Packages that "):
+                        class_name = "Violating-licenses"
+                        continue
+                    num_tests += 1
+                    tcase1 = etree.SubElement(
+                        root, 'testcase', classname=class_name, name=line.split(':', 1)[0])
+                    etree.SubElement(
+                        tcase1, 'failure', message=line, type='violation')
+        else:
+            tcase1 = etree.SubElement(
+                root, 'testcase', classname='ISA_LAChecker', name='none')
+            num_tests = 1
+        root.set('tests', str(num_tests))
+        tree = etree.ElementTree(root)
+        output = self.report_name + '.xml'
+        try:
+            tree.write(output, encoding='UTF-8',
+                       pretty_print=True, xml_declaration=True)
+        except TypeError:
+            tree.write(output, encoding='UTF-8', xml_declaration=True)
+
+    def write_report_unwanted(self):
+        if os.path.isfile(self.report_name + "_unwanted"):
+            with open(self.logfile, 'a') as flog:
+                flog.write("image_pkgs: " + str(self.image_pkgs) + "\n")
+                flog.write("self.la_plugin_image_whitelist: " + str(self.la_plugin_image_whitelist) + "\n")
+                flog.write("self.la_plugin_image_blacklist: " + str(self.la_plugin_image_blacklist) + "\n")
+            with open(self.report_name, 'a') as fout:
+                with open(self.report_name + "_unwanted", 'r') as f:
+                    fout.write(
+                        "\n\nPackages that violate mandatory license requirements:\n")
+                    for line in f:
+                        line = line.strip()
+                        pkg_name = line.split(':',1)[0]
+                        if (not self.image_pkgs):
+                            fout.write(line + " from image name not available \n")
+                            continue
+                        for pkg_info in self.image_pkgs:
+                            image_pkg_name = pkg_info.split()[0]
+                            image_name = pkg_info.split()[2]
+                            image_orig_pkg_name = pkg_info.split()[3]
+                            if ((image_pkg_name == pkg_name) or (image_orig_pkg_name == pkg_name)):
+                                if self.la_plugin_image_whitelist and (image_name not in self.la_plugin_image_whitelist):
+                                    continue
+                                if self.la_plugin_image_blacklist and (image_name in self.la_plugin_image_blacklist):
+                                    continue
+                                fout.write(line + " from image " + image_name)
+                                if (image_pkg_name != image_orig_pkg_name):
+                                    fout.write(" binary_pkg_name " + image_pkg_name + "\n")
+                                    continue
+                                fout.write("\n")
+            os.remove(self.report_name + "_unwanted")
+
+    def find_files(self, init_path):
+        list_of_files = []
+        for (dirpath, dirnames, filenames) in os.walk(init_path):
+            for f in filenames:
+                list_of_files.append(str(dirpath + "/" + f)[:])
+        return list_of_files
+
+    def check_license(self, license, file_path):
+        with open(os.path.dirname(__file__) + file_path, 'r') as f:
+            for line in f:
+                s = line.rstrip()
+                curr_license = license.split(':',1)[1]
+                if s == curr_license:
+                    return True
+        return False
+
+    def check_exceptions(self, pkg_name, license, file_path):
+        with open(os.path.dirname(__file__) + file_path, 'r') as f:
+            for line in f:
+                s = line.rstrip()
+                curr_license = license.split(':',1)[1]
+                if s == pkg_name + " " + curr_license:
+                    return True
+        return False
+
+# ======== supported callbacks from ISA ============= #
+
+def init(ISA_config):
+    global LicenseChecker
+    LicenseChecker = ISA_LicenseChecker(ISA_config)
+
+
+def getPluginName():
+    return "ISA_LicenseChecker"
+
+
+def process_package(ISA_pkg):
+    global LicenseChecker
+    return LicenseChecker.process_package(ISA_pkg)
+
+
+def process_report():
+    global LicenseChecker
+    return LicenseChecker.process_report()
+
+# ==================================================== #
diff --git a/meta-security/meta-security-isafw/lib/isafw/isaplugins/__init__.py b/meta-security/meta-security-isafw/lib/isafw/isaplugins/__init__.py
new file mode 100644
index 0000000..ad1997d
--- /dev/null
+++ b/meta-security/meta-security-isafw/lib/isafw/isaplugins/__init__.py
@@ -0,0 +1,42 @@
+#
+# __init__.py - part of ISA FW
+#
+# Copyright (c) 2015 - 2016, Intel Corporation
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+#    * Redistributions of source code must retain the above copyright notice,
+#      this list of conditions and the following disclaimer.
+#    * Redistributions in binary form must reproduce the above copyright
+#      notice, this list of conditions and the following disclaimer in the
+#      documentation and/or other materials provided with the distribution.
+#    * Neither the name of Intel Corporation nor the names of its contributors
+#      may be used to endorse or promote products derived from this software
+#      without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import glob
+import keyword
+import os
+import sys
+
+basedir = os.path.dirname(__file__)
+
+__all__ = []
+for name in glob.glob(os.path.join(basedir, '*.py')):
+    module = os.path.splitext(os.path.split(name)[-1])[0]
+    if not module.startswith('_') and not keyword.iskeyword(module):
+        __import__(__name__ + '.' + module)
+        __all__.append(module)
+__all__.sort()
diff --git a/meta-security/meta-security-isafw/lib/isafw/isaplugins/configs/__init__.py b/meta-security/meta-security-isafw/lib/isafw/isaplugins/configs/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/meta-security/meta-security-isafw/lib/isafw/isaplugins/configs/__init__.py
diff --git a/meta-security/meta-security-isafw/lib/isafw/isaplugins/configs/kca/__init__.py b/meta-security/meta-security-isafw/lib/isafw/isaplugins/configs/kca/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/meta-security/meta-security-isafw/lib/isafw/isaplugins/configs/kca/__init__.py
diff --git a/meta-security/meta-security-isafw/lib/isafw/isaplugins/configs/kca/arm.py b/meta-security/meta-security-isafw/lib/isafw/isaplugins/configs/kca/arm.py
new file mode 100644
index 0000000..d47ba9f
--- /dev/null
+++ b/meta-security/meta-security-isafw/lib/isafw/isaplugins/configs/kca/arm.py
@@ -0,0 +1,24 @@
+############################################################################################
+# Kernel Hardening Configurations
+############################################################################################
+hardening_kco = {'CONFIG_DEFAULT_MMAP_MIN_ADDR': 'not set',}
+hardening_kco_ref = {'CONFIG_DEFAULT_MMAP_MIN_ADDR': '32768',}
+############################################################################################
+# Keys Kernel Configuration
+############################################################################################
+keys_kco = {}
+keys_kco_ref = {}
+############################################################################################
+# Security Kernel Configuration
+############################################################################################
+security_kco = {'CONFIG_LSM_MMAP_MIN_ADDR': 'not set',}
+security_kco_ref = {'CONFIG_LSM_MMAP_MIN_ADDR': '32768',}
+############################################################################################
+# Integrity Kernel Configuration
+############################################################################################
+integrity_kco = {}
+integrity_kco_ref = {}
+############################################################################################
+# Comments
+############################################################################################
+comments = {'CONFIG_DEFAULT_MMAP_MIN_ADDR': 'Defines the portion of low virtual memory that should be protected from userspace allocation. Keeping a user from writing to low pages can help reduce the impact of kernel NULL pointer bugs.'}
diff --git a/meta-security/meta-security-isafw/lib/isafw/isaplugins/configs/kca/common.py b/meta-security/meta-security-isafw/lib/isafw/isaplugins/configs/kca/common.py
new file mode 100644
index 0000000..faa388c
--- /dev/null
+++ b/meta-security/meta-security-isafw/lib/isafw/isaplugins/configs/kca/common.py
@@ -0,0 +1,242 @@
+############################################################################################
+# Kernel Hardening Configurations
+############################################################################################
+hardening_kco = {'CONFIG_SERIAL_8250_CONSOLE': 'not set',
+                 'CONFIG_SERIAL_CORE': 'not set',
+                 'CONFIG_SERIAL_CORE_CONSOLE': 'not set',
+                 'CONFIG_CMDLINE_BOOL': 'not set',
+                 'CONFIG_CMDLINE': 'not set',
+                 'CONFIG_CMDLINE_OVERRIDE': 'not set',
+                 'CONFIG_DEBUG_INFO': 'not set',
+                 'CONFIG_KGDB': 'not set',
+                 'CONFIG_KPROBES': 'not set',
+                 'CONFIG_FTRACE': 'not set',
+                 'CONFIG_OPROFILE': 'not set',
+                 'CONFIG_PROFILING': 'not set',
+                 'CONFIG_MAGIC_SYSRQ': 'not set',
+                 'CONFIG_DEBUG_BUGVERBOSE': 'not set',
+                 'CONFIG_IP_PNP': 'not set',
+                 'CONFIG_IKCONFIG': 'not set',
+                 'CONFIG_SWAP': 'not set',
+                 'CONFIG_NAMESPACES': 'not set',
+                 'CONFIG_NFSD': 'not set',
+                 'CONFIG_NFS_FS': 'not set',
+                 'CONFIG_BINFMT_MISC': 'not set',
+                 'CONFIG_KALLSYMS': 'not set',
+                 'CONFIG_KALLSYMS_ALL': 'not set',
+                 'CONFIG_BUG': 'not set',
+                 'CONFIG_SYSCTL_SYSCALL': 'not set',
+                 'CONFIG_MODULE_UNLOAD': 'not set',
+                 'CONFIG_MODULE_FORCE_LOAD': 'not set',
+                 'CONFIG_DEVMEM': 'not set',
+                 'CONFIG_COREDUMP': 'not set',
+                 'CONFIG_CROSS_MEMORY_ATTACH': 'not set',
+                 'CONFIG_UNIX_DIAG': 'not set',
+                 'CONFIG_CHECKPOINT_RESTORE': 'not set',
+                 'CONFIG_PANIC_ON_OOPS': 'not set',
+                 'CONFIG_PACKET_DIAG': 'not set',
+                 'CONFIG_FW_LOADER_USER_HELPER': 'not set',
+                 'CONFIG_BPF_JIT': 'not set',
+                 'CONFIG_USELIB': 'not set',
+                 'CONFIG_CC_STACKPROTECTOR': 'not set',
+                 'CONFIG_KEXEC': 'not set',
+                 'CONFIG_PROC_KCORE': 'not set',
+                 'CONFIG_SECURITY_DMESG_RESTRICT': 'not set',
+                 'CONFIG_DEBUG_STACKOVERFLOW': 'not set',
+                 'CONFIG_DEBUG_STRICT_USER_COPY_CHECKS': 'not set',
+                 'CONFIG_ARCH_HAS_DEBUG_STRICT_USER_COPY_CHECKS': 'not set',
+                 'CONFIG_IKCONFIG_PROC': 'not set',
+                 'CONFIG_RANDOMIZE_BASE': 'not set',
+                 'CONFIG_DEBUG_RODATA': 'not set',
+                 'CONFIG_STRICT_DEVMEM': 'not set',
+                 'CONFIG_DEVKMEM': 'not set',
+                 'CONFIG_ARCH_BINFMT_ELF_RANDOMIZE_PIE': 'not set',
+                 'CONFIG_DEBUG_KERNEL': 'not set',
+                 'CONFIG_DEBUG_FS': 'not set',
+                 'CONFIG_MODULE_SIG_FORCE': 'not set',
+                 }
+hardening_kco_ref = {'CONFIG_SERIAL_8250_CONSOLE': 'not set',
+                     'CONFIG_SERIAL_CORE': 'not set',
+                     'CONFIG_SERIAL_CORE_CONSOLE': 'not set',
+                     'CONFIG_CMDLINE_BOOL': 'y',
+                     'CONFIG_CMDLINE': '"cmd_line"',
+                     'CONFIG_CMDLINE_OVERRIDE': 'y',
+                     'CONFIG_DEBUG_INFO': 'not set',
+                     'CONFIG_KGDB': 'not set',
+                     'CONFIG_KPROBES': 'not set',
+                     'CONFIG_FTRACE': 'not set',
+                     'CONFIG_OPROFILE': 'not set',
+                     'CONFIG_PROFILING': 'not set',
+                     'CONFIG_MAGIC_SYSRQ': 'not set',
+                     'CONFIG_DEBUG_BUGVERBOSE': 'not set',
+                     'CONFIG_IP_PNP': 'not set',
+                     'CONFIG_IKCONFIG': 'not set',
+                     'CONFIG_SWAP': 'not set',
+                     'CONFIG_NAMESPACES': 'not set',
+                     'CONFIG_NFSD': 'not set',
+                     'CONFIG_NFS_FS': 'not set',
+                     'CONFIG_BINFMT_MISC': 'not set',
+                     'CONFIG_KALLSYMS': 'not set',
+                     'CONFIG_KALLSYMS_ALL': 'not set',
+                     'CONFIG_BUG': 'not set',
+                     'CONFIG_SYSCTL_SYSCALL': 'not set',
+                     'CONFIG_MODULE_UNLOAD': 'not set',
+                     'CONFIG_MODULE_FORCE_LOAD': 'not set',
+                     'CONFIG_DEVMEM': 'not set',
+                     'CONFIG_COREDUMP': 'not set',
+                     'CONFIG_CROSS_MEMORY_ATTACH': 'not set',
+                     'CONFIG_UNIX_DIAG': 'not set',
+                     'CONFIG_CHECKPOINT_RESTORE': 'not set',
+                     'CONFIG_PANIC_ON_OOPS': 'y',
+                     'CONFIG_PACKET_DIAG': 'not set',
+                     'CONFIG_FW_LOADER_USER_HELPER': 'not set',
+                     'CONFIG_BPF_JIT': 'not set',
+                     'CONFIG_USELIB': 'not set',
+                     'CONFIG_CC_STACKPROTECTOR': 'y',
+                     'CONFIG_KEXEC': 'not set',
+                     'CONFIG_PROC_KCORE': 'not set',
+                     'CONFIG_SECURITY_DMESG_RESTRICT': 'y',
+                     'CONFIG_DEBUG_STACKOVERFLOW': 'y',
+                     'CONFIG_DEBUG_STRICT_USER_COPY_CHECKS': 'y',
+                     'CONFIG_ARCH_HAS_DEBUG_STRICT_USER_COPY_CHECKS': 'y',
+                     'CONFIG_IKCONFIG_PROC': 'not set',
+                     'CONFIG_RANDOMIZE_BASE': 'y',
+                     'CONFIG_DEBUG_RODATA': 'y',
+                     'CONFIG_STRICT_DEVMEM': 'y',
+                     'CONFIG_DEVKMEM': 'not set',
+                     'CONFIG_ARCH_BINFMT_ELF_RANDOMIZE_PIE': 'y',
+                     'CONFIG_DEBUG_KERNEL': 'not set',
+                     'CONFIG_DEBUG_FS': 'not set',
+                     'CONFIG_MODULE_SIG_FORCE': 'y',
+                     }
+############################################################################################
+# Keys Kernel Configuration
+############################################################################################
+keys_kco = {'CONFIG_KEYS': 'not set',
+            'CONFIG_TRUSTED_KEYS': 'not set',
+            'CONFIG_ENCRYPTED_KEYS': 'not set',
+            'CONFIG_KEYS_DEBUG_PROC_KEYS': 'not set'
+            }
+keys_kco_ref = {'CONFIG_KEYS': 'y',
+                'CONFIG_TRUSTED_KEYS': 'y',
+                'CONFIG_ENCRYPTED_KEYS': 'y',
+                'CONFIG_KEYS_DEBUG_PROC_KEYS': 'not set'
+                }
+############################################################################################
+# Security Kernel Configuration
+############################################################################################
+security_kco = {'CONFIG_SECURITY': 'not set',
+                'CONFIG_SECURITYFS': 'not set',
+                'CONFIG_SECURITY_NETWORKING': 'not set',
+                'CONFIG_DEFAULT_SECURITY': 'not set',
+                'CONFIG_SECURITY_SELINUX': 'not set',
+                'CONFIG_SECURITY_SMACK': 'not set',
+                'CONFIG_SECURITY_TOMOYO': 'not set',
+                'CONFIG_SECURITY_APPARMOR': 'not set',
+                'CONFIG_SECURITY_YAMA': 'not set',
+                'CONFIG_SECURITY_YAMA_STACKED': 'not set'
+                }
+security_kco_ref = {'CONFIG_SECURITY': 'y',
+                    'CONFIG_SECURITYFS': 'y',
+                    'CONFIG_SECURITY_NETWORKING': 'y',
+                    'CONFIG_DEFAULT_SECURITY': '"selinux","smack","apparmor","tomoyo"',
+                    'CONFIG_SECURITY_SELINUX': 'y',
+                    'CONFIG_SECURITY_SMACK': 'y',
+                    'CONFIG_SECURITY_TOMOYO': 'y',
+                    'CONFIG_SECURITY_APPARMOR': 'y',
+                    'CONFIG_SECURITY_YAMA': 'y',
+                    'CONFIG_SECURITY_YAMA_STACKED': 'y'
+                    }
+############################################################################################
+# Integrity Kernel Configuration
+############################################################################################
+integrity_kco = {'CONFIG_INTEGRITY': 'not set',
+                 'CONFIG_INTEGRITY_SIGNATURE': 'not set',
+                 'CONFIG_INTEGRITY_AUDIT': 'not set',
+                 'CONFIG_IMA': 'not set',
+                 'CONFIG_IMA_LSM_RULES': 'not set',
+                 'CONFIG_IMA_APPRAISE': 'not set',
+                 'CONFIG_IMA_TRUSTED_KEYRING': 'not set',
+                 'CONFIG_IMA_APPRAISE_SIGNED_INIT': 'not set',
+                 'CONFIG_EVM': 'not set',
+                 'CONFIG_EVM_ATTR_FSUUID': 'not set',
+                 'CONFIG_EVM_EXTRA_SMACK_XATTRS': 'not set',
+                 'CONFIG_IMA_DEFAULT_HASH_SHA1': 'not set',
+                 'CONFIG_IMA_DEFAULT_HASH_SHA256': 'not set',
+                 'CONFIG_IMA_DEFAULT_HASH_SHA512': 'not set',
+                 'CONFIG_IMA_DEFAULT_HASH_WP512': 'not set'
+                 }
+integrity_kco_ref = {'CONFIG_INTEGRITY': 'y',
+                     'CONFIG_INTEGRITY_SIGNATURE': 'y',
+                     'CONFIG_INTEGRITY_AUDIT': 'y',
+                     'CONFIG_IMA': 'y',
+                     'CONFIG_IMA_LSM_RULES': 'y',
+                     'CONFIG_IMA_APPRAISE': 'y',
+                     'CONFIG_IMA_TRUSTED_KEYRING': 'y',
+                     'CONFIG_IMA_APPRAISE_SIGNED_INIT': 'y',
+                     'CONFIG_EVM': 'y',
+                     'CONFIG_EVM_ATTR_FSUUID': 'y',
+                     'CONFIG_EVM_EXTRA_SMACK_XATTRS': 'y',
+                     'CONFIG_IMA_DEFAULT_HASH_SHA1': 'not set',
+                     'CONFIG_IMA_DEFAULT_HASH_SHA256': 'y',
+                     'CONFIG_IMA_DEFAULT_HASH_SHA512': 'y',
+                     'CONFIG_IMA_DEFAULT_HASH_WP512': 'not set'
+                     }
+############################################################################################
+# Comments
+############################################################################################
+comments = {  # Kernel Hardening Configurations
+    'CONFIG_SERIAL_8250_CONSOLE': 'Enables the serial console. Providing access to the serial console would assist an attacker in discovering attack vectors.',
+    'CONFIG_SERIAL_CORE': 'Enables the serial console. Providing access to the serial console would assist an attacker in discovering attack vectors.',
+    'CONFIG_SERIAL_CORE_CONSOLE': 'Enables the serial console. Providing access to the serial console would assist an attacker in discovering attack vectors.',
+    'CONFIG_CMDLINE_BOOL': 'Enables the kernel command line to be hardcoded directly into the kernel. Hardcoding the command line allows tighter control over kernel command line options.',
+    'CONFIG_CMDLINE': 'Defines the kernel command line to be hardcoded into the kernel. Hardcoding the command line allows tighter control over kernel command line options.',
+    'CONFIG_CMDLINE_OVERRIDE': 'Enables the kernel to ignore the boot loader command line and to use only the hardcoded command line. Hardcoding the command line allows tighter control over kernel command line options.',
+    'CONFIG_DEBUG_INFO': 'Enables debug symbols in the kernel. Providing debug symbols would assist an attacker in discovering attack vectors.',
+    'CONFIG_KGDB': 'Enables KGDB over USB and console ports. Providing KGDB would assist an attacker in discovering attack vectors.',
+    'CONFIG_KPROBES': 'Enables Kernel Dynamic Probes. Providing kprobes allows the attacker to collect debug and performance information.',
+    'CONFIG_FTRACE': 'Enables the kernel to trace every function. Providing kernel trace functionality would assist an attacker in discovering attack vectors.',
+    'CONFIG_OPROFILE': 'Enables a profiling system capable of profiling kernel and kernel modules. Providing profiling functionality would assist an attacker in discovering attack vectors.',
+    'CONFIG_PROFILING': 'Enables a profiling system capable of profiling kernel and kernel modules. Providing profiling functionality would assist an attacker in discovering attack vectors.',
+    'CONFIG_MAGIC_SYSRQ': 'Enables a console device to interpret special characters as SysRQ system commands. SysRQ commands are an immediate attack vector as they provide the ability to dump information or reboot the device.',
+    'CONFIG_DEBUG_BUGVERBOSE': 'Enables verbose logging for BUG() panics. Verbose logging would assist an attacker in discovering attack vectors.',
+    'CONFIG_IP_PNP': 'Enables automatic configuration of IP addresses of devices and of the routing table during kernel boot. Providing networking functionality before the system has come up would assist an attacker in discovering attack vectors.',
+    'CONFIG_IKCONFIG': 'Enables access to the kernel config through /proc/config.gz. Leaking the kernel configuration would assist an attacker in discovering attack vectors.',
+    'CONFIG_SWAP': 'Enables swap files for kernel. The ability to read kernel memory pages in swap files would assist an attacker in discovering attack vectors.',
+    'CONFIG_NAMESPACES': 'Enabling this can result in duplicates of dev nodes, pids and mount points, which can be useful to attackers trying to spoof running environments on devices.',
+    'CONFIG_NFSD': 'Enables remote access to files residing on this system using Sun\'s Network File System protocol. Providing remote access to the file system would assist an attacker in discovering attack vectors.',
+    'CONFIG_NFS_FS': 'Enables remote access to files residing on this system using Sun\'s Network File System protocol. Providing remote access to the file system would assist an attacker in discovering attack vectors.',
+    'CONFIG_BINFMT_MISC': 'Enables support for binary formats other than ELF. Providing the ability to use alternate interpreters would assist an attacker in discovering attack vectors.',
+    'CONFIG_KALLSYMS': 'Enables printing of symbolic crash information and symbolic stack backtraces. Verbose logging would assist an attacker in discovering attack vectors.',
+    'CONFIG_KALLSYMS_ALL': 'Enables printing of symbolic crash information and symbolic stack backtraces. Verbose logging would assist an attacker in discovering attack vectors.',
+    'CONFIG_BUG': 'Enables display of backtrace and register information for BUGs and WARNs in kernel space. Verbose logging would assist an attacker in discovering attack vectors.',
+    'CONFIG_SYSCTL_SYSCALL': 'Enables sysctl to read and write kernel parameters. Use of deprecated and unmaintained features is not recommended.',
+    'CONFIG_MODULE_UNLOAD': 'Enables the ability to unload a kernel module. Allowing module unloading enables the attacker to disable security modules.',
+    'CONFIG_MODULE_FORCE_LOAD': 'Enables forced loading of modules without version information. Providing an attacker with the ability to force load a module assists in discovering attack vectors.',
+    'CONFIG_DEVMEM': 'Enables mem device, which provides access to physical memory. Providing a view into physical memory would assist an attacker in discovering attack vectors.',
+    'CONFIG_COREDUMP': 'Enables support for performing core dumps. Providing core dumps would assist an attacker in discovering attack vectors.',
+    'CONFIG_CROSS_MEMORY_ATTACH': 'Enables cross-process virtual memory access. Providing virtual memory access to and from a hostile process would assist an attacker in discovering attack vectors.',
+    'CONFIG_UNIX_DIAG': 'Enables support for socket monitoring interface. Allows the attacker to inspect shared file descriptors on Unix Domain sockets or traffic on \'localhost\'.',
+    'CONFIG_CHECKPOINT_RESTORE': 'Enables the checkpoint/restore service which can freeze and migrate processes. Providing a method for manipulating process state would assist an attacker in discovering attack vectors.',
+    'CONFIG_PANIC_ON_OOPS': 'Enables conversion of kernel OOPs to PANIC. When fuzzing the kernel or attempting kernel exploits, attackers are likely to trigger kernel OOPSes. Setting the behavior on OOPS to PANIC can impede their progress.',
+    'CONFIG_PACKET_DIAG': 'Enables support for socket monitoring interface. Allows the attacker to inspect shared file descriptors on Unix Domain sockets or traffic on \'localhost\'.',
+    'CONFIG_FW_LOADER_USER_HELPER': 'Enables the invocation of user-helper (e.g. udev) for loading firmware files as a fallback after the direct file loading in kernel fails. Providing firmware auto loader functionality would assist an attacker in discovering attack vectors.',
+    'CONFIG_BPF_JIT': 'Enables Berkeley Packet Filter filtering capabilities. The BPF JIT can be used to create kernel-payloads from firewall table rules which assist an attacker in discovering attack vectors.',
+    'CONFIG_USELIB': 'Enables the uselib syscall. The uselib system call has no valid use in any libc6 or uclibc system. Legacy features would assist an attacker in discovering attack vectors.',
+    'CONFIG_CC_STACKPROTECTOR': 'Enables the stack protector GCC feature which defends against stack-based buffer overflows',
+    'CONFIG_KEXEC': 'Enables the ability to shutdown your current kernel, and start another one. If enabled, this can be used as a way to bypass signed kernels.',
+    'CONFIG_PROC_KCORE': 'Enables access to a kernel core dump from userspace. Providing access to core dumps of the kernel would assist an attacker in discovering attack vectors.',
+    'CONFIG_SECURITY_DMESG_RESTRICT': 'Enables restrictions on unprivileged users reading the kernel syslog via dmesg(8). Unrestricted access to kernel syslogs would assist an attacker in discovering attack vectors.',
+    'CONFIG_DEBUG_STACKOVERFLOW': 'Enables messages to be printed if free stack space drops below a certain limit. Leaking information about resources used by the kernel would assist an attacker in discovering attack vectors.',
+    'CONFIG_DEBUG_STRICT_USER_COPY_CHECKS': 'Converts a certain set of sanity checks for user copy operations into compile time failures. The copy_from_user() etc checks help test if there are sufficient security checks on the length argument of the copy operation by having gcc prove that the argument is within bounds.',
+    'CONFIG_ARCH_HAS_DEBUG_STRICT_USER_COPY_CHECKS': 'Required to enable DEBUG_STRICT_USER_COPY_CHECKS, but alone does not provide security.',
+    'CONFIG_IKCONFIG_PROC': 'Enables access to the kernel config through /proc/config.gz. Leaking the kernel configuration would assist an attacker in discovering attack vectors.',
+    'CONFIG_RANDOMIZE_BASE': 'Enables Kernel Address Space Layout randomization (kASLR). This hinders some types of security attacks by making it more difficult for an attacker to predict target addresses.',
+    'CONFIG_DEBUG_RODATA': 'Sets kernel text and rodata sections as read-only and write-protected. This guards against malicious attempts to change the kernel\'s executable code.',
+    'CONFIG_STRICT_DEVMEM': 'Enables restriction of userspace access to kernel memory. Failure to enable this option provides an immediate attack vector.',
+    'CONFIG_DEVKMEM': 'Enables kmem device, which direct maps kernel memory. Providing a view into kernel memory would assist an attacker in discovering attack vectors.',
+    'CONFIG_ARCH_BINFMT_ELF_RANDOMIZE_PIE': 'Enables randomization of PIE load address  for ELF binaries. This hinders some types of security attacks by making it more difficult for an attacker to predict target addresses.',
+    'CONFIG_DEBUG_KERNEL': 'Enables sysfs output intended to assist with debugging a kernel. The information output to sysfs would assist an attacker in discovering attack vectors.',
+    'CONFIG_DEBUG_FS': 'Enables the kernel debug filesystem. The kernel debug filesystem presents a lot of useful information and means of manipulation of the kernel to an attacker.',
+    'CONFIG_MODULE_SIG_FORCE': 'Enables validation of module signature. Disabling this option enables an attacker to load unsigned modules.',
+}
diff --git a/meta-security/meta-security-isafw/lib/isafw/isaplugins/configs/kca/x86.py b/meta-security/meta-security-isafw/lib/isafw/isaplugins/configs/kca/x86.py
new file mode 100644
index 0000000..cbaddf8
--- /dev/null
+++ b/meta-security/meta-security-isafw/lib/isafw/isaplugins/configs/kca/x86.py
@@ -0,0 +1,38 @@
+############################################################################################
+# Kernel Hardening Configurations
+############################################################################################
+hardening_kco = {'CONFIG_DEFAULT_MMAP_MIN_ADDR': 'not set',
+                 'CONFIG_RANDOMIZE_BASE_MAX_OFFSET': 'not set',
+                 'CONFIG_X86_INTEL_MPX': 'not set',
+                 'CONFIG_X86_MSR': 'not set'
+                 }
+hardening_kco_ref = {'CONFIG_DEFAULT_MMAP_MIN_ADDR': '65536',  # x86 specific
+                     'CONFIG_RANDOMIZE_BASE_MAX_OFFSET': '0x20000000,0x40000000',  # x86 specific
+                     'CONFIG_X86_INTEL_MPX': 'y',  # x86 and certain HW variants specific
+                     'CONFIG_X86_MSR': 'not set'
+                     }
+############################################################################################
+# Keys Kernel Configuration
+############################################################################################
+keys_kco = {}
+keys_kco_ref = {}
+############################################################################################
+# Security Kernel Configuration
+############################################################################################
+security_kco = {'CONFIG_LSM_MMAP_MIN_ADDR': 'not set',
+                'CONFIG_INTEL_TXT': 'not set'}
+security_kco_ref = {'CONFIG_LSM_MMAP_MIN_ADDR': '65536',  # x86 specific
+                    'CONFIG_INTEL_TXT': 'y'}
+############################################################################################
+# Integrity Kernel Configuration
+############################################################################################
+integrity_kco = {}
+integrity_kco_ref = {}
+############################################################################################
+# Comments
+############################################################################################
+comments = {'CONFIG_DEFAULT_MMAP_MIN_ADDR': 'Defines the portion of low virtual memory that should be protected from userspace allocation. Keeping a user from writing to low pages can help reduce the impact of kernel NULL pointer bugs.',
+            'CONFIG_RANDOMIZE_BASE_MAX_OFFSET': 'Defines the maximal offset in bytes that will be applied to the kernel when kernel Address Space Layout Randomization (kASLR) is active.',
+            'CONFIG_X86_INTEL_MPX': 'Enables MPX hardware features that can be used with compiler-instrumented code to check memory references. It is designed to detect buffer overflow or underflow bugs.',
+            'CONFIG_X86_MSR': 'Enables privileged processes access to the x86 Model-Specific Registers (MSRs). MSR accesses are directed to a specific CPU on multi-processor systems. This alone does not provide security.'
+            }
diff --git a/meta-security/meta-security-isafw/lib/isafw/isaplugins/configs/la/approved-non-osi b/meta-security/meta-security-isafw/lib/isafw/isaplugins/configs/la/approved-non-osi
new file mode 100644
index 0000000..5e7a69f
--- /dev/null
+++ b/meta-security/meta-security-isafw/lib/isafw/isaplugins/configs/la/approved-non-osi
@@ -0,0 +1,43 @@
+Artistic-1.0-perl
+BSD-2-Clause-FreeBSD
+BSD-3-Clause-Clear
+BSD-4-Clause
+BSD-4-Clause-UC
+bzip2-1.0.5
+bzip2-1.0.6
+CC0-1.0
+CC-BY-SA-3.0
+ErlPL-1.1
+FTL
+GFDL-1.1
+GFDL-1.1+
+GFDL-1.2
+GFDL-1.2+
+GFDL-1.3
+GFDL-1.3+
+GPL-1.0
+GPL-1.0+
+ICU
+IJG
+Libpng
+libtiff
+MIT-feh
+MIT-Opengroup
+mpich2
+Muddy-MIT
+OFL-1.0
+OLDAP-2.0.1
+OLDAP-2.8
+OpenSSL
+PHP-3.01
+Qhull
+Ruby
+SGI-B-2.0
+TCL
+Vim
+X11
+Zend-2.0
+zlib-acknowledgement
+ZPL-1.1
+ZPL-2.0
+ZPL-2.1
diff --git a/meta-security/meta-security-isafw/lib/isafw/isaplugins/configs/la/exceptions b/meta-security/meta-security-isafw/lib/isafw/isaplugins/configs/la/exceptions
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/meta-security/meta-security-isafw/lib/isafw/isaplugins/configs/la/exceptions
diff --git a/meta-security/meta-security-isafw/lib/isafw/isaplugins/configs/la/licenses b/meta-security/meta-security-isafw/lib/isafw/isaplugins/configs/la/licenses
new file mode 100644
index 0000000..8fff0b1
--- /dev/null
+++ b/meta-security/meta-security-isafw/lib/isafw/isaplugins/configs/la/licenses
@@ -0,0 +1,105 @@
+AFL-1.1
+AFL-1.2
+AFL-2.0
+AFL-2.1
+AFL-3.0
+APL-1.0
+Apache-1.1
+Apache-2.0
+APSL-1.0
+APSL-1.1
+APSL-1.2
+APSL-2.0
+Artistic-1.0
+Artistic-1.0-Perl
+Artistic-1.0-cl8
+Artistic-2.0
+AAL
+BSL-1.0
+BSD-2-Clause
+BSD-3-Clause
+CNRI-Python
+CDDL-1.0
+CPAL-1.0
+CPL-1.0
+CATOSL-1.1
+CUA-OPL-1.0
+EPL-1.0
+ECL-1.0
+ECL-2.0
+EFL-1.0
+EFL-2.0
+Entessa
+EUDatagrid
+EUPL-1.1
+Fair
+Frameworx-1.0
+AGPL-3.0
+GPL-2.0
+GPL-2.0+
+GPL-2.0-with-autoconf-exception
+GPL-2.0-with-bison-exception
+GPL-2.0-with-classpath-exception
+GPL-2.0-with-font-exception
+GPL-2.0-with-GCC-exception
+GPL-3.0
+GPL-3.0+
+GPL-3.0-with-autoconf-exception
+GPL-3.0-with-GCC-exception
+LGPL-2.1
+LGPL-2.1+
+LGPL-3.0
+LGPL-3.0+
+LGPL-2.0
+LGPL-2.0+
+HPND
+IPL-1.0
+Intel
+IPA
+ISC
+LPPL-1.3c
+LPL-1.02
+LPL-1.0
+MS-PL
+MS-RL
+MirOS
+MIT
+Motosoto
+MPL-1.0
+MPL-1.1
+MPL-2.0
+MPL-2.0-no-copyleft-exception
+Multics
+NASA-1.3
+Naumen
+NGPL
+Nokia
+NPOSL-3.0
+NTP
+OCLC-2.0
+OGTSL
+OSL-1.0
+OSL-2.0
+OSL-2.1
+OSL-3.0
+PHP-3.0
+PostgreSQL
+Python-2.0
+QPL-1.0
+RPSL-1.0
+RPL-1.1
+RPL-1.5
+RSCPL
+OFL-1.1
+SimPL-2.0
+Sleepycat
+SISSL
+SPL-1.0
+Watcom-1.0
+NCSA
+VSL-1.0
+W3C
+WXwindows
+Xnet
+Zlib
+ZPL-2.0
diff --git a/meta-security/meta-security-isafw/lib/isafw/isaplugins/configs/la/violations b/meta-security/meta-security-isafw/lib/isafw/isaplugins/configs/la/violations
new file mode 100644
index 0000000..5da203b
--- /dev/null
+++ b/meta-security/meta-security-isafw/lib/isafw/isaplugins/configs/la/violations
@@ -0,0 +1,7 @@
+GPL-3.0
+GPL-3.0+
+GPL-3.0-with-autoconf-exception
+GPL-3.0-with-GCC-exception
+LGPL-3.0
+LGPL-3.0+
+
diff --git a/meta-security/meta-security-isafw/recipes-devtools/checksec/checksec_1.5-1.bb b/meta-security/meta-security-isafw/recipes-devtools/checksec/checksec_1.5-1.bb
new file mode 100644
index 0000000..247ec76
--- /dev/null
+++ b/meta-security/meta-security-isafw/recipes-devtools/checksec/checksec_1.5-1.bb
@@ -0,0 +1,25 @@
+SUMMARY = "Checksec tool"
+DESCRIPTION = "The checksec.sh script is designed to test what standard Linux OS and PaX security features are being used."
+SECTION = "security"
+LICENSE = "BSD-3-Clause"
+HOMEPAGE="http://www.trapkit.de/tools/checksec.html"
+
+LIC_FILES_CHKSUM = "file://checksec-${PV}.sh;beginline=3;endline=34;md5=6dab14470bfdf12634b866dbdd7a04b0"
+
+SRC_URI = "http://www.trapkit.de/tools/checksec.sh;downloadfilename=checksec-${PV}.sh"
+
+SRC_URI[md5sum] = "57cc3fbbbe48e8ebd4672c569954374d"
+SRC_URI[sha256sum] = "05822cd8668589038d20650faa0e56f740911d8ad06f7005b3d12a5c76591b90"
+
+
+S = "${WORKDIR}"
+
+do_install() {
+    install -d ${D}${bindir}
+    install -m 0755 ${WORKDIR}/checksec-${PV}.sh    ${D}${bindir}/checksec.sh
+    sed -i 's/\r//' ${D}${bindir}/checksec.sh
+}
+
+RDEPENDS_${PN} = "bash binutils"
+
+BBCLASSEXTEND = "native"
diff --git a/meta-security/meta-security-isafw/classes/isafw.bbclass b/meta-security/meta-security-isafw/classes/isafw.bbclass
new file mode 100644
index 0000000..146acdf
--- /dev/null
+++ b/meta-security/meta-security-isafw/classes/isafw.bbclass
@@ -0,0 +1,318 @@
+# Security scanning class
+#
+# Based in part on buildhistory.bbclass which was in turn based on
+# testlab.bbclass and packagehistory.bbclass
+#
+# Copyright (C) 2011-2015 Intel Corporation
+# Copyright (C) 2007-2011 Koen Kooi <koen@openembedded.org>
+#
+
+LICENSE = "MIT"
+
+require conf/distro/include/distro_alias.inc
+
+ISAFW_WORKDIR = "${WORKDIR}/isafw"
+ISAFW_REPORTDIR ?= "${LOG_DIR}/isafw-report"
+ISAFW_LOGDIR ?= "${LOG_DIR}/isafw-logs"
+
+ISAFW_PLUGINS_WHITELIST ?= ""
+ISAFW_PLUGINS_BLACKLIST ?= ""
+
+ISAFW_LA_PLUGIN_IMAGE_WHITELIST ?= ""
+ISAFW_LA_PLUGIN_IMAGE_BLACKLIST ?= ""
+
+# First, code to handle scanning each recipe that goes into the build
+
+do_analysesource[nostamp] = "1"
+do_analysesource[cleandirs] = "${ISAFW_WORKDIR}"
+
+python do_analysesource() {
+    from isafw import isafw
+
+    imageSecurityAnalyser = isafw_init(isafw, d)
+
+    if not d.getVar('SRC_URI', True):
+        # Recipe didn't fetch any sources, nothing to do here I assume?
+        return
+
+    recipe = isafw.ISA_package()
+    recipe.name = d.getVar('BPN', True)
+    recipe.version = d.getVar('PV', True)
+    recipe.version = recipe.version.split('+git', 1)[0]
+
+    for p in d.getVar('PACKAGES', True).split():
+        license = str(d.getVar('LICENSE_' + p, True))
+        if license == "None":
+            license = d.getVar('LICENSE', True)
+        license = license.replace("(", "")
+        license = license.replace(")", "")
+        licenses = license.split()
+        while '|' in licenses:
+            licenses.remove('|')
+        while '&' in licenses:
+            licenses.remove('&')
+        for l in licenses:
+            recipe.licenses.append(p + ":" + canonical_license(d, l))
+
+    aliases = d.getVar('DISTRO_PN_ALIAS', True)
+    if aliases:
+        recipe.aliases = aliases.split()
+        faliases = []
+        for a in recipe.aliases:
+            if (a != "OSPDT") and (not (a.startswith("upstream="))):
+                faliases.append(a.split('=', 1)[-1])
+        # remove possible duplicates in pkg names
+        faliases = list(set(faliases))
+        recipe.aliases = faliases
+
+    for patch in src_patches(d):
+        _,_,local,_,_,_=bb.fetch.decodeurl(patch)
+        recipe.patch_files.append(os.path.basename(local))
+    if (not recipe.patch_files) :
+        recipe.patch_files.append("None")
+
+    # Pass the recipe object to the security framework
+    bb.debug(1, '%s: analyse sources' % (d.getVar('PN', True)))
+    imageSecurityAnalyser.process_package(recipe)
+
+    return
+}
+
+addtask do_analysesource before do_build
+
+# This task intended to be called after default task to process reports
+
+PR_ORIG_TASK := "${BB_DEFAULT_TASK}"
+addhandler process_reports_handler
+process_reports_handler[eventmask] = "bb.event.BuildCompleted"
+
+python process_reports_handler() {
+    from isafw import isafw
+
+    dd = d.createCopy()
+    target_sysroot = dd.expand("${STAGING_DIR}/${MACHINE}")
+    native_sysroot = dd.expand("${STAGING_DIR}/${BUILD_ARCH}")
+    staging_populate_sysroot_dir(target_sysroot, native_sysroot, True, dd)
+ 
+    dd.setVar("STAGING_DIR_NATIVE", native_sysroot)
+    savedenv = os.environ.copy()
+    os.environ["PATH"] = dd.getVar("PATH", True)
+
+    imageSecurityAnalyser = isafw_init(isafw, dd)
+    bb.debug(1, 'isafw: process reports')
+    imageSecurityAnalyser.process_report()
+
+    os.environ["PATH"] = savedenv["PATH"]
+}
+
+do_build[depends] += "cve-update-db-native:do_populate_cve_db ca-certificates-native:do_populate_sysroot"
+do_build[depends] += "python3-lxml-native:do_populate_sysroot"
+
+# These tasks are intended to be called directly by the user (e.g. bitbake -c)
+
+addtask do_analyse_sources after do_analysesource
+do_analyse_sources[doc] = "Produce ISAFW reports based on given package without building it"
+do_analyse_sources[nostamp] = "1"
+do_analyse_sources() {
+	:
+}
+
+addtask do_analyse_sources_all after do_analysesource
+do_analyse_sources_all[doc] = "Produce ISAFW reports for all packages in given target without building them"
+do_analyse_sources_all[recrdeptask] = "do_analyse_sources_all do_analysesource"
+do_analyse_sources_all[recideptask] = "do_${PR_ORIG_TASK}"
+do_analyse_sources_all[nostamp] = "1"
+do_analyse_sources_all() {
+	:
+}
+
+python() {
+    # We probably don't need to scan these
+    if bb.data.inherits_class('native', d) or \
+       bb.data.inherits_class('nativesdk', d) or \
+       bb.data.inherits_class('cross', d) or \
+       bb.data.inherits_class('crosssdk', d) or \
+       bb.data.inherits_class('cross-canadian', d) or \
+       bb.data.inherits_class('packagegroup', d) or \
+       bb.data.inherits_class('image', d):
+        bb.build.deltask('do_analysesource', d)
+}
+
+fakeroot python do_analyse_image() {
+
+    from isafw import isafw
+
+    imageSecurityAnalyser = isafw_init(isafw, d)
+
+    # Directory where the image's entire contents can be examined
+    rootfsdir = d.getVar('IMAGE_ROOTFS', True)
+
+    imagebasename = d.getVar('IMAGE_BASENAME', True)
+
+    kernelconf = d.getVar('STAGING_KERNEL_BUILDDIR', True) + "/.config"
+    if os.path.exists(kernelconf):
+        kernel = isafw.ISA_kernel()
+        kernel.img_name = imagebasename
+        kernel.path_to_config = kernelconf
+        bb.debug(1, 'do kernel conf analysis on %s' % kernelconf)
+        imageSecurityAnalyser.process_kernel(kernel)
+    else:
+        bb.debug(1, 'Kernel configuration file is missing. Not performing analysis on %s' % kernelconf)
+
+    pkglist = manifest2pkglist(d)
+
+    imagebasename = d.getVar('IMAGE_BASENAME', True)
+
+    if (pkglist):
+        pkg_list = isafw.ISA_pkg_list()
+        pkg_list.img_name = imagebasename
+        pkg_list.path_to_list = pkglist
+        bb.debug(1, 'do pkg list analysis on %s' % pkglist)
+        imageSecurityAnalyser.process_pkg_list(pkg_list)
+
+    fs = isafw.ISA_filesystem()
+    fs.img_name = imagebasename
+    fs.path_to_fs = rootfsdir
+
+    bb.debug(1, 'do image analysis on %s' % rootfsdir)
+    imageSecurityAnalyser.process_filesystem(fs)
+}
+
+do_rootfs[depends] += "checksec-native:do_populate_sysroot ca-certificates-native:do_populate_sysroot"
+do_rootfs[depends] += "prelink-native:do_populate_sysroot"
+do_rootfs[depends] += "python3-lxml-native:do_populate_sysroot"
+
+isafw_init[vardepsexclude] = "DATETIME"
+def isafw_init(isafw, d):
+    import re, errno
+
+    isafw_config = isafw.ISA_config()
+    # Override the builtin default in curl-native (used by cve-update-db-native)
+    # because that default is a path that may not be valid: when curl-native gets
+    # installed from sstate, we end up with the sysroot path as it was on the
+    # original build host, which is not necessarily the same path used now
+    # (see https://bugzilla.yoctoproject.org/show_bug.cgi?id=9883).
+    #
+    # Can't use ${sysconfdir} here, it already includes ${STAGING_DIR_NATIVE}
+    # when the current recipe is native.
+    isafw_config.cacert = d.expand('${STAGING_DIR_NATIVE}/etc/ssl/certs/ca-certificates.crt')
+
+    bb.utils.export_proxies(d)
+
+    isafw_config.machine = d.getVar('MACHINE', True)
+    isafw_config.timestamp = d.getVar('DATETIME', True)
+    isafw_config.reportdir = d.getVar('ISAFW_REPORTDIR', True) + "_" + isafw_config.timestamp
+    if not os.path.exists(os.path.dirname(isafw_config.reportdir + "/test")):
+        try:
+            os.makedirs(os.path.dirname(isafw_config.reportdir + "/test"))
+        except OSError as exc:
+            if exc.errno == errno.EEXIST and os.path.isdir(isafw_config.reportdir):
+                pass
+            else: raise
+    isafw_config.logdir = d.getVar('ISAFW_LOGDIR', True)
+    # Adding support for arm
+    # TODO: Add support for other platforms
+    isafw_config.arch =  d.getVar('TARGET_ARCH', True)
+    if ( isafw_config.arch != "arm" ):
+        isafw_config.arch = "x86"
+
+    whitelist = d.getVar('ISAFW_PLUGINS_WHITELIST', True)
+    blacklist = d.getVar('ISAFW_PLUGINS_BLACKLIST', True)
+    if whitelist:
+        isafw_config.plugin_whitelist = re.split(r'[,\s]*', whitelist)
+    if blacklist:
+        isafw_config.plugin_blacklist = re.split(r'[,\s]*', blacklist)
+
+    la_image_whitelist = d.getVar('ISAFW_LA_PLUGIN_IMAGE_WHITELIST', True)
+    la_image_blacklist = d.getVar('ISAFW_LA_PLUGIN_IMAGE_BLACKLIST', True)
+    if la_image_whitelist:
+        isafw_config.la_plugin_image_whitelist = re.split(r'[,\s]*', la_image_whitelist)
+    if la_image_blacklist:
+        isafw_config.la_plugin_image_blacklist = re.split(r'[,\s]*', la_image_blacklist)
+
+    return isafw.ISA(isafw_config)
+
+# based on toaster.bbclass _toaster_load_pkgdatafile function
+def binary2source(dirpath, filepath):
+    import re
+    originPkg = ""
+    with open(os.path.join(dirpath, filepath), "r") as fin:
+        for line in fin:
+            try:
+                kn, kv = line.strip().split(": ", 1)
+                m = re.match(r"^PKG_([^A-Z:]*)", kn)
+                if m:
+                    originPkg = str(m.group(1))
+            except ValueError:
+                pass    # ignore lines without valid key: value pairs:
+    if not originPkg:
+        originPkg = "UNKNOWN"
+    return originPkg
+
+manifest2pkglist[vardepsexclude] = "DATETIME"
+def manifest2pkglist(d):
+    import glob
+
+    manifest_file = d.getVar('IMAGE_MANIFEST', True)
+    imagebasename = d.getVar('IMAGE_BASENAME', True)
+    reportdir = d.getVar('ISAFW_REPORTDIR', True) + "_" + d.getVar('DATETIME', True)
+    pkgdata_dir = d.getVar("PKGDATA_DIR", True)
+    rr_dir = "%s/runtime-reverse/" % pkgdata_dir
+    pkglist = reportdir + "/pkglist"
+
+    with open(pkglist, 'a') as foutput:
+        foutput.write("Packages for image " + imagebasename + "\n")
+        try:
+            with open(manifest_file, 'r') as finput:
+                for line in finput:
+                    items = line.split()
+                    if items and (len(items) >= 3):
+                        pkgnames = map(os.path.basename, glob.glob(os.path.join(rr_dir, items[0])))
+                        for pkgname in pkgnames:
+                            originPkg = binary2source(rr_dir, pkgname)
+                            version = items[2]
+                            if not version:
+                                version = "undetermined"
+                            foutput.write(pkgname + " " + version + " " + originPkg + "\n")
+        except IOError:
+            bb.debug(1, 'isafw: manifest file not found. Skip pkg list analysis')
+            return "";
+
+
+    return pkglist
+
+# NOTE: by the time IMAGE_POSTPROCESS_COMMAND items are called, the image
+# has been stripped of the package manager database (if runtime package management
+# is not enabled, i.e. 'package-management' is not in IMAGE_FEATURES). If you
+# do want to be using the package manager to operate on the image contents, you'll
+# need to call your function from ROOTFS_POSTINSTALL_COMMAND or
+# ROOTFS_POSTUNINSTALL_COMMAND instead - however if you do that you should then be
+# aware that what you'll be looking at isn't exactly what you will see in the image
+# at runtime (there will be other postprocessing functions called after yours).
+#
+# do_analyse_image does not need the package manager database. Making it
+# a separate task instead of a IMAGE_POSTPROCESS_COMMAND has several
+# advantages:
+# - all other image commands are guaranteed to have completed
+# - it can run in parallel to other tasks which depend on the complete
+#   image, instead of blocking those other tasks
+# - meta-swupd helper images do not need to be analysed and won't be
+#   because nothing depends on their "do_build" task, only on
+#   do_image_complete
+python () {
+    if bb.data.inherits_class('image', d):
+        bb.build.addtask('do_analyse_image', 'do_build', 'do_image_complete', d)
+}
+
+python isafwreport_handler () {
+
+    import shutil
+
+    logdir = e.data.getVar('ISAFW_LOGDIR', True)
+    if os.path.exists(os.path.dirname(logdir+"/test")):
+        shutil.rmtree(logdir)
+    os.makedirs(os.path.dirname(logdir+"/test"))
+
+}
+addhandler isafwreport_handler
+isafwreport_handler[eventmask] = "bb.event.BuildStarted"
diff --git a/meta-security/meta-tpm/conf/layer.conf b/meta-security/meta-tpm/conf/layer.conf
index 175eba8..c3372c7 100644
--- a/meta-security/meta-tpm/conf/layer.conf
+++ b/meta-security/meta-tpm/conf/layer.conf
@@ -8,7 +8,7 @@
 BBFILE_PATTERN_tpm-layer = "^${LAYERDIR}/"
 BBFILE_PRIORITY_tpm-layer = "10"
 
-LAYERSERIES_COMPAT_tpm-layer = "zeus"
+LAYERSERIES_COMPAT_tpm-layer = "dunfell"
 
 LAYERDEPENDS_tpm-layer = " \
     core \
diff --git a/meta-security/meta-tpm/recipes-kernel/linux/linux-yocto_4.%.bbappend b/meta-security/meta-tpm/recipes-kernel/linux/linux-yocto_5.%.bbappend
similarity index 100%
rename from meta-security/meta-tpm/recipes-kernel/linux/linux-yocto_4.%.bbappend
rename to meta-security/meta-tpm/recipes-kernel/linux/linux-yocto_5.%.bbappend
diff --git a/meta-security/meta-tpm/recipes-tpm/libtpm/libtpm_0.7.0.bb b/meta-security/meta-tpm/recipes-tpm/libtpm/libtpm_0.7.0.bb
index d9863fa..4588c8d 100644
--- a/meta-security/meta-tpm/recipes-tpm/libtpm/libtpm_0.7.0.bb
+++ b/meta-security/meta-tpm/recipes-tpm/libtpm/libtpm_0.7.0.bb
@@ -8,7 +8,7 @@
 PE = "1"
 
 S = "${WORKDIR}/git"
-inherit autotools-brokensep pkgconfig
+inherit autotools-brokensep pkgconfig perlnative
 
 PACKAGECONFIG ?= "openssl"
 PACKAGECONFIG[openssl] = "--with-openssl, --without-openssl, openssl"
diff --git a/meta-security/meta-tpm/recipes-tpm/swtpm/swtpm_0.2.0.bb b/meta-security/meta-tpm/recipes-tpm/swtpm/swtpm_0.2.0.bb
index f3a53dd..35c77c8 100644
--- a/meta-security/meta-tpm/recipes-tpm/swtpm/swtpm_0.2.0.bb
+++ b/meta-security/meta-tpm/recipes-tpm/swtpm/swtpm_0.2.0.bb
@@ -31,6 +31,7 @@
 PACKAGECONFIG[gnutls] = "--with-gnutls, --without-gnutls, gnutls"
 PACKAGECONFIG[selinux] = "--with-selinux, --without-selinux, libselinux"
 PACKAGECONFIG[cuse] = "--with-cuse, --without-cuse, fuse"
+PACKAGECONFIG[seccomp] = "--with-seccomp, --without-seccomp, libseccomp"
 
 EXTRA_OECONF += "--with-tss-user=${TSS_USER} --with-tss-group=${TSS_GROUP}"
 
diff --git a/meta-security/meta-tpm/recipes-tpm2/ibmswtpm2/files/remove_optimization.patch b/meta-security/meta-tpm/recipes-tpm2/ibmswtpm2/files/remove_optimization.patch
new file mode 100644
index 0000000..2919e2e
--- /dev/null
+++ b/meta-security/meta-tpm/recipes-tpm2/ibmswtpm2/files/remove_optimization.patch
@@ -0,0 +1,26 @@
+Allow recipe to override optimization.
+
+fixes:
+
+397 | #  warning _FORTIFY_SOURCE requires compiling with optimization (-O)
+|       |    ^~~~~~~
+| cc1: all warnings being treated as errors
+
+
+Upstream-Status: OE specific
+
+Signed-off-by: Armin Kuster <akuster808@gmail.com>
+
+Index: src/makefile
+===================================================================
+--- src.orig/makefile
++++ src/makefile
+@@ -43,7 +43,7 @@ CC = /usr/bin/gcc
+ CCFLAGS = -Wall  			\
+ 	-Wmissing-declarations -Wmissing-prototypes -Wnested-externs \
+ 	-Werror -Wsign-compare \
+-	 -c -ggdb -O0 			\
++	 -c -ggdb -O 			\
+ 	-DTPM_POSIX			\
+ 	-D_POSIX_			\
+ 	-DTPM_NUVOTON
diff --git a/meta-security/meta-tpm/recipes-tpm2/ibmswtpm2/ibmswtpm2_1332.bb b/meta-security/meta-tpm/recipes-tpm2/ibmswtpm2/ibmswtpm2_1332.bb
deleted file mode 100644
index a6068e6..0000000
--- a/meta-security/meta-tpm/recipes-tpm2/ibmswtpm2/ibmswtpm2_1332.bb
+++ /dev/null
@@ -1,24 +0,0 @@
-SUMMARY = "IBM's Software TPM 2.0"
-
-LICENSE = "BSD"
-SECTION = "securty/tpm"
-LIC_FILES_CHKSUM = "file://../LICENSE;md5=1e023f61454ac828b4aa1bc4293f7d5f"
-
-SRC_URI = "https://sourceforge.net/projects/ibmswtpm2/files/ibmtpm1332.tar.gz"
-SRC_URI[md5sum] = "0ab34a655b4e09812d7ada19746af4f9"
-SRC_URI[sha256sum] = "8e8193af3d11d9ff6a951dda8cd1f4693cb01934a8ad7876b84e92c6148ab0fd"
-
-DEPENDS = "openssl"
-
-S = "${WORKDIR}/src"
-
-LDFLAGS = "${LDFALGS}"
-
-do_compile () {
-   make CC='${CC}'
-}
-
-do_install () {
-   install -d ${D}/${bindir}
-   install -m 0755 tpm_server  ${D}/${bindir}
-}
diff --git a/meta-security/meta-tpm/recipes-tpm2/ibmswtpm2/ibmswtpm2_1563.bb b/meta-security/meta-tpm/recipes-tpm2/ibmswtpm2/ibmswtpm2_1563.bb
new file mode 100644
index 0000000..8054226
--- /dev/null
+++ b/meta-security/meta-tpm/recipes-tpm2/ibmswtpm2/ibmswtpm2_1563.bb
@@ -0,0 +1,27 @@
+SUMMARY = "IBM's Software TPM 2.0"
+LICENSE = "BSD"
+SECTION = "security/tpm"
+LIC_FILES_CHKSUM = "file://../LICENSE;md5=1e023f61454ac828b4aa1bc4293f7d5f"
+
+DEPENDS = "openssl"
+
+SRC_URI = "https://sourceforge.net/projects/ibmswtpm2/files/ibmtpm${PV}.tar.gz \
+           file://remove_optimization.patch \
+           "
+SRC_URI[md5sum] = "13013612b3a13dc935fefe1a5684179c"
+SRC_URI[sha256sum] = "fc3a17f8315c1f47670764f2384943afc0d3ba1e9a0422dacb08d455733bd1e9"
+SRC_URI[sha1sum] = "a2a5335024a2edc1739f08b99e716fa355be627d"
+SRC_URI[sha384sum] = "b1f278acabe2198aa79c0fe8aa0182733fe701336cbf54a88058be0b574cab768f59f9315882d0e689e634678d05b79f"
+SRC_URI[sha512sum] = "ff0b9e5f0d0070eb572b23641f7a0e70a8bc65cbf4b59dca1778be3bb014124011221a492147d4c492584e87af23e2f842ca6307641b3919f67a3f27f09312c0"
+
+S = "${WORKDIR}/src"
+
+do_compile () {
+   make CC='${CC}'
+}
+
+do_install () {
+   install -d ${D}/${bindir}
+   install -m 0755 tpm_server  ${D}/${bindir}
+}
+
diff --git a/meta-security/meta-tpm/recipes-tpm2/tpm2-tcti-uefi/files/fix_header_file.patch b/meta-security/meta-tpm/recipes-tpm2/tpm2-tcti-uefi/files/fix_header_file.patch
new file mode 100644
index 0000000..fc730e1
--- /dev/null
+++ b/meta-security/meta-tpm/recipes-tpm2/tpm2-tcti-uefi/files/fix_header_file.patch
@@ -0,0 +1,25 @@
+Error building for i386 target in cross env
+
+#include <efi/x86_64/efibind.h>
+
+ARCH is host arch, not target arch
+
+Upstream-Status: Submitted 
+
+Signed-off-by: Armin Kuster <akuster808@gmail.com>
+Index: git/src/uefi-types.h
+===================================================================
+--- git.orig/src/uefi-types.h
++++ git/src/uefi-types.h
+@@ -3,9 +3,9 @@
+ #define UEFI_TYPES_H
+ 
+ #ifndef EDK2_BUILD
+-#if ARCH == x86_64
++#if defined(__x86_64__)
+ #include <efi/x86_64/efibind.h>
+-#elif ARCH == ia32
++#elif defined(__i386__)
+ #include <efi/ia32/efibind.h>
+ #else
+ #error "Unsupported ARCH."
diff --git a/meta-security/meta-tpm/recipes-tpm2/tpm2-tcti-uefi/files/tpm2-get-caps-fixed.patch b/meta-security/meta-tpm/recipes-tpm2/tpm2-tcti-uefi/files/tpm2-get-caps-fixed.patch
new file mode 100644
index 0000000..bc70913
--- /dev/null
+++ b/meta-security/meta-tpm/recipes-tpm2/tpm2-tcti-uefi/files/tpm2-get-caps-fixed.patch
@@ -0,0 +1,23 @@
+Fix defined to match tpm2-tools 4.1.1
+
+Upstream-Status: Submitted https://github.com/tpm2-software/tpm2-tcti-uefi/pull/81
+Signed-off-by: Armin Kuster <akuster808@gmail.com>
+
+Index: git/example/tpm2-get-caps-fixed.c
+===================================================================
+--- git.orig/example/tpm2-get-caps-fixed.c
++++ git/example/tpm2-get-caps-fixed.c
+@@ -140,11 +140,11 @@ dump_tpm_properties_fixed (TPMS_TAGGED_P
+             Print (L"TPM2_PT_INPUT_BUFFER:\n"
+                     "  value: 0x%X\n", value);
+             break;
+-        case TPM2_PT_HR_TRANSIENT_MIN:
++        case TPM2_PT_TPM2_HR_TRANSIENT_MIN:
+             Print (L"TPM2_PT_TPM2_HR_TRANSIENT_MIN:\n"
+                     "  value: 0x%X\n", value);
+             break;
+-        case TPM2_PT_HR_PERSISTENT_MIN:
++        case TPM2_PT_TPM2_HR_PERSISTENT_MIN:
+             Print (L"TPM2_PT_TPM2_HR_PERSISTENT_MIN:\n"
+                     "  value: 0x%X\n", value);
+             break;
diff --git a/meta-security/meta-tpm/recipes-tpm2/tpm2-tcti-uefi/tpm2-tcti-uefi/0001-configure.ac-stop-inserting-host-directories-into-co.patch b/meta-security/meta-tpm/recipes-tpm2/tpm2-tcti-uefi/tpm2-tcti-uefi/0001-configure.ac-stop-inserting-host-directories-into-co.patch
index 3b54ddd..b3f2287 100644
--- a/meta-security/meta-tpm/recipes-tpm2/tpm2-tcti-uefi/tpm2-tcti-uefi/0001-configure.ac-stop-inserting-host-directories-into-co.patch
+++ b/meta-security/meta-tpm/recipes-tpm2/tpm2-tcti-uefi/tpm2-tcti-uefi/0001-configure.ac-stop-inserting-host-directories-into-co.patch
@@ -16,7 +16,18 @@
 ===================================================================
 --- git.orig/configure.ac
 +++ git/configure.ac
-@@ -81,7 +81,7 @@ AC_ARG_WITH([efi-lds],
+@@ -70,10 +70,6 @@ EXTRA_CFLAGS+="-I${with_efi_includedir}
+ # compiler flags / search path
+ CFLAGS_TMP="$CFLAGS"
+ CFLAGS="$CFLAGS $EXTRA_CFLAGS"
+-AC_CHECK_HEADERS([efi.h efilib.h],
+-                 [],
+-                 [AC_MSG_ERROR([Missing gnu-efi headers.])],
+-                 [#include <efi.h>])
+ CFLAGS="$CFLAGS_TMP"
+ 
+ # path to linker script from gnu-efi
+@@ -81,7 +77,7 @@ AC_ARG_WITH([efi-lds],
              AS_HELP_STRING([--with-efi-lds=LDS_PATH],[Path to gnu-efi lds file.]),
              [],
              [with_efi_lds="/usr/lib/elf_${ARCH}_efi.lds"])
diff --git a/meta-security/meta-tpm/recipes-tpm2/tpm2-tcti-uefi/tpm2-tcti-uefi_0.9.9.bb b/meta-security/meta-tpm/recipes-tpm2/tpm2-tcti-uefi/tpm2-tcti-uefi_0.9.9.bb
index f4918ec..67b36b7 100644
--- a/meta-security/meta-tpm/recipes-tpm2/tpm2-tcti-uefi/tpm2-tcti-uefi_0.9.9.bb
+++ b/meta-security/meta-tpm/recipes-tpm2/tpm2-tcti-uefi/tpm2-tcti-uefi_0.9.9.bb
@@ -2,13 +2,15 @@
 SECTION = "security/tpm"
 LICENSE = "BSD-2-Clause"
 LIC_FILES_CHKSUM = "file://LICENSE;md5=500b2e742befc3da00684d8a1d5fd9da"
-DEPENDS = "libtss2-dev gnu-efi-native gnu-efi pkgconfig autoconf-archive-native"
+DEPENDS = "libtss2-dev libtss2-mu-dev gnu-efi-native gnu-efi pkgconfig autoconf-archive-native"
 
 SRC_URI = "git://github.com/tpm2-software/tpm2-tcti-uefi.git \
            file://configure_oe_fixup.patch \
            file://0001-configure.ac-stop-inserting-host-directories-into-co.patch \
+           file://tpm2-get-caps-fixed.patch \
+           file://fix_header_file.patch \
           "
-SRCREV = "431c85f45dcdca5da003ed47c6e9814282476938"
+SRCREV = "0241b08f069f0fdb3612f5c1b938144dbe9be811"
 
 S = "${WORKDIR}/git"
 
@@ -16,6 +18,17 @@
 
 EFIDIR ?= "/EFI/BOOT"
 
+EFI_ARCH_x86 = "ia32"
+EFI_ARCH_x86-64 = "x86_64"
+
+CFLAGS_append = " -I${STAGING_INCDIR}/efi -I${STAGING_INCDIR}/efi/${EFI_ARCH}"
+
+EXTRA_OECONF_append = " \
+    --with-efi-includedir=${STAGING_INCDIR} \
+    --with-efi-crt0=${STAGING_LIBDIR}/crt0-efi-${EFI_ARCH}.o \
+    --with-efi-lds=${STAGING_LIBDIR}/elf_${EFI_ARCH}_efi.lds \
+"
+
 do_compile_append() {
 	oe_runmake example
 }
@@ -25,15 +38,8 @@
 	install -m 0755 "${B}"/example/*.efi "${D}${EFIDIR}"
 }
 
-EFI_ARCH_x86 = "ia32"
-EFI_ARCH_x86-64 = "x86_64"
-
 COMPATIBLE_HOST = "(i.86|x86_64).*-linux"
-EXTRA_OECONF_append = "\
-    --with-efi-includedir=${STAGING_INCDIR}/efi \
-    --with-efi-crt0=${STAGING_LIBDIR_NATIVE}/crt0-efi-${EFI_ARCH}.o \
-    --with-efi-lds=${STAGING_LIBDIR_NATIVE}/elf_${EFI_ARCH}_efi.lds \
-"
-RDEPENDS_${PN} = "gnu-efi"
 
 FILES_${PN} += "${EFIDIR}"
+
+RDEPENDS_${PN} = "gnu-efi libtss2-mu"
diff --git a/meta-security/meta-tpm/recipes-tpm2/tpm2-tools/tpm2-tools_4.0.1.bb b/meta-security/meta-tpm/recipes-tpm2/tpm2-tools/tpm2-tools_4.0.1.bb
deleted file mode 100644
index 8f94972..0000000
--- a/meta-security/meta-tpm/recipes-tpm2/tpm2-tools/tpm2-tools_4.0.1.bb
+++ /dev/null
@@ -1,14 +0,0 @@
-SUMMARY = "Tools for TPM2."
-DESCRIPTION = "tpm2-tools"
-LICENSE = "BSD"
-LIC_FILES_CHKSUM = "file://LICENSE;md5=0eb1216e46938bd723098d93a23c3bcc"
-SECTION = "tpm"
-
-DEPENDS = "tpm2-abrmd tpm2-tss openssl curl autoconf-archive"
-
-SRC_URI = "https://github.com/tpm2-software/${BPN}/releases/download/${PV}/${BPN}-${PV}.tar.gz"
-
-SRC_URI[md5sum] = "071aa40bc8721700ea4ed19cc2fdeabf"
-SRC_URI[sha256sum] = "ccec3fca6370341a102c5c2ef1ddb4e5cd242bf1bbc6c51d969f77fc78ca67d1"
-
-inherit autotools pkgconfig bash-completion
diff --git a/meta-security/meta-tpm/recipes-tpm2/tpm2-tools/tpm2-tools_4.1.1.bb b/meta-security/meta-tpm/recipes-tpm2/tpm2-tools/tpm2-tools_4.1.1.bb
new file mode 100644
index 0000000..e90dcfe
--- /dev/null
+++ b/meta-security/meta-tpm/recipes-tpm2/tpm2-tools/tpm2-tools_4.1.1.bb
@@ -0,0 +1,17 @@
+SUMMARY = "Tools for TPM2."
+DESCRIPTION = "tpm2-tools"
+LICENSE = "BSD"
+LIC_FILES_CHKSUM = "file://LICENSE;md5=0eb1216e46938bd723098d93a23c3bcc"
+SECTION = "tpm"
+
+DEPENDS = "tpm2-abrmd tpm2-tss openssl curl autoconf-archive"
+
+SRC_URI = "https://github.com/tpm2-software/${BPN}/releases/download/${PV}/${BPN}-${PV}.tar.gz"
+
+SRC_URI[md5sum] = "701ae9e8c8cbdd37d89c8ad774f55395"
+SRC_URI[sha256sum] = "40b9263d8b949bd2bc03a3cd60fa242e27116727467f9bbdd0b5f2539a25a7b1"
+SRC_URI[sha1sum] = "d097d321237983435f05c974533ad90e6f20acef"
+SRC_URI[sha384sum] = "396547f400e4f5626d7741d77ec543f312d94e6697899f4c36260d15fab3f4f971ad2c0487e6eaa2d60256f3cf68f85f"
+SRC_URI[sha512sum] = "25952cf947f0acd16b1a8dbd3ac8573bce85ff970a7e24c290c4f9cd29418e77a3e48ac82c932fbd250887a9303ab301ff92db594c2fffaba47b873382444d26"
+
+inherit autotools pkgconfig bash-completion
diff --git a/meta-security/meta-tpm/recipes-tpm2/tpm2-tss/tpm2-tss_2.3.1.bb b/meta-security/meta-tpm/recipes-tpm2/tpm2-tss/tpm2-tss_2.3.2.bb
similarity index 72%
rename from meta-security/meta-tpm/recipes-tpm2/tpm2-tss/tpm2-tss_2.3.1.bb
rename to meta-security/meta-tpm/recipes-tpm2/tpm2-tss/tpm2-tss_2.3.2.bb
index dfdf734..135efed 100644
--- a/meta-security/meta-tpm/recipes-tpm2/tpm2-tss/tpm2-tss_2.3.1.bb
+++ b/meta-security/meta-tpm/recipes-tpm2/tpm2-tss/tpm2-tss_2.3.2.bb
@@ -8,22 +8,22 @@
 
 SRCREV = "a99e733ba66c359502689a9c42fd5e02ed1dd7d6"
 
-SRC_URI = "git://github.com/tpm2-software/tpm2-tss.git;branch=2.3.x"
+SRC_URI = "https://github.com/tpm2-software/${BPN}/releases/download/${PV}/${BPN}-${PV}.tar.gz"
+SRC_URI[md5sum] = "fb7e6d371959a65dc6d129af81739742"
+SRC_URI[sha256sum] = "82929a0611f39246e09202702a61b54c980ab694626c1f5823520ddf75024fa6"
+SRC_URI[sha1sum] = "c24ce8b20a8686ada775239389292f6d78020668"
+SRC_URI[sha384sum] = "a0c023c024efb6c9906df1e143d692f44433de332b616dc0584c9b4cd4fb0ad544308f291892e91c5a52ef1a4b2abf7f"
+SRC_URI[sha512sum] = "7b679b54f3478c3adee5b6c3135cbe491ffd9f4712991f465edbd6c7d2831e5f1537038ec36f288e9545c719d5d167b61116c924cf5d816220615d0b58a1d436"
 
-inherit autotools-brokensep pkgconfig systemd
-
-S = "${WORKDIR}/git"
+inherit autotools pkgconfig systemd extrausers
 
 PACKAGECONFIG ??= ""
 PACKAGECONFIG[oxygen] = ",--disable-doxygen-doc, "
 
-EXTRA_OECONF += "--with-udevrulesdir=${base_prefix}/lib/udev/rules.d/"
+EXTRA_OECONF += "--enable-static --with-udevrulesdir=${base_prefix}/lib/udev/rules.d/"
+EXTRA_OECONF_remove = " --disable-static"
 
-do_configure_prepend () {
-       ./bootstrap
-}
 
-INHERIT += "extrausers"
 EXTRA_USERS_PARAMS = "\
 	useradd -p '' tss; \
 	groupadd tss; \
diff --git a/meta-security/recipes-core/images/dm-verity-image-initramfs.bb b/meta-security/recipes-core/images/dm-verity-image-initramfs.bb
new file mode 100644
index 0000000..f9ea376
--- /dev/null
+++ b/meta-security/recipes-core/images/dm-verity-image-initramfs.bb
@@ -0,0 +1,26 @@
+DESCRIPTION = "Simple initramfs image for mounting the rootfs over the verity device mapper."
+
+# We want a clean, minimal image.
+IMAGE_FEATURES = ""
+
+PACKAGE_INSTALL = " \
+    initramfs-dm-verity \
+    base-files \
+    busybox \
+    util-linux-mount \
+    udev \
+    cryptsetup \
+    lvm2-udevrules \
+"
+
+# Can we somehow inspect reverse dependencies to avoid these variables?
+do_rootfs[depends] += "${DM_VERITY_IMAGE}:do_image_${DM_VERITY_IMAGE_TYPE}"
+
+IMAGE_FSTYPES = "${INITRAMFS_FSTYPES}"
+
+inherit core-image
+
+deploy_verity_hash() {
+    install -D -m 0644 ${DEPLOY_DIR_IMAGE}/${DM_VERITY_IMAGE}-${MACHINE}.${DM_VERITY_IMAGE_TYPE}.verity.env ${IMAGE_ROOTFS}/${datadir}/dm-verity.env
+}
+ROOTFS_POSTPROCESS_COMMAND += "deploy_verity_hash;"
diff --git a/meta-security/recipes-core/initrdscripts/initramfs-dm-verity.bb b/meta-security/recipes-core/initrdscripts/initramfs-dm-verity.bb
new file mode 100644
index 0000000..b614956
--- /dev/null
+++ b/meta-security/recipes-core/initrdscripts/initramfs-dm-verity.bb
@@ -0,0 +1,13 @@
+SUMMARY = "Simple init script that uses devmapper to mount the rootfs in read-only mode protected by dm-verity"
+LICENSE = "MIT"
+LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420"
+
+SRC_URI = "file://init-dm-verity.sh"
+
+do_install() {
+    install -m 0755 ${WORKDIR}/init-dm-verity.sh ${D}/init
+    install -d ${D}/dev
+    mknod -m 622 ${D}/dev/console c 5 1
+}
+
+FILES_${PN} = "/init /dev/console"
diff --git a/meta-security/recipes-core/initrdscripts/initramfs-dm-verity/init-dm-verity.sh b/meta-security/recipes-core/initrdscripts/initramfs-dm-verity/init-dm-verity.sh
new file mode 100644
index 0000000..307d2c7
--- /dev/null
+++ b/meta-security/recipes-core/initrdscripts/initramfs-dm-verity/init-dm-verity.sh
@@ -0,0 +1,46 @@
+#!/bin/sh
+
+PATH=/sbin:/bin:/usr/sbin:/usr/bin
+RDEV=""
+ROOT_DIR="/new_root"
+
+mkdir -p /proc
+mkdir -p /sys
+mkdir -p /run
+mkdir -p /tmp
+mount -t proc proc /proc
+mount -t sysfs sysfs /sys
+mount -t devtmpfs none /dev
+
+udevd --daemon
+udevadm trigger --type=subsystems --action=add
+udevadm trigger --type=devices --action=add
+udevadm settle --timeout=10
+
+for PARAM in $(cat /proc/cmdline); do
+	case $PARAM in
+		root=*)
+			RDEV=${PARAM#root=}
+			;;
+	esac
+done
+
+if ! [ -b $RDEV ]; then
+	echo "Missing root command line argument!"
+	exit 1
+fi
+
+case $RDEV in
+	UUID=*)
+		RDEV=$(realpath /dev/disk/by-uuid/${RDEV#UUID=})
+		;;
+esac
+
+. /usr/share/dm-verity.env
+
+echo "Mounting $RDEV over dm-verity as the root filesystem"
+
+veritysetup --data-block-size=1024 --hash-offset=$DATA_SIZE create rootfs $RDEV $RDEV $ROOT_HASH
+mkdir -p $ROOT_DIR
+mount -o ro /dev/mapper/rootfs $ROOT_DIR
+exec switch_root $ROOT_DIR /sbin/init
diff --git a/meta-security/recipes-ids/samhain/files/fix-build-with-new-version-attr.patch b/meta-security/recipes-ids/samhain/files/fix-build-with-new-version-attr.patch
new file mode 100644
index 0000000..eaf30db
--- /dev/null
+++ b/meta-security/recipes-ids/samhain/files/fix-build-with-new-version-attr.patch
@@ -0,0 +1,73 @@
+From e67acafa62f71f0015ed548918b98ed0b1ded128 Mon Sep 17 00:00:00 2001
+From: Yi Zhao <yi.zhao@windriver.com>
+Date: Sun, 19 Jan 2020 15:53:48 +0800
+Subject: [PATCH] fix build with new version attr
+
+The attr/xattr.h has been removed from attr 2.4.48 with commit:
+http://git.savannah.nongnu.org/cgit/attr.git/commit/include?id=7921157890d07858d092f4003ca4c6bae9fd2c38
+The xattr syscalls are provided by sys/xattr.h from glibc now.
+Remove the checking code to adapt it.
+
+Upstream-Status: Pending
+
+Signed-off-by: Yi Zhao <yi.zhao@windriver.com>
+---
+ aclocal.m4    | 26 +++++++++++---------------
+ src/sh_unix.c |  2 +-
+ 2 files changed, 12 insertions(+), 16 deletions(-)
+
+diff --git a/aclocal.m4 b/aclocal.m4
+index ee5b204..38cef8e 100644
+--- a/aclocal.m4
++++ b/aclocal.m4
+@@ -1453,23 +1453,19 @@ AC_DEFUN([sh_CHECK_POSIX_ACL],
+ 
+ AC_DEFUN([sh_CHECK_XATTR],
+ [
+-  AC_CHECK_HEADERS(attr/xattr.h)
+-  if test $ac_cv_header_attr_xattr_h = yes; then
+-
+-  	AC_CHECK_LIB([attr], [getxattr], sh_lattr=yes, sh_lattr=no)
+-  	if test x"$sh_lattr" = xyes; then
+-    		LIBATTR=-lattr
+-  	else
+-    		LIBATTR=
+-  	fi
+-  
+-  	OLDLIBS="$LIBS"
+-  	LIBS="$LIBS $LIBATTR"
+-  	AC_CHECK_FUNCS([getxattr lgetxattr fgetxattr],
+-                       [sh_fattr=yes],[sh_fattr=no])
+-  	LIBS="$OLDLIBS"
++  AC_CHECK_LIB([attr], [getxattr], sh_lattr=yes, sh_lattr=no)
++  if test x"$sh_lattr" = xyes; then
++      LIBATTR=-lattr
++  else
++      LIBATTR=
+   fi
+ 
++  OLDLIBS="$LIBS"
++  LIBS="$LIBS $LIBATTR"
++  AC_CHECK_FUNCS([getxattr lgetxattr fgetxattr],
++	  [sh_fattr=yes],[sh_fattr=no])
++  LIBS="$OLDLIBS"
++
+   if test x"$sh_fattr" = xyes; then
+ 	  AC_DEFINE(USE_XATTR, 1, [Define if you want extended attributes support.])
+ 	  LIBS="$LIBS $LIBATTR"
+diff --git a/src/sh_unix.c b/src/sh_unix.c
+index 3ede57f..ef236e9 100644
+--- a/src/sh_unix.c
++++ b/src/sh_unix.c
+@@ -3681,7 +3681,7 @@ static char * sh_unix_getinfo_acl (char * path, int fd, struct stat * buf)
+ 
+ #ifdef USE_XATTR
+ 
+-#include <attr/xattr.h>
++#include <sys/xattr.h>
+ static char * sh_unix_getinfo_xattr_int (char * path, int fd, char * name)
+ {
+   char *  out   = NULL;
+-- 
+2.7.4
+
diff --git a/meta-security/recipes-ids/samhain/files/samhain-server-volatiles.conf b/meta-security/recipes-ids/samhain/files/samhain-server-volatiles.conf
new file mode 100644
index 0000000..f2ea390
--- /dev/null
+++ b/meta-security/recipes-ids/samhain/files/samhain-server-volatiles.conf
@@ -0,0 +1 @@
+d /var/log/yule 0775 daemon daemon -
diff --git a/meta-security/recipes-ids/samhain/samhain-server.bb b/meta-security/recipes-ids/samhain/samhain-server.bb
index d304912..e7a3aa6 100644
--- a/meta-security/recipes-ids/samhain/samhain-server.bb
+++ b/meta-security/recipes-ids/samhain/samhain-server.bb
@@ -4,14 +4,22 @@
 
 DEPENDS = "gmp"
 
-SRC_URI += "file://samhain-server-volatiles"
+SRC_URI += "file://samhain-server-volatiles \
+            file://samhain-server-volatiles.conf \
+           "
 
 TARGET_CC_ARCH += "${LDFLAGS}"
 
 do_install_append() {
-    install -d ${D}${sysconfdir}/default/volatiles
-    install -m 0644 ${WORKDIR}/samhain-server-volatiles \
-        ${D}${sysconfdir}/default/volatiles/samhain-server
+    if ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'true', 'false', d)}; then
+        install -d ${D}${sysconfdir}/tmpfiles.d
+        install -m 0644 ${WORKDIR}/samhain-server-volatiles.conf \
+            ${D}${sysconfdir}/tmpfiles.d/samhain-server.conf
+    else
+        install -d ${D}${sysconfdir}/default/volatiles
+        install -m 0644 ${WORKDIR}/samhain-server-volatiles \
+            ${D}${sysconfdir}/default/volatiles/samhain-server
+    fi
 
     install -m 700 samhain-install.sh init/samhain.startLinux \
         init/samhain.startLSB ${D}/var/lib/samhain
diff --git a/meta-security/recipes-ids/samhain/samhain.inc b/meta-security/recipes-ids/samhain/samhain.inc
index 16222ba..b867bbc 100644
--- a/meta-security/recipes-ids/samhain/samhain.inc
+++ b/meta-security/recipes-ids/samhain/samhain.inc
@@ -14,6 +14,7 @@
            file://samhain-configure-add-option-for-ps.patch \
            file://samhain-avoid-searching-host-for-postgresql.patch \
            file://samhain-add-LDFLAGS-variable-for-samhain_setpwd.patch \
+           file://fix-build-with-new-version-attr.patch \
            file://${INITSCRIPT_NAME}.init \
            file://${INITSCRIPT_NAME}.default \
            file://samhain.service \
@@ -66,6 +67,9 @@
 PACKAGECONFIG[audit] = "ac_cv_header_auparse_h=yes,ac_cv_header_auparse_h=no,audit"
 PACKAGECONFIG[ps] = "--with-ps-path=${base_bindir}/ps,,,procps"
 
+EXTRA_OEMAKE_append_aarch64 = " CPPFLAGS+=-DCONFIG_ARCH_AARCH64=1"
+EXTRA_OEMAKE_append_mips64 = " CPPFLAGS+=-DCONFIG_ARCH_MIPS64=1"
+
 do_unpack_samhain() {
     cd ${WORKDIR}
     tar -xzvf samhain-${PV}.tar.gz
@@ -117,7 +121,6 @@
 	--enable-network=${SAMHAIN_MODE} \
 	--with-pid-file=${localstatedir}/run/samhain.pid \
 	--with-data-file=${localstatedir}/lib/samhain/samhain_file \
-	--disable-dnmalloc \
 	${EXTRA_OECONF}
 }
 
@@ -158,6 +161,8 @@
 	if [ -d ${D}${localstatedir}/run ]; then
 		rmdir ${D}${localstatedir}/run
 	fi
+
+	rm -rf ${D}${localstatedir}/log
 }
 
 FILES_${PN} += "${systemd_system_unitdir}"
diff --git a/meta-security/recipes-kernel/linux/linux-yocto-dev.bbappend b/meta-security/recipes-kernel/linux/linux-yocto-dev.bbappend
index 239e30e..39d4e6f 100644
--- a/meta-security/recipes-kernel/linux/linux-yocto-dev.bbappend
+++ b/meta-security/recipes-kernel/linux/linux-yocto-dev.bbappend
@@ -1,2 +1,2 @@
 KERNEL_FEATURES_append = " ${@bb.utils.contains("DISTRO_FEATURES", "apparmor", " features/apparmor/apparmor.scc", "" ,d)}"
-+KERNEL_FEATURES_append = " ${@bb.utils.contains("DISTRO_FEATURES", "smack", " features/smack/smack.scc", "" ,d)}"
+KERNEL_FEATURES_append = " ${@bb.utils.contains("DISTRO_FEATURES", "smack", " features/smack/smack.scc", "" ,d)}"
diff --git a/meta-security/recipes-kernel/linux/linux-yocto_4.%.bbappend b/meta-security/recipes-kernel/linux/linux-yocto_5.%.bbappend
similarity index 100%
rename from meta-security/recipes-kernel/linux/linux-yocto_4.%.bbappend
rename to meta-security/recipes-kernel/linux/linux-yocto_5.%.bbappend
diff --git a/meta-security/recipes-mac/AppArmor/apparmor_2.13.3.bb b/meta-security/recipes-mac/AppArmor/apparmor_2.13.4.bb
similarity index 95%
rename from meta-security/recipes-mac/AppArmor/apparmor_2.13.3.bb
rename to meta-security/recipes-mac/AppArmor/apparmor_2.13.4.bb
index 32230a5..d6f61b3 100644
--- a/meta-security/recipes-mac/AppArmor/apparmor_2.13.3.bb
+++ b/meta-security/recipes-mac/AppArmor/apparmor_2.13.4.bb
@@ -25,7 +25,7 @@
 	file://run-ptest \
 	"
 
-SRCREV = "2f9d9ea7e01a115b29858455d3b1b5c6a0bab75c"
+SRCREV = "df0ac742f7a1146181d8734d03334494f2015134"
 S = "${WORKDIR}/git"
 
 PARALLEL_MAKE = ""
@@ -120,7 +120,7 @@
 
 	if ${@bb.utils.contains('DISTRO_FEATURES','systemd','true','false',d)}; then
 		install -d ${D}${systemd_system_unitdir}
-		install ${WORKDIR}/apparmor.service ${D}${systemd_system_unitdir}
+		install -m 0644 ${WORKDIR}/apparmor.service ${D}${systemd_system_unitdir}
 	fi
 }
 
@@ -191,7 +191,7 @@
 FILES_${PN} += "/lib/apparmor/ ${sysconfdir}/apparmor ${PYTHON_SITEPACKAGES_DIR}"
 FILES_mod-${PN} = "${libdir}/apache2/modules/*"
 
-RDEPENDS_${PN} += "${@bb.utils.contains('PACKAGECONFIG','python','python3-core python3-modules','', d)}"
+RDEPENDS_${PN} +=  "coreutils findutils ${@bb.utils.contains('PACKAGECONFIG','python','python3-core python3-modules','', d)}"
 RDEPENDS_${PN}_remove += "${@bb.utils.contains('PACKAGECONFIG','perl','','perl', d)}"
 RDEPENDS_${PN}-ptest += "perl coreutils dbus-lib bash"
 
diff --git a/meta-security/recipes-scanners/arpwatch/arpwatch_3.0.bb b/meta-security/recipes-scanners/arpwatch/arpwatch_3.0.bb
new file mode 100644
index 0000000..9be319a
--- /dev/null
+++ b/meta-security/recipes-scanners/arpwatch/arpwatch_3.0.bb
@@ -0,0 +1,79 @@
+SUMMARY = "The ethernet monitor program; for keeping track of ethernet/ip address pairings"
+LICENSE = "BSD-4-Clause"
+HOMEPAGE = "http://ee.lbl.gov/"
+LIC_FILES_CHKSUM = "file://configure;md5=212742e55562cf47527d31c2a492411a"
+
+DEPENDS += "libpcap postfix"
+
+SRC_URI = "https://ee.lbl.gov/downloads/arpwatch/${BP}.tar.gz \
+           file://arpwatch.conf \
+           file://arpwatch.default \
+           file://arpwatch_init  \
+           file://postfix_workaround.patch \
+           file://host_contam_fix.patch "
+
+SRC_URI[sha256sum] = "82e137e104aca8b1280f5cca0ebe61b978f10eadcbb4c4802c181522ad02b25b"
+
+inherit  autotools-brokensep update-rc.d useradd
+
+ARPWATCH_UID ?= "arpwatch"
+ARPWATCH_GID ?= "arpwatch"
+ARPWATCH_FROM ?= "root"
+ARPWATCH_REPLY ?= "${ARPWATCH_UID}"
+
+EXTRA_OECONF = " --srcdir=${S} --with-watcher=email=${ARPWATCH_FROM} --with-watchee=email=${ARPWATCH_REPLY}"
+
+CONFIGUREOPTS = " --build=${BUILD_SYS} \
+          --host=${HOST_SYS} \
+          --target=${TARGET_SYS} \
+          --prefix=${prefix} \
+          --exec_prefix=${exec_prefix} \
+          --bindir=${bindir} \
+          --sbindir=${sbindir} \
+          --libexecdir=${libexecdir} \
+          --datadir=${datadir} \
+          --sysconfdir=${sysconfdir} \
+          --sharedstatedir=${sharedstatedir} \
+          --localstatedir=${localstatedir} \
+          --libdir=${libdir} \
+          --includedir=${includedir} \
+          --oldincludedir=${oldincludedir} \
+          --infodir=${infodir} \
+          --mandir=${mandir} \
+          "
+
+do_configure () {
+    ${S}/configure ${CONFIGUREOPTS} ${EXTRA_OECONF}
+}
+
+do_install () {
+    install -d ${D}${bindir}
+    install -d ${D}${sbindir}
+    install -d ${D}${mandir}
+    install -d ${D}${sysconfdir}
+    install -d ${D}${sysconfdir}/default
+    install -d ${D}${sysconfdir}/init.d
+    install -d ${D}${prefix}/etc/rc.d
+    install -d ${D}/var/lib/arpwatch
+
+    oe_runmake install DESTDIR=${D}
+    install -m 644 ${WORKDIR}/arpwatch.conf  ${D}${sysconfdir}
+    install -m 655 ${WORKDIR}/arpwatch_init  ${D}${sysconfdir}/init.d/arpwatch
+    install -m 644 ${WORKDIR}/arpwatch.default  ${D}${sysconfdir}/default
+}
+
+INITSCRIPT_NAME = "arpwatch"
+INITSCRIPT_PARAMS = "start 02 2 3 4 5 . stop 20 0 1 6 ."
+
+USERADD_PACKAGES = "${PN}"
+GROUPADD_PARAM_${PN} = "--system ${ARPWATCH_UID}"
+USERADD_PARAM_${PN} = "--system -g ${ARPWATCH_GID} --home-dir  \
+    ${localstatedir}/spool/${BPN} \
+    --no-create-home  --shell /bin/false ${BPN}"
+
+CONFFILES_${PN} = "${sysconfdir}/${PN}.conf"
+
+FILES_${PN} = "${bindir} ${sbindir} ${prefix}/etc/rc.d \
+               ${sysconfdir} /var/lib/arpwatch"
+
+RDEPENDS_${PN} = "libpcap postfix postfix-cfg"
diff --git a/meta-security/recipes-scanners/arpwatch/files/arpwatch.conf b/meta-security/recipes-scanners/arpwatch/files/arpwatch.conf
new file mode 100644
index 0000000..67213c9
--- /dev/null
+++ b/meta-security/recipes-scanners/arpwatch/files/arpwatch.conf
@@ -0,0 +1,23 @@
+# /etc/arpwatch.conf: Debian-specific way to watch multiple interfaces.
+# Format of this configuration file is:
+#
+#<dev1>	<arpwatch options for dev1>
+#<dev2>	<arpwatch options for dev2>
+#...
+#<devN>	<arpwatch options for devN>
+#
+# You can set global options for all interfaces by editing
+# /etc/default/arpwatch
+
+# For example:
+
+eth0	
+#eth0	-m root
+#eth1	-m root
+#eth2	-m root
+
+# or, if you have an MTA configured for plussed addressing:
+#
+#eth0	-m root+eth0
+#eth1	-m root+eth1
+#eth2	-m root+eth2
diff --git a/meta-security/recipes-scanners/arpwatch/files/arpwatch.default b/meta-security/recipes-scanners/arpwatch/files/arpwatch.default
new file mode 100644
index 0000000..b0a7d8f
--- /dev/null
+++ b/meta-security/recipes-scanners/arpwatch/files/arpwatch.default
@@ -0,0 +1,7 @@
+# Global options for arpwatch(8).
+
+# Debian: don't report bogons, don't use PROMISC.
+ARGS="-N -p"
+
+# Debian: run as `arpwatch' user.  Empty this to run as root.
+RUNAS="arpwatch"
diff --git a/meta-security/recipes-scanners/arpwatch/files/arpwatch_init b/meta-security/recipes-scanners/arpwatch/files/arpwatch_init
new file mode 100644
index 0000000..9860c65
--- /dev/null
+++ b/meta-security/recipes-scanners/arpwatch/files/arpwatch_init
@@ -0,0 +1,123 @@
+#!/bin/sh
+
+PATH=/sbin:/bin:/usr/sbin:/usr/bin
+NAME=arpwatch
+DAEMON=/usr/sbin/$NAME
+DESC="Ethernet/FDDI station monitor daemon"
+DATADIR=/var/lib/$NAME
+RETVAL=0
+
+. /etc/init.d/functions
+
+### You shouldn't touch anything below unless you know what you are doing.
+
+[ -f /etc/default/arpwatch ] && . /etc/default/arpwatch
+
+# Decide whether we have to deal with multiple interfaces.
+CONF=/etc/arpwatch.conf
+MULTIPLE=0
+if [ -r $CONF ]; then
+	grep -c '^[a-z]' $CONF 2>&1 >/dev/null && MULTIPLE=1
+fi
+
+# Check whether we have to drop privileges.
+if [ -n "$RUNAS" ]; then
+	if getent passwd "$RUNAS" >/dev/null; then
+		ARGS="-u ${RUNAS} $ARGS"
+	else
+		RUNAS=""
+	fi
+fi
+
+start_instance () {
+	IFACE=$1
+	INSTANCE=${NAME}-${IFACE}
+	DATAFILE=$DATADIR/${IFACE}.dat
+	IFACE_OPTS="-P /var/run/${INSTANCE}.pid -i ${IFACE} -f ${DATAFILE} $2"
+
+	echo -n "Starting $DESC: "
+	if [ ! -f $DATAFILE ]; then
+		echo -n "(creating $DATAFILE) " :> $DATAFILE
+	fi
+	if [ -n "$RUNAS" ]; then
+		echo -n "(chown $RUNAS $DATAFILE) "
+		chown $RUNAS $DATAFILE
+	fi
+	start-stop-daemon --start --quiet \
+		--pidfile /var/run/${INSTANCE}.pid \
+		--exec $DAEMON -- $IFACE_OPTS $ARGS
+	echo "${INSTANCE}."
+	ps h -C $NAME -o pid,args | \
+		awk "/$IFACE/ { print \$1 }" > /var/run/${INSTANCE}.pid
+}
+
+stop_instance () {
+	IFACE=$1
+	INSTANCE=${NAME}-${IFACE}
+	[ -f /var/run/${INSTANCE}.pid ] || return 0
+	echo -n "Stopping $DESC: "
+	start-stop-daemon --stop --quiet --oknodo \
+		--pidfile /var/run/${INSTANCE}.pid
+	echo "${INSTANCE}."
+	rm -f /var/run/${INSTANCE}.pid
+}
+
+process_loop_break_line () {
+	__IFACE=$1
+	shift
+	__IOPTS="$@"
+}
+
+process_loop () {
+	OPERATION=$1
+	grep '^[a-z]' $CONF 2>/dev/null | \
+	while read LINE
+	do
+		process_loop_break_line $LINE
+		I=$__IFACE
+		I_OPTS="$__IOPTS"
+		$OPERATION $I "$I_OPTS"
+	done
+}
+
+startup () {
+  	process_loop start_instance
+}
+
+shutdown () {
+	process_loop stop_instance
+}
+
+case "$1" in
+  start)
+  	startup
+	;;
+  stop)
+  	shutdown
+	;;
+  reload)
+  	echo "Reload operation not supported -- use restart."
+	RETVAL=2
+	;;
+  restart|force-reload)
+	#
+	#	If the "reload" option is implemented, move the "force-reload"
+	#	option to the "reload" entry above. If not, "force-reload" is
+	#	just the same as "restart".
+	#
+	shutdown
+	sleep 1
+	startup
+	;;
+  status)
+      status_of_proc $DAEMON $NAME
+      ;;
+  *)
+	N=/etc/init.d/$NAME
+	# echo "Usage: $N {start|stop|restart|reload|force-reload}" >&2
+	echo "Usage: $N {start|stop|restart|force-reload}" >&2
+	RETVAL=2
+	;;
+esac
+
+exit $RETVAL
diff --git a/meta-security/recipes-scanners/arpwatch/files/host_contam_fix.patch b/meta-security/recipes-scanners/arpwatch/files/host_contam_fix.patch
new file mode 100644
index 0000000..7d7ffac
--- /dev/null
+++ b/meta-security/recipes-scanners/arpwatch/files/host_contam_fix.patch
@@ -0,0 +1,21 @@
+This removes the host contamination
+
+Upstream-Status: Inappropriate [embedded specific]
+
+Signed-off-by: Armin Kuster <akuster808@gmail.com>
+
+Index: arpwatch-3.0/configure
+===================================================================
+--- arpwatch-3.0.orig/configure
++++ arpwatch-3.0/configure
+@@ -4349,8 +4349,8 @@ fi
+ 	    CC=cc
+ 	    export CC
+     fi
+-    V_INCLS="$V_INCLS -I/usr/local/include"
+-    LDFLAGS="$LDFLAGS -L/usr/local/lib"
++    V_INCLS="$V_INCLS "
++    LDFLAGS="$LDFLAGS "
+     if test "$GCC" != yes ; then
+ 	    { $as_echo "$as_me:${as_lineno-$LINENO}: checking that $CC handles ansi prototypes" >&5
+ $as_echo_n "checking that $CC handles ansi prototypes... " >&6; }
diff --git a/meta-security/recipes-scanners/arpwatch/files/postfix_workaround.patch b/meta-security/recipes-scanners/arpwatch/files/postfix_workaround.patch
new file mode 100644
index 0000000..95213f2
--- /dev/null
+++ b/meta-security/recipes-scanners/arpwatch/files/postfix_workaround.patch
@@ -0,0 +1,91 @@
+The configure script aborts when sendmail is not found on the build host.
+We are using postfix instead, so drop the sendmail check entirely.
+
+Upstream-Status: Inappropriate [embedded specific]
+
+Signed-off-by: Armin Kuster <akuster808@gmail.com>
+
+Index: arpwatch-3.0/configure
+===================================================================
+--- arpwatch-3.0.orig/configure
++++ arpwatch-3.0/configure
+@@ -636,7 +636,6 @@ LBL_LIBS
+ HAVE_FREEBSD_TRUE
+ HAVE_FREEBSD_FALSE
+ PYTHON
+-V_SENDMAIL
+ LIBOBJS
+ INSTALL_DATA
+ INSTALL_SCRIPT
+@@ -5573,53 +5572,6 @@ fi
+ done
+ 
+ 
+-# Extract the first word of "sendmail", so it can be a program name with args.
+-set dummy sendmail; ac_word=$2
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+-$as_echo_n "checking for $ac_word... " >&6; }
+-if ${ac_cv_path_V_SENDMAIL+:} false; then :
+-  $as_echo_n "(cached) " >&6
+-else
+-  case $V_SENDMAIL in
+-  [\\/]* | ?:[\\/]*)
+-  ac_cv_path_V_SENDMAIL="$V_SENDMAIL" # Let the user override the test with a path.
+-  ;;
+-  *)
+-  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+-as_dummy="$PATH:/usr/sbin:/usr/lib:/usr/bin:/usr/ucblib:/usr/local/etc"
+-for as_dir in $as_dummy
+-do
+-  IFS=$as_save_IFS
+-  test -z "$as_dir" && as_dir=.
+-    for ac_exec_ext in '' $ac_executable_extensions; do
+-  if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+-    ac_cv_path_V_SENDMAIL="$as_dir/$ac_word$ac_exec_ext"
+-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+-    break 2
+-  fi
+-done
+-  done
+-IFS=$as_save_IFS
+-
+-  ;;
+-esac
+-fi
+-V_SENDMAIL=$ac_cv_path_V_SENDMAIL
+-if test -n "$V_SENDMAIL"; then
+-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $V_SENDMAIL" >&5
+-$as_echo "$V_SENDMAIL" >&6; }
+-else
+-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+-$as_echo "no" >&6; }
+-fi
+-
+-
+-
+-if test -z "${V_SENDMAIL}" ; then
+-	as_fn_error $? "Can't find sendmail" "$LINENO" 5
+-fi
+-
+-
+ python=${PYTHON:-python}
+ # Extract the first word of "${python}", so it can be a program name with args.
+ set dummy ${python}; ac_word=$2
+Index: arpwatch-3.0/configure.in
+===================================================================
+--- arpwatch-3.0.orig/configure.in
++++ arpwatch-3.0/configure.in
+@@ -76,13 +76,6 @@ AC_LBL_UNION_WAIT
+ AC_CHECK_LIB(resolv, res_query)
+ AC_LBL_LIBPCAP(V_PCAPDEP, V_INCLS)
+ 
+-AC_PATH_PROG(V_SENDMAIL, sendmail,,
+-    $PATH:/usr/sbin:/usr/lib:/usr/bin:/usr/ucblib:/usr/local/etc)
+-
+-if test -z "${V_SENDMAIL}" ; then
+-	AC_MSG_ERROR([Can't find sendmail])
+-fi
+-
+ dnl AC_LBL_CHECK_TYPE(int32_t, int)
+ dnl AC_LBL_CHECK_TYPE(u_int32_t, u_int)
+ 
diff --git a/meta-security/recipes-scanners/buck-security/buck-security_0.7.bb b/meta-security/recipes-scanners/buck-security/buck-security_0.7.bb
new file mode 100644
index 0000000..179eeda
--- /dev/null
+++ b/meta-security/recipes-scanners/buck-security/buck-security_0.7.bb
@@ -0,0 +1,45 @@
+SUMMARY = "Linux security scanner"
+DESCRIPTION = "Buck-Security is a security scanner for Debian and Ubuntu Linux. It runs a couple of important checks and helps you to harden your Linux \
+system. This enables you to quickly overview the security status of your Linux system."
+SECTION = "security"
+LICENSE = "GPL-2.0"
+LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/GPL-2.0;md5=801f80980d171dd6425610833a22dbe6"
+
+SRC_URI = "http://sourceforge.net/projects/buck-security/files/buck-security/buck-security_${PV}/${BPN}_${PV}.tar.gz"
+
+SRC_URI[md5sum] = "611a3e9bb7ed8a8270aa15216c321c53"
+SRC_URI[sha256sum] = "c533c6631ec3554dd8d39d2d1c3ed44badbbf50810ebb75469c74639fa294b01"
+
+S = "${WORKDIR}/${BPN}_${PV}"
+
+do_configure[noexec] = "1"
+do_compile[noexec] = "1"
+
+do_install() {
+    install -d ${D}${bindir}/buck
+    cp -r ${S}/* ${D}${bindir}/buck
+    cp -r ${S}/buck-security ${D}${bindir}
+    sed -i 's!use lib "checks"!use lib File::Spec->catfile(dirname(File::Spec->rel2abs(__FILE__)), "buck/checks")!' ${D}${bindir}/buck-security
+    sed -i 's!use lib "checks/lib"!use lib File::Spec->catfile(dirname(File::Spec->rel2abs(__FILE__)), "buck/checks/lib")!' ${D}${bindir}/buck-security
+    sed -i 's!use lib "lib"!use lib File::Spec->catfile(dirname(File::Spec->rel2abs(__FILE__)), "buck/lib")!' ${D}${bindir}/buck-security
+    sed -i 's!my $buck_root = "."!my $buck_root = File::Spec->catfile(dirname(File::Spec->rel2abs(__FILE__)), "buck")!' ${D}${bindir}/buck-security
+
+}
+
+FILES_${PN} = "${bindir}/*"
+
+RDEPENDS_${PN} = "coreutils gnupg net-tools perl perl-module-data-dumper \
+                  perl-module-file-basename perl-module-file-spec perl-module-getopt-long \
+                  perl-module-lib perl-module-posix perl-module-term-ansicolor \
+                  perl-module-time-localtime pinentry perl-module-pod-usage \
+                  perl-module-pod-text perl-module-file-glob \
+                 "
+
+RDEPENDS_${PN}_class-native = "coreutils net-tools perl perl-module-data-dumper \
+                               perl-module-file-basename perl-module-file-spec perl-module-getopt-long \
+                               perl-module-lib perl-module-posix perl-module-term-ansicolor \
+                               perl-module-time-localtime perl-module-file-glob\
+                              "
+
+
+BBCLASSEXTEND = "native"
diff --git a/meta-security/recipes-security/checksec/checksec_2.1.0.bb b/meta-security/recipes-scanners/checksec/checksec_2.1.0.bb
similarity index 100%
rename from meta-security/recipes-security/checksec/checksec_2.1.0.bb
rename to meta-security/recipes-scanners/checksec/checksec_2.1.0.bb
diff --git a/meta-security/recipes-security/checksecurity/checksecurity_2.0.15.bb b/meta-security/recipes-scanners/checksecurity/checksecurity_2.0.15.bb
similarity index 84%
rename from meta-security/recipes-security/checksecurity/checksecurity_2.0.15.bb
rename to meta-security/recipes-scanners/checksecurity/checksecurity_2.0.15.bb
index 030bf25..204123d 100644
--- a/meta-security/recipes-security/checksecurity/checksecurity_2.0.15.bb
+++ b/meta-security/recipes-scanners/checksecurity/checksecurity_2.0.15.bb
@@ -18,4 +18,4 @@
     oe_runmake PREFIX=${D}
 }
 
-RDEPENDS_${PN} = "perl libenv-perl perl-module-tie-array perl-module-getopt-long perl-module-file-glob util-linux findutils coreutils"
+RDEPENDS_${PN} = "perl libenv-perl perl-module-tie-array perl-module-getopt-long perl-module-file-glob perl-module-carp perl-module-env perl-module-tap-parser-iterator-array util-linux findutils coreutils"
diff --git a/meta-security/recipes-security/checksecurity/files/check-setuid-use-more-portable-find-args.patch b/meta-security/recipes-scanners/checksecurity/files/check-setuid-use-more-portable-find-args.patch
similarity index 100%
rename from meta-security/recipes-security/checksecurity/files/check-setuid-use-more-portable-find-args.patch
rename to meta-security/recipes-scanners/checksecurity/files/check-setuid-use-more-portable-find-args.patch
diff --git a/meta-security/recipes-security/checksecurity/files/setuid-log-folder.patch b/meta-security/recipes-scanners/checksecurity/files/setuid-log-folder.patch
similarity index 100%
rename from meta-security/recipes-security/checksecurity/files/setuid-log-folder.patch
rename to meta-security/recipes-scanners/checksecurity/files/setuid-log-folder.patch
diff --git a/meta-security/recipes-security/clamav/clamav_0.101.5.bb b/meta-security/recipes-scanners/clamav/clamav_0.101.5.bb
similarity index 98%
rename from meta-security/recipes-security/clamav/clamav_0.101.5.bb
rename to meta-security/recipes-scanners/clamav/clamav_0.101.5.bb
index a4c32e1..f4625b1 100644
--- a/meta-security/recipes-security/clamav/clamav_0.101.5.bb
+++ b/meta-security/recipes-scanners/clamav/clamav_0.101.5.bb
@@ -4,8 +4,8 @@
 SECTION = "security"
 LICENSE = "LGPL-2.1"
 
-DEPENDS = "libtool db libxml2 openssl zlib curl llvm clamav-native libmspack"
-DEPENDS_class-native = "db-native openssl-native zlib-native llvm-native curl-native"
+DEPENDS = "libtool db libxml2 openssl zlib curl llvm clamav-native libmspack bison-native"
+DEPENDS_class-native = "db-native openssl-native zlib-native llvm-native curl-native bison-native"
  
 LIC_FILES_CHKSUM = "file://COPYING.LGPL;beginline=2;endline=3;md5=4b89c05acc71195e9a06edfa2fa7d092"
 
diff --git a/meta-security/recipes-security/clamav/files/clamav-freshclam.service b/meta-security/recipes-scanners/clamav/files/clamav-freshclam.service
similarity index 100%
rename from meta-security/recipes-security/clamav/files/clamav-freshclam.service
rename to meta-security/recipes-scanners/clamav/files/clamav-freshclam.service
diff --git a/meta-security/recipes-security/clamav/files/clamav-milter.conf.sample b/meta-security/recipes-scanners/clamav/files/clamav-milter.conf.sample
similarity index 100%
rename from meta-security/recipes-security/clamav/files/clamav-milter.conf.sample
rename to meta-security/recipes-scanners/clamav/files/clamav-milter.conf.sample
diff --git a/meta-security/recipes-security/clamav/files/clamav.service b/meta-security/recipes-scanners/clamav/files/clamav.service
similarity index 100%
rename from meta-security/recipes-security/clamav/files/clamav.service
rename to meta-security/recipes-scanners/clamav/files/clamav.service
diff --git a/meta-security/recipes-security/clamav/files/clamd.conf b/meta-security/recipes-scanners/clamav/files/clamd.conf
similarity index 100%
rename from meta-security/recipes-security/clamav/files/clamd.conf
rename to meta-security/recipes-scanners/clamav/files/clamd.conf
diff --git a/meta-security/recipes-security/clamav/files/freshclam-native.conf b/meta-security/recipes-scanners/clamav/files/freshclam-native.conf
similarity index 100%
rename from meta-security/recipes-security/clamav/files/freshclam-native.conf
rename to meta-security/recipes-scanners/clamav/files/freshclam-native.conf
diff --git a/meta-security/recipes-security/clamav/files/freshclam.conf b/meta-security/recipes-scanners/clamav/files/freshclam.conf
similarity index 100%
rename from meta-security/recipes-security/clamav/files/freshclam.conf
rename to meta-security/recipes-scanners/clamav/files/freshclam.conf
diff --git a/meta-security/recipes-security/clamav/files/tmpfiles.clamav b/meta-security/recipes-scanners/clamav/files/tmpfiles.clamav
similarity index 100%
rename from meta-security/recipes-security/clamav/files/tmpfiles.clamav
rename to meta-security/recipes-scanners/clamav/files/tmpfiles.clamav
diff --git a/meta-security/recipes-security/clamav/files/volatiles.03_clamav b/meta-security/recipes-scanners/clamav/files/volatiles.03_clamav
similarity index 100%
rename from meta-security/recipes-security/clamav/files/volatiles.03_clamav
rename to meta-security/recipes-scanners/clamav/files/volatiles.03_clamav
diff --git a/meta-security/recipes-scanners/rootkits/chkrootkit_0.53.bb b/meta-security/recipes-scanners/rootkits/chkrootkit_0.53.bb
new file mode 100644
index 0000000..4536be3
--- /dev/null
+++ b/meta-security/recipes-scanners/rootkits/chkrootkit_0.53.bb
@@ -0,0 +1,48 @@
+DESCRIPTION = "rootkit detector"
+SUMMARY = "locally checks for signs of a rootkit"
+HOMEPAGE = "http://www.chkrootkit.org/"
+SECTION = "security"
+LICENSE = "BSD-2-Clause"
+LIC_FILES_CHKSUM = "file://COPYRIGHT;md5=fdbe53788f7081c63387d8087273f5ff"
+
+SRC_URI = "ftp://ftp.pangeia.com.br/pub/seg/pac/${BPN}.tar.gz"
+SRC_URI[sha256sum] = "7262dae33b338976828b5d156b70d159e0043c0db43ada8dee66c97387cf45b5"
+
+
+inherit autotools-brokensep
+
+TARGET_CC_ARCH += "${LDFLAGS}"
+
+do_configure () {
+    sed -i 's/@strip.*$//' ${S}/Makefile
+}
+
+do_compile () {
+    make CC="${CC}" LDFLAGS="${LDFLAGS}" sense
+    gzip -9vkf ACKNOWLEDGMENTS
+    gzip -9vkf README
+}
+
+do_install () {
+    install -d ${D}/${libdir}/${PN}
+    install -d ${D}/${sbindir}
+    install -d ${D}/${docdir}/${PN}
+
+    install -m 644 ${B}/chkdirs ${D}/${libdir}/${PN}
+    install -m 644 ${B}/chklastlog ${D}/${libdir}/${PN}
+    install -m 644 ${B}/chkproc ${D}/${libdir}/${PN}
+    install -m 644 ${B}/chkutmp ${D}/${libdir}/${PN}
+    install -m 644 ${B}/chkwtmp ${D}/${libdir}/${PN}
+    install -m 644 ${B}/ifpromisc ${D}/${libdir}/${PN}
+    install -m 644 ${B}/strings-static ${D}/${libdir}/${PN}
+
+    install -m 755 ${B}/chklastlog ${D}/${sbindir}
+    install -m 755 ${B}/chkrootkit ${D}/${sbindir}
+    install -m 755 ${B}/chkwtmp ${D}/${sbindir}
+
+    install -m 644 ${B}/ACKNOWLEDGMENTS.gz ${D}/${docdir}/${PN}
+    install -m 644 ${B}/README.chklastlog ${D}/${docdir}/${PN}
+    install -m 644 ${B}/README.chkwtmp ${D}/${docdir}/${PN}
+    install -m 644 ${B}/README.gz ${D}/${docdir}/${PN}
+    install -m 644 ${B}/COPYRIGHT ${D}/${docdir}/${PN}
+}
diff --git a/meta-security/recipes-security/bastille/files/set_required_questions.py b/meta-security/recipes-security/bastille/files/set_required_questions.py
index 4a28358..f306109 100755
--- a/meta-security/recipes-security/bastille/files/set_required_questions.py
+++ b/meta-security/recipes-security/bastille/files/set_required_questions.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 
 #Signed-off-by: Anne Mulhern <mulhern@yoctoproject.org>
 
@@ -83,7 +83,7 @@
   @param name qlabel The question label for which the distro is to be added.
   """
   questions_in = open(qfile)
-  questions_out = tempfile.NamedTemporaryFile(delete=False)
+  questions_out = tempfile.NamedTemporaryFile(mode="w+", delete=False)
   for l in add_requires(qlabel, distro, questions_in):
     questions_out.write(l)
   questions_out.close()
diff --git a/meta-security/recipes-security/buck-security/buck-security_0.7.bb b/meta-security/recipes-security/buck-security/buck-security_0.7.bb
deleted file mode 100644
index 3733c88..0000000
--- a/meta-security/recipes-security/buck-security/buck-security_0.7.bb
+++ /dev/null
@@ -1,63 +0,0 @@
-SUMMARY = "Linux security scanner"
-DESCRIPTION = "Buck-Security is a security scanner for Debian and Ubuntu Linux. It runs a couple of important checks and helps you to harden your Linux \
-system. This enables you to quickly overview the security status of your Linux system."
-SECTION = "security"
-LICENSE = "GPL-2.0"
-LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/GPL-2.0;md5=801f80980d171dd6425610833a22dbe6"
-RDEPENDS_${PN} = "coreutils \
-                  gnupg \
-                  net-tools \
-                  perl \
-                  perl-module-data-dumper \
-                  perl-module-file-basename \
-                  perl-module-file-spec \
-                  perl-module-getopt-long \
-                  perl-module-lib \
-                  perl-module-posix \
-                  perl-module-term-ansicolor \
-                  perl-module-time-localtime \
-                  pinentry \
-                 "
-
-RDEPENDS_${PN}_class-native = "coreutils \
-                               net-tools \
-                               perl \
-                               perl-module-data-dumper \
-                               perl-module-file-basename \
-                               perl-module-file-spec \
-                               perl-module-getopt-long \
-                               perl-module-lib \
-                               perl-module-posix \
-                               perl-module-term-ansicolor \
-                               perl-module-time-localtime \
-                              "
-
-SRC_URI = "http://sourceforge.net/projects/buck-security/files/buck-security/buck-security_${PV}/${BPN}_${PV}.tar.gz"
-
-SRC_URI[md5sum] = "611a3e9bb7ed8a8270aa15216c321c53"
-SRC_URI[sha256sum] = "c533c6631ec3554dd8d39d2d1c3ed44badbbf50810ebb75469c74639fa294b01"
-
-S = "${WORKDIR}/${BPN}_${PV}"
-
-do_configure() {
-    :
-}
-
-do_compile() {
-    :
-}
-
-do_install() {
-    install -d ${D}${bindir}/buck
-    cp -r ${S}/* ${D}${bindir}/buck
-    cp -r ${S}/buck-security ${D}${bindir}
-    sed -i 's!use lib "checks"!use lib File::Spec->catfile(dirname(File::Spec->rel2abs(__FILE__)), "buck/checks")!' ${D}${bindir}/buck-security
-    sed -i 's!use lib "checks/lib"!use lib File::Spec->catfile(dirname(File::Spec->rel2abs(__FILE__)), "buck/checks/lib")!' ${D}${bindir}/buck-security
-    sed -i 's!use lib "lib"!use lib File::Spec->catfile(dirname(File::Spec->rel2abs(__FILE__)), "buck/lib")!' ${D}${bindir}/buck-security
-    sed -i 's!my $buck_root = "."!my $buck_root = File::Spec->catfile(dirname(File::Spec->rel2abs(__FILE__)), "buck")!' ${D}${bindir}/buck-security
-
-}
-
-FILES_${PN} = "${bindir}/*"
-
-BBCLASSEXTEND = "native"
diff --git a/meta-security/recipes-security/ecryptfs-utils/ecryptfs-utils_111.bb b/meta-security/recipes-security/ecryptfs-utils/ecryptfs-utils_111.bb
index e45ee0b..d8cd06f 100644
--- a/meta-security/recipes-security/ecryptfs-utils/ecryptfs-utils_111.bb
+++ b/meta-security/recipes-security/ecryptfs-utils/ecryptfs-utils_111.bb
@@ -41,7 +41,7 @@
 PACKAGECONFIG[pam] = "--enable-pam,--disable-pam,libpam,"
 
 do_configure_prepend() {
-    export NSS_CFLAGS="-I${STAGING_INCDIR}/nspr4 -I${STAGING_INCDIR}/nss3"
+    export NSS_CFLAGS="-I${STAGING_INCDIR}/nspr -I${STAGING_INCDIR}/nss3"
     export NSS_LIBS="-L${STAGING_BASELIBDIR} -lssl3 -lsmime3 -lnss3 -lsoftokn3 -lnssutil3"
     export KEYUTILS_CFLAGS="-I${STAGING_INCDIR}"
     export KEYUTILS_LIBS="-L${STAGING_LIBDIR} -lkeyutils"
diff --git a/meta-security/recipes-security/fail2ban/files/0001-python3-fail2ban-2-3-conversion.patch b/meta-security/recipes-security/fail2ban/files/0001-python3-fail2ban-2-3-conversion.patch
new file mode 100644
index 0000000..ee872ec
--- /dev/null
+++ b/meta-security/recipes-security/fail2ban/files/0001-python3-fail2ban-2-3-conversion.patch
@@ -0,0 +1,2527 @@
+From abaa20435bac7decffa69e6f965aac9ce29aff6a Mon Sep 17 00:00:00 2001
+From: Armin Kuster <akuster808@gmail.com>
+Date: Wed, 12 Feb 2020 17:19:15 +0000
+Subject: [PATCH] python3-fail2ban: 2-3 conversion
+
+Upstream-Status: OE specific.
+
+fail2ban handles py3 via a 2-3 conversion utility.
+
+Signed-off-by: Armin Kuster <akuster808@gmail.com>
+---
+ fail2ban/client/actionreader.py               |   4 +-
+ fail2ban/client/configparserinc.py            |  10 +-
+ fail2ban/client/configreader.py               |   4 +-
+ fail2ban/client/csocket.py                    |   4 +-
+ fail2ban/client/fail2banclient.py             |   4 +-
+ fail2ban/client/fail2banregex.py              |  20 +-
+ fail2ban/client/filterreader.py               |   2 +-
+ fail2ban/client/jailreader.py                 |   4 +-
+ fail2ban/helpers.py                           |  15 +-
+ fail2ban/server/action.py                     |  19 +-
+ fail2ban/server/actions.py                    |  24 +-
+ fail2ban/server/asyncserver.py                |   4 +-
+ fail2ban/server/banmanager.py                 |  18 +-
+ fail2ban/server/database.py                   |   6 +-
+ fail2ban/server/failmanager.py                |   8 +-
+ fail2ban/server/failregex.py                  |   9 +-
+ fail2ban/server/filter.py                     |  12 +-
+ fail2ban/server/filterpoll.py                 |   2 +-
+ fail2ban/server/filterpyinotify.py            |   6 +-
+ fail2ban/server/ipdns.py                      |  16 +-
+ fail2ban/server/jail.py                       |  14 +-
+ fail2ban/server/mytime.py                     |   2 +-
+ fail2ban/server/server.py                     |  18 +-
+ fail2ban/server/strptime.py                   |   6 +-
+ fail2ban/server/ticket.py                     |  14 +-
+ fail2ban/server/transmitter.py                |   2 +-
+ fail2ban/server/utils.py                      |   6 +-
+ fail2ban/tests/action_d/test_badips.py        |   2 +-
+ fail2ban/tests/actiontestcase.py              |   4 +-
+ fail2ban/tests/clientreadertestcase.py        |   4 +-
+ fail2ban/tests/databasetestcase.py            |  16 +-
+ fail2ban/tests/datedetectortestcase.py        |   6 +-
+ fail2ban/tests/fail2banclienttestcase.py      |   8 +-
+ fail2ban/tests/failmanagertestcase.py         |  10 +-
+ .../tests/files/config/apache-auth/digest.py  |  20 +-
+ fail2ban/tests/filtertestcase.py              |  92 ++---
+ fail2ban/tests/misctestcase.py                |  22 +-
+ fail2ban/tests/observertestcase.py            |  34 +-
+ fail2ban/tests/samplestestcase.py             |   8 +-
+ fail2ban/tests/servertestcase.py              |  28 +-
+ fail2ban/tests/sockettestcase.py              |   2 +-
+ fail2ban/tests/utils.py                       |  22 +-
+ setup.py                                      | 326 ------------------
+ 43 files changed, 264 insertions(+), 593 deletions(-)
+ delete mode 100755 setup.py
+
+diff --git a/fail2ban/client/actionreader.py b/fail2ban/client/actionreader.py
+index 80617a50..ecf323c5 100644
+--- a/fail2ban/client/actionreader.py
++++ b/fail2ban/client/actionreader.py
+@@ -90,11 +90,11 @@ class ActionReader(DefinitionInitConfigReader):
+ 		stream = list()
+ 		stream.append(head + ["addaction", self._name])
+ 		multi = []
+-		for opt, optval in opts.iteritems():
++		for opt, optval in opts.items():
+ 			if opt in self._configOpts and not opt.startswith('known/'):
+ 				multi.append([opt, optval])
+ 		if self._initOpts:
+-			for opt, optval in self._initOpts.iteritems():
++			for opt, optval in self._initOpts.items():
+ 				if opt not in self._configOpts and not opt.startswith('known/'):
+ 					multi.append([opt, optval])
+ 		if len(multi) > 1:
+diff --git a/fail2ban/client/configparserinc.py b/fail2ban/client/configparserinc.py
+index e0f39579..45c77437 100644
+--- a/fail2ban/client/configparserinc.py
++++ b/fail2ban/client/configparserinc.py
+@@ -62,7 +62,7 @@ if sys.version_info >= (3,2):
+ 					parser, option, accum, rest, section, map, *args, **kwargs)
+ 
+ else: # pragma: no cover
+-	from ConfigParser import SafeConfigParser, \
++	from configparser import SafeConfigParser, \
+ 		InterpolationMissingOptionError, NoOptionError, NoSectionError
+ 
+ 	# Interpolate missing known/option as option from default section
+@@ -327,7 +327,7 @@ after = 1.conf
+ 			# mix it with defaults:
+ 			return set(opts.keys()) | set(self._defaults)
+ 		# only own option names:
+-		return opts.keys()
++		return list(opts.keys())
+ 
+ 	def read(self, filenames, get_includes=True):
+ 		if not isinstance(filenames, list):
+@@ -356,7 +356,7 @@ after = 1.conf
+ 					ret += i
+ 					# merge defaults and all sections to self:
+ 					alld.update(cfg.get_defaults())
+-					for n, s in cfg.get_sections().iteritems():
++					for n, s in cfg.get_sections().items():
+ 						# conditional sections
+ 						cond = SafeConfigParserWithIncludes.CONDITIONAL_RE.match(n)
+ 						if cond:
+@@ -366,7 +366,7 @@ after = 1.conf
+ 								del(s['__name__'])
+ 							except KeyError:
+ 								pass
+-							for k in s.keys():
++							for k in list(s.keys()):
+ 								v = s.pop(k)
+ 								s[k + cond] = v
+ 						s2 = alls.get(n)
+@@ -399,7 +399,7 @@ after = 1.conf
+ 			sec.update(options)
+ 			return
+ 		sk = {}
+-		for k, v in options.iteritems():
++		for k, v in options.items():
+ 			if not k.startswith(pref) and k != '__name__':
+ 				sk[pref+k] = v
+ 		sec.update(sk)
+diff --git a/fail2ban/client/configreader.py b/fail2ban/client/configreader.py
+index 20709b72..b5167409 100644
+--- a/fail2ban/client/configreader.py
++++ b/fail2ban/client/configreader.py
+@@ -26,7 +26,7 @@ __license__ = "GPL"
+ 
+ import glob
+ import os
+-from ConfigParser import NoOptionError, NoSectionError
++from configparser import NoOptionError, NoSectionError
+ 
+ from .configparserinc import sys, SafeConfigParserWithIncludes, logLevel
+ from ..helpers import getLogger, _as_bool, _merge_dicts, substituteRecursiveTags
+@@ -197,7 +197,7 @@ class ConfigReaderUnshared(SafeConfigParserWithIncludes):
+ 		config_files += sorted(glob.glob('%s/*.local' % config_dir))
+ 
+ 		# choose only existing ones
+-		config_files = filter(os.path.exists, config_files)
++		config_files = list(filter(os.path.exists, config_files))
+ 
+ 		if len(config_files):
+ 			# at least one config exists and accessible
+diff --git a/fail2ban/client/csocket.py b/fail2ban/client/csocket.py
+index ab3e294b..9417cde9 100644
+--- a/fail2ban/client/csocket.py
++++ b/fail2ban/client/csocket.py
+@@ -47,7 +47,7 @@ class CSocket:
+ 	
+ 	def send(self, msg, nonblocking=False, timeout=None):
+ 		# Convert every list member to string
+-		obj = dumps(map(CSocket.convert, msg), HIGHEST_PROTOCOL)
++		obj = dumps(list(map(CSocket.convert, msg)), HIGHEST_PROTOCOL)
+ 		self.__csock.send(obj + CSPROTO.END)
+ 		return self.receive(self.__csock, nonblocking, timeout)
+ 
+@@ -71,7 +71,7 @@ class CSocket:
+ 	@staticmethod
+ 	def convert(m):
+ 		"""Convert every "unexpected" member of message to string"""
+-		if isinstance(m, (basestring, bool, int, float, list, dict, set)):
++		if isinstance(m, (str, bool, int, float, list, dict, set)):
+ 			return m
+ 		else: # pragma: no cover
+ 			return str(m)
+diff --git a/fail2ban/client/fail2banclient.py b/fail2ban/client/fail2banclient.py
+index 7c90ca40..7eb11684 100755
+--- a/fail2ban/client/fail2banclient.py
++++ b/fail2ban/client/fail2banclient.py
+@@ -45,7 +45,7 @@ def _thread_name():
+ 	return threading.current_thread().__class__.__name__
+ 
+ def input_command(): # pragma: no cover
+-	return raw_input(PROMPT)
++	return input(PROMPT)
+ 
+ ##
+ #
+@@ -444,7 +444,7 @@ class Fail2banClient(Fail2banCmdLine, Thread):
+ 			return False
+ 		finally:
+ 			self._alive = False
+-			for s, sh in _prev_signals.iteritems():
++			for s, sh in _prev_signals.items():
+ 				signal.signal(s, sh)
+ 
+ 
+diff --git a/fail2ban/client/fail2banregex.py b/fail2ban/client/fail2banregex.py
+index 513b765d..4a71b3c0 100644
+--- a/fail2ban/client/fail2banregex.py
++++ b/fail2ban/client/fail2banregex.py
+@@ -41,10 +41,10 @@ import shlex
+ import sys
+ import time
+ import time
+-import urllib
++import urllib.request, urllib.parse, urllib.error
+ from optparse import OptionParser, Option
+ 
+-from ConfigParser import NoOptionError, NoSectionError, MissingSectionHeaderError
++from configparser import NoOptionError, NoSectionError, MissingSectionHeaderError
+ 
+ try: # pragma: no cover
+ 	from ..server.filtersystemd import FilterSystemd
+@@ -68,7 +68,7 @@ def debuggexURL(sample, regex, multiline=False, useDns="yes"):
+ 		'flavor': 'python'
+ 	}
+ 	if multiline: args['flags'] = 'm'
+-	return 'https://www.debuggex.com/?' + urllib.urlencode(args)
++	return 'https://www.debuggex.com/?' + urllib.parse.urlencode(args)
+ 
+ def output(args): # pragma: no cover (overriden in test-cases)
+ 	print(args)
+@@ -244,7 +244,7 @@ class Fail2banRegex(object):
+ 
+ 	def __init__(self, opts):
+ 		# set local protected members from given options:
+-		self.__dict__.update(dict(('_'+o,v) for o,v in opts.__dict__.iteritems()))
++		self.__dict__.update(dict(('_'+o,v) for o,v in opts.__dict__.items()))
+ 		self._opts = opts
+ 		self._maxlines_set = False		  # so we allow to override maxlines in cmdline
+ 		self._datepattern_set = False
+@@ -304,7 +304,7 @@ class Fail2banRegex(object):
+ 		realopts = {}
+ 		combopts = reader.getCombined()
+ 		# output all options that are specified in filter-argument as well as some special (mostly interested):
+-		for k in ['logtype', 'datepattern'] + fltOpt.keys():
++		for k in ['logtype', 'datepattern'] + list(fltOpt.keys()):
+ 			# combined options win, but they contain only a sub-set in filter expected keys,
+ 			# so get the rest from definition section:
+ 			try:
+@@ -424,7 +424,7 @@ class Fail2banRegex(object):
+ 			self.output( "Use %11s line : %s" % (regex, shortstr(value)) )
+ 			regex_values = {regextype: [RegexStat(value)]}
+ 
+-		for regextype, regex_values in regex_values.iteritems():
++		for regextype, regex_values in regex_values.items():
+ 			regex = regextype + 'regex'
+ 			setattr(self, "_" + regex, regex_values)
+ 			for regex in regex_values:
+@@ -523,10 +523,10 @@ class Fail2banRegex(object):
+ 							output(ret[1])
+ 					elif self._opts.out == 'msg':
+ 						for ret in ret:
+-							output('\n'.join(map(lambda v:''.join(v for v in v), ret[3].get('matches'))))
++							output('\n'.join([''.join(v for v in v) for v in ret[3].get('matches')]))
+ 					elif self._opts.out == 'row':
+ 						for ret in ret:
+-							output('[%r,\t%r,\t%r],' % (ret[1],ret[2],dict((k,v) for k, v in ret[3].iteritems() if k != 'matches')))
++							output('[%r,\t%r,\t%r],' % (ret[1],ret[2],dict((k,v) for k, v in ret[3].items() if k != 'matches')))
+ 					else:
+ 						for ret in ret:
+ 							output(ret[3].get(self._opts.out))
+@@ -565,9 +565,9 @@ class Fail2banRegex(object):
+ 					ans = [[]]
+ 					for arg in [l, regexlist]:
+ 						ans = [ x + [y] for x in ans for y in arg ]
+-					b = map(lambda a: a[0] +  ' | ' + a[1].getFailRegex() + ' |  ' + 
++					b = [a[0] +  ' | ' + a[1].getFailRegex() + ' |  ' + 
+ 						debuggexURL(self.encode_line(a[0]), a[1].getFailRegex(), 
+-							multiline, self._opts.usedns), ans)
++							multiline, self._opts.usedns) for a in ans]
+ 					pprint_list([x.rstrip() for x in b], header)
+ 				else:
+ 					output( "%s too many to print.  Use --print-all-%s " \
+diff --git a/fail2ban/client/filterreader.py b/fail2ban/client/filterreader.py
+index 413f125e..4f0cc4cf 100644
+--- a/fail2ban/client/filterreader.py
++++ b/fail2ban/client/filterreader.py
+@@ -71,7 +71,7 @@ class FilterReader(DefinitionInitConfigReader):
+ 	@staticmethod
+ 	def _fillStream(stream, opts, jailName):
+ 		prio0idx = 0
+-		for opt, value in opts.iteritems():
++		for opt, value in opts.items():
+ 			if opt in ("failregex", "ignoreregex"):
+ 				if value is None: continue
+ 				multi = []
+diff --git a/fail2ban/client/jailreader.py b/fail2ban/client/jailreader.py
+index 50c1d047..969d0bc0 100644
+--- a/fail2ban/client/jailreader.py
++++ b/fail2ban/client/jailreader.py
+@@ -117,7 +117,7 @@ class JailReader(ConfigReader):
+ 	}
+ 	_configOpts.update(FilterReader._configOpts)
+ 
+-	_ignoreOpts = set(['action', 'filter', 'enabled'] + FilterReader._configOpts.keys())
++	_ignoreOpts = set(['action', 'filter', 'enabled'] + list(FilterReader._configOpts.keys()))
+ 
+ 	def getOptions(self):
+ 
+@@ -236,7 +236,7 @@ class JailReader(ConfigReader):
+ 			stream.extend(self.__filter.convert())
+ 		# and using options from jail:
+ 		FilterReader._fillStream(stream, self.__opts, self.__name)
+-		for opt, value in self.__opts.iteritems():
++		for opt, value in self.__opts.items():
+ 			if opt == "logpath":
+ 				if self.__opts.get('backend', '').startswith("systemd"): continue
+ 				found_files = 0
+diff --git a/fail2ban/helpers.py b/fail2ban/helpers.py
+index 6f2bcdd7..7e563696 100644
+--- a/fail2ban/helpers.py
++++ b/fail2ban/helpers.py
+@@ -31,6 +31,7 @@ import traceback
+ from threading import Lock
+ 
+ from .server.mytime import MyTime
++import importlib
+ 
+ try:
+ 	import ctypes
+@@ -63,7 +64,7 @@ if sys.version_info < (3,): # pragma: 3.x no cover
+ 					from imp import load_dynamic as __ldm
+ 					_sys = __ldm('_sys', 'sys')
+ 				except ImportError: # pragma: no cover - only if load_dynamic fails
+-					reload(sys)
++					importlib.reload(sys)
+ 					_sys = sys
+ 			if hasattr(_sys, "setdefaultencoding"):
+ 				_sys.setdefaultencoding(encoding)
+@@ -101,7 +102,7 @@ if sys.version_info >= (3,): # pragma: 2.x no cover
+ else: # pragma: 3.x no cover
+ 	def uni_decode(x, enc=PREFER_ENC, errors='strict'):
+ 		try:
+-			if isinstance(x, unicode):
++			if isinstance(x, str):
+ 				return x.encode(enc, errors)
+ 			return x
+ 		except (UnicodeDecodeError, UnicodeEncodeError): # pragma: no cover - unsure if reachable
+@@ -110,7 +111,7 @@ else: # pragma: 3.x no cover
+ 			return x.encode(enc, 'replace')
+ 	if sys.getdefaultencoding().upper() != 'UTF-8': # pragma: no cover - utf-8 is default encoding now
+ 		def uni_string(x):
+-			if not isinstance(x, unicode):
++			if not isinstance(x, str):
+ 				return str(x)
+ 			return x.encode(PREFER_ENC, 'replace')
+ 	else:
+@@ -118,7 +119,7 @@ else: # pragma: 3.x no cover
+ 
+ 
+ def _as_bool(val):
+-	return bool(val) if not isinstance(val, basestring) \
++	return bool(val) if not isinstance(val, str) \
+ 		else val.lower() in ('1', 'on', 'true', 'yes')
+ 
+ 
+@@ -326,7 +327,7 @@ def splitwords(s):
+ 	"""
+ 	if not s:
+ 		return []
+-	return filter(bool, map(lambda v: v.strip(), re.split('[ ,\n]+', s)))
++	return list(filter(bool, [v.strip() for v in re.split('[ ,\n]+', s)]))
+ 
+ if sys.version_info >= (3,5):
+ 	eval(compile(r'''if 1:
+@@ -436,7 +437,7 @@ def substituteRecursiveTags(inptags, conditional='',
+ 	while True:
+ 		repFlag = False
+ 		# substitute each value:
+-		for tag in tags.iterkeys():
++		for tag in tags.keys():
+ 			# ignore escaped or already done (or in ignore list):
+ 			if tag in ignore or tag in done: continue
+ 			# ignore replacing callable items from calling map - should be converted on demand only (by get):
+@@ -476,7 +477,7 @@ def substituteRecursiveTags(inptags, conditional='',
+ 					m = tre_search(value, m.end())
+ 					continue
+ 				# if calling map - be sure we've string:
+-				if not isinstance(repl, basestring): repl = uni_string(repl)
++				if not isinstance(repl, str): repl = uni_string(repl)
+ 				value = value.replace('<%s>' % rtag, repl)
+ 				#logSys.log(5, 'value now: %s' % value)
+ 				# increment reference count:
+diff --git a/fail2ban/server/action.py b/fail2ban/server/action.py
+index 5c817fc0..81d50689 100644
+--- a/fail2ban/server/action.py
++++ b/fail2ban/server/action.py
+@@ -111,9 +111,9 @@ class CallingMap(MutableMapping, object):
+ 	def _asdict(self, calculated=False, checker=None):
+ 		d = dict(self.data, **self.storage)
+ 		if not calculated:
+-			return dict((n,v) for n,v in d.iteritems() \
++			return dict((n,v) for n,v in d.items() \
+ 				if not callable(v) or n in self.CM_REPR_ITEMS)
+-		for n,v in d.items():
++		for n,v in list(d.items()):
+ 			if callable(v):
+ 				try:
+ 					# calculate:
+@@ -179,7 +179,7 @@ class CallingMap(MutableMapping, object):
+ 		return self.__class__(_merge_copy_dicts(self.data, self.storage))
+ 
+ 
+-class ActionBase(object):
++class ActionBase(object, metaclass=ABCMeta):
+ 	"""An abstract base class for actions in Fail2Ban.
+ 
+ 	Action Base is a base definition of what methods need to be in
+@@ -209,7 +209,6 @@ class ActionBase(object):
+ 	Any additional arguments specified in `jail.conf` or passed
+ 	via `fail2ban-client` will be passed as keyword arguments.
+ 	"""
+-	__metaclass__ = ABCMeta
+ 
+ 	@classmethod
+ 	def __subclasshook__(cls, C):
+@@ -420,7 +419,7 @@ class CommandAction(ActionBase):
+ 			if not callable(family): # pragma: no cover
+ 				return self.__substCache.get(key, {}).get(family)
+ 			# family as expression - use it to filter values:
+-			return [v for f, v in self.__substCache.get(key, {}).iteritems() if family(f)]
++			return [v for f, v in self.__substCache.get(key, {}).items() if family(f)]
+ 		cmd = args[0]
+ 		if cmd: # set:
+ 			try:
+@@ -432,7 +431,7 @@ class CommandAction(ActionBase):
+ 			try:
+ 				famd = self.__substCache[key]
+ 				cmd = famd.pop(family)
+-				for family, v in famd.items():
++				for family, v in list(famd.items()):
+ 					if v == cmd:
+ 						del famd[family]
+ 			except KeyError: # pragma: no cover
+@@ -448,7 +447,7 @@ class CommandAction(ActionBase):
+ 		res = True
+ 		err = 'Script error'
+ 		if not family: # all started:
+-			family = [famoper for (famoper,v) in self.__started.iteritems() if v]
++			family = [famoper for (famoper,v) in self.__started.items() if v]
+ 		for famoper in family:
+ 			try:
+ 				cmd = self._getOperation(tag, famoper)
+@@ -617,7 +616,7 @@ class CommandAction(ActionBase):
+ 		and executes the resulting command.
+ 		"""
+ 		# collect started families, may be started on demand (conditional):
+-		family = [f for (f,v) in self.__started.iteritems() if v & 3 == 3]; # started and contains items
++		family = [f for (f,v) in self.__started.items() if v & 3 == 3]; # started and contains items
+ 		# if nothing contains items:
+ 		if not family: return True
+ 		# flush:
+@@ -642,7 +641,7 @@ class CommandAction(ActionBase):
+ 		"""
+ 		# collect started families, if started on demand (conditional):
+ 		if family is None:
+-			family = [f for (f,v) in self.__started.iteritems() if v]
++			family = [f for (f,v) in self.__started.items() if v]
+ 			# if no started (on demand) actions:
+ 			if not family: return True
+ 			self.__started = {}
+@@ -676,7 +675,7 @@ class CommandAction(ActionBase):
+ 		ret = True
+ 		# for each started family:
+ 		if self.actioncheck:
+-			for (family, started) in self.__started.items():
++			for (family, started) in list(self.__started.items()):
+ 				if started and not self._invariantCheck(family, beforeRepair):
+ 					# reset started flag and command of executed operation:
+ 					self.__started[family] = 0
+diff --git a/fail2ban/server/actions.py b/fail2ban/server/actions.py
+index 24fea838..94b9c3ed 100644
+--- a/fail2ban/server/actions.py
++++ b/fail2ban/server/actions.py
+@@ -156,11 +156,11 @@ class Actions(JailThread, Mapping):
+ 		else:
+ 			if hasattr(self, '_reload_actions'):
+ 				# reload actions after all parameters set via stream:
+-				for name, initOpts in self._reload_actions.iteritems():
++				for name, initOpts in self._reload_actions.items():
+ 					if name in self._actions:
+ 						self._actions[name].reload(**(initOpts if initOpts else {}))
+ 				# remove obsolete actions (untouched by reload process):
+-				delacts = OrderedDict((name, action) for name, action in self._actions.iteritems()
++				delacts = OrderedDict((name, action) for name, action in self._actions.items()
+ 					if name not in self._reload_actions)
+ 				if len(delacts):
+ 					# unban all tickets using removed actions only:
+@@ -289,7 +289,7 @@ class Actions(JailThread, Mapping):
+ 		"""
+ 		if actions is None:
+ 			actions = self._actions
+-		revactions = actions.items()
++		revactions = list(actions.items())
+ 		revactions.reverse()
+ 		for name, action in revactions:
+ 			try:
+@@ -314,7 +314,7 @@ class Actions(JailThread, Mapping):
+ 			True when the thread exits nicely.
+ 		"""
+ 		cnt = 0
+-		for name, action in self._actions.iteritems():
++		for name, action in self._actions.items():
+ 			try:
+ 				action.start()
+ 			except Exception as e:
+@@ -474,7 +474,7 @@ class Actions(JailThread, Mapping):
+ 					Observers.Main.add('banFound', bTicket, self._jail, btime)
+ 				logSys.notice("[%s] %sBan %s", self._jail.name, ('' if not bTicket.restored else 'Restore '), ip)
+ 				# do actions :
+-				for name, action in self._actions.iteritems():
++				for name, action in self._actions.items():
+ 					try:
+ 						if ticket.restored and getattr(action, 'norestored', False):
+ 							continue
+@@ -511,13 +511,13 @@ class Actions(JailThread, Mapping):
+ 					if bTicket.banEpoch == self.banEpoch and diftm > 3:
+ 						# avoid too often checks:
+ 						if not rebanacts and MyTime.time() > self.__lastConsistencyCheckTM + 3:
+-							for action in self._actions.itervalues():
++							for action in self._actions.values():
+ 								action.consistencyCheck()
+ 							self.__lastConsistencyCheckTM = MyTime.time()
+ 					# check epoch in order to reban it:
+ 					if bTicket.banEpoch < self.banEpoch:
+ 						if not rebanacts: rebanacts = dict(
+-							(name, action) for name, action in self._actions.iteritems()
++							(name, action) for name, action in self._actions.items()
+ 								if action.banEpoch > bTicket.banEpoch)
+ 						cnt += self.__reBan(bTicket, actions=rebanacts)
+ 				else: # pragma: no cover - unexpected: ticket is not banned for some reasons - reban using all actions:
+@@ -542,8 +542,8 @@ class Actions(JailThread, Mapping):
+ 		ip = ticket.getIP()
+ 		aInfo = self.__getActionInfo(ticket)
+ 		if log:
+-			logSys.notice("[%s] Reban %s%s", self._jail.name, aInfo["ip"], (', action %r' % actions.keys()[0] if len(actions) == 1 else ''))
+-		for name, action in actions.iteritems():
++			logSys.notice("[%s] Reban %s%s", self._jail.name, aInfo["ip"], (', action %r' % list(actions.keys())[0] if len(actions) == 1 else ''))
++		for name, action in actions.items():
+ 			try:
+ 				logSys.debug("[%s] action %r: reban %s", self._jail.name, name, ip)
+ 				if not aInfo.immutable: aInfo.reset()
+@@ -567,7 +567,7 @@ class Actions(JailThread, Mapping):
+ 		if not self.__banManager._inBanList(ticket): return
+ 		# do actions :
+ 		aInfo = None
+-		for name, action in self._actions.iteritems():
++		for name, action in self._actions.items():
+ 			try:
+ 				if ticket.restored and getattr(action, 'norestored', False):
+ 					continue
+@@ -616,7 +616,7 @@ class Actions(JailThread, Mapping):
+ 		cnt = 0
+ 		# first we'll execute flush for actions supporting this operation:
+ 		unbactions = {}
+-		for name, action in (actions if actions is not None else self._actions).iteritems():
++		for name, action in (actions if actions is not None else self._actions).items():
+ 			try:
+ 				if hasattr(action, 'flush') and (not isinstance(action, CommandAction) or action.actionflush):
+ 					logSys.notice("[%s] Flush ticket(s) with %s", self._jail.name, name)
+@@ -671,7 +671,7 @@ class Actions(JailThread, Mapping):
+ 		aInfo = self.__getActionInfo(ticket)
+ 		if log:
+ 			logSys.notice("[%s] Unban %s", self._jail.name, aInfo["ip"])
+-		for name, action in unbactions.iteritems():
++		for name, action in unbactions.items():
+ 			try:
+ 				logSys.debug("[%s] action %r: unban %s", self._jail.name, name, ip)
+ 				if not aInfo.immutable: aInfo.reset()
+diff --git a/fail2ban/server/asyncserver.py b/fail2ban/server/asyncserver.py
+index e3400737..f5f9740b 100644
+--- a/fail2ban/server/asyncserver.py
++++ b/fail2ban/server/asyncserver.py
+@@ -178,7 +178,7 @@ def loop(active, timeout=None, use_poll=False, err_count=None):
+ 			elif err_count['listen'] > 100: # pragma: no cover - normally unreachable
+ 				if (
+ 					   e.args[0] == errno.EMFILE # [Errno 24] Too many open files
+-					or sum(err_count.itervalues()) > 1000
++					or sum(err_count.values()) > 1000
+ 				):
+ 					logSys.critical("Too many errors - critical count reached %r", err_count)
+ 					break
+@@ -220,7 +220,7 @@ class AsyncServer(asyncore.dispatcher):
+ 			elif self.__errCount['accept'] > 100:
+ 				if (
+ 					  (isinstance(e, socket.error) and e.args[0] == errno.EMFILE) # [Errno 24] Too many open files
+-					or sum(self.__errCount.itervalues()) > 1000
++					or sum(self.__errCount.values()) > 1000
+ 				):
+ 					logSys.critical("Too many errors - critical count reached %r", self.__errCount)
+ 					self.stop()
+diff --git a/fail2ban/server/banmanager.py b/fail2ban/server/banmanager.py
+index 5770bfd7..9bb44971 100644
+--- a/fail2ban/server/banmanager.py
++++ b/fail2ban/server/banmanager.py
+@@ -105,9 +105,9 @@ class BanManager:
+ 	def getBanList(self, ordered=False, withTime=False):
+ 		with self.__lock:
+ 			if not ordered:
+-				return self.__banList.keys()
++				return list(self.__banList.keys())
+ 			lst = []
+-			for ticket in self.__banList.itervalues():
++			for ticket in self.__banList.values():
+ 				eob = ticket.getEndOfBanTime(self.__banTime)
+ 				lst.append((ticket,eob))
+ 			lst.sort(key=lambda t: t[1])
+@@ -126,7 +126,7 @@ class BanManager:
+ 	
+ 	def __iter__(self):
+ 		with self.__lock:
+-			return self.__banList.itervalues()
++			return iter(self.__banList.values())
+ 
+ 	##
+ 	# Returns normalized value
+@@ -165,7 +165,7 @@ class BanManager:
+ 				return return_dict
+ 		# get ips in lock:
+ 		with self.__lock:
+-			banIPs = [banData.getIP() for banData in self.__banList.values()]
++			banIPs = [banData.getIP() for banData in list(self.__banList.values())]
+ 		# get cymru info:
+ 		try:
+ 			for ip in banIPs:
+@@ -341,7 +341,7 @@ class BanManager:
+ 			# Gets the list of ticket to remove (thereby correct next unban time).
+ 			unBanList = {}
+ 			nextUnbanTime = BanTicket.MAX_TIME
+-			for fid,ticket in self.__banList.iteritems():
++			for fid,ticket in self.__banList.items():
+ 				# current time greater as end of ban - timed out:
+ 				eob = ticket.getEndOfBanTime(self.__banTime)
+ 				if time > eob:
+@@ -357,15 +357,15 @@ class BanManager:
+ 			if len(unBanList):
+ 				if len(unBanList) / 2.0 <= len(self.__banList) / 3.0:
+ 					# few as 2/3 should be removed - remove particular items:
+-					for fid in unBanList.iterkeys():
++					for fid in unBanList.keys():
+ 						del self.__banList[fid]
+ 				else:
+ 					# create new dictionary without items to be deleted:
+-					self.__banList = dict((fid,ticket) for fid,ticket in self.__banList.iteritems() \
++					self.__banList = dict((fid,ticket) for fid,ticket in self.__banList.items() \
+ 						if fid not in unBanList)
+ 						
+ 			# return list of tickets:
+-			return unBanList.values()
++			return list(unBanList.values())
+ 
+ 	##
+ 	# Flush the ban list.
+@@ -375,7 +375,7 @@ class BanManager:
+ 	
+ 	def flushBanList(self):
+ 		with self.__lock:
+-			uBList = self.__banList.values()
++			uBList = list(self.__banList.values())
+ 			self.__banList = dict()
+ 			return uBList
+ 
+diff --git a/fail2ban/server/database.py b/fail2ban/server/database.py
+index ed736a7a..0e8c9aec 100644
+--- a/fail2ban/server/database.py
++++ b/fail2ban/server/database.py
+@@ -67,13 +67,13 @@ if sys.version_info >= (3,): # pragma: 2.x no cover
+ else: # pragma: 3.x no cover
+ 	def _normalize(x):
+ 		if isinstance(x, dict):
+-			return dict((_normalize(k), _normalize(v)) for k, v in x.iteritems())
++			return dict((_normalize(k), _normalize(v)) for k, v in x.items())
+ 		elif isinstance(x, (list, set)):
+ 			return [_normalize(element) for element in x]
+-		elif isinstance(x, unicode):
++		elif isinstance(x, str):
+ 			# in 2.x default text_factory is unicode - so return proper unicode here:
+ 			return x.encode(PREFER_ENC, 'replace').decode(PREFER_ENC)
+-		elif isinstance(x, basestring):
++		elif isinstance(x, str):
+ 			return x.decode(PREFER_ENC, 'replace')
+ 		return x
+ 
+diff --git a/fail2ban/server/failmanager.py b/fail2ban/server/failmanager.py
+index 93c028fb..a9c6b5f6 100644
+--- a/fail2ban/server/failmanager.py
++++ b/fail2ban/server/failmanager.py
+@@ -57,7 +57,7 @@ class FailManager:
+ 	def getFailCount(self):
+ 		# may be slow on large list of failures, should be used for test purposes only...
+ 		with self.__lock:
+-			return len(self.__failList), sum([f.getRetry() for f in self.__failList.values()])
++			return len(self.__failList), sum([f.getRetry() for f in list(self.__failList.values())])
+ 
+ 	def getFailTotal(self):
+ 		with self.__lock:
+@@ -125,7 +125,7 @@ class FailManager:
+ 				# in case of having many active failures, it should be ran only
+ 				# if debug level is "low" enough
+ 				failures_summary = ', '.join(['%s:%d' % (k, v.getRetry())
+-											  for k,v in  self.__failList.iteritems()])
++											  for k,v in  self.__failList.items()])
+ 				logSys.log(logLevel, "Total # of detected failures: %d. Current failures from %d IPs (IP:count): %s"
+ 							 % (self.__failTotal, len(self.__failList), failures_summary))
+ 
+@@ -138,7 +138,7 @@ class FailManager:
+ 	
+ 	def cleanup(self, time):
+ 		with self.__lock:
+-			todelete = [fid for fid,item in self.__failList.iteritems() \
++			todelete = [fid for fid,item in self.__failList.items() \
+ 				if item.getLastTime() + self.__maxTime <= time]
+ 			if len(todelete) == len(self.__failList):
+ 				# remove all:
+@@ -152,7 +152,7 @@ class FailManager:
+ 					del self.__failList[fid]
+ 			else:
+ 				# create new dictionary without items to be deleted:
+-				self.__failList = dict((fid,item) for fid,item in self.__failList.iteritems() \
++				self.__failList = dict((fid,item) for fid,item in self.__failList.items() \
+ 					if item.getLastTime() + self.__maxTime > time)
+ 		self.__bgSvc.service()
+ 	
+diff --git a/fail2ban/server/failregex.py b/fail2ban/server/failregex.py
+index f7dafbef..fb75187d 100644
+--- a/fail2ban/server/failregex.py
++++ b/fail2ban/server/failregex.py
+@@ -128,10 +128,7 @@ class Regex:
+ 			self._regexObj = re.compile(regex, re.MULTILINE if multiline else 0)
+ 			self._regex = regex
+ 			self._altValues = {}
+-			for k in filter(
+-				lambda k: len(k) > len(ALTNAME_PRE) and k.startswith(ALTNAME_PRE),
+-				self._regexObj.groupindex
+-			):
++			for k in [k for k in self._regexObj.groupindex if len(k) > len(ALTNAME_PRE) and k.startswith(ALTNAME_PRE)]:
+ 				n = ALTNAME_CRE.match(k).group(1)
+ 				self._altValues[k] = n
+ 			self._altValues = list(self._altValues.items()) if len(self._altValues) else None
+@@ -211,7 +208,7 @@ class Regex:
+ 	#
+ 	@staticmethod
+ 	def _tupleLinesBuf(tupleLines):
+-		return "\n".join(map(lambda v: "".join(v[::2]), tupleLines)) + "\n"
++		return "\n".join(["".join(v[::2]) for v in tupleLines]) + "\n"
+ 
+ 	##
+ 	# Searches the regular expression.
+@@ -223,7 +220,7 @@ class Regex:
+ 	
+ 	def search(self, tupleLines, orgLines=None):
+ 		buf = tupleLines
+-		if not isinstance(tupleLines, basestring):
++		if not isinstance(tupleLines, str):
+ 			buf = Regex._tupleLinesBuf(tupleLines)
+ 		self._matchCache = self._regexObj.search(buf)
+ 		if self._matchCache:
+diff --git a/fail2ban/server/filter.py b/fail2ban/server/filter.py
+index 998fe298..d181fd38 100644
+--- a/fail2ban/server/filter.py
++++ b/fail2ban/server/filter.py
+@@ -292,7 +292,7 @@ class Filter(JailThread):
+ 			dd = DateDetector()
+ 			dd.default_tz = self.__logtimezone
+ 			if not isinstance(pattern, (list, tuple)):
+-				pattern = filter(bool, map(str.strip, re.split('\n+', pattern)))
++				pattern = list(filter(bool, list(map(str.strip, re.split('\n+', pattern)))))
+ 			for pattern in pattern:
+ 				dd.appendTemplate(pattern)
+ 			self.dateDetector = dd
+@@ -987,7 +987,7 @@ class FileFilter(Filter):
+ 	# @return log paths
+ 
+ 	def getLogPaths(self):
+-		return self.__logs.keys()
++		return list(self.__logs.keys())
+ 
+ 	##
+ 	# Get the log containers
+@@ -995,7 +995,7 @@ class FileFilter(Filter):
+ 	# @return log containers
+ 
+ 	def getLogs(self):
+-		return self.__logs.values()
++		return list(self.__logs.values())
+ 
+ 	##
+ 	# Get the count of log containers
+@@ -1021,7 +1021,7 @@ class FileFilter(Filter):
+ 
+ 	def setLogEncoding(self, encoding):
+ 		encoding = super(FileFilter, self).setLogEncoding(encoding)
+-		for log in self.__logs.itervalues():
++		for log in self.__logs.values():
+ 			log.setEncoding(encoding)
+ 
+ 	def getLog(self, path):
+@@ -1183,7 +1183,7 @@ class FileFilter(Filter):
+ 		"""Status of Filter plus files being monitored.
+ 		"""
+ 		ret = super(FileFilter, self).status(flavor=flavor)
+-		path = self.__logs.keys()
++		path = list(self.__logs.keys())
+ 		ret.append(("File list", path))
+ 		return ret
+ 
+@@ -1191,7 +1191,7 @@ class FileFilter(Filter):
+ 		"""Stop monitoring of log-file(s)
+ 		"""
+ 		# stop files monitoring:
+-		for path in self.__logs.keys():
++		for path in list(self.__logs.keys()):
+ 			self.delLogPath(path)
+ 		# stop thread:
+ 		super(Filter, self).stop()
+diff --git a/fail2ban/server/filterpoll.py b/fail2ban/server/filterpoll.py
+index 228a2c8b..d49315cc 100644
+--- a/fail2ban/server/filterpoll.py
++++ b/fail2ban/server/filterpoll.py
+@@ -176,4 +176,4 @@ class FilterPoll(FileFilter):
+ 			return False
+ 
+ 	def getPendingPaths(self):
+-		return self.__file404Cnt.keys()
++		return list(self.__file404Cnt.keys())
+diff --git a/fail2ban/server/filterpyinotify.py b/fail2ban/server/filterpyinotify.py
+index ca6b253f..b683b860 100644
+--- a/fail2ban/server/filterpyinotify.py
++++ b/fail2ban/server/filterpyinotify.py
+@@ -158,7 +158,7 @@ class FilterPyinotify(FileFilter):
+ 		except KeyError: pass
+ 
+ 	def getPendingPaths(self):
+-		return self.__pending.keys()
++		return list(self.__pending.keys())
+ 
+ 	def _checkPending(self):
+ 		if not self.__pending:
+@@ -168,7 +168,7 @@ class FilterPyinotify(FileFilter):
+ 			return
+ 		found = {}
+ 		minTime = 60
+-		for path, (retardTM, isDir) in self.__pending.iteritems():
++		for path, (retardTM, isDir) in self.__pending.items():
+ 			if ntm - self.__pendingChkTime < retardTM:
+ 				if minTime > retardTM: minTime = retardTM
+ 				continue
+@@ -184,7 +184,7 @@ class FilterPyinotify(FileFilter):
+ 		self.__pendingChkTime = time.time()
+ 		self.__pendingMinTime = minTime
+ 		# process now because we've missed it in monitoring:
+-		for path, isDir in found.iteritems():
++		for path, isDir in found.items():
+ 			self._delPending(path)
+ 			# refresh monitoring of this:
+ 			self._refreshWatcher(path, isDir=isDir)
+diff --git a/fail2ban/server/ipdns.py b/fail2ban/server/ipdns.py
+index 6648dac6..fe8f8db8 100644
+--- a/fail2ban/server/ipdns.py
++++ b/fail2ban/server/ipdns.py
+@@ -275,7 +275,7 @@ class IPAddr(object):
+ 			raise ValueError("invalid ipstr %r, too many plen representation" % (ipstr,))
+ 		if "." in s[1] or ":" in s[1]: # 255.255.255.0 resp. ffff:: style mask
+ 			s[1] = IPAddr.masktoplen(s[1])
+-		s[1] = long(s[1])
++		s[1] = int(s[1])
+ 		return s
+ 		
+ 	def __init(self, ipstr, cidr=CIDR_UNSPEC):
+@@ -309,7 +309,7 @@ class IPAddr(object):
+ 
+ 				# mask out host portion if prefix length is supplied
+ 				if cidr is not None and cidr >= 0:
+-					mask = ~(0xFFFFFFFFL >> cidr)
++					mask = ~(0xFFFFFFFF >> cidr)
+ 					self._addr &= mask
+ 					self._plen = cidr
+ 
+@@ -321,13 +321,13 @@ class IPAddr(object):
+ 
+ 				# mask out host portion if prefix length is supplied
+ 				if cidr is not None and cidr >= 0:
+-					mask = ~(0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFL >> cidr)
++					mask = ~(0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF >> cidr)
+ 					self._addr &= mask
+ 					self._plen = cidr
+ 
+ 				# if IPv6 address is a IPv4-compatible, make instance a IPv4
+ 				elif self.isInNet(IPAddr.IP6_4COMPAT):
+-					self._addr = lo & 0xFFFFFFFFL
++					self._addr = lo & 0xFFFFFFFF
+ 					self._family = socket.AF_INET
+ 					self._plen = 32
+ 		else:
+@@ -445,7 +445,7 @@ class IPAddr(object):
+ 		elif self.isIPv6:
+ 			# convert network to host byte order
+ 			hi = self._addr >> 64
+-			lo = self._addr & 0xFFFFFFFFFFFFFFFFL
++			lo = self._addr & 0xFFFFFFFFFFFFFFFF
+ 			binary = struct.pack("!QQ", hi, lo)
+ 			if self._plen and self._plen < 128:
+ 				add = "/%d" % self._plen
+@@ -503,9 +503,9 @@ class IPAddr(object):
+ 		if self.family != net.family:
+ 			return False
+ 		if self.isIPv4:
+-			mask = ~(0xFFFFFFFFL >> net.plen)
++			mask = ~(0xFFFFFFFF >> net.plen)
+ 		elif self.isIPv6:
+-			mask = ~(0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFL >> net.plen)
++			mask = ~(0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF >> net.plen)
+ 		else:
+ 			return False
+ 		
+@@ -517,7 +517,7 @@ class IPAddr(object):
+ 		m4 = (1 << 32)-1
+ 		mmap = {m6: 128, m4: 32, 0: 0}
+ 		m = 0
+-		for i in xrange(0, 128):
++		for i in range(0, 128):
+ 			m |= 1 << i
+ 			if i < 32:
+ 				mmap[m ^ m4] = 32-1-i
+diff --git a/fail2ban/server/jail.py b/fail2ban/server/jail.py
+index ce9968a8..5fa5ef10 100644
+--- a/fail2ban/server/jail.py
++++ b/fail2ban/server/jail.py
+@@ -26,7 +26,7 @@ __license__ = "GPL"
+ import logging
+ import math
+ import random
+-import Queue
++import queue
+ 
+ from .actions import Actions
+ from ..helpers import getLogger, _as_bool, extractOptions, MyTime
+@@ -76,7 +76,7 @@ class Jail(object):
+ 							"might not function correctly. Please shorten"
+ 							% name)
+ 		self.__name = name
+-		self.__queue = Queue.Queue()
++		self.__queue = queue.Queue()
+ 		self.__filter = None
+ 		# Extra parameters for increase ban time
+ 		self._banExtra = {};
+@@ -127,25 +127,25 @@ class Jail(object):
+ 			"Failed to initialize any backend for Jail %r" % self.name)
+ 
+ 	def _initPolling(self, **kwargs):
+-		from filterpoll import FilterPoll
++		from .filterpoll import FilterPoll
+ 		logSys.info("Jail '%s' uses poller %r" % (self.name, kwargs))
+ 		self.__filter = FilterPoll(self, **kwargs)
+ 
+ 	def _initGamin(self, **kwargs):
+ 		# Try to import gamin
+-		from filtergamin import FilterGamin
++		from .filtergamin import FilterGamin
+ 		logSys.info("Jail '%s' uses Gamin %r" % (self.name, kwargs))
+ 		self.__filter = FilterGamin(self, **kwargs)
+ 
+ 	def _initPyinotify(self, **kwargs):
+ 		# Try to import pyinotify
+-		from filterpyinotify import FilterPyinotify
++		from .filterpyinotify import FilterPyinotify
+ 		logSys.info("Jail '%s' uses pyinotify %r" % (self.name, kwargs))
+ 		self.__filter = FilterPyinotify(self, **kwargs)
+ 
+ 	def _initSystemd(self, **kwargs): # pragma: systemd no cover
+ 		# Try to import systemd
+-		from filtersystemd import FilterSystemd
++		from .filtersystemd import FilterSystemd
+ 		logSys.info("Jail '%s' uses systemd %r" % (self.name, kwargs))
+ 		self.__filter = FilterSystemd(self, **kwargs)
+ 
+@@ -213,7 +213,7 @@ class Jail(object):
+ 		try:
+ 			ticket = self.__queue.get(False)
+ 			return ticket
+-		except Queue.Empty:
++		except queue.Empty:
+ 			return False
+ 
+ 	def setBanTimeExtra(self, opt, value):
+diff --git a/fail2ban/server/mytime.py b/fail2ban/server/mytime.py
+index 98b69bd4..24bba5cf 100644
+--- a/fail2ban/server/mytime.py
++++ b/fail2ban/server/mytime.py
+@@ -162,7 +162,7 @@ class MyTime:
+ 		
+ 		@returns number (calculated seconds from expression "val")
+ 		"""
+-		if isinstance(val, (int, long, float, complex)):
++		if isinstance(val, (int, float, complex)):
+ 			return val
+ 		# replace together standing abbreviations, example '1d12h' -> '1d 12h':
+ 		val = MyTime._str2sec_prep.sub(r" \1", val)
+diff --git a/fail2ban/server/server.py b/fail2ban/server/server.py
+index 159f6506..fc948e8c 100644
+--- a/fail2ban/server/server.py
++++ b/fail2ban/server/server.py
+@@ -97,7 +97,7 @@ class Server:
+ 
+ 	def start(self, sock, pidfile, force=False, observer=True, conf={}):
+ 		# First set the mask to only allow access to owner
+-		os.umask(0077)
++		os.umask(0o077)
+ 		# Second daemonize before logging etc, because it will close all handles:
+ 		if self.__daemon: # pragma: no cover
+ 			logSys.info("Starting in daemon mode")
+@@ -190,7 +190,7 @@ class Server:
+ 
+ 		# Restore default signal handlers:
+ 		if _thread_name() == '_MainThread':
+-			for s, sh in self.__prev_signals.iteritems():
++			for s, sh in self.__prev_signals.items():
+ 				signal.signal(s, sh)
+ 
+ 		# Give observer a small chance to complete its work before exit
+@@ -268,10 +268,10 @@ class Server:
+ 		logSys.info("Stopping all jails")
+ 		with self.__lock:
+ 			# 1st stop all jails (signal and stop actions/filter thread):
+-			for name in self.__jails.keys():
++			for name in list(self.__jails.keys()):
+ 				self.delJail(name, stop=True, join=False)
+ 			# 2nd wait for end and delete jails:
+-			for name in self.__jails.keys():
++			for name in list(self.__jails.keys()):
+ 				self.delJail(name, stop=False, join=True)
+ 
+ 	def reloadJails(self, name, opts, begin):
+@@ -302,7 +302,7 @@ class Server:
+ 					if "--restart" in opts:
+ 						self.stopAllJail()
+ 				# first set all affected jail(s) to idle and reset filter regex and other lists/dicts:
+-				for jn, jail in self.__jails.iteritems():
++				for jn, jail in self.__jails.items():
+ 					if name == '--all' or jn == name:
+ 						jail.idle = True
+ 						self.__reload_state[jn] = jail
+@@ -313,7 +313,7 @@ class Server:
+ 			# end reload, all affected (or new) jails have already all new parameters (via stream) and (re)started:
+ 			with self.__lock:
+ 				deljails = []
+-				for jn, jail in self.__jails.iteritems():
++				for jn, jail in self.__jails.items():
+ 					# still in reload state:
+ 					if jn in self.__reload_state:
+ 						# remove jails that are not reloaded (untouched, so not in new configuration)
+@@ -513,7 +513,7 @@ class Server:
+ 			jails = [self.__jails[name]]
+ 		else:
+ 			# in all jails:
+-			jails = self.__jails.values()
++			jails = list(self.__jails.values())
+ 		# unban given or all (if value is None):
+ 		cnt = 0
+ 		ifexists |= (name is None)
+@@ -551,7 +551,7 @@ class Server:
+ 	def isAlive(self, jailnum=None):
+ 		if jailnum is not None and len(self.__jails) != jailnum:
+ 			return 0
+-		for jail in self.__jails.values():
++		for jail in list(self.__jails.values()):
+ 			if not jail.isAlive():
+ 				return 0
+ 		return 1
+@@ -759,7 +759,7 @@ class Server:
+ 			return "flushed"
+ 			
+ 	def setThreadOptions(self, value):
+-		for o, v in value.iteritems():
++		for o, v in value.items():
+ 			if o == 'stacksize':
+ 				threading.stack_size(int(v)*1024)
+ 			else: # pragma: no cover
+diff --git a/fail2ban/server/strptime.py b/fail2ban/server/strptime.py
+index 498d284b..a5579fdc 100644
+--- a/fail2ban/server/strptime.py
++++ b/fail2ban/server/strptime.py
+@@ -79,7 +79,7 @@ timeRE['ExY'] = r"(?P<Y>%s\d)" % _getYearCentRE(cent=(0,3), distance=3)
+ timeRE['Exy'] = r"(?P<y>%s\d)" % _getYearCentRE(cent=(2,3), distance=3)
+ 
+ def getTimePatternRE():
+-	keys = timeRE.keys()
++	keys = list(timeRE.keys())
+ 	patt = (r"%%(%%|%s|[%s])" % (
+ 		"|".join([k for k in keys if len(k) > 1]),
+ 		"".join([k for k in keys if len(k) == 1]),
+@@ -134,7 +134,7 @@ def zone2offset(tz, dt):
+ 	"""
+ 	if isinstance(tz, int):
+ 		return tz
+-	if isinstance(tz, basestring):
++	if isinstance(tz, str):
+ 		return validateTimeZone(tz)
+ 	tz, tzo = tz
+ 	if tzo is None or tzo == '': # without offset
+@@ -171,7 +171,7 @@ def reGroupDictStrptime(found_dict, msec=False, default_tz=None):
+ 	year = month = day = hour = minute = tzoffset = \
+ 	weekday = julian = week_of_year = None
+ 	second = fraction = 0
+-	for key, val in found_dict.iteritems():
++	for key, val in found_dict.items():
+ 		if val is None: continue
+ 		# Directives not explicitly handled below:
+ 		#   c, x, X
+diff --git a/fail2ban/server/ticket.py b/fail2ban/server/ticket.py
+index f67e0d23..f0b727c2 100644
+--- a/fail2ban/server/ticket.py
++++ b/fail2ban/server/ticket.py
+@@ -55,7 +55,7 @@ class Ticket(object):
+ 		self._time = time if time is not None else MyTime.time()
+ 		self._data = {'matches': matches or [], 'failures': 0}
+ 		if data is not None:
+-			for k,v in data.iteritems():
++			for k,v in data.items():
+ 				if v is not None:
+ 					self._data[k] = v
+ 		if ticket:
+@@ -89,7 +89,7 @@ class Ticket(object):
+ 
+ 	def setIP(self, value):
+ 		# guarantee using IPAddr instead of unicode, str for the IP
+-		if isinstance(value, basestring):
++		if isinstance(value, str):
+ 			value = IPAddr(value)
+ 		self._ip = value
+ 	
+@@ -181,7 +181,7 @@ class Ticket(object):
+ 		if len(args) == 1:
+ 			# todo: if support >= 2.7 only:
+ 			# self._data = {k:v for k,v in args[0].iteritems() if v is not None}
+-			self._data = dict([(k,v) for k,v in args[0].iteritems() if v is not None])
++			self._data = dict([(k,v) for k,v in args[0].items() if v is not None])
+ 		# add k,v list or dict (merge):
+ 		elif len(args) == 2:
+ 			self._data.update((args,))
+@@ -192,7 +192,7 @@ class Ticket(object):
+ 		# filter (delete) None values:
+ 		# todo: if support >= 2.7 only:
+ 		# self._data = {k:v for k,v in self._data.iteritems() if v is not None}
+-		self._data = dict([(k,v) for k,v in self._data.iteritems() if v is not None])
++		self._data = dict([(k,v) for k,v in self._data.items() if v is not None])
+ 	
+ 	def getData(self, key=None, default=None):
+ 		# return whole data dict:
+@@ -201,17 +201,17 @@ class Ticket(object):
+ 		# return default if not exists:
+ 		if not self._data:
+ 			return default
+-		if not isinstance(key,(str,unicode,type(None),int,float,bool,complex)):
++		if not isinstance(key,(str,type(None),int,float,bool,complex)):
+ 			# return filtered by lambda/function:
+ 			if callable(key):
+ 				# todo: if support >= 2.7 only:
+ 				# return {k:v for k,v in self._data.iteritems() if key(k)}
+-				return dict([(k,v) for k,v in self._data.iteritems() if key(k)])
++				return dict([(k,v) for k,v in self._data.items() if key(k)])
+ 			# return filtered by keys:
+ 			if hasattr(key, '__iter__'):
+ 				# todo: if support >= 2.7 only:
+ 				# return {k:v for k,v in self._data.iteritems() if k in key}
+-				return dict([(k,v) for k,v in self._data.iteritems() if k in key])
++				return dict([(k,v) for k,v in self._data.items() if k in key])
+ 		# return single value of data:
+ 		return self._data.get(key, default)
+ 
+diff --git a/fail2ban/server/transmitter.py b/fail2ban/server/transmitter.py
+index f83e9d5f..80726cb4 100644
+--- a/fail2ban/server/transmitter.py
++++ b/fail2ban/server/transmitter.py
+@@ -475,7 +475,7 @@ class Transmitter:
+ 			opt = command[1][len("bantime."):]
+ 			return self.__server.getBanTimeExtra(name, opt)
+ 		elif command[1] == "actions":
+-			return self.__server.getActions(name).keys()
++			return list(self.__server.getActions(name).keys())
+ 		elif command[1] == "action":
+ 			actionname = command[2]
+ 			actionvalue = command[3]
+diff --git a/fail2ban/server/utils.py b/fail2ban/server/utils.py
+index d4461a7d..13c24e76 100644
+--- a/fail2ban/server/utils.py
++++ b/fail2ban/server/utils.py
+@@ -57,7 +57,7 @@ _RETCODE_HINTS = {
+ 
+ # Dictionary to lookup signal name from number
+ signame = dict((num, name)
+-	for name, num in signal.__dict__.iteritems() if name.startswith("SIG"))
++	for name, num in signal.__dict__.items() if name.startswith("SIG"))
+ 
+ class Utils():
+ 	"""Utilities provide diverse static methods like executes OS shell commands, etc.
+@@ -109,7 +109,7 @@ class Utils():
+ 								break
+ 					else: # pragma: 3.x no cover (dict is in 2.6 only)
+ 						remlst = []
+-						for (ck, cv) in cache.iteritems():
++						for (ck, cv) in cache.items():
+ 							# if expired:
+ 							if cv[1] <= t:
+ 								remlst.append(ck)
+@@ -152,7 +152,7 @@ class Utils():
+ 		if not isinstance(realCmd, list):
+ 			realCmd = [realCmd]
+ 		i = len(realCmd)-1
+-		for k, v in varsDict.iteritems():
++		for k, v in varsDict.items():
+ 			varsStat += "%s=$%s " % (k, i)
+ 			realCmd.append(v)
+ 			i += 1
+diff --git a/fail2ban/tests/action_d/test_badips.py b/fail2ban/tests/action_d/test_badips.py
+index 013c0fdb..3c35e4d7 100644
+--- a/fail2ban/tests/action_d/test_badips.py
++++ b/fail2ban/tests/action_d/test_badips.py
+@@ -32,7 +32,7 @@ from ..utils import LogCaptureTestCase, CONFIG_DIR
+ if sys.version_info >= (3, ): # pragma: 2.x no cover
+ 	from urllib.error import HTTPError, URLError
+ else: # pragma: 3.x no cover
+-	from urllib2 import HTTPError, URLError
++	from urllib.error import HTTPError, URLError
+ 
+ def skip_if_not_available(f):
+ 	"""Helper to decorate tests to skip in case of timeout/http-errors like "502 bad gateway".
+diff --git a/fail2ban/tests/actiontestcase.py b/fail2ban/tests/actiontestcase.py
+index 1a00c040..ecd09246 100644
+--- a/fail2ban/tests/actiontestcase.py
++++ b/fail2ban/tests/actiontestcase.py
+@@ -244,14 +244,14 @@ class CommandActionTest(LogCaptureTestCase):
+ 		setattr(self.__action, 'ab', "<ac>")
+ 		setattr(self.__action, 'x?family=inet6', "")
+ 		# produce self-referencing properties except:
+-		self.assertRaisesRegexp(ValueError, r"properties contain self referencing definitions",
++		self.assertRaisesRegex(ValueError, r"properties contain self referencing definitions",
+ 			lambda: self.__action.replaceTag("<a><b>", 
+ 				self.__action._properties, conditional="family=inet4")
+ 		)
+ 		# remore self-referencing in props:
+ 		delattr(self.__action, 'ac')
+ 		# produce self-referencing query except:
+-		self.assertRaisesRegexp(ValueError, r"possible self referencing definitions in query",
++		self.assertRaisesRegex(ValueError, r"possible self referencing definitions in query",
+ 			lambda: self.__action.replaceTag("<x<x<x<x<x<x<x<x<x<x<x<x<x<x<x<x<x<x<x<x<x>>>>>>>>>>>>>>>>>>>>>", 
+ 				self.__action._properties, conditional="family=inet6")
+ 		)
+diff --git a/fail2ban/tests/clientreadertestcase.py b/fail2ban/tests/clientreadertestcase.py
+index 2c1d0a0e..aa7908c4 100644
+--- a/fail2ban/tests/clientreadertestcase.py
++++ b/fail2ban/tests/clientreadertestcase.py
+@@ -390,7 +390,7 @@ class JailReaderTest(LogCaptureTestCase):
+ 		# And multiple groups (`][` instead of `,`)
+ 		result = extractOptions(option.replace(',', ']['))
+ 		expected2 = (expected[0],
+-		 dict((k, v.replace(',', '][')) for k, v in expected[1].iteritems())
++		 dict((k, v.replace(',', '][')) for k, v in expected[1].items())
+ 		)
+ 		self.assertEqual(expected2, result)
+ 
+@@ -975,7 +975,7 @@ filter = testfilter1
+ 		self.assertEqual(add_actions[-1][-1], "{}")
+ 
+ 	def testLogPathFileFilterBackend(self):
+-		self.assertRaisesRegexp(ValueError, r"Have not found any log file for .* jail", 
++		self.assertRaisesRegex(ValueError, r"Have not found any log file for .* jail", 
+ 			self._testLogPath, backend='polling')
+ 
+ 	def testLogPathSystemdBackend(self):
+diff --git a/fail2ban/tests/databasetestcase.py b/fail2ban/tests/databasetestcase.py
+index 9a5e9fa1..562461a6 100644
+--- a/fail2ban/tests/databasetestcase.py
++++ b/fail2ban/tests/databasetestcase.py
+@@ -67,7 +67,7 @@ class DatabaseTest(LogCaptureTestCase):
+ 
+ 	@property
+ 	def db(self):
+-		if isinstance(self._db, basestring) and self._db == ':auto-create-in-memory:':
++		if isinstance(self._db, str) and self._db == ':auto-create-in-memory:':
+ 			self._db = getFail2BanDb(self.dbFilename)
+ 		return self._db
+ 	@db.setter
+@@ -159,7 +159,7 @@ class DatabaseTest(LogCaptureTestCase):
+ 			self.db = Fail2BanDb(self.dbFilename)
+ 			self.assertEqual(self.db.getJailNames(), set(['DummyJail #29162448 with 0 tickets']))
+ 			self.assertEqual(self.db.getLogPaths(), set(['/tmp/Fail2BanDb_pUlZJh.log']))
+-			ticket = FailTicket("127.0.0.1", 1388009242.26, [u"abc\n"])
++			ticket = FailTicket("127.0.0.1", 1388009242.26, ["abc\n"])
+ 			self.assertEqual(self.db.getBans()[0], ticket)
+ 
+ 			self.assertEqual(self.db.updateDb(Fail2BanDb.__version__), Fail2BanDb.__version__)
+@@ -185,9 +185,9 @@ class DatabaseTest(LogCaptureTestCase):
+ 		self.assertEqual(len(bans), 2)
+ 		# compare first ticket completely:
+ 		ticket = FailTicket("1.2.3.7", 1417595494, [
+-			u'Dec  3 09:31:08 f2btest test:auth[27658]: pam_unix(test:auth): authentication failure; logname= uid=0 euid=0 tty=test ruser= rhost=1.2.3.7',
+-			u'Dec  3 09:31:32 f2btest test:auth[27671]: pam_unix(test:auth): authentication failure; logname= uid=0 euid=0 tty=test ruser= rhost=1.2.3.7',
+-			u'Dec  3 09:31:34 f2btest test:auth[27673]: pam_unix(test:auth): authentication failure; logname= uid=0 euid=0 tty=test ruser= rhost=1.2.3.7'
++			'Dec  3 09:31:08 f2btest test:auth[27658]: pam_unix(test:auth): authentication failure; logname= uid=0 euid=0 tty=test ruser= rhost=1.2.3.7',
++			'Dec  3 09:31:32 f2btest test:auth[27671]: pam_unix(test:auth): authentication failure; logname= uid=0 euid=0 tty=test ruser= rhost=1.2.3.7',
++			'Dec  3 09:31:34 f2btest test:auth[27673]: pam_unix(test:auth): authentication failure; logname= uid=0 euid=0 tty=test ruser= rhost=1.2.3.7'
+ 		])
+ 		ticket.setAttempt(3)
+ 		self.assertEqual(bans[0], ticket)
+@@ -286,11 +286,11 @@ class DatabaseTest(LogCaptureTestCase):
+ 		# invalid + valid, invalid + valid unicode, invalid + valid dual converted (like in filter:readline by fallback) ...
+ 		tickets = [
+ 		  FailTicket("127.0.0.1", 0, ['user "test"', 'user "\xd1\xe2\xe5\xf2\xe0"', 'user "\xc3\xa4\xc3\xb6\xc3\xbc\xc3\x9f"']),
+-		  FailTicket("127.0.0.2", 0, ['user "test"', u'user "\xd1\xe2\xe5\xf2\xe0"', u'user "\xc3\xa4\xc3\xb6\xc3\xbc\xc3\x9f"']),
++		  FailTicket("127.0.0.2", 0, ['user "test"', 'user "\xd1\xe2\xe5\xf2\xe0"', 'user "\xc3\xa4\xc3\xb6\xc3\xbc\xc3\x9f"']),
+ 		  FailTicket("127.0.0.3", 0, ['user "test"', b'user "\xd1\xe2\xe5\xf2\xe0"', b'user "\xc3\xa4\xc3\xb6\xc3\xbc\xc3\x9f"']),
+-		  FailTicket("127.0.0.4", 0, ['user "test"', 'user "\xd1\xe2\xe5\xf2\xe0"', u'user "\xe4\xf6\xfc\xdf"']),
++		  FailTicket("127.0.0.4", 0, ['user "test"', 'user "\xd1\xe2\xe5\xf2\xe0"', 'user "\xe4\xf6\xfc\xdf"']),
+ 		  FailTicket("127.0.0.5", 0, ['user "test"', 'unterminated \xcf']),
+-		  FailTicket("127.0.0.6", 0, ['user "test"', u'unterminated \xcf']),
++		  FailTicket("127.0.0.6", 0, ['user "test"', 'unterminated \xcf']),
+ 		  FailTicket("127.0.0.7", 0, ['user "test"', b'unterminated \xcf'])
+ 		]
+ 		for ticket in tickets:
+diff --git a/fail2ban/tests/datedetectortestcase.py b/fail2ban/tests/datedetectortestcase.py
+index 458f76ef..49ada60d 100644
+--- a/fail2ban/tests/datedetectortestcase.py
++++ b/fail2ban/tests/datedetectortestcase.py
+@@ -279,7 +279,7 @@ class DateDetectorTest(LogCaptureTestCase):
+ 		self.assertEqual(logTime, mu)
+ 		self.assertEqual(logMatch.group(1), '2012/10/11 02:37:17')
+ 		# confuse it with year being at the end
+-		for i in xrange(10):
++		for i in range(10):
+ 			( logTime, logMatch ) =	self.datedetector.getTime('11/10/2012 02:37:17 [error] 18434#0')
+ 			self.assertEqual(logTime, mu)
+ 			self.assertEqual(logMatch.group(1), '11/10/2012 02:37:17')
+@@ -505,7 +505,7 @@ class CustomDateFormatsTest(unittest.TestCase):
+ 			date = dd.getTime(line)
+ 			if matched:
+ 				self.assertTrue(date)
+-				if isinstance(matched, basestring):
++				if isinstance(matched, str):
+ 					self.assertEqual(matched, date[1].group(1))
+ 				else:
+ 					self.assertEqual(matched, date[0])
+@@ -537,7 +537,7 @@ class CustomDateFormatsTest(unittest.TestCase):
+ 			date = dd.getTime(line)
+ 			if matched:
+ 				self.assertTrue(date)
+-				if isinstance(matched, basestring): # pragma: no cover
++				if isinstance(matched, str): # pragma: no cover
+ 					self.assertEqual(matched, date[1].group(1))
+ 				else:
+ 					self.assertEqual(matched, date[0])
+diff --git a/fail2ban/tests/fail2banclienttestcase.py b/fail2ban/tests/fail2banclienttestcase.py
+index 95f73ed3..bba354fa 100644
+--- a/fail2ban/tests/fail2banclienttestcase.py
++++ b/fail2ban/tests/fail2banclienttestcase.py
+@@ -367,10 +367,10 @@ def with_foreground_server_thread(startextra={}):
+ 				# several commands to server in body of decorated function:
+ 				return f(self, tmp, startparams, *args, **kwargs)
+ 			except Exception as e: # pragma: no cover
+-				print('=== Catch an exception: %s' % e)
++				print(('=== Catch an exception: %s' % e))
+ 				log = self.getLog()
+ 				if log:
+-					print('=== Error of server, log: ===\n%s===' % log)
++					print(('=== Error of server, log: ===\n%s===' % log))
+ 					self.pruneLog()
+ 				raise
+ 			finally:
+@@ -440,7 +440,7 @@ class Fail2banClientServerBase(LogCaptureTestCase):
+ 					)
+ 		except:  # pragma: no cover
+ 			if _inherited_log(startparams):
+-				print('=== Error by wait fot server, log: ===\n%s===' % self.getLog())
++				print(('=== Error by wait fot server, log: ===\n%s===' % self.getLog()))
+ 				self.pruneLog()
+ 			log = pjoin(tmp, "f2b.log")
+ 			if isfile(log):
+@@ -1610,6 +1610,6 @@ class Fail2banServerTest(Fail2banClientServerBase):
+ 			self.stopAndWaitForServerEnd(SUCCESS)
+ 
+ 		def testServerStartStop(self):
+-			for i in xrange(2000):
++			for i in range(2000):
+ 				self._testServerStartStop()
+ 
+diff --git a/fail2ban/tests/failmanagertestcase.py b/fail2ban/tests/failmanagertestcase.py
+index a5425286..2a94cc82 100644
+--- a/fail2ban/tests/failmanagertestcase.py
++++ b/fail2ban/tests/failmanagertestcase.py
+@@ -45,11 +45,11 @@ class AddFailure(unittest.TestCase):
+ 		super(AddFailure, self).tearDown()
+ 		
+ 	def _addDefItems(self):
+-		self.__items = [[u'193.168.0.128', 1167605999.0],
+-					    [u'193.168.0.128', 1167605999.0],
+-					    [u'193.168.0.128', 1167605999.0],
+-					    [u'193.168.0.128', 1167605999.0],
+-					    [u'193.168.0.128', 1167605999.0],
++		self.__items = [['193.168.0.128', 1167605999.0],
++					    ['193.168.0.128', 1167605999.0],
++					    ['193.168.0.128', 1167605999.0],
++					    ['193.168.0.128', 1167605999.0],
++					    ['193.168.0.128', 1167605999.0],
+ 					    ['87.142.124.10', 1167605999.0],
+ 					    ['87.142.124.10', 1167605999.0],
+ 					    ['87.142.124.10', 1167605999.0],
+diff --git a/fail2ban/tests/files/config/apache-auth/digest.py b/fail2ban/tests/files/config/apache-auth/digest.py
+index 03588594..e2297ab3 100755
+--- a/fail2ban/tests/files/config/apache-auth/digest.py
++++ b/fail2ban/tests/files/config/apache-auth/digest.py
+@@ -41,7 +41,7 @@ def auth(v):
+         response="%s"
+     """ % ( username, algorithm, realm, url, nonce, qop, response )
+ #        opaque="%s",
+-    print(p.method, p.url, p.headers)
++    print((p.method, p.url, p.headers))
+     s =  requests.Session()
+     return s.send(p)
+ 
+@@ -76,18 +76,18 @@ r = auth(v)
+ 
+ # [Sun Jul 28 21:41:20 2013] [error] [client 127.0.0.1] Digest: unknown algorithm `super funky chicken' received: /digest/
+ 
+-print(r.status_code,r.headers, r.text)
++print((r.status_code,r.headers, r.text))
+ v['algorithm'] = algorithm
+ 
+ 
+ r = auth(v)
+-print(r.status_code,r.headers, r.text)
++print((r.status_code,r.headers, r.text))
+ 
+ nonce = v['nonce']
+ v['nonce']=v['nonce'][5:-5]
+ 
+ r = auth(v)
+-print(r.status_code,r.headers, r.text)
++print((r.status_code,r.headers, r.text))
+ 
+ # [Sun Jul 28 21:05:31.178340 2013] [auth_digest:error] [pid 24224:tid 139895539455744] [client 127.0.0.1:56906] AH01793: invalid qop `auth' received: /digest/qop_none/
+ 
+@@ -95,7 +95,7 @@ print(r.status_code,r.headers, r.text)
+ v['nonce']=nonce[0:11] + 'ZZZ' + nonce[14:]
+ 
+ r = auth(v)
+-print(r.status_code,r.headers, r.text)
++print((r.status_code,r.headers, r.text))
+ 
+ #[Sun Jul 28 21:18:11.769228 2013] [auth_digest:error] [pid 24752:tid 139895505884928] [client 127.0.0.1:56964] AH01776: invalid nonce b9YAiJDiBAZZZ1b1abe02d20063ea3b16b544ea1b0d981c1bafe received - hash is not d42d824dee7aaf50c3ba0a7c6290bd453e3dd35b
+ 
+@@ -107,7 +107,7 @@ import time
+ time.sleep(1)
+ 
+ r = auth(v)
+-print(r.status_code,r.headers, r.text)
++print((r.status_code,r.headers, r.text))
+ 
+ # Obtained by putting the following code in modules/aaa/mod_auth_digest.c
+ # in the function initialize_secret
+@@ -137,7 +137,7 @@ s = sha.sha(apachesecret)
+ 
+ v=preauth()
+ 
+-print(v['nonce'])
++print((v['nonce']))
+ realm = v['Digest realm'][1:-1]
+ 
+ (t,) = struct.unpack('l',base64.b64decode(v['nonce'][1:13]))
+@@ -156,13 +156,13 @@ print(v)
+ 
+ r = auth(v)
+ #[Mon Jul 29 02:12:55.539813 2013] [auth_digest:error] [pid 9647:tid 139895522670336] [client 127.0.0.1:58474] AH01777: invalid nonce 59QJppTiBAA=b08983fd166ade9840407df1b0f75b9e6e07d88d received - user attempted time travel
+-print(r.status_code,r.headers, r.text)
++print((r.status_code,r.headers, r.text))
+ 
+ url='/digest_onetime/'
+ v=preauth()
+ 
+ # Need opaque header handling in auth
+ r = auth(v)
+-print(r.status_code,r.headers, r.text)
++print((r.status_code,r.headers, r.text))
+ r = auth(v)
+-print(r.status_code,r.headers, r.text)
++print((r.status_code,r.headers, r.text))
+diff --git a/fail2ban/tests/filtertestcase.py b/fail2ban/tests/filtertestcase.py
+index 35785a58..8eeb6902 100644
+--- a/fail2ban/tests/filtertestcase.py
++++ b/fail2ban/tests/filtertestcase.py
+@@ -22,7 +22,7 @@
+ __copyright__ = "Copyright (c) 2004 Cyril Jaquier; 2012 Yaroslav Halchenko"
+ __license__ = "GPL"
+ 
+-from __builtin__ import open as fopen
++from builtins import open as fopen
+ import unittest
+ import os
+ import re
+@@ -204,7 +204,7 @@ def _copy_lines_between_files(in_, fout, n=None, skip=0, mode='a', terminal_line
+ 	else:
+ 		fin = in_
+ 	# Skip
+-	for i in xrange(skip):
++	for i in range(skip):
+ 		fin.readline()
+ 	# Read
+ 	i = 0
+@@ -244,7 +244,7 @@ def _copy_lines_to_journal(in_, fields={},n=None, skip=0, terminal_line=""): # p
+ 	# Required for filtering
+ 	fields.update(TEST_JOURNAL_FIELDS)
+ 	# Skip
+-	for i in xrange(skip):
++	for i in range(skip):
+ 		fin.readline()
+ 	# Read/Write
+ 	i = 0
+@@ -306,18 +306,18 @@ class BasicFilter(unittest.TestCase):
+ 	def testTest_tm(self):
+ 		unittest.F2B.SkipIfFast()
+ 		## test function "_tm" works correct (returns the same as slow strftime):
+-		for i in xrange(1417512352, (1417512352 // 3600 + 3) * 3600):
++		for i in range(1417512352, (1417512352 // 3600 + 3) * 3600):
+ 			tm = MyTime.time2str(i)
+ 			if _tm(i) != tm: # pragma: no cover - never reachable
+ 				self.assertEqual((_tm(i), i), (tm, i))
+ 
+ 	def testWrongCharInTupleLine(self):
+ 		## line tuple has different types (ascii after ascii / unicode):
+-		for a1 in ('', u'', b''):
+-			for a2 in ('2016-09-05T20:18:56', u'2016-09-05T20:18:56', b'2016-09-05T20:18:56'):
++		for a1 in ('', '', b''):
++			for a2 in ('2016-09-05T20:18:56', '2016-09-05T20:18:56', b'2016-09-05T20:18:56'):
+ 				for a3 in (
+ 					'Fail for "g\xc3\xb6ran" from 192.0.2.1', 
+-					u'Fail for "g\xc3\xb6ran" from 192.0.2.1',
++					'Fail for "g\xc3\xb6ran" from 192.0.2.1',
+ 					b'Fail for "g\xc3\xb6ran" from 192.0.2.1'
+ 				):
+ 					# join should work if all arguments have the same type:
+@@ -435,7 +435,7 @@ class IgnoreIP(LogCaptureTestCase):
+ 
+ 	def testAddAttempt(self):
+ 		self.filter.setMaxRetry(3)
+-		for i in xrange(1, 1+3):
++		for i in range(1, 1+3):
+ 			self.filter.addAttempt('192.0.2.1')
+ 			self.assertLogged('Attempt 192.0.2.1', '192.0.2.1:%d' % i, all=True, wait=True)
+ 		self.jail.actions._Actions__checkBan()
+@@ -472,7 +472,7 @@ class IgnoreIP(LogCaptureTestCase):
+ 		# like both test-cases above, just cached (so once per key)...
+ 		self.filter.ignoreCache = {"key":"<ip>"}
+ 		self.filter.ignoreCommand = 'if [ "<ip>" = "10.0.0.1" ]; then exit 0; fi; exit 1'
+-		for i in xrange(5):
++		for i in range(5):
+ 			self.pruneLog()
+ 			self.assertTrue(self.filter.inIgnoreIPList("10.0.0.1"))
+ 			self.assertFalse(self.filter.inIgnoreIPList("10.0.0.0"))
+@@ -483,7 +483,7 @@ class IgnoreIP(LogCaptureTestCase):
+ 		# by host of IP:
+ 		self.filter.ignoreCache = {"key":"<ip-host>"}
+ 		self.filter.ignoreCommand = 'if [ "<ip-host>" = "test-host" ]; then exit 0; fi; exit 1'
+-		for i in xrange(5):
++		for i in range(5):
+ 			self.pruneLog()
+ 			self.assertTrue(self.filter.inIgnoreIPList(FailTicket("2001:db8::1")))
+ 			self.assertFalse(self.filter.inIgnoreIPList(FailTicket("2001:db8::ffff")))
+@@ -495,7 +495,7 @@ class IgnoreIP(LogCaptureTestCase):
+ 		self.filter.ignoreCache = {"key":"<F-USER>", "max-count":"10", "max-time":"1h"}
+ 		self.assertEqual(self.filter.ignoreCache, ["<F-USER>", 10, 60*60])
+ 		self.filter.ignoreCommand = 'if [ "<F-USER>" = "tester" ]; then exit 0; fi; exit 1'
+-		for i in xrange(5):
++		for i in range(5):
+ 			self.pruneLog()
+ 			self.assertTrue(self.filter.inIgnoreIPList(FailTicket("tester", data={'user': 'tester'})))
+ 			self.assertFalse(self.filter.inIgnoreIPList(FailTicket("root", data={'user': 'root'})))
+@@ -644,7 +644,7 @@ class LogFileFilterPoll(unittest.TestCase):
+ 			fc = FileContainer(fname, self.filter.getLogEncoding())
+ 			fc.open()
+ 			# no time - nothing should be found :
+-			for i in xrange(10):
++			for i in range(10):
+ 				f.write("[sshd] error: PAM: failure len 1\n")
+ 				f.flush()
+ 				fc.setPos(0); self.filter.seekToTime(fc, time)
+@@ -718,14 +718,14 @@ class LogFileFilterPoll(unittest.TestCase):
+ 			# variable length of file (ca 45K or 450K before and hereafter):
+ 			# write lines with smaller as search time:
+ 			t = time - count - 1
+-			for i in xrange(count):
++			for i in range(count):
+ 				f.write("%s [sshd] error: PAM: failure\n" % _tm(t))
+ 				t += 1
+ 			f.flush()
+ 			fc.setPos(0); self.filter.seekToTime(fc, time)
+ 			self.assertEqual(fc.getPos(), 47*count)
+ 			# write lines with exact search time:
+-			for i in xrange(10):
++			for i in range(10):
+ 				f.write("%s [sshd] error: PAM: failure\n" % _tm(time))
+ 			f.flush()
+ 			fc.setPos(0); self.filter.seekToTime(fc, time)
+@@ -734,8 +734,8 @@ class LogFileFilterPoll(unittest.TestCase):
+ 			self.assertEqual(fc.getPos(), 47*count)
+ 			# write lines with greater as search time:
+ 			t = time+1
+-			for i in xrange(count//500):
+-				for j in xrange(500):
++			for i in range(count//500):
++				for j in range(500):
+ 					f.write("%s [sshd] error: PAM: failure\n" % _tm(t))
+ 					t += 1
+ 				f.flush()
+@@ -1488,10 +1488,10 @@ def get_monitor_failures_journal_testcase(Filter_): # pragma: systemd no cover
+ 			# Add direct utf, unicode, blob:
+ 			for l in (
+ 		    "error: PAM: Authentication failure for \xe4\xf6\xfc\xdf from 192.0.2.1",
+-		   u"error: PAM: Authentication failure for \xe4\xf6\xfc\xdf from 192.0.2.1",
++		   "error: PAM: Authentication failure for \xe4\xf6\xfc\xdf from 192.0.2.1",
+ 		   b"error: PAM: Authentication failure for \xe4\xf6\xfc\xdf from 192.0.2.1".decode('utf-8', 'replace'),
+ 		    "error: PAM: Authentication failure for \xc3\xa4\xc3\xb6\xc3\xbc\xc3\x9f from 192.0.2.2",
+-		   u"error: PAM: Authentication failure for \xc3\xa4\xc3\xb6\xc3\xbc\xc3\x9f from 192.0.2.2",
++		   "error: PAM: Authentication failure for \xc3\xa4\xc3\xb6\xc3\xbc\xc3\x9f from 192.0.2.2",
+ 		   b"error: PAM: Authentication failure for \xc3\xa4\xc3\xb6\xc3\xbc\xc3\x9f from 192.0.2.2".decode('utf-8', 'replace')
+ 			):
+ 				fields = self.journal_fields
+@@ -1520,7 +1520,7 @@ class GetFailures(LogCaptureTestCase):
+ 
+ 	# so that they could be reused by other tests
+ 	FAILURES_01 = ('193.168.0.128', 3, 1124013599.0,
+-				  [u'Aug 14 11:59:59 [sshd] error: PAM: Authentication failure for kevin from 193.168.0.128']*3)
++				  ['Aug 14 11:59:59 [sshd] error: PAM: Authentication failure for kevin from 193.168.0.128']*3)
+ 
+ 	def setUp(self):
+ 		"""Call before every test case."""
+@@ -1595,8 +1595,8 @@ class GetFailures(LogCaptureTestCase):
+ 
+ 	def testGetFailures02(self):
+ 		output = ('141.3.81.106', 4, 1124013539.0,
+-				  [u'Aug 14 11:%d:59 i60p295 sshd[12365]: Failed publickey for roehl from ::ffff:141.3.81.106 port 51332 ssh2'
+-				   % m for m in 53, 54, 57, 58])
++				  ['Aug 14 11:%d:59 i60p295 sshd[12365]: Failed publickey for roehl from ::ffff:141.3.81.106 port 51332 ssh2'
++				   % m for m in (53, 54, 57, 58)])
+ 
+ 		self.filter.addLogPath(GetFailures.FILENAME_02, autoSeek=0)
+ 		self.filter.addFailRegex(r"Failed .* from <HOST>")
+@@ -1691,17 +1691,17 @@ class GetFailures(LogCaptureTestCase):
+ 		# We should still catch failures with usedns = no ;-)
+ 		output_yes = (
+ 			('93.184.216.34', 2, 1124013539.0,
+-			  [u'Aug 14 11:54:59 i60p295 sshd[12365]: Failed publickey for roehl from example.com port 51332 ssh2',
+-			   u'Aug 14 11:58:59 i60p295 sshd[12365]: Failed publickey for roehl from ::ffff:93.184.216.34 port 51332 ssh2']
++			  ['Aug 14 11:54:59 i60p295 sshd[12365]: Failed publickey for roehl from example.com port 51332 ssh2',
++			   'Aug 14 11:58:59 i60p295 sshd[12365]: Failed publickey for roehl from ::ffff:93.184.216.34 port 51332 ssh2']
+ 			),
+ 			('2606:2800:220:1:248:1893:25c8:1946', 1, 1124013299.0,
+-			  [u'Aug 14 11:54:59 i60p295 sshd[12365]: Failed publickey for roehl from example.com port 51332 ssh2']
++			  ['Aug 14 11:54:59 i60p295 sshd[12365]: Failed publickey for roehl from example.com port 51332 ssh2']
+ 			),
+ 		)
+ 
+ 		output_no = (
+ 			('93.184.216.34', 1, 1124013539.0,
+-			  [u'Aug 14 11:58:59 i60p295 sshd[12365]: Failed publickey for roehl from ::ffff:93.184.216.34 port 51332 ssh2']
++			  ['Aug 14 11:58:59 i60p295 sshd[12365]: Failed publickey for roehl from ::ffff:93.184.216.34 port 51332 ssh2']
+ 			)
+ 		)
+ 
+@@ -1807,9 +1807,9 @@ class DNSUtilsTests(unittest.TestCase):
+ 		self.assertTrue(c.get('a') is None)
+ 		self.assertEqual(c.get('a', 'test'), 'test')
+ 		# exact 5 elements :
+-		for i in xrange(5):
++		for i in range(5):
+ 			c.set(i, i)
+-		for i in xrange(5):
++		for i in range(5):
+ 			self.assertEqual(c.get(i), i)
+ 		# remove unavailable key:
+ 		c.unset('a'); c.unset('a')
+@@ -1817,30 +1817,30 @@ class DNSUtilsTests(unittest.TestCase):
+ 	def testCacheMaxSize(self):
+ 		c = Utils.Cache(maxCount=5, maxTime=60)
+ 		# exact 5 elements :
+-		for i in xrange(5):
++		for i in range(5):
+ 			c.set(i, i)
+-		self.assertEqual([c.get(i) for i in xrange(5)], [i for i in xrange(5)])
+-		self.assertNotIn(-1, (c.get(i, -1) for i in xrange(5)))
++		self.assertEqual([c.get(i) for i in range(5)], [i for i in range(5)])
++		self.assertNotIn(-1, (c.get(i, -1) for i in range(5)))
+ 		# add one - too many:
+ 		c.set(10, i)
+ 		# one element should be removed :
+-		self.assertIn(-1, (c.get(i, -1) for i in xrange(5)))
++		self.assertIn(-1, (c.get(i, -1) for i in range(5)))
+ 		# test max size (not expired):
+-		for i in xrange(10):
++		for i in range(10):
+ 			c.set(i, 1)
+ 		self.assertEqual(len(c), 5)
+ 
+ 	def testCacheMaxTime(self):
+ 		# test max time (expired, timeout reached) :
+ 		c = Utils.Cache(maxCount=5, maxTime=0.0005)
+-		for i in xrange(10):
++		for i in range(10):
+ 			c.set(i, 1)
+ 		st = time.time()
+ 		self.assertTrue(Utils.wait_for(lambda: time.time() >= st + 0.0005, 1))
+ 		# we have still 5 elements (or fewer if too slow test mashine):
+ 		self.assertTrue(len(c) <= 5)
+ 		# but all that are expiered also:
+-		for i in xrange(10):
++		for i in range(10):
+ 			self.assertTrue(c.get(i) is None)
+ 		# here the whole cache should be empty:
+ 		self.assertEqual(len(c), 0)
+@@ -1861,7 +1861,7 @@ class DNSUtilsTests(unittest.TestCase):
+ 					c = count
+ 					while c:
+ 						c -= 1
+-						s = xrange(0, 256, 1) if forw else xrange(255, -1, -1)
++						s = range(0, 256, 1) if forw else range(255, -1, -1)
+ 						if random: shuffle([i for i in s])
+ 						for i in s:
+ 							IPAddr('192.0.2.'+str(i), IPAddr.FAM_IPv4)
+@@ -1983,15 +1983,15 @@ class DNSUtilsNetworkTests(unittest.TestCase):
+ 
+ 	def testAddr2bin(self):
+ 		res = IPAddr('10.0.0.0')
+-		self.assertEqual(res.addr, 167772160L)
++		self.assertEqual(res.addr, 167772160)
+ 		res = IPAddr('10.0.0.0', cidr=None)
+-		self.assertEqual(res.addr, 167772160L)
+-		res = IPAddr('10.0.0.0', cidr=32L)
+-		self.assertEqual(res.addr, 167772160L)
+-		res = IPAddr('10.0.0.1', cidr=32L)
+-		self.assertEqual(res.addr, 167772161L)
+-		res = IPAddr('10.0.0.1', cidr=31L)
+-		self.assertEqual(res.addr, 167772160L)
++		self.assertEqual(res.addr, 167772160)
++		res = IPAddr('10.0.0.0', cidr=32)
++		self.assertEqual(res.addr, 167772160)
++		res = IPAddr('10.0.0.1', cidr=32)
++		self.assertEqual(res.addr, 167772161)
++		res = IPAddr('10.0.0.1', cidr=31)
++		self.assertEqual(res.addr, 167772160)
+ 
+ 		self.assertEqual(IPAddr('10.0.0.0').hexdump, '0a000000')
+ 		self.assertEqual(IPAddr('1::2').hexdump, '00010000000000000000000000000002')
+@@ -2067,9 +2067,9 @@ class DNSUtilsNetworkTests(unittest.TestCase):
+ 			'93.184.216.34': 'ip4-test', 
+ 			'2606:2800:220:1:248:1893:25c8:1946': 'ip6-test'
+ 		}
+-		d2 = dict([(IPAddr(k), v) for k, v in d.iteritems()])
+-		self.assertTrue(isinstance(d.keys()[0], basestring))
+-		self.assertTrue(isinstance(d2.keys()[0], IPAddr))
++		d2 = dict([(IPAddr(k), v) for k, v in d.items()])
++		self.assertTrue(isinstance(list(d.keys())[0], str))
++		self.assertTrue(isinstance(list(d2.keys())[0], IPAddr))
+ 		self.assertEqual(d.get(ip4[2], ''), 'ip4-test')
+ 		self.assertEqual(d.get(ip6[2], ''), 'ip6-test')
+ 		self.assertEqual(d2.get(str(ip4[2]), ''), 'ip4-test')
+diff --git a/fail2ban/tests/misctestcase.py b/fail2ban/tests/misctestcase.py
+index 9b986f53..94f7a8de 100644
+--- a/fail2ban/tests/misctestcase.py
++++ b/fail2ban/tests/misctestcase.py
+@@ -29,9 +29,9 @@ import tempfile
+ import shutil
+ import fnmatch
+ from glob import glob
+-from StringIO import StringIO
++from io import StringIO
+ 
+-from utils import LogCaptureTestCase, logSys as DefLogSys
++from .utils import LogCaptureTestCase, logSys as DefLogSys
+ 
+ from ..helpers import formatExceptionInfo, mbasename, TraceBack, FormatterWithTraceBack, getLogger, \
+ 	splitwords, uni_decode, uni_string
+@@ -67,7 +67,7 @@ class HelpersTest(unittest.TestCase):
+ 		self.assertEqual(splitwords(' 1\n  2'), ['1', '2'])
+ 		self.assertEqual(splitwords(' 1\n  2, 3'), ['1', '2', '3'])
+ 		# string as unicode:
+-		self.assertEqual(splitwords(u' 1\n  2, 3'), ['1', '2', '3'])
++		self.assertEqual(splitwords(' 1\n  2, 3'), ['1', '2', '3'])
+ 
+ 
+ if sys.version_info >= (2,7):
+@@ -197,11 +197,11 @@ class TestsUtilsTest(LogCaptureTestCase):
+ 
+ 	def testUniConverters(self):
+ 		self.assertRaises(Exception, uni_decode, 
+-			(b'test' if sys.version_info >= (3,) else u'test'), 'f2b-test::non-existing-encoding')
+-		uni_decode((b'test\xcf' if sys.version_info >= (3,) else u'test\xcf'))
++			(b'test' if sys.version_info >= (3,) else 'test'), 'f2b-test::non-existing-encoding')
++		uni_decode((b'test\xcf' if sys.version_info >= (3,) else 'test\xcf'))
+ 		uni_string(b'test\xcf')
+ 		uni_string('test\xcf')
+-		uni_string(u'test\xcf')
++		uni_string('test\xcf')
+ 
+ 	def testSafeLogging(self):
+ 		# logging should be exception-safe, to avoid possible errors (concat, str. conversion, representation failures, etc)
+@@ -213,7 +213,7 @@ class TestsUtilsTest(LogCaptureTestCase):
+ 				if self.err:
+ 					raise Exception('no represenation for test!')
+ 				else:
+-					return u'conv-error (\xf2\xf0\xe5\xf2\xe8\xe9), unterminated utf \xcf'
++					return 'conv-error (\xf2\xf0\xe5\xf2\xe8\xe9), unterminated utf \xcf'
+ 		test = Test()
+ 		logSys.log(logging.NOTICE, "test 1a: %r", test)
+ 		self.assertLogged("Traceback", "no represenation for test!")
+@@ -261,7 +261,7 @@ class TestsUtilsTest(LogCaptureTestCase):
+ 					func_raise()
+ 
+ 			try:
+-				print deep_function(3)
++				print(deep_function(3))
+ 			except ValueError:
+ 				s = tb()
+ 
+@@ -278,7 +278,7 @@ class TestsUtilsTest(LogCaptureTestCase):
+ 			self.assertIn(':', s)
+ 
+ 	def _testAssertionErrorRE(self, regexp, fun, *args, **kwargs):
+-		self.assertRaisesRegexp(AssertionError, regexp, fun, *args, **kwargs)
++		self.assertRaisesRegex(AssertionError, regexp, fun, *args, **kwargs)
+ 	
+ 	def testExtendedAssertRaisesRE(self):
+ 		## test _testAssertionErrorRE several fail cases:
+@@ -316,13 +316,13 @@ class TestsUtilsTest(LogCaptureTestCase):
+ 		self._testAssertionErrorRE(r"'a' unexpectedly found in 'cba'",
+ 			self.assertNotIn, 'a', 'cba')
+ 		self._testAssertionErrorRE(r"1 unexpectedly found in \[0, 1, 2\]",
+-			self.assertNotIn, 1, xrange(3))
++			self.assertNotIn, 1, range(3))
+ 		self._testAssertionErrorRE(r"'A' unexpectedly found in \['C', 'A'\]",
+ 			self.assertNotIn, 'A', (c.upper() for c in 'cba' if c != 'b'))
+ 		self._testAssertionErrorRE(r"'a' was not found in 'xyz'",
+ 			self.assertIn, 'a', 'xyz')
+ 		self._testAssertionErrorRE(r"5 was not found in \[0, 1, 2\]",
+-			self.assertIn, 5, xrange(3))
++			self.assertIn, 5, range(3))
+ 		self._testAssertionErrorRE(r"'A' was not found in \['C', 'B'\]",
+ 			self.assertIn, 'A', (c.upper() for c in 'cba' if c != 'a'))
+ 		## assertLogged, assertNotLogged positive case:
+diff --git a/fail2ban/tests/observertestcase.py b/fail2ban/tests/observertestcase.py
+index 8e944454..ed520286 100644
+--- a/fail2ban/tests/observertestcase.py
++++ b/fail2ban/tests/observertestcase.py
+@@ -69,7 +69,7 @@ class BanTimeIncr(LogCaptureTestCase):
+ 		a.setBanTimeExtra('multipliers', multipliers)
+ 		# test algorithm and max time 24 hours :
+ 		self.assertEqual(
+-			[a.calcBanTime(600, i) for i in xrange(1, 11)],
++			[a.calcBanTime(600, i) for i in range(1, 11)],
+ 			[1200, 2400, 4800, 9600, 19200, 38400, 76800, 86400, 86400, 86400]
+ 		)
+ 		# with extra large max time (30 days):
+@@ -81,38 +81,38 @@ class BanTimeIncr(LogCaptureTestCase):
+ 			if multcnt < 11:
+ 				arr = arr[0:multcnt-1] + ([arr[multcnt-2]] * (11-multcnt))
+ 		self.assertEqual(
+-			[a.calcBanTime(600, i) for i in xrange(1, 11)],
++			[a.calcBanTime(600, i) for i in range(1, 11)],
+ 			arr
+ 		)
+ 		a.setBanTimeExtra('maxtime', '1d')
+ 		# change factor :
+ 		a.setBanTimeExtra('factor', '2');
+ 		self.assertEqual(
+-			[a.calcBanTime(600, i) for i in xrange(1, 11)],
++			[a.calcBanTime(600, i) for i in range(1, 11)],
+ 			[2400, 4800, 9600, 19200, 38400, 76800, 86400, 86400, 86400, 86400]
+ 		)
+ 		# factor is float :
+ 		a.setBanTimeExtra('factor', '1.33');
+ 		self.assertEqual(
+-			[int(a.calcBanTime(600, i)) for i in xrange(1, 11)],
++			[int(a.calcBanTime(600, i)) for i in range(1, 11)],
+ 			[1596, 3192, 6384, 12768, 25536, 51072, 86400, 86400, 86400, 86400]
+ 		)
+ 		a.setBanTimeExtra('factor', None);
+ 		# change max time :
+ 		a.setBanTimeExtra('maxtime', '12h')
+ 		self.assertEqual(
+-			[a.calcBanTime(600, i) for i in xrange(1, 11)],
++			[a.calcBanTime(600, i) for i in range(1, 11)],
+ 			[1200, 2400, 4800, 9600, 19200, 38400, 43200, 43200, 43200, 43200]
+ 		)
+ 		a.setBanTimeExtra('maxtime', '24h')
+ 		## test randomization - not possibe all 10 times we have random = 0:
+ 		a.setBanTimeExtra('rndtime', '5m')
+ 		self.assertTrue(
+-			False in [1200 in [a.calcBanTime(600, 1) for i in xrange(10)] for c in xrange(10)]
++			False in [1200 in [a.calcBanTime(600, 1) for i in range(10)] for c in range(10)]
+ 		)
+ 		a.setBanTimeExtra('rndtime', None)
+ 		self.assertFalse(
+-			False in [1200 in [a.calcBanTime(600, 1) for i in xrange(10)] for c in xrange(10)]
++			False in [1200 in [a.calcBanTime(600, 1) for i in range(10)] for c in range(10)]
+ 		)
+ 		# restore default:
+ 		a.setBanTimeExtra('multipliers', None)
+@@ -124,7 +124,7 @@ class BanTimeIncr(LogCaptureTestCase):
+ 		# this multipliers has the same values as default formula, we test stop growing after count 9:
+ 		self.testDefault('1 2 4 8 16 32 64 128 256')
+ 		# this multipliers has exactly the same values as default formula, test endless growing (stops by count 31 only):
+-		self.testDefault(' '.join([str(1<<i) for i in xrange(31)]))
++		self.testDefault(' '.join([str(1<<i) for i in range(31)]))
+ 
+ 	def testFormula(self):
+ 		a = self.__jail;
+@@ -136,38 +136,38 @@ class BanTimeIncr(LogCaptureTestCase):
+ 		a.setBanTimeExtra('multipliers', None)
+ 		# test algorithm and max time 24 hours :
+ 		self.assertEqual(
+-			[int(a.calcBanTime(600, i)) for i in xrange(1, 11)],
++			[int(a.calcBanTime(600, i)) for i in range(1, 11)],
+ 			[1200, 2400, 4800, 9600, 19200, 38400, 76800, 86400, 86400, 86400]
+ 		)
+ 		# with extra large max time (30 days):
+ 		a.setBanTimeExtra('maxtime', '30d')
+ 		self.assertEqual(
+-			[int(a.calcBanTime(600, i)) for i in xrange(1, 11)],
++			[int(a.calcBanTime(600, i)) for i in range(1, 11)],
+ 			[1200, 2400, 4800, 9600, 19200, 38400, 76800, 153601, 307203, 614407]
+ 		)
+ 		a.setBanTimeExtra('maxtime', '24h')
+ 		# change factor :
+ 		a.setBanTimeExtra('factor', '1');
+ 		self.assertEqual(
+-			[int(a.calcBanTime(600, i)) for i in xrange(1, 11)],
++			[int(a.calcBanTime(600, i)) for i in range(1, 11)],
+ 			[1630, 4433, 12051, 32758, 86400, 86400, 86400, 86400, 86400, 86400]
+ 		)
+ 		a.setBanTimeExtra('factor', '2.0 / 2.885385')
+ 		# change max time :
+ 		a.setBanTimeExtra('maxtime', '12h')
+ 		self.assertEqual(
+-			[int(a.calcBanTime(600, i)) for i in xrange(1, 11)],
++			[int(a.calcBanTime(600, i)) for i in range(1, 11)],
+ 			[1200, 2400, 4800, 9600, 19200, 38400, 43200, 43200, 43200, 43200]
+ 		)
+ 		a.setBanTimeExtra('maxtime', '24h')
+ 		## test randomization - not possibe all 10 times we have random = 0:
+ 		a.setBanTimeExtra('rndtime', '5m')
+ 		self.assertTrue(
+-			False in [1200 in [int(a.calcBanTime(600, 1)) for i in xrange(10)] for c in xrange(10)]
++			False in [1200 in [int(a.calcBanTime(600, 1)) for i in range(10)] for c in range(10)]
+ 		)
+ 		a.setBanTimeExtra('rndtime', None)
+ 		self.assertFalse(
+-			False in [1200 in [int(a.calcBanTime(600, 1)) for i in xrange(10)] for c in xrange(10)]
++			False in [1200 in [int(a.calcBanTime(600, 1)) for i in range(10)] for c in range(10)]
+ 		)
+ 		# restore default:
+ 		a.setBanTimeExtra('factor', None);
+@@ -230,7 +230,7 @@ class BanTimeIncrDB(LogCaptureTestCase):
+ 		ticket = FailTicket(ip, stime, [])
+ 		# test ticket not yet found
+ 		self.assertEqual(
+-			[self.incrBanTime(ticket, 10) for i in xrange(3)], 
++			[self.incrBanTime(ticket, 10) for i in range(3)], 
+ 			[10, 10, 10]
+ 		)
+ 		# add a ticket banned
+@@ -285,7 +285,7 @@ class BanTimeIncrDB(LogCaptureTestCase):
+ 		)
+ 		# increase ban multiple times:
+ 		lastBanTime = 20
+-		for i in xrange(10):
++		for i in range(10):
+ 			ticket.setTime(stime + lastBanTime + 5)
+ 			banTime = self.incrBanTime(ticket, 10)
+ 			self.assertEqual(banTime, lastBanTime * 2)
+@@ -481,7 +481,7 @@ class BanTimeIncrDB(LogCaptureTestCase):
+ 		ticket = FailTicket(ip, stime-120, [])
+ 		failManager = FailManager()
+ 		failManager.setMaxRetry(3)
+-		for i in xrange(3):
++		for i in range(3):
+ 			failManager.addFailure(ticket)
+ 			obs.add('failureFound', failManager, jail, ticket)
+ 		obs.wait_empty(5)
+diff --git a/fail2ban/tests/samplestestcase.py b/fail2ban/tests/samplestestcase.py
+index 0bbd05f5..479b564a 100644
+--- a/fail2ban/tests/samplestestcase.py
++++ b/fail2ban/tests/samplestestcase.py
+@@ -138,7 +138,7 @@ class FilterSamplesRegex(unittest.TestCase):
+ 
+ 	@staticmethod
+ 	def _filterOptions(opts):
+-				return dict((k, v) for k, v in opts.iteritems() if not k.startswith('test.'))
++				return dict((k, v) for k, v in opts.items() if not k.startswith('test.'))
+ 		
+ def testSampleRegexsFactory(name, basedir):
+ 	def testFilter(self):
+@@ -249,10 +249,10 @@ def testSampleRegexsFactory(name, basedir):
+ 						self.assertTrue(faildata.get('match', False), 
+ 							"Line matched when shouldn't have")
+ 						self.assertEqual(len(ret), 1,
+-							"Multiple regexs matched %r" % (map(lambda x: x[0], ret)))
++							"Multiple regexs matched %r" % ([x[0] for x in ret]))
+ 
+ 						# Verify match captures (at least fid/host) and timestamp as expected
+-						for k, v in faildata.iteritems():
++						for k, v in faildata.items():
+ 							if k not in ("time", "match", "desc", "filter"):
+ 								fv = fail.get(k, None)
+ 								if fv is None:
+@@ -294,7 +294,7 @@ def testSampleRegexsFactory(name, basedir):
+ 								'\n'.join(pprint.pformat(fail).splitlines())))
+ 
+ 		# check missing samples for regex using each filter-options combination:
+-		for fltName, flt in self._filters.iteritems():
++		for fltName, flt in self._filters.items():
+ 			flt, regexsUsedIdx = flt
+ 			regexList = flt.getFailRegex()
+ 			for failRegexIndex, failRegex in enumerate(regexList):
+diff --git a/fail2ban/tests/servertestcase.py b/fail2ban/tests/servertestcase.py
+index 55e72455..7925ab1e 100644
+--- a/fail2ban/tests/servertestcase.py
++++ b/fail2ban/tests/servertestcase.py
+@@ -124,14 +124,14 @@ class TransmitterBase(LogCaptureTestCase):
+ 			self.transm.proceed(["get", jail, cmd]), (0, []))
+ 		for n, value in enumerate(values):
+ 			ret = self.transm.proceed(["set", jail, cmdAdd, value])
+-			self.assertSortedEqual((ret[0], map(str, ret[1])), (0, map(str, values[:n+1])), level=2)
++			self.assertSortedEqual((ret[0], list(map(str, ret[1]))), (0, list(map(str, values[:n+1]))), level=2)
+ 			ret = self.transm.proceed(["get", jail, cmd])
+-			self.assertSortedEqual((ret[0], map(str, ret[1])), (0, map(str, values[:n+1])), level=2)
++			self.assertSortedEqual((ret[0], list(map(str, ret[1]))), (0, list(map(str, values[:n+1]))), level=2)
+ 		for n, value in enumerate(values):
+ 			ret = self.transm.proceed(["set", jail, cmdDel, value])
+-			self.assertSortedEqual((ret[0], map(str, ret[1])), (0, map(str, values[n+1:])), level=2)
++			self.assertSortedEqual((ret[0], list(map(str, ret[1]))), (0, list(map(str, values[n+1:]))), level=2)
+ 			ret = self.transm.proceed(["get", jail, cmd])
+-			self.assertSortedEqual((ret[0], map(str, ret[1])), (0, map(str, values[n+1:])), level=2)
++			self.assertSortedEqual((ret[0], list(map(str, ret[1]))), (0, list(map(str, values[n+1:]))), level=2)
+ 
+ 	def jailAddDelRegexTest(self, cmd, inValues, outValues, jail):
+ 		cmdAdd = "add" + cmd
+@@ -930,7 +930,7 @@ class TransmitterLogging(TransmitterBase):
+ 
+ 	def testLogTarget(self):
+ 		logTargets = []
+-		for _ in xrange(3):
++		for _ in range(3):
+ 			tmpFile = tempfile.mkstemp("fail2ban", "transmitter")
+ 			logTargets.append(tmpFile[1])
+ 			os.close(tmpFile[0])
+@@ -1003,26 +1003,26 @@ class TransmitterLogging(TransmitterBase):
+ 				self.assertEqual(self.transm.proceed(["flushlogs"]), (0, "rolled over"))
+ 				l.warning("After flushlogs")
+ 				with open(fn2,'r') as f:
+-					line1 = f.next()
++					line1 = next(f)
+ 					if line1.find('Changed logging target to') >= 0:
+-						line1 = f.next()
++						line1 = next(f)
+ 					self.assertTrue(line1.endswith("Before file moved\n"))
+-					line2 = f.next()
++					line2 = next(f)
+ 					self.assertTrue(line2.endswith("After file moved\n"))
+ 					try:
+-						n = f.next()
++						n = next(f)
+ 						if n.find("Command: ['flushlogs']") >=0:
+-							self.assertRaises(StopIteration, f.next)
++							self.assertRaises(StopIteration, f.__next__)
+ 						else:
+ 							self.fail("Exception StopIteration or Command: ['flushlogs'] expected. Got: %s" % n)
+ 					except StopIteration:
+ 						pass # on higher debugging levels this is expected
+ 				with open(fn,'r') as f:
+-					line1 = f.next()
++					line1 = next(f)
+ 					if line1.find('rollover performed on') >= 0:
+-						line1 = f.next()
++						line1 = next(f)
+ 					self.assertTrue(line1.endswith("After flushlogs\n"))
+-					self.assertRaises(StopIteration, f.next)
++					self.assertRaises(StopIteration, f.__next__)
+ 					f.close()
+ 			finally:
+ 				os.remove(fn2)
+@@ -1185,7 +1185,7 @@ class LoggingTests(LogCaptureTestCase):
+ 					os.remove(f)
+ 
+ 
+-from clientreadertestcase import ActionReader, JailsReader, CONFIG_DIR
++from .clientreadertestcase import ActionReader, JailsReader, CONFIG_DIR
+ 
+ class ServerConfigReaderTests(LogCaptureTestCase):
+ 
+diff --git a/fail2ban/tests/sockettestcase.py b/fail2ban/tests/sockettestcase.py
+index 69bf8d8b..60f49e57 100644
+--- a/fail2ban/tests/sockettestcase.py
++++ b/fail2ban/tests/sockettestcase.py
+@@ -153,7 +153,7 @@ class Socket(LogCaptureTestCase):
+ 		org_handler = RequestHandler.found_terminator
+ 		try:
+ 			RequestHandler.found_terminator = lambda self: self.close()
+-			self.assertRaisesRegexp(RuntimeError, r"socket connection broken", 
++			self.assertRaisesRegex(RuntimeError, r"socket connection broken", 
+ 				lambda: client.send(testMessage, timeout=unittest.F2B.maxWaitTime(10)))
+ 		finally:
+ 			RequestHandler.found_terminator = org_handler
+diff --git a/fail2ban/tests/utils.py b/fail2ban/tests/utils.py
+index fcfddba7..cb234e0d 100644
+--- a/fail2ban/tests/utils.py
++++ b/fail2ban/tests/utils.py
+@@ -35,7 +35,7 @@ import time
+ import threading
+ import unittest
+ 
+-from cStringIO import StringIO
++from io import StringIO
+ from functools import wraps
+ 
+ from ..helpers import getLogger, str2LogLevel, getVerbosityFormat, uni_decode
+@@ -174,8 +174,8 @@ def initProcess(opts):
+ 
+ 	# Let know the version
+ 	if opts.verbosity != 0:
+-		print("Fail2ban %s test suite. Python %s. Please wait..." \
+-				% (version, str(sys.version).replace('\n', '')))
++		print(("Fail2ban %s test suite. Python %s. Please wait..." \
++				% (version, str(sys.version).replace('\n', ''))))
+ 
+ 	return opts;
+ 
+@@ -322,7 +322,7 @@ def initTests(opts):
+ 	c = DNSUtils.CACHE_ipToName
+ 	# increase max count and max time (too many entries, long time testing):
+ 	c.setOptions(maxCount=10000, maxTime=5*60)
+-	for i in xrange(256):
++	for i in range(256):
+ 		c.set('192.0.2.%s' % i, None)
+ 		c.set('198.51.100.%s' % i, None)
+ 		c.set('203.0.113.%s' % i, None)
+@@ -541,8 +541,8 @@ def gatherTests(regexps=None, opts=None):
+ import difflib, pprint
+ if not hasattr(unittest.TestCase, 'assertDictEqual'):
+ 	def assertDictEqual(self, d1, d2, msg=None):
+-		self.assert_(isinstance(d1, dict), 'First argument is not a dictionary')
+-		self.assert_(isinstance(d2, dict), 'Second argument is not a dictionary')
++		self.assertTrue(isinstance(d1, dict), 'First argument is not a dictionary')
++		self.assertTrue(isinstance(d2, dict), 'Second argument is not a dictionary')
+ 		if d1 != d2:
+ 			standardMsg = '%r != %r' % (d1, d2)
+ 			diff = ('\n' + '\n'.join(difflib.ndiff(
+@@ -560,7 +560,7 @@ def assertSortedEqual(self, a, b, level=1, nestedOnly=True, key=repr, msg=None):
+ 	# used to recognize having element as nested dict, list or tuple:
+ 	def _is_nested(v):
+ 		if isinstance(v, dict):
+-			return any(isinstance(v, (dict, list, tuple)) for v in v.itervalues())
++			return any(isinstance(v, (dict, list, tuple)) for v in v.values())
+ 		return any(isinstance(v, (dict, list, tuple)) for v in v)
+ 	# level comparison routine:
+ 	def _assertSortedEqual(a, b, level, nestedOnly, key):
+@@ -573,7 +573,7 @@ def assertSortedEqual(self, a, b, level=1, nestedOnly=True, key=repr, msg=None):
+ 				return
+ 			raise ValueError('%r != %r' % (a, b))
+ 		if isinstance(a, dict) and isinstance(b, dict): # compare dict's:
+-			for k, v1 in a.iteritems():
++			for k, v1 in a.items():
+ 				v2 = b[k]
+ 				if isinstance(v1, (dict, list, tuple)) and isinstance(v2, (dict, list, tuple)):
+ 					_assertSortedEqual(v1, v2, level-1 if level != 0 else 0, nestedOnly, key)
+@@ -608,14 +608,14 @@ if not hasattr(unittest.TestCase, 'assertRaisesRegexp'):
+ 				self.fail('\"%s\" does not match \"%s\"' % (regexp, e))
+ 		else:
+ 			self.fail('%s not raised' % getattr(exccls, '__name__'))
+-	unittest.TestCase.assertRaisesRegexp = assertRaisesRegexp
++	unittest.TestCase.assertRaisesRegex = assertRaisesRegexp
+ 
+ # always custom following methods, because we use atm better version of both (support generators)
+ if True: ## if not hasattr(unittest.TestCase, 'assertIn'):
+ 	def assertIn(self, a, b, msg=None):
+ 		bb = b
+ 		wrap = False
+-		if msg is None and hasattr(b, '__iter__') and not isinstance(b, basestring):
++		if msg is None and hasattr(b, '__iter__') and not isinstance(b, str):
+ 			b, bb = itertools.tee(b)
+ 			wrap = True
+ 		if a not in b:
+@@ -626,7 +626,7 @@ if True: ## if not hasattr(unittest.TestCase, 'assertIn'):
+ 	def assertNotIn(self, a, b, msg=None):
+ 		bb = b
+ 		wrap = False
+-		if msg is None and hasattr(b, '__iter__') and not isinstance(b, basestring):
++		if msg is None and hasattr(b, '__iter__') and not isinstance(b, str):
+ 			b, bb = itertools.tee(b)
+ 			wrap = True
+ 		if a in b:
+diff --git a/setup.py b/setup.py
+deleted file mode 100755
+index ce1eedf6..00000000
+--- a/setup.py
++++ /dev/null
+@@ -1,326 +0,0 @@
+-#!/usr/bin/env python
+-# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: t -*-
+-# vi: set ft=python sts=4 ts=4 sw=4 noet :
+-
+-# This file is part of Fail2Ban.
+-#
+-# Fail2Ban is free software; you can redistribute it and/or modify
+-# it under the terms of the GNU General Public License as published by
+-# the Free Software Foundation; either version 2 of the License, or
+-# (at your option) any later version.
+-#
+-# Fail2Ban is distributed in the hope that it will be useful,
+-# but WITHOUT ANY WARRANTY; without even the implied warranty of
+-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+-# GNU General Public License for more details.
+-#
+-# You should have received a copy of the GNU General Public License
+-# along with Fail2Ban; if not, write to the Free Software
+-# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
+-
+-__author__ = "Cyril Jaquier, Steven Hiscocks, Yaroslav Halchenko"
+-__copyright__ = "Copyright (c) 2004 Cyril Jaquier, 2008-2016 Fail2Ban Contributors"
+-__license__ = "GPL"
+-
+-import platform
+-
+-try:
+-	import setuptools
+-	from setuptools import setup
+-	from setuptools.command.install import install
+-	from setuptools.command.install_scripts import install_scripts
+-except ImportError:
+-	setuptools = None
+-	from distutils.core import setup
+-
+-# all versions
+-from distutils.command.build_py import build_py
+-from distutils.command.build_scripts import build_scripts
+-if setuptools is None:
+-	from distutils.command.install import install
+-	from distutils.command.install_scripts import install_scripts
+-try:
+-	# python 3.x
+-	from distutils.command.build_py import build_py_2to3
+-	from distutils.command.build_scripts import build_scripts_2to3
+-	_2to3 = True
+-except ImportError:
+-	# python 2.x
+-	_2to3 = False
+-
+-import os
+-from os.path import isfile, join, isdir, realpath
+-import re
+-import sys
+-import warnings
+-from glob import glob
+-
+-from fail2ban.setup import updatePyExec
+-
+-
+-source_dir = os.path.realpath(os.path.dirname(
+-	# __file__ seems to be overwritten sometimes on some python versions (e.g. bug of 2.6 by running under cProfile, etc.):
+-	sys.argv[0] if os.path.basename(sys.argv[0]) == 'setup.py' else __file__
+-))
+-
+-# Wrapper to install python binding (to current python version):
+-class install_scripts_f2b(install_scripts):
+-
+-	def get_outputs(self):
+-		outputs = install_scripts.get_outputs(self)
+-		# setup.py --dry-run install:
+-		dry_run = not outputs
+-		self.update_scripts(dry_run)
+-		if dry_run:
+-			#bindir = self.install_dir
+-			bindir = self.build_dir
+-			print('creating fail2ban-python binding -> %s (dry-run, real path can be different)' % (bindir,))
+-			print('Copying content of %s to %s' % (self.build_dir, self.install_dir));
+-			return outputs
+-		fn = None
+-		for fn in outputs:
+-			if os.path.basename(fn) == 'fail2ban-server':
+-				break
+-		bindir = os.path.dirname(fn)
+-		print('creating fail2ban-python binding -> %s' % (bindir,))
+-		updatePyExec(bindir)
+-		return outputs
+-
+-	def update_scripts(self, dry_run=False):
+-		buildroot = os.path.dirname(self.build_dir)
+-		install_dir = self.install_dir
+-		try:
+-			# remove root-base from install scripts path:
+-			root = self.distribution.command_options['install']['root'][1]
+-			if install_dir.startswith(root):
+-				install_dir = install_dir[len(root):]
+-		except: # pragma: no cover
+-			print('WARNING: Cannot find root-base option, check the bin-path to fail2ban-scripts in "fail2ban.service".')
+-		print('Creating %s/fail2ban.service (from fail2ban.service.in): @BINDIR@ -> %s' % (buildroot, install_dir))
+-		with open(os.path.join(source_dir, 'files/fail2ban.service.in'), 'r') as fn:
+-			lines = fn.readlines()
+-		fn = None
+-		if not dry_run:
+-			fn = open(os.path.join(buildroot, 'fail2ban.service'), 'w')
+-		try:
+-			for ln in lines:
+-				ln = re.sub(r'@BINDIR@', lambda v: install_dir, ln)
+-				if dry_run:
+-					sys.stdout.write(' | ' + ln)
+-					continue
+-				fn.write(ln)
+-		finally:
+-			if fn: fn.close()
+-		if dry_run:
+-			print(' `')
+-
+-
+-# Wrapper to specify fail2ban own options:
+-class install_command_f2b(install):
+-	user_options = install.user_options + [
+-		('disable-2to3', None, 'Specify to deactivate 2to3, e.g. if the install runs from fail2ban test-cases.'),
+-		('without-tests', None, 'without tests files installation'),
+-	]
+-	def initialize_options(self):
+-		self.disable_2to3 = None
+-		self.without_tests = None
+-		install.initialize_options(self)
+-	def finalize_options(self):
+-		global _2to3
+-		## in the test cases 2to3 should be already done (fail2ban-2to3):
+-		if self.disable_2to3:
+-			_2to3 = False
+-		if _2to3:
+-			cmdclass = self.distribution.cmdclass
+-			cmdclass['build_py'] = build_py_2to3
+-			cmdclass['build_scripts'] = build_scripts_2to3
+-		if self.without_tests:
+-			self.distribution.scripts.remove('bin/fail2ban-testcases')
+-
+-			self.distribution.packages.remove('fail2ban.tests')
+-			self.distribution.packages.remove('fail2ban.tests.action_d')
+-
+-			del self.distribution.package_data['fail2ban.tests']
+-		install.finalize_options(self)
+-	def run(self):
+-		install.run(self)
+-
+-
+-# Update fail2ban-python env to current python version (where f2b-modules located/installed)
+-updatePyExec(os.path.join(source_dir, 'bin'))
+-
+-if setuptools and "test" in sys.argv:
+-	import logging
+-	logSys = logging.getLogger("fail2ban")
+-	hdlr = logging.StreamHandler(sys.stdout)
+-	fmt = logging.Formatter("%(asctime)-15s %(message)s")
+-	hdlr.setFormatter(fmt)
+-	logSys.addHandler(hdlr)
+-	if set(["-q", "--quiet"]) & set(sys.argv):
+-		logSys.setLevel(logging.CRITICAL)
+-		warnings.simplefilter("ignore")
+-		sys.warnoptions.append("ignore")
+-	elif set(["-v", "--verbose"]) & set(sys.argv):
+-		logSys.setLevel(logging.DEBUG)
+-	else:
+-		logSys.setLevel(logging.INFO)
+-elif "test" in sys.argv:
+-	print("python distribute required to execute fail2ban tests")
+-	print("")
+-
+-longdesc = '''
+-Fail2Ban scans log files like /var/log/pwdfail or
+-/var/log/apache/error_log and bans IP that makes
+-too many password failures. It updates firewall rules
+-to reject the IP address or executes user defined
+-commands.'''
+-
+-if setuptools:
+-	setup_extra = {
+-		'test_suite': "fail2ban.tests.utils.gatherTests",
+-		'use_2to3': True,
+-	}
+-else:
+-	setup_extra = {}
+-
+-data_files_extra = []
+-if os.path.exists('/var/run'):
+-	# if we are on the system with /var/run -- we are to use it for having fail2ban/
+-	# directory there for socket file etc.
+-	# realpath is used to possibly resolve /var/run -> /run symlink
+-	data_files_extra += [(realpath('/var/run/fail2ban'), '')]
+-
+-# Installing documentation files only under Linux or other GNU/ systems
+-# (e.g. GNU/kFreeBSD), since others might have protective mechanisms forbidding
+-# installation there (see e.g. #1233)
+-platform_system = platform.system().lower()
+-doc_files = ['README.md', 'DEVELOP', 'FILTERS', 'doc/run-rootless.txt']
+-if platform_system in ('solaris', 'sunos'):
+-	doc_files.append('README.Solaris')
+-if platform_system in ('linux', 'solaris', 'sunos') or platform_system.startswith('gnu'):
+-	data_files_extra.append(
+-		('/usr/share/doc/fail2ban', doc_files)
+-	)
+-
+-# Get version number, avoiding importing fail2ban.
+-# This is due to tests not functioning for python3 as 2to3 takes place later
+-exec(open(join("fail2ban", "version.py")).read())
+-
+-setup(
+-	name = "fail2ban",
+-	version = version,
+-	description = "Ban IPs that make too many password failures",
+-	long_description = longdesc,
+-	author = "Cyril Jaquier & Fail2Ban Contributors",
+-	author_email = "cyril.jaquier@fail2ban.org",
+-	url = "http://www.fail2ban.org",
+-	license = "GPL",
+-	platforms = "Posix",
+-	cmdclass = {
+-		'build_py': build_py, 'build_scripts': build_scripts,
+-		'install_scripts': install_scripts_f2b, 'install': install_command_f2b
+-	},
+-	scripts = [
+-		'bin/fail2ban-client',
+-		'bin/fail2ban-server',
+-		'bin/fail2ban-regex',
+-		'bin/fail2ban-testcases',
+-		# 'bin/fail2ban-python', -- link (binary), will be installed via install_scripts_f2b wrapper
+-	],
+-	packages = [
+-		'fail2ban',
+-		'fail2ban.client',
+-		'fail2ban.server',
+-		'fail2ban.tests',
+-		'fail2ban.tests.action_d',
+-	],
+-	package_data = {
+-		'fail2ban.tests':
+-			[ join(w[0], f).replace("fail2ban/tests/", "", 1)
+-				for w in os.walk('fail2ban/tests/files')
+-				for f in w[2]] +
+-			[ join(w[0], f).replace("fail2ban/tests/", "", 1)
+-				for w in os.walk('fail2ban/tests/config')
+-				for f in w[2]] +
+-			[ join(w[0], f).replace("fail2ban/tests/", "", 1)
+-				for w in os.walk('fail2ban/tests/action_d')
+-				for f in w[2]]
+-	},
+-	data_files = [
+-		('/etc/fail2ban',
+-			glob("config/*.conf")
+-		),
+-		('/etc/fail2ban/filter.d',
+-			glob("config/filter.d/*.conf")
+-		),
+-		('/etc/fail2ban/filter.d/ignorecommands',
+-			[p for p in glob("config/filter.d/ignorecommands/*") if isfile(p)]
+-		),
+-		('/etc/fail2ban/action.d',
+-			glob("config/action.d/*.conf") +
+-			glob("config/action.d/*.py")
+-		),
+-		('/etc/fail2ban/fail2ban.d',
+-			''
+-		),
+-		('/etc/fail2ban/jail.d',
+-			''
+-		),
+-		('/var/lib/fail2ban',
+-			''
+-		),
+-	] + data_files_extra,
+-	**setup_extra
+-)
+-
+-# Do some checks after installation
+-# Search for obsolete files.
+-obsoleteFiles = []
+-elements = {
+-	"/etc/":
+-		[
+-			"fail2ban.conf"
+-		],
+-	"/usr/bin/":
+-		[
+-			"fail2ban.py"
+-		],
+-	"/usr/lib/fail2ban/":
+-		[
+-			"version.py",
+-			"protocol.py"
+-		]
+-}
+-
+-for directory in elements:
+-	for f in elements[directory]:
+-		path = join(directory, f)
+-		if isfile(path):
+-			obsoleteFiles.append(path)
+-
+-if obsoleteFiles:
+-	print("")
+-	print("Obsolete files from previous Fail2Ban versions were found on "
+-		  "your system.")
+-	print("Please delete them:")
+-	print("")
+-	for f in obsoleteFiles:
+-		print("\t" + f)
+-	print("")
+-
+-if isdir("/usr/lib/fail2ban"):
+-	print("")
+-	print("Fail2ban is not installed under /usr/lib anymore. The new "
+-		  "location is under /usr/share. Please remove the directory "
+-		  "/usr/lib/fail2ban and everything under this directory.")
+-	print("")
+-
+-# Update config file
+-if sys.argv[1] == "install":
+-	print("")
+-	print("Please do not forget to update your configuration files.")
+-	print("They are in \"/etc/fail2ban/\".")
+-	print("")
+-	print("You can also install systemd service-unit file from \"build/fail2ban.service\"")
+-	print("resp. corresponding init script from \"files/*-initd\".")
+-	print("")
+-- 
+2.17.1
+
diff --git a/meta-security/recipes-security/fail2ban/files/fail2ban_setup.py b/meta-security/recipes-security/fail2ban/files/fail2ban_setup.py
index a5d4ed6..e231949 100755
--- a/meta-security/recipes-security/fail2ban/files/fail2ban_setup.py
+++ b/meta-security/recipes-security/fail2ban/files/fail2ban_setup.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: t -*-
 # vi: set ft=python sts=4 ts=4 sw=4 noet :
 
diff --git a/meta-security/recipes-security/fail2ban/python3-fail2ban_0.10.4.0.bb b/meta-security/recipes-security/fail2ban/python3-fail2ban_0.10.4.0.bb
index 53f94ff..e737f50 100644
--- a/meta-security/recipes-security/fail2ban/python3-fail2ban_0.10.4.0.bb
+++ b/meta-security/recipes-security/fail2ban/python3-fail2ban_0.10.4.0.bb
@@ -9,13 +9,12 @@
 LICENSE = "GPL-2.0"
 LIC_FILES_CHKSUM = "file://COPYING;md5=ecabc31e90311da843753ba772885d9f"
 
-SRCREV ="aa565eb80ec6043317e8430cabcaf9c3f4e61578"
-SRC_URI = " \
-	git://github.com/fail2ban/fail2ban.git;branch=0.11 \
-	file://initd \
+SRCREV ="3befbb177017957869425c81a560edb8e27db75a"
+SRC_URI = " git://github.com/fail2ban/fail2ban.git;branch=0.11 \
+        file://initd \
         file://fail2ban_setup.py \
         file://run-ptest \
-        file://0001-To-fix-build-error-of-xrang.patch \
+        file://0001-python3-fail2ban-2-3-conversion.patch \
 "
 
 inherit update-rc.d ptest setuptools3
@@ -27,16 +26,16 @@
 }
 
 do_install_append () {
-	install -d ${D}/${sysconfdir}/fail2ban
-	install -d ${D}/${sysconfdir}/init.d
-    	install -m 0755 ${WORKDIR}/initd ${D}${sysconfdir}/init.d/fail2ban-server
-	chown -R root:root ${D}/${bindir}
+    install -d ${D}/${sysconfdir}/fail2ban
+    install -d ${D}/${sysconfdir}/init.d
+    install -m 0755 ${WORKDIR}/initd ${D}${sysconfdir}/init.d/fail2ban-server
+    chown -R root:root ${D}/${bindir}
 }
 
 do_install_ptest_append () {
-        install -d ${D}${PTEST_PATH}
-        sed -i -e 's/##PYTHON##/${PYTHON_PN}/g' ${D}${PTEST_PATH}/run-ptest
-        install -D ${S}/bin/fail2ban-testcases ${D}${PTEST_PATH}
+    install -d ${D}${PTEST_PATH}
+    sed -i -e 's/##PYTHON##/${PYTHON_PN}/g' ${D}${PTEST_PATH}/run-ptest
+    install -D ${S}/bin/fail2ban-testcases ${D}${PTEST_PATH}
 }
 
 FILES_${PN} += "/run"
@@ -47,5 +46,6 @@
 
 INSANE_SKIP_${PN}_append = "already-stripped"
 
-RDEPENDS_${PN} = "sysklogd iptables sqlite3 ${PYTHON_PN} ${PYTHON_PN}-pyinotify"
+RDEPENDS_${PN} = "${VIRTUAL-RUNTIME_base-utils-syslog} iptables sqlite3 python3-core python3-pyinotify"
+RDEPENDS_${PN} += " python3-logging python3-fcntl python3-json"
 RDEPENDS_${PN}-ptest = "python3-core python3-io python3-modules python3-fail2ban"
diff --git a/meta-security/recipes-security/google-authenticator-libpam/google-authenticator-libpam_1.07.bb b/meta-security/recipes-security/google-authenticator-libpam/google-authenticator-libpam_1.08.bb
similarity index 68%
rename from meta-security/recipes-security/google-authenticator-libpam/google-authenticator-libpam_1.07.bb
rename to meta-security/recipes-security/google-authenticator-libpam/google-authenticator-libpam_1.08.bb
index 98f895c..f9ca092 100644
--- a/meta-security/recipes-security/google-authenticator-libpam/google-authenticator-libpam_1.07.bb
+++ b/meta-security/recipes-security/google-authenticator-libpam/google-authenticator-libpam_1.08.bb
@@ -4,7 +4,7 @@
 LICENSE = "Apache-2.0"
 
 SRC_URI = "git://github.com/google/google-authenticator-libpam.git"
-SRCREV = "c9280f43610ce896f91eafd0f740a4eb4dcecedd"
+SRCREV = "2c7415d950fb0b4a7f779f045910666447b100ef"
 
 DEPENDS = "libpam"
 
@@ -14,7 +14,10 @@
 
 REQUIRED_DISTRO_FEATURES = "pam"
 
+# Use the same dir location as PAM
+EXTRA_OECONF = "--libdir=${base_libdir}" 
+
 PACKAGES += "pam-google-authenticator"
-FILES_pam-google-authenticator = "${libdir}/security/pam_google_authenticator.so"
+FILES_pam-google-authenticator = "${base_libdir}/security/pam_google_authenticator.so"
 
 RDEPNEDS_pam-google-authenticator  = "libpam"
diff --git a/meta-security/recipes-security/images/security-client-image.bb b/meta-security/recipes-security/images/security-client-image.bb
index 1a92479..f4ebc69 100644
--- a/meta-security/recipes-security/images/security-client-image.bb
+++ b/meta-security/recipes-security/images/security-client-image.bb
@@ -5,8 +5,7 @@
     packagegroup-core-boot \
     os-release \
     samhain-client \
-    ${@bb.utils.contains("DISTRO_FEATURES", "x11", "packagegroup-xfce-base", "", d)} \
-    ${ROOTFS_PKGMANAGE_BOOTSTRAP} ${CORE_IMAGE_EXTRA_INSTALL}"
+    ${@bb.utils.contains("DISTRO_FEATURES", "x11", "packagegroup-xfce-base", "", d)}"
 
 IMAGE_LINGUAS ?= " "
 
diff --git a/meta-security/recipes-security/images/security-server-image.bb b/meta-security/recipes-security/images/security-server-image.bb
index 502b5c1..4927e0e 100644
--- a/meta-security/recipes-security/images/security-server-image.bb
+++ b/meta-security/recipes-security/images/security-server-image.bb
@@ -6,8 +6,7 @@
     packagegroup-base \
     packagegroup-core-boot \
     samhain-server \
-    os-release \
-    ${ROOTFS_PKGMANAGE_BOOTSTRAP} ${CORE_IMAGE_EXTRA_INSTALL}"
+    os-release "
 
 IMAGE_LINGUAS ?= " "
 
diff --git a/meta-security/recipes-security/libseccomp/files/0001-tests-rely-on-__SNR_xxx-instead-of-__NR_xxx-for-sysc.patch b/meta-security/recipes-security/libseccomp/files/0001-tests-rely-on-__SNR_xxx-instead-of-__NR_xxx-for-sysc.patch
deleted file mode 100644
index a53433f..0000000
--- a/meta-security/recipes-security/libseccomp/files/0001-tests-rely-on-__SNR_xxx-instead-of-__NR_xxx-for-sysc.patch
+++ /dev/null
@@ -1,45 +0,0 @@
-From 1ecdddb2a5b61cf527d1f238f88a9d129239f87a Mon Sep 17 00:00:00 2001
-From: Paul Moore <paul@paul-moore.com>
-Date: Tue, 5 Nov 2019 15:11:11 -0500
-Subject: [PATCH] tests: rely on __SNR_xxx instead of __NR_xxx for syscalls
-
-We recently changed how libseccomp handles syscall numbers that are
-not defined natively, but we missed test #15.
-
-Acked-by: Tom Hromatka <tom.hromatka@oracle.com>
-Signed-off-by: Paul Moore <paul@paul-moore.com>
-
-Upstream-Status: Backport
-[https://github.com/seccomp/libseccomp/commit/1ecdddb2a5b61cf527d1f238f88a9d129239f87a]
-
-Signed-off-by: Yi Zhao <yi.zhao@windriver.com>
----
- tests/15-basic-resolver.c | 6 +++---
- 1 file changed, 3 insertions(+), 3 deletions(-)
-
-diff --git a/tests/15-basic-resolver.c b/tests/15-basic-resolver.c
-index 6badef1..0c1eefe 100644
---- a/tests/15-basic-resolver.c
-+++ b/tests/15-basic-resolver.c
-@@ -55,15 +55,15 @@ int main(int argc, char *argv[])
- 	unsigned int arch;
- 	char *name = NULL;
- 
--	if (seccomp_syscall_resolve_name("open") != __NR_open)
-+	if (seccomp_syscall_resolve_name("open") != __SNR_open)
- 		goto fail;
--	if (seccomp_syscall_resolve_name("read") != __NR_read)
-+	if (seccomp_syscall_resolve_name("read") != __SNR_read)
- 		goto fail;
- 	if (seccomp_syscall_resolve_name("INVALID") != __NR_SCMP_ERROR)
- 		goto fail;
- 
- 	rc = seccomp_syscall_resolve_name_rewrite(SCMP_ARCH_NATIVE, "openat");
--	if (rc != __NR_openat)
-+	if (rc != __SNR_openat)
- 		goto fail;
- 
- 	while ((arch = arch_list[iter++]) != -1) {
--- 
-2.17.1
-
diff --git a/meta-security/recipes-security/libseccomp/libseccomp_2.4.2.bb b/meta-security/recipes-security/libseccomp/libseccomp_2.4.3.bb
similarity index 90%
rename from meta-security/recipes-security/libseccomp/libseccomp_2.4.2.bb
rename to meta-security/recipes-security/libseccomp/libseccomp_2.4.3.bb
index 07db82a..9ca41e6 100644
--- a/meta-security/recipes-security/libseccomp/libseccomp_2.4.2.bb
+++ b/meta-security/recipes-security/libseccomp/libseccomp_2.4.3.bb
@@ -4,10 +4,9 @@
 LICENSE = "LGPL-2.1"
 LIC_FILES_CHKSUM = "file://LICENSE;beginline=0;endline=1;md5=8eac08d22113880357ceb8e7c37f989f"
 
-SRCREV = "1b6cfd1fc0b7499a28c24299a93a80bd18619563"
+SRCREV = "1dde9d94e0848e12da20602ca38032b91d521427"
 
 SRC_URI = "git://github.com/seccomp/libseccomp.git;branch=release-2.4 \
-           file://0001-tests-rely-on-__SNR_xxx-instead-of-__NR_xxx-for-sysc.patch \
            file://run-ptest \
 "
 
diff --git a/meta-security/recipes-security/sssd/files/fix-ldblibdir.patch b/meta-security/recipes-security/sssd/files/fix-ldblibdir.patch
new file mode 100644
index 0000000..e350baf
--- /dev/null
+++ b/meta-security/recipes-security/sssd/files/fix-ldblibdir.patch
@@ -0,0 +1,25 @@
+When calculate value of ldblibdir, it checks whether the directory of
+$ldblibdir exists. If not, it assigns ldblibdir with ${libdir}/ldb. It is not
+suitable for cross compile. Fix it that only re-assign ldblibdir when its value
+is empty.
+
+Upstream-Status: Inappropriate [cross compile specific]
+
+Signed-off-by: Kai Kang <kai.kang@windriver.com>
+---
+ src/external/libldb.m4 | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/src/external/libldb.m4 b/src/external/libldb.m4
+index c400add..5e5f06d 100644
+--- a/src/external/libldb.m4
++++ b/src/external/libldb.m4
+@@ -19,7 +19,7 @@ if test x"$with_ldb_lib_dir" != x; then
+     ldblibdir=$with_ldb_lib_dir
+ else
+     ldblibdir="`$PKG_CONFIG --variable=modulesdir ldb`"
+-    if ! test -d $ldblibdir; then
++    if test -z $ldblibdir; then
+         ldblibdir="${libdir}/ldb"
+     fi
+ fi
diff --git a/meta-security/recipes-security/sssd/files/volatiles.99_sssd b/meta-security/recipes-security/sssd/files/volatiles.99_sssd
new file mode 100644
index 0000000..2a82413
--- /dev/null
+++ b/meta-security/recipes-security/sssd/files/volatiles.99_sssd
@@ -0,0 +1 @@
+d root root 0750 /var/log/sssd none
diff --git a/meta-security/recipes-security/sssd/sssd_1.16.4.bb b/meta-security/recipes-security/sssd/sssd_1.16.4.bb
index 089a99e..7ea1586 100644
--- a/meta-security/recipes-security/sssd/sssd_1.16.4.bb
+++ b/meta-security/recipes-security/sssd/sssd_1.16.4.bb
@@ -8,13 +8,21 @@
 DEPENDS = "openldap cyrus-sasl libtdb ding-libs libpam c-ares krb5 autoconf-archive"
 DEPENDS += "libldb dbus libtalloc libpcre glib-2.0 popt e2fsprogs libtevent"
 
-SRC_URI = "https://releases.pagure.org/SSSD/${BPN}/${BP}.tar.gz\
-            file://sssd.conf "
+# If no crypto has been selected, default to DEPEND on nss, since that's what
+# sssd will pick if no active choice is made during configure
+DEPENDS += "${@bb.utils.contains('PACKAGECONFIG', 'nss', '', \
+               bb.utils.contains('PACKAGECONFIG', 'crypto', '', 'nss', d), d)}"
+
+SRC_URI = "https://releases.pagure.org/SSSD/${BPN}/${BP}.tar.gz \
+           file://sssd.conf \
+           file://volatiles.99_sssd \
+           file://fix-ldblibdir.patch \
+           "
 
 SRC_URI[md5sum] = "757bbb6f15409d8d075f4f06cb678d50"
 SRC_URI[sha256sum] = "6bb212cd6b75b918e945c24e7c3f95a486fb54d7f7d489a9334cfa1a1f3bf959"
 
-inherit autotools pkgconfig gettext python-dir features_check
+inherit autotools pkgconfig gettext python3-dir features_check systemd
 
 REQUIRED_DISTRO_FEATURES = "pam"
 
@@ -22,29 +30,36 @@
 SSSD_GID ?= "root"
 
 CACHED_CONFIGUREVARS = "ac_cv_member_struct_ldap_conncb_lc_arg=no \
-    ac_cv_path_NSUPDATE=${bindir} \
-    ac_cv_path_PYTHON2=${PYTHON_DIR} ac_cv_prog_HAVE_PYTHON3=${PYTHON_DIR} \
+    ac_cv_path_NSUPDATE=${bindir} ac_cv_prog_HAVE_PYTHON3=${PYTHON_DIR} \
     "
 
-PACKAGECONFIG ?="nss nscd"
+PACKAGECONFIG ?="nss nscd autofs sudo infopipe"
 PACKAGECONFIG += "${@bb.utils.contains('DISTRO_FEATURES', 'selinux', 'selinux', '', d)}"
 PACKAGECONFIG += "${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'systemd', '', d)}"
 
-PACKAGECONFIG[ssh] = "--with-ssh, --with-ssh=no, "
+PACKAGECONFIG[autofs] = "--with-autofs, --with-autofs=no"
+PACKAGECONFIG[crypto] = "--with-crypto=libcrypto, , libcrypto"
+PACKAGECONFIG[curl] = "--with-secrets --with-kcm, --without-secrets --without-kcm, curl jansson"
+PACKAGECONFIG[http] = "--with-secrets, --without-secrets, apache2"
+PACKAGECONFIG[infopipe] = "--with-infopipe, --with-infopipe=no, "
+PACKAGECONFIG[manpages] = "--with-manpages, --with-manpages=no"
+PACKAGECONFIG[nl] = "--with-libnl, --with-libnl=no, libnl"
+PACKAGECONFIG[nscd] = "--with-nscd=${sbindir}, --with-nscd=no "
+PACKAGECONFIG[nss] = "--with-crypto=nss, ,nss,"
+PACKAGECONFIG[python3] = "--with-python3-bindings, --without-python3-bindings"
 PACKAGECONFIG[samba] = "--with-samba, --with-samba=no, samba"
 PACKAGECONFIG[selinux] = "--with-selinux, --with-selinux=no --with-semanage=no, libselinux"
-PACKAGECONFIG[manpages] = "--with-manpages, --with-manpages=no"
-PACKAGECONFIG[python2] = "--with-python2-bindings, --without-python2-bindings"
-PACKAGECONFIG[python3] = "--with-python3-bindings, --without-python3-bindings"
-PACKAGECONFIG[nss] = "--with-crypto=nss, ,nss,"
-PACKAGECONFIG[cyrpto] = "--with-crypto=libcrypto, , libcrypto"
-PACKAGECONFIG[nscd] = "--with-nscd=${sbindir}, --with-nscd=no "
-PACKAGECONFIG[nl] = "--with-libnl, --with-libnl=no, libnl"
-PACKAGECONFIG[systemd] = "--with-systemdunitdir=${systemd_unitdir}/system/, --with-systemdunitdir="
-PACKAGECONFIG[http] = "--with-secrets, --without-secrets, apache2"
-PACKAGECONFIG[curl] = "--with-secrets --with-kcm, --without-secrets --without-kcm, curl"
+PACKAGECONFIG[ssh] = "--with-ssh, --with-ssh=no, "
+PACKAGECONFIG[sudo] = "--with-sudo, --with-sudo=no, "
+PACKAGECONFIG[systemd] = "--with-initscript=systemd,--with-initscript=sysv"
 
-EXTRA_OECONF += "--disable-cifs-idmap-plugin --without-nfsv4-idmapd-plugin --without-ipa-getkeytab"
+EXTRA_OECONF += " \
+    --disable-cifs-idmap-plugin \
+    --without-nfsv4-idmapd-plugin \
+    --without-ipa-getkeytab \
+    --without-python2-bindings \
+    --enable-pammoddir=${base_libdir}/security \
+"
 
 do_configure_prepend() {
     mkdir -p ${AUTOTOOLS_AUXDIR}/build
@@ -59,6 +74,12 @@
     rmdir --ignore-fail-on-non-empty "${D}/${bindir}"
     install -d ${D}/${sysconfdir}/${BPN}
     install -m 600 ${WORKDIR}/${BPN}.conf ${D}/${sysconfdir}/${BPN}
+    install -D -m 644 ${WORKDIR}/volatiles.99_sssd ${D}/${sysconfdir}/default/volatiles/99_sssd
+
+    if ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'true', 'false', d)}; then
+        install -d ${D}${sysconfdir}/tmpfiles.d
+        echo "d /var/log/sssd 0750 - - - -" > ${D}${sysconfdir}/tmpfiles.d/sss.conf
+    fi
 
     # Remove /var/run as it is created on startup
     rm -rf ${D}${localstatedir}/run
@@ -76,10 +97,24 @@
 
 INITSCRIPT_NAME = "sssd"
 INITSCRIPT_PARAMS = "start 02 5 3 2 . stop 20 0 1 6 ."
-SYSTEMD_SERVICE_${PN} = "${BPN}.service"
+SYSTEMD_SERVICE_${PN} = " \
+    ${@bb.utils.contains('PACKAGECONFIG', 'autofs', 'sssd-autofs.service sssd-autofs.socket', '', d)} \
+    ${@bb.utils.contains('PACKAGECONFIG', 'curl', 'sssd-kcm.service sssd-kcm.socket', '', d)} \
+    ${@bb.utils.contains('PACKAGECONFIG', 'infopipe', 'sssd-ifp.service ', '', d)} \
+    ${@bb.utils.contains('PACKAGECONFIG', 'ssh', 'sssd-ssh.service sssd-ssh.socket', '', d)} \
+    ${@bb.utils.contains('PACKAGECONFIG', 'sudo', 'sssd-sudo.service sssd-sudo.socket', '', d)} \
+    sssd-nss.service \
+    sssd-nss.socket \
+    sssd-pam-priv.socket \
+    sssd-pam.service \
+    sssd-pam.socket \
+    sssd-secrets.service \
+    sssd-secrets.socket \
+    sssd.service \
+"
 SYSTEMD_AUTO_ENABLE = "disable"
 
-FILES_${PN} += "${libdir} ${datadir} /run ${libdir}/*.so* "
+FILES_${PN} += "${libdir} ${datadir} ${base_libdir}/security/pam_sss.so"
 FILES_${PN}-dev = " ${includedir}/* ${libdir}/*la ${libdir}/*/*la"
 
 # The package contains symlinks that trip up insane
diff --git a/meta-security/wic/beaglebone-yocto-verity.wks.in b/meta-security/wic/beaglebone-yocto-verity.wks.in
new file mode 100644
index 0000000..cd1702e
--- /dev/null
+++ b/meta-security/wic/beaglebone-yocto-verity.wks.in
@@ -0,0 +1,15 @@
+# SPDX-License-Identifier: MIT
+#
+# Copyright (C) 2020 BayLibre SAS
+# Author: Bartosz Golaszewski <bgolaszewski@baylibre.com>
+#
+# A dm-verity variant of the regular wks for beaglebone black. We need to fetch
+# the partition images from the DEPLOY_DIR_IMAGE as the rootfs source plugin will
+# not recreate the exact block device corresponding to the hash tree. We must
+# not alter the label or any other setting on the image.
+#
+# This .wks only works with the dm-verity-img class.
+
+part /boot --source bootimg-partition --ondisk mmcblk0 --fstype=vfat --label boot --active --align 4 --size 16 --sourceparams="loader=u-boot" --use-uuid
+part / --source rawcopy --ondisk mmcblk0 --sourceparams="file=${DEPLOY_DIR_IMAGE}/${DM_VERITY_IMAGE}-${MACHINE}.${DM_VERITY_IMAGE_TYPE}.verity"
+bootloader --append="console=ttyS0,115200"