poky: refresh thud: e4c0a8a7cb..9dfebdaf7a

Update poky to thud HEAD.

Mazliana (2):
      scripts/resulttool: enable manual execution and result creation
      resulttool/manualexecution: To output right test case id

Michael Halstead (1):
      yocto-uninative: Correct sha256sum for aarch64

Richard Purdie (12):
      resulttool: Improvements to allow integration to the autobuilder
      resulttool/resultutils: Avoids tracebacks for missing logs
      resulttool/store: Handle results files for multiple revisions
      resulttool/report: Handle missing metadata sections more cleanly
      resulttool/report: Ensure test suites with no results show up on the report
      resulttool/report: Ensure ptest results are sorted
      resulttool/store: Fix missing variable causing testresult corruption
      oeqa/utils/gitarchive: Handle case where parent is only on origin
      scripts/wic: Be consistent about how we call bitbake
      yocto-uninative: Update to 2.4
      poky.conf: Bump version for 2.6.2 thud release
      build-appliance-image: Update to thud head revision

Yeoh Ee Peng (4):
      resulttool: enable merge, store, report and regression analysis
      resulttool/regression: Ensure regressoin results are sorted
      scripts/resulttool: Enable manual result store and regression
      resulttool/report: Enable roll-up report for a commit

Change-Id: Icf3c93db794539bdd4501d2e7db15c68b6c541ae
Signed-off-by: Brad Bishop <bradleyb@fuzziesquirrel.com>
diff --git a/poky/scripts/lib/resulttool/resultutils.py b/poky/scripts/lib/resulttool/resultutils.py
new file mode 100644
index 0000000..153f2b8
--- /dev/null
+++ b/poky/scripts/lib/resulttool/resultutils.py
@@ -0,0 +1,141 @@
+# resulttool - common library/utility functions
+#
+# Copyright (c) 2019, Intel Corporation.
+# Copyright (c) 2019, Linux Foundation
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms and conditions of the GNU General Public License,
+# version 2, as published by the Free Software Foundation.
+#
+# This program is distributed in the hope it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
+# more details.
+#
+import os
+import json
+import scriptpath
+scriptpath.add_oe_lib_path()
+
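+# Each map lists the configuration fields used by append_resultsdata() to build
+# the "testpath" under which results for a given TEST_TYPE are grouped, e.g.
+# store_map would place a runtime result under something like
+# "runtime/poky/qemux86/core-image-sato" (values illustrative).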
+flatten_map = {
+    "oeselftest": [],
+    "runtime": [],
+    "sdk": [],
+    "sdkext": [],
+    "manual": []
+}
+regression_map = {
+    "oeselftest": ['TEST_TYPE', 'MACHINE'],
+    "runtime": ['TESTSERIES', 'TEST_TYPE', 'IMAGE_BASENAME', 'MACHINE', 'IMAGE_PKGTYPE', 'DISTRO'],
+    "sdk": ['TESTSERIES', 'TEST_TYPE', 'IMAGE_BASENAME', 'MACHINE', 'SDKMACHINE'],
+    "sdkext": ['TESTSERIES', 'TEST_TYPE', 'IMAGE_BASENAME', 'MACHINE', 'SDKMACHINE'],
+    "manual": ['TEST_TYPE', 'TEST_MODULE', 'IMAGE_BASENAME', 'MACHINE']
+}
+store_map = {
+    "oeselftest": ['TEST_TYPE'],
+    "runtime": ['TEST_TYPE', 'DISTRO', 'MACHINE', 'IMAGE_BASENAME'],
+    "sdk": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'],
+    "sdkext": ['TEST_TYPE', 'MACHINE', 'SDKMACHINE', 'IMAGE_BASENAME'],
+    "manual": ['TEST_TYPE', 'TEST_MODULE', 'MACHINE', 'IMAGE_BASENAME']
+}
+
+#
+# Load the json file and append the results data into the provided results dict
+#
+def append_resultsdata(results, f, configmap=store_map):
+    if type(f) is str:
+        with open(f, "r") as filedata:
+            data = json.load(filedata)
+    else:
+        data = f
+    for res in data:
+        if "configuration" not in data[res] or "result" not in data[res]:
+            raise ValueError("Test results data without configuration or result section?")
+        if "TESTSERIES" not in data[res]["configuration"]:
+            data[res]["configuration"]["TESTSERIES"] = os.path.basename(os.path.dirname(f))
+        testtype = data[res]["configuration"].get("TEST_TYPE")
+        if testtype not in configmap:
+            raise ValueError("Unknown test type %s" % testtype)
+        configvars = configmap[testtype]
+        testpath = "/".join(data[res]["configuration"].get(i) for i in configvars)
+        if testpath not in results:
+            results[testpath] = {}
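+        # Drop the raw ptest logs and per-section ptest logs rather than storing them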
+        if 'ptestresult.rawlogs' in data[res]['result']:
+            del data[res]['result']['ptestresult.rawlogs']
+        if 'ptestresult.sections' in data[res]['result']:
+            for i in data[res]['result']['ptestresult.sections']:
+                if 'log' in data[res]['result']['ptestresult.sections'][i]:
+                    del data[res]['result']['ptestresult.sections'][i]['log']
+        results[testpath][res] = data[res]
+
+#
+# Walk a directory and find/load results data
+# or load directly from a file
+#
+def load_resultsdata(source, configmap=store_map):
+    results = {}
+    if os.path.isfile(source):
+        append_resultsdata(results, source, configmap)
+        return results
+    for root, dirs, files in os.walk(source):
+        for name in files:
+            f = os.path.join(root, name)
+            if name == "testresults.json":
+                append_resultsdata(results, f, configmap)
+    return results
+
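+# Return a copy of results containing only the entries whose id matches resultid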
+def filter_resultsdata(results, resultid):
+    newresults = {}
+    for r in results:
+        for i in results[r]:
+            if i == resultid:
+                newresults[r] = {}
+                newresults[r][i] = results[r][i]
+    return newresults
+
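+# Write the results out as one testresults.json per testpath under destdir
+# (results with an empty testpath go directly under destdir)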
+def save_resultsdata(results, destdir, fn="testresults.json"):
+    for res in results:
+        if res:
+            dst = destdir + "/" + res + "/" + fn
+        else:
+            dst = destdir + "/" + fn
+        os.makedirs(os.path.dirname(dst), exist_ok=True)
+        with open(dst, 'w') as f:
+            f.write(json.dumps(results[res], sort_keys=True, indent=4))
+
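+# Read every testresults.json stored under the given tags of a git repository
+# and merge them into a single results dict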
+def git_get_result(repo, tags):
+    git_objs = []
+    for tag in tags:
+        files = repo.run_cmd(['ls-tree', "--name-only", "-r", tag]).splitlines()
+        git_objs.extend([tag + ':' + f for f in files if f.endswith("testresults.json")])
+
+    def parse_json_stream(data):
+        """Parse multiple concatenated JSON objects"""
+        objs = []
+        json_d = ""
+        for line in data.splitlines():
+            if line == '}{':
+                json_d += '}'
+                objs.append(json.loads(json_d))
+                json_d = '{'
+            else:
+                json_d += line
+        objs.append(json.loads(json_d))
+        return objs
+
+    # Optimize by reading all data with one git command
+    results = {}
+    for obj in parse_json_stream(repo.run_cmd(['show'] + git_objs + ['--'])):
+        append_resultsdata(results, obj)
+
+    return results
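
A minimal usage sketch (not part of the patch) showing how the helpers above are
expected to chain together; the import path, directory layout and result id below
are illustrative assumptions, not values taken from this change:

    import sys
    sys.path.insert(0, "poky/scripts/lib")   # assumption: run from a repo checkout
    import resulttool.resultutils as resultutils

    # Walk a build's log directory and load every testresults.json it contains,
    # grouping the results by the store_map testpath
    results = resultutils.load_resultsdata("build/tmp/log/oeqa",
                                            configmap=resultutils.store_map)

    # Optionally keep only a single result id before writing the data back out
    results = resultutils.filter_resultsdata(results,
                                             "oeselftest_poky_qemux86-64_20190315010101")

    # Write one testresults.json per testpath under the destination directory
    resultutils.save_resultsdata(results, "extracted-results")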