# blob: 455a5176fe5d4cf00d4e8ac38b7625458fa48ddf (gitiles artifact, not part of the script)
#! /usr/bin/env python3
import argparse
import datetime
import os
import pathlib
import re
import sys
import jinja2
def trim_pv(pv):
    """
    Strip anything after "+git" from the PV string.

    For example "1.2+gitAUTOINC+abcd" becomes "1.2+git"; a PV with no
    "+git" marker is returned unchanged (partition yields empty parts).
    """
    return "".join(pv.partition("+git")[:2])
def needs_update(version, upstream):
    """
    Do a dumb comparison to determine if the version needs to be updated.

    A trailing "+git" marks a post-release snapshot, so it is stripped
    before comparing against the upstream release string.
    """
    if "+git" in version:
        # strip +git and see if this is a post-release snapshot
        version = version.replace("+git", "")
    return version != upstream
def safe_patches(patches):
    """
    Return True when no patch in the list has a worrying Upstream-Status
    (Denied, Pending, or Unknown); an empty list is considered safe.
    """
    unsafe_statuses = ("Denied", "Pending", "Unknown")
    return all(entry["status"] not in unsafe_statuses for entry in patches)
def layer_path(layername: str, d) -> pathlib.Path:
    """
    Return the path to the specified layer, or None if the layer isn't present.

    :param layername: layer name as used in BBFILE_PATTERN_<layername>
    :param d: a BitBake data store providing getVar()
    """
    if not hasattr(layer_path, "cache"):
        # Don't use functools.lru_cache as we don't want d changing to invalidate the cache
        layer_path.cache = {}

    if layername in layer_path.cache:
        return layer_path.cache[layername]

    bbpath = d.getVar("BBPATH").split(":")
    pattern = d.getVar('BBFILE_PATTERN_' + layername)
    # Walk BBPATH in reverse-sorted order and take the first entry whose
    # trailing-slash form matches the layer's file pattern.
    for path in reversed(sorted(bbpath)):
        if re.match(pattern, path + "/"):
            layer_path.cache[layername] = pathlib.Path(path)
            return layer_path.cache[layername]
    return None
def get_url_for_patch(layer: str, localpath: pathlib.Path, d) -> str:
    """
    Return a web URL for the given patch file, or None if no web view is
    known for the layer it belongs to.

    NOTE(review): the original returns were bare relative paths
    (f"{relative}"), which are not URLs despite the function name; the
    cgit URL prefixes below are reconstructed — confirm against the
    layers' actual web interfaces.
    """
    relative = localpath.relative_to(layer_path(layer, d))
    # TODO: use layerindexlib
    # TODO: assumes default branch
    if layer == "core":
        return f"https://git.openembedded.org/openembedded-core/tree/{relative}"
    elif layer in ("meta-arm", "meta-arm-bsp", "arm-toolchain"):
        return f"https://git.yoctoproject.org/meta-arm/tree/{layer}/{relative}"
    print(f"WARNING: Don't know web URL for layer {layer}", file=sys.stderr)
    return None
def extract_patch_info(src_uri, d):
    """
    Parse the specified patch entry from a SRC_URI and return a dict with
    the patch's name, layer, web URL, and Upstream-Status.
    """
    import bb.fetch, bb.utils
    info = {}
    localpath = pathlib.Path(bb.fetch.decodeurl(src_uri)[2])
    info["name"] = localpath.name
    info["layer"] = bb.utils.get_file_layer(str(localpath), d)
    info["url"] = get_url_for_patch(info["layer"], localpath, d)

    status = "Unknown"
    # errors="ignore": patches may contain arbitrary bytes, we only care
    # about the Upstream-Status header line.
    with open(localpath, errors="ignore") as f:
        m = re.search(r"^[\t ]*Upstream[-_ ]Status:?[\t ]*(\w*)", f.read(), re.IGNORECASE | re.MULTILINE)
        if m:
            # TODO: validate that this is one of the known status values
            status = m.group(1)
    info["status"] = status
    return info
def harvest_data(machines, recipes):
    """
    Gather version and patch information for every recipe on every machine.

    Returns a tuple (upstreams, versions):
    - upstreams maps recipe name to the latest upstream release found
    - versions maps machine -> recipe -> details dict (PN, PV, patches, ...)

    NOTE(review): the original dump was missing several statements here
    (the try: lines, tinfoil.prepare, to_check removal); this body is a
    reconstruction — verify against upstream meta-arm.
    """
    import bb.tinfoil

    # A throwaway Tinfoil just to locate the core layer so that the oe
    # modules can be imported from its lib/ directory.
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=True)
        corepath = layer_path("core", tinfoil.config_data)
        sys.path.append(os.path.join(corepath, "lib"))
        import oe.recipeutils
        import oe.patch

    # Queue of recipes that we're still looking for upstream releases for
    to_check = list(recipes)
    # Upstream releases
    upstreams = {}
    # Machines to recipes to versions
    versions = {}

    for machine in machines:
        print(f"Gathering data for {machine}...")
        os.environ["MACHINE"] = machine
        with bb.tinfoil.Tinfoil() as tinfoil:
            tinfoil.prepare(quiet=2)
            versions[machine] = {}
            for recipe in recipes:
                try:
                    d = tinfoil.parse_recipe(recipe)
                except bb.providers.NoProvider:
                    # This machine doesn't provide the recipe, skip it
                    continue

                if recipe in to_check:
                    try:
                        info = oe.recipeutils.get_recipe_upstream_version(d)
                        upstreams[recipe] = info["version"]
                        to_check.remove(recipe)
                    except (bb.providers.NoProvider, KeyError):
                        # Couldn't determine the upstream release here; try
                        # again with the next machine's copy of the recipe.
                        pass

                details = versions[machine][recipe] = {}
                details["recipe"] = d.getVar("PN")
                details["version"] = trim_pv(d.getVar("PV"))
                details["fullversion"] = d.getVar("PV")
                details["patches"] = [extract_patch_info(p, d) for p in oe.patch.src_patches(d)]
                details["patched"] = bool(details["patches"])
                details["patches_safe"] = safe_patches(details["patches"])

    # Now backfill the upstream versions
    for machine in versions:
        for recipe in versions[machine]:
            data = versions[machine][recipe]
            data["upstream"] = upstreams[recipe]
            data["needs_update"] = needs_update(data["version"], data["upstream"])

    return upstreams, versions
# TODO can this be inferred from the list of recipes in the layer
# NOTE(review): the tuple was truncated in the dump; entries beyond
# virtual/kernel are reconstructed — confirm against the layer's recipes.
recipes = ("virtual/kernel",
           "scp-firmware",
           "trusted-firmware-a",
           "edk2-firmware",
           "u-boot")
class Format:
    """
    Base class for output formats.

    Subclasses are registered by their class attribute ``name`` via
    __init_subclass__ and looked up with get_format().
    """
    # The name of this format
    name = None
    # Registry of names to classes
    registry = {}

    def __init_subclass__(cls, **kwargs):
        super().__init_subclass__(**kwargs)
        cls.registry[cls.name] = cls

    @classmethod
    def get_format(cls, name):
        """Instantiate and return the registered format called name."""
        return cls.registry[name]()

    def render(self, context, output: pathlib.Path):
        """Render context to output; implemented by subclasses."""
        raise NotImplementedError()

    def get_template(self, name):
        """Load the named jinja2 template from this script's directory."""
        template_dir = os.path.dirname(os.path.abspath(__file__))
        env = jinja2.Environment(
            loader=jinja2.FileSystemLoader(template_dir),
            extensions=["jinja2.ext.i18n"],
        )
        # We only need i18n for plurals
        env.install_null_translations()
        return env.get_template(name)
class TextOverview(Format):
    """Render a single plain-text overview file covering all machines."""
    name = "overview.txt"

    def render(self, context, output: pathlib.Path):
        # NOTE(review): the write call was missing from the dump; the
        # template name is inferred from self.name — confirm the .jinja
        # file shipped next to this script.
        with open(output, "wt") as f:
            f.write(self.get_template(f"{self.name}.jinja").render(context))
class HtmlUpdates(Format):
    """Render an HTML report: an index page plus one page per machine."""
    name = "report"

    def render(self, context, output: pathlib.Path):
        # output must be a directory (created on demand)
        if output.exists() and not output.is_dir():
            print(f"{output} is not a directory", file=sys.stderr)
            sys.exit(1)
        if not output.exists():
            output.mkdir(parents=True)

        # NOTE(review): the template render calls were missing from the
        # dump; the template file names are reconstructed — confirm
        # against the .jinja files next to this script.
        with open(output / "index.html", "wt") as f:
            f.write(self.get_template("report-index.html.jinja").render(context))

        # Per-machine pages get the full context, but with "data" replaced
        # by just that machine's data.
        subcontext = context.copy()
        del subcontext["data"]
        for machine, subdata in context["data"].items():
            subcontext["machine"] = machine
            subcontext["data"] = subdata
            with open(output / f"{machine}.html", "wt") as f:
                f.write(self.get_template("report-details.html.jinja").render(subcontext))
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="machine-summary")
    parser.add_argument("machines", nargs="+", help="machine names", metavar="MACHINE")
    parser.add_argument("-t", "--type", required=True, choices=Format.registry.keys())
    parser.add_argument("-o", "--output", type=pathlib.Path, required=True)
    args = parser.parse_args()

    context = {}
    # TODO: include git describe for meta-arm
    # Human-readable locale timestamp for the report header
    context["timestamp"] = datetime.datetime.now().strftime("%c")
    context["recipes"] = sorted(recipes)
    context["releases"], context["data"] = harvest_data(args.machines, recipes)

    formatter = Format.get_format(args.type)
    formatter.render(context, args.output)