#
# Records history of build output in order to detect regressions
#
# Based in part on testlab.bbclass and packagehistory.bbclass
#
# Copyright (C) 2011-2014 Intel Corporation
# Copyright (C) 2007-2011 Koen Kooi <koen@openembedded.org>
#
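#
# Typical usage (a minimal sketch, not part of this class): enable the class and
# commit each build's output by adding the following to conf/local.conf, e.g.
#   INHERIT += "buildhistory"
#   BUILDHISTORY_COMMIT = "1"
#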

BUILDHISTORY_FEATURES ?= "image package sdk"
BUILDHISTORY_DIR ?= "${TOPDIR}/buildhistory"
BUILDHISTORY_DIR_IMAGE = "${BUILDHISTORY_DIR}/images/${MACHINE_ARCH}/${TCLIBC}/${IMAGE_BASENAME}"
BUILDHISTORY_DIR_PACKAGE = "${BUILDHISTORY_DIR}/packages/${MULTIMACH_TARGET_SYS}/${PN}"
BUILDHISTORY_DIR_SDK = "${BUILDHISTORY_DIR}/sdk/${SDK_NAME}/${IMAGE_BASENAME}"
BUILDHISTORY_IMAGE_FILES ?= "/etc/passwd /etc/group"
BUILDHISTORY_COMMIT ?= "0"
BUILDHISTORY_COMMIT_AUTHOR ?= "buildhistory <buildhistory@${DISTRO}>"
BUILDHISTORY_PUSH_REPO ?= ""
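# If set, buildhistory_commit pushes to this remote after committing, e.g.
# BUILDHISTORY_PUSH_REPO = "ssh://git@example.com/path/to/repo" (example value only)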

SSTATEPOSTINSTFUNCS_append = " buildhistory_emit_pkghistory"
# We want to avoid influencing the signatures of sstate tasks - first the function itself:
sstate_install[vardepsexclude] += "buildhistory_emit_pkghistory"
# then the value added to SSTATEPOSTINSTFUNCS:
SSTATEPOSTINSTFUNCS[vardepvalueexclude] .= "| buildhistory_emit_pkghistory"

#
# Write out metadata about this package for comparison when writing future packages
#
python buildhistory_emit_pkghistory() {
    if d.getVar('BB_CURRENTTASK', True) not in ['packagedata', 'packagedata_setscene']:
        return 0

    if "package" not in (d.getVar('BUILDHISTORY_FEATURES', True) or "").split():
        return 0

    import re
    import json
    import errno

    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True)

    class RecipeInfo:
        def __init__(self, name):
            self.name = name
            self.pe = "0"
            self.pv = "0"
            self.pr = "r0"
            self.depends = ""
            self.packages = ""
            self.srcrev = ""


    class PackageInfo:
        def __init__(self, name):
            self.name = name
            self.pe = "0"
            self.pv = "0"
            self.pr = "r0"
            # pkg/pkge/pkgv/pkgr should be empty because we want to be able to default them
            self.pkg = ""
            self.pkge = ""
            self.pkgv = ""
            self.pkgr = ""
            self.size = 0
            self.depends = ""
            self.rprovides = ""
            self.rdepends = ""
            self.rrecommends = ""
            self.rsuggests = ""
            self.rreplaces = ""
            self.rconflicts = ""
            self.files = ""
            self.filelist = ""
            # Variables that need to be written to their own separate file
            self.filevars = dict.fromkeys(['pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm'])

    # Should check PACKAGES here to see if anything removed

    def readPackageInfo(pkg, histfile):
        pkginfo = PackageInfo(pkg)
        with open(histfile, "r") as f:
            for line in f:
                lns = line.split('=')
                name = lns[0].strip()
                value = lns[1].strip(" \t\r\n").strip('"')
                if name == "PE":
                    pkginfo.pe = value
                elif name == "PV":
                    pkginfo.pv = value
                elif name == "PR":
                    pkginfo.pr = value
                elif name == "PKG":
                    pkginfo.pkg = value
                elif name == "PKGE":
                    pkginfo.pkge = value
                elif name == "PKGV":
                    pkginfo.pkgv = value
                elif name == "PKGR":
                    pkginfo.pkgr = value
                elif name == "RPROVIDES":
                    pkginfo.rprovides = value
                elif name == "RDEPENDS":
                    pkginfo.rdepends = value
                elif name == "RRECOMMENDS":
                    pkginfo.rrecommends = value
                elif name == "RSUGGESTS":
                    pkginfo.rsuggests = value
                elif name == "RREPLACES":
                    pkginfo.rreplaces = value
                elif name == "RCONFLICTS":
                    pkginfo.rconflicts = value
                elif name == "PKGSIZE":
                    pkginfo.size = long(value)
                elif name == "FILES":
                    pkginfo.files = value
                elif name == "FILELIST":
                    pkginfo.filelist = value
        # Apply defaults
        if not pkginfo.pkg:
            pkginfo.pkg = pkginfo.name
        if not pkginfo.pkge:
            pkginfo.pkge = pkginfo.pe
        if not pkginfo.pkgv:
            pkginfo.pkgv = pkginfo.pv
        if not pkginfo.pkgr:
            pkginfo.pkgr = pkginfo.pr
        return pkginfo

    def getlastpkgversion(pkg):
        try:
            histfile = os.path.join(pkghistdir, pkg, "latest")
            return readPackageInfo(pkg, histfile)
        except EnvironmentError:
            return None

    def sortpkglist(string):
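        # Sort a dependency list while keeping any version constraint, e.g.
        # "foo (>= 1.2)", attached to its package name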
        pkgiter = re.finditer(r'[a-zA-Z0-9.+-]+( \([><=]+ [^ )]+\))?', string, 0)
        pkglist = [p.group(0) for p in pkgiter]
        pkglist.sort()
        return ' '.join(pkglist)

    def sortlist(string):
        items = string.split(' ')
        items.sort()
        return ' '.join(items)

    pn = d.getVar('PN', True)
    pe = d.getVar('PE', True) or "0"
    pv = d.getVar('PV', True)
    pr = d.getVar('PR', True)

    pkgdata_dir = d.getVar('PKGDATA_DIR', True)
    packages = ""
    try:
        with open(os.path.join(pkgdata_dir, pn)) as f:
            for line in f.readlines():
                if line.startswith('PACKAGES: '):
                    packages = oe.utils.squashspaces(line.split(': ', 1)[1])
                    break
    except IOError as e:
        if e.errno == errno.ENOENT:
            # Probably a -cross recipe, just ignore
            return 0
        else:
            raise

    packagelist = packages.split()
    if not os.path.exists(pkghistdir):
        bb.utils.mkdirhier(pkghistdir)
    else:
        # Remove files for packages that no longer exist
        for item in os.listdir(pkghistdir):
            if item != "latest" and item != "latest_srcrev":
                if item not in packagelist:
                    itempath = os.path.join(pkghistdir, item)
                    if os.path.isdir(itempath):
                        for subfile in os.listdir(itempath):
                            os.unlink(os.path.join(itempath, subfile))
                        os.rmdir(itempath)
                    else:
                        os.unlink(itempath)

    rcpinfo = RecipeInfo(pn)
    rcpinfo.pe = pe
    rcpinfo.pv = pv
    rcpinfo.pr = pr
    rcpinfo.depends = sortlist(oe.utils.squashspaces(d.getVar('DEPENDS', True) or ""))
    rcpinfo.packages = packages
    write_recipehistory(rcpinfo, d)

    pkgdest = d.getVar('PKGDEST', True)
    for pkg in packagelist:
        pkgdata = {}
        with open(os.path.join(pkgdata_dir, 'runtime', pkg)) as f:
            for line in f.readlines():
                item = line.rstrip('\n').split(': ', 1)
                key = item[0]
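                # Strip the per-package suffix, e.g. "RDEPENDS_<pkg>" becomes "RDEPENDS"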
                if key.endswith('_' + pkg):
                    key = key[:-len(pkg)-1]
                pkgdata[key] = item[1].decode('utf-8').decode('string_escape')

        pkge = pkgdata.get('PKGE', '0')
        pkgv = pkgdata['PKGV']
        pkgr = pkgdata['PKGR']
        #
        # Find out what the last version was
        # Make sure the version did not decrease
        #
        lastversion = getlastpkgversion(pkg)
        if lastversion:
            last_pkge = lastversion.pkge
            last_pkgv = lastversion.pkgv
            last_pkgr = lastversion.pkgr
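            # bb.utils.vercmp() compares (epoch, version, revision) tuples; a negative
            # result means the new version sorts lower than the previously recorded one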
            r = bb.utils.vercmp((pkge, pkgv, pkgr), (last_pkge, last_pkgv, last_pkgr))
            if r < 0:
                msg = "Package version for package %s went backwards which would break package feeds (from %s:%s-%s to %s:%s-%s)" % (pkg, last_pkge, last_pkgv, last_pkgr, pkge, pkgv, pkgr)
                package_qa_handle_error("version-going-backwards", msg, d)

        pkginfo = PackageInfo(pkg)
        # Apparently the version can be different on a per-package basis (see Python)
        pkginfo.pe = pkgdata.get('PE', '0')
        pkginfo.pv = pkgdata['PV']
        pkginfo.pr = pkgdata['PR']
        pkginfo.pkg = pkgdata['PKG']
        pkginfo.pkge = pkge
        pkginfo.pkgv = pkgv
        pkginfo.pkgr = pkgr
        pkginfo.rprovides = sortpkglist(oe.utils.squashspaces(pkgdata.get('RPROVIDES', "")))
        pkginfo.rdepends = sortpkglist(oe.utils.squashspaces(pkgdata.get('RDEPENDS', "")))
        pkginfo.rrecommends = sortpkglist(oe.utils.squashspaces(pkgdata.get('RRECOMMENDS', "")))
        pkginfo.rsuggests = sortpkglist(oe.utils.squashspaces(pkgdata.get('RSUGGESTS', "")))
        pkginfo.rreplaces = sortpkglist(oe.utils.squashspaces(pkgdata.get('RREPLACES', "")))
        pkginfo.rconflicts = sortpkglist(oe.utils.squashspaces(pkgdata.get('RCONFLICTS', "")))
        pkginfo.files = oe.utils.squashspaces(pkgdata.get('FILES', ""))
        for filevar in pkginfo.filevars:
            pkginfo.filevars[filevar] = pkgdata.get(filevar, "")

        # Gather information about packaged files
        val = pkgdata.get('FILES_INFO', '')
        dictval = json.loads(val)
        filelist = dictval.keys()
        filelist.sort()
        pkginfo.filelist = " ".join(filelist)

        pkginfo.size = int(pkgdata['PKGSIZE'])

        write_pkghistory(pkginfo, d)

    # Create a files-in-package.txt file for each of the recipe's packages, listing the files it contains
    bb.build.exec_func("buildhistory_list_pkg_files", d)
}


def write_recipehistory(rcpinfo, d):
    import codecs

    bb.debug(2, "Writing recipe history")

    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True)

    infofile = os.path.join(pkghistdir, "latest")
    with codecs.open(infofile, "w", encoding='utf8') as f:
        if rcpinfo.pe != "0":
            f.write(u"PE = %s\n" %  rcpinfo.pe)
        f.write(u"PV = %s\n" %  rcpinfo.pv)
        f.write(u"PR = %s\n" %  rcpinfo.pr)
        f.write(u"DEPENDS = %s\n" %  rcpinfo.depends)
        f.write(u"PACKAGES = %s\n" %  rcpinfo.packages)


def write_pkghistory(pkginfo, d):
    import codecs

    bb.debug(2, "Writing package history for package %s" % pkginfo.name)

    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True)

    pkgpath = os.path.join(pkghistdir, pkginfo.name)
    if not os.path.exists(pkgpath):
        bb.utils.mkdirhier(pkgpath)

    infofile = os.path.join(pkgpath, "latest")
    with codecs.open(infofile, "w", encoding='utf8') as f:
        if pkginfo.pe != "0":
            f.write(u"PE = %s\n" %  pkginfo.pe)
        f.write(u"PV = %s\n" %  pkginfo.pv)
        f.write(u"PR = %s\n" %  pkginfo.pr)

        pkgvars = {}
        pkgvars['PKG'] = pkginfo.pkg if pkginfo.pkg != pkginfo.name else ''
        pkgvars['PKGE'] = pkginfo.pkge if pkginfo.pkge != pkginfo.pe else ''
        pkgvars['PKGV'] = pkginfo.pkgv if pkginfo.pkgv != pkginfo.pv else ''
        pkgvars['PKGR'] = pkginfo.pkgr if pkginfo.pkgr != pkginfo.pr else ''
        for pkgvar in pkgvars:
            val = pkgvars[pkgvar]
            if val:
                f.write(u"%s = %s\n" % (pkgvar, val))

        f.write(u"RPROVIDES = %s\n" %  pkginfo.rprovides)
        f.write(u"RDEPENDS = %s\n" %  pkginfo.rdepends)
        f.write(u"RRECOMMENDS = %s\n" %  pkginfo.rrecommends)
        if pkginfo.rsuggests:
            f.write(u"RSUGGESTS = %s\n" %  pkginfo.rsuggests)
        if pkginfo.rreplaces:
            f.write(u"RREPLACES = %s\n" %  pkginfo.rreplaces)
        if pkginfo.rconflicts:
            f.write(u"RCONFLICTS = %s\n" %  pkginfo.rconflicts)
        f.write(u"PKGSIZE = %d\n" %  pkginfo.size)
        f.write(u"FILES = %s\n" %  pkginfo.files)
        f.write(u"FILELIST = %s\n" %  pkginfo.filelist)

    for filevar in pkginfo.filevars:
        filevarpath = os.path.join(pkgpath, "latest.%s" % filevar)
        val = pkginfo.filevars[filevar]
        if val:
            with codecs.open(filevarpath, "w", encoding='utf8') as f:
                f.write(val)
        else:
            if os.path.exists(filevarpath):
                os.unlink(filevarpath)

#
# rootfs_type can be: image, sdk_target, sdk_host
#
def buildhistory_list_installed(d, rootfs_type="image"):
    from oe.rootfs import image_list_installed_packages
    from oe.sdk import sdk_list_installed_packages

    process_list = [('file', 'bh_installed_pkgs.txt'),\
                    ('deps', 'bh_installed_pkgs_deps.txt')]
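    # The 'file' output lists one installed package per line ("<pkg> <pkgfile> <arch>");
    # the 'deps' output lists "pkg|dependency" edges (with "[REC]" marking recommends),
    # as consumed by buildhistory_get_installed below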

    for output_type, output_file in process_list:
        output_file_full = os.path.join(d.getVar('WORKDIR', True), output_file)

        with open(output_file_full, 'w') as output:
            if rootfs_type == "image":
                output.write(image_list_installed_packages(d, output_type))
            else:
                output.write(sdk_list_installed_packages(d, rootfs_type == "sdk_target", output_type))

python buildhistory_list_installed_image() {
    buildhistory_list_installed(d)
}

python buildhistory_list_installed_sdk_target() {
    buildhistory_list_installed(d, "sdk_target")
}

python buildhistory_list_installed_sdk_host() {
    buildhistory_list_installed(d, "sdk_host")
}

buildhistory_get_installed() {
    mkdir -p $1

    # Get list of installed packages
    pkgcache="$1/installed-packages.tmp"
    cat ${WORKDIR}/bh_installed_pkgs.txt | sort > $pkgcache && rm ${WORKDIR}/bh_installed_pkgs.txt

    cat $pkgcache | awk '{ print $1 }' > $1/installed-package-names.txt
    if [ -s $pkgcache ] ; then
        cat $pkgcache | awk '{ print $2 }' | xargs -n1 basename > $1/installed-packages.txt
    else
        printf "" > $1/installed-packages.txt
    fi

    # Produce dependency graph
    # First, quote each name to handle characters that cause issues for dot
    sed 's:\([^| ]*\):"\1":g' ${WORKDIR}/bh_installed_pkgs_deps.txt > $1/depends.tmp && \
        rm ${WORKDIR}/bh_installed_pkgs_deps.txt
    # Change delimiter from pipe to -> and set style for recommend lines
    sed -i -e 's:|: -> :' -e 's:"\[REC\]":[style=dotted]:' -e 's:$:;:' $1/depends.tmp
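    # e.g. an input line 'foo|bar' ends up as '"foo" -> "bar";' (illustrative example)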
    # Add the header, the sorted and de-duplicated contents, and the footer, then delete the temp file
    printf "digraph depends {\n    node [shape=plaintext]\n" > $1/depends.dot
    cat $1/depends.tmp | sort | uniq >> $1/depends.dot
    echo "}" >> $1/depends.dot
    rm $1/depends.tmp

    # Produce installed package sizes list
    printf "" > $1/installed-package-sizes.tmp
    cat $pkgcache | while read pkg pkgfile pkgarch
    do
        size=`oe-pkgdata-util -p ${PKGDATA_DIR} read-value "PKGSIZE" ${pkg}_${pkgarch}`
        if [ "$size" != "" ] ; then
            echo "$size $pkg" >> $1/installed-package-sizes.tmp
        fi
    done
    cat $1/installed-package-sizes.tmp | sort -n -r | awk '{print $1 "\tKiB " $2}' > $1/installed-package-sizes.txt
    rm $1/installed-package-sizes.tmp

    # We're now done with the cache, delete it
    rm $pkgcache

    if [ "$2" != "sdk" ] ; then
        # Produce some cut-down graphs (for readability)
        grep -v kernel_image $1/depends.dot | grep -v kernel-2 | grep -v kernel-3 > $1/depends-nokernel.dot
        grep -v libc6 $1/depends-nokernel.dot | grep -v libgcc > $1/depends-nokernel-nolibc.dot
        grep -v update- $1/depends-nokernel-nolibc.dot > $1/depends-nokernel-nolibc-noupdate.dot
        grep -v kernel-module $1/depends-nokernel-nolibc-noupdate.dot > $1/depends-nokernel-nolibc-noupdate-nomodules.dot
    fi

    # add complementary package information
    if [ -e ${WORKDIR}/complementary_pkgs.txt ]; then
        cp ${WORKDIR}/complementary_pkgs.txt $1
    fi
}

buildhistory_get_image_installed() {
    # Anything requiring the use of the packaging system should be done in here
    # in case the packaging files are going to be removed for this image

    if [ "${@bb.utils.contains('BUILDHISTORY_FEATURES', 'image', '1', '0', d)}" = "0" ] ; then
        return
    fi

    buildhistory_get_installed ${BUILDHISTORY_DIR_IMAGE}
}

buildhistory_get_sdk_installed() {
    # Anything requiring the use of the packaging system should be done in here
    # in case the packaging files are going to be removed for this SDK

    if [ "${@bb.utils.contains('BUILDHISTORY_FEATURES', 'sdk', '1', '0', d)}" = "0" ] ; then
        return
    fi

    buildhistory_get_installed ${BUILDHISTORY_DIR_SDK}/$1 sdk
}

buildhistory_get_sdk_installed_host() {
    buildhistory_get_sdk_installed host
}

buildhistory_get_sdk_installed_target() {
    buildhistory_get_sdk_installed target
}

buildhistory_list_files() {
    # List the files in the specified directory, but exclude date/time etc.
    # The find/sed pipeline below is somewhat messy, but it handles cases where
    # the size is not printed for device files under pseudo
    if [ "$3" = "fakeroot" ] ; then
        ( cd $1 && ${FAKEROOTENV} ${FAKEROOTCMD} find . ! -path . -printf "%M %-10u %-10g %10s %p -> %l\n" | sort -k5 | sed 's/ * -> $//' > $2 )
    else
        ( cd $1 && find . ! -path . -printf "%M %-10u %-10g %10s %p -> %l\n" | sort -k5 | sed 's/ * -> $//' > $2 )
    fi
}

buildhistory_list_pkg_files() {
    # Create an individual files-in-package.txt for each of the recipe's packages
    for pkgdir in $(find ${PKGDEST}/* -maxdepth 0 -type d); do
        pkgname=$(basename $pkgdir)
        outfolder="${BUILDHISTORY_DIR_PACKAGE}/$pkgname"
        outfile="$outfolder/files-in-package.txt"
        # Make sure the output folder exists so we can create the file
        if [ ! -d $outfolder ] ; then
            bbdebug 2 "Folder $outfolder does not exist, file $outfile not created"
            continue
        fi
        buildhistory_list_files $pkgdir $outfile fakeroot
    done
}

buildhistory_get_imageinfo() {
    if [ "${@bb.utils.contains('BUILDHISTORY_FEATURES', 'image', '1', '0', d)}" = "0" ] ; then
        return
    fi

    buildhistory_list_files ${IMAGE_ROOTFS} ${BUILDHISTORY_DIR_IMAGE}/files-in-image.txt

    # Collect files requested in BUILDHISTORY_IMAGE_FILES
    rm -rf ${BUILDHISTORY_DIR_IMAGE}/image-files
    for f in ${BUILDHISTORY_IMAGE_FILES}; do
        if [ -f ${IMAGE_ROOTFS}/$f ] ; then
            mkdir -p ${BUILDHISTORY_DIR_IMAGE}/image-files/`dirname $f`
            cp ${IMAGE_ROOTFS}/$f ${BUILDHISTORY_DIR_IMAGE}/image-files/$f
        fi
    done

    # Record some machine-readable meta-information about the image
    printf "" > ${BUILDHISTORY_DIR_IMAGE}/image-info.txt
    cat >> ${BUILDHISTORY_DIR_IMAGE}/image-info.txt <<END
${@buildhistory_get_imagevars(d)}
END
    imagesize=`du -ks ${IMAGE_ROOTFS} | awk '{ print $1 }'`
    echo "IMAGESIZE = $imagesize" >> ${BUILDHISTORY_DIR_IMAGE}/image-info.txt

    # Add some configuration information
    echo "${MACHINE}: ${IMAGE_BASENAME} configured for ${DISTRO} ${DISTRO_VERSION}" > ${BUILDHISTORY_DIR_IMAGE}/build-id.txt

    cat >> ${BUILDHISTORY_DIR_IMAGE}/build-id.txt <<END
${@buildhistory_get_build_id(d)}
END
}

buildhistory_get_sdkinfo() {
    if [ "${@bb.utils.contains('BUILDHISTORY_FEATURES', 'sdk', '1', '0', d)}" = "0" ] ; then
        return
    fi

    buildhistory_list_files ${SDK_OUTPUT} ${BUILDHISTORY_DIR_SDK}/files-in-sdk.txt

    # Record some machine-readable meta-information about the SDK
    printf "" > ${BUILDHISTORY_DIR_SDK}/sdk-info.txt
    cat >> ${BUILDHISTORY_DIR_SDK}/sdk-info.txt <<END
${@buildhistory_get_sdkvars(d)}
END
    sdksize=`du -ks ${SDK_OUTPUT} | awk '{ print $1 }'`
    echo "SDKSIZE = $sdksize" >> ${BUILDHISTORY_DIR_SDK}/sdk-info.txt
}

# By using ROOTFS_POSTUNINSTALL_COMMAND we get in after uninstallation of
# unneeded packages but before the removal of packaging files
ROOTFS_POSTUNINSTALL_COMMAND += " buildhistory_list_installed_image ;\
                                  buildhistory_get_image_installed ; "

IMAGE_POSTPROCESS_COMMAND += " buildhistory_get_imageinfo ; "

# We want these to be the last run so that we get called after complementary package installation
POPULATE_SDK_POST_TARGET_COMMAND_append = " buildhistory_list_installed_sdk_target ;\
                                            buildhistory_get_sdk_installed_target ; "
POPULATE_SDK_POST_HOST_COMMAND_append = " buildhistory_list_installed_sdk_host ;\
                                          buildhistory_get_sdk_installed_host ; "

SDK_POSTPROCESS_COMMAND_append = " buildhistory_get_sdkinfo ; "

def buildhistory_get_build_id(d):
    if d.getVar('BB_WORKERCONTEXT', True) != '1':
        return ""
    localdata = bb.data.createCopy(d)
    bb.data.update_data(localdata)
    statuslines = []
    for func in oe.data.typed_value('BUILDCFG_FUNCS', localdata):
        g = globals()
        if func not in g:
            bb.warn("Build configuration function '%s' does not exist" % func)
        else:
            flines = g[func](localdata)
            if flines:
                statuslines.extend(flines)

    statusheader = d.getVar('BUILDCFG_HEADER', True)
    return('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines)))

def buildhistory_get_metadata_revs(d):
    # We want an easily machine-readable format here, so get_layers_branch_rev isn't quite what we want
    layers = (d.getVar("BBLAYERS", True) or "").split()
    metadata_revs = ["%-17s = %s:%s" % (os.path.basename(i), \
        base_get_metadata_git_branch(i, None).strip(), \
        base_get_metadata_git_revision(i, None)) \
            for i in layers]
    return '\n'.join(metadata_revs)

def outputvars(vars, listvars, d):
    vars = vars.split()
    listvars = listvars.split()
    ret = ""
    for var in vars:
        value = d.getVar(var, True) or ""
        if var in listvars:
            # Squash out spaces
            value = oe.utils.squashspaces(value)
        ret += "%s = %s\n" % (var, value)
    return ret.rstrip('\n')

def buildhistory_get_imagevars(d):
    if d.getVar('BB_WORKERCONTEXT', True) != '1':
        return ""
    imagevars = "DISTRO DISTRO_VERSION USER_CLASSES IMAGE_CLASSES IMAGE_FEATURES IMAGE_LINGUAS IMAGE_INSTALL BAD_RECOMMENDATIONS NO_RECOMMENDATIONS PACKAGE_EXCLUDE ROOTFS_POSTPROCESS_COMMAND IMAGE_POSTPROCESS_COMMAND"
    listvars = "USER_CLASSES IMAGE_CLASSES IMAGE_FEATURES IMAGE_LINGUAS IMAGE_INSTALL BAD_RECOMMENDATIONS PACKAGE_EXCLUDE"
    return outputvars(imagevars, listvars, d)

def buildhistory_get_sdkvars(d):
    if d.getVar('BB_WORKERCONTEXT', True) != '1':
        return ""
    sdkvars = "DISTRO DISTRO_VERSION SDK_NAME SDK_VERSION SDKMACHINE SDKIMAGE_FEATURES BAD_RECOMMENDATIONS NO_RECOMMENDATIONS PACKAGE_EXCLUDE"
    listvars = "SDKIMAGE_FEATURES BAD_RECOMMENDATIONS PACKAGE_EXCLUDE"
    return outputvars(sdkvars, listvars, d)


def buildhistory_get_cmdline(d):
    if sys.argv[0].endswith('bin/bitbake'):
        bincmd = 'bitbake'
    else:
        bincmd = sys.argv[0]
    return '%s %s' % (bincmd, ' '.join(sys.argv[1:]))


buildhistory_single_commit() {
    if [ "$3" = "" ] ; then
        commitopts="${BUILDHISTORY_DIR}/ --allow-empty"
        item="No changes"
    else
        commitopts="$3 metadata-revs"
        item="$3"
    fi
    if [ "${BUILDHISTORY_BUILD_FAILURES}" = "0" ] ; then
        result="succeeded"
    else
        result="failed"
    fi
    case ${BUILDHISTORY_BUILD_INTERRUPTED} in
        1)
            result="$result (interrupted)"
            ;;
        2)
            result="$result (force interrupted)"
            ;;
    esac
    commitmsgfile=`mktemp`
    cat > $commitmsgfile << END
$item: Build ${BUILDNAME} of ${DISTRO} ${DISTRO_VERSION} for machine ${MACHINE} on $2

cmd: $1

result: $result

metadata revisions:
END
    cat ${BUILDHISTORY_DIR}/metadata-revs >> $commitmsgfile
    git commit $commitopts -F $commitmsgfile --author "${BUILDHISTORY_COMMIT_AUTHOR}" > /dev/null
    rm $commitmsgfile
}

buildhistory_commit() {
    if [ ! -d ${BUILDHISTORY_DIR} ] ; then
        # The code above that creates this directory never executed, so there can't be anything to commit
        return
    fi

    # Create a machine-readable list of metadata revisions for each layer
    cat > ${BUILDHISTORY_DIR}/metadata-revs <<END
${@buildhistory_get_metadata_revs(d)}
END

    ( cd ${BUILDHISTORY_DIR}/
      # Initialise the repo if necessary
      if [ ! -d .git ] ; then
          git init -q
      else
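          # Roll the build-minus-N tags forward so the previous three builds remain easy to reference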
          git tag -f build-minus-3 build-minus-2 > /dev/null 2>&1 || true
          git tag -f build-minus-2 build-minus-1 > /dev/null 2>&1 || true
          git tag -f build-minus-1 > /dev/null 2>&1 || true
      fi
      # If the user hasn't set up their name/email, set some defaults
      # just for this repo (otherwise the commit will fail with older
      # versions of git)
      if ! git config user.email > /dev/null ; then
          git config --local user.email "buildhistory@${DISTRO}"
      fi
      if ! git config user.name > /dev/null ; then
          git config --local user.name "buildhistory"
      fi
      # Check if there are new/changed files to commit (other than metadata-revs)
      repostatus=`git status --porcelain | grep -v " metadata-revs$"`
      HOSTNAME=`hostname 2>/dev/null || echo unknown`
      CMDLINE="${@buildhistory_get_cmdline(d)}"
      if [ "$repostatus" != "" ] ; then
          git add -A .
          # porcelain output looks like "?? packages/foo/bar"
          # Ensure we commit metadata-revs with the first commit
          for entry in `echo "$repostatus" | awk '{print $2}' | awk -F/ '{print $1}' | sort | uniq` ; do
              buildhistory_single_commit "$CMDLINE" "$HOSTNAME" "$entry"
          done
          git gc --auto --quiet
      else
          buildhistory_single_commit "$CMDLINE" "$HOSTNAME"
      fi
      if [ "${BUILDHISTORY_PUSH_REPO}" != "" ] ; then
          git push -q ${BUILDHISTORY_PUSH_REPO}
      fi) || true
}

python buildhistory_eventhandler() {
    if e.data.getVar('BUILDHISTORY_FEATURES', True).strip():
        if e.data.getVar("BUILDHISTORY_COMMIT", True) == "1":
            bb.note("Writing buildhistory")
            localdata = bb.data.createCopy(e.data)
            localdata.setVar('BUILDHISTORY_BUILD_FAILURES', str(e._failures))
            interrupted = getattr(e, '_interrupted', 0)
            localdata.setVar('BUILDHISTORY_BUILD_INTERRUPTED', str(interrupted))
            bb.build.exec_func("buildhistory_commit", localdata)
}

addhandler buildhistory_eventhandler
buildhistory_eventhandler[eventmask] = "bb.event.BuildCompleted"


# FIXME this ought to be moved into the fetcher
def _get_srcrev_values(d):
    """
    Return the version strings for the current recipe
    """

    scms = []
    fetcher = bb.fetch.Fetch(d.getVar('SRC_URI', True).split(), d)
    urldata = fetcher.ud
    for u in urldata:
        if urldata[u].method.supports_srcrev():
            scms.append(u)

    autoinc_templ = 'AUTOINC+'
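    # sortable_revision() historically returned a plain string, optionally prefixed
    # with "AUTOINC+"; newer bitbake returns an (autoinc, rev) tuple - both forms are
    # handled below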
    dict_srcrevs = {}
    dict_tag_srcrevs = {}
    for scm in scms:
        ud = urldata[scm]
        for name in ud.names:
            try:
                rev = ud.method.sortable_revision(ud, d, name)
            except TypeError:
                # support old bitbake versions
                rev = ud.method.sortable_revision(scm, ud, d, name)
            # Clean this up when we next bump bitbake version
            if type(rev) != str:
                autoinc, rev = rev
            elif rev.startswith(autoinc_templ):
                rev = rev[len(autoinc_templ):]
            dict_srcrevs[name] = rev
            if 'tag' in ud.parm:
                tag = ud.parm['tag']
                key = name + '_' + tag
                dict_tag_srcrevs[key] = rev
    return (dict_srcrevs, dict_tag_srcrevs)

do_fetch[postfuncs] += "write_srcrev"
do_fetch[vardepsexclude] += "write_srcrev"
python write_srcrev() {
    pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True)
    srcrevfile = os.path.join(pkghistdir, 'latest_srcrev')

    srcrevs, tag_srcrevs = _get_srcrev_values(d)
    if srcrevs:
        if not os.path.exists(pkghistdir):
            bb.utils.mkdirhier(pkghistdir)
        old_tag_srcrevs = {}
        if os.path.exists(srcrevfile):
            with open(srcrevfile) as f:
                for line in f:
                    if line.startswith('# tag_'):
                        key, value = line.split("=", 1)
                        key = key.replace('# tag_', '').strip()
                        value = value.replace('"', '').strip()
                        old_tag_srcrevs[key] = value
        with open(srcrevfile, 'w') as f:
            orig_srcrev = d.getVar('SRCREV', False) or 'INVALID'
            if orig_srcrev != 'INVALID':
                f.write('# SRCREV = "%s"\n' % orig_srcrev)
            if len(srcrevs) > 1:
                for name, srcrev in srcrevs.items():
                    orig_srcrev = d.getVar('SRCREV_%s' % name, False)
                    if orig_srcrev:
                        f.write('# SRCREV_%s = "%s"\n' % (name, orig_srcrev))
                    f.write('SRCREV_%s = "%s"\n' % (name, srcrev))
            else:
                f.write('SRCREV = "%s"\n' % srcrevs.itervalues().next())
            if len(tag_srcrevs) > 0:
                for name, srcrev in tag_srcrevs.items():
                    f.write('# tag_%s = "%s"\n' % (name, srcrev))
                    if name in old_tag_srcrevs and old_tag_srcrevs[name] != srcrev:
                        pkg = d.getVar('PN', True)
                        bb.warn("Revision for tag %s in package %s was changed since last build (from %s to %s)" % (name, pkg, old_tag_srcrevs[name], srcrev))

    else:
        if os.path.exists(srcrevfile):
            os.remove(srcrevfile)
}