Patrick Williams | 92b42cb | 2022-09-03 06:53:57 -0500 | [diff] [blame] | 1 | # Copyright (C) 2012 Linux Foundation |
| 2 | # Author: Richard Purdie |
| 3 | # Some code and influence taken from srctree.bbclass: |
| 4 | # Copyright (C) 2009 Chris Larson <clarson@kergoth.com> |
| 5 | # |
| 6 | # SPDX-License-Identifier: MIT |
| 7 | # |
# externalsrc.bbclass enables use of an existing source tree, usually external
# to the build system, to build a piece of software rather than going through
# the usual fetch/unpack/patch process.
| 11 | # |
| 12 | # To use, add externalsrc to the global inherit and set EXTERNALSRC to point at the |
| 13 | # directory you want to use containing the sources e.g. from local.conf for a recipe |
| 14 | # called "myrecipe" you would do: |
| 15 | # |
| 16 | # INHERIT += "externalsrc" |
| 17 | # EXTERNALSRC:pn-myrecipe = "/path/to/my/source/tree" |
| 18 | # |
| 19 | # In order to make this class work for both target and native versions (or with |
| 20 | # multilibs/cross or other BBCLASSEXTEND variants), B is set to point to a separate |
| 21 | # directory under the work directory (split source and build directories). This is |
| 22 | # the default, but the build directory can be set to the source directory if |
| 23 | # circumstances dictate by setting EXTERNALSRC_BUILD to the same value, e.g.: |
| 24 | # |
| 25 | # EXTERNALSRC_BUILD:pn-myrecipe = "/path/to/my/source/tree" |
| 26 | # |
| 27 | |
# Tasks whose work is already covered by having the source tree present;
# they are removed for externalsrc recipes (do_fetch/do_unpack are kept if
# the filtered SRC_URI still contains local entries - see the anonymous
# python function below).
SRCTREECOVEREDTASKS ?= "do_patch do_unpack do_fetch"
# Convenience symlinks created inside the source tree by the do_configure
# prefunc, as space-separated <link-name>:<target> pairs.
EXTERNALSRC_SYMLINKS ?= "oe-workdir:${WORKDIR} oe-logs:${T}"
| 30 | |
python () {
    # Parse-time hook: when EXTERNALSRC is set, repoint S/B at the external
    # tree, strip remote entries from SRC_URI, delete tasks made redundant by
    # the pre-existing sources, and wire in the file-checksum functions that
    # detect source changes.
    externalsrc = d.getVar('EXTERNALSRC')
    externalsrcbuild = d.getVar('EXTERNALSRC_BUILD')

    if externalsrc and not externalsrc.startswith("/"):
        bb.error("EXTERNALSRC must be an absolute path")
    if externalsrcbuild and not externalsrcbuild.startswith("/"):
        bb.error("EXTERNALSRC_BUILD must be an absolute path")

    # If this is the base recipe and EXTERNALSRC is set for it or any of its
    # derivatives, then enable BB_DONT_CACHE to force the recipe to always be
    # re-parsed so that the file-checksums function for do_compile is run every
    # time.
    bpn = d.getVar('BPN')
    classextend = (d.getVar('BBCLASSEXTEND') or '').split()
    if bpn == d.getVar('PN') or not classextend:
        if (externalsrc or
                ('native' in classextend and
                 d.getVar('EXTERNALSRC:pn-%s-native' % bpn)) or
                ('nativesdk' in classextend and
                 d.getVar('EXTERNALSRC:pn-nativesdk-%s' % bpn)) or
                ('cross' in classextend and
                 d.getVar('EXTERNALSRC:pn-%s-cross' % bpn))):
            d.setVar('BB_DONT_CACHE', '1')

    if externalsrc:
        import oe.recipeutils
        import oe.path

        # Source comes from the external tree; build in a separate directory
        # under WORKDIR unless EXTERNALSRC_BUILD overrides it.
        d.setVar('S', externalsrc)
        if externalsrcbuild:
            d.setVar('B', externalsrcbuild)
        else:
            d.setVar('B', '${WORKDIR}/${BPN}-${PV}/')

        # Keep only the SRC_URI entries that still need fetching/unpacking
        # alongside the external tree: local files, npm shrinkwrap, crate,
        # and kmeta entries. Everything else is dropped.
        local_srcuri = []
        fetch = bb.fetch2.Fetch((d.getVar('SRC_URI') or '').split(), d)
        for url in fetch.urls:
            url_data = fetch.ud[url]
            parm = url_data.parm
            if (url_data.type == 'file' or
                    url_data.type == 'npmsw' or url_data.type == 'crate' or
                    'type' in parm and parm['type'] == 'kmeta'):
                local_srcuri.append(url)

        d.setVar('SRC_URI', ' '.join(local_srcuri))

        # Dummy value because the default function can't be called with blank SRC_URI
        d.setVar('SRCPV', '999')

        # Dependency tracking makes incremental external builds work better
        # with autotools, so re-enable it if it was disabled.
        if d.getVar('CONFIGUREOPT_DEPTRACK') == '--disable-dependency-tracking':
            d.setVar('CONFIGUREOPT_DEPTRACK', '')

        tasks = filter(lambda k: d.getVarFlag(k, "task"), d.keys())

        for task in tasks:
            if task.endswith("_setscene"):
                # sstate is never going to work for external source trees, disable it
                bb.build.deltask(task, d)
            elif os.path.realpath(d.getVar('S')) == os.path.realpath(d.getVar('B')):
                # Since configure will likely touch ${S}, ensure only we lock so one task has access at a time
                d.appendVarFlag(task, "lockfiles", " ${S}/singletask.lock")

        for v in d.keys():
            cleandirs = d.getVarFlag(v, "cleandirs", False)
            if cleandirs:
                # We do not want our source to be wiped out, ever (kernel.bbclass does this for do_clean)
                cleandirs = oe.recipeutils.split_var_value(cleandirs)
                setvalue = False
                for cleandir in cleandirs[:]:
                    if oe.path.is_path_parent(externalsrc, d.expand(cleandir)):
                        cleandirs.remove(cleandir)
                        setvalue = True
                if setvalue:
                    d.setVarFlag(v, 'cleandirs', ' '.join(cleandirs))

        fetch_tasks = ['do_fetch', 'do_unpack']
        # If we deltask do_patch, there's no dependency to ensure do_unpack gets run, so add one
        # Note that we cannot use d.appendVarFlag() here because deps is expected to be a list object, not a string
        d.setVarFlag('do_configure', 'deps', (d.getVarFlag('do_configure', 'deps', False) or []) + ['do_unpack'])

        for task in d.getVar("SRCTREECOVEREDTASKS").split():
            if local_srcuri and task in fetch_tasks:
                # Still have local SRC_URI entries, so fetch/unpack must run
                continue
            bb.build.deltask(task, d)
            if task == 'do_unpack':
                # The reproducible build create_source_date_epoch_stamp function must
                # be run after the source is available and before the
                # do_deploy_source_date_epoch task. In the normal case, it's attached
                # to do_unpack as a postfuncs, but since we removed do_unpack (above)
                # we need to move the function elsewhere. The easiest thing to do is
                # move it into the prefuncs of the do_deploy_source_date_epoch task.
                # This is safe, as externalsrc runs with the source already unpacked.
                d.prependVarFlag('do_deploy_source_date_epoch', 'prefuncs', 'create_source_date_epoch_stamp ')

        d.prependVarFlag('do_compile', 'prefuncs', "externalsrc_compile_prefunc ")
        d.prependVarFlag('do_configure', 'prefuncs', "externalsrc_configure_prefunc ")

        # Re-run compile/configure whenever the external tree content changes
        # (see srctree_hash_files / srctree_configure_hash_files below).
        d.setVarFlag('do_compile', 'file-checksums', '${@srctree_hash_files(d)}')
        d.setVarFlag('do_configure', 'file-checksums', '${@srctree_configure_hash_files(d)}')

        # We don't want the workdir to go away
        d.appendVar('RM_WORK_EXCLUDE', ' ' + d.getVar('PN'))

        # Only chain do_buildclean after do_clean when building in-tree,
        # since that is when the tree may contain build output to clean up.
        bb.build.addtask('do_buildclean',
                         'do_clean' if d.getVar('S') == d.getVar('B') else None,
                         None, d)

        # If B=S the same builddir is used even for different architectures.
        # Thus, use a shared CONFIGURESTAMPFILE and STAMP directory so that
        # change of do_configure task hash is correctly detected and stamps are
        # invalidated if e.g. MACHINE changes.
        if d.getVar('S') == d.getVar('B'):
            configstamp = '${TMPDIR}/work-shared/${PN}/${EXTENDPE}${PV}-${PR}/configure.sstate'
            d.setVar('CONFIGURESTAMPFILE', configstamp)
            d.setVar('STAMP', '${STAMPS_DIR}/work-shared/${PN}/${EXTENDPE}${PV}-${PR}')
            d.setVar('STAMPCLEAN', '${STAMPS_DIR}/work-shared/${PN}/*-*')
}
| 149 | |
python externalsrc_configure_prefunc() {
    # Create the EXTERNALSRC_SYMLINKS convenience links (<name>:<target>)
    # inside the source tree, and hide any newly created links from git.
    s_dir = d.getVar('S')
    # Create desired symlinks
    symlinks = (d.getVar('EXTERNALSRC_SYMLINKS') or '').split()
    newlinks = []
    for symlink in symlinks:
        symsplit = symlink.split(':', 1)
        # Fix: previously symsplit[1] was expanded before checking that a
        # ':' was present, so a malformed entry raised IndexError. Skip
        # entries that have no target instead.
        if len(symsplit) < 2:
            continue
        lnkfile = os.path.join(s_dir, symsplit[0])
        target = d.expand(symsplit[1])
        if os.path.islink(lnkfile):
            # Link already exists, leave it if it points to the right location already
            if os.readlink(lnkfile) == target:
                continue
            os.unlink(lnkfile)
        elif os.path.exists(lnkfile):
            # File/dir exists with same name as link, just leave it alone
            continue
        os.symlink(target, lnkfile)
        newlinks.append(symsplit[0])
    # Hide the symlinks from git
    try:
        git_exclude_file = os.path.join(s_dir, '.git/info/exclude')
        if os.path.exists(git_exclude_file):
            with open(git_exclude_file, 'r+') as efile:
                # Fix: strip the trailing newline before comparing, otherwise
                # the membership test never matches and a duplicate '/name'
                # line is appended on every reconfigure.
                elines = [line.rstrip('\n') for line in efile.readlines()]
                for link in newlinks:
                    if link in elines or '/' + link in elines:
                        continue
                    efile.write('/' + link + '\n')
    except IOError:
        # Best-effort only - an unwritable exclude file is not fatal
        bb.note('Failed to hide EXTERNALSRC_SYMLINKS from git')
}
| 183 | |
python externalsrc_compile_prefunc() {
    # Make it obvious that this is happening, since forgetting about it could lead to much confusion
    recipe_name = d.getVar('PN')
    src_tree = d.getVar('EXTERNALSRC')
    bb.plain('NOTE: %s: compiling from external source tree %s' % (recipe_name, src_tree))
}
| 188 | |
# do_buildclean: run 'make clean' (or equivalent) inside the build directory.
# nostamp so it always runs when requested.
do_buildclean[dirs] = "${S} ${B}"
do_buildclean[nostamp] = "1"
do_buildclean[doc] = "Call 'make clean' or equivalent in ${B}"
externalsrc_do_buildclean() {
	if [ -e Makefile -o -e makefile -o -e GNUmakefile ]; then
		# Remove the link names listed in EXTERNALSRC_SYMLINKS (the part
		# before ':' in each entry), created by the do_configure prefunc
		rm -f ${@' '.join([x.split(':')[0] for x in (d.getVar('EXTERNALSRC_SYMLINKS') or '').split()])}
		if [ "${CLEANBROKEN}" != "1" ]; then
			oe_runmake clean || die "make failed"
		fi
	else
		bbnote "nothing to do - no makefile found"
	fi
}
| 202 | |
def srctree_hash_files(d, srcdir=None):
    """
    Return a file-checksums value for do_compile covering the external tree.

    If the source tree is a git repository (and not the same repository that
    contains TOPDIR, e.g. the build directory's own repo), a sha1 of the tree
    contents - including submodule trees - is written to a marker file inside
    the git dir and '<marker>:True' is returned, so any content change alters
    the checksum. Otherwise '<srcdir>/*:True' is returned, making bitbake
    checksum every file in the tree.
    """
    import shutil
    import subprocess
    import tempfile
    import hashlib

    s_dir = srcdir or d.getVar('EXTERNALSRC')
    git_dir = None

    try:
        git_dir = os.path.join(s_dir,
            subprocess.check_output(['git', '-C', s_dir, 'rev-parse', '--git-dir'], stderr=subprocess.DEVNULL).decode("utf-8").rstrip())
        top_git_dir = os.path.join(s_dir, subprocess.check_output(['git', '-C', d.getVar("TOPDIR"), 'rev-parse', '--git-dir'],
            stderr=subprocess.DEVNULL).decode("utf-8").rstrip())
        if git_dir == top_git_dir:
            # The source tree lives in the same repository as the build
            # directory; treat it as non-git to avoid hashing the whole repo
            git_dir = None
    except subprocess.CalledProcessError:
        pass

    ret = " "
    if git_dir is not None:
        oe_hash_file = os.path.join(git_dir, 'oe-devtool-tree-sha1-%s' % d.getVar('PN'))
        with tempfile.NamedTemporaryFile(prefix='oe-devtool-index') as tmp_index:
            # Clone the index so the user's real index is never modified
            shutil.copyfile(os.path.join(git_dir, 'index'), tmp_index.name)
            # Update our custom index
            env = os.environ.copy()
            env['GIT_INDEX_FILE'] = tmp_index.name
            subprocess.check_output(['git', 'add', '-A', '.'], cwd=s_dir, env=env)
            git_sha1 = subprocess.check_output(['git', 'write-tree'], cwd=s_dir, env=env).decode("utf-8")
            # Fix: check for .gitmodules in the source tree rather than in
            # whatever directory the bitbake process happens to be running
            # from (the git commands below already run with cwd=s_dir).
            if os.path.exists(os.path.join(s_dir, ".gitmodules")):
                submodule_helper = subprocess.check_output(["git", "config", "--file", ".gitmodules", "--get-regexp", "path"], cwd=s_dir, env=env).decode("utf-8")
                for line in submodule_helper.splitlines():
                    module_dir = os.path.join(s_dir, line.rsplit(maxsplit=1)[1])
                    if os.path.isdir(module_dir):
                        # Best effort: errors (e.g. uninitialised submodule)
                        # are deliberately ignored
                        proc = subprocess.Popen(['git', 'add', '-A', '.'], cwd=module_dir, env=env, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
                        proc.communicate()
                        proc = subprocess.Popen(['git', 'write-tree'], cwd=module_dir, env=env, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)
                        stdout, _ = proc.communicate()
                        git_sha1 += stdout.decode("utf-8")
            sha1 = hashlib.sha1(git_sha1.encode("utf-8")).hexdigest()
            with open(oe_hash_file, 'w') as fobj:
                fobj.write(sha1)
        ret = oe_hash_file + ':True'
    else:
        ret = s_dir + '/*:True'
    return ret
| 250 | |
def srctree_configure_hash_files(d):
    """
    Get the list of files that should trigger do_configure to re-execute,
    based on the value of CONFIGURE_FILES
    """
    configure_files = (d.getVar('CONFIGURE_FILES') or '').split()
    items = []
    basenames = set()
    for entry in configure_files:
        if entry.startswith('/'):
            # Absolute path: record it together with its current existence
            items.append('%s:%s' % (entry, os.path.exists(entry)))
        else:
            # Bare name: collect for a walk of the external source tree
            basenames.add(entry)
    if basenames:
        src_root = d.getVar('EXTERNALSRC')
        for dirpath, _, filenames in os.walk(src_root):
            items.extend('%s:True' % os.path.join(dirpath, name)
                         for name in filenames if name in basenames)
    return ' '.join(items)
| 271 | |
| 272 | EXPORT_FUNCTIONS do_buildclean |