# Copyright (C) 2012 Linux Foundation
# Author: Richard Purdie
# Some code and influence taken from srctree.bbclass:
# Copyright (C) 2009 Chris Larson <clarson@kergoth.com>
#
# SPDX-License-Identifier: MIT
#
# externalsrc.bbclass enables use of an existing source tree, usually external
# to the build system, to build a piece of software rather than using the usual
# fetch/unpack/patch process.
#
# To use, add externalsrc to the global inherit and set EXTERNALSRC to point at
# the directory containing the sources you want to use, e.g. from local.conf
# for a recipe called "myrecipe" you would do:
#
# INHERIT += "externalsrc"
# EXTERNALSRC:pn-myrecipe = "/path/to/my/source/tree"
#
# In order to make this class work for both target and native versions (or with
# multilibs/cross or other BBCLASSEXTEND variants), B is set to point to a
# separate directory under the work directory (split source and build
# directories). This is the default, but if circumstances dictate, the build
# directory can be set to the source directory by setting EXTERNALSRC_BUILD to
# the same value, e.g.:
#
# EXTERNALSRC_BUILD:pn-myrecipe = "/path/to/my/source/tree"
#
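# Alternatively, externalsrc can be inherited from an individual recipe or
# bbappend rather than globally (this is essentially what "devtool modify"
# sets up for a recipe), e.g.:
#
# inherit externalsrc
# EXTERNALSRC = "/path/to/my/source/tree"
#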

SRCTREECOVEREDTASKS ?= "do_patch do_unpack do_fetch"
EXTERNALSRC_SYMLINKS ?= "oe-workdir:${WORKDIR} oe-logs:${T}"

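# The tasks in SRCTREECOVEREDTASKS are made redundant by the external source
# tree and are deleted in the anonymous python below (do_fetch/do_unpack are
# kept if local SRC_URI entries still require them). Each EXTERNALSRC_SYMLINKS
# entry has the form <link name>:<target>; the links are created in ${S} by
# externalsrc_configure_prefunc().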
python () {
    externalsrc = d.getVar('EXTERNALSRC')
    externalsrcbuild = d.getVar('EXTERNALSRC_BUILD')

    if externalsrc and not externalsrc.startswith("/"):
        bb.error("EXTERNALSRC must be an absolute path")
    if externalsrcbuild and not externalsrcbuild.startswith("/"):
        bb.error("EXTERNALSRC_BUILD must be an absolute path")

    # If this is the base recipe and EXTERNALSRC is set for it or any of its
    # derivatives, then enable BB_DONT_CACHE to force the recipe to always be
    # re-parsed so that the file-checksums function for do_compile is run every
    # time.
    bpn = d.getVar('BPN')
    classextend = (d.getVar('BBCLASSEXTEND') or '').split()
    if bpn == d.getVar('PN') or not classextend:
        if (externalsrc or
                ('native' in classextend and
                 d.getVar('EXTERNALSRC:pn-%s-native' % bpn)) or
                ('nativesdk' in classextend and
                 d.getVar('EXTERNALSRC:pn-nativesdk-%s' % bpn)) or
                ('cross' in classextend and
                 d.getVar('EXTERNALSRC:pn-%s-cross' % bpn))):
            d.setVar('BB_DONT_CACHE', '1')

    if externalsrc:
        import oe.recipeutils
        import oe.path

        d.setVar('S', externalsrc)
        if externalsrcbuild:
            d.setVar('B', externalsrcbuild)
        else:
            d.setVar('B', '${WORKDIR}/${BPN}-${PV}')

        bb.fetch.get_hashvalue(d)
        local_srcuri = []
        fetch = bb.fetch2.Fetch((d.getVar('SRC_URI') or '').split(), d)
        for url in fetch.urls:
            url_data = fetch.ud[url]
            parm = url_data.parm
            if url_data.type in ['file', 'npmsw', 'crate'] or parm.get('type') in ['kmeta', 'git-dependency']:
                local_srcuri.append(url)

        d.setVar('SRC_URI', ' '.join(local_srcuri))

        # sstate is never going to work for external source trees, disable it
        d.setVar('SSTATE_SKIP_CREATION', '1')

        if d.getVar('CONFIGUREOPT_DEPTRACK') == '--disable-dependency-tracking':
            d.setVar('CONFIGUREOPT_DEPTRACK', '')

        tasks = filter(lambda k: d.getVarFlag(k, "task"), d.keys())

        for task in tasks:
            if os.path.realpath(d.getVar('S')) == os.path.realpath(d.getVar('B')):
                # Since configure will likely touch ${S}, take a lock so only one task has access at a time
                d.appendVarFlag(task, "lockfiles", " ${S}/singletask.lock")

        for v in d.keys():
            cleandirs = d.getVarFlag(v, "cleandirs", False)
            if cleandirs:
                # We do not want our source to be wiped out, ever (kernel.bbclass does this for do_clean)
                cleandirs = oe.recipeutils.split_var_value(cleandirs)
                setvalue = False
                for cleandir in cleandirs[:]:
                    if oe.path.is_path_parent(externalsrc, d.expand(cleandir)):
                        cleandirs.remove(cleandir)
                        setvalue = True
                if setvalue:
                    d.setVarFlag(v, 'cleandirs', ' '.join(cleandirs))

        fetch_tasks = ['do_fetch', 'do_unpack']
        # If we deltask do_patch, there's no dependency to ensure do_unpack gets run, so add one
        # Note that we cannot use d.appendVarFlag() here because deps is expected to be a list object, not a string
        d.setVarFlag('do_configure', 'deps', (d.getVarFlag('do_configure', 'deps', False) or []) + ['do_unpack'])
        d.setVarFlag('do_populate_lic', 'deps', (d.getVarFlag('do_populate_lic', 'deps', False) or []) + ['do_unpack'])

        for task in d.getVar("SRCTREECOVEREDTASKS").split():
            if local_srcuri and task in fetch_tasks:
                continue
            bb.build.deltask(task, d)
            if task == 'do_unpack':
                # The reproducible build create_source_date_epoch_stamp function must
                # be run after the source is available and before the
                # do_deploy_source_date_epoch task. In the normal case, it's attached
                # to do_unpack as a postfunc, but since we removed do_unpack (above)
                # we need to move the function elsewhere. The easiest thing to do is
                # move it into the prefuncs of the do_deploy_source_date_epoch task.
                # This is safe, as externalsrc runs with the source already unpacked.
                d.prependVarFlag('do_deploy_source_date_epoch', 'prefuncs', 'create_source_date_epoch_stamp ')

        d.prependVarFlag('do_compile', 'prefuncs', "externalsrc_compile_prefunc ")
        d.prependVarFlag('do_configure', 'prefuncs', "externalsrc_configure_prefunc ")

        d.setVarFlag('do_compile', 'file-checksums', '${@srctree_hash_files(d)}')
        d.setVarFlag('do_configure', 'file-checksums', '${@srctree_configure_hash_files(d)}')

        d.appendVarFlag('do_compile', 'prefuncs', ' fetcher_hashes_dummyfunc')
        d.appendVarFlag('do_configure', 'prefuncs', ' fetcher_hashes_dummyfunc')

        # We don't want the workdir to go away
        d.appendVar('RM_WORK_EXCLUDE', ' ' + d.getVar('PN'))

        bb.build.addtask('do_buildclean',
                         'do_clean' if d.getVar('S') == d.getVar('B') else None,
                         None, d)

        # If B=S the same builddir is used even for different architectures.
        # Thus, use a shared CONFIGURESTAMPFILE and STAMP directory so that a
        # change of the do_configure task hash is correctly detected and stamps
        # are invalidated if e.g. MACHINE changes.
        if d.getVar('S') == d.getVar('B'):
            configstamp = '${TMPDIR}/work-shared/${PN}/${EXTENDPE}${PV}-${PR}/configure.sstate'
            d.setVar('CONFIGURESTAMPFILE', configstamp)
            d.setVar('STAMP', '${STAMPS_DIR}/work-shared/${PN}/${EXTENDPE}${PV}-${PR}')
            d.setVar('STAMPCLEAN', '${STAMPS_DIR}/work-shared/${PN}/*-*')
}

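# Prefunc for do_configure: create the EXTERNALSRC_SYMLINKS links in ${S} and
# hide them from git via .git/info/exclude so they do not show up as untracked
# files in the external source tree.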
python externalsrc_configure_prefunc() {
    s_dir = d.getVar('S')
    # Create desired symlinks
    symlinks = (d.getVar('EXTERNALSRC_SYMLINKS') or '').split()
    newlinks = []
    for symlink in symlinks:
        symsplit = symlink.split(':', 1)
        lnkfile = os.path.join(s_dir, symsplit[0])
        if len(symsplit) > 1:
            target = d.expand(symsplit[1])
            if os.path.islink(lnkfile):
                # Link already exists, leave it if it points to the right location already
                if os.readlink(lnkfile) == target:
                    continue
                os.unlink(lnkfile)
            elif os.path.exists(lnkfile):
                # File/dir exists with same name as link, just leave it alone
                continue
            os.symlink(target, lnkfile)
            newlinks.append(symsplit[0])
    # Hide the symlinks from git
    try:
        git_exclude_file = os.path.join(s_dir, '.git/info/exclude')
        if os.path.exists(git_exclude_file):
            with open(git_exclude_file, 'r+') as efile:
                # Strip trailing newlines so the duplicate check below matches
                elines = [line.rstrip('\n') for line in efile.readlines()]
                for link in newlinks:
                    if link in elines or '/' + link in elines:
                        continue
                    efile.write('/' + link + '\n')
    except IOError:
        bb.note('Failed to hide EXTERNALSRC_SYMLINKS from git')
}

python externalsrc_compile_prefunc() {
    # Make it obvious that this is happening, since forgetting about it could lead to much confusion
    bb.plain('NOTE: %s: compiling from external source tree %s' % (d.getVar('PN'), d.getVar('EXTERNALSRC')))
}

do_buildclean[dirs] = "${S} ${B}"
do_buildclean[nostamp] = "1"
do_buildclean[doc] = "Call 'make clean' or equivalent in ${B}"
externalsrc_do_buildclean() {
	if [ -e Makefile -o -e makefile -o -e GNUmakefile ]; then
		rm -f ${@' '.join([x.split(':')[0] for x in (d.getVar('EXTERNALSRC_SYMLINKS') or '').split()])}
		if [ "${CLEANBROKEN}" != "1" ]; then
			oe_runmake clean || die "make failed"
		fi
	else
		bbnote "nothing to do - no makefile found"
	fi
}

def srctree_hash_files(d, srcdir=None):
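    """
    Return a file-checksums value for do_compile covering the external source
    tree. If the tree is a git repository (and not the same repository that
    contains TOPDIR), write a hash of the working tree (including submodules)
    to a file under .git and return that file; otherwise fall back to listing
    every file in the tree.
    """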
    import shutil
    import subprocess
    import tempfile
    import hashlib

    s_dir = srcdir or d.getVar('EXTERNALSRC')
    git_dir = None

    try:
        git_dir = os.path.join(s_dir,
            subprocess.check_output(['git', '-C', s_dir, 'rev-parse', '--git-dir'], stderr=subprocess.DEVNULL).decode("utf-8").rstrip())
        top_git_dir = os.path.join(d.getVar("TOPDIR"),
            subprocess.check_output(['git', '-C', d.getVar("TOPDIR"), 'rev-parse', '--git-dir'], stderr=subprocess.DEVNULL).decode("utf-8").rstrip())
        if git_dir == top_git_dir:
            git_dir = None
    except subprocess.CalledProcessError:
        pass

    ret = " "
    if git_dir is not None:
        oe_hash_file = os.path.join(git_dir, 'oe-devtool-tree-sha1-%s' % d.getVar('PN'))
        with tempfile.NamedTemporaryFile(prefix='oe-devtool-index') as tmp_index:
            # Clone index
            shutil.copyfile(os.path.join(git_dir, 'index'), tmp_index.name)
            # Update our custom index
            env = os.environ.copy()
            env['GIT_INDEX_FILE'] = tmp_index.name
            subprocess.check_output(['git', 'add', '-A', '.'], cwd=s_dir, env=env)
            git_sha1 = subprocess.check_output(['git', 'write-tree'], cwd=s_dir, env=env).decode("utf-8")
            if os.path.exists(os.path.join(s_dir, ".gitmodules")) and os.path.getsize(os.path.join(s_dir, ".gitmodules")) > 0:
                submodule_helper = subprocess.check_output(["git", "config", "--file", ".gitmodules", "--get-regexp", "path"], cwd=s_dir, env=env).decode("utf-8")
                for line in submodule_helper.splitlines():
                    module_dir = os.path.join(s_dir, line.rsplit(maxsplit=1)[1])
                    if os.path.isdir(module_dir):
                        proc = subprocess.Popen(['git', 'add', '-A', '.'], cwd=module_dir, env=env, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
                        proc.communicate()
                        proc = subprocess.Popen(['git', 'write-tree'], cwd=module_dir, env=env, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)
                        stdout, _ = proc.communicate()
                        git_sha1 += stdout.decode("utf-8")
            sha1 = hashlib.sha1(git_sha1.encode("utf-8")).hexdigest()
        with open(oe_hash_file, 'w') as fobj:
            fobj.write(sha1)
        ret = oe_hash_file + ':True'
    else:
        ret = s_dir + '/*:True'
    return ret

def srctree_configure_hash_files(d):
    """
    Get the list of files that should trigger do_configure to re-execute,
    based on the value of CONFIGURE_FILES. Absolute entries are used as-is,
    while relative entries are treated as fnmatch patterns matched against
    every file found under the external source tree.
    """
    import fnmatch

    in_files = (d.getVar('CONFIGURE_FILES') or '').split()
    out_items = []
    search_files = []
    for entry in in_files:
        if entry.startswith('/'):
            out_items.append('%s:%s' % (entry, os.path.exists(entry)))
        else:
            search_files.append(entry)
    if search_files:
        s_dir = d.getVar('EXTERNALSRC')
        for root, _, files in os.walk(s_dir):
            for p in search_files:
                for f in fnmatch.filter(files, p):
                    out_items.append('%s:True' % os.path.join(root, f))
    return ' '.join(out_items)

EXPORT_FUNCTIONS do_buildclean