Patrick Williams | 92b42cb | 2022-09-03 06:53:57 -0500 | [diff] [blame] | 1 | # Copyright (C) 2012 Linux Foundation |
| 2 | # Author: Richard Purdie |
| 3 | # Some code and influence taken from srctree.bbclass: |
| 4 | # Copyright (C) 2009 Chris Larson <clarson@kergoth.com> |
| 5 | # |
| 6 | # SPDX-License-Identifier: MIT |
| 7 | # |
| 8 | # externalsrc.bbclass enables use of an existing source tree, usually external to |
| 9 | # the build system to build a piece of software rather than the usual fetch/unpack/patch |
| 10 | # process. |
| 11 | # |
| 12 | # To use, add externalsrc to the global inherit and set EXTERNALSRC to point at the |
| 13 | # directory you want to use containing the sources e.g. from local.conf for a recipe |
| 14 | # called "myrecipe" you would do: |
| 15 | # |
| 16 | # INHERIT += "externalsrc" |
| 17 | # EXTERNALSRC:pn-myrecipe = "/path/to/my/source/tree" |
| 18 | # |
| 19 | # In order to make this class work for both target and native versions (or with |
| 20 | # multilibs/cross or other BBCLASSEXTEND variants), B is set to point to a separate |
| 21 | # directory under the work directory (split source and build directories). This is |
| 22 | # the default, but the build directory can be set to the source directory if |
| 23 | # circumstances dictate by setting EXTERNALSRC_BUILD to the same value, e.g.: |
| 24 | # |
| 25 | # EXTERNALSRC_BUILD:pn-myrecipe = "/path/to/my/source/tree" |
| 26 | # |
| 27 | |
| 28 | SRCTREECOVEREDTASKS ?= "do_patch do_unpack do_fetch" |
| 29 | EXTERNALSRC_SYMLINKS ?= "oe-workdir:${WORKDIR} oe-logs:${T}" |
| 30 | |
# Anonymous function, run at recipe parse time: when EXTERNALSRC is set,
# rewire the recipe to build from the external tree instead of going through
# the normal fetch/unpack/patch flow.
python () {
    externalsrc = d.getVar('EXTERNALSRC')
    externalsrcbuild = d.getVar('EXTERNALSRC_BUILD')

    # Relative paths would resolve unpredictably at task run time
    if externalsrc and not externalsrc.startswith("/"):
        bb.error("EXTERNALSRC must be an absolute path")
    if externalsrcbuild and not externalsrcbuild.startswith("/"):
        bb.error("EXTERNALSRC_BUILD must be an absolute path")

    # If this is the base recipe and EXTERNALSRC is set for it or any of its
    # derivatives, then enable BB_DONT_CACHE to force the recipe to always be
    # re-parsed so that the file-checksums function for do_compile is run every
    # time.
    bpn = d.getVar('BPN')
    classextend = (d.getVar('BBCLASSEXTEND') or '').split()
    if bpn == d.getVar('PN') or not classextend:
        if (externalsrc or
                ('native' in classextend and
                 d.getVar('EXTERNALSRC:pn-%s-native' % bpn)) or
                ('nativesdk' in classextend and
                 d.getVar('EXTERNALSRC:pn-nativesdk-%s' % bpn)) or
                ('cross' in classextend and
                 d.getVar('EXTERNALSRC:pn-%s-cross' % bpn))):
            d.setVar('BB_DONT_CACHE', '1')

    if externalsrc:
        import oe.recipeutils
        import oe.path

        # Point S at the external tree; B defaults to a separate per-variant
        # build directory unless EXTERNALSRC_BUILD overrides it
        d.setVar('S', externalsrc)
        if externalsrcbuild:
            d.setVar('B', externalsrcbuild)
        else:
            d.setVar('B', '${WORKDIR}/${BPN}-${PV}')

        bb.fetch.get_hashvalue(d)
        # Strip SRC_URI down to entries that are local / still needed even
        # without a remote fetch (file://, npm shrinkwrap, crates, kernel
        # metadata, git dependencies)
        local_srcuri = []
        fetch = bb.fetch2.Fetch((d.getVar('SRC_URI') or '').split(), d)
        for url in fetch.urls:
            url_data = fetch.ud[url]
            parm = url_data.parm
            if url_data.type in ['file', 'npmsw', 'crate'] or parm.get('type') in ['kmeta', 'git-dependency']:
                local_srcuri.append(url)

        d.setVar('SRC_URI', ' '.join(local_srcuri))

        # sstate is never going to work for external source trees, disable it
        d.setVar('SSTATE_SKIP_CREATION', '1')

        # Dependency tracking would defeat the point of rebuilding from a
        # live source tree
        if d.getVar('CONFIGUREOPT_DEPTRACK') == '--disable-dependency-tracking':
            d.setVar('CONFIGUREOPT_DEPTRACK', '')

        tasks = filter(lambda k: d.getVarFlag(k, "task"), d.keys())

        for task in tasks:
            if os.path.realpath(d.getVar('S')) == os.path.realpath(d.getVar('B')):
                # Since configure will likely touch ${S}, ensure only we lock so one task has access at a time
                d.appendVarFlag(task, "lockfiles", " ${S}/singletask.lock")

        for v in d.keys():
            cleandirs = d.getVarFlag(v, "cleandirs", False)
            if cleandirs:
                # We do not want our source to be wiped out, ever (kernel.bbclass does this for do_clean)
                cleandirs = oe.recipeutils.split_var_value(cleandirs)
                setvalue = False
                for cleandir in cleandirs[:]:
                    if oe.path.is_path_parent(externalsrc, d.expand(cleandir)):
                        cleandirs.remove(cleandir)
                        setvalue = True
                if setvalue:
                    d.setVarFlag(v, 'cleandirs', ' '.join(cleandirs))

        fetch_tasks = ['do_fetch', 'do_unpack']
        # If we deltask do_patch, there's no dependency to ensure do_unpack gets run, so add one
        # Note that we cannot use d.appendVarFlag() here because deps is expected to be a list object, not a string
        d.setVarFlag('do_configure', 'deps', (d.getVarFlag('do_configure', 'deps', False) or []) + ['do_unpack'])

        for task in d.getVar("SRCTREECOVEREDTASKS").split():
            # Keep do_fetch/do_unpack alive if any local SRC_URI entries remain
            if local_srcuri and task in fetch_tasks:
                continue
            bb.build.deltask(task, d)
            if task == 'do_unpack':
                # The reproducible build create_source_date_epoch_stamp function must
                # be run after the source is available and before the
                # do_deploy_source_date_epoch task. In the normal case, it's attached
                # to do_unpack as a postfuncs, but since we removed do_unpack (above)
                # we need to move the function elsewhere. The easiest thing to do is
                # move it into the prefuncs of the do_deploy_source_date_epoch task.
                # This is safe, as externalsrc runs with the source already unpacked.
                d.prependVarFlag('do_deploy_source_date_epoch', 'prefuncs', 'create_source_date_epoch_stamp ')

        d.prependVarFlag('do_compile', 'prefuncs', "externalsrc_compile_prefunc ")
        d.prependVarFlag('do_configure', 'prefuncs', "externalsrc_configure_prefunc ")

        # Re-run compile/configure whenever the tracked source/configure
        # files change (see the helper functions at the bottom of this class)
        d.setVarFlag('do_compile', 'file-checksums', '${@srctree_hash_files(d)}')
        d.setVarFlag('do_configure', 'file-checksums', '${@srctree_configure_hash_files(d)}')

        d.appendVarFlag('do_compile', 'prefuncs', ' fetcher_hashes_dummyfunc')
        d.appendVarFlag('do_configure', 'prefuncs', ' fetcher_hashes_dummyfunc')

        # We don't want the workdir to go away
        d.appendVar('RM_WORK_EXCLUDE', ' ' + d.getVar('PN'))

        # do_buildclean only needs to precede do_clean when building in-tree
        bb.build.addtask('do_buildclean',
                         'do_clean' if d.getVar('S') == d.getVar('B') else None,
                         None, d)

        # If B=S the same builddir is used even for different architectures.
        # Thus, use a shared CONFIGURESTAMPFILE and STAMP directory so that
        # change of do_configure task hash is correctly detected and stamps are
        # invalidated if e.g. MACHINE changes.
        if d.getVar('S') == d.getVar('B'):
            configstamp = '${TMPDIR}/work-shared/${PN}/${EXTENDPE}${PV}-${PR}/configure.sstate'
            d.setVar('CONFIGURESTAMPFILE', configstamp)
            d.setVar('STAMP', '${STAMPS_DIR}/work-shared/${PN}/${EXTENDPE}${PV}-${PR}')
            d.setVar('STAMPCLEAN', '${STAMPS_DIR}/work-shared/${PN}/*-*')
}
| 148 | |
# Runs before do_configure: create the EXTERNALSRC_SYMLINKS links inside ${S}
# and hide them from git so they don't show up as untracked files.
python externalsrc_configure_prefunc() {
    s_dir = d.getVar('S')
    # Create desired symlinks
    symlinks = (d.getVar('EXTERNALSRC_SYMLINKS') or '').split()
    newlinks = []
    for symlink in symlinks:
        symsplit = symlink.split(':', 1)
        lnkfile = os.path.join(s_dir, symsplit[0])
        if len(symsplit) > 1:
            # Only expand the target once we know a ":" separated target was
            # actually given, otherwise a malformed entry would raise IndexError
            target = d.expand(symsplit[1])
            if os.path.islink(lnkfile):
                # Link already exists, leave it if it points to the right location already
                if os.readlink(lnkfile) == target:
                    continue
                os.unlink(lnkfile)
            elif os.path.exists(lnkfile):
                # File/dir exists with same name as link, just leave it alone
                continue
            os.symlink(target, lnkfile)
            newlinks.append(symsplit[0])
    # Hide the symlinks from git
    try:
        git_exclude_file = os.path.join(s_dir, '.git/info/exclude')
        if os.path.exists(git_exclude_file):
            with open(git_exclude_file, 'r+') as efile:
                # Strip trailing newlines so the membership test below matches
                # existing entries; otherwise a new "/link" line would be
                # appended on every configure run
                elines = [line.rstrip('\n') for line in efile.readlines()]
                for link in newlinks:
                    if link in elines or '/' + link in elines:
                        continue
                    # File position is at EOF after readlines(), so this appends
                    efile.write('/' + link + '\n')
    except IOError as ioe:
        # Best-effort only: a read-only or oddly-permissioned .git must not
        # break the build
        bb.note('Failed to hide EXTERNALSRC_SYMLINKS from git: %s' % ioe)
}
| 182 | |
python externalsrc_compile_prefunc() {
    # Announce the external source tree loudly on every compile, since
    # forgetting about it could lead to much confusion.
    pn = d.getVar('PN')
    srctree = d.getVar('EXTERNALSRC')
    bb.plain('NOTE: %s: compiling from external source tree %s' % (pn, srctree))
}
| 187 | |
# do_buildclean: best-effort "make clean" in the external build directory.
# nostamp so it always runs when explicitly invoked.
do_buildclean[dirs] = "${S} ${B}"
do_buildclean[nostamp] = "1"
do_buildclean[doc] = "Call 'make clean' or equivalent in ${B}"
externalsrc_do_buildclean() {
	if [ -e Makefile -o -e makefile -o -e GNUmakefile ]; then
		# Remove the EXTERNALSRC_SYMLINKS link names (the part before ":");
		# do_configure will recreate them on the next build
		rm -f ${@' '.join([x.split(':')[0] for x in (d.getVar('EXTERNALSRC_SYMLINKS') or '').split()])}
		if [ "${CLEANBROKEN}" != "1" ]; then
			oe_runmake clean || die "make failed"
		fi
	else
		bbnote "nothing to do - no makefile found"
	fi
}
| 201 | |
def srctree_hash_files(d, srcdir=None):
    """
    Compute the do_compile file-checksums value for the external source tree.

    If the tree is a git checkout (and not the same repository that TOPDIR
    itself lives in), hash the current work-tree contents via a temporary git
    index, store the digest in a marker file inside the git dir and return
    "<markerfile>:True" so bitbake checksums just that file.  Otherwise fall
    back to "<srcdir>/*:True" so bitbake checksums every file directly.
    """
    import shutil
    import subprocess
    import tempfile
    import hashlib

    s_dir = srcdir or d.getVar('EXTERNALSRC')
    git_dir = None

    try:
        # rev-parse --git-dir may return a relative path, hence the join
        git_dir = os.path.join(s_dir,
            subprocess.check_output(['git', '-C', s_dir, 'rev-parse', '--git-dir'], stderr=subprocess.DEVNULL).decode("utf-8").rstrip())
        top_git_dir = os.path.join(d.getVar("TOPDIR"),
            subprocess.check_output(['git', '-C', d.getVar("TOPDIR"), 'rev-parse', '--git-dir'], stderr=subprocess.DEVNULL).decode("utf-8").rstrip())
        # If the source tree resolves to the build directory's own repository,
        # don't treat it as a git checkout of the source
        if git_dir == top_git_dir:
            git_dir = None
    except subprocess.CalledProcessError:
        # Not inside a git checkout at all; fall back to glob mode below
        pass

    ret = " "
    if git_dir is not None:
        oe_hash_file = os.path.join(git_dir, 'oe-devtool-tree-sha1-%s' % d.getVar('PN'))
        with tempfile.NamedTemporaryFile(prefix='oe-devtool-index') as tmp_index:
            # Clone index
            shutil.copyfile(os.path.join(git_dir, 'index'), tmp_index.name)
            # Update our custom index
            env = os.environ.copy()
            env['GIT_INDEX_FILE'] = tmp_index.name
            # Stage everything into the throwaway index, then write-tree to get
            # a sha1 covering the full work-tree contents
            subprocess.check_output(['git', 'add', '-A', '.'], cwd=s_dir, env=env)
            git_sha1 = subprocess.check_output(['git', 'write-tree'], cwd=s_dir, env=env).decode("utf-8")
            # Fold in the contents of any populated submodules as well
            if os.path.exists(os.path.join(s_dir, ".gitmodules")) and os.path.getsize(os.path.join(s_dir, ".gitmodules")) > 0:
                submodule_helper = subprocess.check_output(["git", "config", "--file", ".gitmodules", "--get-regexp", "path"], cwd=s_dir, env=env).decode("utf-8")
                for line in submodule_helper.splitlines():
                    # Each line is "submodule.<name>.path <path>"
                    module_dir = os.path.join(s_dir, line.rsplit(maxsplit=1)[1])
                    if os.path.isdir(module_dir):
                        proc = subprocess.Popen(['git', 'add', '-A', '.'], cwd=module_dir, env=env, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
                        proc.communicate()
                        proc = subprocess.Popen(['git', 'write-tree'], cwd=module_dir, env=env, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)
                        stdout, _ = proc.communicate()
                        git_sha1 += stdout.decode("utf-8")
            sha1 = hashlib.sha1(git_sha1.encode("utf-8")).hexdigest()
        with open(oe_hash_file, 'w') as fobj:
            fobj.write(sha1)
        ret = oe_hash_file + ':True'
    else:
        # Non-git tree: checksum every file under the source directory
        ret = s_dir + '/*:True'
    return ret
| 249 | |
def srctree_configure_hash_files(d):
    """
    Get the list of files that should trigger do_configure to re-execute,
    based on the value of CONFIGURE_FILES
    """
    import fnmatch

    entries = (d.getVar('CONFIGURE_FILES') or '').split()
    # Absolute entries are recorded directly (with their current existence
    # state); relative entries are treated as glob patterns to search for
    patterns = []
    out_items = []
    for entry in entries:
        if entry.startswith('/'):
            out_items.append('%s:%s' % (entry, os.path.exists(entry)))
        else:
            patterns.append(entry)
    if patterns:
        # Walk the external source tree matching each pattern per directory
        for dirpath, _, filenames in os.walk(d.getVar('EXTERNALSRC')):
            for pattern in patterns:
                out_items.extend('%s:True' % os.path.join(dirpath, name)
                                 for name in fnmatch.filter(filenames, pattern))
    return ' '.join(out_items)
| 272 | |
| 273 | EXPORT_FUNCTIONS do_buildclean |