# Script utility functions
#
# Copyright (C) 2014 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#

import argparse
import glob
import logging
import os
import random
import shlex
import shutil
import string
import subprocess
import sys
import tempfile
import threading
import importlib
import importlib.machinery
import importlib.util

class KeepAliveStreamHandler(logging.StreamHandler):
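    """StreamHandler that emits periodic "Keepalive message" records.

    A background thread waits on a condition variable; if the wait times out
    before any other record is emitted, a keepalive record is logged. Every
    normal emit() resets the timer. This can be useful when the output is
    consumed by something that terminates jobs which stay silent for too long.
    """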
    def __init__(self, keepalive=True, **kwargs):
        super().__init__(**kwargs)
        if keepalive is True:
            keepalive = 5000  # default timeout
        self._timeout = threading.Condition()
        self._stop = False

        # The background thread waits on the condition; if the wait times out
        # without being notified, emit a keepalive message
        def thread():
            while not self._stop:
                with self._timeout:
                    if not self._timeout.wait(keepalive):
                        self.emit(logging.LogRecord("keepalive", logging.INFO,
                                                    None, None, "Keepalive message", None, None))

        self._thread = threading.Thread(target=thread, daemon=True)
        self._thread.start()

    def close(self):
        # Mark the thread to stop and notify it
        self._stop = True
        with self._timeout:
            self._timeout.notify()
        # Wait for it to join
        self._thread.join()
        super().close()

    def emit(self, record):
        super().emit(record)
        # Trigger a timer reset
        with self._timeout:
            self._timeout.notify()

def logger_create(name, stream=None, keepalive=None):
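    """Create a logger that writes "LEVEL: message" lines to *stream* at INFO level.

    If *keepalive* is given, a KeepAliveStreamHandler is used instead of a plain
    StreamHandler so that long-running operations still produce periodic output.
    """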
    logger = logging.getLogger(name)
    if keepalive is not None:
        loggerhandler = KeepAliveStreamHandler(stream=stream, keepalive=keepalive)
    else:
        loggerhandler = logging.StreamHandler(stream=stream)
    loggerhandler.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
    logger.addHandler(loggerhandler)
    logger.setLevel(logging.INFO)
    return logger

def logger_setup_color(logger, color='auto'):
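    """Enable colour output on *logger*'s BBLogFormatter-based stream handlers.

    With color='auto' (the default) colour is only enabled if the handler's
    stream is a terminal; color='always' forces it on.
    """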
    from bb.msg import BBLogFormatter

    for handler in logger.handlers:
        if (isinstance(handler, logging.StreamHandler) and
                isinstance(handler.formatter, BBLogFormatter)):
            if color == 'always' or (color == 'auto' and handler.stream.isatty()):
                handler.formatter.enable_color()


def load_plugins(logger, plugins, pluginpath):
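    """Import every Python module found in *pluginpath* and append it to *plugins*.

    Modules already present in *plugins* (and __init__) are skipped. If a module
    provides a plugin_init() function, it is called with the plugin list before
    the module is appended.
    """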

    def load_plugin(name):
        logger.debug('Loading plugin %s' % name)
        spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath])
        if spec:
            mod = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(mod)
            return mod

    def plugin_name(filename):
        return os.path.splitext(os.path.basename(filename))[0]

    known_plugins = [plugin_name(p.__name__) for p in plugins]
    logger.debug('Loading plugins from %s...' % pluginpath)
    for fn in glob.glob(os.path.join(pluginpath, '*.py')):
        name = plugin_name(fn)
        if name != '__init__' and name not in known_plugins:
            plugin = load_plugin(name)
            if hasattr(plugin, 'plugin_init'):
                plugin.plugin_init(plugins)
            plugins.append(plugin)


def git_convert_standalone_clone(repodir):
    """If the specified directory is a git repository, ensure it is a standalone clone"""
    import bb.process
    if os.path.exists(os.path.join(repodir, '.git')):
        alternatesfile = os.path.join(repodir, '.git', 'objects', 'info', 'alternates')
        if os.path.exists(alternatesfile):
            # This will have been cloned with -s, so we need to convert it so that
            # none of its contents are shared with the original repository
            bb.process.run('git repack -a', cwd=repodir)
            os.remove(alternatesfile)

def _get_temp_recipe_dir(d):
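    """Return a writeable directory matched by BBFILES in which a temporary recipe
    can be placed (preferring the devtool workspace), or None if no suitable
    location can be found."""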
    # This is a little bit hacky but we need to find a place where we can put
    # the recipe so that bitbake can find it. We're going to delete it at the
    # end so it doesn't really matter where we put it.
    bbfiles = d.getVar('BBFILES').split()
    fetchrecipedir = None
    for pth in bbfiles:
        if pth.endswith('.bb'):
            pthdir = os.path.dirname(pth)
            if os.access(os.path.dirname(os.path.dirname(pthdir)), os.W_OK):
                fetchrecipedir = pthdir.replace('*', 'recipetool')
                if pthdir.endswith('workspace/recipes/*'):
                    # Prefer the workspace
                    break
    return fetchrecipedir

class FetchUrlFailure(Exception):
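    """Raised by fetch_url() when fetching/unpacking the given URL fails."""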
    def __init__(self, url):
        self.url = url
    def __str__(self):
        return "Failed to fetch URL %s" % self.url

def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False, mirrors=False):
    """
    Fetch the specified URL using the normal do_fetch and do_unpack tasks, so
    that any dependencies needed to support the fetch operation are taken care
    of automatically. Returns a tuple of (checksums, tmpdir), where tmpdir is
    only set if preserve_tmp is specified (otherwise it is None).
    """

    import bb

    checksums = {}
    fetchrecipepn = None

    # We need to put our temp directory under ${BASE_WORKDIR} otherwise
    # we may have problems with the recipe-specific sysroot population
    tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR')
    bb.utils.mkdirhier(tmpparent)
    tmpdir = tempfile.mkdtemp(prefix='recipetool-', dir=tmpparent)
    try:
        tmpworkdir = os.path.join(tmpdir, 'work')
        logger.debug('fetch_url: temp dir is %s' % tmpdir)

        fetchrecipedir = _get_temp_recipe_dir(tinfoil.config_data)
        if not fetchrecipedir:
            logger.error('Searched BBFILES but unable to find a writeable place to put a temporary recipe')
            sys.exit(1)
        fetchrecipe = None
        bb.utils.mkdirhier(fetchrecipedir)
        try:
            # Generate a dummy recipe so we can follow more or less normal paths
            # for do_fetch and do_unpack
            # We avoid the tempfile functions here because they can produce
            # underscores, which aren't allowed in recipe file names except to
            # separate the version
            rndstring = ''.join(random.choice(string.ascii_lowercase + string.digits) for _ in range(8))
            fetchrecipe = os.path.join(fetchrecipedir, 'tmp-recipetool-%s.bb' % rndstring)
            fetchrecipepn = os.path.splitext(os.path.basename(fetchrecipe))[0]
            logger.debug('Generating initial recipe %s for fetching' % fetchrecipe)
            with open(fetchrecipe, 'w') as f:
                # We don't want to have to specify LIC_FILES_CHKSUM
                f.write('LICENSE = "CLOSED"\n')
                # We don't need the cross-compiler
                f.write('INHIBIT_DEFAULT_DEPS = "1"\n')
                # We don't have the checksums yet so we can't require them
                f.write('BB_STRICT_CHECKSUM = "ignore"\n')
                f.write('SRC_URI = "%s"\n' % srcuri)
                f.write('SRCREV = "%s"\n' % srcrev)
                f.write('PV = "0.0+${SRCPV}"\n')
                f.write('WORKDIR = "%s"\n' % tmpworkdir)
                # Set S out of the way so it doesn't get created under the workdir
                f.write('S = "%s"\n' % os.path.join(tmpdir, 'emptysrc'))
                if not mirrors:
                    # We do not need PREMIRRORS since we are almost certainly
                    # fetching new source rather than something that has already
                    # been fetched. Hence, we disable them by default.
                    # However, we provide an option for users to enable them.
                    f.write('PREMIRRORS = ""\n')
                    f.write('MIRRORS = ""\n')

            logger.info('Fetching %s...' % srcuri)

            # FIXME this is too noisy at the moment

            # Parse recipes so our new recipe gets picked up
            tinfoil.parse_recipes()

            def eventhandler(event):
                if isinstance(event, bb.fetch2.MissingChecksumEvent):
                    checksums.update(event.checksums)
                    return True
                return False

            # Run the fetch + unpack tasks
            res = tinfoil.build_targets(fetchrecipepn,
                                        'do_unpack',
                                        handle_events=True,
                                        extra_events=['bb.fetch2.MissingChecksumEvent'],
                                        event_callback=eventhandler)
            if not res:
                raise FetchUrlFailure(srcuri)

            # Remove unneeded directories
            rd = tinfoil.parse_recipe(fetchrecipepn)
            if rd:
                pathvars = ['T', 'RECIPE_SYSROOT', 'RECIPE_SYSROOT_NATIVE']
                for pathvar in pathvars:
                    path = rd.getVar(pathvar)
                    if os.path.exists(path):
                        shutil.rmtree(path)
        finally:
            if fetchrecipe:
                try:
                    os.remove(fetchrecipe)
                except FileNotFoundError:
                    pass
            try:
                os.rmdir(fetchrecipedir)
            except OSError as e:
                import errno
                if e.errno != errno.ENOTEMPTY:
                    raise

        bb.utils.mkdirhier(destdir)
        for fn in os.listdir(tmpworkdir):
            shutil.move(os.path.join(tmpworkdir, fn), destdir)

    finally:
        if not preserve_tmp:
            shutil.rmtree(tmpdir)
            tmpdir = None

    return checksums, tmpdir


def run_editor(fn, logger=None):
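    """Open the given file (or list of files) in the user's preferred editor.

    The editor is taken from $VISUAL, then $EDITOR, falling back to vi.
    Returns 0 on success, or 1 if the editor exited with an error.
    """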
    if isinstance(fn, str):
        files = [fn]
    else:
        files = fn

    editor = os.getenv('VISUAL', os.getenv('EDITOR', 'vi'))
    try:
        return subprocess.check_call(shlex.split(editor) + files)
    except subprocess.CalledProcessError as exc:
        if logger:
            logger.error("Execution of '%s' failed: %s" % (editor, exc))
        return 1

def is_src_url(param):
    """
    Check if a parameter is a URL and return True if so.
    NOTE: be careful about changing this as it will influence how
    devtool/recipetool command-line handling works.
    """
    if not param:
        return False
    elif '://' in param:
        return True
    elif param.startswith('git@') or ('@' in param and param.endswith('.git')):
        return True
    return False

def filter_src_subdirs(pth):
    """
    Filter out subdirectories of initial unpacked source trees that we do not care about.
    Used by devtool and recipetool.
    """
    dirlist = os.listdir(pth)
    filterout = ['git.indirectionsymlink', 'source-date-epoch']
    dirlist = [x for x in dirlist if x not in filterout]
    return dirlist