Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 1 | # Script utility functions |
| 2 | # |
| 3 | # Copyright (C) 2014 Intel Corporation |
| 4 | # |
Brad Bishop | c342db3 | 2019-05-15 21:57:59 -0400 | [diff] [blame] | 5 | # SPDX-License-Identifier: GPL-2.0-only |
Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 6 | # |
Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 7 | |
Patrick Williams | d8c66bc | 2016-06-20 12:57:21 -0500 | [diff] [blame] | 8 | import argparse |
Brad Bishop | 1a4b7ee | 2018-12-16 17:11:34 -0800 | [diff] [blame] | 9 | import glob |
| 10 | import logging |
| 11 | import os |
Brad Bishop | d7bf8c1 | 2018-02-25 22:55:05 -0500 | [diff] [blame] | 12 | import random |
Brad Bishop | 1a4b7ee | 2018-12-16 17:11:34 -0800 | [diff] [blame] | 13 | import shlex |
| 14 | import shutil |
Brad Bishop | d7bf8c1 | 2018-02-25 22:55:05 -0500 | [diff] [blame] | 15 | import string |
Brad Bishop | 1a4b7ee | 2018-12-16 17:11:34 -0800 | [diff] [blame] | 16 | import subprocess |
| 17 | import sys |
| 18 | import tempfile |
Brad Bishop | 79641f2 | 2019-09-10 07:20:22 -0400 | [diff] [blame] | 19 | import threading |
Brad Bishop | 1932369 | 2019-04-05 15:28:33 -0400 | [diff] [blame] | 20 | import importlib |
| 21 | from importlib import machinery |
Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 22 | |
Brad Bishop | 79641f2 | 2019-09-10 07:20:22 -0400 | [diff] [blame] | 23 | class KeepAliveStreamHandler(logging.StreamHandler): |
| 24 | def __init__(self, keepalive=True, **kwargs): |
| 25 | super().__init__(**kwargs) |
| 26 | if keepalive is True: |
| 27 | keepalive = 5000 # default timeout |
| 28 | self._timeout = threading.Condition() |
| 29 | self._stop = False |
| 30 | |
| 31 | # background thread waits on condition, if the condition does not |
| 32 | # happen emit a keep alive message |
| 33 | def thread(): |
| 34 | while not self._stop: |
| 35 | with self._timeout: |
| 36 | if not self._timeout.wait(keepalive): |
| 37 | self.emit(logging.LogRecord("keepalive", logging.INFO, |
| 38 | None, None, "Keepalive message", None, None)) |
| 39 | |
| 40 | self._thread = threading.Thread(target = thread, daemon = True) |
| 41 | self._thread.start() |
| 42 | |
| 43 | def close(self): |
| 44 | # mark the thread to stop and notify it |
| 45 | self._stop = True |
| 46 | with self._timeout: |
| 47 | self._timeout.notify() |
| 48 | # wait for it to join |
| 49 | self._thread.join() |
| 50 | super().close() |
| 51 | |
| 52 | def emit(self, record): |
| 53 | super().emit(record) |
| 54 | # trigger timer reset |
| 55 | with self._timeout: |
| 56 | self._timeout.notify() |
| 57 | |
def logger_create(name, stream=None, keepalive=None):
    """Create and return a logger with a "LEVEL: message" stream handler.

    If keepalive is given, a KeepAliveStreamHandler is attached instead so
    that periodic keepalive messages are emitted during long quiet periods.
    """
    log = logging.getLogger(name)
    if keepalive is None:
        handler = logging.StreamHandler(stream=stream)
    else:
        handler = KeepAliveStreamHandler(stream=stream, keepalive=keepalive)
    handler.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
    log.addHandler(handler)
    log.setLevel(logging.INFO)
    return log
| 68 | |
def logger_setup_color(logger, color='auto'):
    """Enable colored output on the logger's BBLogFormatter stream handlers.

    color: 'always' to force color on, 'auto' to enable it only when the
    handler's stream is a tty; any other value leaves color disabled.
    """
    from bb.msg import BBLogFormatter

    for hdl in logger.handlers:
        if not isinstance(hdl, logging.StreamHandler):
            continue
        if not isinstance(hdl.formatter, BBLogFormatter):
            continue
        if color == 'always' or (color == 'auto' and hdl.stream.isatty()):
            hdl.formatter.enable_color()
Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 77 | |
| 78 | |
def load_plugins(logger, plugins, pluginpath):
    """Load all plugin modules from pluginpath into the plugins list.

    Each *.py file in pluginpath (except __init__ and plugins already
    present in the list) is imported; if the module defines plugin_init()
    it is called with the plugins list before the module is appended.
    """

    def load_plugin(name):
        logger.debug('Loading plugin %s' % name)
        spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] )
        if spec:
            # spec.loader.load_module() is deprecated (removed in Python
            # 3.12); use the supported module_from_spec/exec_module pair
            mod = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(mod)
            return mod

    def plugin_name(filename):
        return os.path.splitext(os.path.basename(filename))[0]

    known_plugins = [plugin_name(p.__name__) for p in plugins]
    logger.debug('Loading plugins from %s...' % pluginpath)
    for fn in glob.glob(os.path.join(pluginpath, '*.py')):
        name = plugin_name(fn)
        if name != '__init__' and name not in known_plugins:
            plugin = load_plugin(name)
            if hasattr(plugin, 'plugin_init'):
                plugin.plugin_init(plugins)
            plugins.append(plugin)
| 99 | |
Brad Bishop | 1932369 | 2019-04-05 15:28:33 -0400 | [diff] [blame] | 100 | |
Patrick Williams | c124f4f | 2015-09-15 14:41:29 -0500 | [diff] [blame] | 101 | def git_convert_standalone_clone(repodir): |
| 102 | """If specified directory is a git repository, ensure it's a standalone clone""" |
| 103 | import bb.process |
| 104 | if os.path.exists(os.path.join(repodir, '.git')): |
| 105 | alternatesfile = os.path.join(repodir, '.git', 'objects', 'info', 'alternates') |
| 106 | if os.path.exists(alternatesfile): |
| 107 | # This will have been cloned with -s, so we need to convert it so none |
| 108 | # of the contents is shared |
| 109 | bb.process.run('git repack -a', cwd=repodir) |
| 110 | os.remove(alternatesfile) |
| 111 | |
Brad Bishop | d7bf8c1 | 2018-02-25 22:55:05 -0500 | [diff] [blame] | 112 | def _get_temp_recipe_dir(d): |
| 113 | # This is a little bit hacky but we need to find a place where we can put |
| 114 | # the recipe so that bitbake can find it. We're going to delete it at the |
| 115 | # end so it doesn't really matter where we put it. |
| 116 | bbfiles = d.getVar('BBFILES').split() |
| 117 | fetchrecipedir = None |
| 118 | for pth in bbfiles: |
| 119 | if pth.endswith('.bb'): |
| 120 | pthdir = os.path.dirname(pth) |
| 121 | if os.access(os.path.dirname(os.path.dirname(pthdir)), os.W_OK): |
| 122 | fetchrecipedir = pthdir.replace('*', 'recipetool') |
| 123 | if pthdir.endswith('workspace/recipes/*'): |
| 124 | # Prefer the workspace |
| 125 | break |
| 126 | return fetchrecipedir |
| 127 | |
class FetchUrlFailure(Exception):
    """Raised by fetch_url() when the fetch/unpack build for a URL fails."""
    def __init__(self, url):
        # Pass url to Exception so args/repr/pickling behave conventionally
        super().__init__(url)
        self.url = url
    def __str__(self):
        return "Failed to fetch URL %s" % self.url
| 133 | |
def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False, mirrors=False):
    """
    Fetch the specified URL using normal do_fetch and do_unpack tasks, i.e.
    any dependencies that need to be satisfied in order to support the fetch
    operation will be taken care of

    Arguments:
    tinfoil -- connected tinfoil instance used to parse and build recipes
    srcuri -- SRC_URI value to fetch
    srcrev -- SRCREV value to use (for scm URLs such as git)
    destdir -- directory to move the unpacked source contents into
    logger -- logging object for progress/error messages
    preserve_tmp -- if True, keep the temporary directory and return its path
    mirrors -- if True, leave PREMIRRORS/MIRRORS enabled (disabled by default
               since we are almost certainly fetching new source)

    Returns a tuple (checksums, tmpdir) where checksums is a dict collected
    from bb.fetch2.MissingChecksumEvent and tmpdir is the preserved temporary
    directory path (None unless preserve_tmp is set).

    Raises FetchUrlFailure if the fetch/unpack build fails. Calls
    sys.exit(1) if no writeable location for the temporary recipe is found.
    """

    import bb

    checksums = {}
    fetchrecipepn = None

    # We need to put our temp directory under ${BASE_WORKDIR} otherwise
    # we may have problems with the recipe-specific sysroot population
    tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR')
    bb.utils.mkdirhier(tmpparent)
    tmpdir = tempfile.mkdtemp(prefix='recipetool-', dir=tmpparent)
    try:
        tmpworkdir = os.path.join(tmpdir, 'work')
        logger.debug('fetch_url: temp dir is %s' % tmpdir)

        fetchrecipedir = _get_temp_recipe_dir(tinfoil.config_data)
        if not fetchrecipedir:
            logger.error('Searched BBFILES but unable to find a writeable place to put temporary recipe')
            sys.exit(1)
        fetchrecipe = None
        bb.utils.mkdirhier(fetchrecipedir)
        try:
            # Generate a dummy recipe so we can follow more or less normal paths
            # for do_fetch and do_unpack
            # I'd use tempfile functions here but underscores can be produced by that and those
            # aren't allowed in recipe file names except to separate the version
            rndstring = ''.join(random.choice(string.ascii_lowercase + string.digits) for _ in range(8))
            fetchrecipe = os.path.join(fetchrecipedir, 'tmp-recipetool-%s.bb' % rndstring)
            fetchrecipepn = os.path.splitext(os.path.basename(fetchrecipe))[0]
            logger.debug('Generating initial recipe %s for fetching' % fetchrecipe)
            with open(fetchrecipe, 'w') as f:
                # We don't want to have to specify LIC_FILES_CHKSUM
                f.write('LICENSE = "CLOSED"\n')
                # We don't need the cross-compiler
                f.write('INHIBIT_DEFAULT_DEPS = "1"\n')
                # We don't have the checksums yet so we can't require them
                f.write('BB_STRICT_CHECKSUM = "ignore"\n')
                f.write('SRC_URI = "%s"\n' % srcuri)
                f.write('SRCREV = "%s"\n' % srcrev)
                f.write('WORKDIR = "%s"\n' % tmpworkdir)
                # Set S out of the way so it doesn't get created under the workdir
                f.write('S = "%s"\n' % os.path.join(tmpdir, 'emptysrc'))
                if not mirrors:
                    # We do not need PREMIRRORS since we are almost certainly
                    # fetching new source rather than something that has already
                    # been fetched. Hence, we disable them by default.
                    # However, we provide an option for users to enable it.
                    f.write('PREMIRRORS = ""\n')
                    f.write('MIRRORS = ""\n')

            logger.info('Fetching %s...' % srcuri)

            # FIXME this is too noisy at the moment

            # Parse recipes so our new recipe gets picked up
            tinfoil.parse_recipes()

            # Collect any checksums the fetcher reports as missing so the
            # caller can insert them into the final recipe
            def eventhandler(event):
                if isinstance(event, bb.fetch2.MissingChecksumEvent):
                    checksums.update(event.checksums)
                    return True
                return False

            # Run the fetch + unpack tasks
            res = tinfoil.build_targets(fetchrecipepn,
                                        'do_unpack',
                                        handle_events=True,
                                        extra_events=['bb.fetch2.MissingChecksumEvent'],
                                        event_callback=eventhandler)
            if not res:
                raise FetchUrlFailure(srcuri)

            # Remove unneeded directories
            rd = tinfoil.parse_recipe(fetchrecipepn)
            if rd:
                pathvars = ['T', 'RECIPE_SYSROOT', 'RECIPE_SYSROOT_NATIVE']
                for pathvar in pathvars:
                    path = rd.getVar(pathvar)
                    shutil.rmtree(path)
        finally:
            # Always clean up the temporary recipe; its directory is only
            # removed if nothing else is in it
            if fetchrecipe:
                try:
                    os.remove(fetchrecipe)
                except FileNotFoundError:
                    pass
            try:
                os.rmdir(fetchrecipedir)
            except OSError as e:
                import errno
                if e.errno != errno.ENOTEMPTY:
                    raise

        bb.utils.mkdirhier(destdir)
        for fn in os.listdir(tmpworkdir):
            shutil.move(os.path.join(tmpworkdir, fn), destdir)

    finally:
        if not preserve_tmp:
            shutil.rmtree(tmpdir)
            tmpdir = None

    return checksums, tmpdir
| 242 | |
| 243 | |
def run_editor(fn, logger=None):
    """Open the specified file (or list of files) in the user's editor.

    The editor is taken from $VISUAL, falling back to $EDITOR, then 'vi'.
    Returns the editor's exit code (0 on success) or 1 if execution failed.
    """
    if isinstance(fn, str):
        files = [fn]
    else:
        files = fn

    editor = os.getenv('VISUAL', os.getenv('EDITOR', 'vi'))
    try:
        return subprocess.check_call(shlex.split(editor) + files)
    except subprocess.CalledProcessError as exc:
        # logger defaults to None - guard so a failing editor doesn't turn
        # into an AttributeError that masks the real error
        if logger:
            logger.error("Execution of '%s' failed: %s" % (editor, exc))
        return 1
Patrick Williams | c0f7c04 | 2017-02-23 20:41:17 -0600 | [diff] [blame] | 257 | |
def is_src_url(param):
    """
    Check if a parameter is a URL and return True if so
    NOTE: be careful about changing this as it will influence how devtool/recipetool command line handling works
    """
    # Empty/None is never a URL
    if not param:
        return False
    # Anything with a scheme separator is a URL
    if '://' in param:
        return True
    # git ssh shorthand, e.g. git@host:path or user@host:repo.git
    if param.startswith('git@'):
        return True
    return '@' in param and param.endswith('.git')
Brad Bishop | 6dbb316 | 2019-11-25 09:41:34 -0500 | [diff] [blame] | 270 | |
def filter_src_subdirs(pth):
    """
    Filter out subdirectories of initial unpacked source trees that we do not care about.
    Used by devtool and recipetool.
    """
    # Fetcher bookkeeping entries that are not part of the source itself
    ignored = ('git.indirectionsymlink', 'source-date-epoch')
    return [entry for entry in os.listdir(pth) if entry not in ignored]