# Recipe creation tool - create build system handler for python
#
# Copyright (C) 2015 Mentor Graphics Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#

import ast
import builtins
import codecs
import collections
import setuptools.command.build_py
import email
import importlib.machinery
import glob
import itertools
import logging
import os
import re
import sys
import subprocess
from recipetool.create import RecipeHandler

logger = logging.getLogger('recipetool')

tinfoil = None


def tinfoil_init(instance):
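    """Store the tinfoil instance handed over by recipetool so that handlers
    can read bitbake configuration data and pkgdata."""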
    global tinfoil
    tinfoil = instance


class PythonRecipeHandler(RecipeHandler):
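    """Shared helpers for the Python recipe handlers: trove classifier to
    license mapping, metadata-to-bitbake-variable mapping, and dependency
    scanning against the pkgdata produced by previously built packages."""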
    base_pkgdeps = ['python3-core']
    excluded_pkgdeps = ['python3-dbg']
    # os.path is provided by python3-core
    assume_provided = ['builtins', 'os.path']
    # Assumes that the host python3 builtin_module_names is sane for target too
    assume_provided = assume_provided + list(sys.builtin_module_names)
    excluded_fields = []


    classifier_license_map = {
        'License :: OSI Approved :: Academic Free License (AFL)': 'AFL',
        'License :: OSI Approved :: Apache Software License': 'Apache',
        'License :: OSI Approved :: Apple Public Source License': 'APSL',
        'License :: OSI Approved :: Artistic License': 'Artistic',
        'License :: OSI Approved :: Attribution Assurance License': 'AAL',
        'License :: OSI Approved :: BSD License': 'BSD-3-Clause',
        'License :: OSI Approved :: Boost Software License 1.0 (BSL-1.0)': 'BSL-1.0',
        'License :: OSI Approved :: CEA CNRS Inria Logiciel Libre License, version 2.1 (CeCILL-2.1)': 'CECILL-2.1',
        'License :: OSI Approved :: Common Development and Distribution License 1.0 (CDDL-1.0)': 'CDDL-1.0',
        'License :: OSI Approved :: Common Public License': 'CPL',
        'License :: OSI Approved :: Eclipse Public License 1.0 (EPL-1.0)': 'EPL-1.0',
        'License :: OSI Approved :: Eclipse Public License 2.0 (EPL-2.0)': 'EPL-2.0',
        'License :: OSI Approved :: Eiffel Forum License': 'EFL',
        'License :: OSI Approved :: European Union Public Licence 1.0 (EUPL 1.0)': 'EUPL-1.0',
        'License :: OSI Approved :: European Union Public Licence 1.1 (EUPL 1.1)': 'EUPL-1.1',
        'License :: OSI Approved :: European Union Public Licence 1.2 (EUPL 1.2)': 'EUPL-1.2',
        'License :: OSI Approved :: GNU Affero General Public License v3': 'AGPL-3.0-only',
        'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)': 'AGPL-3.0-or-later',
        'License :: OSI Approved :: GNU Free Documentation License (FDL)': 'GFDL',
        'License :: OSI Approved :: GNU General Public License (GPL)': 'GPL',
        'License :: OSI Approved :: GNU General Public License v2 (GPLv2)': 'GPL-2.0-only',
        'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)': 'GPL-2.0-or-later',
        'License :: OSI Approved :: GNU General Public License v3 (GPLv3)': 'GPL-3.0-only',
        'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)': 'GPL-3.0-or-later',
        'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)': 'LGPL-2.0-only',
        'License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+)': 'LGPL-2.0-or-later',
        'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)': 'LGPL-3.0-only',
        'License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)': 'LGPL-3.0-or-later',
        'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)': 'LGPL',
        'License :: OSI Approved :: Historical Permission Notice and Disclaimer (HPND)': 'HPND',
        'License :: OSI Approved :: IBM Public License': 'IPL',
        'License :: OSI Approved :: ISC License (ISCL)': 'ISC',
        'License :: OSI Approved :: Intel Open Source License': 'Intel',
        'License :: OSI Approved :: Jabber Open Source License': 'Jabber',
        'License :: OSI Approved :: MIT License': 'MIT',
        'License :: OSI Approved :: MIT No Attribution License (MIT-0)': 'MIT-0',
        'License :: OSI Approved :: MITRE Collaborative Virtual Workspace License (CVW)': 'CVWL',
        'License :: OSI Approved :: MirOS License (MirOS)': 'MirOS',
        'License :: OSI Approved :: Motosoto License': 'Motosoto',
        'License :: OSI Approved :: Mozilla Public License 1.0 (MPL)': 'MPL-1.0',
        'License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)': 'MPL-1.1',
        'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)': 'MPL-2.0',
        'License :: OSI Approved :: Nethack General Public License': 'NGPL',
        'License :: OSI Approved :: Nokia Open Source License': 'Nokia',
        'License :: OSI Approved :: Open Group Test Suite License': 'OGTSL',
        'License :: OSI Approved :: Open Software License 3.0 (OSL-3.0)': 'OSL-3.0',
        'License :: OSI Approved :: PostgreSQL License': 'PostgreSQL',
        'License :: OSI Approved :: Python License (CNRI Python License)': 'CNRI-Python',
        'License :: OSI Approved :: Python Software Foundation License': 'PSF-2.0',
        'License :: OSI Approved :: Qt Public License (QPL)': 'QPL',
        'License :: OSI Approved :: Ricoh Source Code Public License': 'RSCPL',
        'License :: OSI Approved :: SIL Open Font License 1.1 (OFL-1.1)': 'OFL-1.1',
        'License :: OSI Approved :: Sleepycat License': 'Sleepycat',
        'License :: OSI Approved :: Sun Industry Standards Source License (SISSL)': 'SISSL',
        'License :: OSI Approved :: Sun Public License': 'SPL',
        'License :: OSI Approved :: The Unlicense (Unlicense)': 'Unlicense',
        'License :: OSI Approved :: Universal Permissive License (UPL)': 'UPL-1.0',
        'License :: OSI Approved :: University of Illinois/NCSA Open Source License': 'NCSA',
        'License :: OSI Approved :: Vovida Software License 1.0': 'VSL-1.0',
        'License :: OSI Approved :: W3C License': 'W3C',
        'License :: OSI Approved :: X.Net License': 'Xnet',
        'License :: OSI Approved :: Zope Public License': 'ZPL',
        'License :: OSI Approved :: zlib/libpng License': 'Zlib',
        'License :: Other/Proprietary License': 'Proprietary',
        'License :: Public Domain': 'PD',
    }

    def __init__(self):
        pass

    def handle_classifier_license(self, classifiers, existing_licenses=""):
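        """Map trove classifiers to license identifiers, using any existing
        license text to disambiguate generic values such as 'GPL' or 'LGPL'.
        Returns the matched licenses joined with ' & ', or None."""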

        licenses = []
        for classifier in classifiers:
            if classifier in self.classifier_license_map:
                license = self.classifier_license_map[classifier]
                if license == 'Apache' and 'Apache-2.0' in existing_licenses:
                    license = 'Apache-2.0'
                elif license == 'GPL':
                    if 'GPL-2.0' in existing_licenses or 'GPLv2' in existing_licenses:
                        license = 'GPL-2.0'
                    elif 'GPL-3.0' in existing_licenses or 'GPLv3' in existing_licenses:
                        license = 'GPL-3.0'
                elif license == 'LGPL':
                    if 'LGPL-2.1' in existing_licenses or 'LGPLv2.1' in existing_licenses:
                        license = 'LGPL-2.1'
                    elif 'LGPL-2.0' in existing_licenses or 'LGPLv2' in existing_licenses:
                        license = 'LGPL-2.0'
                    elif 'LGPL-3.0' in existing_licenses or 'LGPLv3' in existing_licenses:
                        license = 'LGPL-3.0'
                licenses.append(license)

        if licenses:
            return ' & '.join(licenses)

        return None

    def map_info_to_bbvar(self, info, extravalues):
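        """Copy extracted metadata fields into extravalues according to
        bbvar_map, skipping excluded fields and never overwriting values that
        recipetool has already set."""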

        # Map PKG-INFO & setup.py fields to bitbake variables
        for field, values in info.items():
            if field in self.excluded_fields:
                continue

            if field not in self.bbvar_map:
                continue

            if isinstance(values, str):
                value = values
            else:
                value = ' '.join(str(v) for v in values if v)

            bbvar = self.bbvar_map[field]
            if bbvar == "PN":
                # by convention python recipes start with "python3-"
                if not value.startswith('python'):
                    value = 'python3-' + value

            if bbvar not in extravalues and value:
                extravalues[bbvar] = value

    def apply_info_replacements(self, info):
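        """Apply the (variable, search, replace) regex rules from
        self.replacements to the extracted metadata, handling string, dict
        and list values; a None replacement drops matching values."""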
        if not self.replacements:
            return

        for variable, search, replace in self.replacements:
            if variable not in info:
                continue

            def replace_value(search, replace, value):
                if replace is None:
                    if re.search(search, value):
                        return None
                else:
                    new_value = re.sub(search, replace, value)
                    if value != new_value:
                        return new_value
                return value

            value = info[variable]
            if isinstance(value, str):
                new_value = replace_value(search, replace, value)
                if new_value is None:
                    del info[variable]
                elif new_value != value:
                    info[variable] = new_value
            elif hasattr(value, 'items'):
                for dkey, dvalue in list(value.items()):
                    new_list = []
                    for pos, a_value in enumerate(dvalue):
                        new_value = replace_value(search, replace, a_value)
                        if new_value is not None and new_value != value:
                            new_list.append(new_value)

                    if value != new_list:
                        value[dkey] = new_list
            else:
                new_list = []
                for pos, a_value in enumerate(value):
                    new_value = replace_value(search, replace, a_value)
                    if new_value is not None and new_value != value:
                        new_list.append(new_value)

                if value != new_list:
                    info[variable] = new_list


    def scan_python_dependencies(self, paths):
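        """Run the pythondeps helper over the given paths and return the set
        of imported modules that the scanned sources do not themselves
        provide."""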
        deps = set()
        try:
            dep_output = self.run_command(['pythondeps', '-d'] + paths)
        except (OSError, subprocess.CalledProcessError):
            pass
        else:
            for line in dep_output.splitlines():
                line = line.rstrip()
                dep, filename = line.split('\t', 1)
                if filename.endswith('/setup.py'):
                    continue
                deps.add(dep)

        try:
            provides_output = self.run_command(['pythondeps', '-p'] + paths)
        except (OSError, subprocess.CalledProcessError):
            pass
        else:
            provides_lines = (l.rstrip() for l in provides_output.splitlines())
            provides = set(l for l in provides_lines if l and l != 'setup')
            deps -= provides

        return deps

    def parse_pkgdata_for_python_packages(self):
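        """Build a map of python module/package names to the binary packages
        that ship them, based on the FILES_INFO entries in pkgdata."""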
        pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')

        ldata = tinfoil.config_data.createCopy()
        bb.parse.handle('classes-recipe/python3-dir.bbclass', ldata, True)
        python_sitedir = ldata.getVar('PYTHON_SITEPACKAGES_DIR')

        dynload_dir = os.path.join(os.path.dirname(python_sitedir), 'lib-dynload')
        python_dirs = [python_sitedir + os.sep,
                       os.path.join(os.path.dirname(python_sitedir), 'dist-packages') + os.sep,
                       os.path.dirname(python_sitedir) + os.sep]
        packages = {}
        for pkgdatafile in glob.glob('{}/runtime/*'.format(pkgdata_dir)):
            files_info = None
            with open(pkgdatafile, 'r') as f:
                for line in f.readlines():
                    field, value = line.split(': ', 1)
                    if field.startswith('FILES_INFO'):
                        files_info = ast.literal_eval(value)
                        break
                else:
                    continue

            for fn in files_info:
                for suffix in importlib.machinery.all_suffixes():
                    if fn.endswith(suffix):
                        break
                else:
                    continue

                if fn.startswith(dynload_dir + os.sep):
                    if '/.debug/' in fn:
                        continue
                    base = os.path.basename(fn)
                    provided = base.split('.', 1)[0]
                    packages[provided] = os.path.basename(pkgdatafile)
                    continue

                for python_dir in python_dirs:
                    if fn.startswith(python_dir):
                        relpath = fn[len(python_dir):]
                        relstart, _, relremaining = relpath.partition(os.sep)
                        if relstart.endswith('.egg'):
                            relpath = relremaining
                        base, _ = os.path.splitext(relpath)

                        if '/.debug/' in base:
                            continue
                        if os.path.basename(base) == '__init__':
                            base = os.path.dirname(base)
                        base = base.replace(os.sep + os.sep, os.sep)
                        provided = base.replace(os.sep, '.')
                        packages[provided] = os.path.basename(pkgdatafile)
        return packages

    @classmethod
    def run_command(cls, cmd, **popenargs):
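        """Run an external command and return its decoded output; failures are
        logged and the exception is re-raised for the caller to handle."""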
        if 'stderr' not in popenargs:
            popenargs['stderr'] = subprocess.STDOUT
        try:
            return subprocess.check_output(cmd, **popenargs).decode('utf-8')
        except OSError as exc:
            logger.error('Unable to run `%s`: %s', ' '.join(cmd), exc)
            raise
        except subprocess.CalledProcessError as exc:
            logger.error('Unable to run `%s`: %s', ' '.join(cmd), exc.output)
            raise


class PythonSetupPyRecipeHandler(PythonRecipeHandler):
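    """Recipe handler for source trees that use a distutils/setuptools
    setup.py."""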
    bbvar_map = {
        'Name': 'PN',
        'Version': 'PV',
        'Home-page': 'HOMEPAGE',
        'Summary': 'SUMMARY',
        'Description': 'DESCRIPTION',
        'License': 'LICENSE',
        'Requires': 'RDEPENDS:${PN}',
        'Provides': 'RPROVIDES:${PN}',
        'Obsoletes': 'RREPLACES:${PN}',
    }
    # PN/PV are already set by recipetool core & desc can be extremely long
    excluded_fields = [
        'Description',
    ]
    setup_parse_map = {
        'Url': 'Home-page',
        'Classifiers': 'Classifier',
        'Description': 'Summary',
    }
    setuparg_map = {
        'Home-page': 'url',
        'Classifier': 'classifiers',
        'Summary': 'description',
        'Description': 'long-description',
    }
    # Values which are lists, used by the setup.py argument based metadata
    # extraction method, to determine how to process the setup.py output.
    setuparg_list_fields = [
        'Classifier',
        'Requires',
        'Provides',
        'Obsoletes',
        'Platform',
        'Supported-Platform',
    ]
    setuparg_multi_line_values = ['Description']

    replacements = [
        ('License', r' +$', ''),
        ('License', r'^ +', ''),
        ('License', r' ', '-'),
        ('License', r'^GNU-', ''),
        ('License', r'-[Ll]icen[cs]e(,?-[Vv]ersion)?', ''),
        ('License', r'^UNKNOWN$', ''),

        # Remove currently unhandled version numbers from these variables
        ('Requires', r' *\([^)]*\)', ''),
        ('Provides', r' *\([^)]*\)', ''),
        ('Obsoletes', r' *\([^)]*\)', ''),
        ('Install-requires', r'^([^><= ]+).*', r'\1'),
        ('Extras-require', r'^([^><= ]+).*', r'\1'),
        ('Tests-require', r'^([^><= ]+).*', r'\1'),

        # Remove unhandled dependency on particular features (e.g. foo[PDF])
        ('Install-requires', r'\[[^\]]+\]$', ''),
    ]

    def __init__(self):
        pass

    def parse_setup_py(self, setupscript='./setup.py'):
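        """Statically parse setup.py using the ast module and map the literal
        setup() keyword arguments to PKG-INFO style field names."""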
        with codecs.open(setupscript) as f:
            info, imported_modules, non_literals, extensions = gather_setup_info(f)

        def _map(key):
            key = key.replace('_', '-')
            key = key[0].upper() + key[1:]
            if key in self.setup_parse_map:
                key = self.setup_parse_map[key]
            return key

        # Naive mapping of setup() arguments to PKG-INFO field names
        for d in [info, non_literals]:
            for key, value in list(d.items()):
                if key is None:
                    continue
                new_key = _map(key)
                if new_key != key:
                    del d[key]
                    d[new_key] = value

        return info, 'setuptools' in imported_modules, non_literals, extensions

    def get_setup_args_info(self, setupscript='./setup.py'):
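        """Extract metadata by invoking setup.py with --name, --version and
        similar arguments, grouping fields by whether their values are lists
        or multi-line strings."""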
        cmd = ['python3', setupscript]
        info = {}
        keys = set(self.bbvar_map.keys())
        keys |= set(self.setuparg_list_fields)
        keys |= set(self.setuparg_multi_line_values)
        grouped_keys = itertools.groupby(keys, lambda k: (k in self.setuparg_list_fields, k in self.setuparg_multi_line_values))
        for index, keys in grouped_keys:
            if index == (True, False):
                # Splitlines output for each arg as a list value
                for key in keys:
                    arg = self.setuparg_map.get(key, key.lower())
                    try:
                        arg_info = self.run_command(cmd + ['--' + arg], cwd=os.path.dirname(setupscript))
                    except (OSError, subprocess.CalledProcessError):
                        pass
                    else:
                        info[key] = [l.rstrip() for l in arg_info.splitlines()]
            elif index == (False, True):
                # Entire output for each arg
                for key in keys:
                    arg = self.setuparg_map.get(key, key.lower())
                    try:
                        arg_info = self.run_command(cmd + ['--' + arg], cwd=os.path.dirname(setupscript))
                    except (OSError, subprocess.CalledProcessError):
                        pass
                    else:
                        info[key] = arg_info
            else:
                info.update(self.get_setup_byline(list(keys), setupscript))
        return info

    def get_setup_byline(self, fields, setupscript='./setup.py'):
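        """Query several setup.py metadata arguments in a single invocation,
        expecting exactly one output line per requested field."""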
        info = {}

        cmd = ['python3', setupscript]
        cmd.extend('--' + self.setuparg_map.get(f, f.lower()) for f in fields)
        try:
            info_lines = self.run_command(cmd, cwd=os.path.dirname(setupscript)).splitlines()
        except (OSError, subprocess.CalledProcessError):
            pass
        else:
            if len(fields) != len(info_lines):
                logger.error('Mismatch between setup.py output lines and number of fields')
                sys.exit(1)

            for lineno, line in enumerate(info_lines):
                line = line.rstrip()
                info[fields[lineno]] = line
        return info

    def get_pkginfo(self, pkginfo_fn):
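        """Parse an RFC 822 style PKG-INFO file into a dict, keeping repeated
        header fields as lists."""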
        msg = email.message_from_file(open(pkginfo_fn, 'r'))
        msginfo = {}
        for field in msg.keys():
            values = msg.get_all(field)
            if len(values) == 1:
                msginfo[field] = values[0]
            else:
                msginfo[field] = values
        return msginfo

    def scan_setup_python_deps(self, srctree, setup_info, setup_non_literals):
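        """Work out which paths to scan from the py_modules, packages and
        scripts declared in setup.py, then map the discovered imports to
        providing packages. Returns the (mapped, unmapped) dependency sets."""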
        if 'Package-dir' in setup_info:
            package_dir = setup_info['Package-dir']
        else:
            package_dir = {}

        dist = setuptools.Distribution()

        class PackageDir(setuptools.command.build_py.build_py):
            def __init__(self, package_dir):
                self.package_dir = package_dir
                self.dist = dist
                super().__init__(self.dist)

        pd = PackageDir(package_dir)
        to_scan = []
        if not any(v in setup_non_literals for v in ['Py-modules', 'Scripts', 'Packages']):
            if 'Py-modules' in setup_info:
                for module in setup_info['Py-modules']:
                    try:
                        package, module = module.rsplit('.', 1)
                    except ValueError:
                        package, module = '.', module
                    module_path = os.path.join(pd.get_package_dir(package), module + '.py')
                    to_scan.append(module_path)

            if 'Packages' in setup_info:
                for package in setup_info['Packages']:
                    to_scan.append(pd.get_package_dir(package))

            if 'Scripts' in setup_info:
                to_scan.extend(setup_info['Scripts'])
        else:
            logger.info("Scanning the entire source tree, as one or more of the following setup keywords are non-literal: py_modules, scripts, packages.")

        if not to_scan:
            to_scan = ['.']

        logger.info("Scanning paths for packages & dependencies: %s", ', '.join(to_scan))

        provided_packages = self.parse_pkgdata_for_python_packages()
        scanned_deps = self.scan_python_dependencies([os.path.join(srctree, p) for p in to_scan])
        mapped_deps, unmapped_deps = set(self.base_pkgdeps), set()
        for dep in scanned_deps:
            mapped = provided_packages.get(dep)
            if mapped:
                logger.debug('Mapped %s to %s' % (dep, mapped))
                mapped_deps.add(mapped)
            else:
                logger.debug('Could not map %s' % dep)
                unmapped_deps.add(dep)
        return mapped_deps, unmapped_deps

    def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
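        """Fill in recipe values for a setup.py based source tree, preferring
        egg-info/PKG-INFO metadata where available and falling back to parsing
        or running setup.py."""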

        if 'buildsystem' in handled:
            return False

        # Check for non-zero size setup.py files
        setupfiles = RecipeHandler.checkfiles(srctree, ['setup.py'])
        for fn in setupfiles:
            if os.path.getsize(fn):
                break
        else:
            return False

        # setup.py is always parsed to get at certain required information, such as
        # distutils vs setuptools
        #
        # If egg info is available, we use it for both its PKG-INFO metadata
        # and for its requires.txt for install_requires.
        # If PKG-INFO is available but no egg info is, we use that for metadata in preference to
        # the parsed setup.py, but use the install_requires info from the
        # parsed setup.py.

        setupscript = os.path.join(srctree, 'setup.py')
        try:
            setup_info, uses_setuptools, setup_non_literals, extensions = self.parse_setup_py(setupscript)
        except Exception:
            logger.exception("Failed to parse setup.py")
            setup_info, uses_setuptools, setup_non_literals, extensions = {}, True, [], []

        egginfo = glob.glob(os.path.join(srctree, '*.egg-info'))
        if egginfo:
            info = self.get_pkginfo(os.path.join(egginfo[0], 'PKG-INFO'))
            requires_txt = os.path.join(egginfo[0], 'requires.txt')
            if os.path.exists(requires_txt):
                with codecs.open(requires_txt) as f:
                    inst_req = []
                    extras_req = collections.defaultdict(list)
                    current_feature = None
                    for line in f.readlines():
                        line = line.rstrip()
                        if not line:
                            continue

                        if line.startswith('['):
                            # PACKAGECONFIG must not contain expressions or whitespace
                            line = line.replace(" ", "")
                            line = line.replace(':', "")
                            line = line.replace('.', "-dot-")
                            line = line.replace('"', "")
                            line = line.replace('<', "-smaller-")
                            line = line.replace('>', "-bigger-")
                            line = line.replace('_', "-")
                            line = line.replace('(', "")
                            line = line.replace(')', "")
                            line = line.replace('!', "-not-")
                            line = line.replace('=', "-equals-")
                            current_feature = line[1:-1]
                        elif current_feature:
                            extras_req[current_feature].append(line)
                        else:
                            inst_req.append(line)
                    info['Install-requires'] = inst_req
                    info['Extras-require'] = extras_req
        elif RecipeHandler.checkfiles(srctree, ['PKG-INFO']):
            info = self.get_pkginfo(os.path.join(srctree, 'PKG-INFO'))

            if setup_info:
                if 'Install-requires' in setup_info:
                    info['Install-requires'] = setup_info['Install-requires']
                if 'Extras-require' in setup_info:
                    info['Extras-require'] = setup_info['Extras-require']
        else:
            if setup_info:
                info = setup_info
            else:
                info = self.get_setup_args_info(setupscript)

        # Grab the license value before applying replacements
        license_str = info.get('License', '').strip()

        self.apply_info_replacements(info)

        if uses_setuptools:
            classes.append('setuptools3')
        else:
            classes.append('distutils3')

        if license_str:
            for i, line in enumerate(lines_before):
                if line.startswith('##LICENSE_PLACEHOLDER##'):
                    lines_before.insert(i, '# NOTE: License in setup.py/PKG-INFO is: %s' % license_str)
                    break

        if 'Classifier' in info:
            license = self.handle_classifier_license(info['Classifier'], info.get('License', ''))
            if license:
                info['License'] = license

        self.map_info_to_bbvar(info, extravalues)

        mapped_deps, unmapped_deps = self.scan_setup_python_deps(srctree, setup_info, setup_non_literals)

        extras_req = set()
        if 'Extras-require' in info:
            extras_req = info['Extras-require']
        if extras_req:
            lines_after.append('# The following configs & dependencies are from setuptools extras_require.')
            lines_after.append('# These dependencies are optional, hence can be controlled via PACKAGECONFIG.')
            lines_after.append('# The upstream names may not correspond exactly to bitbake package names.')
            lines_after.append('# The configs might not be correct, since PACKAGECONFIG does not support expressions as may be used in requires.txt - they are just replaced by text.')
            lines_after.append('#')
            lines_after.append('# Uncomment this line to enable all the optional features.')
            lines_after.append('#PACKAGECONFIG ?= "{}"'.format(' '.join(k.lower() for k in extras_req)))
            for feature, feature_reqs in extras_req.items():
                unmapped_deps.difference_update(feature_reqs)

                feature_req_deps = ('python3-' + r.replace('.', '-').lower() for r in sorted(feature_reqs))
                lines_after.append('PACKAGECONFIG[{}] = ",,,{}"'.format(feature.lower(), ' '.join(feature_req_deps)))

        inst_reqs = set()
        if 'Install-requires' in info:
            if extras_req:
                lines_after.append('')
            inst_reqs = info['Install-requires']
            if inst_reqs:
                unmapped_deps.difference_update(inst_reqs)

                inst_req_deps = ('python3-' + r.replace('.', '-').lower() for r in sorted(inst_reqs))
                lines_after.append('# WARNING: the following rdepends are from setuptools install_requires. These')
                lines_after.append('# upstream names may not correspond exactly to bitbake package names.')
                lines_after.append('RDEPENDS:${{PN}} += "{}"'.format(' '.join(inst_req_deps)))

        if mapped_deps:
            name = info.get('Name')
            if name and name[0] in mapped_deps:
                # Attempt to avoid self-reference
                mapped_deps.remove(name[0])
            mapped_deps -= set(self.excluded_pkgdeps)
            if inst_reqs or extras_req:
                lines_after.append('')
            lines_after.append('# WARNING: the following rdepends are determined through basic analysis of the')
            lines_after.append('# python sources, and might not be 100% accurate.')
            lines_after.append('RDEPENDS:${{PN}} += "{}"'.format(' '.join(sorted(mapped_deps))))

        unmapped_deps -= set(extensions)
        unmapped_deps -= set(self.assume_provided)
        if unmapped_deps:
            if mapped_deps:
                lines_after.append('')
            lines_after.append('# WARNING: We were unable to map the following python package/module')
            lines_after.append('# dependencies to the bitbake packages which include them:')
            lines_after.extend('# {}'.format(d) for d in sorted(unmapped_deps))

        handled.append('buildsystem')

class PythonPyprojectTomlRecipeHandler(PythonRecipeHandler):
    """Base class to support PEP517 and PEP518

    PEP517 https://peps.python.org/pep-0517/#source-trees
    PEP518 https://peps.python.org/pep-0518/#build-system-table
    """
    # bitbake currently supports the 4 following backends
    build_backend_map = {
        "setuptools.build_meta": "python_setuptools_build_meta",
        "poetry.core.masonry.api": "python_poetry_core",
        "flit_core.buildapi": "python_flit_core",
        "hatchling.build": "python_hatchling",
    }

    # setuptools.build_meta and flit declare project metadata into the "project" section of pyproject.toml
    # according to PEP-621: https://packaging.python.org/en/latest/specifications/declaring-project-metadata/#declaring-project-metadata
    # while poetry uses the "tool.poetry" section according to its official documentation: https://python-poetry.org/docs/pyproject/
    # keys from "project" and "tool.poetry" sections are almost the same except for the HOMEPAGE which is "homepage" for tool.poetry
    # and "Homepage" for "project" section. So keep both
    bbvar_map = {
        "name": "PN",
        "version": "PV",
        "Homepage": "HOMEPAGE",
        "homepage": "HOMEPAGE",
        "description": "SUMMARY",
        "license": "LICENSE",
        "dependencies": "RDEPENDS:${PN}",
        "requires": "DEPENDS",
    }

    replacements = [
        ("license", r" +$", ""),
        ("license", r"^ +", ""),
        ("license", r" ", "-"),
        ("license", r"^GNU-", ""),
        ("license", r"-[Ll]icen[cs]e(,?-[Vv]ersion)?", ""),
        ("license", r"^UNKNOWN$", ""),
        # Remove currently unhandled version numbers from these variables
        ("requires", r"\[[^\]]+\]$", ""),
        ("requires", r"^([^><= ]+).*", r"\1"),
        ("dependencies", r"\[[^\]]+\]$", ""),
        ("dependencies", r"^([^><= ]+).*", r"\1"),
    ]

    excluded_native_pkgdeps = [
        # already provided by python_setuptools_build_meta.bbclass
        "python3-setuptools-native",
        "python3-wheel-native",
        # already provided by python_poetry_core.bbclass
        "python3-poetry-core-native",
        # already provided by python_flit_core.bbclass
        "python3-flit-core-native",
    ]

    # add here a list of known and often used packages and the corresponding bitbake package
    known_deps_map = {
        "setuptools": "python3-setuptools",
        "wheel": "python3-wheel",
        "poetry-core": "python3-poetry-core",
        "flit_core": "python3-flit-core",
        "setuptools-scm": "python3-setuptools-scm",
        "hatchling": "python3-hatchling",
        "hatch-vcs": "python3-hatch-vcs",
    }

    def __init__(self):
        pass

    def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
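        """Fill in recipe values from pyproject.toml: pick the build class from
        the declared build-backend, extract the project metadata and map the
        build and runtime dependencies to bitbake packages."""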
        info = {}
        metadata = {}

        if 'buildsystem' in handled:
            return False

        # Check for a non-zero size pyproject.toml file
        setupfiles = RecipeHandler.checkfiles(srctree, ["pyproject.toml"])
        for fn in setupfiles:
            if os.path.getsize(fn):
                break
        else:
            return False

        setupscript = os.path.join(srctree, "pyproject.toml")

        try:
            try:
                import tomllib
            except ImportError:
                try:
                    import tomli as tomllib
                except ImportError:
                    logger.exception("Neither 'tomllib' nor 'tomli' could be imported. Please use python3.11 or above or install tomli module")
                    return False
            except Exception:
                logger.exception("Failed to parse pyproject.toml")
                return False

            with open(setupscript, "rb") as f:
                config = tomllib.load(f)
            build_backend = config["build-system"]["build-backend"]
            if build_backend in self.build_backend_map:
                classes.append(self.build_backend_map[build_backend])
            else:
                logger.error(
                    "Unsupported build-backend: %s, cannot use pyproject.toml. Will try to use legacy setup.py"
                    % build_backend
                )
                return False

            licfile = ""

            if build_backend == "poetry.core.masonry.api":
                if "tool" in config and "poetry" in config["tool"]:
                    metadata = config["tool"]["poetry"]
            else:
                if "project" in config:
                    metadata = config["project"]

            if metadata:
                for field, values in metadata.items():
                    if field == "license":
                        # For setuptools.build_meta and flit, licence is a table
                        # but for poetry licence is a string
                        # for hatchling, both table (jsonschema) and string (iniconfig) have been used
                        if build_backend == "poetry.core.masonry.api":
                            value = values
                        else:
                            value = values.get("text", "")
                            if not value:
                                licfile = values.get("file", "")
                                continue
                    elif field == "dependencies" and build_backend == "poetry.core.masonry.api":
                        # For poetry backend, "dependencies" section looks like:
                        # [tool.poetry.dependencies]
                        # requests = "^2.13.0"
                        # requests = { version = "^2.13.0", source = "private" }
                        # See https://python-poetry.org/docs/master/pyproject/#dependencies-and-dependency-groups for more details
                        # This class doesn't handle versions anyway, so we just get the dependencies name here and construct a list
                        value = []
                        for k in values.keys():
                            value.append(k)
                    elif isinstance(values, dict):
                        for k, v in values.items():
                            info[k] = v
                        continue
                    else:
                        value = values

                    info[field] = value

            # Grab the license value before applying replacements
            license_str = info.get("license", "").strip()

            if license_str:
                for i, line in enumerate(lines_before):
                    if line.startswith("##LICENSE_PLACEHOLDER##"):
                        lines_before.insert(
                            i, "# NOTE: License in pyproject.toml is: %s" % license_str
                        )
                        break

            info["requires"] = config["build-system"]["requires"]

            self.apply_info_replacements(info)

            if "classifiers" in info:
                license = self.handle_classifier_license(
                    info["classifiers"], info.get("license", "")
                )
                if license:
                    if licfile:
                        lines = []
                        md5value = bb.utils.md5_file(os.path.join(srctree, licfile))
                        lines.append('LICENSE = "%s"' % license)
                        lines.append(
                            'LIC_FILES_CHKSUM = "file://%s;md5=%s"'
                            % (licfile, md5value)
                        )
                        lines.append("")

                        # Replace the placeholder so we get the values in the right place in the recipe file
                        try:
                            pos = lines_before.index("##LICENSE_PLACEHOLDER##")
                        except ValueError:
                            pos = -1
                        if pos == -1:
                            lines_before.extend(lines)
                        else:
                            lines_before[pos : pos + 1] = lines

                        handled.append(("license", [license, licfile, md5value]))
                    else:
                        info["license"] = license

            provided_packages = self.parse_pkgdata_for_python_packages()
            provided_packages.update(self.known_deps_map)
            native_mapped_deps, native_unmapped_deps = set(), set()
            mapped_deps, unmapped_deps = set(), set()

            if "requires" in info:
                for require in info["requires"]:
                    mapped = provided_packages.get(require)

                    if mapped:
                        logger.debug("Mapped %s to %s" % (require, mapped))
                        native_mapped_deps.add(mapped)
                    else:
                        logger.debug("Could not map %s" % require)
                        native_unmapped_deps.add(require)

                info.pop("requires")

                if native_mapped_deps:
                    native_mapped_deps = {
                        item + "-native" for item in native_mapped_deps
                    }
                    native_mapped_deps -= set(self.excluded_native_pkgdeps)
                    if native_mapped_deps:
                        info["requires"] = " ".join(sorted(native_mapped_deps))

                if native_unmapped_deps:
                    lines_after.append("")
                    lines_after.append(
                        "# WARNING: We were unable to map the following python package/module"
                    )
                    lines_after.append(
                        "# dependencies to the bitbake packages which include them:"
                    )
                    lines_after.extend(
                        "# {}".format(d) for d in sorted(native_unmapped_deps)
                    )

            if "dependencies" in info:
                for dependency in info["dependencies"]:
                    mapped = provided_packages.get(dependency)
                    if mapped:
                        logger.debug("Mapped %s to %s" % (dependency, mapped))
                        mapped_deps.add(mapped)
                    else:
                        logger.debug("Could not map %s" % dependency)
                        unmapped_deps.add(dependency)

                info.pop("dependencies")

                if mapped_deps:
                    info["dependencies"] = " ".join(sorted(mapped_deps))

                if unmapped_deps:
                    lines_after.append("")
                    lines_after.append(
                        "# WARNING: We were unable to map the following python package/module"
                    )
                    lines_after.append(
                        "# runtime dependencies to the bitbake packages which include them:"
                    )
                    lines_after.extend(
                        "# {}".format(d) for d in sorted(unmapped_deps)
                    )

            self.map_info_to_bbvar(info, extravalues)

            handled.append("buildsystem")
        except Exception:
            logger.exception("Failed to correctly handle pyproject.toml, falling back to another method")
            return False


def gather_setup_info(fileobj):
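    """Parse a setup.py file object and return its literal setup() keyword
    arguments, the modules it imports, the non-literal keywords and the names
    of any Extension modules it defines."""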
    parsed = ast.parse(fileobj.read(), fileobj.name)
    visitor = SetupScriptVisitor()
    visitor.visit(parsed)

    non_literals, extensions = {}, []
    for key, value in list(visitor.keywords.items()):
        if key == 'ext_modules':
            if isinstance(value, list):
                for ext in value:
                    if (isinstance(ext, ast.Call) and
                        isinstance(ext.func, ast.Name) and
                        ext.func.id == 'Extension' and
                        not has_non_literals(ext.args)):
                        extensions.append(ext.args[0])
        elif has_non_literals(value):
            non_literals[key] = value
            del visitor.keywords[key]

    return visitor.keywords, visitor.imported_modules, non_literals, extensions


class SetupScriptVisitor(ast.NodeVisitor):
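    """AST visitor that records the keyword arguments passed to setup() and
    the modules imported by a setup.py script."""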
    def __init__(self):
        ast.NodeVisitor.__init__(self)
        self.keywords = {}
        self.non_literals = []
        self.imported_modules = set()

    def visit_Expr(self, node):
        if isinstance(node.value, ast.Call) and \
           isinstance(node.value.func, ast.Name) and \
           node.value.func.id == 'setup':
            self.visit_setup(node.value)

    def visit_setup(self, node):
        call = LiteralAstTransform().visit(node)
        self.keywords = call.keywords
        for k, v in self.keywords.items():
            if has_non_literals(v):
                self.non_literals.append(k)

    def visit_Import(self, node):
        for alias in node.names:
            self.imported_modules.add(alias.name)

    def visit_ImportFrom(self, node):
        self.imported_modules.add(node.module)


class LiteralAstTransform(ast.NodeTransformer):
    """Simplify the ast through evaluation of literals."""
    excluded_fields = ['ctx']

    def visit(self, node):
        if not isinstance(node, ast.AST):
            return node
        else:
            return ast.NodeTransformer.visit(self, node)

    def generic_visit(self, node):
        try:
            return ast.literal_eval(node)
        except ValueError:
            for field, value in ast.iter_fields(node):
                if field in self.excluded_fields:
                    delattr(node, field)
                if value is None:
                    continue

                if isinstance(value, list):
                    if field in ('keywords', 'kwargs'):
                        new_value = dict((kw.arg, self.visit(kw.value)) for kw in value)
                    else:
                        new_value = [self.visit(i) for i in value]
                else:
                    new_value = self.visit(value)
                setattr(node, field, new_value)
            return node
    def visit_Name(self, node):
        if hasattr(builtins, node.id):
            return getattr(builtins, node.id)
        else:
            return self.generic_visit(node)

    def visit_Tuple(self, node):
        return tuple(self.visit(v) for v in node.elts)

    def visit_List(self, node):
        return [self.visit(v) for v in node.elts]

    def visit_Set(self, node):
        return set(self.visit(v) for v in node.elts)

    def visit_Dict(self, node):
        keys = (self.visit(k) for k in node.keys)
        values = (self.visit(v) for v in node.values)
        return dict(zip(keys, values))


def has_non_literals(value):
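    """Return True if the value, or any value nested within it, is still an
    AST node, i.e. could not be evaluated down to a literal."""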
    if isinstance(value, ast.AST):
        return True
    elif isinstance(value, str):
        return False
    elif hasattr(value, 'values'):
        return any(has_non_literals(v) for v in value.values())
    elif hasattr(value, '__iter__'):
        return any(has_non_literals(v) for v in value)


def register_recipe_handlers(handlers):
    # We need to make sure these are ahead of the makefile fallback handler
    # and the pyproject.toml handler ahead of the setup.py handler
    handlers.append((PythonPyprojectTomlRecipeHandler(), 75))
    handlers.append((PythonSetupPyRecipeHandler(), 70))