diff options
Diffstat (limited to 'scripts/lib/recipetool/create_buildsys_python.py')
| -rw-r--r-- | scripts/lib/recipetool/create_buildsys_python.py | 1124 |
1 file changed, 0 insertions, 1124 deletions
diff --git a/scripts/lib/recipetool/create_buildsys_python.py b/scripts/lib/recipetool/create_buildsys_python.py deleted file mode 100644 index a807dafae5..0000000000 --- a/scripts/lib/recipetool/create_buildsys_python.py +++ /dev/null | |||
| @@ -1,1124 +0,0 @@ | |||
| 1 | # Recipe creation tool - create build system handler for python | ||
| 2 | # | ||
| 3 | # Copyright (C) 2015 Mentor Graphics Corporation | ||
| 4 | # | ||
| 5 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 6 | # | ||
| 7 | |||
| 8 | import ast | ||
| 9 | import codecs | ||
| 10 | import collections | ||
| 11 | import setuptools.command.build_py | ||
| 12 | import email | ||
| 13 | import importlib | ||
| 14 | import glob | ||
| 15 | import itertools | ||
| 16 | import logging | ||
| 17 | import os | ||
| 18 | import re | ||
| 19 | import sys | ||
| 20 | import subprocess | ||
| 21 | import json | ||
| 22 | import urllib.request | ||
| 23 | from recipetool.create import RecipeHandler | ||
| 24 | from urllib.parse import urldefrag | ||
| 25 | from recipetool.create import determine_from_url | ||
| 26 | |||
logger = logging.getLogger('recipetool')

# Shared bitbake tinfoil handle; populated via tinfoil_init() by the
# recipetool core before any handler methods that need it are invoked.
tinfoil = None
| 30 | |||
| 31 | |||
def tinfoil_init(instance):
    """Store the tinfoil instance handed over by the recipetool core."""
    global tinfoil
    tinfoil = instance
| 35 | |||
| 36 | |||
class PythonRecipeHandler(RecipeHandler):
    """Shared machinery for creating recipes from python projects.

    Concrete subclasses (setup.py based, pyproject.toml based, ...) supply
    the metadata extraction; this base class provides classifier->license
    mapping, dependency scanning and pkgdata lookups.
    """
    # Runtime dependencies every generated python recipe gets
    base_pkgdeps = ['python3-core']
    # Packages never emitted as runtime dependencies
    excluded_pkgdeps = ['python3-dbg']
    # os.path is provided by python3-core
    assume_provided = ['builtins', 'os.path']
    # Assumes that the host python3 builtin_module_names is sane for target too
    assume_provided = assume_provided + list(sys.builtin_module_names)
    # Metadata fields subclasses may exclude from bitbake variable mapping
    excluded_fields = []


    # Mapping of PyPI trove license classifiers to LICENSE values
    # (SPDX identifiers where one exists).
    classifier_license_map = {
        'License :: OSI Approved :: Academic Free License (AFL)': 'AFL',
        'License :: OSI Approved :: Apache Software License': 'Apache',
        'License :: OSI Approved :: Apple Public Source License': 'APSL',
        'License :: OSI Approved :: Artistic License': 'Artistic',
        'License :: OSI Approved :: Attribution Assurance License': 'AAL',
        'License :: OSI Approved :: BSD License': 'BSD-3-Clause',
        'License :: OSI Approved :: Boost Software License 1.0 (BSL-1.0)': 'BSL-1.0',
        'License :: OSI Approved :: CEA CNRS Inria Logiciel Libre License, version 2.1 (CeCILL-2.1)': 'CECILL-2.1',
        'License :: OSI Approved :: Common Development and Distribution License 1.0 (CDDL-1.0)': 'CDDL-1.0',
        'License :: OSI Approved :: Common Public License': 'CPL',
        'License :: OSI Approved :: Eclipse Public License 1.0 (EPL-1.0)': 'EPL-1.0',
        'License :: OSI Approved :: Eclipse Public License 2.0 (EPL-2.0)': 'EPL-2.0',
        'License :: OSI Approved :: Eiffel Forum License': 'EFL',
        'License :: OSI Approved :: European Union Public Licence 1.0 (EUPL 1.0)': 'EUPL-1.0',
        'License :: OSI Approved :: European Union Public Licence 1.1 (EUPL 1.1)': 'EUPL-1.1',
        'License :: OSI Approved :: European Union Public Licence 1.2 (EUPL 1.2)': 'EUPL-1.2',
        'License :: OSI Approved :: GNU Affero General Public License v3': 'AGPL-3.0-only',
        'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)': 'AGPL-3.0-or-later',
        'License :: OSI Approved :: GNU Free Documentation License (FDL)': 'GFDL',
        'License :: OSI Approved :: GNU General Public License (GPL)': 'GPL',
        'License :: OSI Approved :: GNU General Public License v2 (GPLv2)': 'GPL-2.0-only',
        'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)': 'GPL-2.0-or-later',
        'License :: OSI Approved :: GNU General Public License v3 (GPLv3)': 'GPL-3.0-only',
        'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)': 'GPL-3.0-or-later',
        'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)': 'LGPL-2.0-only',
        'License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+)': 'LGPL-2.0-or-later',
        'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)': 'LGPL-3.0-only',
        'License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)': 'LGPL-3.0-or-later',
        'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)': 'LGPL',
        'License :: OSI Approved :: Historical Permission Notice and Disclaimer (HPND)': 'HPND',
        'License :: OSI Approved :: IBM Public License': 'IPL',
        'License :: OSI Approved :: ISC License (ISCL)': 'ISC',
        'License :: OSI Approved :: Intel Open Source License': 'Intel',
        'License :: OSI Approved :: Jabber Open Source License': 'Jabber',
        'License :: OSI Approved :: MIT License': 'MIT',
        'License :: OSI Approved :: MIT No Attribution License (MIT-0)': 'MIT-0',
        'License :: OSI Approved :: MITRE Collaborative Virtual Workspace License (CVW)': 'CVWL',
        'License :: OSI Approved :: MirOS License (MirOS)': 'MirOS',
        'License :: OSI Approved :: Motosoto License': 'Motosoto',
        'License :: OSI Approved :: Mozilla Public License 1.0 (MPL)': 'MPL-1.0',
        'License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)': 'MPL-1.1',
        'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)': 'MPL-2.0',
        'License :: OSI Approved :: Nethack General Public License': 'NGPL',
        'License :: OSI Approved :: Nokia Open Source License': 'Nokia',
        'License :: OSI Approved :: Open Group Test Suite License': 'OGTSL',
        'License :: OSI Approved :: Open Software License 3.0 (OSL-3.0)': 'OSL-3.0',
        'License :: OSI Approved :: PostgreSQL License': 'PostgreSQL',
        'License :: OSI Approved :: Python License (CNRI Python License)': 'CNRI-Python',
        'License :: OSI Approved :: Python Software Foundation License': 'PSF-2.0',
        'License :: OSI Approved :: Qt Public License (QPL)': 'QPL',
        'License :: OSI Approved :: Ricoh Source Code Public License': 'RSCPL',
        'License :: OSI Approved :: SIL Open Font License 1.1 (OFL-1.1)': 'OFL-1.1',
        'License :: OSI Approved :: Sleepycat License': 'Sleepycat',
        'License :: OSI Approved :: Sun Industry Standards Source License (SISSL)': 'SISSL',
        'License :: OSI Approved :: Sun Public License': 'SPL',
        'License :: OSI Approved :: The Unlicense (Unlicense)': 'Unlicense',
        'License :: OSI Approved :: Universal Permissive License (UPL)': 'UPL-1.0',
        'License :: OSI Approved :: University of Illinois/NCSA Open Source License': 'NCSA',
        'License :: OSI Approved :: Vovida Software License 1.0': 'VSL-1.0',
        'License :: OSI Approved :: W3C License': 'W3C',
        'License :: OSI Approved :: X.Net License': 'Xnet',
        'License :: OSI Approved :: Zope Public License': 'ZPL',
        'License :: OSI Approved :: zlib/libpng License': 'Zlib',
        'License :: Other/Proprietary License': 'Proprietary',
        'License :: Public Domain': 'PD',
    }

    def __init__(self):
        # Handlers are stateless; nothing to initialise.
        pass
| 117 | |||
| 118 | def process_url(self, args, classes, handled, extravalues): | ||
| 119 | """ | ||
| 120 | Convert any pypi url https://pypi.org/project/<package>/<version> into https://files.pythonhosted.org/packages/source/... | ||
| 121 | which corresponds to the archive location, and add pypi class | ||
| 122 | """ | ||
| 123 | |||
| 124 | if 'url' in handled: | ||
| 125 | return None | ||
| 126 | |||
| 127 | fetch_uri = None | ||
| 128 | source = args.source | ||
| 129 | required_version = args.version if args.version else None | ||
| 130 | match = re.match(r'https?://pypi.org/project/([^/]+)(?:/([^/]+))?/?$', urldefrag(source)[0]) | ||
| 131 | if match: | ||
| 132 | package = match.group(1) | ||
| 133 | version = match.group(2) if match.group(2) else required_version | ||
| 134 | |||
| 135 | json_url = f"https://pypi.org/pypi/%s/json" % package | ||
| 136 | response = urllib.request.urlopen(json_url) | ||
| 137 | if response.status == 200: | ||
| 138 | data = json.loads(response.read()) | ||
| 139 | if not version: | ||
| 140 | # grab latest version | ||
| 141 | version = data["info"]["version"] | ||
| 142 | pypi_package = data["info"]["name"] | ||
| 143 | for release in reversed(data["releases"][version]): | ||
| 144 | if release["packagetype"] == "sdist": | ||
| 145 | fetch_uri = release["url"] | ||
| 146 | break | ||
| 147 | else: | ||
| 148 | logger.warning("Cannot handle pypi url %s: cannot fetch package information using %s", source, json_url) | ||
| 149 | return None | ||
| 150 | else: | ||
| 151 | match = re.match(r'^https?://files.pythonhosted.org/packages.*/(.*)-.*$', source) | ||
| 152 | if match: | ||
| 153 | fetch_uri = source | ||
| 154 | pypi_package = match.group(1) | ||
| 155 | _, version = determine_from_url(fetch_uri) | ||
| 156 | |||
| 157 | if match and not args.no_pypi: | ||
| 158 | if required_version and version != required_version: | ||
| 159 | raise Exception("Version specified using --version/-V (%s) and version specified in the url (%s) do not match" % (required_version, version)) | ||
| 160 | # This is optionnal if BPN looks like "python-<pypi_package>" or "python3-<pypi_package>" (see pypi.bbclass) | ||
| 161 | # but at this point we cannot know because because user can specify the output name of the recipe on the command line | ||
| 162 | extravalues["PYPI_PACKAGE"] = pypi_package | ||
| 163 | # If the tarball extension is not 'tar.gz' (default value in pypi.bblcass) whe should set PYPI_PACKAGE_EXT in the recipe | ||
| 164 | pypi_package_ext = re.match(r'.*%s-%s\.(.*)$' % (pypi_package, version), fetch_uri) | ||
| 165 | if pypi_package_ext: | ||
| 166 | pypi_package_ext = pypi_package_ext.group(1) | ||
| 167 | if pypi_package_ext != "tar.gz": | ||
| 168 | extravalues["PYPI_PACKAGE_EXT"] = pypi_package_ext | ||
| 169 | |||
| 170 | # Pypi class will handle S and SRC_URI variables, so remove them | ||
| 171 | # TODO: allow oe.recipeutils.patch_recipe_lines() to accept regexp so we can simplify the following to: | ||
| 172 | # extravalues['SRC_URI(?:\[.*?\])?'] = None | ||
| 173 | extravalues['S'] = None | ||
| 174 | extravalues['SRC_URI'] = None | ||
| 175 | |||
| 176 | classes.append('pypi') | ||
| 177 | |||
| 178 | handled.append('url') | ||
| 179 | return fetch_uri | ||
| 180 | |||
| 181 | def handle_classifier_license(self, classifiers, existing_licenses=""): | ||
| 182 | |||
| 183 | licenses = [] | ||
| 184 | for classifier in classifiers: | ||
| 185 | if classifier in self.classifier_license_map: | ||
| 186 | license = self.classifier_license_map[classifier] | ||
| 187 | if license == 'Apache' and 'Apache-2.0' in existing_licenses: | ||
| 188 | license = 'Apache-2.0' | ||
| 189 | elif license == 'GPL': | ||
| 190 | if 'GPL-2.0' in existing_licenses or 'GPLv2' in existing_licenses: | ||
| 191 | license = 'GPL-2.0' | ||
| 192 | elif 'GPL-3.0' in existing_licenses or 'GPLv3' in existing_licenses: | ||
| 193 | license = 'GPL-3.0' | ||
| 194 | elif license == 'LGPL': | ||
| 195 | if 'LGPL-2.1' in existing_licenses or 'LGPLv2.1' in existing_licenses: | ||
| 196 | license = 'LGPL-2.1' | ||
| 197 | elif 'LGPL-2.0' in existing_licenses or 'LGPLv2' in existing_licenses: | ||
| 198 | license = 'LGPL-2.0' | ||
| 199 | elif 'LGPL-3.0' in existing_licenses or 'LGPLv3' in existing_licenses: | ||
| 200 | license = 'LGPL-3.0' | ||
| 201 | licenses.append(license) | ||
| 202 | |||
| 203 | if licenses: | ||
| 204 | return ' & '.join(licenses) | ||
| 205 | |||
| 206 | return None | ||
| 207 | |||
| 208 | def map_info_to_bbvar(self, info, extravalues): | ||
| 209 | |||
| 210 | # Map PKG-INFO & setup.py fields to bitbake variables | ||
| 211 | for field, values in info.items(): | ||
| 212 | if field in self.excluded_fields: | ||
| 213 | continue | ||
| 214 | |||
| 215 | if field not in self.bbvar_map: | ||
| 216 | continue | ||
| 217 | |||
| 218 | if isinstance(values, str): | ||
| 219 | value = values | ||
| 220 | else: | ||
| 221 | value = ' '.join(str(v) for v in values if v) | ||
| 222 | |||
| 223 | bbvar = self.bbvar_map[field] | ||
| 224 | if bbvar == "PN": | ||
| 225 | # by convention python recipes start with "python3-" | ||
| 226 | if not value.startswith('python'): | ||
| 227 | value = 'python3-' + value | ||
| 228 | |||
| 229 | if bbvar not in extravalues and value: | ||
| 230 | extravalues[bbvar] = value | ||
| 231 | |||
    def apply_info_replacements(self, info):
        """Apply self.replacements — (variable, search, replace) regex rules —
        to the parsed metadata in place.

        A replace of None means "drop any value matching search"; otherwise
        the substitution result replaces the original value. Handles string,
        dict-of-list and list valued fields.
        """
        if not self.replacements:
            return

        for variable, search, replace in self.replacements:
            if variable not in info:
                continue

            def replace_value(search, replace, value):
                # Returns None when the value should be dropped entirely,
                # the substituted string when it changed, or the original
                # value unchanged.
                if replace is None:
                    if re.search(search, value):
                        return None
                else:
                    new_value = re.sub(search, replace, value)
                    if value != new_value:
                        return new_value
                return value

            value = info[variable]
            if isinstance(value, str):
                new_value = replace_value(search, replace, value)
                if new_value is None:
                    del info[variable]
                elif new_value != value:
                    info[variable] = new_value
            elif hasattr(value, 'items'):
                # Dict-valued field (e.g. Extras-require): filter each list
                for dkey, dvalue in list(value.items()):
                    new_list = []
                    for pos, a_value in enumerate(dvalue):
                        new_value = replace_value(search, replace, a_value)
                        # NOTE(review): 'new_value != value' compares the new
                        # string against the whole containing list, which is
                        # always True for a str; possibly 'a_value' was meant.
                        # Left as-is to preserve behaviour — confirm upstream
                        # intent before changing.
                        if new_value is not None and new_value != value:
                            new_list.append(new_value)

                    if value != new_list:
                        value[dkey] = new_list
            else:
                # List-valued field
                new_list = []
                for pos, a_value in enumerate(value):
                    new_value = replace_value(search, replace, a_value)
                    # NOTE(review): same item-vs-collection comparison as above.
                    if new_value is not None and new_value != value:
                        new_list.append(new_value)

                if value != new_list:
                    info[variable] = new_list
| 276 | |||
| 277 | |||
| 278 | def scan_python_dependencies(self, paths): | ||
| 279 | deps = set() | ||
| 280 | try: | ||
| 281 | dep_output = self.run_command(['pythondeps', '-d'] + paths) | ||
| 282 | except (OSError, subprocess.CalledProcessError): | ||
| 283 | pass | ||
| 284 | else: | ||
| 285 | for line in dep_output.splitlines(): | ||
| 286 | line = line.rstrip() | ||
| 287 | dep, filename = line.split('\t', 1) | ||
| 288 | if filename.endswith('/setup.py'): | ||
| 289 | continue | ||
| 290 | deps.add(dep) | ||
| 291 | |||
| 292 | try: | ||
| 293 | provides_output = self.run_command(['pythondeps', '-p'] + paths) | ||
| 294 | except (OSError, subprocess.CalledProcessError): | ||
| 295 | pass | ||
| 296 | else: | ||
| 297 | provides_lines = (l.rstrip() for l in provides_output.splitlines()) | ||
| 298 | provides = set(l for l in provides_lines if l and l != 'setup') | ||
| 299 | deps -= provides | ||
| 300 | |||
| 301 | return deps | ||
| 302 | |||
    def parse_pkgdata_for_python_packages(self):
        """Scan the build's pkgdata to map python module/package names onto
        the target package that ships them.

        Returns a dict of {python import name: package name}.
        """
        pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')

        ldata = tinfoil.config_data.createCopy()
        # NOTE(review): relies on 'bb' resolving at call time even though it
        # is not imported in this file's visible header — presumably provided
        # by the recipetool/scripts environment; confirm.
        bb.parse.handle('classes-recipe/python3-dir.bbclass', ldata, True)
        python_sitedir = ldata.getVar('PYTHON_SITEPACKAGES_DIR')

        dynload_dir = os.path.join(os.path.dirname(python_sitedir), 'lib-dynload')
        # Candidate roots under which python modules may be installed
        python_dirs = [python_sitedir + os.sep,
                       os.path.join(os.path.dirname(python_sitedir), 'dist-packages') + os.sep,
                       os.path.dirname(python_sitedir) + os.sep]
        packages = {}
        for pkgdatafile in glob.glob('{}/runtime/*'.format(pkgdata_dir)):
            files_info = None
            with open(pkgdatafile, 'r') as f:
                for line in f.readlines():
                    field, value = line.split(': ', 1)
                    if field.startswith('FILES_INFO'):
                        files_info = ast.literal_eval(value)
                        break
                else:
                    # No FILES_INFO entry in this pkgdata file
                    continue

            for fn in files_info:
                # Only consider files with python module suffixes (.py, .so, ...)
                for suffix in importlib.machinery.all_suffixes():
                    if fn.endswith(suffix):
                        break
                else:
                    continue

                if fn.startswith(dynload_dir + os.sep):
                    if '/.debug/' in fn:
                        continue
                    # Extension modules: the import name is everything before
                    # the first '.' of the filename
                    base = os.path.basename(fn)
                    provided = base.split('.', 1)[0]
                    packages[provided] = os.path.basename(pkgdatafile)
                    continue

                for python_dir in python_dirs:
                    if fn.startswith(python_dir):
                        relpath = fn[len(python_dir):]
                        relstart, _, relremaining = relpath.partition(os.sep)
                        # Strip a leading <name>.egg directory component
                        if relstart.endswith('.egg'):
                            relpath = relremaining
                        base, _ = os.path.splitext(relpath)

                        if '/.debug/' in base:
                            continue
                        # A package's __init__.py provides the package itself
                        if os.path.basename(base) == '__init__':
                            base = os.path.dirname(base)
                        base = base.replace(os.sep + os.sep, os.sep)
                        # Path separators become dots to form the import name
                        provided = base.replace(os.sep, '.')
                        packages[provided] = os.path.basename(pkgdatafile)
        return packages
| 357 | |||
| 358 | @classmethod | ||
| 359 | def run_command(cls, cmd, **popenargs): | ||
| 360 | if 'stderr' not in popenargs: | ||
| 361 | popenargs['stderr'] = subprocess.STDOUT | ||
| 362 | try: | ||
| 363 | return subprocess.check_output(cmd, **popenargs).decode('utf-8') | ||
| 364 | except OSError as exc: | ||
| 365 | logger.error('Unable to run `{}`: {}', ' '.join(cmd), exc) | ||
| 366 | raise | ||
| 367 | except subprocess.CalledProcessError as exc: | ||
| 368 | logger.error('Unable to run `{}`: {}', ' '.join(cmd), exc.output) | ||
| 369 | raise | ||
| 370 | |||
class PythonSetupPyRecipeHandler(PythonRecipeHandler):
    """Recipe creation handler for setup.py based python projects."""
    # PKG-INFO field name -> bitbake variable
    bbvar_map = {
        'Name': 'PN',
        'Version': 'PV',
        'Home-page': 'HOMEPAGE',
        'Summary': 'SUMMARY',
        'Description': 'DESCRIPTION',
        'License': 'LICENSE',
        'Requires': 'RDEPENDS:${PN}',
        'Provides': 'RPROVIDES:${PN}',
        'Obsoletes': 'RREPLACES:${PN}',
    }
    # PN/PV are already set by recipetool core & desc can be extremely long
    excluded_fields = [
        'Description',
    ]
    # setup() keyword (capitalised, dashed) -> PKG-INFO field alias
    setup_parse_map = {
        'Url': 'Home-page',
        'Classifiers': 'Classifier',
        'Description': 'Summary',
    }
    # PKG-INFO field -> 'python3 setup.py --<arg>' query option
    setuparg_map = {
        'Home-page': 'url',
        'Classifier': 'classifiers',
        'Summary': 'description',
        'Description': 'long-description',
    }
    # Values which are lists, used by the setup.py argument based metadata
    # extraction method, to determine how to process the setup.py output.
    setuparg_list_fields = [
        'Classifier',
        'Requires',
        'Provides',
        'Obsoletes',
        'Platform',
        'Supported-Platform',
    ]
    # Fields whose setup.py output spans multiple lines
    setuparg_multi_line_values = ['Description']

    # (variable, search regex, replacement) rules applied to parsed metadata
    # by apply_info_replacements(); replacement None drops matching values.
    replacements = [
        ('License', r' +$', ''),
        ('License', r'^ +', ''),
        ('License', r' ', '-'),
        ('License', r'^GNU-', ''),
        ('License', r'-[Ll]icen[cs]e(,?-[Vv]ersion)?', ''),
        ('License', r'^UNKNOWN$', ''),

        # Remove currently unhandled version numbers from these variables
        ('Requires', r' *\([^)]*\)', ''),
        ('Provides', r' *\([^)]*\)', ''),
        ('Obsoletes', r' *\([^)]*\)', ''),
        ('Install-requires', r'^([^><= ]+).*', r'\1'),
        ('Extras-require', r'^([^><= ]+).*', r'\1'),
        ('Tests-require', r'^([^><= ]+).*', r'\1'),

        # Remove unhandled dependency on particular features (e.g. foo[PDF])
        ('Install-requires', r'\[[^\]]+\]$', ''),
    ]

    def __init__(self):
        # Handlers are stateless; nothing to initialise.
        pass
| 432 | |||
| 433 | def parse_setup_py(self, setupscript='./setup.py'): | ||
| 434 | with codecs.open(setupscript) as f: | ||
| 435 | info, imported_modules, non_literals, extensions = gather_setup_info(f) | ||
| 436 | |||
| 437 | def _map(key): | ||
| 438 | key = key.replace('_', '-') | ||
| 439 | key = key[0].upper() + key[1:] | ||
| 440 | if key in self.setup_parse_map: | ||
| 441 | key = self.setup_parse_map[key] | ||
| 442 | return key | ||
| 443 | |||
| 444 | # Naive mapping of setup() arguments to PKG-INFO field names | ||
| 445 | for d in [info, non_literals]: | ||
| 446 | for key, value in list(d.items()): | ||
| 447 | if key is None: | ||
| 448 | continue | ||
| 449 | new_key = _map(key) | ||
| 450 | if new_key != key: | ||
| 451 | del d[key] | ||
| 452 | d[new_key] = value | ||
| 453 | |||
| 454 | return info, 'setuptools' in imported_modules, non_literals, extensions | ||
| 455 | |||
    def get_setup_args_info(self, setupscript='./setup.py'):
        """Extract metadata by running 'python3 setup.py --<field>' queries.

        List-valued and multi-line fields are queried one option at a time;
        plain single-line fields are batched into one invocation via
        get_setup_byline().
        """
        cmd = ['python3', setupscript]
        info = {}
        keys = set(self.bbvar_map.keys())
        keys |= set(self.setuparg_list_fields)
        keys |= set(self.setuparg_multi_line_values)
        # Group fields by (is-list-valued, is-multi-line).
        # NOTE(review): groupby() is fed an unsorted set, so the same group
        # key can occur more than once; that looks harmless here because each
        # group is processed independently and results accumulate into 'info'
        # — confirm before relying on grouping order.
        grouped_keys = itertools.groupby(keys, lambda k: (k in self.setuparg_list_fields, k in self.setuparg_multi_line_values))
        for index, keys in grouped_keys:
            if index == (True, False):
                # Splitlines output for each arg as a list value
                for key in keys:
                    arg = self.setuparg_map.get(key, key.lower())
                    try:
                        arg_info = self.run_command(cmd + ['--' + arg], cwd=os.path.dirname(setupscript))
                    except (OSError, subprocess.CalledProcessError):
                        # Best effort: skip fields setup.py cannot report
                        pass
                    else:
                        info[key] = [l.rstrip() for l in arg_info.splitlines()]
            elif index == (False, True):
                # Entire output for each arg
                for key in keys:
                    arg = self.setuparg_map.get(key, key.lower())
                    try:
                        arg_info = self.run_command(cmd + ['--' + arg], cwd=os.path.dirname(setupscript))
                    except (OSError, subprocess.CalledProcessError):
                        pass
                    else:
                        info[key] = arg_info
            else:
                # Plain single-line fields: fetch them all in one invocation
                info.update(self.get_setup_byline(list(keys), setupscript))
        return info
| 487 | |||
| 488 | def get_setup_byline(self, fields, setupscript='./setup.py'): | ||
| 489 | info = {} | ||
| 490 | |||
| 491 | cmd = ['python3', setupscript] | ||
| 492 | cmd.extend('--' + self.setuparg_map.get(f, f.lower()) for f in fields) | ||
| 493 | try: | ||
| 494 | info_lines = self.run_command(cmd, cwd=os.path.dirname(setupscript)).splitlines() | ||
| 495 | except (OSError, subprocess.CalledProcessError): | ||
| 496 | pass | ||
| 497 | else: | ||
| 498 | if len(fields) != len(info_lines): | ||
| 499 | logger.error('Mismatch between setup.py output lines and number of fields') | ||
| 500 | sys.exit(1) | ||
| 501 | |||
| 502 | for lineno, line in enumerate(info_lines): | ||
| 503 | line = line.rstrip() | ||
| 504 | info[fields[lineno]] = line | ||
| 505 | return info | ||
| 506 | |||
| 507 | def get_pkginfo(self, pkginfo_fn): | ||
| 508 | msg = email.message_from_file(open(pkginfo_fn, 'r')) | ||
| 509 | msginfo = {} | ||
| 510 | for field in msg.keys(): | ||
| 511 | values = msg.get_all(field) | ||
| 512 | if len(values) == 1: | ||
| 513 | msginfo[field] = values[0] | ||
| 514 | else: | ||
| 515 | msginfo[field] = values | ||
| 516 | return msginfo | ||
| 517 | |||
    def scan_setup_python_deps(self, srctree, setup_info, setup_non_literals):
        """Determine runtime dependencies by scanning the sources referenced
        from setup() (py_modules, packages, scripts).

        Returns (mapped_deps, unmapped_deps): dependencies resolved to target
        package names via pkgdata, and raw import names that could not be
        resolved.
        """
        if 'Package-dir' in setup_info:
            package_dir = setup_info['Package-dir']
        else:
            package_dir = {}

        dist = setuptools.Distribution()

        # Minimal build_py command object: only get_package_dir() is needed,
        # so package_dir is seeded directly instead of going through the
        # usual finalize_options() machinery.
        class PackageDir(setuptools.command.build_py.build_py):
            def __init__(self, package_dir):
                self.package_dir = package_dir
                self.dist = dist
                super().__init__(self.dist)

        pd = PackageDir(package_dir)
        to_scan = []
        # Only trust the literal values; any non-literal forces a full scan
        if not any(v in setup_non_literals for v in ['Py-modules', 'Scripts', 'Packages']):
            if 'Py-modules' in setup_info:
                for module in setup_info['Py-modules']:
                    try:
                        package, module = module.rsplit('.', 1)
                    except ValueError:
                        # Top-level module: no package component
                        package, module = '.', module
                    module_path = os.path.join(pd.get_package_dir(package), module + '.py')
                    to_scan.append(module_path)

            if 'Packages' in setup_info:
                for package in setup_info['Packages']:
                    to_scan.append(pd.get_package_dir(package))

            if 'Scripts' in setup_info:
                to_scan.extend(setup_info['Scripts'])
        else:
            logger.info("Scanning the entire source tree, as one or more of the following setup keywords are non-literal: py_modules, scripts, packages.")

        if not to_scan:
            to_scan = ['.']

        logger.info("Scanning paths for packages & dependencies: %s", ', '.join(to_scan))

        provided_packages = self.parse_pkgdata_for_python_packages()
        scanned_deps = self.scan_python_dependencies([os.path.join(srctree, p) for p in to_scan])
        mapped_deps, unmapped_deps = set(self.base_pkgdeps), set()
        for dep in scanned_deps:
            mapped = provided_packages.get(dep)
            if mapped:
                logger.debug('Mapped %s to %s' % (dep, mapped))
                mapped_deps.add(mapped)
            else:
                logger.debug('Could not map %s' % dep)
                unmapped_deps.add(dep)
        return mapped_deps, unmapped_deps
| 570 | |||
| 571 | def process(self, srctree, classes, lines_before, lines_after, handled, extravalues): | ||
| 572 | |||
| 573 | if 'buildsystem' in handled: | ||
| 574 | return False | ||
| 575 | |||
| 576 | logger.debug("Trying setup.py parser") | ||
| 577 | |||
| 578 | # Check for non-zero size setup.py files | ||
| 579 | setupfiles = RecipeHandler.checkfiles(srctree, ['setup.py']) | ||
| 580 | for fn in setupfiles: | ||
| 581 | if os.path.getsize(fn): | ||
| 582 | break | ||
| 583 | else: | ||
| 584 | logger.debug("No setup.py found") | ||
| 585 | return False | ||
| 586 | |||
| 587 | # setup.py is always parsed to get at certain required information, such as | ||
| 588 | # distutils vs setuptools | ||
| 589 | # | ||
| 590 | # If egg info is available, we use it for both its PKG-INFO metadata | ||
| 591 | # and for its requires.txt for install_requires. | ||
| 592 | # If PKG-INFO is available but no egg info is, we use that for metadata in preference to | ||
| 593 | # the parsed setup.py, but use the install_requires info from the | ||
| 594 | # parsed setup.py. | ||
| 595 | |||
| 596 | setupscript = os.path.join(srctree, 'setup.py') | ||
| 597 | try: | ||
| 598 | setup_info, uses_setuptools, setup_non_literals, extensions = self.parse_setup_py(setupscript) | ||
| 599 | except Exception: | ||
| 600 | logger.exception("Failed to parse setup.py") | ||
| 601 | setup_info, uses_setuptools, setup_non_literals, extensions = {}, True, [], [] | ||
| 602 | |||
| 603 | egginfo = glob.glob(os.path.join(srctree, '*.egg-info')) | ||
| 604 | if egginfo: | ||
| 605 | info = self.get_pkginfo(os.path.join(egginfo[0], 'PKG-INFO')) | ||
| 606 | requires_txt = os.path.join(egginfo[0], 'requires.txt') | ||
| 607 | if os.path.exists(requires_txt): | ||
| 608 | with codecs.open(requires_txt) as f: | ||
| 609 | inst_req = [] | ||
| 610 | extras_req = collections.defaultdict(list) | ||
| 611 | current_feature = None | ||
| 612 | for line in f.readlines(): | ||
| 613 | line = line.rstrip() | ||
| 614 | if not line: | ||
| 615 | continue | ||
| 616 | |||
| 617 | if line.startswith('['): | ||
| 618 | # PACKAGECONFIG must not contain expressions or whitespace | ||
| 619 | line = line.replace(" ", "") | ||
| 620 | line = line.replace(':', "") | ||
| 621 | line = line.replace('.', "-dot-") | ||
| 622 | line = line.replace('"', "") | ||
| 623 | line = line.replace('<', "-smaller-") | ||
| 624 | line = line.replace('>', "-bigger-") | ||
| 625 | line = line.replace('_', "-") | ||
| 626 | line = line.replace('(', "") | ||
| 627 | line = line.replace(')', "") | ||
| 628 | line = line.replace('!', "-not-") | ||
| 629 | line = line.replace('=', "-equals-") | ||
| 630 | current_feature = line[1:-1] | ||
| 631 | elif current_feature: | ||
| 632 | extras_req[current_feature].append(line) | ||
| 633 | else: | ||
| 634 | inst_req.append(line) | ||
| 635 | info['Install-requires'] = inst_req | ||
| 636 | info['Extras-require'] = extras_req | ||
| 637 | elif RecipeHandler.checkfiles(srctree, ['PKG-INFO']): | ||
| 638 | info = self.get_pkginfo(os.path.join(srctree, 'PKG-INFO')) | ||
| 639 | |||
| 640 | if setup_info: | ||
| 641 | if 'Install-requires' in setup_info: | ||
| 642 | info['Install-requires'] = setup_info['Install-requires'] | ||
| 643 | if 'Extras-require' in setup_info: | ||
| 644 | info['Extras-require'] = setup_info['Extras-require'] | ||
| 645 | else: | ||
| 646 | if setup_info: | ||
| 647 | info = setup_info | ||
| 648 | else: | ||
| 649 | info = self.get_setup_args_info(setupscript) | ||
| 650 | |||
| 651 | # Grab the license value before applying replacements | ||
| 652 | license_str = info.get('License', '').strip() | ||
| 653 | |||
| 654 | self.apply_info_replacements(info) | ||
| 655 | |||
| 656 | if uses_setuptools: | ||
| 657 | classes.append('setuptools3') | ||
| 658 | else: | ||
| 659 | classes.append('distutils3') | ||
| 660 | |||
| 661 | if license_str: | ||
| 662 | for i, line in enumerate(lines_before): | ||
| 663 | if line.startswith('##LICENSE_PLACEHOLDER##'): | ||
| 664 | lines_before.insert(i, '# NOTE: License in setup.py/PKGINFO is: %s' % license_str) | ||
| 665 | break | ||
| 666 | |||
| 667 | if 'Classifier' in info: | ||
| 668 | license = self.handle_classifier_license(info['Classifier'], info.get('License', '')) | ||
| 669 | if license: | ||
| 670 | info['License'] = license | ||
| 671 | |||
| 672 | self.map_info_to_bbvar(info, extravalues) | ||
| 673 | |||
| 674 | mapped_deps, unmapped_deps = self.scan_setup_python_deps(srctree, setup_info, setup_non_literals) | ||
| 675 | |||
| 676 | extras_req = set() | ||
| 677 | if 'Extras-require' in info: | ||
| 678 | extras_req = info['Extras-require'] | ||
| 679 | if extras_req: | ||
| 680 | lines_after.append('# The following configs & dependencies are from setuptools extras_require.') | ||
| 681 | lines_after.append('# These dependencies are optional, hence can be controlled via PACKAGECONFIG.') | ||
| 682 | lines_after.append('# The upstream names may not correspond exactly to bitbake package names.') | ||
| 683 | lines_after.append('# The configs are might not correct, since PACKAGECONFIG does not support expressions as may used in requires.txt - they are just replaced by text.') | ||
| 684 | lines_after.append('#') | ||
| 685 | lines_after.append('# Uncomment this line to enable all the optional features.') | ||
| 686 | lines_after.append('#PACKAGECONFIG ?= "{}"'.format(' '.join(k.lower() for k in extras_req))) | ||
| 687 | for feature, feature_reqs in extras_req.items(): | ||
| 688 | unmapped_deps.difference_update(feature_reqs) | ||
| 689 | |||
| 690 | feature_req_deps = ('python3-' + r.replace('.', '-').lower() for r in sorted(feature_reqs)) | ||
| 691 | lines_after.append('PACKAGECONFIG[{}] = ",,,{}"'.format(feature.lower(), ' '.join(feature_req_deps))) | ||
| 692 | |||
| 693 | inst_reqs = set() | ||
| 694 | if 'Install-requires' in info: | ||
| 695 | if extras_req: | ||
| 696 | lines_after.append('') | ||
| 697 | inst_reqs = info['Install-requires'] | ||
| 698 | if inst_reqs: | ||
| 699 | unmapped_deps.difference_update(inst_reqs) | ||
| 700 | |||
| 701 | inst_req_deps = ('python3-' + r.replace('.', '-').lower() for r in sorted(inst_reqs)) | ||
| 702 | lines_after.append('# WARNING: the following rdepends are from setuptools install_requires. These') | ||
| 703 | lines_after.append('# upstream names may not correspond exactly to bitbake package names.') | ||
| 704 | lines_after.append('RDEPENDS:${{PN}} += "{}"'.format(' '.join(inst_req_deps))) | ||
| 705 | |||
| 706 | if mapped_deps: | ||
| 707 | name = info.get('Name') | ||
| 708 | if name and name[0] in mapped_deps: | ||
| 709 | # Attempt to avoid self-reference | ||
| 710 | mapped_deps.remove(name[0]) | ||
| 711 | mapped_deps -= set(self.excluded_pkgdeps) | ||
| 712 | if inst_reqs or extras_req: | ||
| 713 | lines_after.append('') | ||
| 714 | lines_after.append('# WARNING: the following rdepends are determined through basic analysis of the') | ||
| 715 | lines_after.append('# python sources, and might not be 100% accurate.') | ||
| 716 | lines_after.append('RDEPENDS:${{PN}} += "{}"'.format(' '.join(sorted(mapped_deps)))) | ||
| 717 | |||
| 718 | unmapped_deps -= set(extensions) | ||
| 719 | unmapped_deps -= set(self.assume_provided) | ||
| 720 | if unmapped_deps: | ||
| 721 | if mapped_deps: | ||
| 722 | lines_after.append('') | ||
| 723 | lines_after.append('# WARNING: We were unable to map the following python package/module') | ||
| 724 | lines_after.append('# dependencies to the bitbake packages which include them:') | ||
| 725 | lines_after.extend('# {}'.format(d) for d in sorted(unmapped_deps)) | ||
| 726 | |||
| 727 | handled.append('buildsystem') | ||
| 728 | |||
class PythonPyprojectTomlRecipeHandler(PythonRecipeHandler):
    """Base class to support PEP517 and PEP518

    PEP517 https://peps.python.org/pep-0517/#source-trees
    PEP518 https://peps.python.org/pep-0518/#build-system-table
    """
    # Build backends bitbake currently supports, mapped to the bbclass
    # implementing each of them
    build_backend_map = {
        "setuptools.build_meta": "python_setuptools_build_meta",
        "poetry.core.masonry.api": "python_poetry_core",
        "flit_core.buildapi": "python_flit_core",
        "hatchling.build": "python_hatchling",
        "maturin": "python_maturin",
        "mesonpy": "python_mesonpy",
    }

    # setuptools.build_meta and flit declare project metadata into the "project" section of pyproject.toml
    # according to PEP-621: https://packaging.python.org/en/latest/specifications/declaring-project-metadata/#declaring-project-metadata
    # while poetry uses the "tool.poetry" section according to its official documentation: https://python-poetry.org/docs/pyproject/
    # keys from "project" and "tool.poetry" sections are almost the same except for the HOMEPAGE which is "homepage" for tool.poetry
    # and "Homepage" for "project" section. So keep both
    bbvar_map = {
        "name": "PN",
        "version": "PV",
        "Homepage": "HOMEPAGE",
        "homepage": "HOMEPAGE",
        "description": "SUMMARY",
        "license": "LICENSE",
        "dependencies": "RDEPENDS:${PN}",
        "requires": "DEPENDS",
    }

    # Regex cleanups applied to the collected metadata before it is mapped
    # to bitbake variables (see PythonRecipeHandler.apply_info_replacements)
    replacements = [
        ("license", r" +$", ""),
        ("license", r"^ +", ""),
        ("license", r" ", "-"),
        ("license", r"^GNU-", ""),
        ("license", r"-[Ll]icen[cs]e(,?-[Vv]ersion)?", ""),
        ("license", r"^UNKNOWN$", ""),
        # Remove currently unhandled version numbers from these variables
        ("requires", r"\[[^\]]+\]$", ""),
        ("requires", r"^([^><= ]+).*", r"\1"),
        ("dependencies", r"\[[^\]]+\]$", ""),
        ("dependencies", r"^([^><= ]+).*", r"\1"),
    ]

    excluded_native_pkgdeps = [
        # already provided by python_setuptools_build_meta.bbclass
        "python3-setuptools-native",
        "python3-wheel-native",
        # already provided by python_poetry_core.bbclass
        "python3-poetry-core-native",
        # already provided by python_flit_core.bbclass
        "python3-flit-core-native",
        # already provided by python_mesonpy
        "python3-meson-python-native",
    ]

    # add here a list of known and often used packages and the corresponding bitbake package
    known_deps_map = {
        "setuptools": "python3-setuptools",
        "wheel": "python3-wheel",
        "poetry-core": "python3-poetry-core",
        "flit_core": "python3-flit-core",
        "setuptools-scm": "python3-setuptools-scm",
        "hatchling": "python3-hatchling",
        "hatch-vcs": "python3-hatch-vcs",
        "meson-python": "python3-meson-python",
    }

    def __init__(self):
        pass

    def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
        """Populate recipe data from a PEP517/PEP518 pyproject.toml.

        Returns False whenever pyproject.toml is missing, unparseable or
        uses an unsupported build backend so that lower-priority handlers
        (e.g. the legacy setup.py handler) get a chance to run. On success
        'buildsystem' is appended to handled.
        """
        info = {}
        metadata = {}

        if 'buildsystem' in handled:
            return False

        logger.debug("Trying pyproject.toml parser")

        # Only proceed if there is a non-zero size pyproject.toml
        setupfiles = RecipeHandler.checkfiles(srctree, ["pyproject.toml"])
        for fn in setupfiles:
            if os.path.getsize(fn):
                break
        else:
            logger.debug("No pyproject.toml found")
            return False

        setupscript = os.path.join(srctree, "pyproject.toml")

        try:
            # tomllib is in the standard library from Python 3.11; fall
            # back to the third-party tomli module on older hosts
            try:
                import tomllib
            except ImportError:
                try:
                    import tomli as tomllib
                except ImportError:
                    logger.error("Neither 'tomllib' nor 'tomli' could be imported, cannot scan pyproject.toml.")
                    return False

            try:
                with open(setupscript, "rb") as f:
                    config = tomllib.load(f)
            except Exception:
                logger.exception("Failed to parse pyproject.toml")
                return False

            build_backend = config["build-system"]["build-backend"]
            if build_backend in self.build_backend_map:
                classes.append(self.build_backend_map[build_backend])
            else:
                logger.error(
                    "Unsupported build-backend: %s, cannot use pyproject.toml. Will try to use legacy setup.py"
                    % build_backend
                )
                return False

            licfile = ""

            # poetry keeps its metadata in [tool.poetry]; PEP-621 backends
            # use the top-level [project] table
            if build_backend == "poetry.core.masonry.api":
                if "tool" in config and "poetry" in config["tool"]:
                    metadata = config["tool"]["poetry"]
            else:
                if "project" in config:
                    metadata = config["project"]

            if metadata:
                for field, values in metadata.items():
                    if field == "license":
                        # For setuptools.build_meta and flit, licence is a table
                        # but for poetry licence is a string
                        # for hatchling, both table (jsonschema) and string (iniconfig) have been used
                        # so accept a plain string from any backend instead of
                        # assuming only poetry uses the string form
                        if isinstance(values, str):
                            value = values
                        else:
                            value = values.get("text", "")
                            if not value:
                                licfile = values.get("file", "")
                                continue
                    elif field == "dependencies" and build_backend == "poetry.core.masonry.api":
                        # For poetry backend, "dependencies" section looks like:
                        # [tool.poetry.dependencies]
                        # requests = "^2.13.0"
                        # requests = { version = "^2.13.0", source = "private" }
                        # See https://python-poetry.org/docs/master/pyproject/#dependencies-and-dependency-groups for more details
                        # This class doesn't handle versions anyway, so we just get the dependencies name here and construct a list
                        value = list(values.keys())
                    elif isinstance(values, dict):
                        # Flatten nested tables into individual info entries
                        for k, v in values.items():
                            info[k] = v
                        continue
                    else:
                        value = values

                    info[field] = value

            # Grab the license value before applying replacements
            license_str = info.get("license", "").strip()

            if license_str:
                for i, line in enumerate(lines_before):
                    if line.startswith("##LICENSE_PLACEHOLDER##"):
                        lines_before.insert(
                            i, "# NOTE: License in pyproject.toml is: %s" % license_str
                        )
                        break

            # PEP-518 build-time requirements become DEPENDS candidates
            info["requires"] = config["build-system"]["requires"]

            self.apply_info_replacements(info)

            if "classifiers" in info:
                license = self.handle_classifier_license(
                    info["classifiers"], info.get("license", "")
                )
                if license:
                    if licfile:
                        lines = []
                        md5value = bb.utils.md5_file(os.path.join(srctree, licfile))
                        lines.append('LICENSE = "%s"' % license)
                        lines.append(
                            'LIC_FILES_CHKSUM = "file://%s;md5=%s"'
                            % (licfile, md5value)
                        )
                        lines.append("")

                        # Replace the placeholder so we get the values in the right place in the recipe file
                        try:
                            pos = lines_before.index("##LICENSE_PLACEHOLDER##")
                        except ValueError:
                            pos = -1
                        if pos == -1:
                            lines_before.extend(lines)
                        else:
                            lines_before[pos : pos + 1] = lines

                        handled.append(("license", [license, licfile, md5value]))
                    else:
                        info["license"] = license

            provided_packages = self.parse_pkgdata_for_python_packages()
            provided_packages.update(self.known_deps_map)
            native_mapped_deps, native_unmapped_deps = set(), set()
            mapped_deps, unmapped_deps = set(), set()

            if "requires" in info:
                for require in info["requires"]:
                    mapped = provided_packages.get(require)

                    if mapped:
                        logger.debug("Mapped %s to %s" % (require, mapped))
                        native_mapped_deps.add(mapped)
                    else:
                        logger.debug("Could not map %s" % require)
                        native_unmapped_deps.add(require)

                info.pop("requires")

                if native_mapped_deps:
                    # Build-time deps need the -native variant; some are
                    # already pulled in by the selected bbclass
                    native_mapped_deps = {
                        item + "-native" for item in native_mapped_deps
                    }
                    native_mapped_deps -= set(self.excluded_native_pkgdeps)
                    if native_mapped_deps:
                        info["requires"] = " ".join(sorted(native_mapped_deps))

                if native_unmapped_deps:
                    lines_after.append("")
                    lines_after.append(
                        "# WARNING: We were unable to map the following python package/module"
                    )
                    lines_after.append(
                        "# dependencies to the bitbake packages which include them:"
                    )
                    lines_after.extend(
                        "# {}".format(d) for d in sorted(native_unmapped_deps)
                    )

            if "dependencies" in info:
                for dependency in info["dependencies"]:
                    mapped = provided_packages.get(dependency)
                    if mapped:
                        logger.debug("Mapped %s to %s" % (dependency, mapped))
                        mapped_deps.add(mapped)
                    else:
                        logger.debug("Could not map %s" % dependency)
                        unmapped_deps.add(dependency)

                info.pop("dependencies")

                # (was a duplicated, identical nested condition)
                if mapped_deps:
                    info["dependencies"] = " ".join(sorted(mapped_deps))

                if unmapped_deps:
                    lines_after.append("")
                    lines_after.append(
                        "# WARNING: We were unable to map the following python package/module"
                    )
                    lines_after.append(
                        "# runtime dependencies to the bitbake packages which include them:"
                    )
                    lines_after.extend(
                        "# {}".format(d) for d in sorted(unmapped_deps)
                    )

            self.map_info_to_bbvar(info, extravalues)

            handled.append("buildsystem")
        except Exception:
            logger.exception("Failed to correctly handle pyproject.toml, falling back to another method")
            return False
| 1006 | |||
| 1007 | |||
def gather_setup_info(fileobj):
    """Statically analyse a setup.py file object.

    Returns a 4-tuple of (setup keyword arguments reduced to literals,
    imported module names, keyword arguments that could not be reduced,
    Extension names found in ext_modules).
    """
    tree = ast.parse(fileobj.read(), fileobj.name)
    visitor = SetupScriptVisitor()
    visitor.visit(tree)

    non_literals = {}
    extensions = []
    # Iterate over a snapshot of the keys since entries may be removed
    for key in list(visitor.keywords):
        value = visitor.keywords[key]
        if key == 'ext_modules':
            if isinstance(value, list):
                # Only keep Extension(...) calls whose arguments are all
                # literals; the first argument is the extension name
                extensions.extend(
                    ext.args[0]
                    for ext in value
                    if isinstance(ext, ast.Call)
                    and isinstance(ext.func, ast.Name)
                    and ext.func.id == 'Extension'
                    and not has_non_literals(ext.args)
                )
        elif has_non_literals(value):
            # Move unreduceable values aside so callers see only literals
            non_literals[key] = value
            del visitor.keywords[key]

    return visitor.keywords, visitor.imported_modules, non_literals, extensions
| 1028 | |||
| 1029 | |||
class SetupScriptVisitor(ast.NodeVisitor):
    """AST visitor that records setup() keyword arguments and imports."""

    def __init__(self):
        ast.NodeVisitor.__init__(self)
        # setup() keyword arguments, reduced to literals where possible
        self.keywords = {}
        # names of keywords whose values could not be reduced to literals
        self.non_literals = []
        # modules imported anywhere in the script
        self.imported_modules = set()

    def visit_Expr(self, node):
        # Look for a top-level call of the form setup(...)
        call = node.value
        if (isinstance(call, ast.Call)
                and isinstance(call.func, ast.Name)
                and call.func.id == 'setup'):
            self.visit_setup(call)

    def visit_setup(self, node):
        # Reduce literal subtrees of the call, then note which keyword
        # values are still raw AST nodes
        simplified = LiteralAstTransform().visit(node)
        self.keywords = simplified.keywords
        self.non_literals.extend(
            key for key, value in self.keywords.items()
            if has_non_literals(value)
        )

    def visit_Import(self, node):
        self.imported_modules.update(alias.name for alias in node.names)

    def visit_ImportFrom(self, node):
        self.imported_modules.add(node.module)
| 1056 | |||
| 1057 | |||
class LiteralAstTransform(ast.NodeTransformer):
    """Simplify the ast through evaluation of literals.

    Visiting a node yields either a plain Python value (when the subtree is
    fully literal) or the original AST node with its literal children
    evaluated in place.
    """
    # 'ctx' (Load/Store) is irrelevant for our purposes, so drop it
    excluded_fields = ['ctx']

    def visit(self, node):
        # Children already reduced to plain Python objects pass through
        if not isinstance(node, ast.AST):
            return node
        else:
            return ast.NodeTransformer.visit(self, node)

    def generic_visit(self, node):
        try:
            # Fast path: the whole subtree is a literal
            return ast.literal_eval(node)
        except ValueError:
            # Not purely literal: reduce each field individually
            for field, value in ast.iter_fields(node):
                if field in self.excluded_fields:
                    delattr(node, field)
                if value is None:
                    continue

                if isinstance(value, list):
                    if field in ('keywords', 'kwargs'):
                        # Reduce keyword arguments to a {name: value} dict
                        new_value = dict((kw.arg, self.visit(kw.value)) for kw in value)
                    else:
                        new_value = [self.visit(i) for i in value]
                else:
                    new_value = self.visit(value)
                setattr(node, field, new_value)
            return node

    def visit_Name(self, node):
        import builtins
        # BUG FIX: this used to be hasattr('__builtins__', node.id), which
        # tested attributes of the *string* '__builtins__' (i.e. str
        # methods such as 'title'), and the paired getattr(__builtins__, ...)
        # would then raise. Resolve names against the real builtins module.
        if hasattr(builtins, node.id):
            return getattr(builtins, node.id)
        else:
            return self.generic_visit(node)

    def visit_Tuple(self, node):
        return tuple(self.visit(v) for v in node.elts)

    def visit_List(self, node):
        return [self.visit(v) for v in node.elts]

    def visit_Set(self, node):
        return set(self.visit(v) for v in node.elts)

    def visit_Dict(self, node):
        keys = (self.visit(k) for k in node.keys)
        values = (self.visit(v) for v in node.values)
        return dict(zip(keys, values))
| 1107 | |||
| 1108 | |||
def has_non_literals(value):
    """Return a truthy value when *value*, or anything nested within it,
    is still an unevaluated AST node (i.e. LiteralAstTransform could not
    reduce it to a plain Python object)."""
    if isinstance(value, ast.AST):
        return True
    if isinstance(value, str):
        # strings are iterable but can never contain AST nodes
        return False
    # Recurse into mapping values or iterable elements, when present;
    # plain scalars fall through (implicitly returning None, i.e. falsy)
    children = None
    if hasattr(value, 'values'):
        children = value.values()
    elif hasattr(value, '__iter__'):
        children = value
    if children is not None:
        return any(has_non_literals(child) for child in children)
| 1118 | |||
| 1119 | |||
def register_recipe_handlers(handlers):
    """Register the python recipe handlers with recipetool.

    handlers is a list of (handler, priority) tuples; the priorities here
    are chosen relative to the other registered handlers (see note below).
    """
    # We need to make sure these are ahead of the makefile fallback handler
    # and the pyproject.toml handler ahead of the setup.py handler
    handlers.append((PythonPyprojectTomlRecipeHandler(), 75))
    handlers.append((PythonSetupPyRecipeHandler(), 70))
