Diffstat (limited to 'scripts/lib/recipetool')
 -rw-r--r--  scripts/lib/recipetool/__init__.py                    0
 -rw-r--r--  scripts/lib/recipetool/append.py                    477
 -rw-r--r--  scripts/lib/recipetool/create.py                   1439
 -rw-r--r--  scripts/lib/recipetool/create_buildsys.py           875
 -rw-r--r--  scripts/lib/recipetool/create_buildsys_python.py   1124
 -rw-r--r--  scripts/lib/recipetool/create_go.py                 777
 -rw-r--r--  scripts/lib/recipetool/create_kernel.py              89
 -rw-r--r--  scripts/lib/recipetool/create_kmod.py               142
 -rw-r--r--  scripts/lib/recipetool/create_npm.py                299
 -rw-r--r--  scripts/lib/recipetool/edit.py                       44
 -rw-r--r--  scripts/lib/recipetool/licenses.csv                  37
 -rw-r--r--  scripts/lib/recipetool/newappend.py                  79
 -rw-r--r--  scripts/lib/recipetool/setvar.py                     66
13 files changed, 0 insertions, 5448 deletions
diff --git a/scripts/lib/recipetool/__init__.py b/scripts/lib/recipetool/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
--- a/scripts/lib/recipetool/__init__.py
+++ /dev/null
diff --git a/scripts/lib/recipetool/append.py b/scripts/lib/recipetool/append.py
deleted file mode 100644
index 10945d6008..0000000000
--- a/scripts/lib/recipetool/append.py
+++ /dev/null
| @@ -1,477 +0,0 @@ | |||
-# Recipe creation tool - append plugin
-#
-# Copyright (C) 2015 Intel Corporation
-#
-# SPDX-License-Identifier: GPL-2.0-only
-#
-
-import sys
-import os
-import argparse
-import glob
-import fnmatch
-import re
-import subprocess
-import logging
-import stat
-import shutil
-import scriptutils
-import errno
-from collections import defaultdict
-import difflib
-
-logger = logging.getLogger('recipetool')
-
-tinfoil = None
-
-def tinfoil_init(instance):
-    global tinfoil
-    tinfoil = instance
-
-
-# FIXME guessing when we don't have pkgdata?
-# FIXME mode to create patch rather than directly substitute
-
-class InvalidTargetFileError(Exception):
-    pass
-
-def find_target_file(targetpath, d, pkglist=None):
-    """Find the recipe installing the specified target path, optionally limited to a select list of packages"""
-    import json
-
-    pkgdata_dir = d.getVar('PKGDATA_DIR')
-
-    # The mix between /etc and ${sysconfdir} here may look odd, but it is just
-    # being consistent with usage elsewhere
-    invalidtargets = {'${sysconfdir}/version': '${sysconfdir}/version is written out at image creation time',
-                      '/etc/timestamp': '/etc/timestamp is written out at image creation time',
-                      '/dev/*': '/dev is handled by udev (or equivalent) and the kernel (devtmpfs)',
-                      '/etc/passwd': '/etc/passwd should be managed through the useradd and extrausers classes',
-                      '/etc/group': '/etc/group should be managed through the useradd and extrausers classes',
-                      '/etc/shadow': '/etc/shadow should be managed through the useradd and extrausers classes',
-                      '/etc/gshadow': '/etc/gshadow should be managed through the useradd and extrausers classes',
-                      '${sysconfdir}/hostname': '${sysconfdir}/hostname contents should be set by setting hostname:pn-base-files = "value" in configuration',}
-
-    for pthspec, message in invalidtargets.items():
-        if fnmatch.fnmatchcase(targetpath, d.expand(pthspec)):
-            raise InvalidTargetFileError(d.expand(message))
-
-    targetpath_re = re.compile(r'\s+(\$D)?%s(\s|$)' % targetpath)
-
-    recipes = defaultdict(list)
-    for root, dirs, files in os.walk(os.path.join(pkgdata_dir, 'runtime')):
-        if pkglist:
-            filelist = pkglist
-        else:
-            filelist = files
-        for fn in filelist:
-            pkgdatafile = os.path.join(root, fn)
-            if pkglist and not os.path.exists(pkgdatafile):
-                continue
-            with open(pkgdatafile, 'r') as f:
-                pn = ''
-                # This does assume that PN comes before other values, but that's a fairly safe assumption
-                for line in f:
-                    if line.startswith('PN:'):
-                        pn = line.split(': ', 1)[1].strip()
-                    elif line.startswith('FILES_INFO'):
-                        val = line.split(': ', 1)[1].strip()
-                        dictval = json.loads(val)
-                        for fullpth in dictval.keys():
-                            if fnmatch.fnmatchcase(fullpth, targetpath):
-                                recipes[targetpath].append(pn)
-                    elif line.startswith('pkg_preinst:') or line.startswith('pkg_postinst:'):
-                        scriptval = line.split(': ', 1)[1].strip().encode('utf-8').decode('unicode_escape')
-                        if 'update-alternatives --install %s ' % targetpath in scriptval:
-                            recipes[targetpath].append('?%s' % pn)
-                        elif targetpath_re.search(scriptval):
-                            recipes[targetpath].append('!%s' % pn)
-    return recipes
-
-def _parse_recipe(pn, tinfoil):
-    try:
-        rd = tinfoil.parse_recipe(pn)
-    except bb.providers.NoProvider as e:
-        logger.error(str(e))
-        return None
-    return rd
-
-def determine_file_source(targetpath, rd):
-    """Assuming we know a file came from a specific recipe, figure out exactly where it came from"""
-    import oe.recipeutils
-
-    # See if it's in do_install for the recipe
-    unpackdir = rd.getVar('UNPACKDIR')
-    src_uri = rd.getVar('SRC_URI')
-    srcfile = ''
-    modpatches = []
-    elements = check_do_install(rd, targetpath)
-    if elements:
-        logger.debug('do_install line:\n%s' % ' '.join(elements))
-        srcpath = get_source_path(elements)
-        logger.debug('source path: %s' % srcpath)
-        if not srcpath.startswith('/'):
-            # Handle non-absolute path
-            srcpath = os.path.abspath(os.path.join(rd.getVarFlag('do_install', 'dirs').split()[-1], srcpath))
-        if srcpath.startswith(unpackdir):
-            # OK, now we have the source file name, look for it in SRC_URI
-            workdirfile = os.path.relpath(srcpath, unpackdir)
-            # FIXME this is where we ought to have some code in the fetcher, because this is naive
-            for item in src_uri.split():
-                localpath = bb.fetch2.localpath(item, rd)
-                # Source path specified in do_install might be a glob
-                if fnmatch.fnmatch(os.path.basename(localpath), workdirfile):
-                    srcfile = 'file://%s' % localpath
-                elif '/' in workdirfile:
-                    if item == 'file://%s' % workdirfile:
-                        srcfile = 'file://%s' % localpath
-
-        # Check patches
-        srcpatches = []
-        patchedfiles = oe.recipeutils.get_recipe_patched_files(rd)
-        for patch, filelist in patchedfiles.items():
-            for fileitem in filelist:
-                if fileitem[0] == srcpath:
-                    srcpatches.append((patch, fileitem[1]))
-        if srcpatches:
-            addpatch = None
-            for patch in srcpatches:
-                if patch[1] == 'A':
-                    addpatch = patch[0]
-                else:
-                    modpatches.append(patch[0])
-            if addpatch:
-                srcfile = 'patch://%s' % addpatch
-
-    return (srcfile, elements, modpatches)
-
-def get_source_path(cmdelements):
-    """Find the source path specified within a command"""
-    command = cmdelements[0]
-    if command in ['install', 'cp']:
-        helptext = subprocess.check_output('LC_ALL=C %s --help' % command, shell=True).decode('utf-8')
-        argopts = ''
-        argopt_line_re = re.compile('^-([a-zA-Z0-9]), --[a-z-]+=')
-        for line in helptext.splitlines():
-            line = line.lstrip()
-            res = argopt_line_re.search(line)
-            if res:
-                argopts += res.group(1)
-        if not argopts:
-            # Fallback
-            if command == 'install':
-                argopts = 'gmoSt'
-            elif command == 'cp':
-                argopts = 't'
-            else:
-                raise Exception('No fallback arguments for command %s' % command)
-
-        skipnext = False
-        for elem in cmdelements[1:-1]:
-            if elem.startswith('-'):
-                if len(elem) > 1 and elem[1] in argopts:
-                    skipnext = True
-                continue
-            if skipnext:
-                skipnext = False
-                continue
-            return elem
-    else:
-        raise Exception('get_source_path: no handling for command "%s"' % command)
-
-def get_func_deps(func, d):
-    """Find the function dependencies of a shell function"""
-    deps = bb.codeparser.ShellParser(func, logger).parse_shell(d.getVar(func))
-    deps |= set((d.getVarFlag(func, "vardeps") or "").split())
-    funcdeps = []
-    for dep in deps:
-        if d.getVarFlag(dep, 'func'):
-            funcdeps.append(dep)
-    return funcdeps
-
-def check_do_install(rd, targetpath):
-    """Look at do_install for a command that installs/copies the specified target path"""
-    instpath = os.path.abspath(os.path.join(rd.getVar('D'), targetpath.lstrip('/')))
-    do_install = rd.getVar('do_install')
-    # Handle where do_install calls other functions (somewhat crudely, but good enough for this purpose)
-    deps = get_func_deps('do_install', rd)
-    for dep in deps:
-        do_install = do_install.replace(dep, rd.getVar(dep))
-
-    # Look backwards through do_install as we want to catch where a later line (perhaps
-    # from a bbappend) is writing over the top
-    for line in reversed(do_install.splitlines()):
-        line = line.strip()
-        if (line.startswith('install ') and ' -m' in line) or line.startswith('cp '):
-            elements = line.split()
-            destpath = os.path.abspath(elements[-1])
-            if destpath == instpath:
-                return elements
-            elif destpath.rstrip('/') == os.path.dirname(instpath):
-                # FIXME this doesn't take recursive copy into account; unsure if it's practical to do so
-                srcpath = get_source_path(elements)
-                if fnmatch.fnmatchcase(os.path.basename(instpath), os.path.basename(srcpath)):
-                    return elements
-    return None
-
-
-def appendfile(args):
-    import oe.recipeutils
-
-    stdout = ''
-    try:
-        (stdout, _) = bb.process.run('LANG=C file -b %s' % args.newfile, shell=True)
-        if 'cannot open' in stdout:
-            raise bb.process.ExecutionError(stdout)
-    except bb.process.ExecutionError as err:
-        logger.debug('file command returned error: %s' % err)
-        stdout = ''
-    if stdout:
-        logger.debug('file command output: %s' % stdout.rstrip())
-        if ('executable' in stdout and not 'shell script' in stdout) or 'shared object' in stdout:
-            logger.warning('This file looks like it is a binary or otherwise the output of compilation. If it is, you should consider building it properly instead of substituting a binary file directly.')
-
-    if args.recipe:
-        recipes = {args.targetpath: [args.recipe],}
-    else:
-        try:
-            recipes = find_target_file(args.targetpath, tinfoil.config_data)
-        except InvalidTargetFileError as e:
-            logger.error('%s cannot be handled by this tool: %s' % (args.targetpath, e))
-            return 1
-        if not recipes:
-            logger.error('Unable to find any package producing path %s - this may be because the recipe packaging it has not been built yet' % args.targetpath)
-            return 1
-
-    alternative_pns = []
-    postinst_pns = []
-
-    selectpn = None
-    for targetpath, pnlist in recipes.items():
-        for pn in pnlist:
-            if pn.startswith('?'):
-                alternative_pns.append(pn[1:])
-            elif pn.startswith('!'):
-                postinst_pns.append(pn[1:])
-            elif selectpn:
-                # hit here with multilibs
-                continue
-            else:
-                selectpn = pn
-
-    if not selectpn and len(alternative_pns) == 1:
-        selectpn = alternative_pns[0]
-        logger.error('File %s is an alternative possibly provided by recipe %s but seemingly no other, selecting it by default - you should double check other recipes' % (args.targetpath, selectpn))
-
-    if selectpn:
-        logger.debug('Selecting recipe %s for file %s' % (selectpn, args.targetpath))
-        if postinst_pns:
-            logger.warning('%s may be modified by postinstall scripts for the following recipes:\n %s\nThis may or may not be an issue depending on what modifications these postinstall scripts make.' % (args.targetpath, '\n '.join(postinst_pns)))
-        rd = _parse_recipe(selectpn, tinfoil)
-        if not rd:
-            # Error message already shown
-            return 1
-        sourcefile, instelements, modpatches = determine_file_source(args.targetpath, rd)
-        sourcepath = None
-        if sourcefile:
-            sourcetype, sourcepath = sourcefile.split('://', 1)
-            logger.debug('Original source file is %s (%s)' % (sourcepath, sourcetype))
-            if sourcetype == 'patch':
-                logger.warning('File %s is added by the patch %s - you may need to remove or replace this patch in order to replace the file.' % (args.targetpath, sourcepath))
-                sourcepath = None
-        else:
-            logger.debug('Unable to determine source file, proceeding anyway')
-        if modpatches:
-            logger.warning('File %s is modified by the following patches:\n %s' % (args.targetpath, '\n '.join(modpatches)))
-
-        if instelements and sourcepath:
-            install = None
-        else:
-            # Auto-determine permissions
-            # Check destination
-            binpaths = '${bindir}:${sbindir}:${base_bindir}:${base_sbindir}:${libexecdir}:${sysconfdir}/init.d'
-            perms = '0644'
-            if os.path.abspath(os.path.dirname(args.targetpath)) in rd.expand(binpaths).split(':'):
-                # File is going into a directory normally reserved for executables, so it should be executable
-                perms = '0755'
-            else:
-                # Check source
-                st = os.stat(args.newfile)
-                if st.st_mode & stat.S_IXUSR:
-                    perms = '0755'
-            install = {args.newfile: (args.targetpath, perms)}
-        if sourcepath:
-            sourcepath = os.path.basename(sourcepath)
-        oe.recipeutils.bbappend_recipe(rd, args.destlayer, {args.newfile: {'newname' : sourcepath}}, install, wildcardver=args.wildcard_version, machine=args.machine)
-        tinfoil.modified_files()
-        return 0
-    else:
-        if alternative_pns:
-            logger.error('File %s is an alternative possibly provided by the following recipes:\n %s\nPlease select recipe with -r/--recipe' % (targetpath, '\n '.join(alternative_pns)))
-        elif postinst_pns:
-            logger.error('File %s may be written out in a pre/postinstall script of the following recipes:\n %s\nPlease select recipe with -r/--recipe' % (targetpath, '\n '.join(postinst_pns)))
-        return 3
-
-
-def appendsrc(args, files, rd, extralines=None):
-    import oe.recipeutils
-
-    srcdir = rd.getVar('S')
-    workdir = rd.getVar('WORKDIR')
-
-    import bb.fetch
-    simplified = {}
-    src_uri = rd.getVar('SRC_URI').split()
-    for uri in src_uri:
-        if uri.endswith(';'):
-            uri = uri[:-1]
-        simple_uri = bb.fetch.URI(uri)
-        simple_uri.params = {}
-        simplified[str(simple_uri)] = uri
-
-    copyfiles = {}
-    extralines = extralines or []
-    params = []
-    for newfile, srcfile in files.items():
-        src_destdir = os.path.dirname(srcfile)
-        if not args.use_workdir:
-            if rd.getVar('S') == rd.getVar('STAGING_KERNEL_DIR'):
-                srcdir = os.path.join(workdir, 'git')
-                if not bb.data.inherits_class('kernel-yocto', rd):
-                    logger.warning('S == STAGING_KERNEL_DIR and non-kernel-yocto, unable to determine path to srcdir, defaulting to ${WORKDIR}/git')
-            src_destdir = os.path.join(os.path.relpath(srcdir, workdir), src_destdir)
-        src_destdir = os.path.normpath(src_destdir)
-
-        if src_destdir and src_destdir != '.':
-            params.append({'subdir': src_destdir})
-        else:
-            params.append({})
-
-        copyfiles[newfile] = {'newname' : os.path.basename(srcfile)}
-
-    dry_run_output = None
-    dry_run_outdir = None
-    if args.dry_run:
-        import tempfile
-        dry_run_output = tempfile.TemporaryDirectory(prefix='devtool')
-        dry_run_outdir = dry_run_output.name
-
-    appendfile, _ = oe.recipeutils.bbappend_recipe(rd, args.destlayer, copyfiles, None, wildcardver=args.wildcard_version, machine=args.machine, extralines=extralines, params=params,
-                                                   redirect_output=dry_run_outdir, update_original_recipe=args.update_recipe)
-    if not appendfile:
-        return
-    if args.dry_run:
-        output = ''
-        appendfilename = os.path.basename(appendfile)
-        newappendfile = appendfile
-        if appendfile and os.path.exists(appendfile):
-            with open(appendfile, 'r') as f:
-                oldlines = f.readlines()
-        else:
-            appendfile = '/dev/null'
-            oldlines = []
-
-        with open(os.path.join(dry_run_outdir, appendfilename), 'r') as f:
-            newlines = f.readlines()
-        diff = difflib.unified_diff(oldlines, newlines, appendfile, newappendfile)
-        difflines = list(diff)
-        if difflines:
-            output += ''.join(difflines)
-        if output:
-            logger.info('Diff of changed files:\n%s' % output)
-        else:
-            logger.info('No changed files')
-    tinfoil.modified_files()
-
-def appendsrcfiles(parser, args):
-    recipedata = _parse_recipe(args.recipe, tinfoil)
-    if not recipedata:
-        parser.error('RECIPE must be a valid recipe name')
-
-    files = dict((f, os.path.join(args.destdir, os.path.basename(f)))
-                 for f in args.files)
-    return appendsrc(args, files, recipedata)
-
-
-def appendsrcfile(parser, args):
-    recipedata = _parse_recipe(args.recipe, tinfoil)
-    if not recipedata:
-        parser.error('RECIPE must be a valid recipe name')
-
-    if not args.destfile:
-        args.destfile = os.path.basename(args.file)
-    elif args.destfile.endswith('/'):
-        args.destfile = os.path.join(args.destfile, os.path.basename(args.file))
-
-    return appendsrc(args, {args.file: args.destfile}, recipedata)
-
-
-def layer(layerpath):
-    if not os.path.exists(os.path.join(layerpath, 'conf', 'layer.conf')):
-        raise argparse.ArgumentTypeError('{0!r} must be a path to a valid layer'.format(layerpath))
-    return layerpath
-
-
-def existing_path(filepath):
-    if not os.path.exists(filepath):
-        raise argparse.ArgumentTypeError('{0!r} must be an existing path'.format(filepath))
-    return filepath
-
-
-def existing_file(filepath):
-    filepath = existing_path(filepath)
-    if os.path.isdir(filepath):
-        raise argparse.ArgumentTypeError('{0!r} must be a file, not a directory'.format(filepath))
-    return filepath
-
-
-def destination_path(destpath):
-    if os.path.isabs(destpath):
-        raise argparse.ArgumentTypeError('{0!r} must be a relative path, not absolute'.format(destpath))
-    return destpath
-
-
-def target_path(targetpath):
-    if not os.path.isabs(targetpath):
-        raise argparse.ArgumentTypeError('{0!r} must be an absolute path, not relative'.format(targetpath))
-    return targetpath
-
-
-def register_commands(subparsers):
-    common = argparse.ArgumentParser(add_help=False)
-    common.add_argument('-m', '--machine', help='Make bbappend changes specific to a machine only', metavar='MACHINE')
-    common.add_argument('-w', '--wildcard-version', help='Use wildcard to make the bbappend apply to any recipe version', action='store_true')
-    common.add_argument('destlayer', metavar='DESTLAYER', help='Base directory of the destination layer to write the bbappend to', type=layer)
-
-    parser_appendfile = subparsers.add_parser('appendfile',
-                                              parents=[common],
-                                              help='Create/update a bbappend to replace a target file',
-                                              description='Creates a bbappend (or updates an existing one) to replace the specified file that appears in the target system, determining the recipe that packages the file and the required path and name for the bbappend automatically. Note that the ability to determine the recipe packaging a particular file depends upon the recipe\'s do_packagedata task having already run prior to running this command (which it will have when the recipe has been built successfully, which in turn will have happened if one or more of the recipe\'s packages is included in an image that has been built successfully).')
-    parser_appendfile.add_argument('targetpath', help='Path to the file to be replaced (as it would appear within the target image, e.g. /etc/motd)', type=target_path)
-    parser_appendfile.add_argument('newfile', help='Custom file to replace the target file with', type=existing_file)
-    parser_appendfile.add_argument('-r', '--recipe', help='Override recipe to apply to (default is to find which recipe already packages the file)')
-    parser_appendfile.set_defaults(func=appendfile, parserecipes=True)
-
-    common_src = argparse.ArgumentParser(add_help=False, parents=[common])
-    common_src.add_argument('-W', '--workdir', help='Unpack file into WORKDIR rather than S', dest='use_workdir', action='store_true')
-    common_src.add_argument('recipe', metavar='RECIPE', help='Override recipe to apply to')
-
-    parser = subparsers.add_parser('appendsrcfiles',
-                                   parents=[common_src],
-                                   help='Create/update a bbappend to add or replace source files',
-                                   description='Creates a bbappend (or updates an existing one) to add or replace the specified files in the recipe sources, either those in WORKDIR or those in the source tree. This command lets you specify multiple files with a destination directory, so you cannot specify the destination filename. See the `appendsrcfile` command for the other behavior.')
-    parser.add_argument('-D', '--destdir', help='Destination directory (relative to S or WORKDIR, defaults to ".")', default='', type=destination_path)
-    parser.add_argument('-u', '--update-recipe', help='Update the recipe instead of creating (or updating) a bbappend file. DESTLAYER must contain the recipe to update', action='store_true')
-    parser.add_argument('-n', '--dry-run', help='Dry run mode', action='store_true')
-    parser.add_argument('files', nargs='+', metavar='FILE', help='File(s) to be added to the recipe sources (WORKDIR or S)', type=existing_path)
-    parser.set_defaults(func=lambda a: appendsrcfiles(parser, a), parserecipes=True)
-
-    parser = subparsers.add_parser('appendsrcfile',
-                                   parents=[common_src],
-                                   help='Create/update a bbappend to add or replace a source file',
-                                   description='Creates a bbappend (or updates an existing one) to add or replace the specified file in the recipe sources, either those in WORKDIR or those in the source tree. This command lets you specify the destination filename, not just the destination directory, but only works for one file. See the `appendsrcfiles` command for the other behavior.')
-    parser.add_argument('-u', '--update-recipe', help='Update the recipe instead of creating (or updating) a bbappend file. DESTLAYER must contain the recipe to update', action='store_true')
-    parser.add_argument('-n', '--dry-run', help='Dry run mode', action='store_true')
-    parser.add_argument('file', metavar='FILE', help='File to be added to the recipe sources (WORKDIR or S)', type=existing_path)
-    parser.add_argument('destfile', metavar='DESTFILE', nargs='?', help='Destination path (relative to S or WORKDIR, optional)', type=destination_path)
-    parser.set_defaults(func=lambda a: appendsrcfile(parser, a), parserecipes=True)
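
Note: the '?' and '!' prefixes that find_target_file() attaches to recipe names in the deleted append.py above are what drive recipe selection in appendfile(). A minimal standalone sketch of that convention, assuming hypothetical sample package names (not taken from this patch):

    def select_recipe(pnlist):
        """Mimic appendfile()'s handling of find_target_file() markers:
        '?pn' = possible update-alternatives provider, '!pn' = recipe whose
        pkg_preinst/pkg_postinst may write the file, bare 'pn' = direct installer."""
        alternative_pns = []
        postinst_pns = []
        selectpn = None
        for pn in pnlist:
            if pn.startswith('?'):
                alternative_pns.append(pn[1:])
            elif pn.startswith('!'):
                postinst_pns.append(pn[1:])
            elif not selectpn:
                # first plain provider wins; later hits (e.g. multilib variants) are skipped
                selectpn = pn
        if not selectpn and len(alternative_pns) == 1:
            # a lone alternative provider is selected by default (with a warning in the real tool)
            selectpn = alternative_pns[0]
        return selectpn, alternative_pns, postinst_pns

    # Hypothetical sample data:
    print(select_recipe(['busybox', '?coreutils', '!base-files']))
    # -> ('busybox', ['coreutils'], ['base-files'])
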
diff --git a/scripts/lib/recipetool/create.py b/scripts/lib/recipetool/create.py
deleted file mode 100644
index 94d52d6077..0000000000
--- a/scripts/lib/recipetool/create.py
+++ /dev/null
| @@ -1,1439 +0,0 @@ | |||
-# Recipe creation tool - create command plugin
-#
-# Copyright (C) 2014-2017 Intel Corporation
-#
-# SPDX-License-Identifier: GPL-2.0-only
-#
-
-import sys
-import os
-import argparse
-import glob
-import fnmatch
-import re
-import json
-import logging
-import scriptutils
-from urllib.parse import urlparse, urldefrag, urlsplit
-import hashlib
-import bb.fetch2
-logger = logging.getLogger('recipetool')
-
-tinfoil = None
-plugins = None
-
-def log_error_cond(message, debugonly):
-    if debugonly:
-        logger.debug(message)
-    else:
-        logger.error(message)
-
-def log_info_cond(message, debugonly):
-    if debugonly:
-        logger.debug(message)
-    else:
-        logger.info(message)
-
-def plugin_init(pluginlist):
-    # Take a reference to the list so we can use it later
-    global plugins
-    plugins = pluginlist
-
-def tinfoil_init(instance):
-    global tinfoil
-    tinfoil = instance
-
-class RecipeHandler(object):
-    recipelibmap = {}
-    recipeheadermap = {}
-    recipecmakefilemap = {}
-    recipebinmap = {}
-
-    def __init__(self):
-        self._devtool = False
-
-    @staticmethod
-    def load_libmap(d):
-        '''Load library->recipe mapping'''
-        import oe.package
-
-        if RecipeHandler.recipelibmap:
-            return
-        # First build up library->package mapping
-        d2 = bb.data.createCopy(d)
-        d2.setVar("WORKDIR_PKGDATA", "${PKGDATA_DIR}")
-        shlib_providers = oe.package.read_shlib_providers(d2)
-        libdir = d.getVar('libdir')
-        base_libdir = d.getVar('base_libdir')
-        libpaths = list(set([base_libdir, libdir]))
-        libname_re = re.compile(r'^lib(.+)\.so.*$')
-        pkglibmap = {}
-        for lib, item in shlib_providers.items():
-            for path, pkg in item.items():
-                if path in libpaths:
-                    res = libname_re.match(lib)
-                    if res:
-                        libname = res.group(1)
-                        if not libname in pkglibmap:
-                            pkglibmap[libname] = pkg[0]
-                    else:
-                        logger.debug('unable to extract library name from %s' % lib)
-
-        # Now turn it into a library->recipe mapping
-        pkgdata_dir = d.getVar('PKGDATA_DIR')
-        for libname, pkg in pkglibmap.items():
-            try:
-                with open(os.path.join(pkgdata_dir, 'runtime', pkg)) as f:
-                    for line in f:
-                        if line.startswith('PN:'):
-                            RecipeHandler.recipelibmap[libname] = line.split(':', 1)[-1].strip()
-                            break
-            except IOError as ioe:
-                if ioe.errno == 2:
-                    logger.warning('unable to find a pkgdata file for package %s' % pkg)
-                else:
-                    raise
-
-        # Some overrides - these should be mapped to the virtual
-        RecipeHandler.recipelibmap['GL'] = 'virtual/libgl'
-        RecipeHandler.recipelibmap['EGL'] = 'virtual/egl'
-        RecipeHandler.recipelibmap['GLESv2'] = 'virtual/libgles2'
-
-    @staticmethod
-    def load_devel_filemap(d):
-        '''Build up development file->recipe mapping'''
-        if RecipeHandler.recipeheadermap:
-            return
-        pkgdata_dir = d.getVar('PKGDATA_DIR')
-        includedir = d.getVar('includedir')
-        cmakedir = os.path.join(d.getVar('libdir'), 'cmake')
-        for pkg in glob.glob(os.path.join(pkgdata_dir, 'runtime', '*-dev')):
-            with open(os.path.join(pkgdata_dir, 'runtime', pkg)) as f:
-                pn = None
-                headers = []
-                cmakefiles = []
-                for line in f:
-                    if line.startswith('PN:'):
-                        pn = line.split(':', 1)[-1].strip()
-                    elif line.startswith('FILES_INFO:%s:' % pkg):
-                        val = line.split(': ', 1)[1].strip()
-                        dictval = json.loads(val)
-                        for fullpth in sorted(dictval):
-                            if fullpth.startswith(includedir) and fullpth.endswith('.h'):
-                                headers.append(os.path.relpath(fullpth, includedir))
-                            elif fullpth.startswith(cmakedir) and fullpth.endswith('.cmake'):
-                                cmakefiles.append(os.path.relpath(fullpth, cmakedir))
-                if pn and headers:
-                    for header in headers:
-                        RecipeHandler.recipeheadermap[header] = pn
-                if pn and cmakefiles:
-                    for fn in cmakefiles:
-                        RecipeHandler.recipecmakefilemap[fn] = pn
-
-    @staticmethod
-    def load_binmap(d):
-        '''Build up native binary->recipe mapping'''
-        if RecipeHandler.recipebinmap:
-            return
-        sstate_manifests = d.getVar('SSTATE_MANIFESTS')
-        staging_bindir_native = d.getVar('STAGING_BINDIR_NATIVE')
-        build_arch = d.getVar('BUILD_ARCH')
-        fileprefix = 'manifest-%s-' % build_arch
-        for fn in glob.glob(os.path.join(sstate_manifests, '%s*-native.populate_sysroot' % fileprefix)):
-            with open(fn, 'r') as f:
-                pn = os.path.basename(fn).rsplit('.', 1)[0][len(fileprefix):]
-                for line in f:
-                    if line.startswith(staging_bindir_native):
-                        prog = os.path.basename(line.rstrip())
-                        RecipeHandler.recipebinmap[prog] = pn
-
-    @staticmethod
-    def checkfiles(path, speclist, recursive=False, excludedirs=None):
-        results = []
-        if recursive:
-            for root, dirs, files in os.walk(path, topdown=True):
-                if excludedirs:
-                    dirs[:] = [d for d in dirs if d not in excludedirs]
-                for fn in files:
-                    for spec in speclist:
-                        if fnmatch.fnmatch(fn, spec):
-                            results.append(os.path.join(root, fn))
-        else:
-            for spec in speclist:
-                results.extend(glob.glob(os.path.join(path, spec)))
-        return results
-
-    @staticmethod
-    def handle_depends(libdeps, pcdeps, deps, outlines, values, d):
-        if pcdeps:
-            recipemap = read_pkgconfig_provides(d)
-        if libdeps:
-            RecipeHandler.load_libmap(d)
-
-        ignorelibs = ['socket']
-        ignoredeps = ['gcc-runtime', 'glibc', 'uclibc', 'musl', 'tar-native', 'binutils-native', 'coreutils-native']
-
-        unmappedpc = []
-        pcdeps = list(set(pcdeps))
-        for pcdep in pcdeps:
-            if isinstance(pcdep, str):
-                recipe = recipemap.get(pcdep, None)
-                if recipe:
-                    deps.append(recipe)
-                else:
-                    if not pcdep.startswith('$'):
-                        unmappedpc.append(pcdep)
-            else:
-                for item in pcdep:
-                    recipe = recipemap.get(item, None)
-                    if recipe:
-                        deps.append(recipe)
-                        break
-                else:
-                    unmappedpc.append('(%s)' % ' or '.join(pcdep))
-
-        unmappedlibs = []
-        for libdep in libdeps:
-            if isinstance(libdep, tuple):
-                lib, header = libdep
-            else:
-                lib = libdep
-                header = None
-
-            if lib in ignorelibs:
-                logger.debug('Ignoring library dependency %s' % lib)
-                continue
-
-            recipe = RecipeHandler.recipelibmap.get(lib, None)
-            if recipe:
-                deps.append(recipe)
-            elif recipe is None:
-                if header:
-                    RecipeHandler.load_devel_filemap(d)
-                    recipe = RecipeHandler.recipeheadermap.get(header, None)
-                    if recipe:
-                        deps.append(recipe)
-                    elif recipe is None:
-                        unmappedlibs.append(lib)
-                else:
-                    unmappedlibs.append(lib)
-
-        deps = set(deps).difference(set(ignoredeps))
-
-        if unmappedpc:
-            outlines.append('# NOTE: unable to map the following pkg-config dependencies: %s' % ' '.join(unmappedpc))
-            outlines.append('# (this is based on recipes that have previously been built and packaged)')
-
-        if unmappedlibs:
-            outlines.append('# NOTE: the following library dependencies are unknown, ignoring: %s' % ' '.join(list(set(unmappedlibs))))
-            outlines.append('# (this is based on recipes that have previously been built and packaged)')
-
-        if deps:
-            values['DEPENDS'] = ' '.join(deps)
-
-    @staticmethod
-    def genfunction(outlines, funcname, content, python=False, forcespace=False):
-        if python:
-            prefix = 'python '
-        else:
-            prefix = ''
-        outlines.append('%s%s () {' % (prefix, funcname))
-        if python or forcespace:
-            indent = '    '
-        else:
-            indent = '\t'
-        addnoop = not python
-        for line in content:
-            outlines.append('%s%s' % (indent, line))
-            if addnoop:
-                strippedline = line.lstrip()
-                if strippedline and not strippedline.startswith('#'):
-                    addnoop = False
-        if addnoop:
-            # Without this there'll be a syntax error
-            outlines.append('%s:' % indent)
-        outlines.append('}')
-        outlines.append('')
-
-    def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
-        return False
-
-
-def validate_pv(pv):
-    if not pv or '_version' in pv.lower() or pv[0] not in '0123456789':
-        return False
-    return True
-
-def determine_from_filename(srcfile):
-    """Determine name and version from a filename"""
-    if is_package(srcfile):
-        # Force getting the value from the package metadata
-        return None, None
-
-    if '.tar.' in srcfile:
-        namepart = srcfile.split('.tar.')[0]
-    else:
-        namepart = os.path.splitext(srcfile)[0]
-    namepart = namepart.lower().replace('_', '-')
-    if namepart.endswith('.src'):
-        namepart = namepart[:-4]
-    if namepart.endswith('.orig'):
-        namepart = namepart[:-5]
-    splitval = namepart.split('-')
-    logger.debug('determine_from_filename: split name %s into: %s' % (srcfile, splitval))
-
-    ver_re = re.compile('^v?[0-9]')
-
-    pv = None
-    pn = None
-    if len(splitval) == 1:
-        # Try to split the version out if there is no separator (or a .)
-        res = re.match('^([^0-9]+)([0-9.]+.*)$', namepart)
-        if res:
-            if len(res.group(1)) > 1 and len(res.group(2)) > 1:
-                pn = res.group(1).rstrip('.')
-                pv = res.group(2)
-        else:
-            pn = namepart
-    else:
-        if splitval[-1] in ['source', 'src']:
-            splitval.pop()
-        if len(splitval) > 2 and re.match('^(alpha|beta|stable|release|rc[0-9]|pre[0-9]|p[0-9]|[0-9]{8})', splitval[-1]) and ver_re.match(splitval[-2]):
-            pv = '-'.join(splitval[-2:])
-            if pv.endswith('-release'):
-                pv = pv[:-8]
-            splitval = splitval[:-2]
-        elif ver_re.match(splitval[-1]):
-            pv = splitval.pop()
-        pn = '-'.join(splitval)
-        if pv and pv.startswith('v'):
-            pv = pv[1:]
-    logger.debug('determine_from_filename: name = "%s" version = "%s"' % (pn, pv))
-    return (pn, pv)
-
-def determine_from_url(srcuri):
-    """Determine name and version from a URL"""
-    pn = None
-    pv = None
-    parseres = urlparse(srcuri.lower().split(';', 1)[0])
-    if parseres.path:
-        if 'github.com' in parseres.netloc:
-            res = re.search(r'.*/(.*?)/archive/(.*)-final\.(tar|zip)', parseres.path)
-            if res:
-                pn = res.group(1).strip().replace('_', '-')
-                pv = res.group(2).strip().replace('_', '.')
-            else:
-                res = re.search(r'.*/(.*?)/archive/v?(.*)\.(tar|zip)', parseres.path)
-                if res:
-                    pn = res.group(1).strip().replace('_', '-')
-                    pv = res.group(2).strip().replace('_', '.')
-        elif 'bitbucket.org' in parseres.netloc:
-            res = re.search(r'.*/(.*?)/get/[a-zA-Z_-]*([0-9][0-9a-zA-Z_.]*)\.(tar|zip)', parseres.path)
-            if res:
-                pn = res.group(1).strip().replace('_', '-')
-                pv = res.group(2).strip().replace('_', '.')
-
-        if not pn and not pv:
-            if parseres.scheme not in ['git', 'gitsm', 'svn', 'hg']:
-                srcfile = os.path.basename(parseres.path.rstrip('/'))
-                pn, pv = determine_from_filename(srcfile)
-            elif parseres.scheme in ['git', 'gitsm']:
-                pn = os.path.basename(parseres.path.rstrip('/')).lower().replace('_', '-')
-                if pn.endswith('.git'):
-                    pn = pn[:-4]
-
-    logger.debug('Determined from source URL: name = "%s", version = "%s"' % (pn, pv))
-    return (pn, pv)
-
-def supports_srcrev(uri):
-    localdata = bb.data.createCopy(tinfoil.config_data)
-    # This is a bit sad, but if you don't have this set there can be some
-    # odd interactions with the urldata cache which lead to errors
-    localdata.setVar('SRCREV', '${AUTOREV}')
-    try:
-        fetcher = bb.fetch2.Fetch([uri], localdata)
-        urldata = fetcher.ud
-        for u in urldata:
-            if urldata[u].method.supports_srcrev():
-                return True
-    except bb.fetch2.FetchError as e:
-        logger.debug('FetchError in supports_srcrev: %s' % str(e))
-        # Fall back to basic check
-        if uri.startswith(('git://', 'gitsm://')):
-            return True
-    return False
-
-def reformat_git_uri(uri):
-    '''Convert any http[s]://....git URI into git://...;protocol=http[s]'''
-    checkuri = uri.split(';', 1)[0]
-    if checkuri.endswith('.git') or '/git/' in checkuri or re.match('https?://git(hub|lab).com/[^/]+/[^/]+/?$', checkuri):
-        # Appends scheme if the scheme is missing
-        if not '://' in uri:
-            uri = 'git://' + uri
-        scheme, host, path, user, pswd, parms = bb.fetch2.decodeurl(uri)
-        # Detection mechanism: this is required because certain URLs are formatted with ":" rather than "/",
-        # which causes decodeurl to fail to get the right host and path
-        if len(host.split(':')) > 1:
-            splitslash = host.split(':')
-            # Port number should not be split from host
-            if not re.match('^[0-9]+$', splitslash[1]):
-                host = splitslash[0]
-                path = '/' + splitslash[1] + path
-        # Algorithm:
-        # if a user is defined, append protocol=ssh, or if a protocol is defined, honor the user-defined protocol
-        # if no user & password is defined, check the scheme type and append the protocol with the scheme type
-        # finally, if the protocol is already set or the URL is well-formed, do nothing and rejoin everything back to normal
-        # Need to repackage the arguments for encodeurl, the format is: (scheme, host, path, user, password, OrderedDict([('key', 'value')]))
-        if user:
-            if not 'protocol' in parms:
-                parms.update({('protocol', 'ssh')})
-        elif (scheme == "http" or scheme == 'https' or scheme == 'ssh') and not ('protocol' in parms):
-            parms.update({('protocol', scheme)})
-        # Always append 'git://'
-        fUrl = bb.fetch2.encodeurl(('git', host, path, user, pswd, parms))
-        return fUrl
-    else:
-        return uri
-
-def is_package(url):
-    '''Check if a URL points to a package'''
-    checkurl = url.split(';', 1)[0]
-    if checkurl.endswith(('.deb', '.ipk', '.rpm', '.srpm')):
-        return True
-    return False
-
-def create_recipe(args):
-    import bb.process
-    import tempfile
-    import shutil
-    import oe.recipeutils
-
-    pkgarch = ""
-    if args.machine:
-        pkgarch = "${MACHINE_ARCH}"
-
-    extravalues = {}
-    checksums = {}
-    tempsrc = ''
-    source = args.source
-    srcsubdir = ''
-    srcrev = '${AUTOREV}'
-    srcbranch = ''
-    scheme = ''
-    storeTagName = ''
-    pv_srcpv = False
-
-    handled = []
-    classes = []
-
-    # Find all plugins that want to register handlers
-    logger.debug('Loading recipe handlers')
-    raw_handlers = []
-    for plugin in plugins:
-        if hasattr(plugin, 'register_recipe_handlers'):
-            plugin.register_recipe_handlers(raw_handlers)
-    # Sort handlers by priority
-    handlers = []
-    for i, handler in enumerate(raw_handlers):
-        if isinstance(handler, tuple):
-            handlers.append((handler[0], handler[1], i))
-        else:
-            handlers.append((handler, 0, i))
-    handlers.sort(key=lambda item: (item[1], -item[2]), reverse=True)
-    for handler, priority, _ in handlers:
-        logger.debug('Handler: %s (priority %d)' % (handler.__class__.__name__, priority))
-        setattr(handler, '_devtool', args.devtool)
-    handlers = [item[0] for item in handlers]
-
-    fetchuri = None
-    for handler in handlers:
-        if hasattr(handler, 'process_url'):
-            ret = handler.process_url(args, classes, handled, extravalues)
-            if 'url' in handled and ret:
-                fetchuri = ret
-                break
-
-    if os.path.isfile(source):
-        source = 'file://%s' % os.path.abspath(source)
-
-    if scriptutils.is_src_url(source):
-        # Warn about github archive URLs
-        if re.match(r'https?://github.com/[^/]+/[^/]+/archive/.+(\.tar\..*|\.zip)$', source):
-            logger.warning('github archive files are not guaranteed to be stable and may be re-generated over time. If the latter occurs, the checksums will likely change and the recipe will fail at do_fetch. It is recommended that you point to an actual commit or tag in the repository instead (using the repository URL in conjunction with the -S/--srcrev option).')
-        # Fetch a URL
-        if not fetchuri:
-            fetchuri = reformat_git_uri(urldefrag(source)[0])
-        if args.binary:
-            # Assume the archive contains the directory structure verbatim
-            # so we need to extract to a subdirectory
-            fetchuri += ';subdir=${BPN}'
-        srcuri = fetchuri
-        rev_re = re.compile(';rev=([^;]+)')
-        res = rev_re.search(srcuri)
-        if res:
-            if args.srcrev:
-                logger.error('rev= parameter and -S/--srcrev option cannot both be specified - use one or the other')
-                sys.exit(1)
-            if args.autorev:
-                logger.error('rev= parameter and -a/--autorev option cannot both be specified - use one or the other')
-                sys.exit(1)
-            srcrev = res.group(1)
-            srcuri = rev_re.sub('', srcuri)
-        elif args.srcrev:
-            srcrev = args.srcrev
-
-        # Check whether the user provided any branch info in the fetchuri.
-        # If so, skip all branch checking and honor the user's input.
-        scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(fetchuri)
-        srcbranch = params.get('branch')
-        if args.srcbranch:
-            if srcbranch:
-                logger.error('branch= parameter and -B/--srcbranch option cannot both be specified - use one or the other')
-                sys.exit(1)
-            srcbranch = args.srcbranch
-            params['branch'] = srcbranch
-        nobranch = params.get('nobranch')
-        if nobranch and srcbranch:
-            logger.error('nobranch= cannot be used if you specify a branch')
-            sys.exit(1)
-        tag = params.get('tag')
-        if not srcbranch and not nobranch and srcrev != '${AUTOREV}':
-            # Append nobranch=1 in the following conditions:
-            # 1. User did not set 'branch=' in srcuri, and
-            # 2. User did not set 'nobranch=1' in srcuri, and
-            # 3. Source revision is not '${AUTOREV}'
-            params['nobranch'] = '1'
-        if tag:
-            # Keep a copy of the tag and append nobranch=1, then remove the tag from the URL.
-            # The bitbake fetcher is unable to fetch when ${AUTOREV} and a tag are set at the same time.
-            storeTagName = params['tag']
-            params['nobranch'] = '1'
-            del params['tag']
-        # Assume 'master' branch if not set
-        if scheme in ['git', 'gitsm'] and 'branch' not in params and 'nobranch' not in params:
-            params['branch'] = 'master'
-        fetchuri = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params))
-
-        tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR')
-        bb.utils.mkdirhier(tmpparent)
-        tempsrc = tempfile.mkdtemp(prefix='recipetool-', dir=tmpparent)
-        srctree = os.path.join(tempsrc, 'source')
-
-        try:
-            checksums, ftmpdir = scriptutils.fetch_url(tinfoil, fetchuri, srcrev, srctree, logger, preserve_tmp=args.keep_temp)
-        except scriptutils.FetchUrlFailure as e:
-            logger.error(str(e))
-            sys.exit(1)
-
-        if ftmpdir and args.keep_temp:
-            logger.info('Fetch temp directory is %s' % ftmpdir)
-
-        dirlist = os.listdir(srctree)
-        logger.debug('Directory listing (excluding filtered out):\n %s' % '\n '.join(dirlist))
-        if len(dirlist) == 1:
-            singleitem = os.path.join(srctree, dirlist[0])
-            if os.path.isdir(singleitem):
-                # We unpacked a single directory, so we should use that
-                srcsubdir = dirlist[0]
-                srctree = os.path.join(srctree, srcsubdir)
-            else:
-                check_single_file(dirlist[0], fetchuri)
-        elif len(dirlist) == 0:
-            if '/' in fetchuri:
-                fn = os.path.join(tinfoil.config_data.getVar('DL_DIR'), fetchuri.split('/')[-1])
-                if os.path.isfile(fn):
-                    check_single_file(fn, fetchuri)
-            # If we've got to here then there's no source so we might as well give up
-            logger.error('URL %s resulted in an empty source tree' % fetchuri)
-            sys.exit(1)
-
-        # We need this checking mechanism so that the recipe created by recipetool and devtool
-        # can be parsed and built by bitbake.
-        # If no branch name was given, determine the branch name from the SRCREV provided.
-        if not srcbranch and not nobranch and srcrev and (srcrev != '${AUTOREV}') and scheme in ['git', 'gitsm']:
-            try:
-                cmd = 'git branch -r --contains'
-                check_branch, check_branch_err = bb.process.run('%s %s' % (cmd, srcrev), cwd=srctree)
-            except bb.process.ExecutionError as err:
-                logger.error(str(err))
-                sys.exit(1)
-            get_branch = [x.strip() for x in check_branch.splitlines()]
-            # Remove HEAD reference point and drop remote prefix
-            get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')]
-            if 'master' in get_branch:
-                # Even when get_branch contains multiple branches, if 'master' is one
-                # of them, we should default to taking 'master'
-                srcbranch = 'master'
-            elif len(get_branch) == 1:
-                # If 'master' isn't in get_branch and get_branch contains only ONE branch, store it in 'srcbranch'
-                srcbranch = get_branch[0]
-            else:
-                # If get_branch contains more than one branch, display an error and exit.
-                mbrch = '\n ' + '\n '.join(get_branch)
-                logger.error('Revision %s was found on multiple branches: %s\nPlease provide the correct branch with -B/--srcbranch' % (srcrev, mbrch))
-                sys.exit(1)
-
-        # Since we might have a value in srcbranch, we need to
-        # reconstruct the srcuri to include 'branch' in params.
-        scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(srcuri)
-        if scheme in ['git', 'gitsm']:
-            params['branch'] = srcbranch or 'master'
-
-        if storeTagName and scheme in ['git', 'gitsm']:
-            # Check srcrev using tag and check validity of the tag
-            cmd = ('git rev-parse --verify %s' % (storeTagName))
-            try:
-                check_tag, check_tag_err = bb.process.run('%s' % cmd, cwd=srctree)
-                srcrev = check_tag.split()[0]
-            except bb.process.ExecutionError as err:
-                logger.error(str(err))
| 590 | logger.error("Possibly wrong tag name is provided") | ||
-                sys.exit(1)
-            # Drop tag from srcuri as it will have conflicts with SRCREV during recipe parse.
-            del params['tag']
-        srcuri = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params))
-
-        if os.path.exists(os.path.join(srctree, '.gitmodules')) and srcuri.startswith('git://'):
-            srcuri = 'gitsm://' + srcuri[6:]
-            logger.info('Fetching submodules...')
-            bb.process.run('git submodule update --init --recursive', cwd=srctree)
-
-        if is_package(fetchuri):
-            localdata = bb.data.createCopy(tinfoil.config_data)
-            pkgfile = bb.fetch2.localpath(fetchuri, localdata)
-            if pkgfile:
-                tmpfdir = tempfile.mkdtemp(prefix='recipetool-')
-                try:
-                    if pkgfile.endswith(('.deb', '.ipk')):
-                        stdout, _ = bb.process.run('ar x %s' % pkgfile, cwd=tmpfdir)
-                        stdout, _ = bb.process.run('tar xf control.tar.gz', cwd=tmpfdir)
-                        values = convert_debian(tmpfdir)
-                        extravalues.update(values)
-                    elif pkgfile.endswith(('.rpm', '.srpm')):
-                        stdout, _ = bb.process.run('rpm -qp --xml %s > pkginfo.xml' % pkgfile, cwd=tmpfdir)
-                        values = convert_rpm_xml(os.path.join(tmpfdir, 'pkginfo.xml'))
-                        extravalues.update(values)
-                finally:
-                    shutil.rmtree(tmpfdir)
-    else:
-        # Assume we're pointing to an existing source tree
-        if args.extract_to:
-            logger.error('--extract-to cannot be specified if source is a directory')
-            sys.exit(1)
-        if not os.path.isdir(source):
-            logger.error('Invalid source directory %s' % source)
-            sys.exit(1)
-        srctree = source
-        srcuri = ''
-        if os.path.exists(os.path.join(srctree, '.git')):
-            # Try to get upstream repo location from origin remote
-            try:
-                stdout, _ = bb.process.run('git remote -v', cwd=srctree, shell=True)
-            except bb.process.ExecutionError as e:
-                stdout = None
-            if stdout:
-                for line in stdout.splitlines():
-                    splitline = line.split()
-                    if len(splitline) > 1:
-                        if splitline[0] == 'origin' and scriptutils.is_src_url(splitline[1]):
-                            srcuri = reformat_git_uri(splitline[1]) + ';branch=master'
-                            srcsubdir = 'git'
-                            break
-
-    if args.src_subdir:
-        srcsubdir = os.path.join(srcsubdir, args.src_subdir)
-        srctree_use = os.path.abspath(os.path.join(srctree, args.src_subdir))
-    else:
-        srctree_use = os.path.abspath(srctree)
-
-    if args.outfile and os.path.isdir(args.outfile):
-        outfile = None
-        outdir = args.outfile
-    else:
-        outfile = args.outfile
-        outdir = None
-    if outfile and outfile != '-':
-        if os.path.exists(outfile):
-            logger.error('Output file %s already exists' % outfile)
-            sys.exit(1)
-
-    lines_before = []
-    lines_after = []
-
-    lines_before.append('# Recipe created by %s' % os.path.basename(sys.argv[0]))
-    lines_before.append('# This is the basis of a recipe and may need further editing in order to be fully functional.')
-    lines_before.append('# (Feel free to remove these comments when editing.)')
-    # We need a blank line here so that patch_recipe_lines can rewind before the LICENSE comments
-    lines_before.append('')
-
-    # We'll come back and replace this later in handle_license_vars()
-    lines_before.append('##LICENSE_PLACEHOLDER##')
-
-
-    # FIXME This is kind of a hack, we probably ought to be using bitbake to do this
-    pn = None
-    pv = None
-    if outfile:
-        recipefn = os.path.splitext(os.path.basename(outfile))[0]
-        fnsplit = recipefn.split('_')
-        if len(fnsplit) > 1:
-            pn = fnsplit[0]
-            pv = fnsplit[1]
-        else:
-            pn = recipefn
-
-    if args.version:
-        pv = args.version
-
-    if args.name:
-        pn = args.name
-        if args.name.endswith('-native'):
-            if args.also_native:
-                logger.error('--also-native cannot be specified for a recipe named *-native (*-native denotes a recipe that is already only for native) - either remove the -native suffix from the name or drop --also-native')
-                sys.exit(1)
-            classes.append('native')
-        elif args.name.startswith('nativesdk-'):
-            if args.also_native:
-                logger.error('--also-native cannot be specified for a recipe named nativesdk-* (nativesdk-* denotes a recipe that is already only for nativesdk)')
-                sys.exit(1)
-            classes.append('nativesdk')
-
-    if pv and pv not in 'git svn hg'.split():
-        realpv = pv
-    else:
-        realpv = None
-
-    if not srcuri:
-        lines_before.append('# No information for SRC_URI yet (only an external source tree was specified)')
-    lines_before.append('SRC_URI = "%s"' % srcuri)
-    shown_checksums = ["%ssum" % s for s in bb.fetch2.SHOWN_CHECKSUM_LIST]
-    for key, value in sorted(checksums.items()):
-        if key in shown_checksums:
-            lines_before.append('SRC_URI[%s] = "%s"' % (key, value))
-    if srcuri and supports_srcrev(srcuri):
-        lines_before.append('')
-        lines_before.append('# Modify these as desired')
-        # Note: we have code to replace realpv further down if it gets set to some other value
-        scheme, _, _, _, _, _ = bb.fetch2.decodeurl(srcuri)
-        if scheme in ['git', 'gitsm']:
-            srcpvprefix = 'git'
-        elif scheme == 'svn':
-            srcpvprefix = 'svnr'
-        else:
-            srcpvprefix = scheme
-        lines_before.append('PV = "%s+%s"' % (realpv or '1.0', srcpvprefix))
-        pv_srcpv = True
-        if not args.autorev and srcrev == '${AUTOREV}':
-            if os.path.exists(os.path.join(srctree, '.git')):
-                (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree)
-                srcrev = stdout.rstrip()
-        lines_before.append('SRCREV = "%s"' % srcrev)
-    if args.provides:
-        lines_before.append('PROVIDES = "%s"' % args.provides)
-    lines_before.append('')
-
-    if srcsubdir and not args.binary:
-        # (for binary packages we explicitly specify subdir= when fetching to
-        # match the default value of S, so we don't need to set it in that case)
-        lines_before.append('S = "${WORKDIR}/%s"' % srcsubdir)
-        lines_before.append('')
-
-    if pkgarch:
-        lines_after.append('PACKAGE_ARCH = "%s"' % pkgarch)
-        lines_after.append('')
-
-    if args.binary:
-        lines_after.append('INSANE_SKIP:${PN} += "already-stripped"')
-        lines_after.append('')
-
-    if args.npm_dev:
-        extravalues['NPM_INSTALL_DEV'] = 1
-
-    # Apply the handlers
-    if args.binary:
| 754 | classes.append('bin_package') | ||
| 755 | handled.append('buildsystem') | ||
| 756 | |||
| 757 | for handler in handlers: | ||
| 758 | handler.process(srctree_use, classes, lines_before, lines_after, handled, extravalues) | ||
| 759 | |||
| 760 | # native and nativesdk classes are special and must be inherited last | ||
| 761 | # If present, put them at the end of the classes list | ||
| 762 | classes.sort(key=lambda c: c in ("native", "nativesdk")) | ||
| 763 | |||
| 764 | extrafiles = extravalues.pop('extrafiles', {}) | ||
| 765 | extra_pn = extravalues.pop('PN', None) | ||
| 766 | extra_pv = extravalues.pop('PV', None) | ||
| 767 | |||
| 768 | if extra_pv and not realpv: | ||
| 769 | realpv = extra_pv | ||
| 770 | if not validate_pv(realpv): | ||
| 771 | realpv = None | ||
| 772 | else: | ||
| 773 | realpv = realpv.lower().split()[0] | ||
| 774 | if '_' in realpv: | ||
| 775 | realpv = realpv.replace('_', '-') | ||
| 776 | if extra_pn and not pn: | ||
| 777 | pn = extra_pn | ||
| 778 | if pn.startswith('GNU '): | ||
| 779 | pn = pn[4:] | ||
| 780 | if ' ' in pn: | ||
| 781 | # Probably a descriptive identifier rather than a proper name | ||
| 782 | pn = None | ||
| 783 | else: | ||
| 784 | pn = pn.lower() | ||
| 785 | if '_' in pn: | ||
| 786 | pn = pn.replace('_', '-') | ||
| 787 | |||
| 788 | if srcuri and (not realpv or not pn): | ||
| 789 | name_pn, name_pv = determine_from_url(srcuri) | ||
| 790 | if name_pn and not pn: | ||
| 791 | pn = name_pn | ||
| 792 | if name_pv and not realpv: | ||
| 793 | realpv = name_pv | ||
| 794 | |||
| 795 | licvalues = handle_license_vars(srctree_use, lines_before, handled, extravalues, tinfoil.config_data) | ||
| 796 | |||
| 797 | if not outfile: | ||
| 798 | if not pn: | ||
| 799 | log_error_cond('Unable to determine short program name from source tree - please specify name with -N/--name or output file name with -o/--outfile', args.devtool) | ||
| 800 | # devtool looks for this specific exit code, so don't change it | ||
| 801 | sys.exit(15) | ||
| 802 | else: | ||
| 803 | if srcuri and srcuri.startswith(('gitsm://', 'git://', 'hg://', 'svn://')): | ||
| 804 | suffix = srcuri.split(':', 1)[0] | ||
| 805 | if suffix == 'gitsm': | ||
| 806 | suffix = 'git' | ||
| 807 | outfile = '%s_%s.bb' % (pn, suffix) | ||
| 808 | elif realpv: | ||
| 809 | outfile = '%s_%s.bb' % (pn, realpv) | ||
| 810 | else: | ||
| 811 | outfile = '%s.bb' % pn | ||
| 812 | if outdir: | ||
| 813 | outfile = os.path.join(outdir, outfile) | ||
| 814 | # We need to check this again | ||
| 815 | if os.path.exists(outfile): | ||
| 816 | logger.error('Output file %s already exists' % outfile) | ||
| 817 | sys.exit(1) | ||
| 818 | |||
| 819 | # Move any extra files the plugins created to a directory next to the recipe | ||
| 820 | if extrafiles: | ||
| 821 | if outfile == '-': | ||
| 822 | extraoutdir = pn | ||
| 823 | else: | ||
| 824 | extraoutdir = os.path.join(os.path.dirname(outfile), pn) | ||
| 825 | bb.utils.mkdirhier(extraoutdir) | ||
| 826 | for destfn, extrafile in extrafiles.items(): | ||
| 827 | shutil.move(extrafile, os.path.join(extraoutdir, destfn)) | ||
| 828 | |||
| 829 | lines = lines_before | ||
| 830 | lines_before = [] | ||
| 831 | skipblank = True | ||
| 832 | for line in lines: | ||
| 833 | if skipblank: | ||
| 834 | skipblank = False | ||
| 835 | if not line: | ||
| 836 | continue | ||
| 837 | if line.startswith('S = '): | ||
| 838 | if realpv and pv not in 'git svn hg'.split(): | ||
| 839 | line = line.replace(realpv, '${PV}') | ||
| 840 | if pn: | ||
| 841 | line = line.replace(pn, '${BPN}') | ||
| 842 | if line == 'S = "${WORKDIR}/${BPN}-${PV}"': | ||
| 843 | skipblank = True | ||
| 844 | continue | ||
| 845 | elif line.startswith('SRC_URI = '): | ||
| 846 | if realpv and not pv_srcpv: | ||
| 847 | line = line.replace(realpv, '${PV}') | ||
| 848 | elif line.startswith('PV = '): | ||
| 849 | if realpv: | ||
| 850 | # Replace the first part of the PV value | ||
| 851 | line = re.sub(r'"[^+]*\+', '"%s+' % realpv, line) | ||
| 852 | lines_before.append(line) | ||
| 853 | |||
| 854 | if args.also_native: | ||
| 855 | lines = lines_after | ||
| 856 | lines_after = [] | ||
| 857 | bbclassextend = None | ||
| 858 | for line in lines: | ||
| 859 | if line.startswith('BBCLASSEXTEND ='): | ||
| 860 | splitval = line.split('"') | ||
| 861 | if len(splitval) > 1: | ||
| 862 | bbclassextend = splitval[1].split() | ||
| 863 | if 'native' not in bbclassextend: | ||
| 864 | bbclassextend.insert(0, 'native') | ||
| 865 | line = 'BBCLASSEXTEND = "%s"' % ' '.join(bbclassextend) | ||
| 866 | lines_after.append(line) | ||
| 867 | if not bbclassextend: | ||
| 868 | lines_after.append('BBCLASSEXTEND = "native"') | ||
| 869 | |||
| 870 | postinst = ("postinst", extravalues.pop('postinst', None)) | ||
| 871 | postrm = ("postrm", extravalues.pop('postrm', None)) | ||
| 872 | preinst = ("preinst", extravalues.pop('preinst', None)) | ||
| 873 | prerm = ("prerm", extravalues.pop('prerm', None)) | ||
| 874 | funcs = [postinst, postrm, preinst, prerm] | ||
| 875 | for func in funcs: | ||
| 876 | if func[1]: | ||
| 877 | RecipeHandler.genfunction(lines_after, 'pkg_%s_${PN}' % func[0], func[1]) | ||
| 878 | |||
| 879 | outlines = [] | ||
| 880 | outlines.extend(lines_before) | ||
| 881 | if classes: | ||
| 882 | if outlines[-1] and not outlines[-1].startswith('#'): | ||
| 883 | outlines.append('') | ||
| 884 | outlines.append('inherit %s' % ' '.join(classes)) | ||
| 885 | outlines.append('') | ||
| 886 | outlines.extend(lines_after) | ||
| 887 | |||
| 888 | outlines = [line.rstrip('\n') + '\n' for line in outlines] | ||
| 889 | |||
| 890 | if extravalues: | ||
| 891 | _, outlines = oe.recipeutils.patch_recipe_lines(outlines, extravalues, trailing_newline=True) | ||
| 892 | |||
| 893 | if args.extract_to: | ||
| 894 | scriptutils.git_convert_standalone_clone(srctree) | ||
| 895 | if os.path.isdir(args.extract_to): | ||
| 896 | # If the directory exists we'll move the temp dir into it instead of | ||
| 897 | # its contents - of course, we could try to always move its contents | ||
| 898 | # but that is a pain if there are symlinks; the simplest solution is | ||
| 899 | # to just remove it first | ||
| 900 | os.rmdir(args.extract_to) | ||
| 901 | shutil.move(srctree, args.extract_to) | ||
| 902 | if tempsrc == srctree: | ||
| 903 | tempsrc = None | ||
| 904 | log_info_cond('Source extracted to %s' % args.extract_to, args.devtool) | ||
| 905 | |||
| 906 | if outfile == '-': | ||
| 907 | sys.stdout.write(''.join(outlines) + '\n') | ||
| 908 | else: | ||
| 909 | with open(outfile, 'w') as f: | ||
| 910 | lastline = None | ||
| 911 | for line in outlines: | ||
| 912 | if not lastline and not line: | ||
| 913 | # Skip extra blank lines | ||
| 914 | continue | ||
| 915 | f.write('%s' % line) | ||
| 916 | lastline = line | ||
| 917 | log_info_cond('Recipe %s has been created; further editing may be required to make it fully functional' % outfile, args.devtool) | ||
| 918 | tinfoil.modified_files() | ||
| 919 | |||
| 920 | if tempsrc: | ||
| 921 | if args.keep_temp: | ||
| 922 | logger.info('Preserving temporary directory %s' % tempsrc) | ||
| 923 | else: | ||
| 924 | shutil.rmtree(tempsrc) | ||
| 925 | |||
| 926 | return 0 | ||
| 927 | |||
| 928 | def check_single_file(fn, fetchuri): | ||
| 929 | """Determine if a single downloaded file is something we can't handle""" | ||
| 930 | with open(fn, 'r', errors='surrogateescape') as f: | ||
| 931 | if '<html' in f.read(100).lower(): | ||
| 932 | logger.error('Fetching "%s" returned a single HTML page - check the URL is correct and functional' % fetchuri) | ||
| 933 | sys.exit(1) | ||
| 934 | |||
| 935 | def split_value(value): | ||
| 936 | if isinstance(value, str): | ||
| 937 | return value.split() | ||
| 938 | else: | ||
| 939 | return value | ||
| 940 | |||
| 941 | def fixup_license(value): | ||
| 942 | # Ensure a license expression containing an OR (|) is wrapped in brackets | ||
| 943 | if '|' in value: | ||
| 944 | return '(' + value + ')' | ||
| 945 | return value | ||
| 946 | |||
| 947 | def tidy_licenses(value): | ||
| 948 | """Flatten, split and sort licenses""" | ||
| 949 | from oe.license import flattened_licenses | ||
| 950 | def _choose(a, b): | ||
| 951 | str_a, str_b = sorted((" & ".join(a), " & ".join(b)), key=str.casefold) | ||
| 952 | return ["(%s | %s)" % (str_a, str_b)] | ||
| 953 | if not isinstance(value, str): | ||
| 954 | value = " & ".join(value) | ||
| 955 | return sorted(list(set(flattened_licenses(value, _choose))), key=str.casefold) | ||
| 956 | |||
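
The interplay of fixup_license() and tidy_licenses() is easiest to see on a concrete expression. A minimal sketch, assuming poky's scripts/lib, bitbake/lib and meta/lib are on sys.path so that recipetool.create and oe.license import cleanly; the expected outputs are illustrative:

    from recipetool.create import fixup_license, tidy_licenses

    print(fixup_license('MIT | Apache-2.0'))  # '(MIT | Apache-2.0)' - OR gets bracketed
    print(fixup_license('MIT'))               # 'MIT' - no OR, unchanged
    # tidy_licenses() flattens, de-duplicates and sorts case-insensitively;
    # alternatives are folded back into a single bracketed OR expression:
    print(tidy_licenses('MIT & (GPL-2.0-only | BSD-3-Clause) & MIT'))
    # expected: ['(BSD-3-Clause | GPL-2.0-only)', 'MIT']
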
| 957 | def handle_license_vars(srctree, lines_before, handled, extravalues, d): | ||
| 958 | lichandled = [x for x in handled if x[0] == 'license'] | ||
| 959 | if lichandled: | ||
| 960 | # Someone else has already handled the license vars, just return their value | ||
| 961 | return lichandled[0][1] | ||
| 962 | |||
| 963 | licvalues = find_licenses(srctree, d) | ||
| 964 | licenses = [] | ||
| 965 | lic_files_chksum = [] | ||
| 966 | lic_unknown = [] | ||
| 967 | lines = [] | ||
| 968 | if licvalues: | ||
| 969 | for licvalue in licvalues: | ||
| 970 | license = licvalue[0] | ||
| 971 | lics = tidy_licenses(fixup_license(license)) | ||
| 972 | lics = [lic for lic in lics if lic not in licenses] | ||
| 973 | if len(lics): | ||
| 974 | licenses.extend(lics) | ||
| 975 | lic_files_chksum.append('file://%s;md5=%s' % (licvalue[1], licvalue[2])) | ||
| 976 | if license == 'Unknown': | ||
| 977 | lic_unknown.append(licvalue[1]) | ||
| 978 | if lic_unknown: | ||
| 979 | lines.append('#') | ||
| 980 | lines.append('# The following license files could not be identified and are') | ||
| 981 | lines.append('# represented as "Unknown" below; you will need to check them yourself:') | ||
| 982 | for licfile in lic_unknown: | ||
| 983 | lines.append('# %s' % licfile) | ||
| 984 | |||
| 985 | extra_license = tidy_licenses(extravalues.pop('LICENSE', '')) | ||
| 986 | if extra_license: | ||
| 987 | if licenses == ['Unknown']: | ||
| 988 | licenses = extra_license | ||
| 989 | else: | ||
| 990 | for item in extra_license: | ||
| 991 | if item not in licenses: | ||
| 992 | licenses.append(item) | ||
| 993 | extra_lic_files_chksum = split_value(extravalues.pop('LIC_FILES_CHKSUM', [])) | ||
| 994 | for item in extra_lic_files_chksum: | ||
| 995 | if item not in lic_files_chksum: | ||
| 996 | lic_files_chksum.append(item) | ||
| 997 | |||
| 998 | if lic_files_chksum: | ||
| 999 | # We are going to set the vars, so prepend the standard disclaimer | ||
| 1000 | lines.insert(0, '# WARNING: the following LICENSE and LIC_FILES_CHKSUM values are best guesses - it is') | ||
| 1001 | lines.insert(1, '# your responsibility to verify that the values are complete and correct.') | ||
| 1002 | else: | ||
| 1003 | # Without LIC_FILES_CHKSUM we set LICENSE = "CLOSED" to allow the | ||
| 1004 | # user to get started easily | ||
| 1005 | lines.append('# Unable to find any files that looked like license statements. Check the accompanying') | ||
| 1006 | lines.append('# documentation and source headers and set LICENSE and LIC_FILES_CHKSUM accordingly.') | ||
| 1007 | lines.append('#') | ||
| 1008 | lines.append('# NOTE: LICENSE is being set to "CLOSED" to allow you to at least start building - if') | ||
| 1009 | lines.append('# this is not accurate with respect to the licensing of the software being built (it') | ||
| 1010 | lines.append('# will not be in most cases), you must specify the correct value before using this') | ||
| 1011 | lines.append('# recipe for anything other than initial testing/development!') | ||
| 1012 | licenses = ['CLOSED'] | ||
| 1013 | |||
| 1014 | if extra_license and sorted(licenses) != sorted(extra_license): | ||
| 1015 | lines.append('# NOTE: Original package / source metadata indicates license is: %s' % ' & '.join(extra_license)) | ||
| 1016 | |||
| 1017 | if len(licenses) > 1: | ||
| 1018 | lines.append('#') | ||
| 1019 | lines.append('# NOTE: multiple licenses have been detected; they have been separated with &') | ||
| 1020 | lines.append('# in the LICENSE value for now since it is a reasonable assumption that all') | ||
| 1021 | lines.append('# of the licenses apply. If instead there is a choice between the multiple') | ||
| 1022 | lines.append('# licenses then you should change the value to separate the licenses with |') | ||
| 1023 | lines.append('# instead of &. If there is any doubt, check the accompanying documentation') | ||
| 1024 | lines.append('# to determine which situation is applicable.') | ||
| 1025 | |||
| 1026 | lines.append('LICENSE = "%s"' % ' & '.join(sorted(licenses, key=str.casefold))) | ||
| 1027 | lines.append('LIC_FILES_CHKSUM = "%s"' % ' \\\n '.join(lic_files_chksum)) | ||
| 1028 | lines.append('') | ||
| 1029 | |||
| 1030 | # Replace the placeholder so we get the values in the right place in the recipe file | ||
| 1031 | try: | ||
| 1032 | pos = lines_before.index('##LICENSE_PLACEHOLDER##') | ||
| 1033 | except ValueError: | ||
| 1034 | pos = -1 | ||
| 1035 | if pos == -1: | ||
| 1036 | lines_before.extend(lines) | ||
| 1037 | else: | ||
| 1038 | lines_before[pos:pos+1] = lines | ||
| 1039 | |||
| 1040 | handled.append(('license', licvalues)) | ||
| 1041 | return licvalues | ||
| 1042 | |||
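
When find_licenses() does identify the license files, the placeholder is replaced with a block along the following lines. The comment text and variable layout come from the code above; the LICENSE value and checksum are hypothetical:

    # WARNING: the following LICENSE and LIC_FILES_CHKSUM values are best guesses - it is
    # your responsibility to verify that the values are complete and correct.
    LICENSE = "MIT"
    LIC_FILES_CHKSUM = "file://LICENSE;md5=0123456789abcdef0123456789abcdef"
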
| 1043 | def get_license_md5sums(d, static_only=False, linenumbers=False): | ||
| 1044 | import bb.utils | ||
| 1045 | import csv | ||
| 1046 | md5sums = {} | ||
| 1047 | if not static_only and not linenumbers: | ||
| 1048 | # Gather md5sums of license files in common license dir | ||
| 1049 | commonlicdir = d.getVar('COMMON_LICENSE_DIR') | ||
| 1050 | for fn in os.listdir(commonlicdir): | ||
| 1051 | md5value = bb.utils.md5_file(os.path.join(commonlicdir, fn)) | ||
| 1052 | md5sums[md5value] = fn | ||
| 1053 | |||
| 1054 | # The following were extracted from common values in various recipes | ||
| 1055 | # (double checking the license against the license file itself, not just | ||
| 1056 | # the LICENSE value in the recipe) | ||
| 1057 | |||
| 1058 | # Read license md5sums from csv file | ||
| 1059 | scripts_path = os.path.dirname(os.path.realpath(__file__)) | ||
| 1060 | for path in (d.getVar('BBPATH').split(':') | ||
| 1061 | + [os.path.join(scripts_path, '..', '..')]): | ||
| 1062 | csv_path = os.path.join(path, 'lib', 'recipetool', 'licenses.csv') | ||
| 1063 | if os.path.isfile(csv_path): | ||
| 1064 | with open(csv_path, newline='') as csv_file: | ||
| 1065 | fieldnames = ['md5sum', 'license', 'beginline', 'endline', 'md5'] | ||
| 1066 | reader = csv.DictReader(csv_file, delimiter=',', fieldnames=fieldnames) | ||
| 1067 | for row in reader: | ||
| 1068 | if linenumbers: | ||
| 1069 | md5sums[row['md5sum']] = ( | ||
| 1070 | row['license'], row['beginline'], row['endline'], row['md5']) | ||
| 1071 | else: | ||
| 1072 | md5sums[row['md5sum']] = row['license'] | ||
| 1073 | |||
| 1074 | return md5sums | ||
| 1075 | |||
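
Given the fieldnames above, each row of licenses.csv carries the md5sum of the whole file, the license name, and (used by the linenumbers variant) a begin/end line range plus, presumably, the md5 of just that range. A hypothetical row:

    0123456789abcdef0123456789abcdef,MIT,1,22,fedcba9876543210fedcba9876543210
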
| 1076 | def crunch_known_licenses(d): | ||
| 1077 | ''' | ||
| 1078 | Calculate the MD5 checksums for the crunched versions of all common | ||
| 1079 | licenses. Also add additional known checksums. | ||
| 1080 | ''' | ||
| 1081 | |||
| 1082 | crunched_md5sums = {} | ||
| 1083 | |||
| 1084 | # common licenses | ||
| 1085 | crunched_md5sums['ad4e9d34a2e966dfe9837f18de03266d'] = 'GFDL-1.1-only' | ||
| 1086 | crunched_md5sums['d014fb11a34eb67dc717fdcfc97e60ed'] = 'GFDL-1.2-only' | ||
| 1087 | crunched_md5sums['e020ca655b06c112def28e597ab844f1'] = 'GFDL-1.3-only' | ||
| 1088 | |||
| 1089 | # The following entry was gleaned from the "forever" npm package | ||
| 1090 | crunched_md5sums['0a97f8e4cbaf889d6fa51f84b89a79f6'] = 'ISC' | ||
| 1091 | # https://github.com/waffle-gl/waffle/blob/master/LICENSE.txt | ||
| 1092 | crunched_md5sums['50fab24ce589d69af8964fdbfe414c60'] = 'BSD-2-Clause' | ||
| 1093 | # https://github.com/spigwitmer/fakeds1963s/blob/master/LICENSE | ||
| 1094 | crunched_md5sums['88a4355858a1433fea99fae34a44da88'] = 'GPL-2.0-only' | ||
| 1095 | # http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt | ||
| 1096 | crunched_md5sums['063b5c3ebb5f3aa4c85a2ed18a31fbe7'] = 'GPL-2.0-only' | ||
| 1097 | # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv2.1 | ||
| 1098 | crunched_md5sums['7f5202f4d44ed15dcd4915f5210417d8'] = 'LGPL-2.1-only' | ||
| 1099 | # unixODBC-2.3.4 COPYING | ||
| 1100 | crunched_md5sums['3debde09238a8c8e1f6a847e1ec9055b'] = 'LGPL-2.1-only' | ||
| 1101 | # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv3 | ||
| 1102 | crunched_md5sums['f90c613c51aa35da4d79dd55fc724ceb'] = 'LGPL-3.0-only' | ||
| 1103 | # https://raw.githubusercontent.com/eclipse/mosquitto/v1.4.14/epl-v10 | ||
| 1104 | crunched_md5sums['efe2cb9a35826992b9df68224e3c2628'] = 'EPL-1.0' | ||
| 1105 | |||
| 1106 | # https://raw.githubusercontent.com/jquery/esprima/3.1.3/LICENSE.BSD | ||
| 1107 | crunched_md5sums['80fa7b56a28e8c902e6af194003220a5'] = 'BSD-2-Clause' | ||
| 1108 | # https://raw.githubusercontent.com/npm/npm-install-checks/master/LICENSE | ||
| 1109 | crunched_md5sums['e659f77bfd9002659e112d0d3d59b2c1'] = 'BSD-2-Clause' | ||
| 1110 | # https://raw.githubusercontent.com/silverwind/default-gateway/4.2.0/LICENSE | ||
| 1111 | crunched_md5sums['4c641f2d995c47f5cb08bdb4b5b6ea05'] = 'BSD-2-Clause' | ||
| 1112 | # https://raw.githubusercontent.com/tad-lispy/node-damerau-levenshtein/v1.0.5/LICENSE | ||
| 1113 | crunched_md5sums['2b8c039b2b9a25f0feb4410c4542d346'] = 'BSD-2-Clause' | ||
| 1114 | # https://raw.githubusercontent.com/terser/terser/v3.17.0/LICENSE | ||
| 1115 | crunched_md5sums['8bd23871802951c9ad63855151204c2c'] = 'BSD-2-Clause' | ||
| 1116 | # https://raw.githubusercontent.com/alexei/sprintf.js/1.0.3/LICENSE | ||
| 1117 | crunched_md5sums['008c22318c8ea65928bf730ddd0273e3'] = 'BSD-3-Clause' | ||
| 1118 | # https://raw.githubusercontent.com/Caligatio/jsSHA/v3.2.0/LICENSE | ||
| 1119 | crunched_md5sums['0e46634a01bfef056892949acaea85b1'] = 'BSD-3-Clause' | ||
| 1120 | # https://raw.githubusercontent.com/d3/d3-path/v1.0.9/LICENSE | ||
| 1121 | crunched_md5sums['b5f72aef53d3b2b432702c30b0215666'] = 'BSD-3-Clause' | ||
| 1122 | # https://raw.githubusercontent.com/feross/ieee754/v1.1.13/LICENSE | ||
| 1123 | crunched_md5sums['a39327c997c20da0937955192d86232d'] = 'BSD-3-Clause' | ||
| 1124 | # https://raw.githubusercontent.com/joyent/node-extsprintf/v1.3.0/LICENSE | ||
| 1125 | crunched_md5sums['721f23a96ff4161ca3a5f071bbe18108'] = 'MIT' | ||
| 1126 | # https://raw.githubusercontent.com/pvorb/clone/v0.2.0/LICENSE | ||
| 1127 | crunched_md5sums['b376d29a53c9573006b9970709231431'] = 'MIT' | ||
| 1128 | # https://raw.githubusercontent.com/andris9/encoding/v0.1.12/LICENSE | ||
| 1129 | crunched_md5sums['85d8a977ee9d7c5ab4ac03c9b95431c4'] = 'MIT-0' | ||
| 1130 | # https://raw.githubusercontent.com/faye/websocket-driver-node/0.7.3/LICENSE.md | ||
| 1131 | crunched_md5sums['b66384e7137e41a9b1904ef4d39703b6'] = 'Apache-2.0' | ||
| 1132 | # https://raw.githubusercontent.com/less/less.js/v4.1.1/LICENSE | ||
| 1133 | crunched_md5sums['b27575459e02221ccef97ec0bfd457ae'] = 'Apache-2.0' | ||
| 1134 | # https://raw.githubusercontent.com/microsoft/TypeScript/v3.5.3/LICENSE.txt | ||
| 1135 | crunched_md5sums['a54a1a6a39e7f9dbb4a23a42f5c7fd1c'] = 'Apache-2.0' | ||
| 1136 | # https://raw.githubusercontent.com/request/request/v2.87.0/LICENSE | ||
| 1137 | crunched_md5sums['1034431802e57486b393d00c5d262b8a'] = 'Apache-2.0' | ||
| 1138 | # https://raw.githubusercontent.com/dchest/tweetnacl-js/v0.14.5/LICENSE | ||
| 1139 | crunched_md5sums['75605e6bdd564791ab698fca65c94a4f'] = 'Unlicense' | ||
| 1140 | # https://raw.githubusercontent.com/stackgl/gl-mat3/v2.0.0/LICENSE.md | ||
| 1141 | crunched_md5sums['75512892d6f59dddb6d1c7e191957e9c'] = 'Zlib' | ||
| 1142 | |||
| 1143 | commonlicdir = d.getVar('COMMON_LICENSE_DIR') | ||
| 1144 | for fn in sorted(os.listdir(commonlicdir)): | ||
| 1145 | md5value, lictext = crunch_license(os.path.join(commonlicdir, fn)) | ||
| 1146 | if md5value not in crunched_md5sums: | ||
| 1147 | crunched_md5sums[md5value] = fn | ||
| 1148 | elif fn != crunched_md5sums[md5value]: | ||
| 1149 | bb.debug(2, "crunched_md5sums['%s'] is already set to '%s' rather than '%s'" % (md5value, crunched_md5sums[md5value], fn)) | ||
| 1150 | else: | ||
| 1151 | bb.debug(2, "crunched_md5sums['%s'] is already set to '%s'" % (md5value, crunched_md5sums[md5value])) | ||
| 1152 | |||
| 1153 | return crunched_md5sums | ||
| 1154 | |||
| 1155 | def crunch_license(licfile): | ||
| 1156 | ''' | ||
| 1157 | Remove non-material text from a license file and then calculate its | ||
| 1158 | md5sum. This works well for licenses that contain a copyright statement, | ||
| 1159 | but is also a useful way to handle people's insistence upon reformatting | ||
| 1160 | the license text slightly (with no material difference to the text of the | ||
| 1161 | license). | ||
| 1162 | ''' | ||
| 1163 | |||
| 1164 | import oe.utils | ||
| 1165 | |||
| 1166 | # Note: these are carefully constructed! | ||
| 1167 | license_title_re = re.compile(r'^#*\(? *(This is )?([Tt]he )?.{0,15} ?[Ll]icen[sc]e( \(.{1,10}\))?\)?[:\.]? ?#*$') | ||
| 1168 | license_statement_re = re.compile(r'^((This (project|software)|.{1,10}) is( free software)? (released|licen[sc]ed)|(Released|Licen[cs]ed)) under the .{1,10} [Ll]icen[sc]e:?$') | ||
| 1169 | copyright_re = re.compile(r'^ *[#\*]* *(Modified work |MIT LICENSED )?Copyright ?(\([cC]\))? .*$') | ||
| 1170 | disclaimer_re = re.compile(r'^ *\*? ?All [Rr]ights [Rr]eserved\.$') | ||
| 1171 | email_re = re.compile(r'^.*<[\w\.-]*@[\w\.\-]*>$') | ||
| 1172 | header_re = re.compile(r'^(\/\**!?)? ?[\-=\*]* ?(\*\/)?$') | ||
| 1173 | tag_re = re.compile(r'^ *@?\(?([Ll]icense|MIT)\)?$') | ||
| 1174 | url_re = re.compile(r'^ *[#\*]* *https?:\/\/[\w\.\/\-]+$') | ||
| 1175 | |||
| 1176 | lictext = [] | ||
| 1177 | with open(licfile, 'r', errors='surrogateescape') as f: | ||
| 1178 | for line in f: | ||
| 1179 | # Drop opening statements | ||
| 1180 | if copyright_re.match(line): | ||
| 1181 | continue | ||
| 1182 | elif disclaimer_re.match(line): | ||
| 1183 | continue | ||
| 1184 | elif email_re.match(line): | ||
| 1185 | continue | ||
| 1186 | elif header_re.match(line): | ||
| 1187 | continue | ||
| 1188 | elif tag_re.match(line): | ||
| 1189 | continue | ||
| 1190 | elif url_re.match(line): | ||
| 1191 | continue | ||
| 1192 | elif license_title_re.match(line): | ||
| 1193 | continue | ||
| 1194 | elif license_statement_re.match(line): | ||
| 1195 | continue | ||
| 1196 | # Strip comment symbols | ||
| 1197 | line = line.replace('*', '') \ | ||
| 1198 | .replace('#', '') | ||
| 1199 | # Unify spelling | ||
| 1200 | line = line.replace('sub-license', 'sublicense') | ||
| 1201 | # Squash spaces | ||
| 1202 | line = oe.utils.squashspaces(line.strip()) | ||
| 1203 | # Replace smart quotes, double quotes and backticks with single quotes | ||
| 1204 | line = line.replace(u"\u2018", "'").replace(u"\u2019", "'").replace(u"\u201c","'").replace(u"\u201d", "'").replace('"', '\'').replace('`', '\'') | ||
| 1205 | # Unify brackets | ||
| 1206 | line = line.replace("{", "[").replace("}", "]") | ||
| 1207 | if line: | ||
| 1208 | lictext.append(line) | ||
| 1209 | |||
| 1210 | m = hashlib.md5() | ||
| 1211 | try: | ||
| 1212 | m.update(' '.join(lictext).encode('utf-8')) | ||
| 1213 | md5val = m.hexdigest() | ||
| 1214 | except UnicodeEncodeError: | ||
| 1215 | md5val = None | ||
| 1216 | lictext = '' | ||
| 1217 | return md5val, lictext | ||
| 1218 | |||
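
New entries for the crunched table can be derived by running crunch_license() by hand against a candidate file. A sketch, assuming the same import environment as the earlier examples; the path is hypothetical:

    from recipetool.create import crunch_license

    md5val, lictext = crunch_license('/tmp/candidate/LICENSE')  # hypothetical path
    print(md5val)       # candidate key for crunched_md5sums
    print(lictext[:3])  # first few normalised lines, for sanity-checking
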
| 1219 | def find_license_files(srctree): | ||
| 1220 | licspecs = ['*LICEN[CS]E*', 'COPYING*', '*[Ll]icense*', 'LEGAL*', '[Ll]egal*', '*GPL*', 'README.lic*', 'COPYRIGHT*', '[Cc]opyright*', 'e[dp]l-v10'] | ||
| 1221 | skip_extensions = (".html", ".js", ".json", ".svg", ".ts", ".go") | ||
| 1222 | licfiles = [] | ||
| 1223 | for root, dirs, files in os.walk(srctree): | ||
| 1224 | for fn in files: | ||
| 1225 | if fn.endswith(skip_extensions): | ||
| 1226 | continue | ||
| 1227 | for spec in licspecs: | ||
| 1228 | if fnmatch.fnmatch(fn, spec): | ||
| 1229 | fullpath = os.path.join(root, fn) | ||
| 1230 | if fullpath not in licfiles: | ||
| 1231 | licfiles.append(fullpath) | ||
| 1232 | |||
| 1233 | return licfiles | ||
| 1234 | |||
| 1235 | def match_licenses(licfiles, srctree, d): | ||
| 1236 | import bb | ||
| 1237 | md5sums = get_license_md5sums(d) | ||
| 1238 | |||
| 1239 | crunched_md5sums = crunch_known_licenses(d) | ||
| 1240 | |||
| 1241 | licenses = [] | ||
| 1242 | for licfile in sorted(licfiles): | ||
| 1243 | resolved_licfile = d.expand(licfile) | ||
| 1244 | md5value = bb.utils.md5_file(resolved_licfile) | ||
| 1245 | license = md5sums.get(md5value, None) | ||
| 1246 | if not license: | ||
| 1247 | crunched_md5, lictext = crunch_license(resolved_licfile) | ||
| 1248 | license = crunched_md5sums.get(crunched_md5, None) | ||
| 1249 | if lictext and not license: | ||
| 1250 | license = 'Unknown' | ||
| 1251 | logger.info("Please add the following line for '%s' to 'lib/recipetool/licenses.csv' " \ | ||
| 1252 | "and replace `Unknown` with the actual license:\n" \ | ||
| 1253 | "%s,Unknown" % (os.path.relpath(licfile, srctree + "/.."), md5value)) | ||
| 1254 | if license: | ||
| 1255 | licenses.append((license, os.path.relpath(licfile, srctree), md5value)) | ||
| 1256 | |||
| 1257 | return licenses | ||
| 1258 | |||
| 1259 | def find_licenses(srctree, d): | ||
| 1260 | licfiles = find_license_files(srctree) | ||
| 1261 | licenses = match_licenses(licfiles, srctree, d) | ||
| 1262 | |||
| 1263 | # FIXME should we grab at least one source file with a license header and add that too? | ||
| 1264 | |||
| 1265 | return licenses | ||
| 1266 | |||
| 1267 | def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn='${PN}'): | ||
| 1268 | """ | ||
| 1269 | Given a list of (license, path, md5sum) tuples as returned by | ||
| 1270 | match_licenses() and a dict of package-name-to-path mappings, write | ||
| 1271 | out a set of package-specific LICENSE values. | ||
| 1272 | """ | ||
| 1273 | pkglicenses = {pn: []} | ||
| 1274 | for license, licpath, _ in licvalues: | ||
| 1275 | license = fixup_license(license) | ||
| 1276 | for pkgname, pkgpath in packages.items(): | ||
| 1277 | if licpath.startswith(pkgpath + '/'): | ||
| 1278 | if pkgname in pkglicenses: | ||
| 1279 | pkglicenses[pkgname].append(license) | ||
| 1280 | else: | ||
| 1281 | pkglicenses[pkgname] = [license] | ||
| 1282 | break | ||
| 1283 | else: | ||
| 1284 | # Accumulate on the main package | ||
| 1285 | pkglicenses[pn].append(license) | ||
| 1286 | outlicenses = {} | ||
| 1287 | for pkgname in packages: | ||
| 1288 | # Assume AND operator between license files | ||
| 1289 | license = ' & '.join(list(set(pkglicenses.get(pkgname, ['Unknown'])))) or 'Unknown' | ||
| 1290 | if license == 'Unknown' and fallback_licenses and pkgname in fallback_licenses: | ||
| 1291 | license = fallback_licenses[pkgname] | ||
| 1292 | licenses = tidy_licenses(license) | ||
| 1293 | license = ' & '.join(licenses) | ||
| 1294 | outlines.append('LICENSE:%s = "%s"' % (pkgname, license)) | ||
| 1295 | outlicenses[pkgname] = licenses | ||
| 1296 | return outlicenses | ||
| 1297 | |||
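
A worked example clarifies the path-prefix attribution in split_pkg_licenses(); all names and checksums below are invented, and the same import environment as the earlier sketches is assumed. Note that only package names present in the packages dict get a LICENSE:<pkg> line - anything unmatched is accumulated on the main-package entry in pkglicenses:

    licvalues = [('MIT', 'LICENSE', 'a' * 32),                    # top level
                 ('BSD-3-Clause', 'vendor/x/COPYING', 'b' * 32)]  # under vendor/
    outlines = []
    split_pkg_licenses(licvalues, {'${PN}-vendor': 'vendor'}, outlines)
    print(outlines)  # expected: ['LICENSE:${PN}-vendor = "BSD-3-Clause"']
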
| 1298 | def generate_common_licenses_chksums(common_licenses, d): | ||
| 1299 | lic_files_chksums = [] | ||
| 1300 | for license in tidy_licenses(common_licenses): | ||
| 1301 | licfile = '${COMMON_LICENSE_DIR}/' + license | ||
| 1302 | md5value = bb.utils.md5_file(d.expand(licfile)) | ||
| 1303 | lic_files_chksums.append('file://%s;md5=%s' % (licfile, md5value)) | ||
| 1304 | return lic_files_chksums | ||
| 1305 | |||
| 1306 | def read_pkgconfig_provides(d): | ||
| 1307 | pkgdatadir = d.getVar('PKGDATA_DIR') | ||
| 1308 | pkgmap = {} | ||
| 1309 | for fn in glob.glob(os.path.join(pkgdatadir, 'shlibs2', '*.pclist')): | ||
| 1310 | with open(fn, 'r') as f: | ||
| 1311 | for line in f: | ||
| 1312 | pkgmap[os.path.basename(line.rstrip())] = os.path.splitext(os.path.basename(fn))[0] | ||
| 1313 | recipemap = {} | ||
| 1314 | for pc, pkg in pkgmap.items(): | ||
| 1315 | pkgdatafile = os.path.join(pkgdatadir, 'runtime', pkg) | ||
| 1316 | if os.path.exists(pkgdatafile): | ||
| 1317 | with open(pkgdatafile, 'r') as f: | ||
| 1318 | for line in f: | ||
| 1319 | if line.startswith('PN: '): | ||
| 1320 | recipemap[pc] = line.split(':', 1)[1].strip() | ||
| 1321 | return recipemap | ||
| 1322 | |||
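
The result maps a pkg-config module name (the .pc basename) to the recipe (PN) that provides it; since it is built entirely from the pkgdata shlibs2 *.pclist and runtime files, it only knows about recipes that have already been built. Illustrative shape only:

    # Requires a populated PKGDATA_DIR in the datastore d:
    recipemap = read_pkgconfig_provides(d)
    # e.g. {'zlib': 'zlib', 'libcrypto': 'openssl'}
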
| 1323 | def convert_debian(debpath): | ||
| 1324 | value_map = {'Package': 'PN', | ||
| 1325 | 'Version': 'PV', | ||
| 1326 | 'Section': 'SECTION', | ||
| 1327 | 'License': 'LICENSE', | ||
| 1328 | 'Homepage': 'HOMEPAGE'} | ||
| 1329 | |||
| 1330 | # FIXME extend this mapping - perhaps use distro_alias.inc? | ||
| 1331 | depmap = {'libz-dev': 'zlib'} | ||
| 1332 | |||
| 1333 | values = {} | ||
| 1334 | depends = [] | ||
| 1335 | with open(os.path.join(debpath, 'control'), 'r', errors='surrogateescape') as f: | ||
| 1336 | indesc = False | ||
| 1337 | for line in f: | ||
| 1338 | if indesc: | ||
| 1339 | if line.startswith(' '): | ||
| 1340 | if line.startswith(' This package contains'): | ||
| 1341 | indesc = False | ||
| 1342 | else: | ||
| 1343 | if 'DESCRIPTION' in values: | ||
| 1344 | values['DESCRIPTION'] += ' ' + line.strip() | ||
| 1345 | else: | ||
| 1346 | values['DESCRIPTION'] = line.strip() | ||
| 1347 | else: | ||
| 1348 | indesc = False | ||
| 1349 | if not indesc: | ||
| 1350 | splitline = line.split(':', 1) | ||
| 1351 | if len(splitline) < 2: | ||
| 1352 | continue | ||
| 1353 | key = splitline[0] | ||
| 1354 | value = splitline[1].strip() | ||
| 1355 | if key == 'Build-Depends': | ||
| 1356 | for dep in value.split(','): | ||
| 1357 | dep = dep.split()[0] | ||
| 1358 | mapped = depmap.get(dep, '') | ||
| 1359 | if mapped: | ||
| 1360 | depends.append(mapped) | ||
| 1361 | elif key == 'Description': | ||
| 1362 | values['SUMMARY'] = value | ||
| 1363 | indesc = True | ||
| 1364 | else: | ||
| 1365 | varname = value_map.get(key, None) | ||
| 1366 | if varname: | ||
| 1367 | values[varname] = value | ||
| 1368 | postinst = os.path.join(debpath, 'postinst') | ||
| 1369 | postrm = os.path.join(debpath, 'postrm') | ||
| 1370 | preinst = os.path.join(debpath, 'preinst') | ||
| 1371 | prerm = os.path.join(debpath, 'prerm') | ||
| 1372 | sfiles = [postinst, postrm, preinst, prerm] | ||
| 1373 | for sfile in sfiles: | ||
| 1374 | if os.path.isfile(sfile): | ||
| 1375 | logger.info("Converting %s file to recipe function..." % | ||
| 1376 | os.path.basename(sfile).upper()) | ||
| 1377 | content = [] | ||
| 1378 | with open(sfile) as f: | ||
| 1379 | for line in f: | ||
| 1380 | if "#!/" in line: | ||
| 1381 | continue | ||
| 1382 | line = line.rstrip("\n") | ||
| 1383 | if line.strip(): | ||
| 1384 | content.append(line) | ||
| 1385 | if content: | ||
| 1386 | values[os.path.basename(f.name)] = content | ||
| 1387 | |||
| 1388 | #if depends: | ||
| 1389 | # values['DEPENDS'] = ' '.join(depends) | ||
| 1390 | |||
| 1391 | return values | ||
| 1392 | |||
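
A self-contained sketch of convert_debian() on a minimal, invented control file (assuming recipetool.create is importable as above):

    import os, tempfile
    from recipetool.create import convert_debian

    tmpdir = tempfile.mkdtemp()
    with open(os.path.join(tmpdir, 'control'), 'w') as f:
        f.write('Package: foo\n'
                'Version: 1.2.3\n'
                'Description: An example tool\n'
                ' Longer description text.\n')
    print(convert_debian(tmpdir))
    # expected: {'PN': 'foo', 'PV': '1.2.3', 'SUMMARY': 'An example tool',
    #            'DESCRIPTION': 'Longer description text.'}
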
| 1393 | def convert_rpm_xml(xmlfile): | ||
| 1394 | '''Converts the output from rpm -qp --xml to a set of variable values''' | ||
| 1395 | import xml.etree.ElementTree as ElementTree | ||
| 1396 | rpmtag_map = {'Name': 'PN', | ||
| 1397 | 'Version': 'PV', | ||
| 1398 | 'Summary': 'SUMMARY', | ||
| 1399 | 'Description': 'DESCRIPTION', | ||
| 1400 | 'License': 'LICENSE', | ||
| 1401 | 'Url': 'HOMEPAGE'} | ||
| 1402 | |||
| 1403 | values = {} | ||
| 1404 | tree = ElementTree.parse(xmlfile) | ||
| 1405 | root = tree.getroot() | ||
| 1406 | for child in root: | ||
| 1407 | if child.tag == 'rpmTag': | ||
| 1408 | name = child.attrib.get('name', None) | ||
| 1409 | if name: | ||
| 1410 | varname = rpmtag_map.get(name, None) | ||
| 1411 | if varname: | ||
| 1412 | values[varname] = child[0].text | ||
| 1413 | return values | ||
| 1414 | |||
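
Similarly for convert_rpm_xml(): the function only walks the root element's children, so the invented fragment below (with an assumed <rpmHeader> root, matching rpm -qp --xml output) is enough to exercise it:

    import tempfile
    from recipetool.create import convert_rpm_xml

    xml = ('<rpmHeader>'
           '<rpmTag name="Name"><string>foo</string></rpmTag>'
           '<rpmTag name="Version"><string>1.2.3</string></rpmTag>'
           '</rpmHeader>')
    with tempfile.NamedTemporaryFile('w', suffix='.xml') as f:
        f.write(xml)
        f.flush()
        print(convert_rpm_xml(f.name))  # expected: {'PN': 'foo', 'PV': '1.2.3'}
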
| 1415 | |||
| 1416 | def register_commands(subparsers): | ||
| 1417 | parser_create = subparsers.add_parser('create', | ||
| 1418 | help='Create a new recipe', | ||
| 1419 | description='Creates a new recipe from a source tree') | ||
| 1420 | parser_create.add_argument('source', help='Path or URL to source') | ||
| 1421 | parser_create.add_argument('-o', '--outfile', help='Specify filename for recipe to create') | ||
| 1422 | parser_create.add_argument('-p', '--provides', help='Specify an alias for the item provided by the recipe') | ||
| 1423 | parser_create.add_argument('-m', '--machine', help='Make recipe machine-specific as opposed to architecture-specific', action='store_true') | ||
| 1424 | parser_create.add_argument('-x', '--extract-to', metavar='EXTRACTPATH', help='Assuming source is a URL, fetch it and extract it to the directory specified as %(metavar)s') | ||
| 1425 | parser_create.add_argument('-N', '--name', help='Name to use within recipe (PN)') | ||
| 1426 | parser_create.add_argument('-V', '--version', help='Version to use within recipe (PV)') | ||
| 1427 | parser_create.add_argument('-b', '--binary', help='Treat the source tree as something that should be installed verbatim (no compilation, same directory structure)', action='store_true') | ||
| 1428 | parser_create.add_argument('--also-native', help='Also add native variant (i.e. support building recipe for the build host as well as the target machine)', action='store_true') | ||
| 1429 | parser_create.add_argument('--src-subdir', help='Specify subdirectory within source tree to use', metavar='SUBDIR') | ||
| 1430 | group = parser_create.add_mutually_exclusive_group() | ||
| 1431 | group.add_argument('-a', '--autorev', help='When fetching from a git repository, set SRCREV in the recipe to a floating revision instead of fixed', action="store_true") | ||
| 1432 | group.add_argument('-S', '--srcrev', help='Source revision to fetch if fetching from an SCM such as git (default latest)') | ||
| 1433 | parser_create.add_argument('-B', '--srcbranch', help='Branch in source repository if fetching from an SCM such as git (default master)') | ||
| 1434 | parser_create.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)') | ||
| 1435 | parser_create.add_argument('--npm-dev', action="store_true", help='For npm, also fetch devDependencies') | ||
| 1436 | parser_create.add_argument('--no-pypi', action="store_true", help='Do not inherit pypi class') | ||
| 1437 | parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS) | ||
| 1438 | parser_create.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).') | ||
| 1439 | parser_create.set_defaults(func=create_recipe) | ||
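
For reference, typical invocations of the command registered above (URL, names and versions hypothetical):

    recipetool create -o example_0.1.bb https://example.com/downloads/example-0.1.tar.gz
    recipetool create -N example -V 0.1 https://example.com/example.git
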
diff --git a/scripts/lib/recipetool/create_buildsys.py b/scripts/lib/recipetool/create_buildsys.py deleted file mode 100644 index ec9d510e23..0000000000 --- a/scripts/lib/recipetool/create_buildsys.py +++ /dev/null | |||
| @@ -1,875 +0,0 @@ | |||
| 1 | # Recipe creation tool - create command build system handlers | ||
| 2 | # | ||
| 3 | # Copyright (C) 2014-2016 Intel Corporation | ||
| 4 | # | ||
| 5 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 6 | # | ||
| 7 | |||
| 8 | import os | ||
| 9 | import re | ||
| 10 | import logging | ||
| 11 | from recipetool.create import RecipeHandler, validate_pv | ||
| 12 | |||
| 13 | logger = logging.getLogger('recipetool') | ||
| 14 | |||
| 15 | tinfoil = None | ||
| 16 | plugins = None | ||
| 17 | |||
| 18 | def plugin_init(pluginlist): | ||
| 19 | # Take a reference to the list so we can use it later | ||
| 20 | global plugins | ||
| 21 | plugins = pluginlist | ||
| 22 | |||
| 23 | def tinfoil_init(instance): | ||
| 24 | global tinfoil | ||
| 25 | tinfoil = instance | ||
| 26 | |||
| 27 | |||
| 28 | class CmakeRecipeHandler(RecipeHandler): | ||
| 29 | def process(self, srctree, classes, lines_before, lines_after, handled, extravalues): | ||
| 30 | if 'buildsystem' in handled: | ||
| 31 | return False | ||
| 32 | |||
| 33 | if RecipeHandler.checkfiles(srctree, ['CMakeLists.txt']): | ||
| 34 | classes.append('cmake') | ||
| 35 | values = CmakeRecipeHandler.extract_cmake_deps(lines_before, srctree, extravalues) | ||
| 36 | classes.extend(values.pop('inherit', '').split()) | ||
| 37 | for var, value in values.items(): | ||
| 38 | lines_before.append('%s = "%s"' % (var, value)) | ||
| 39 | lines_after.append('# Specify any options you want to pass to cmake using EXTRA_OECMAKE:') | ||
| 40 | lines_after.append('EXTRA_OECMAKE = ""') | ||
| 41 | lines_after.append('') | ||
| 42 | handled.append('buildsystem') | ||
| 43 | return True | ||
| 44 | return False | ||
| 45 | |||
| 46 | @staticmethod | ||
| 47 | def extract_cmake_deps(outlines, srctree, extravalues, cmakelistsfile=None): | ||
| 48 | # Find all plugins that want to register handlers | ||
| 49 | logger.debug('Loading cmake handlers') | ||
| 50 | handlers = [] | ||
| 51 | for plugin in plugins: | ||
| 52 | if hasattr(plugin, 'register_cmake_handlers'): | ||
| 53 | plugin.register_cmake_handlers(handlers) | ||
| 54 | |||
| 55 | values = {} | ||
| 56 | inherits = [] | ||
| 57 | |||
| 58 | if cmakelistsfile: | ||
| 59 | srcfiles = [cmakelistsfile] | ||
| 60 | else: | ||
| 61 | srcfiles = RecipeHandler.checkfiles(srctree, ['CMakeLists.txt']) | ||
| 62 | |||
| 63 | # Note that some of these are non-standard, but probably better to | ||
| 64 | # be able to map them anyway if we see them | ||
| 65 | cmake_pkgmap = {'alsa': 'alsa-lib', | ||
| 66 | 'aspell': 'aspell', | ||
| 67 | 'atk': 'atk', | ||
| 68 | 'bison': 'bison-native', | ||
| 69 | 'boost': 'boost', | ||
| 70 | 'bzip2': 'bzip2', | ||
| 71 | 'cairo': 'cairo', | ||
| 72 | 'cups': 'cups', | ||
| 73 | 'curl': 'curl', | ||
| 74 | 'curses': 'ncurses', | ||
| 75 | 'cvs': 'cvs', | ||
| 76 | 'drm': 'libdrm', | ||
| 77 | 'dbus': 'dbus', | ||
| 78 | 'dbusglib': 'dbus-glib', | ||
| 79 | 'egl': 'virtual/egl', | ||
| 80 | 'expat': 'expat', | ||
| 81 | 'flex': 'flex-native', | ||
| 82 | 'fontconfig': 'fontconfig', | ||
| 83 | 'freetype': 'freetype', | ||
| 84 | 'gettext': '', | ||
| 85 | 'git': '', | ||
| 86 | 'gio': 'glib-2.0', | ||
| 87 | 'giounix': 'glib-2.0', | ||
| 88 | 'glew': 'glew', | ||
| 89 | 'glib': 'glib-2.0', | ||
| 90 | 'glib2': 'glib-2.0', | ||
| 91 | 'glu': 'libglu', | ||
| 92 | 'glut': 'freeglut', | ||
| 93 | 'gobject': 'glib-2.0', | ||
| 94 | 'gperf': 'gperf-native', | ||
| 95 | 'gnutls': 'gnutls', | ||
| 96 | 'gtk2': 'gtk+', | ||
| 97 | 'gtk3': 'gtk+3', | ||
| 98 | 'gtk': 'gtk+3', | ||
| 99 | 'harfbuzz': 'harfbuzz', | ||
| 100 | 'icu': 'icu', | ||
| 101 | 'intl': 'virtual/libintl', | ||
| 102 | 'jpeg': 'jpeg', | ||
| 103 | 'libarchive': 'libarchive', | ||
| 104 | 'libiconv': 'virtual/libiconv', | ||
| 105 | 'liblzma': 'xz', | ||
| 106 | 'libxml2': 'libxml2', | ||
| 107 | 'libxslt': 'libxslt', | ||
| 108 | 'opengl': 'virtual/libgl', | ||
| 109 | 'openmp': '', | ||
| 110 | 'openssl': 'openssl', | ||
| 111 | 'pango': 'pango', | ||
| 112 | 'perl': '', | ||
| 113 | 'perllibs': '', | ||
| 114 | 'pkgconfig': '', | ||
| 115 | 'png': 'libpng', | ||
| 116 | 'pthread': '', | ||
| 117 | 'pythoninterp': '', | ||
| 118 | 'pythonlibs': '', | ||
| 119 | 'ruby': 'ruby-native', | ||
| 120 | 'sdl': 'libsdl', | ||
| 121 | 'sdl2': 'libsdl2', | ||
| 122 | 'subversion': 'subversion-native', | ||
| 123 | 'swig': 'swig-native', | ||
| 124 | 'tcl': 'tcl-native', | ||
| 125 | 'threads': '', | ||
| 126 | 'tiff': 'tiff', | ||
| 127 | 'wget': 'wget', | ||
| 128 | 'x11': 'libx11', | ||
| 129 | 'xcb': 'libxcb', | ||
| 130 | 'xext': 'libxext', | ||
| 131 | 'xfixes': 'libxfixes', | ||
| 132 | 'zlib': 'zlib', | ||
| 133 | } | ||
| 134 | |||
| 135 | pcdeps = [] | ||
| 136 | libdeps = [] | ||
| 137 | deps = [] | ||
| 138 | unmappedpkgs = [] | ||
| 139 | |||
| 140 | proj_re = re.compile(r'project\s*\(([^)]*)\)', re.IGNORECASE) | ||
| 141 | pkgcm_re = re.compile(r'pkg_check_modules\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?\s+([^)\s]+)\s*\)', re.IGNORECASE) | ||
| 142 | pkgsm_re = re.compile(r'pkg_search_module\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?((\s+[^)\s]+)+)\s*\)', re.IGNORECASE) | ||
| 143 | findpackage_re = re.compile(r'find_package\s*\(\s*([a-zA-Z0-9-_]+)\s*.*', re.IGNORECASE) | ||
| 144 | findlibrary_re = re.compile(r'find_library\s*\(\s*[a-zA-Z0-9-_]+\s*(NAMES\s+)?([a-zA-Z0-9-_ ]+)\s*.*') | ||
| 145 | checklib_re = re.compile(r'check_library_exists\s*\(\s*([^\s)]+)\s*.*', re.IGNORECASE) | ||
| 146 | include_re = re.compile(r'include\s*\(\s*([^)\s]*)\s*\)', re.IGNORECASE) | ||
| 147 | subdir_re = re.compile(r'add_subdirectory\s*\(\s*([^)\s]*)\s*([^)\s]*)\s*\)', re.IGNORECASE) | ||
| 148 | dep_re = re.compile(r'([^ ><=]+)( *[<>=]+ *[^ ><=]+)?') | ||
| 149 | |||
| 150 | def find_cmake_package(pkg): | ||
| 151 | RecipeHandler.load_devel_filemap(tinfoil.config_data) | ||
| 152 | for fn, pn in RecipeHandler.recipecmakefilemap.items(): | ||
| 153 | splitname = fn.split('/') | ||
| 154 | if len(splitname) > 1: | ||
| 155 | if splitname[0].lower().startswith(pkg.lower()): | ||
| 156 | if splitname[1] == '%s-config.cmake' % pkg.lower() or splitname[1] == '%sConfig.cmake' % pkg or splitname[1] == 'Find%s.cmake' % pkg: | ||
| 157 | return pn | ||
| 158 | return None | ||
| 159 | |||
| 160 | def interpret_value(value): | ||
| 161 | return value.strip('"') | ||
| 162 | |||
| 163 | def parse_cmake_file(fn, paths=None): | ||
| 164 | searchpaths = (paths or []) + [os.path.dirname(fn)] | ||
| 165 | logger.debug('Parsing file %s' % fn) | ||
| 166 | with open(fn, 'r', errors='surrogateescape') as f: | ||
| 167 | for line in f: | ||
| 168 | line = line.strip() | ||
| 169 | line_handled = False | ||
| 169 | for handler in handlers: | ||
| 170 | if handler.process_line(srctree, fn, line, libdeps, pcdeps, deps, outlines, inherits, values): | ||
| 171 | # Flag and break so a fully handled line also skips the parsing below | ||
| 171 | line_handled = True | ||
| 171 | break | ||
| 171 | if line_handled: | ||
| 171 | continue | ||
| 172 | res = include_re.match(line) | ||
| 173 | if res: | ||
| 174 | includefn = bb.utils.which(':'.join(searchpaths), res.group(1)) | ||
| 175 | if includefn: | ||
| 176 | parse_cmake_file(includefn, searchpaths) | ||
| 177 | else: | ||
| 178 | logger.debug('Unable to recurse into include file %s' % res.group(1)) | ||
| 179 | continue | ||
| 180 | res = subdir_re.match(line) | ||
| 181 | if res: | ||
| 182 | subdirfn = os.path.join(os.path.dirname(fn), res.group(1), 'CMakeLists.txt') | ||
| 183 | if os.path.exists(subdirfn): | ||
| 184 | parse_cmake_file(subdirfn, searchpaths) | ||
| 185 | else: | ||
| 186 | logger.debug('Unable to recurse into subdirectory file %s' % subdirfn) | ||
| 187 | continue | ||
| 188 | res = proj_re.match(line) | ||
| 189 | if res: | ||
| 190 | extravalues['PN'] = interpret_value(res.group(1).split()[0]) | ||
| 191 | continue | ||
| 192 | res = pkgcm_re.match(line) | ||
| 193 | if res: | ||
| 194 | res = dep_re.findall(res.group(2)) | ||
| 195 | if res: | ||
| 196 | pcdeps.extend([interpret_value(x[0]) for x in res]) | ||
| 197 | inherits.append('pkgconfig') | ||
| 198 | continue | ||
| 199 | res = pkgsm_re.match(line) | ||
| 200 | if res: | ||
| 201 | res = dep_re.findall(res.group(2)) | ||
| 202 | if res: | ||
| 203 | # Note: appending a tuple here! | ||
| 204 | item = tuple((interpret_value(x[0]) for x in res)) | ||
| 205 | if len(item) == 1: | ||
| 206 | item = item[0] | ||
| 207 | pcdeps.append(item) | ||
| 208 | inherits.append('pkgconfig') | ||
| 209 | continue | ||
| 210 | res = findpackage_re.match(line) | ||
| 211 | if res: | ||
| 212 | origpkg = res.group(1) | ||
| 213 | pkg = interpret_value(origpkg) | ||
| 214 | found = False | ||
| 215 | for handler in handlers: | ||
| 216 | if handler.process_findpackage(srctree, fn, pkg, deps, outlines, inherits, values): | ||
| 217 | logger.debug('Mapped CMake package %s via handler %s' % (pkg, handler.__class__.__name__)) | ||
| 218 | found = True | ||
| 219 | break | ||
| 220 | if found: | ||
| 221 | continue | ||
| 222 | elif pkg == 'Gettext': | ||
| 223 | inherits.append('gettext') | ||
| 224 | elif pkg == 'Perl': | ||
| 225 | inherits.append('perlnative') | ||
| 226 | elif pkg == 'PkgConfig': | ||
| 227 | inherits.append('pkgconfig') | ||
| 228 | elif pkg == 'PythonInterp': | ||
| 229 | inherits.append('python3native') | ||
| 230 | elif pkg == 'PythonLibs': | ||
| 231 | inherits.append('python3-dir') | ||
| 232 | else: | ||
| 233 | # Try to map via looking at installed CMake packages in pkgdata | ||
| 234 | dep = find_cmake_package(pkg) | ||
| 235 | if dep: | ||
| 236 | logger.debug('Mapped CMake package %s to recipe %s via pkgdata' % (pkg, dep)) | ||
| 237 | deps.append(dep) | ||
| 238 | else: | ||
| 239 | dep = cmake_pkgmap.get(pkg.lower(), None) | ||
| 240 | if dep: | ||
| 241 | logger.debug('Mapped CMake package %s to recipe %s via internal list' % (pkg, dep)) | ||
| 242 | deps.append(dep) | ||
| 243 | elif dep is None: | ||
| 244 | unmappedpkgs.append(origpkg) | ||
| 245 | continue | ||
| 246 | res = checklib_re.match(line) | ||
| 247 | if res: | ||
| 248 | lib = interpret_value(res.group(1)) | ||
| 249 | if not lib.startswith('$'): | ||
| 250 | libdeps.append(lib) | ||
| 251 | res = findlibrary_re.match(line) | ||
| 252 | if res: | ||
| 253 | libs = res.group(2).split() | ||
| 254 | for lib in libs: | ||
| 255 | if lib in ['HINTS', 'PATHS', 'PATH_SUFFIXES', 'DOC', 'NAMES_PER_DIR'] or lib.startswith(('NO_', 'CMAKE_', 'ONLY_CMAKE_')): | ||
| 256 | break | ||
| 257 | lib = interpret_value(lib) | ||
| 258 | if not lib.startswith('$'): | ||
| 259 | libdeps.append(lib) | ||
| 260 | if line.lower().startswith('useswig'): | ||
| 261 | deps.append('swig-native') | ||
| 262 | continue | ||
| 263 | |||
| 264 | parse_cmake_file(srcfiles[0]) | ||
| 265 | |||
| 266 | if unmappedpkgs: | ||
| 267 | outlines.append('# NOTE: unable to map the following CMake package dependencies: %s' % ' '.join(list(set(unmappedpkgs)))) | ||
| 268 | |||
| 269 | RecipeHandler.handle_depends(libdeps, pcdeps, deps, outlines, values, tinfoil.config_data) | ||
| 270 | |||
| 271 | for handler in handlers: | ||
| 272 | handler.post_process(srctree, libdeps, pcdeps, deps, outlines, inherits, values) | ||
| 273 | |||
| 274 | if inherits: | ||
| 275 | values['inherit'] = ' '.join(list(set(inherits))) | ||
| 276 | |||
| 277 | return values | ||
| 278 | |||
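
To make the mapping concrete, a hypothetical CMakeLists.txt and the effect each line has in extract_cmake_deps():

    # project(example)                          -> extravalues['PN'] = 'example'
    # pkg_check_modules(DEPS REQUIRED glib-2.0) -> pcdeps += ['glib-2.0'], inherit pkgconfig
    # find_package(CURL)                        -> no pkgdata match falls back to
    #                                              cmake_pkgmap['curl'], so deps += ['curl']
    # find_library(M_LIB NAMES m)               -> libdeps += ['m']
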
| 279 | |||
| 280 | class CmakeExtensionHandler(object): | ||
| 281 | '''Base class for CMake extension handlers''' | ||
| 282 | def process_line(self, srctree, fn, line, libdeps, pcdeps, deps, outlines, inherits, values): | ||
| 283 | ''' | ||
| 284 | Handle a line parsed out of a CMake file. | ||
| 285 | Return True if you've completely handled the passed in line, otherwise return False. | ||
| 286 | ''' | ||
| 287 | return False | ||
| 288 | |||
| 289 | def process_findpackage(self, srctree, fn, pkg, deps, outlines, inherits, values): | ||
| 290 | ''' | ||
| 291 | Handle a find_package package parsed out of a CMake file. | ||
| 292 | Return True if you've completely handled the passed in package, otherwise return False. | ||
| 293 | ''' | ||
| 294 | return False | ||
| 295 | |||
| 296 | def post_process(self, srctree, libdeps, pcdeps, deps, outlines, inherits, values): | ||
| 297 | ''' | ||
| 298 | Apply any desired post-processing on the output | ||
| 299 | ''' | ||
| 300 | return | ||
| 301 | |||
| 302 | |||
| 303 | |||
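
A minimal sketch of a concrete extension, assuming a recipetool plugin module that exposes register_cmake_handlers() (the hook probed for in extract_cmake_deps() above); the Foo-to-foo mapping is invented:

    class FooCmakeHandler(CmakeExtensionHandler):
        '''Map find_package(Foo) onto a hypothetical foo recipe'''
        def process_findpackage(self, srctree, fn, pkg, deps, outlines, inherits, values):
            if pkg == 'Foo':
                deps.append('foo')  # hypothetical recipe providing Foo
                return True
            return False

    def register_cmake_handlers(handlers):
        handlers.append(FooCmakeHandler())
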
| 304 | class SconsRecipeHandler(RecipeHandler): | ||
| 305 | def process(self, srctree, classes, lines_before, lines_after, handled, extravalues): | ||
| 306 | if 'buildsystem' in handled: | ||
| 307 | return False | ||
| 308 | |||
| 309 | if RecipeHandler.checkfiles(srctree, ['SConstruct', 'Sconstruct', 'sconstruct']): | ||
| 310 | classes.append('scons') | ||
| 311 | lines_after.append('# Specify any options you want to pass to scons using EXTRA_OESCONS:') | ||
| 312 | lines_after.append('EXTRA_OESCONS = ""') | ||
| 313 | lines_after.append('') | ||
| 314 | handled.append('buildsystem') | ||
| 315 | return True | ||
| 316 | return False | ||
| 317 | |||
| 318 | |||
| 319 | class QmakeRecipeHandler(RecipeHandler): | ||
| 320 | def process(self, srctree, classes, lines_before, lines_after, handled, extravalues): | ||
| 321 | if 'buildsystem' in handled: | ||
| 322 | return False | ||
| 323 | |||
| 324 | if RecipeHandler.checkfiles(srctree, ['*.pro']): | ||
| 325 | classes.append('qmake2') | ||
| 326 | handled.append('buildsystem') | ||
| 327 | return True | ||
| 328 | return False | ||
| 329 | |||
| 330 | |||
| 331 | class AutotoolsRecipeHandler(RecipeHandler): | ||
| 332 | def process(self, srctree, classes, lines_before, lines_after, handled, extravalues): | ||
| 333 | if 'buildsystem' in handled: | ||
| 334 | return False | ||
| 335 | |||
| 336 | autoconf = False | ||
| 337 | if RecipeHandler.checkfiles(srctree, ['configure.ac', 'configure.in']): | ||
| 338 | autoconf = True | ||
| 339 | values = AutotoolsRecipeHandler.extract_autotools_deps(lines_before, srctree, extravalues) | ||
| 340 | classes.extend(values.pop('inherit', '').split()) | ||
| 341 | for var, value in values.items(): | ||
| 342 | lines_before.append('%s = "%s"' % (var, value)) | ||
| 343 | else: | ||
| 344 | conffile = RecipeHandler.checkfiles(srctree, ['configure']) | ||
| 345 | if conffile: | ||
| 346 | # Check if this is just a pre-generated autoconf configure script | ||
| 347 | with open(conffile[0], 'r', errors='surrogateescape') as f: | ||
| 348 | for i in range(1, 10): | ||
| 349 | if 'Generated by GNU Autoconf' in f.readline(): | ||
| 350 | autoconf = True | ||
| 351 | break | ||
| 352 | |||
| 353 | if autoconf and not ('PV' in extravalues and 'PN' in extravalues): | ||
| 354 | # Last resort | ||
| 355 | conffile = RecipeHandler.checkfiles(srctree, ['configure']) | ||
| 356 | if conffile: | ||
| 357 | with open(conffile[0], 'r', errors='surrogateescape') as f: | ||
| 358 | for line in f: | ||
| 359 | line = line.strip() | ||
| 360 | if line.startswith('VERSION=') or line.startswith('PACKAGE_VERSION='): | ||
| 361 | pv = line.split('=')[1].strip('"\'') | ||
| 362 | if pv and 'PV' not in extravalues and validate_pv(pv): | ||
| 363 | extravalues['PV'] = pv | ||
| 364 | elif line.startswith('PACKAGE_NAME=') or line.startswith('PACKAGE='): | ||
| 365 | pn = line.split('=')[1].strip('"\'') | ||
| 366 | if pn and 'PN' not in extravalues: | ||
| 367 | extravalues['PN'] = pn | ||
| 368 | |||
| 369 | if autoconf: | ||
| 370 | lines_before.append('') | ||
| 371 | lines_before.append('# NOTE: if this software is not capable of being built in a separate build directory') | ||
| 372 | lines_before.append('# from the source, you should replace autotools with autotools-brokensep in the') | ||
| 373 | lines_before.append('# inherit line') | ||
| 374 | classes.append('autotools') | ||
| 375 | lines_after.append('# Specify any options you want to pass to the configure script using EXTRA_OECONF:') | ||
| 376 | lines_after.append('EXTRA_OECONF = ""') | ||
| 377 | lines_after.append('') | ||
| 378 | handled.append('buildsystem') | ||
| 379 | return True | ||
| 380 | |||
| 381 | return False | ||
| 382 | |||
| 383 | @staticmethod | ||
| 384 | def extract_autotools_deps(outlines, srctree, extravalues=None, acfile=None): | ||
| 385 | import shlex | ||
| 386 | |||
| 387 | # Find all plugins that want to register handlers | ||
| 388 | logger.debug('Loading autotools handlers') | ||
| 389 | handlers = [] | ||
| 390 | for plugin in plugins: | ||
| 391 | if hasattr(plugin, 'register_autotools_handlers'): | ||
| 392 | plugin.register_autotools_handlers(handlers) | ||
| 393 | |||
| 394 | values = {} | ||
| 395 | inherits = [] | ||
| 396 | |||
| 397 | # Hardcoded map, we also use a dynamic one based on what's in the sysroot | ||
| 398 | progmap = {'flex': 'flex-native', | ||
| 399 | 'bison': 'bison-native', | ||
| 400 | 'm4': 'm4-native', | ||
| 401 | 'tar': 'tar-native', | ||
| 402 | 'ar': 'binutils-native', | ||
| 403 | 'ranlib': 'binutils-native', | ||
| 404 | 'ld': 'binutils-native', | ||
| 405 | 'strip': 'binutils-native', | ||
| 406 | 'libtool': '', | ||
| 407 | 'autoconf': '', | ||
| 408 | 'autoheader': '', | ||
| 409 | 'automake': '', | ||
| 410 | 'uname': '', | ||
| 411 | 'rm': '', | ||
| 412 | 'cp': '', | ||
| 413 | 'mv': '', | ||
| 414 | 'find': '', | ||
| 415 | 'awk': '', | ||
| 416 | 'sed': '', | ||
| 417 | } | ||
| 418 | progclassmap = {'gconftool-2': 'gconf', | ||
| 419 | 'pkg-config': 'pkgconfig', | ||
| 420 | 'python': 'python3native', | ||
| 421 | 'python3': 'python3native', | ||
| 422 | 'perl': 'perlnative', | ||
| 423 | 'makeinfo': 'texinfo', | ||
| 424 | } | ||
| 425 | |||
| 426 | pkg_re = re.compile(r'PKG_CHECK_MODULES\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*') | ||
| 427 | pkgce_re = re.compile(r'PKG_CHECK_EXISTS\(\s*\[?([^,\]]*)\]?[),].*') | ||
| 428 | lib_re = re.compile(r'AC_CHECK_LIB\(\s*\[?([^,\]]*)\]?,.*') | ||
| 429 | libx_re = re.compile(r'AX_CHECK_LIBRARY\(\s*\[?[^,\]]*\]?,\s*\[?([^,\]]*)\]?,\s*\[?([a-zA-Z0-9-]*)\]?,.*') | ||
| 430 | progs_re = re.compile(r'_PROGS?\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*') | ||
| 431 | dep_re = re.compile(r'([^ ><=]+)( [<>=]+ [^ ><=]+)?') | ||
| 432 | ac_init_re = re.compile(r'AC_INIT\(\s*([^,]+),\s*([^,]+)[,)].*') | ||
| 433 | am_init_re = re.compile(r'AM_INIT_AUTOMAKE\(\s*([^,]+),\s*([^,]+)[,)].*') | ||
| 434 | define_re = re.compile(r'\s*(m4_)?define\(\s*([^,]+),\s*([^,]+)\)') | ||
| 435 | version_re = re.compile(r'([0-9.]+)') | ||
| 436 | |||
| 437 | defines = {} | ||
| 438 | def subst_defines(value): | ||
| 439 | newvalue = value | ||
| 440 | for define, defval in defines.items(): | ||
| 441 | newvalue = newvalue.replace(define, defval) | ||
| 442 | if newvalue != value: | ||
| 443 | return subst_defines(newvalue) | ||
| 444 | return value | ||
| 445 | |||
| 446 | def process_value(value): | ||
| 447 | value = value.replace('[', '').replace(']', '') | ||
| 448 | if value.startswith('m4_esyscmd(') or value.startswith('m4_esyscmd_s('): | ||
| 449 | cmd = subst_defines(value[value.index('(')+1:-1]) | ||
| 450 | try: | ||
| 451 | if '|' in cmd: | ||
| 452 | cmd = 'set -o pipefail; ' + cmd | ||
| 453 | stdout, _ = bb.process.run(cmd, cwd=srctree, shell=True) | ||
| 454 | ret = stdout.rstrip() | ||
| 455 | except bb.process.ExecutionError as e: | ||
| 456 | ret = '' | ||
| 457 | elif value.startswith('m4_'): | ||
| 458 | return None | ||
| 459 | else: ret = subst_defines(value) | ||
| 460 | if ret: | ||
| 461 | ret = ret.strip('"\'') | ||
| 462 | return ret | ||
| 463 | |||
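| | # NOTE: for example, with defines = {'MAJOR': '1', 'MINOR': '2'}, a value of | ||
| | #   [m4_esyscmd_s(echo MAJOR.MINOR)] | ||
| | # has its brackets stripped, runs 'echo 1.2' in srctree and yields '1.2', | ||
| | # while a plain value of [MAJOR.MINOR] is substituted directly to '1.2'. | ||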
| 464 | # Since a configure.ac file is essentially a program, this is unavoidably | ||
| 465 | # something of a hack, but it ought to be a close enough approximation | ||
| 466 | if acfile: | ||
| 467 | srcfiles = [acfile] | ||
| 468 | else: | ||
| 469 | srcfiles = RecipeHandler.checkfiles(srctree, ['acinclude.m4', 'configure.ac', 'configure.in']) | ||
| 470 | |||
| 471 | pcdeps = [] | ||
| 472 | libdeps = [] | ||
| 473 | deps = [] | ||
| 474 | unmapped = [] | ||
| 475 | |||
| 476 | RecipeHandler.load_binmap(tinfoil.config_data) | ||
| 477 | |||
| 478 | def process_macro(keyword, value): | ||
| 479 | for handler in handlers: | ||
| 480 | if handler.process_macro(srctree, keyword, value, process_value, libdeps, pcdeps, deps, outlines, inherits, values): | ||
| 481 | return | ||
| 482 | logger.debug('Found keyword %s with value "%s"' % (keyword, value)) | ||
| 483 | if keyword == 'PKG_CHECK_MODULES': | ||
| 484 | res = pkg_re.search(value) | ||
| 485 | if res: | ||
| 486 | res = dep_re.findall(res.group(1)) | ||
| 487 | if res: | ||
| 488 | pcdeps.extend([x[0] for x in res]) | ||
| 489 | inherits.append('pkgconfig') | ||
| 490 | elif keyword == 'PKG_CHECK_EXISTS': | ||
| 491 | res = pkgce_re.search(value) | ||
| 492 | if res: | ||
| 493 | res = dep_re.findall(res.group(1)) | ||
| 494 | if res: | ||
| 495 | pcdeps.extend([x[0] for x in res]) | ||
| 496 | inherits.append('pkgconfig') | ||
| 497 | elif keyword in ('AM_GNU_GETTEXT', 'AM_GLIB_GNU_GETTEXT', 'GETTEXT_PACKAGE'): | ||
| 498 | inherits.append('gettext') | ||
| 499 | elif keyword in ('AC_PROG_INTLTOOL', 'IT_PROG_INTLTOOL'): | ||
| 500 | deps.append('intltool-native') | ||
| 501 | elif keyword == 'AM_PATH_GLIB_2_0': | ||
| 502 | deps.append('glib-2.0') | ||
| 503 | elif keyword in ('AC_CHECK_PROG', 'AC_PATH_PROG', 'AX_WITH_PROG'): | ||
| 504 | res = progs_re.search(value) | ||
| 505 | if res: | ||
| 506 | for prog in shlex.split(res.group(1)): | ||
| 507 | prog = prog.split()[0] | ||
| 508 | for handler in handlers: | ||
| 509 | if handler.process_prog(srctree, keyword, value, prog, deps, outlines, inherits, values): | ||
| 510 | return | ||
| 511 | progclass = progclassmap.get(prog, None) | ||
| 512 | if progclass: | ||
| 513 | inherits.append(progclass) | ||
| 514 | else: | ||
| 515 | progdep = RecipeHandler.recipebinmap.get(prog, None) | ||
| 516 | if not progdep: | ||
| 517 | progdep = progmap.get(prog, None) | ||
| 518 | if progdep: | ||
| 519 | deps.append(progdep) | ||
| 520 | elif progdep is None: | ||
| 521 | if not prog.startswith('$'): | ||
| 522 | unmapped.append(prog) | ||
| 523 | elif keyword == 'AC_CHECK_LIB': | ||
| 524 | res = lib_re.search(value) | ||
| 525 | if res: | ||
| 526 | lib = res.group(1) | ||
| 527 | if not lib.startswith('$'): | ||
| 528 | libdeps.append(lib) | ||
| 529 | elif keyword == 'AX_CHECK_LIBRARY': | ||
| 530 | res = libx_re.search(value) | ||
| 531 | if res: | ||
| 532 | lib = res.group(2) | ||
| 533 | if not lib.startswith('$'): | ||
| 534 | header = res.group(1) | ||
| 535 | libdeps.append((lib, header)) | ||
| 536 | elif keyword == 'AC_PATH_X': | ||
| 537 | deps.append('libx11') | ||
| 538 | elif keyword in ('AX_BOOST', 'BOOST_REQUIRE'): | ||
| 539 | deps.append('boost') | ||
| 540 | elif keyword in ('AC_PROG_LEX', 'AM_PROG_LEX', 'AX_PROG_FLEX'): | ||
| 541 | deps.append('flex-native') | ||
| 542 | elif keyword in ('AC_PROG_YACC', 'AX_PROG_BISON'): | ||
| 543 | deps.append('bison-native') | ||
| 544 | elif keyword == 'AX_CHECK_ZLIB': | ||
| 545 | deps.append('zlib') | ||
| 546 | elif keyword in ('AX_CHECK_OPENSSL', 'AX_LIB_CRYPTO'): | ||
| 547 | deps.append('openssl') | ||
| 548 | elif keyword in ('AX_LIB_CURL', 'LIBCURL_CHECK_CONFIG'): | ||
| 549 | deps.append('curl') | ||
| 550 | elif keyword == 'AX_LIB_BEECRYPT': | ||
| 551 | deps.append('beecrypt') | ||
| 552 | elif keyword == 'AX_LIB_EXPAT': | ||
| 553 | deps.append('expat') | ||
| 554 | elif keyword == 'AX_LIB_GCRYPT': | ||
| 555 | deps.append('libgcrypt') | ||
| 556 | elif keyword == 'AX_LIB_NETTLE': | ||
| 557 | deps.append('nettle') | ||
| 558 | elif keyword == 'AX_LIB_READLINE': | ||
| 559 | deps.append('readline') | ||
| 560 | elif keyword == 'AX_LIB_SQLITE3': | ||
| 561 | deps.append('sqlite3') | ||
| 562 | elif keyword == 'AX_LIB_TAGLIB': | ||
| 563 | deps.append('taglib') | ||
| 564 | elif keyword in ['AX_PKG_SWIG', 'AC_PROG_SWIG']: | ||
| 565 | deps.append('swig-native') | ||
| 566 | elif keyword == 'AX_PROG_XSLTPROC': | ||
| 567 | deps.append('libxslt-native') | ||
| 568 | elif keyword in ['AC_PYTHON_DEVEL', 'AX_PYTHON_DEVEL', 'AM_PATH_PYTHON']: | ||
| 569 | inherits.append('python3native') | ||
| 570 | elif keyword == 'AX_WITH_CURSES': | ||
| 571 | deps.append('ncurses') | ||
| 572 | elif keyword == 'AX_PATH_BDB': | ||
| 573 | deps.append('db') | ||
| 574 | elif keyword == 'AX_PATH_LIB_PCRE': | ||
| 575 | deps.append('libpcre') | ||
| 576 | elif keyword == 'AC_INIT': | ||
| 577 | if extravalues is not None: | ||
| 578 | res = ac_init_re.match(value) | ||
| 579 | if res: | ||
| 580 | extravalues['PN'] = process_value(res.group(1)) | ||
| 581 | pv = process_value(res.group(2)) | ||
| 582 | if validate_pv(pv): | ||
| 583 | extravalues['PV'] = pv | ||
| 584 | elif keyword == 'AM_INIT_AUTOMAKE': | ||
| 585 | if extravalues is not None: | ||
| 586 | if 'PN' not in extravalues: | ||
| 587 | res = am_init_re.match(value) | ||
| 588 | if res: | ||
| 589 | if res.group(1) != 'AC_PACKAGE_NAME': | ||
| 590 | extravalues['PN'] = process_value(res.group(1)) | ||
| 591 | pv = process_value(res.group(2)) | ||
| 592 | if validate_pv(pv): | ||
| 593 | extravalues['PV'] = pv | ||
| 594 | elif keyword == 'define(': | ||
| 595 | res = define_re.match(value) | ||
| 596 | if res: | ||
| 597 | key = res.group(2).strip('[]') | ||
| 598 | value = process_value(res.group(3)) | ||
| 599 | if value is not None: | ||
| 600 | defines[key] = value | ||
| 601 | |||
| 602 | keywords = ['PKG_CHECK_MODULES', | ||
| 603 | 'PKG_CHECK_EXISTS', | ||
| 604 | 'AM_GNU_GETTEXT', | ||
| 605 | 'AM_GLIB_GNU_GETTEXT', | ||
| 606 | 'GETTEXT_PACKAGE', | ||
| 607 | 'AC_PROG_INTLTOOL', | ||
| 608 | 'IT_PROG_INTLTOOL', | ||
| 609 | 'AM_PATH_GLIB_2_0', | ||
| 610 | 'AC_CHECK_PROG', | ||
| 611 | 'AC_PATH_PROG', | ||
| 612 | 'AX_WITH_PROG', | ||
| 613 | 'AC_CHECK_LIB', | ||
| 614 | 'AX_CHECK_LIBRARY', | ||
| 615 | 'AC_PATH_X', | ||
| 616 | 'AX_BOOST', | ||
| 617 | 'BOOST_REQUIRE', | ||
| 618 | 'AC_PROG_LEX', | ||
| 619 | 'AM_PROG_LEX', | ||
| 620 | 'AX_PROG_FLEX', | ||
| 621 | 'AC_PROG_YACC', | ||
| 622 | 'AX_PROG_BISON', | ||
| 623 | 'AX_CHECK_ZLIB', | ||
| 624 | 'AX_CHECK_OPENSSL', | ||
| 625 | 'AX_LIB_CRYPTO', | ||
| 626 | 'AX_LIB_CURL', | ||
| 627 | 'LIBCURL_CHECK_CONFIG', | ||
| 628 | 'AX_LIB_BEECRYPT', | ||
| 629 | 'AX_LIB_EXPAT', | ||
| 630 | 'AX_LIB_GCRYPT', | ||
| 631 | 'AX_LIB_NETTLE', | ||
| 632 | 'AX_LIB_READLINE', | ||
| 633 | 'AX_LIB_SQLITE3', | ||
| 634 | 'AX_LIB_TAGLIB', | ||
| 635 | 'AX_PKG_SWIG', | ||
| 636 | 'AC_PROG_SWIG', | ||
| 637 | 'AX_PROG_XSLTPROC', | ||
| 638 | 'AC_PYTHON_DEVEL', | ||
| 639 | 'AX_PYTHON_DEVEL', | ||
| 640 | 'AM_PATH_PYTHON', | ||
| 641 | 'AX_WITH_CURSES', | ||
| 642 | 'AX_PATH_BDB', | ||
| 643 | 'AX_PATH_LIB_PCRE', | ||
| 644 | 'AC_INIT', | ||
| 645 | 'AM_INIT_AUTOMAKE', | ||
| 646 | 'define(', | ||
| 647 | ] | ||
| 648 | |||
| 649 | for handler in handlers: | ||
| 650 | handler.extend_keywords(keywords) | ||
| 651 | |||
| 652 | for srcfile in srcfiles: | ||
| 653 | nesting = 0 | ||
| 654 | in_keyword = '' | ||
| 655 | partial = '' | ||
| 656 | with open(srcfile, 'r', errors='surrogateescape') as f: | ||
| 657 | for line in f: | ||
| 658 | if in_keyword: | ||
| 659 | partial += ' ' + line.strip() | ||
| 660 | if partial.endswith('\\'): | ||
| 661 | partial = partial[:-1] | ||
| 662 | nesting = nesting + line.count('(') - line.count(')') | ||
| 663 | if nesting == 0: | ||
| 664 | process_macro(in_keyword, partial) | ||
| 665 | partial = '' | ||
| 666 | in_keyword = '' | ||
| 667 | else: | ||
| 668 | for keyword in keywords: | ||
| 669 | if keyword in line: | ||
| 670 | nesting = line.count('(') - line.count(')') | ||
| 671 | if nesting > 0: | ||
| 672 | partial = line.strip() | ||
| 673 | if partial.endswith('\\'): | ||
| 674 | partial = partial[:-1] | ||
| 675 | in_keyword = keyword | ||
| 676 | else: | ||
| 677 | process_macro(keyword, line.strip()) | ||
| 678 | break | ||
| 679 | |||
| 680 | if in_keyword: | ||
| 681 | process_macro(in_keyword, partial) | ||
| 682 | |||
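| | # NOTE: the loop above accumulates multi-line macros; for example | ||
| | #   AC_CHECK_LIB([curl], | ||
| | #                [curl_easy_init]) | ||
| | # opens with unbalanced parentheses (nesting == 1), so lines are joined into | ||
| | # 'partial' until the count balances, then passed to process_macro(). | ||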
| 683 | if extravalues: | ||
| 684 | for k,v in list(extravalues.items()): | ||
| 685 | if v: | ||
| 686 | if v.startswith('$') or v.startswith('@') or v.startswith('%'): | ||
| 687 | del extravalues[k] | ||
| 688 | else: | ||
| 689 | extravalues[k] = v.strip('"\'').rstrip('()') | ||
| 690 | |||
| 691 | if unmapped: | ||
| 692 | outlines.append('# NOTE: the following prog dependencies are unknown, ignoring: %s' % ' '.join(list(set(unmapped)))) | ||
| 693 | |||
| 694 | RecipeHandler.handle_depends(libdeps, pcdeps, deps, outlines, values, tinfoil.config_data) | ||
| 695 | |||
| 696 | for handler in handlers: | ||
| 697 | handler.post_process(srctree, libdeps, pcdeps, deps, outlines, inherits, values) | ||
| 698 | |||
| 699 | if inherits: | ||
| 700 | values['inherit'] = ' '.join(list(set(inherits))) | ||
| 701 | |||
| 702 | return values | ||
| 703 | |||
| 704 | |||
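| | # NOTE: a rough usage sketch (assuming tinfoil has been initialised): | ||
| | #   values = AutotoolsRecipeHandler.extract_autotools_deps(outlines, srctree) | ||
| | # might return something like {'DEPENDS': 'curl zlib', 'inherit': 'pkgconfig'}. | ||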
| 705 | class AutotoolsExtensionHandler(object): | ||
| 706 | '''Base class for Autotools extension handlers''' | ||
| 707 | def process_macro(self, srctree, keyword, value, process_value, libdeps, pcdeps, deps, outlines, inherits, values): | ||
| 708 | ''' | ||
| 709 | Handle a macro parsed out of an autotools file. Note that if you want this to be called | ||
| 710 | for any macro other than the ones AutotoolsRecipeHandler already looks for, you'll need | ||
| 711 | to add it to the keywords list in extend_keywords(). | ||
| 712 | Return True if you've completely handled the passed in macro, otherwise return False. | ||
| 713 | ''' | ||
| 714 | return False | ||
| 715 | |||
| 716 | def extend_keywords(self, keywords): | ||
| 717 | '''Adds keywords to be recognised by the parser (so that you get a call to process_macro)''' | ||
| 718 | return | ||
| 719 | |||
| 720 | def process_prog(self, srctree, keyword, value, prog, deps, outlines, inherits, values): | ||
| 721 | ''' | ||
| 722 | Handle an AC_PATH_PROG, AC_CHECK_PROG etc. line | ||
| 723 | Return True if you've completely handled the passed in macro, otherwise return False. | ||
| 724 | ''' | ||
| 725 | return False | ||
| 726 | |||
| 727 | def post_process(self, srctree, fn, pkg, deps, outlines, inherits, values): | ||
| 728 | ''' | ||
| 729 | Apply any desired post-processing on the output | ||
| 730 | ''' | ||
| 731 | return | ||
| 732 | |||
| 733 | |||
| 734 | class MakefileRecipeHandler(RecipeHandler): | ||
| 735 | def process(self, srctree, classes, lines_before, lines_after, handled, extravalues): | ||
| 736 | if 'buildsystem' in handled: | ||
| 737 | return False | ||
| 738 | |||
| 739 | makefile = RecipeHandler.checkfiles(srctree, ['Makefile', 'makefile', 'GNUmakefile']) | ||
| 740 | if makefile: | ||
| 741 | lines_after.append('# NOTE: this is a Makefile-only piece of software, so we cannot generate much of the') | ||
| 742 | lines_after.append('# recipe automatically - you will need to examine the Makefile yourself and ensure') | ||
| 743 | lines_after.append('# that the appropriate arguments are passed in.') | ||
| 744 | lines_after.append('') | ||
| 745 | |||
| 746 | scanfile = os.path.join(srctree, 'configure.scan') | ||
| 747 | skipscan = False | ||
| 748 | try: | ||
| 749 | stdout, stderr = bb.process.run('autoscan', cwd=srctree, shell=True) | ||
| 750 | except bb.process.ExecutionError as e: | ||
| 751 | skipscan = True | ||
| 752 | if scanfile and os.path.exists(scanfile): | ||
| 753 | values = AutotoolsRecipeHandler.extract_autotools_deps(lines_before, srctree, acfile=scanfile) | ||
| 754 | classes.extend(values.pop('inherit', '').split()) | ||
| 755 | for var, value in values.items(): | ||
| 756 | if var == 'DEPENDS': | ||
| 757 | lines_before.append('# NOTE: some of these dependencies may be optional, check the Makefile and/or upstream documentation') | ||
| 758 | lines_before.append('%s = "%s"' % (var, value)) | ||
| 759 | lines_before.append('') | ||
| 760 | for f in ['configure.scan', 'autoscan.log']: | ||
| 761 | fp = os.path.join(srctree, f) | ||
| 762 | if os.path.exists(fp): | ||
| 763 | os.remove(fp) | ||
| 764 | |||
| 765 | self.genfunction(lines_after, 'do_configure', ['# Specify any needed configure commands here']) | ||
| 766 | |||
| 767 | func = [] | ||
| 768 | func.append('# You will almost certainly need to add additional arguments here') | ||
| 769 | func.append('oe_runmake') | ||
| 770 | self.genfunction(lines_after, 'do_compile', func) | ||
| 771 | |||
| 772 | installtarget = True | ||
| 773 | try: | ||
| 774 | stdout, stderr = bb.process.run('make -n install', cwd=srctree, shell=True) | ||
| 775 | except bb.process.ExecutionError as e: | ||
| 776 | if e.exitcode != 1: | ||
| 777 | installtarget = False | ||
| 778 | func = [] | ||
| 779 | if installtarget: | ||
| 780 | func.append('# This is a guess; additional arguments may be required') | ||
| 781 | makeargs = '' | ||
| 782 | with open(makefile[0], 'r', errors='surrogateescape') as f: | ||
| 783 | for i in range(1, 100): | ||
| 784 | if 'DESTDIR' in f.readline(): | ||
| 785 | makeargs += " 'DESTDIR=${D}'" | ||
| 786 | break | ||
| 787 | func.append('oe_runmake install%s' % makeargs) | ||
| 788 | else: | ||
| 789 | func.append('# NOTE: unable to determine what to put here - there is a Makefile but no') | ||
| 790 | func.append('# target named "install", so you will need to define this yourself') | ||
| 791 | self.genfunction(lines_after, 'do_install', func) | ||
| 792 | |||
| 793 | handled.append('buildsystem') | ||
| 794 | else: | ||
| 795 | lines_after.append('# NOTE: no Makefile found, unable to determine what needs to be done') | ||
| 796 | lines_after.append('') | ||
| 797 | self.genfunction(lines_after, 'do_configure', ['# Specify any needed configure commands here']) | ||
| 798 | self.genfunction(lines_after, 'do_compile', ['# Specify compilation commands here']) | ||
| 799 | self.genfunction(lines_after, 'do_install', ['# Specify install commands here']) | ||
| 800 | |||
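| | # NOTE: for a Makefile whose install target honours DESTDIR, the generated | ||
| | # do_install ends up roughly as: | ||
| | #   do_install () { | ||
| | #       # This is a guess; additional arguments may be required | ||
| | #       oe_runmake install 'DESTDIR=${D}' | ||
| | #   } | ||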
| 801 | |||
| 802 | class VersionFileRecipeHandler(RecipeHandler): | ||
| 803 | def process(self, srctree, classes, lines_before, lines_after, handled, extravalues): | ||
| 804 | if 'PV' not in extravalues: | ||
| 805 | # Look for a VERSION or version file containing a single line consisting | ||
| 806 | # only of a version number | ||
| 807 | filelist = RecipeHandler.checkfiles(srctree, ['VERSION', 'version']) | ||
| 808 | version = None | ||
| 809 | for fileitem in filelist: | ||
| 810 | linecount = 0 | ||
| 811 | with open(fileitem, 'r', errors='surrogateescape') as f: | ||
| 812 | for line in f: | ||
| 813 | line = line.rstrip().strip('"\'') | ||
| 814 | linecount += 1 | ||
| 815 | if line: | ||
| 816 | if linecount > 1: | ||
| 817 | version = None | ||
| 818 | break | ||
| 819 | else: | ||
| 820 | if validate_pv(line): | ||
| 821 | version = line | ||
| 822 | if version: | ||
| 823 | extravalues['PV'] = version | ||
| 824 | break | ||
| 825 | |||
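| | # NOTE: for example, a VERSION file whose only non-empty line is '1.4.2' | ||
| | # results in extravalues['PV'] = '1.4.2'; files with more than one non-empty | ||
| | # line are ignored. | ||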
| 826 | |||
| 827 | class SpecFileRecipeHandler(RecipeHandler): | ||
| 828 | def process(self, srctree, classes, lines_before, lines_after, handled, extravalues): | ||
| 829 | if 'PV' in extravalues and 'PN' in extravalues: | ||
| 830 | return | ||
| 831 | filelist = RecipeHandler.checkfiles(srctree, ['*.spec'], recursive=True) | ||
| 832 | valuemap = {'Name': 'PN', | ||
| 833 | 'Version': 'PV', | ||
| 834 | 'Summary': 'SUMMARY', | ||
| 835 | 'Url': 'HOMEPAGE', | ||
| 836 | 'License': 'LICENSE'} | ||
| 837 | foundvalues = {} | ||
| 838 | for fileitem in filelist: | ||
| 839 | linecount = 0 | ||
| 840 | with open(fileitem, 'r', errors='surrogateescape') as f: | ||
| 841 | for line in f: | ||
| 842 | for value, varname in valuemap.items(): | ||
| 843 | if line.startswith(value + ':') and varname not in foundvalues: | ||
| 844 | foundvalues[varname] = line.split(':', 1)[1].strip() | ||
| 845 | break | ||
| 846 | if len(foundvalues) == len(valuemap): | ||
| 847 | break | ||
| 848 | # Drop values containing unexpanded RPM macros | ||
| 849 | for k in list(foundvalues.keys()): | ||
| 850 | if '%' in foundvalues[k]: | ||
| 851 | del foundvalues[k] | ||
| 852 | if 'PV' in foundvalues: | ||
| 853 | if not validate_pv(foundvalues['PV']): | ||
| 854 | del foundvalues['PV'] | ||
| 855 | license = foundvalues.pop('LICENSE', None) | ||
| 856 | if license: | ||
| 857 | liccomment = '# NOTE: spec file indicates the license may be "%s"' % license | ||
| 858 | for i, line in enumerate(lines_before): | ||
| 859 | if line.startswith('LICENSE ='): | ||
| 860 | lines_before.insert(i, liccomment) | ||
| 861 | break | ||
| 862 | else: | ||
| 863 | lines_before.append(liccomment) | ||
| 864 | extravalues.update(foundvalues) | ||
| 865 | |||
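| | # NOTE: for example, a foo.spec containing | ||
| | #   Name: foo | ||
| | #   Version: 1.0 | ||
| | #   License: MIT | ||
| | # yields PN = 'foo' and PV = '1.0' plus a LICENSE hint comment, while values | ||
| | # still containing RPM macros such as 'Version: %{ver}' are dropped. | ||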
| 866 | def register_recipe_handlers(handlers): | ||
| 867 | # Set priorities with some gaps so that other plugins can insert | ||
| 868 | # their own handlers (so avoid changing these numbers) | ||
| 869 | handlers.append((CmakeRecipeHandler(), 50)) | ||
| 870 | handlers.append((AutotoolsRecipeHandler(), 40)) | ||
| 871 | handlers.append((SconsRecipeHandler(), 30)) | ||
| 872 | handlers.append((QmakeRecipeHandler(), 20)) | ||
| 873 | handlers.append((MakefileRecipeHandler(), 10)) | ||
| 874 | handlers.append((VersionFileRecipeHandler(), -1)) | ||
| 875 | handlers.append((SpecFileRecipeHandler(), -1)) | ||
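| | # NOTE: an external plugin can slot its own handler into one of the gaps, | ||
| | # e.g. (hypothetical): | ||
| | #   handlers.append((MyBuildsysHandler(), 45)) | ||
| | # so that it is tried after cmake but before autotools. | ||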
diff --git a/scripts/lib/recipetool/create_buildsys_python.py b/scripts/lib/recipetool/create_buildsys_python.py deleted file mode 100644 index a807dafae5..0000000000 --- a/scripts/lib/recipetool/create_buildsys_python.py +++ /dev/null | |||
| @@ -1,1124 +0,0 @@ | |||
| 1 | # Recipe creation tool - create build system handler for python | ||
| 2 | # | ||
| 3 | # Copyright (C) 2015 Mentor Graphics Corporation | ||
| 4 | # | ||
| 5 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 6 | # | ||
| 7 | |||
| 8 | import ast | ||
| 9 | import codecs | ||
| 10 | import collections | ||
| 11 | import setuptools.command.build_py | ||
| 12 | import email | ||
| 13 | import importlib | ||
| 14 | import glob | ||
| 15 | import itertools | ||
| 16 | import logging | ||
| 17 | import os | ||
| 18 | import re | ||
| 19 | import sys | ||
| 20 | import subprocess | ||
| 21 | import json | ||
| 22 | import urllib.request | ||
| 23 | from recipetool.create import RecipeHandler | ||
| 24 | from urllib.parse import urldefrag | ||
| 25 | from recipetool.create import determine_from_url | ||
| 26 | |||
| 27 | logger = logging.getLogger('recipetool') | ||
| 28 | |||
| 29 | tinfoil = None | ||
| 30 | |||
| 31 | |||
| 32 | def tinfoil_init(instance): | ||
| 33 | global tinfoil | ||
| 34 | tinfoil = instance | ||
| 35 | |||
| 36 | |||
| 37 | class PythonRecipeHandler(RecipeHandler): | ||
| 38 | base_pkgdeps = ['python3-core'] | ||
| 39 | excluded_pkgdeps = ['python3-dbg'] | ||
| 40 | # os.path is provided by python3-core | ||
| 41 | assume_provided = ['builtins', 'os.path'] | ||
| 42 | # Assumes that the host python3 builtin_module_names is sane for target too | ||
| 43 | assume_provided = assume_provided + list(sys.builtin_module_names) | ||
| 44 | excluded_fields = [] | ||
| 45 | |||
| 46 | |||
| 47 | classifier_license_map = { | ||
| 48 | 'License :: OSI Approved :: Academic Free License (AFL)': 'AFL', | ||
| 49 | 'License :: OSI Approved :: Apache Software License': 'Apache', | ||
| 50 | 'License :: OSI Approved :: Apple Public Source License': 'APSL', | ||
| 51 | 'License :: OSI Approved :: Artistic License': 'Artistic', | ||
| 52 | 'License :: OSI Approved :: Attribution Assurance License': 'AAL', | ||
| 53 | 'License :: OSI Approved :: BSD License': 'BSD-3-Clause', | ||
| 54 | 'License :: OSI Approved :: Boost Software License 1.0 (BSL-1.0)': 'BSL-1.0', | ||
| 55 | 'License :: OSI Approved :: CEA CNRS Inria Logiciel Libre License, version 2.1 (CeCILL-2.1)': 'CECILL-2.1', | ||
| 56 | 'License :: OSI Approved :: Common Development and Distribution License 1.0 (CDDL-1.0)': 'CDDL-1.0', | ||
| 57 | 'License :: OSI Approved :: Common Public License': 'CPL', | ||
| 58 | 'License :: OSI Approved :: Eclipse Public License 1.0 (EPL-1.0)': 'EPL-1.0', | ||
| 59 | 'License :: OSI Approved :: Eclipse Public License 2.0 (EPL-2.0)': 'EPL-2.0', | ||
| 60 | 'License :: OSI Approved :: Eiffel Forum License': 'EFL', | ||
| 61 | 'License :: OSI Approved :: European Union Public Licence 1.0 (EUPL 1.0)': 'EUPL-1.0', | ||
| 62 | 'License :: OSI Approved :: European Union Public Licence 1.1 (EUPL 1.1)': 'EUPL-1.1', | ||
| 63 | 'License :: OSI Approved :: European Union Public Licence 1.2 (EUPL 1.2)': 'EUPL-1.2', | ||
| 64 | 'License :: OSI Approved :: GNU Affero General Public License v3': 'AGPL-3.0-only', | ||
| 65 | 'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)': 'AGPL-3.0-or-later', | ||
| 66 | 'License :: OSI Approved :: GNU Free Documentation License (FDL)': 'GFDL', | ||
| 67 | 'License :: OSI Approved :: GNU General Public License (GPL)': 'GPL', | ||
| 68 | 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)': 'GPL-2.0-only', | ||
| 69 | 'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)': 'GPL-2.0-or-later', | ||
| 70 | 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)': 'GPL-3.0-only', | ||
| 71 | 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)': 'GPL-3.0-or-later', | ||
| 72 | 'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)': 'LGPL-2.0-only', | ||
| 73 | 'License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+)': 'LGPL-2.0-or-later', | ||
| 74 | 'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)': 'LGPL-3.0-only', | ||
| 75 | 'License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)': 'LGPL-3.0-or-later', | ||
| 76 | 'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)': 'LGPL', | ||
| 77 | 'License :: OSI Approved :: Historical Permission Notice and Disclaimer (HPND)': 'HPND', | ||
| 78 | 'License :: OSI Approved :: IBM Public License': 'IPL', | ||
| 79 | 'License :: OSI Approved :: ISC License (ISCL)': 'ISC', | ||
| 80 | 'License :: OSI Approved :: Intel Open Source License': 'Intel', | ||
| 81 | 'License :: OSI Approved :: Jabber Open Source License': 'Jabber', | ||
| 82 | 'License :: OSI Approved :: MIT License': 'MIT', | ||
| 83 | 'License :: OSI Approved :: MIT No Attribution License (MIT-0)': 'MIT-0', | ||
| 84 | 'License :: OSI Approved :: MITRE Collaborative Virtual Workspace License (CVW)': 'CVWL', | ||
| 85 | 'License :: OSI Approved :: MirOS License (MirOS)': 'MirOS', | ||
| 86 | 'License :: OSI Approved :: Motosoto License': 'Motosoto', | ||
| 87 | 'License :: OSI Approved :: Mozilla Public License 1.0 (MPL)': 'MPL-1.0', | ||
| 88 | 'License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)': 'MPL-1.1', | ||
| 89 | 'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)': 'MPL-2.0', | ||
| 90 | 'License :: OSI Approved :: Nethack General Public License': 'NGPL', | ||
| 91 | 'License :: OSI Approved :: Nokia Open Source License': 'Nokia', | ||
| 92 | 'License :: OSI Approved :: Open Group Test Suite License': 'OGTSL', | ||
| 93 | 'License :: OSI Approved :: Open Software License 3.0 (OSL-3.0)': 'OSL-3.0', | ||
| 94 | 'License :: OSI Approved :: PostgreSQL License': 'PostgreSQL', | ||
| 95 | 'License :: OSI Approved :: Python License (CNRI Python License)': 'CNRI-Python', | ||
| 96 | 'License :: OSI Approved :: Python Software Foundation License': 'PSF-2.0', | ||
| 97 | 'License :: OSI Approved :: Qt Public License (QPL)': 'QPL', | ||
| 98 | 'License :: OSI Approved :: Ricoh Source Code Public License': 'RSCPL', | ||
| 99 | 'License :: OSI Approved :: SIL Open Font License 1.1 (OFL-1.1)': 'OFL-1.1', | ||
| 100 | 'License :: OSI Approved :: Sleepycat License': 'Sleepycat', | ||
| 101 | 'License :: OSI Approved :: Sun Industry Standards Source License (SISSL)': 'SISSL', | ||
| 102 | 'License :: OSI Approved :: Sun Public License': 'SPL', | ||
| 103 | 'License :: OSI Approved :: The Unlicense (Unlicense)': 'Unlicense', | ||
| 104 | 'License :: OSI Approved :: Universal Permissive License (UPL)': 'UPL-1.0', | ||
| 105 | 'License :: OSI Approved :: University of Illinois/NCSA Open Source License': 'NCSA', | ||
| 106 | 'License :: OSI Approved :: Vovida Software License 1.0': 'VSL-1.0', | ||
| 107 | 'License :: OSI Approved :: W3C License': 'W3C', | ||
| 108 | 'License :: OSI Approved :: X.Net License': 'Xnet', | ||
| 109 | 'License :: OSI Approved :: Zope Public License': 'ZPL', | ||
| 110 | 'License :: OSI Approved :: zlib/libpng License': 'Zlib', | ||
| 111 | 'License :: Other/Proprietary License': 'Proprietary', | ||
| 112 | 'License :: Public Domain': 'PD', | ||
| 113 | } | ||
| 114 | |||
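| | # NOTE: for example, the classifier 'License :: OSI Approved :: MIT License' | ||
| | # maps to the SPDX-style LICENSE value 'MIT'. | ||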
| 115 | def __init__(self): | ||
| 116 | pass | ||
| 117 | |||
| 118 | def process_url(self, args, classes, handled, extravalues): | ||
| 119 | """ | ||
| 120 | Convert any pypi url https://pypi.org/project/<package>/<version> into https://files.pythonhosted.org/packages/source/... | ||
| 121 | which corresponds to the archive location, and add pypi class | ||
| 122 | """ | ||
| 123 | |||
| 124 | if 'url' in handled: | ||
| 125 | return None | ||
| 126 | |||
| 127 | fetch_uri = None | ||
| 128 | source = args.source | ||
| 129 | required_version = args.version if args.version else None | ||
| 130 | match = re.match(r'https?://pypi.org/project/([^/]+)(?:/([^/]+))?/?$', urldefrag(source)[0]) | ||
| 131 | if match: | ||
| 132 | package = match.group(1) | ||
| 133 | version = match.group(2) if match.group(2) else required_version | ||
| 134 | |||
| 135 | json_url = "https://pypi.org/pypi/%s/json" % package | ||
| 136 | response = urllib.request.urlopen(json_url) | ||
| 137 | if response.status == 200: | ||
| 138 | data = json.loads(response.read()) | ||
| 139 | if not version: | ||
| 140 | # grab latest version | ||
| 141 | version = data["info"]["version"] | ||
| 142 | pypi_package = data["info"]["name"] | ||
| 143 | for release in reversed(data["releases"][version]): | ||
| 144 | if release["packagetype"] == "sdist": | ||
| 145 | fetch_uri = release["url"] | ||
| 146 | break | ||
| 147 | else: | ||
| 148 | logger.warning("Cannot handle pypi url %s: cannot fetch package information using %s", source, json_url) | ||
| 149 | return None | ||
| 150 | else: | ||
| 151 | match = re.match(r'^https?://files.pythonhosted.org/packages.*/(.*)-.*$', source) | ||
| 152 | if match: | ||
| 153 | fetch_uri = source | ||
| 154 | pypi_package = match.group(1) | ||
| 155 | _, version = determine_from_url(fetch_uri) | ||
| 156 | |||
| 157 | if match and not args.no_pypi: | ||
| 158 | if required_version and version != required_version: | ||
| 159 | raise Exception("Version specified using --version/-V (%s) and version specified in the url (%s) do not match" % (required_version, version)) | ||
| 160 | # This is optional if BPN looks like "python-<pypi_package>" or "python3-<pypi_package>" (see pypi.bbclass) | ||
| 161 | # but at this point we cannot know, because the user can specify the output name of the recipe on the command line | ||
| 162 | extravalues["PYPI_PACKAGE"] = pypi_package | ||
| 163 | # If the tarball extension is not 'tar.gz' (default value in pypi.bbclass) we should set PYPI_PACKAGE_EXT in the recipe | ||
| 164 | pypi_package_ext = re.match(r'.*%s-%s\.(.*)$' % (pypi_package, version), fetch_uri) | ||
| 165 | if pypi_package_ext: | ||
| 166 | pypi_package_ext = pypi_package_ext.group(1) | ||
| 167 | if pypi_package_ext != "tar.gz": | ||
| 168 | extravalues["PYPI_PACKAGE_EXT"] = pypi_package_ext | ||
| 169 | |||
| 170 | # Pypi class will handle S and SRC_URI variables, so remove them | ||
| 171 | # TODO: allow oe.recipeutils.patch_recipe_lines() to accept regexp so we can simplify the following to: | ||
| 172 | # extravalues['SRC_URI(?:\[.*?\])?'] = None | ||
| 173 | extravalues['S'] = None | ||
| 174 | extravalues['SRC_URI'] = None | ||
| 175 | |||
| 176 | classes.append('pypi') | ||
| 177 | |||
| 178 | handled.append('url') | ||
| 179 | return fetch_uri | ||
| 180 | |||
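| | # NOTE: for example, https://pypi.org/project/requests/2.31.0 is resolved via | ||
| | # https://pypi.org/pypi/requests/json to the corresponding sdist URL under | ||
| | # https://files.pythonhosted.org/packages/..., setting PYPI_PACKAGE and | ||
| | # inheriting the pypi class. | ||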
| 181 | def handle_classifier_license(self, classifiers, existing_licenses=""): | ||
| 182 | |||
| 183 | licenses = [] | ||
| 184 | for classifier in classifiers: | ||
| 185 | if classifier in self.classifier_license_map: | ||
| 186 | license = self.classifier_license_map[classifier] | ||
| 187 | if license == 'Apache' and 'Apache-2.0' in existing_licenses: | ||
| 188 | license = 'Apache-2.0' | ||
| 189 | elif license == 'GPL': | ||
| 190 | if 'GPL-2.0' in existing_licenses or 'GPLv2' in existing_licenses: | ||
| 191 | license = 'GPL-2.0' | ||
| 192 | elif 'GPL-3.0' in existing_licenses or 'GPLv3' in existing_licenses: | ||
| 193 | license = 'GPL-3.0' | ||
| 194 | elif license == 'LGPL': | ||
| 195 | if 'LGPL-2.1' in existing_licenses or 'LGPLv2.1' in existing_licenses: | ||
| 196 | license = 'LGPL-2.1' | ||
| 197 | elif 'LGPL-2.0' in existing_licenses or 'LGPLv2' in existing_licenses: | ||
| 198 | license = 'LGPL-2.0' | ||
| 199 | elif 'LGPL-3.0' in existing_licenses or 'LGPLv3' in existing_licenses: | ||
| 200 | license = 'LGPL-3.0' | ||
| 201 | licenses.append(license) | ||
| 202 | |||
| 203 | if licenses: | ||
| 204 | return ' & '.join(licenses) | ||
| 205 | |||
| 206 | return None | ||
| 207 | |||
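| | # NOTE: for example, handle_classifier_license( | ||
| | #     ['License :: OSI Approved :: MIT License', | ||
| | #      'License :: OSI Approved :: GNU General Public License (GPL)'], | ||
| | #     existing_licenses='GPLv2') | ||
| | # returns 'MIT & GPL-2.0'. | ||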
| 208 | def map_info_to_bbvar(self, info, extravalues): | ||
| 209 | |||
| 210 | # Map PKG-INFO & setup.py fields to bitbake variables | ||
| 211 | for field, values in info.items(): | ||
| 212 | if field in self.excluded_fields: | ||
| 213 | continue | ||
| 214 | |||
| 215 | if field not in self.bbvar_map: | ||
| 216 | continue | ||
| 217 | |||
| 218 | if isinstance(values, str): | ||
| 219 | value = values | ||
| 220 | else: | ||
| 221 | value = ' '.join(str(v) for v in values if v) | ||
| 222 | |||
| 223 | bbvar = self.bbvar_map[field] | ||
| 224 | if bbvar == "PN": | ||
| 225 | # by convention python recipes start with "python3-" | ||
| 226 | if not value.startswith('python'): | ||
| 227 | value = 'python3-' + value | ||
| 228 | |||
| 229 | if bbvar not in extravalues and value: | ||
| 230 | extravalues[bbvar] = value | ||
| 231 | |||
| 232 | def apply_info_replacements(self, info): | ||
| 233 | if not self.replacements: | ||
| 234 | return | ||
| 235 | |||
| 236 | for variable, search, replace in self.replacements: | ||
| 237 | if variable not in info: | ||
| 238 | continue | ||
| 239 | |||
| 240 | def replace_value(search, replace, value): | ||
| 241 | if replace is None: | ||
| 242 | if re.search(search, value): | ||
| 243 | return None | ||
| 244 | else: | ||
| 245 | new_value = re.sub(search, replace, value) | ||
| 246 | if value != new_value: | ||
| 247 | return new_value | ||
| 248 | return value | ||
| 249 | |||
| 250 | value = info[variable] | ||
| 251 | if isinstance(value, str): | ||
| 252 | new_value = replace_value(search, replace, value) | ||
| 253 | if new_value is None: | ||
| 254 | del info[variable] | ||
| 255 | elif new_value != value: | ||
| 256 | info[variable] = new_value | ||
| 257 | elif hasattr(value, 'items'): | ||
| 258 | for dkey, dvalue in list(value.items()): | ||
| 259 | new_list = [] | ||
| 260 | for pos, a_value in enumerate(dvalue): | ||
| 261 | new_value = replace_value(search, replace, a_value) | ||
| 262 | if new_value is not None and new_value != value: | ||
| 263 | new_list.append(new_value) | ||
| 264 | |||
| 265 | if value != new_list: | ||
| 266 | value[dkey] = new_list | ||
| 267 | else: | ||
| 268 | new_list = [] | ||
| 269 | for pos, a_value in enumerate(value): | ||
| 270 | new_value = replace_value(search, replace, a_value) | ||
| 271 | if new_value is not None and new_value != value: | ||
| 272 | new_list.append(new_value) | ||
| 273 | |||
| 274 | if value != new_list: | ||
| 275 | info[variable] = new_list | ||
| 276 | |||
| 277 | |||
| 278 | def scan_python_dependencies(self, paths): | ||
| 279 | deps = set() | ||
| 280 | try: | ||
| 281 | dep_output = self.run_command(['pythondeps', '-d'] + paths) | ||
| 282 | except (OSError, subprocess.CalledProcessError): | ||
| 283 | pass | ||
| 284 | else: | ||
| 285 | for line in dep_output.splitlines(): | ||
| 286 | line = line.rstrip() | ||
| 287 | dep, filename = line.split('\t', 1) | ||
| 288 | if filename.endswith('/setup.py'): | ||
| 289 | continue | ||
| 290 | deps.add(dep) | ||
| 291 | |||
| 292 | try: | ||
| 293 | provides_output = self.run_command(['pythondeps', '-p'] + paths) | ||
| 294 | except (OSError, subprocess.CalledProcessError): | ||
| 295 | pass | ||
| 296 | else: | ||
| 297 | provides_lines = (l.rstrip() for l in provides_output.splitlines()) | ||
| 298 | provides = set(l for l in provides_lines if l and l != 'setup') | ||
| 299 | deps -= provides | ||
| 300 | |||
| 301 | return deps | ||
| 302 | |||
| 303 | def parse_pkgdata_for_python_packages(self): | ||
| 304 | pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR') | ||
| 305 | |||
| 306 | ldata = tinfoil.config_data.createCopy() | ||
| 307 | bb.parse.handle('classes-recipe/python3-dir.bbclass', ldata, True) | ||
| 308 | python_sitedir = ldata.getVar('PYTHON_SITEPACKAGES_DIR') | ||
| 309 | |||
| 310 | dynload_dir = os.path.join(os.path.dirname(python_sitedir), 'lib-dynload') | ||
| 311 | python_dirs = [python_sitedir + os.sep, | ||
| 312 | os.path.join(os.path.dirname(python_sitedir), 'dist-packages') + os.sep, | ||
| 313 | os.path.dirname(python_sitedir) + os.sep] | ||
| 314 | packages = {} | ||
| 315 | for pkgdatafile in glob.glob('{}/runtime/*'.format(pkgdata_dir)): | ||
| 316 | files_info = None | ||
| 317 | with open(pkgdatafile, 'r') as f: | ||
| 318 | for line in f.readlines(): | ||
| 319 | field, value = line.split(': ', 1) | ||
| 320 | if field.startswith('FILES_INFO'): | ||
| 321 | files_info = ast.literal_eval(value) | ||
| 322 | break | ||
| 323 | else: | ||
| 324 | continue | ||
| 325 | |||
| 326 | for fn in files_info: | ||
| 327 | for suffix in importlib.machinery.all_suffixes(): | ||
| 328 | if fn.endswith(suffix): | ||
| 329 | break | ||
| 330 | else: | ||
| 331 | continue | ||
| 332 | |||
| 333 | if fn.startswith(dynload_dir + os.sep): | ||
| 334 | if '/.debug/' in fn: | ||
| 335 | continue | ||
| 336 | base = os.path.basename(fn) | ||
| 337 | provided = base.split('.', 1)[0] | ||
| 338 | packages[provided] = os.path.basename(pkgdatafile) | ||
| 339 | continue | ||
| 340 | |||
| 341 | for python_dir in python_dirs: | ||
| 342 | if fn.startswith(python_dir): | ||
| 343 | relpath = fn[len(python_dir):] | ||
| 344 | relstart, _, relremaining = relpath.partition(os.sep) | ||
| 345 | if relstart.endswith('.egg'): | ||
| 346 | relpath = relremaining | ||
| 347 | base, _ = os.path.splitext(relpath) | ||
| 348 | |||
| 349 | if '/.debug/' in base: | ||
| 350 | continue | ||
| 351 | if os.path.basename(base) == '__init__': | ||
| 352 | base = os.path.dirname(base) | ||
| 353 | base = base.replace(os.sep + os.sep, os.sep) | ||
| 354 | provided = base.replace(os.sep, '.') | ||
| 355 | packages[provided] = os.path.basename(pkgdatafile) | ||
| 356 | return packages | ||
| 357 | |||
| 358 | @classmethod | ||
| 359 | def run_command(cls, cmd, **popenargs): | ||
| 360 | if 'stderr' not in popenargs: | ||
| 361 | popenargs['stderr'] = subprocess.STDOUT | ||
| 362 | try: | ||
| 363 | return subprocess.check_output(cmd, **popenargs).decode('utf-8') | ||
| 364 | except OSError as exc: | ||
| 365 | logger.error('Unable to run `%s`: %s', ' '.join(cmd), exc) | ||
| 366 | raise | ||
| 367 | except subprocess.CalledProcessError as exc: | ||
| 368 | logger.error('Unable to run `%s`: %s', ' '.join(cmd), exc.output) | ||
| 369 | raise | ||
| 370 | |||
| 371 | class PythonSetupPyRecipeHandler(PythonRecipeHandler): | ||
| 372 | bbvar_map = { | ||
| 373 | 'Name': 'PN', | ||
| 374 | 'Version': 'PV', | ||
| 375 | 'Home-page': 'HOMEPAGE', | ||
| 376 | 'Summary': 'SUMMARY', | ||
| 377 | 'Description': 'DESCRIPTION', | ||
| 378 | 'License': 'LICENSE', | ||
| 379 | 'Requires': 'RDEPENDS:${PN}', | ||
| 380 | 'Provides': 'RPROVIDES:${PN}', | ||
| 381 | 'Obsoletes': 'RREPLACES:${PN}', | ||
| 382 | } | ||
| 383 | # PN/PV are already set by recipetool core & desc can be extremely long | ||
| 384 | excluded_fields = [ | ||
| 385 | 'Description', | ||
| 386 | ] | ||
| 387 | setup_parse_map = { | ||
| 388 | 'Url': 'Home-page', | ||
| 389 | 'Classifiers': 'Classifier', | ||
| 390 | 'Description': 'Summary', | ||
| 391 | } | ||
| 392 | setuparg_map = { | ||
| 393 | 'Home-page': 'url', | ||
| 394 | 'Classifier': 'classifiers', | ||
| 395 | 'Summary': 'description', | ||
| 396 | 'Description': 'long-description', | ||
| 397 | } | ||
| 398 | # Values which are lists, used by the setup.py argument based metadata | ||
| 399 | # extraction method, to determine how to process the setup.py output. | ||
| 400 | setuparg_list_fields = [ | ||
| 401 | 'Classifier', | ||
| 402 | 'Requires', | ||
| 403 | 'Provides', | ||
| 404 | 'Obsoletes', | ||
| 405 | 'Platform', | ||
| 406 | 'Supported-Platform', | ||
| 407 | ] | ||
| 408 | setuparg_multi_line_values = ['Description'] | ||
| 409 | |||
| 410 | replacements = [ | ||
| 411 | ('License', r' +$', ''), | ||
| 412 | ('License', r'^ +', ''), | ||
| 413 | ('License', r' ', '-'), | ||
| 414 | ('License', r'^GNU-', ''), | ||
| 415 | ('License', r'-[Ll]icen[cs]e(,?-[Vv]ersion)?', ''), | ||
| 416 | ('License', r'^UNKNOWN$', ''), | ||
| 417 | |||
| 418 | # Remove currently unhandled version numbers from these variables | ||
| 419 | ('Requires', r' *\([^)]*\)', ''), | ||
| 420 | ('Provides', r' *\([^)]*\)', ''), | ||
| 421 | ('Obsoletes', r' *\([^)]*\)', ''), | ||
| 422 | ('Install-requires', r'^([^><= ]+).*', r'\1'), | ||
| 423 | ('Extras-require', r'^([^><= ]+).*', r'\1'), | ||
| 424 | ('Tests-require', r'^([^><= ]+).*', r'\1'), | ||
| 425 | |||
| 426 | # Remove unhandled dependency on particular features (e.g. foo[PDF]) | ||
| 427 | ('Install-requires', r'\[[^\]]+\]$', ''), | ||
| 428 | ] | ||
| 429 | |||
| 430 | def __init__(self): | ||
| 431 | pass | ||
| 432 | |||
| 433 | def parse_setup_py(self, setupscript='./setup.py'): | ||
| 434 | with codecs.open(setupscript) as f: | ||
| 435 | info, imported_modules, non_literals, extensions = gather_setup_info(f) | ||
| 436 | |||
| 437 | def _map(key): | ||
| 438 | key = key.replace('_', '-') | ||
| 439 | key = key[0].upper() + key[1:] | ||
| 440 | if key in self.setup_parse_map: | ||
| 441 | key = self.setup_parse_map[key] | ||
| 442 | return key | ||
| 443 | |||
| 444 | # Naive mapping of setup() arguments to PKG-INFO field names | ||
| 445 | for d in [info, non_literals]: | ||
| 446 | for key, value in list(d.items()): | ||
| 447 | if key is None: | ||
| 448 | continue | ||
| 449 | new_key = _map(key) | ||
| 450 | if new_key != key: | ||
| 451 | del d[key] | ||
| 452 | d[new_key] = value | ||
| 453 | |||
| 454 | return info, 'setuptools' in imported_modules, non_literals, extensions | ||
| 455 | |||
| 456 | def get_setup_args_info(self, setupscript='./setup.py'): | ||
| 457 | cmd = ['python3', setupscript] | ||
| 458 | info = {} | ||
| 459 | keys = set(self.bbvar_map.keys()) | ||
| 460 | keys |= set(self.setuparg_list_fields) | ||
| 461 | keys |= set(self.setuparg_multi_line_values) | ||
| 462 | grouped_keys = itertools.groupby(keys, lambda k: (k in self.setuparg_list_fields, k in self.setuparg_multi_line_values)) | ||
| 463 | for index, keys in grouped_keys: | ||
| 464 | if index == (True, False): | ||
| 465 | # Splitlines output for each arg as a list value | ||
| 466 | for key in keys: | ||
| 467 | arg = self.setuparg_map.get(key, key.lower()) | ||
| 468 | try: | ||
| 469 | arg_info = self.run_command(cmd + ['--' + arg], cwd=os.path.dirname(setupscript)) | ||
| 470 | except (OSError, subprocess.CalledProcessError): | ||
| 471 | pass | ||
| 472 | else: | ||
| 473 | info[key] = [l.rstrip() for l in arg_info.splitlines()] | ||
| 474 | elif index == (False, True): | ||
| 475 | # Entire output for each arg | ||
| 476 | for key in keys: | ||
| 477 | arg = self.setuparg_map.get(key, key.lower()) | ||
| 478 | try: | ||
| 479 | arg_info = self.run_command(cmd + ['--' + arg], cwd=os.path.dirname(setupscript)) | ||
| 480 | except (OSError, subprocess.CalledProcessError): | ||
| 481 | pass | ||
| 482 | else: | ||
| 483 | info[key] = arg_info | ||
| 484 | else: | ||
| 485 | info.update(self.get_setup_byline(list(keys), setupscript)) | ||
| 486 | return info | ||
| 487 | |||
| 488 | def get_setup_byline(self, fields, setupscript='./setup.py'): | ||
| 489 | info = {} | ||
| 490 | |||
| 491 | cmd = ['python3', setupscript] | ||
| 492 | cmd.extend('--' + self.setuparg_map.get(f, f.lower()) for f in fields) | ||
| 493 | try: | ||
| 494 | info_lines = self.run_command(cmd, cwd=os.path.dirname(setupscript)).splitlines() | ||
| 495 | except (OSError, subprocess.CalledProcessError): | ||
| 496 | pass | ||
| 497 | else: | ||
| 498 | if len(fields) != len(info_lines): | ||
| 499 | logger.error('Mismatch between setup.py output lines and number of fields') | ||
| 500 | sys.exit(1) | ||
| 501 | |||
| 502 | for lineno, line in enumerate(info_lines): | ||
| 503 | line = line.rstrip() | ||
| 504 | info[fields[lineno]] = line | ||
| 505 | return info | ||
| 506 | |||
| 507 | def get_pkginfo(self, pkginfo_fn): | ||
| 508 | msg = email.message_from_file(open(pkginfo_fn, 'r')) | ||
| 509 | msginfo = {} | ||
| 510 | for field in msg.keys(): | ||
| 511 | values = msg.get_all(field) | ||
| 512 | if len(values) == 1: | ||
| 513 | msginfo[field] = values[0] | ||
| 514 | else: | ||
| 515 | msginfo[field] = values | ||
| 516 | return msginfo | ||
| 517 | |||
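| | # NOTE: for example, a PKG-INFO header block | ||
| | #   Name: foo | ||
| | #   Version: 1.0 | ||
| | #   Classifier: License :: OSI Approved :: MIT License | ||
| | #   Classifier: Programming Language :: Python :: 3 | ||
| | # parses to {'Name': 'foo', 'Version': '1.0', 'Classifier': [...the two | ||
| | # classifier strings...]}. | ||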
| 518 | def scan_setup_python_deps(self, srctree, setup_info, setup_non_literals): | ||
| 519 | if 'Package-dir' in setup_info: | ||
| 520 | package_dir = setup_info['Package-dir'] | ||
| 521 | else: | ||
| 522 | package_dir = {} | ||
| 523 | |||
| 524 | dist = setuptools.Distribution() | ||
| 525 | |||
| 526 | class PackageDir(setuptools.command.build_py.build_py): | ||
| 527 | def __init__(self, package_dir): | ||
| 528 | self.package_dir = package_dir | ||
| 529 | self.dist = dist | ||
| 530 | super().__init__(self.dist) | ||
| 531 | |||
| 532 | pd = PackageDir(package_dir) | ||
| 533 | to_scan = [] | ||
| 534 | if not any(v in setup_non_literals for v in ['Py-modules', 'Scripts', 'Packages']): | ||
| 535 | if 'Py-modules' in setup_info: | ||
| 536 | for module in setup_info['Py-modules']: | ||
| 537 | try: | ||
| 538 | package, module = module.rsplit('.', 1) | ||
| 539 | except ValueError: | ||
| 540 | package, module = '.', module | ||
| 541 | module_path = os.path.join(pd.get_package_dir(package), module + '.py') | ||
| 542 | to_scan.append(module_path) | ||
| 543 | |||
| 544 | if 'Packages' in setup_info: | ||
| 545 | for package in setup_info['Packages']: | ||
| 546 | to_scan.append(pd.get_package_dir(package)) | ||
| 547 | |||
| 548 | if 'Scripts' in setup_info: | ||
| 549 | to_scan.extend(setup_info['Scripts']) | ||
| 550 | else: | ||
| 551 | logger.info("Scanning the entire source tree, as one or more of the following setup keywords are non-literal: py_modules, scripts, packages.") | ||
| 552 | |||
| 553 | if not to_scan: | ||
| 554 | to_scan = ['.'] | ||
| 555 | |||
| 556 | logger.info("Scanning paths for packages & dependencies: %s", ', '.join(to_scan)) | ||
| 557 | |||
| 558 | provided_packages = self.parse_pkgdata_for_python_packages() | ||
| 559 | scanned_deps = self.scan_python_dependencies([os.path.join(srctree, p) for p in to_scan]) | ||
| 560 | mapped_deps, unmapped_deps = set(self.base_pkgdeps), set() | ||
| 561 | for dep in scanned_deps: | ||
| 562 | mapped = provided_packages.get(dep) | ||
| 563 | if mapped: | ||
| 564 | logger.debug('Mapped %s to %s' % (dep, mapped)) | ||
| 565 | mapped_deps.add(mapped) | ||
| 566 | else: | ||
| 567 | logger.debug('Could not map %s' % dep) | ||
| 568 | unmapped_deps.add(dep) | ||
| 569 | return mapped_deps, unmapped_deps | ||
| 570 | |||
| 571 | def process(self, srctree, classes, lines_before, lines_after, handled, extravalues): | ||
| 572 | |||
| 573 | if 'buildsystem' in handled: | ||
| 574 | return False | ||
| 575 | |||
| 576 | logger.debug("Trying setup.py parser") | ||
| 577 | |||
| 578 | # Check for non-zero size setup.py files | ||
| 579 | setupfiles = RecipeHandler.checkfiles(srctree, ['setup.py']) | ||
| 580 | for fn in setupfiles: | ||
| 581 | if os.path.getsize(fn): | ||
| 582 | break | ||
| 583 | else: | ||
| 584 | logger.debug("No setup.py found") | ||
| 585 | return False | ||
| 586 | |||
| 587 | # setup.py is always parsed to get at certain required information, such as | ||
| 588 | # distutils vs setuptools | ||
| 589 | # | ||
| 590 | # If egg info is available, we use it for both its PKG-INFO metadata | ||
| 591 | # and for its requires.txt for install_requires. | ||
| 592 | # If PKG-INFO is available but no egg info is, we use that for metadata in preference to | ||
| 593 | # the parsed setup.py, but use the install_requires info from the | ||
| 594 | # parsed setup.py. | ||
| 595 | |||
| 596 | setupscript = os.path.join(srctree, 'setup.py') | ||
| 597 | try: | ||
| 598 | setup_info, uses_setuptools, setup_non_literals, extensions = self.parse_setup_py(setupscript) | ||
| 599 | except Exception: | ||
| 600 | logger.exception("Failed to parse setup.py") | ||
| 601 | setup_info, uses_setuptools, setup_non_literals, extensions = {}, True, [], [] | ||
| 602 | |||
| 603 | egginfo = glob.glob(os.path.join(srctree, '*.egg-info')) | ||
| 604 | if egginfo: | ||
| 605 | info = self.get_pkginfo(os.path.join(egginfo[0], 'PKG-INFO')) | ||
| 606 | requires_txt = os.path.join(egginfo[0], 'requires.txt') | ||
| 607 | if os.path.exists(requires_txt): | ||
| 608 | with codecs.open(requires_txt) as f: | ||
| 609 | inst_req = [] | ||
| 610 | extras_req = collections.defaultdict(list) | ||
| 611 | current_feature = None | ||
| 612 | for line in f.readlines(): | ||
| 613 | line = line.rstrip() | ||
| 614 | if not line: | ||
| 615 | continue | ||
| 616 | |||
| 617 | if line.startswith('['): | ||
| 618 | # PACKAGECONFIG must not contain expressions or whitespace | ||
| 619 | line = line.replace(" ", "") | ||
| 620 | line = line.replace(':', "") | ||
| 621 | line = line.replace('.', "-dot-") | ||
| 622 | line = line.replace('"', "") | ||
| 623 | line = line.replace('<', "-smaller-") | ||
| 624 | line = line.replace('>', "-bigger-") | ||
| 625 | line = line.replace('_', "-") | ||
| 626 | line = line.replace('(', "") | ||
| 627 | line = line.replace(')', "") | ||
| 628 | line = line.replace('!', "-not-") | ||
| 629 | line = line.replace('=', "-equals-") | ||
| 630 | current_feature = line[1:-1] | ||
| 631 | elif current_feature: | ||
| 632 | extras_req[current_feature].append(line) | ||
| 633 | else: | ||
| 634 | inst_req.append(line) | ||
| 635 | info['Install-requires'] = inst_req | ||
| 636 | info['Extras-require'] = extras_req | ||
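| | # NOTE: for example, a requires.txt section header '[socks]' becomes the | ||
| | # PACKAGECONFIG feature 'socks', while '[test:python_version<"3.8"]' is | ||
| | # mangled to 'testpython-version-smaller-3-dot-8' to stay a valid name. | ||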
| 637 | elif RecipeHandler.checkfiles(srctree, ['PKG-INFO']): | ||
| 638 | info = self.get_pkginfo(os.path.join(srctree, 'PKG-INFO')) | ||
| 639 | |||
| 640 | if setup_info: | ||
| 641 | if 'Install-requires' in setup_info: | ||
| 642 | info['Install-requires'] = setup_info['Install-requires'] | ||
| 643 | if 'Extras-require' in setup_info: | ||
| 644 | info['Extras-require'] = setup_info['Extras-require'] | ||
| 645 | else: | ||
| 646 | if setup_info: | ||
| 647 | info = setup_info | ||
| 648 | else: | ||
| 649 | info = self.get_setup_args_info(setupscript) | ||
| 650 | |||
| 651 | # Grab the license value before applying replacements | ||
| 652 | license_str = info.get('License', '').strip() | ||
| 653 | |||
| 654 | self.apply_info_replacements(info) | ||
| 655 | |||
| 656 | if uses_setuptools: | ||
| 657 | classes.append('setuptools3') | ||
| 658 | else: | ||
| 659 | classes.append('distutils3') | ||
| 660 | |||
| 661 | if license_str: | ||
| 662 | for i, line in enumerate(lines_before): | ||
| 663 | if line.startswith('##LICENSE_PLACEHOLDER##'): | ||
| 664 | lines_before.insert(i, '# NOTE: License in setup.py/PKG-INFO is: %s' % license_str) | ||
| 665 | break | ||
| 666 | |||
| 667 | if 'Classifier' in info: | ||
| 668 | license = self.handle_classifier_license(info['Classifier'], info.get('License', '')) | ||
| 669 | if license: | ||
| 670 | info['License'] = license | ||
| 671 | |||
| 672 | self.map_info_to_bbvar(info, extravalues) | ||
| 673 | |||
| 674 | mapped_deps, unmapped_deps = self.scan_setup_python_deps(srctree, setup_info, setup_non_literals) | ||
| 675 | |||
| 676 | extras_req = set() | ||
| 677 | if 'Extras-require' in info: | ||
| 678 | extras_req = info['Extras-require'] | ||
| 679 | if extras_req: | ||
| 680 | lines_after.append('# The following configs & dependencies are from setuptools extras_require.') | ||
| 681 | lines_after.append('# These dependencies are optional, hence can be controlled via PACKAGECONFIG.') | ||
| 682 | lines_after.append('# The upstream names may not correspond exactly to bitbake package names.') | ||
| 683 | lines_after.append('# The configs might not be correct, since PACKAGECONFIG does not support expressions as may be used in requires.txt - they are just replaced by text.') | ||
| 684 | lines_after.append('#') | ||
| 685 | lines_after.append('# Uncomment this line to enable all the optional features.') | ||
| 686 | lines_after.append('#PACKAGECONFIG ?= "{}"'.format(' '.join(k.lower() for k in extras_req))) | ||
| 687 | for feature, feature_reqs in extras_req.items(): | ||
| 688 | unmapped_deps.difference_update(feature_reqs) | ||
| 689 | |||
| 690 | feature_req_deps = ('python3-' + r.replace('.', '-').lower() for r in sorted(feature_reqs)) | ||
| 691 | lines_after.append('PACKAGECONFIG[{}] = ",,,{}"'.format(feature.lower(), ' '.join(feature_req_deps))) | ||
| 692 | |||
| 693 | inst_reqs = set() | ||
| 694 | if 'Install-requires' in info: | ||
| 695 | if extras_req: | ||
| 696 | lines_after.append('') | ||
| 697 | inst_reqs = info['Install-requires'] | ||
| 698 | if inst_reqs: | ||
| 699 | unmapped_deps.difference_update(inst_reqs) | ||
| 700 | |||
| 701 | inst_req_deps = ('python3-' + r.replace('.', '-').lower() for r in sorted(inst_reqs)) | ||
| 702 | lines_after.append('# WARNING: the following rdepends are from setuptools install_requires. These') | ||
| 703 | lines_after.append('# upstream names may not correspond exactly to bitbake package names.') | ||
| 704 | lines_after.append('RDEPENDS:${{PN}} += "{}"'.format(' '.join(inst_req_deps))) | ||
| 705 | |||
| 706 | if mapped_deps: | ||
| 707 | name = info.get('Name') | ||
| 708 | if name and name in mapped_deps: | ||
| 709 | # Attempt to avoid self-reference | ||
| 710 | mapped_deps.remove(name) | ||
| 711 | mapped_deps -= set(self.excluded_pkgdeps) | ||
| 712 | if inst_reqs or extras_req: | ||
| 713 | lines_after.append('') | ||
| 714 | lines_after.append('# WARNING: the following rdepends are determined through basic analysis of the') | ||
| 715 | lines_after.append('# python sources, and might not be 100% accurate.') | ||
| 716 | lines_after.append('RDEPENDS:${{PN}} += "{}"'.format(' '.join(sorted(mapped_deps)))) | ||
| 717 | |||
| 718 | unmapped_deps -= set(extensions) | ||
| 719 | unmapped_deps -= set(self.assume_provided) | ||
| 720 | if unmapped_deps: | ||
| 721 | if mapped_deps: | ||
| 722 | lines_after.append('') | ||
| 723 | lines_after.append('# WARNING: We were unable to map the following python package/module') | ||
| 724 | lines_after.append('# dependencies to the bitbake packages which include them:') | ||
| 725 | lines_after.extend('# {}'.format(d) for d in sorted(unmapped_deps)) | ||
| 726 | |||
| 727 | handled.append('buildsystem') | ||
| 728 | |||
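| | # NOTE: for an extras_require of {'socks': ['PySocks']}, the generated lines | ||
| | # look like: | ||
| | #   #PACKAGECONFIG ?= "socks" | ||
| | #   PACKAGECONFIG[socks] = ",,,python3-pysocks" | ||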
| 729 | class PythonPyprojectTomlRecipeHandler(PythonRecipeHandler): | ||
| 730 | """Base class to support PEP517 and PEP518 | ||
| 731 | |||
| 732 | PEP517 https://peps.python.org/pep-0517/#source-trees | ||
| 733 | PEP518 https://peps.python.org/pep-0518/#build-system-table | ||
| 734 | """ | ||
| 735 | # bitbake currently supports the following build backends | ||
| 736 | build_backend_map = { | ||
| 737 | "setuptools.build_meta": "python_setuptools_build_meta", | ||
| 738 | "poetry.core.masonry.api": "python_poetry_core", | ||
| 739 | "flit_core.buildapi": "python_flit_core", | ||
| 740 | "hatchling.build": "python_hatchling", | ||
| 741 | "maturin": "python_maturin", | ||
| 742 | "mesonpy": "python_mesonpy", | ||
| 743 | } | ||
| 744 | |||
| 745 | # setuptools.build_meta and flit declare project metadata in the "project" section of pyproject.toml | ||
| 746 | # according to PEP-621: https://packaging.python.org/en/latest/specifications/declaring-project-metadata/#declaring-project-metadata | ||
| 747 | # while poetry uses the "tool.poetry" section according to its official documentation: https://python-poetry.org/docs/pyproject/ | ||
| 748 | # Keys in the "project" and "tool.poetry" sections are almost the same, except that HOMEPAGE is "homepage" for tool.poetry | ||
| 749 | # and "Homepage" for the "project" section, so keep both | ||
| 750 | bbvar_map = { | ||
| 751 | "name": "PN", | ||
| 752 | "version": "PV", | ||
| 753 | "Homepage": "HOMEPAGE", | ||
| 754 | "homepage": "HOMEPAGE", | ||
| 755 | "description": "SUMMARY", | ||
| 756 | "license": "LICENSE", | ||
| 757 | "dependencies": "RDEPENDS:${PN}", | ||
| 758 | "requires": "DEPENDS", | ||
| 759 | } | ||
| 760 | |||
| 761 | replacements = [ | ||
| 762 | ("license", r" +$", ""), | ||
| 763 | ("license", r"^ +", ""), | ||
| 764 | ("license", r" ", "-"), | ||
| 765 | ("license", r"^GNU-", ""), | ||
| 766 | ("license", r"-[Ll]icen[cs]e(,?-[Vv]ersion)?", ""), | ||
| 767 | ("license", r"^UNKNOWN$", ""), | ||
| 768 | # Remove currently unhandled version numbers from these variables | ||
| 769 | ("requires", r"\[[^\]]+\]$", ""), | ||
| 770 | ("requires", r"^([^><= ]+).*", r"\1"), | ||
| 771 | ("dependencies", r"\[[^\]]+\]$", ""), | ||
| 772 | ("dependencies", r"^([^><= ]+).*", r"\1"), | ||
| 773 | ] | ||
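| | # NOTE: for example, a dependency entry 'requests>=2.13.0' is reduced to | ||
| | # 'requests', and a bare feature qualifier such as 'requests[socks]' also | ||
| | # collapses to 'requests'. | ||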
| 774 | |||
| 775 | excluded_native_pkgdeps = [ | ||
| 776 | # already provided by python_setuptools_build_meta.bbclass | ||
| 777 | "python3-setuptools-native", | ||
| 778 | "python3-wheel-native", | ||
| 779 | # already provided by python_poetry_core.bbclass | ||
| 780 | "python3-poetry-core-native", | ||
| 781 | # already provided by python_flit_core.bbclass | ||
| 782 | "python3-flit-core-native", | ||
| 783 | # already provided by python_mesonpy | ||
| 784 | "python3-meson-python-native", | ||
| 785 | ] | ||
| 786 | |||
| 787 | # add here a list of known and often used packages and the corresponding bitbake package | ||
| 788 | known_deps_map = { | ||
| 789 | "setuptools": "python3-setuptools", | ||
| 790 | "wheel": "python3-wheel", | ||
| 791 | "poetry-core": "python3-poetry-core", | ||
| 792 | "flit_core": "python3-flit-core", | ||
| 793 | "setuptools-scm": "python3-setuptools-scm", | ||
| 794 | "hatchling": "python3-hatchling", | ||
| 795 | "hatch-vcs": "python3-hatch-vcs", | ||
| 796 | "meson-python" : "python3-meson-python", | ||
| 797 | } | ||
| 798 | |||
| 799 | def __init__(self): | ||
| 800 | pass | ||
| 801 | |||
| 802 | def process(self, srctree, classes, lines_before, lines_after, handled, extravalues): | ||
| 803 | info = {} | ||
| 804 | metadata = {} | ||
| 805 | |||
| 806 | if 'buildsystem' in handled: | ||
| 807 | return False | ||
| 808 | |||
| 809 | logger.debug("Trying pyproject.toml parser") | ||
| 810 | |||
| 811 | # Check for non-zero size setup.py files | ||
| 812 | setupfiles = RecipeHandler.checkfiles(srctree, ["pyproject.toml"]) | ||
| 813 | for fn in setupfiles: | ||
| 814 | if os.path.getsize(fn): | ||
| 815 | break | ||
| 816 | else: | ||
| 817 | logger.debug("No pyproject.toml found") | ||
| 818 | return False | ||
| 819 | |||
| 820 | setupscript = os.path.join(srctree, "pyproject.toml") | ||
| 821 | |||
| 822 | try: | ||
| 823 | try: | ||
| 824 | import tomllib | ||
| 825 | except ImportError: | ||
| 826 | try: | ||
| 827 | import tomli as tomllib | ||
| 828 | except ImportError: | ||
| 829 | logger.error("Neither 'tomllib' nor 'tomli' could be imported, cannot scan pyproject.toml.") | ||
| 830 | return False | ||
| 831 | |||
| 832 | try: | ||
| 833 | with open(setupscript, "rb") as f: | ||
| 834 | config = tomllib.load(f) | ||
| 835 | except Exception: | ||
| 836 | logger.exception("Failed to parse pyproject.toml") | ||
| 837 | return False | ||
| 838 | |||
| 839 | build_backend = config["build-system"]["build-backend"] | ||
| 840 | if build_backend in self.build_backend_map: | ||
| 841 | classes.append(self.build_backend_map[build_backend]) | ||
| 842 | else: | ||
| 843 | logger.error( | ||
| 844 | "Unsupported build-backend: %s, cannot use pyproject.toml. Will try to use legacy setup.py" | ||
| 845 | % build_backend | ||
| 846 | ) | ||
| 847 | return False | ||
| 848 | |||
| 849 | licfile = "" | ||
| 850 | |||
| 851 | if build_backend == "poetry.core.masonry.api": | ||
| 852 | if "tool" in config and "poetry" in config["tool"]: | ||
| 853 | metadata = config["tool"]["poetry"] | ||
| 854 | else: | ||
| 855 | if "project" in config: | ||
| 856 | metadata = config["project"] | ||
| 857 | |||
| 858 | if metadata: | ||
| 859 | for field, values in metadata.items(): | ||
| 860 | if field == "license": | ||
| 861 | # For setuptools.build_meta and flit, license is a table, | ||
| 862 | # but for poetry license is a string; | ||
| 863 | # for hatchling, both a table (jsonschema) and a string (iniconfig) have been used | ||
| 864 | if build_backend == "poetry.core.masonry.api": | ||
| 865 | value = values | ||
| 866 | else: | ||
| 867 | value = values.get("text", "") | ||
| 868 | if not value: | ||
| 869 | licfile = values.get("file", "") | ||
| 870 | continue | ||
| 871 | elif field == "dependencies" and build_backend == "poetry.core.masonry.api": | ||
| 872 | # For poetry backend, "dependencies" section looks like: | ||
| 873 | # [tool.poetry.dependencies] | ||
| 874 | # requests = "^2.13.0" | ||
| 875 | # requests = { version = "^2.13.0", source = "private" } | ||
| 876 | # See https://python-poetry.org/docs/master/pyproject/#dependencies-and-dependency-groups for more details | ||
| 877 | # This class doesn't handle versions anyway, so we just get the dependencies name here and construct a list | ||
| 878 | value = [] | ||
| 879 | for k in values.keys(): | ||
| 880 | value.append(k) | ||
| 881 | elif isinstance(values, dict): | ||
| 882 | for k, v in values.items(): | ||
| 883 | info[k] = v | ||
| 884 | continue | ||
| 885 | else: | ||
| 886 | value = values | ||
| 887 | |||
| 888 | info[field] = value | ||
| 889 | |||
| 890 | # Grab the license value before applying replacements | ||
| 891 | license_str = info.get("license", "").strip() | ||
| 892 | |||
| 893 | if license_str: | ||
| 894 | for i, line in enumerate(lines_before): | ||
| 895 | if line.startswith("##LICENSE_PLACEHOLDER##"): | ||
| 896 | lines_before.insert( | ||
| 897 | i, "# NOTE: License in pyproject.toml is: %s" % license_str | ||
| 898 | ) | ||
| 899 | break | ||
| 900 | |||
| 901 | info["requires"] = config["build-system"]["requires"] | ||
| 902 | |||
| 903 | self.apply_info_replacements(info) | ||
| 904 | |||
| 905 | if "classifiers" in info: | ||
| 906 | license = self.handle_classifier_license( | ||
| 907 | info["classifiers"], info.get("license", "") | ||
| 908 | ) | ||
| 909 | if license: | ||
| 910 | if licfile: | ||
| 911 | lines = [] | ||
| 912 | md5value = bb.utils.md5_file(os.path.join(srctree, licfile)) | ||
| 913 | lines.append('LICENSE = "%s"' % license) | ||
| 914 | lines.append( | ||
| 915 | 'LIC_FILES_CHKSUM = "file://%s;md5=%s"' | ||
| 916 | % (licfile, md5value) | ||
| 917 | ) | ||
| 918 | lines.append("") | ||
| 919 | |||
| 920 | # Replace the placeholder so we get the values in the right place in the recipe file | ||
| 921 | try: | ||
| 922 | pos = lines_before.index("##LICENSE_PLACEHOLDER##") | ||
| 923 | except ValueError: | ||
| 924 | pos = -1 | ||
| 925 | if pos == -1: | ||
| 926 | lines_before.extend(lines) | ||
| 927 | else: | ||
| 928 | lines_before[pos : pos + 1] = lines | ||
| 929 | |||
| 930 | handled.append(("license", [license, licfile, md5value])) | ||
| 931 | else: | ||
| 932 | info["license"] = license | ||
| 933 | |||
| 934 | provided_packages = self.parse_pkgdata_for_python_packages() | ||
| 935 | provided_packages.update(self.known_deps_map) | ||
| 936 | native_mapped_deps, native_unmapped_deps = set(), set() | ||
| 937 | mapped_deps, unmapped_deps = set(), set() | ||
| 938 | |||
| 939 | if "requires" in info: | ||
| 940 | for require in info["requires"]: | ||
| 941 | mapped = provided_packages.get(require) | ||
| 942 | |||
| 943 | if mapped: | ||
| 944 | logger.debug("Mapped %s to %s" % (require, mapped)) | ||
| 945 | native_mapped_deps.add(mapped) | ||
| 946 | else: | ||
| 947 | logger.debug("Could not map %s" % require) | ||
| 948 | native_unmapped_deps.add(require) | ||
| 949 | |||
| 950 | info.pop("requires") | ||
| 951 | |||
| 952 | if native_mapped_deps != set(): | ||
| 953 | native_mapped_deps = { | ||
| 954 | item + "-native" for item in native_mapped_deps | ||
| 955 | } | ||
| 956 | native_mapped_deps -= set(self.excluded_native_pkgdeps) | ||
| 957 | if native_mapped_deps != set(): | ||
| 958 | info["requires"] = " ".join(sorted(native_mapped_deps)) | ||
| 959 | |||
| 960 | if native_unmapped_deps: | ||
| 961 | lines_after.append("") | ||
| 962 | lines_after.append( | ||
| 963 | "# WARNING: We were unable to map the following python package/module" | ||
| 964 | ) | ||
| 965 | lines_after.append( | ||
| 966 | "# dependencies to the bitbake packages which include them:" | ||
| 967 | ) | ||
| 968 | lines_after.extend( | ||
| 969 | "# {}".format(d) for d in sorted(native_unmapped_deps) | ||
| 970 | ) | ||
| 971 | |||
| 972 | if "dependencies" in info: | ||
| 973 | for dependency in info["dependencies"]: | ||
| 974 | mapped = provided_packages.get(dependency) | ||
| 975 | if mapped: | ||
| 976 | logger.debug("Mapped %s to %s" % (dependency, mapped)) | ||
| 977 | mapped_deps.add(mapped) | ||
| 978 | else: | ||
| 979 | logger.debug("Could not map %s" % dependency) | ||
| 980 | unmapped_deps.add(dependency) | ||
| 981 | |||
| 982 | info.pop("dependencies") | ||
| 983 | |||
| 984 | if mapped_deps != set(): | ||
| 986 | info["dependencies"] = " ".join(sorted(mapped_deps)) | ||
| 987 | |||
| 988 | if unmapped_deps: | ||
| 989 | lines_after.append("") | ||
| 990 | lines_after.append( | ||
| 991 | "# WARNING: We were unable to map the following python package/module" | ||
| 992 | ) | ||
| 993 | lines_after.append( | ||
| 994 | "# runtime dependencies to the bitbake packages which include them:" | ||
| 995 | ) | ||
| 996 | lines_after.extend( | ||
| 997 | "# {}".format(d) for d in sorted(unmapped_deps) | ||
| 998 | ) | ||
| 999 | |||
| 1000 | self.map_info_to_bbvar(info, extravalues) | ||
| 1001 | |||
| 1002 | handled.append("buildsystem") | ||
| 1003 | except Exception: | ||
| 1004 | logger.exception("Failed to correctly handle pyproject.toml, falling back to another method") | ||
| 1005 | return False | ||
| 1006 | |||
| 1007 | |||
| 1008 | def gather_setup_info(fileobj): | ||
| 1009 | parsed = ast.parse(fileobj.read(), fileobj.name) | ||
| 1010 | visitor = SetupScriptVisitor() | ||
| 1011 | visitor.visit(parsed) | ||
| 1012 | |||
| 1013 | non_literals, extensions = {}, [] | ||
| 1014 | for key, value in list(visitor.keywords.items()): | ||
| 1015 | if key == 'ext_modules': | ||
| 1016 | if isinstance(value, list): | ||
| 1017 | for ext in value: | ||
| 1018 | if (isinstance(ext, ast.Call) and | ||
| 1019 | isinstance(ext.func, ast.Name) and | ||
| 1020 | ext.func.id == 'Extension' and | ||
| 1021 | not has_non_literals(ext.args)): | ||
| 1022 | extensions.append(ext.args[0]) | ||
| 1023 | elif has_non_literals(value): | ||
| 1024 | non_literals[key] = value | ||
| 1025 | del visitor.keywords[key] | ||
| 1026 | |||
| 1027 | return visitor.keywords, visitor.imported_modules, non_literals, extensions | ||
| 1028 | |||
| 1029 | |||
| 1030 | class SetupScriptVisitor(ast.NodeVisitor): | ||
| 1031 | def __init__(self): | ||
| 1032 | ast.NodeVisitor.__init__(self) | ||
| 1033 | self.keywords = {} | ||
| 1034 | self.non_literals = [] | ||
| 1035 | self.imported_modules = set() | ||
| 1036 | |||
| 1037 | def visit_Expr(self, node): | ||
| 1038 | if isinstance(node.value, ast.Call) and \ | ||
| 1039 | isinstance(node.value.func, ast.Name) and \ | ||
| 1040 | node.value.func.id == 'setup': | ||
| 1041 | self.visit_setup(node.value) | ||
| 1042 | |||
| 1043 | def visit_setup(self, node): | ||
| 1044 | call = LiteralAstTransform().visit(node) | ||
| 1045 | self.keywords = call.keywords | ||
| 1046 | for k, v in self.keywords.items(): | ||
| 1047 | if has_non_literals(v): | ||
| 1048 | self.non_literals.append(k) | ||
| 1049 | |||
| 1050 | def visit_Import(self, node): | ||
| 1051 | for alias in node.names: | ||
| 1052 | self.imported_modules.add(alias.name) | ||
| 1053 | |||
| 1054 | def visit_ImportFrom(self, node): | ||
| 1055 | self.imported_modules.add(node.module) | ||
| 1056 | |||
| 1057 | |||
| 1058 | class LiteralAstTransform(ast.NodeTransformer): | ||
| 1059 | """Simplify the ast through evaluation of literals.""" | ||
| 1060 | excluded_fields = ['ctx'] | ||
| 1061 | |||
| 1062 | def visit(self, node): | ||
| 1063 | if not isinstance(node, ast.AST): | ||
| 1064 | return node | ||
| 1065 | else: | ||
| 1066 | return ast.NodeTransformer.visit(self, node) | ||
| 1067 | |||
| 1068 | def generic_visit(self, node): | ||
| 1069 | try: | ||
| 1070 | return ast.literal_eval(node) | ||
| 1071 | except ValueError: | ||
| 1072 | for field, value in ast.iter_fields(node): | ||
| 1073 | if field in self.excluded_fields: | ||
| 1074 | delattr(node, field) | ||
| 1075 | if value is None: | ||
| 1076 | continue | ||
| 1077 | |||
| 1078 | if isinstance(value, list): | ||
| 1079 | if field in ('keywords', 'kwargs'): | ||
| 1080 | new_value = dict((kw.arg, self.visit(kw.value)) for kw in value) | ||
| 1081 | else: | ||
| 1082 | new_value = [self.visit(i) for i in value] | ||
| 1083 | else: | ||
| 1084 | new_value = self.visit(value) | ||
| 1085 | setattr(node, field, new_value) | ||
| 1086 | return node | ||
| 1087 | |||
| 1088 | def visit_Name(self, node): | ||
| 1089 | if hasattr(__builtins__, node.id): | ||
| 1090 | return getattr(__builtins__, node.id) | ||
| 1091 | else: | ||
| 1092 | return self.generic_visit(node) | ||
| 1093 | |||
| 1094 | def visit_Tuple(self, node): | ||
| 1095 | return tuple(self.visit(v) for v in node.elts) | ||
| 1096 | |||
| 1097 | def visit_List(self, node): | ||
| 1098 | return [self.visit(v) for v in node.elts] | ||
| 1099 | |||
| 1100 | def visit_Set(self, node): | ||
| 1101 | return set(self.visit(v) for v in node.elts) | ||
| 1102 | |||
| 1103 | def visit_Dict(self, node): | ||
| 1104 | keys = (self.visit(k) for k in node.keys) | ||
| 1105 | values = (self.visit(v) for v in node.values) | ||
| 1106 | return dict(zip(keys, values)) | ||
| 1107 | |||
| 1108 | |||
| 1109 | def has_non_literals(value): | ||
| 1110 | if isinstance(value, ast.AST): | ||
| 1111 | return True | ||
| 1112 | elif isinstance(value, str): | ||
| 1113 | return False | ||
| 1114 | elif hasattr(value, 'values'): | ||
| 1115 | return any(has_non_literals(v) for v in value.values()) | ||
| 1116 | elif hasattr(value, '__iter__'): | ||
| 1117 | return any(has_non_literals(v) for v in value) | ||
| 1118 | |||
| 1119 | |||
| 1120 | def register_recipe_handlers(handlers): | ||
| 1121 | # These need to be ahead of the makefile fallback handler, and the | ||
| 1122 | # pyproject.toml handler needs to be ahead of the setup.py handler | ||
| 1123 | handlers.append((PythonPyprojectTomlRecipeHandler(), 75)) | ||
| 1124 | handlers.append((PythonSetupPyRecipeHandler(), 70)) | ||
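For orientation, a minimal standalone sketch (not the handler itself) of the backend-detection step above: read the PEP 517 build-backend from pyproject.toml with tomllib (stdlib since Python 3.11; the tomli package offers the same API on older interpreters) and map it to a bbclass the way build_backend_map does. The detect_bbclass helper name is illustrative:

    import tomllib  # stdlib since Python 3.11; on older versions: "import tomli as tomllib"

    # Subset of the build_backend_map table above
    BACKEND_TO_BBCLASS = {
        "setuptools.build_meta": "python_setuptools_build_meta",
        "poetry.core.masonry.api": "python_poetry_core",
        "flit_core.buildapi": "python_flit_core",
    }

    def detect_bbclass(pyproject_path):
        """Return the bbclass for the project's PEP 517 backend, or None."""
        with open(pyproject_path, "rb") as f:  # tomllib requires a binary file object
            config = tomllib.load(f)
        return BACKEND_TO_BBCLASS.get(config["build-system"]["build-backend"])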
diff --git a/scripts/lib/recipetool/create_go.py b/scripts/lib/recipetool/create_go.py deleted file mode 100644 index 5cc53931f0..0000000000 --- a/scripts/lib/recipetool/create_go.py +++ /dev/null | |||
| @@ -1,777 +0,0 @@ | |||
| 1 | # Recipe creation tool - go support plugin | ||
| 2 | # | ||
| 3 | # The code is based on golang internals. See the affected | ||
| 4 | # methods for further reference and information. | ||
| 5 | # | ||
| 6 | # Copyright (C) 2023 Weidmueller GmbH & Co KG | ||
| 7 | # Author: Lukas Funke <lukas.funke@weidmueller.com> | ||
| 8 | # | ||
| 9 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 10 | # | ||
| 11 | |||
| 12 | |||
| 13 | from collections import namedtuple | ||
| 14 | from enum import Enum | ||
| 15 | from html.parser import HTMLParser | ||
| 16 | from recipetool.create import RecipeHandler, handle_license_vars | ||
| 17 | from recipetool.create import find_licenses, tidy_licenses, fixup_license | ||
| 18 | from recipetool.create import determine_from_url | ||
| 19 | from urllib.error import URLError, HTTPError | ||
| 20 | |||
| 21 | import bb.utils | ||
| 22 | import json | ||
| 23 | import logging | ||
| 24 | import os | ||
| 25 | import re | ||
| 26 | import subprocess | ||
| 27 | import sys | ||
| 28 | import shutil | ||
| 29 | import tempfile | ||
| 30 | import urllib.parse | ||
| 31 | import urllib.request | ||
| 32 | |||
| 33 | |||
| 34 | GoImport = namedtuple('GoImport', 'root vcs url suffix') | ||
| 35 | logger = logging.getLogger('recipetool') | ||
| 36 | CodeRepo = namedtuple( | ||
| 37 | 'CodeRepo', 'path codeRoot codeDir pathMajor pathPrefix pseudoMajor') | ||
| 38 | |||
| 39 | tinfoil = None | ||
| 40 | |||
| 41 | # Regular expression to parse pseudo semantic version | ||
| 42 | # see https://go.dev/ref/mod#pseudo-versions | ||
| 43 | re_pseudo_semver = re.compile( | ||
| 44 | r"^v[0-9]+\.(0\.0-|\d+\.\d+-([^+]*\.)?0\.)(?P<utc>\d{14})-(?P<commithash>[A-Za-z0-9]+)(\+[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?$") | ||
| 45 | # Regular expression to parse semantic version | ||
| 46 | re_semver = re.compile( | ||
| 47 | r"^v(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$") | ||
| 48 | |||
| 49 | |||
| 50 | def tinfoil_init(instance): | ||
| 51 | global tinfoil | ||
| 52 | tinfoil = instance | ||
| 53 | |||
| 54 | |||
| 55 | class GoRecipeHandler(RecipeHandler): | ||
| 56 | """Class to handle the go recipe creation""" | ||
| 57 | |||
| 58 | @staticmethod | ||
| 59 | def __ensure_go(): | ||
| 60 | """Check if the 'go' command is available in the recipes""" | ||
| 61 | recipe = "go-native" | ||
| 62 | if not tinfoil.recipes_parsed: | ||
| 63 | tinfoil.parse_recipes() | ||
| 64 | try: | ||
| 65 | rd = tinfoil.parse_recipe(recipe) | ||
| 66 | except bb.providers.NoProvider: | ||
| 67 | bb.error( | ||
| 68 | "Nothing provides '%s' which is required for the build" % (recipe)) | ||
| 69 | bb.note( | ||
| 70 | "You will likely need to add a layer that provides '%s'" % (recipe)) | ||
| 71 | return None | ||
| 72 | |||
| 73 | bindir = rd.getVar('STAGING_BINDIR_NATIVE') | ||
| 74 | gopath = os.path.join(bindir, 'go') | ||
| 75 | |||
| 76 | if not os.path.exists(gopath): | ||
| 77 | tinfoil.build_targets(recipe, 'addto_recipe_sysroot') | ||
| 78 | |||
| 79 | if not os.path.exists(gopath): | ||
| 80 | logger.error( | ||
| 81 | '%s required to process specified source, but %s did not seem to populate it' % ('go', recipe)) | ||
| 82 | return None | ||
| 83 | |||
| 84 | return bindir | ||
| 85 | |||
| 86 | def __resolve_repository_static(self, modulepath): | ||
| 87 | """Resolve the repository in a static manner | ||
| 88 | |||
| 89 | The method is based on the go implementation of | ||
| 90 | `repoRootFromVCSPaths` in | ||
| 91 | https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go | ||
| 92 | """ | ||
| 93 | |||
| 94 | url = urllib.parse.urlparse("https://" + modulepath) | ||
| 95 | req = urllib.request.Request(url.geturl()) | ||
| 96 | |||
| 97 | try: | ||
| 98 | resp = urllib.request.urlopen(req) | ||
| 99 | # Some modulepath are just redirects to github (or some other vcs | ||
| 100 | # hoster). Therefore, we check if this modulepath redirects to | ||
| 101 | # somewhere else | ||
| 102 | if resp.geturl() != url.geturl(): | ||
| 103 | bb.debug(1, "%s is redirectred to %s" % | ||
| 104 | (url.geturl(), resp.geturl())) | ||
| 105 | url = urllib.parse.urlparse(resp.geturl()) | ||
| 106 | modulepath = url.netloc + url.path | ||
| 107 | |||
| 108 | except URLError as url_err: | ||
| 109 | # This is probably because the module path | ||
| 110 | # contains the subdir and major path. Thus, | ||
| 111 | # we ignore this error for now | ||
| 112 | logger.debug( | ||
| 113 | 1, "Failed to fetch page from [%s]: %s" % (url, str(url_err))) | ||
| 114 | |||
| 115 | host, _, _ = modulepath.partition('/') | ||
| 116 | |||
| 117 | class vcs(Enum): | ||
| 118 | pathprefix = "pathprefix" | ||
| 119 | regexp = "regexp" | ||
| 120 | type = "type" | ||
| 121 | repo = "repo" | ||
| 122 | check = "check" | ||
| 123 | schemelessRepo = "schemelessRepo" | ||
| 124 | |||
| 125 | # GitHub | ||
| 126 | vcsGitHub = {} | ||
| 127 | vcsGitHub[vcs.pathprefix] = "github.com" | ||
| 128 | vcsGitHub[vcs.regexp] = re.compile( | ||
| 129 | r'^(?P<root>github\.com/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
| 130 | vcsGitHub[vcs.type] = "git" | ||
| 131 | vcsGitHub[vcs.repo] = "https://\\g<root>" | ||
| 132 | |||
| 133 | # Bitbucket | ||
| 134 | vcsBitbucket = {} | ||
| 135 | vcsBitbucket[vcs.pathprefix] = "bitbucket.org" | ||
| 136 | vcsBitbucket[vcs.regexp] = re.compile( | ||
| 137 | r'^(?P<root>bitbucket\.org/(?P<bitname>[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+))(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
| 138 | vcsBitbucket[vcs.type] = "git" | ||
| 139 | vcsBitbucket[vcs.repo] = "https://\\g<root>" | ||
| 140 | |||
| 141 | # IBM DevOps Services (JazzHub) | ||
| 142 | vcsIBMDevOps = {} | ||
| 143 | vcsIBMDevOps[vcs.pathprefix] = "hub.jazz.net/git" | ||
| 144 | vcsIBMDevOps[vcs.regexp] = re.compile( | ||
| 145 | r'^(?P<root>hub\.jazz\.net/git/[a-z0-9]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
| 146 | vcsIBMDevOps[vcs.type] = "git" | ||
| 147 | vcsIBMDevOps[vcs.repo] = "https://\\g<root>" | ||
| 148 | |||
| 149 | # Git at Apache | ||
| 150 | vcsApacheGit = {} | ||
| 151 | vcsApacheGit[vcs.pathprefix] = "git.apache.org" | ||
| 152 | vcsApacheGit[vcs.regexp] = re.compile( | ||
| 153 | r'^(?P<root>git\.apache\.org/[a-z0-9_.\-]+\.git)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
| 154 | vcsApacheGit[vcs.type] = "git" | ||
| 155 | vcsApacheGit[vcs.repo] = "https://\\g<root>" | ||
| 156 | |||
| 157 | # Git at OpenStack | ||
| 158 | vcsOpenStackGit = {} | ||
| 159 | vcsOpenStackGit[vcs.pathprefix] = "git.openstack.org" | ||
| 160 | vcsOpenStackGit[vcs.regexp] = re.compile( | ||
| 161 | r'^(?P<root>git\.openstack\.org/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(\.git)?(/(?P<suffix>[A-Za-z0-9_.\-]+))*$') | ||
| 162 | vcsOpenStackGit[vcs.type] = "git" | ||
| 163 | vcsOpenStackGit[vcs.repo] = "https://\\g<root>" | ||
| 164 | |||
| 165 | # chiselapp.com for fossil | ||
| 166 | vcsChiselapp = {} | ||
| 167 | vcsChiselapp[vcs.pathprefix] = "chiselapp.com" | ||
| 168 | vcsChiselapp[vcs.regexp] = re.compile( | ||
| 169 | r'^(?P<root>chiselapp\.com/user/[A-Za-z0-9]+/repository/[A-Za-z0-9_.\-]+)$') | ||
| 170 | vcsChiselapp[vcs.type] = "fossil" | ||
| 171 | vcsChiselapp[vcs.repo] = "https://\\g<root>" | ||
| 172 | |||
| 173 | # General syntax for any server. | ||
| 174 | # Must be last. | ||
| 175 | vcsGeneralServer = {} | ||
| 176 | vcsGeneralServer[vcs.regexp] = re.compile( | ||
| 177 | "(?P<root>(?P<repo>([a-z0-9.\\-]+\\.)+[a-z0-9.\\-]+(:[0-9]+)?(/~?[A-Za-z0-9_.\\-]+)+?)\\.(?P<vcs>bzr|fossil|git|hg|svn))(/~?(?P<suffix>[A-Za-z0-9_.\\-]+))*$") | ||
| 178 | vcsGeneralServer[vcs.schemelessRepo] = True | ||
| 179 | |||
| 180 | vcsPaths = [vcsGitHub, vcsBitbucket, vcsIBMDevOps, | ||
| 181 | vcsApacheGit, vcsOpenStackGit, vcsChiselapp, | ||
| 182 | vcsGeneralServer] | ||
| 183 | |||
| 184 | if modulepath.startswith("example.net") or modulepath == "rsc.io": | ||
| 185 | logger.warning("Suspicious module path %s" % modulepath) | ||
| 186 | return None | ||
| 187 | if modulepath.startswith("http:") or modulepath.startswith("https:"): | ||
| 188 | logger.warning("Import path should not start with %s %s" % | ||
| 189 | ("http", "https")) | ||
| 190 | return None | ||
| 191 | |||
| 192 | rootpath = None | ||
| 193 | vcstype = None | ||
| 194 | repourl = None | ||
| 195 | suffix = None | ||
| 196 | |||
| 197 | for srv in vcsPaths: | ||
| 198 | m = srv[vcs.regexp].match(modulepath) | ||
| 199 | if vcs.pathprefix in srv: | ||
| 200 | if m and host == srv[vcs.pathprefix]: | ||
| 201 | rootpath = m.group('root') | ||
| 202 | vcstype = srv[vcs.type] | ||
| 203 | repourl = m.expand(srv[vcs.repo]) | ||
| 204 | suffix = m.group('suffix') | ||
| 205 | break | ||
| 206 | elif m and srv[vcs.schemelessRepo]: | ||
| 207 | rootpath = m.group('root') | ||
| 208 | vcstype = m.group('vcs') | ||
| 209 | repourl = m.group('repo') | ||
| 210 | suffix = m.group('suffix') | ||
| 211 | break | ||
| 212 | |||
| 213 | return GoImport(rootpath, vcstype, repourl, suffix) | ||
| 214 | |||
| 215 | def __resolve_repository_dynamic(self, modulepath): | ||
| 216 | """Resolve the repository root in a dynamic manner. | ||
| 217 | |||
| 218 | The method is based on the go implementation of | ||
| 219 | `repoRootForImportDynamic` in | ||
| 220 | https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go | ||
| 221 | """ | ||
| 222 | url = urllib.parse.urlparse("https://" + modulepath) | ||
| 223 | |||
| 224 | class GoImportHTMLParser(HTMLParser): | ||
| 225 | |||
| 226 | def __init__(self): | ||
| 227 | super().__init__() | ||
| 228 | self.__srv = {} | ||
| 229 | |||
| 230 | def handle_starttag(self, tag, attrs): | ||
| 231 | if tag == 'meta' and list( | ||
| 232 | filter(lambda a: (a[0] == 'name' and a[1] == 'go-import'), attrs)): | ||
| 233 | content = list( | ||
| 234 | filter(lambda a: (a[0] == 'content'), attrs)) | ||
| 235 | if content: | ||
| 236 | srv = content[0][1].split() | ||
| 237 | self.__srv[srv[0]] = srv | ||
| 238 | |||
| 239 | def go_import(self, modulepath): | ||
| 240 | if modulepath in self.__srv: | ||
| 241 | srv = self.__srv[modulepath] | ||
| 242 | return GoImport(srv[0], srv[1], srv[2], None) | ||
| 243 | return None | ||
| 244 | |||
| 245 | url = url.geturl() + "?go-get=1" | ||
| 246 | req = urllib.request.Request(url) | ||
| 247 | |||
| 248 | try: | ||
| 249 | body = urllib.request.urlopen(req).read() | ||
| 250 | except HTTPError as http_err: | ||
| 251 | logger.warning( | ||
| 252 | "Unclean status when fetching page from [%s]: %s", url, str(http_err)) | ||
| 253 | body = http_err.fp.read() | ||
| 254 | except URLError as url_err: | ||
| 255 | logger.warning( | ||
| 256 | "Failed to fetch page from [%s]: %s", url, str(url_err)) | ||
| 257 | return None | ||
| 258 | |||
| 259 | parser = GoImportHTMLParser() | ||
| 260 | parser.feed(body.decode('utf-8')) | ||
| 261 | parser.close() | ||
| 262 | |||
| 263 | return parser.go_import(modulepath) | ||
| 264 | |||
| 265 | def __resolve_from_golang_proxy(self, modulepath, version): | ||
| 266 | """ | ||
| 267 | Resolves repository data from golang proxy | ||
| 268 | """ | ||
| 269 | url = urllib.parse.urlparse("https://proxy.golang.org/" | ||
| 270 | + modulepath | ||
| 271 | + "/@v/" | ||
| 272 | + version | ||
| 273 | + ".info") | ||
| 274 | |||
| 275 | # Transform the URL to lower case; the golang proxy doesn't like mixed case | ||
| 276 | req = urllib.request.Request(url.geturl().lower()) | ||
| 277 | |||
| 278 | try: | ||
| 279 | resp = urllib.request.urlopen(req) | ||
| 280 | except URLError as url_err: | ||
| 281 | logger.warning( | ||
| 282 | "Failed to fetch page from [%s]: %s", url, str(url_err)) | ||
| 283 | return None | ||
| 284 | |||
| 285 | golang_proxy_res = resp.read().decode('utf-8') | ||
| 286 | modinfo = json.loads(golang_proxy_res) | ||
| 287 | |||
| 288 | if modinfo and 'Origin' in modinfo: | ||
| 289 | origin = modinfo['Origin'] | ||
| 290 | _root_url = urllib.parse.urlparse(origin['URL']) | ||
| 291 | |||
| 292 | # We normalize the repo URL since we don't want the scheme in it | ||
| 293 | _subdir = origin['Subdir'] if 'Subdir' in origin else None | ||
| 294 | _root, _, _ = self.__split_path_version(modulepath) | ||
| 295 | if _subdir: | ||
| 296 | _root = _root[:-len(_subdir)].strip('/') | ||
| 297 | |||
| 298 | _commit = origin['Hash'] | ||
| 299 | _vcs = origin['VCS'] | ||
| 300 | return (GoImport(_root, _vcs, _root_url.geturl(), None), _commit) | ||
| 301 | |||
| 302 | return None | ||
| 303 | |||
| 304 | def __resolve_repository(self, modulepath): | ||
| 305 | """ | ||
| 306 | Resolves src uri from go module-path | ||
| 307 | """ | ||
| 308 | repodata = self.__resolve_repository_static(modulepath) | ||
| 309 | if not repodata or not repodata.url: | ||
| 310 | repodata = self.__resolve_repository_dynamic(modulepath) | ||
| 311 | if not repodata or not repodata.url: | ||
| 312 | logger.error( | ||
| 313 | "Could not resolve repository for module path '%s'" % modulepath) | ||
| 314 | # There is no way to recover from this | ||
| 315 | sys.exit(14) | ||
| 316 | if repodata: | ||
| 317 | logger.debug("Resolved download path for import '%s' => %s" % ( | ||
| 318 | modulepath, repodata.url)) | ||
| 319 | return repodata | ||
| 320 | |||
| 321 | def __split_path_version(self, path): | ||
| 322 | i = len(path) | ||
| 323 | dot = False | ||
| 324 | for j in range(i, 0, -1): | ||
| 325 | if path[j - 1] < '0' or path[j - 1] > '9': | ||
| 326 | break | ||
| 327 | if path[j - 1] == '.': | ||
| 328 | dot = True | ||
| 329 | break | ||
| 330 | i = j - 1 | ||
| 331 | |||
| 332 | if i <= 1 or i == len( | ||
| 333 | path) or path[i - 1] != 'v' or path[i - 2] != '/': | ||
| 334 | return path, "", True | ||
| 335 | |||
| 336 | prefix, pathMajor = path[:i - 2], path[i - 2:] | ||
| 337 | if dot or len( | ||
| 338 | pathMajor) <= 2 or pathMajor[2] == '0' or pathMajor == "/v1": | ||
| 339 | return path, "", False | ||
| 340 | |||
| 341 | return prefix, pathMajor, True | ||
| 342 | |||
| 343 | def __get_path_major(self, pathMajor): | ||
| 344 | if not pathMajor: | ||
| 345 | return "" | ||
| 346 | |||
| 347 | if pathMajor[0] != '/' and pathMajor[0] != '.': | ||
| 348 | logger.error( | ||
| 349 | "pathMajor suffix %s passed to PathMajorPrefix lacks separator", pathMajor) | ||
| 350 | |||
| 351 | if pathMajor.startswith(".v") and pathMajor.endswith("-unstable"): | ||
| 352 | pathMajor = pathMajor[:-len("-unstable")] | ||
| 353 | |||
| 354 | return pathMajor[1:] | ||
| 355 | |||
| 356 | def __build_coderepo(self, repo, path): | ||
| 357 | codedir = "" | ||
| 358 | pathprefix, pathMajor, _ = self.__split_path_version(path) | ||
| 359 | if repo.root == path: | ||
| 360 | pathprefix = path | ||
| 361 | elif path.startswith(repo.root): | ||
| 362 | codedir = pathprefix[len(repo.root):].strip('/') | ||
| 363 | |||
| 364 | pseudoMajor = self.__get_path_major(pathMajor) | ||
| 365 | |||
| 366 | logger.debug("root='%s', codedir='%s', prefix='%s', pathMajor='%s', pseudoMajor='%s'", | ||
| 367 | repo.root, codedir, pathprefix, pathMajor, pseudoMajor) | ||
| 368 | |||
| 369 | return CodeRepo(path, repo.root, codedir, | ||
| 370 | pathMajor, pathprefix, pseudoMajor) | ||
| 371 | |||
| 372 | def __resolve_version(self, repo, path, version): | ||
| 373 | hash = None | ||
| 374 | coderoot = self.__build_coderepo(repo, path) | ||
| 375 | |||
| 376 | def vcs_fetch_all(): | ||
| 377 | tmpdir = tempfile.mkdtemp() | ||
| 378 | clone_cmd = "%s clone --bare %s %s" % ('git', repo.url, tmpdir) | ||
| 379 | bb.process.run(clone_cmd) | ||
| 380 | log_cmd = "git log --all --pretty='%H %d' --decorate=short" | ||
| 381 | output, _ = bb.process.run( | ||
| 382 | log_cmd, shell=True, stderr=subprocess.PIPE, cwd=tmpdir) | ||
| 383 | bb.utils.prunedir(tmpdir) | ||
| 384 | return output.strip().split('\n') | ||
| 385 | |||
| 386 | def vcs_fetch_remote(tag): | ||
| 387 | # add * to grab ^{} | ||
| 388 | refs = {} | ||
| 389 | ls_remote_cmd = "git ls-remote -q --tags {} {}*".format( | ||
| 390 | repo.url, tag) | ||
| 391 | output, _ = bb.process.run(ls_remote_cmd) | ||
| 392 | output = output.strip().split('\n') | ||
| 393 | for line in output: | ||
| 394 | f = line.split(maxsplit=1) | ||
| 395 | if len(f) != 2: | ||
| 396 | continue | ||
| 397 | |||
| 398 | for prefix in ["HEAD", "refs/heads/", "refs/tags/"]: | ||
| 399 | if f[1].startswith(prefix): | ||
| 400 | refs[f[1][len(prefix):]] = f[0] | ||
| 401 | |||
| 402 | for key, hash in list(refs.items()): | ||
| 403 | if key.endswith(r"^{}"): | ||
| 404 | refs[key.strip(r"^{}")] = hash | ||
| 405 | |||
| 406 | return refs[tag] | ||
| 407 | |||
| 408 | m_pseudo_semver = re_pseudo_semver.match(version) | ||
| 409 | |||
| 410 | if m_pseudo_semver: | ||
| 411 | remote_refs = vcs_fetch_all() | ||
| 412 | short_commit = m_pseudo_semver.group('commithash') | ||
| 413 | for l in remote_refs: | ||
| 414 | r = l.split(maxsplit=1) | ||
| 415 | sha1 = r[0] if len(r) else None | ||
| 416 | if not sha1: | ||
| 417 | logger.error( | ||
| 418 | "Ups: could not resolve abbref commit for %s" % short_commit) | ||
| 419 | |||
| 420 | elif sha1.startswith(short_commit): | ||
| 421 | hash = sha1 | ||
| 422 | break | ||
| 423 | else: | ||
| 424 | m_semver = re_semver.match(version) | ||
| 425 | if m_semver: | ||
| 426 | |||
| 427 | def get_sha1_remote(re): | ||
| 428 | rsha1 = None | ||
| 429 | for line in remote_refs: | ||
| 430 | # Split lines of the following format: | ||
| 431 | # 22e90d9b964610628c10f673ca5f85b8c2a2ca9a (tag: sometag) | ||
| 432 | lineparts = line.split(maxsplit=1) | ||
| 433 | sha1 = lineparts[0] if len(lineparts) else None | ||
| 434 | refstring = lineparts[1] if len( | ||
| 435 | lineparts) == 2 else None | ||
| 436 | if refstring: | ||
| 437 | # Normalize tag string and split in case of multiple | ||
| 438 | # refs, e.g. (tag: speech/v1.10.0, tag: orchestration/v1.5.0 ...) | ||
| 439 | refs = refstring.strip('(), ').split(',') | ||
| 440 | for ref in refs: | ||
| 441 | if re.match(ref.strip()): | ||
| 442 | rsha1 = sha1 | ||
| 443 | return rsha1 | ||
| 444 | |||
| 445 | semver = "v" + m_semver.group('major') + "."\ | ||
| 446 | + m_semver.group('minor') + "."\ | ||
| 447 | + m_semver.group('patch') \ | ||
| 448 | + (("-" + m_semver.group('prerelease')) | ||
| 449 | if m_semver.group('prerelease') else "") | ||
| 450 | |||
| 451 | tag = os.path.join( | ||
| 452 | coderoot.codeDir, semver) if coderoot.codeDir else semver | ||
| 453 | |||
| 454 | # probe tag using 'ls-remote', which is faster than fetching | ||
| 455 | # complete history | ||
| 456 | hash = vcs_fetch_remote(tag) | ||
| 457 | if not hash: | ||
| 458 | # backup: fetch complete history | ||
| 459 | remote_refs = vcs_fetch_all() | ||
| 460 | hash = get_sha1_remote( | ||
| 461 | re.compile(fr"(tag:|HEAD ->) ({tag})")) | ||
| 462 | |||
| 463 | logger.debug( | ||
| 464 | "Resolving commit for tag '%s' -> '%s'", tag, hash) | ||
| 465 | return hash | ||
| 466 | |||
| 467 | def __generate_srcuri_inline_fcn(self, path, version, replaces=None): | ||
| 468 | """Generate SRC_URI functions for go imports""" | ||
| 469 | |||
| 470 | logger.info("Resolving repository for module %s", path) | ||
| 471 | # First try to resolve repo and commit from golang proxy | ||
| 472 | # Most info is already there and we don't have to go through the | ||
| 473 | # repository or even perform the version resolve magic | ||
| 474 | golang_proxy_info = self.__resolve_from_golang_proxy(path, version) | ||
| 475 | if golang_proxy_info: | ||
| 476 | repo = golang_proxy_info[0] | ||
| 477 | commit = golang_proxy_info[1] | ||
| 478 | else: | ||
| 479 | # Fallback | ||
| 480 | # Resolve repository by 'hand' | ||
| 481 | repo = self.__resolve_repository(path) | ||
| 482 | commit = self.__resolve_version(repo, path, version) | ||
| 483 | |||
| 484 | url = urllib.parse.urlparse(repo.url) | ||
| 485 | repo_url = url.netloc + url.path | ||
| 486 | |||
| 487 | coderoot = self.__build_coderepo(repo, path) | ||
| 488 | |||
| 489 | inline_fcn = "${@go_src_uri(" | ||
| 490 | inline_fcn += f"'{repo_url}','{version}'" | ||
| 491 | if repo_url != path: | ||
| 492 | inline_fcn += f",path='{path}'" | ||
| 493 | if coderoot.codeDir: | ||
| 494 | inline_fcn += f",subdir='{coderoot.codeDir}'" | ||
| 495 | if repo.vcs != 'git': | ||
| 496 | inline_fcn += f",vcs='{repo.vcs}'" | ||
| 497 | if replaces: | ||
| 498 | inline_fcn += f",replaces='{replaces}'" | ||
| 499 | if coderoot.pathMajor: | ||
| 500 | inline_fcn += f",pathmajor='{coderoot.pathMajor}'" | ||
| 501 | inline_fcn += ")}" | ||
| 502 | |||
| 503 | return inline_fcn, commit | ||
| 504 | |||
| 505 | def __go_handle_dependencies(self, go_mod, srctree, localfilesdir, extravalues, d): | ||
| 506 | |||
| 507 | import re | ||
| 508 | src_uris = [] | ||
| 509 | src_revs = [] | ||
| 510 | |||
| 511 | def generate_src_rev(path, version, commithash): | ||
| 512 | src_rev = f"# {path}@{version} => {commithash}\n" | ||
| 513 | # Oops... maybe someone manipulated the source repository and the | ||
| 514 | # version or commit could not be resolved. This is a sign of | ||
| 515 | # a) the supply chain was manipulated (bad) | ||
| 516 | # b) the implementation for the version resolving didn't work | ||
| 517 | # anymore (less bad) | ||
| 518 | if not commithash: | ||
| 519 | src_rev += f"#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n" | ||
| 520 | src_rev += f"#!!! Could not resolve version !!!\n" | ||
| 521 | src_rev += f"#!!! Possible supply chain attack !!!\n" | ||
| 522 | src_rev += f"#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n" | ||
| 523 | src_rev += f"SRCREV_{path.replace('/', '.')} = \"{commithash}\"" | ||
| 524 | |||
| 525 | return src_rev | ||
| 526 | |||
| 527 | # We first go over the replacement list, because we are essentially | ||
| 528 | # interested only in the replaced path | ||
| 529 | if go_mod['Replace']: | ||
| 530 | for replacement in go_mod['Replace']: | ||
| 531 | oldpath = replacement['Old']['Path'] | ||
| 532 | path = replacement['New']['Path'] | ||
| 533 | version = '' | ||
| 534 | if 'Version' in replacement['New']: | ||
| 535 | version = replacement['New']['Version'] | ||
| 536 | |||
| 537 | if os.path.exists(os.path.join(srctree, path)): | ||
| 538 | # the module refers to the local path, remove it from requirement list | ||
| 539 | # because it's a local module | ||
| 540 | go_mod['Require'][:] = [v for v in go_mod['Require'] if v.get('Path') != oldpath] | ||
| 541 | else: | ||
| 542 | # Replace the path and the version, so we don't iterate replacement list anymore | ||
| 543 | for require in go_mod['Require']: | ||
| 544 | if require['Path'] == oldpath: | ||
| 545 | require.update({'Path': path, 'Version': version}) | ||
| 546 | break | ||
| 547 | |||
| 548 | for require in go_mod['Require']: | ||
| 549 | path = require['Path'] | ||
| 550 | version = require['Version'] | ||
| 551 | |||
| 552 | inline_fcn, commithash = self.__generate_srcuri_inline_fcn( | ||
| 553 | path, version) | ||
| 554 | src_uris.append(inline_fcn) | ||
| 555 | src_revs.append(generate_src_rev(path, version, commithash)) | ||
| 556 | |||
| 557 | # strip version part from module URL /vXX | ||
| 558 | baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path']) | ||
| 559 | pn, _ = determine_from_url(baseurl) | ||
| 560 | go_mods_basename = "%s-modules.inc" % pn | ||
| 561 | |||
| 562 | go_mods_filename = os.path.join(localfilesdir, go_mods_basename) | ||
| 563 | with open(go_mods_filename, "w") as f: | ||
| 564 | # We introduce this indirection to make the tests a little easier | ||
| 565 | f.write("SRC_URI += \"${GO_DEPENDENCIES_SRC_URI}\"\n") | ||
| 566 | f.write("GO_DEPENDENCIES_SRC_URI = \"\\\n") | ||
| 567 | for uri in src_uris: | ||
| 568 | f.write(" " + uri + " \\\n") | ||
| 569 | f.write("\"\n\n") | ||
| 570 | for rev in src_revs: | ||
| 571 | f.write(rev + "\n") | ||
| 572 | |||
| 573 | extravalues['extrafiles'][go_mods_basename] = go_mods_filename | ||
| 574 | |||
| 575 | def __go_run_cmd(self, cmd, cwd, d): | ||
| 576 | return bb.process.run(cmd, env=dict(os.environ, PATH=d.getVar('PATH')), | ||
| 577 | shell=True, cwd=cwd) | ||
| 578 | |||
| 579 | def __go_native_version(self, d): | ||
| 580 | stdout, _ = self.__go_run_cmd("go version", None, d) | ||
| 581 | m = re.match(r".*\sgo((\d+).(\d+).(\d+))\s([\w\/]*)", stdout) | ||
| 582 | major = int(m.group(2)) | ||
| 583 | minor = int(m.group(3)) | ||
| 584 | patch = int(m.group(4)) | ||
| 585 | |||
| 586 | return major, minor, patch | ||
| 587 | |||
| 588 | def __go_mod_patch(self, srctree, localfilesdir, extravalues, d): | ||
| 589 | |||
| 590 | patchfilename = "go.mod.patch" | ||
| 591 | go_native_version_major, go_native_version_minor, _ = self.__go_native_version( | ||
| 592 | d) | ||
| 593 | self.__go_run_cmd("go mod tidy -go=%d.%d" % | ||
| 594 | (go_native_version_major, go_native_version_minor), srctree, d) | ||
| 595 | stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d) | ||
| 596 | |||
| 597 | # Create patch in order to upgrade go version | ||
| 598 | self.__go_run_cmd("git diff go.mod > %s" % (patchfilename), srctree, d) | ||
| 599 | # Restore original state | ||
| 600 | self.__go_run_cmd("git checkout HEAD go.mod go.sum", srctree, d) | ||
| 601 | |||
| 602 | go_mod = json.loads(stdout) | ||
| 603 | tmpfile = os.path.join(localfilesdir, patchfilename) | ||
| 604 | shutil.move(os.path.join(srctree, patchfilename), tmpfile) | ||
| 605 | |||
| 606 | extravalues['extrafiles'][patchfilename] = tmpfile | ||
| 607 | |||
| 608 | return go_mod, patchfilename | ||
| 609 | |||
| 610 | def __go_mod_vendor(self, go_mod, srctree, localfilesdir, extravalues, d): | ||
| 611 | # Perform vendoring to retrieve the correct modules.txt | ||
| 612 | tmp_vendor_dir = tempfile.mkdtemp() | ||
| 613 | |||
| 614 | # -v causes go to print modules.txt to stderr | ||
| 615 | _, stderr = self.__go_run_cmd( | ||
| 616 | "go mod vendor -v -o %s" % (tmp_vendor_dir), srctree, d) | ||
| 617 | |||
| 618 | modules_txt_basename = "modules.txt" | ||
| 619 | modules_txt_filename = os.path.join(localfilesdir, modules_txt_basename) | ||
| 620 | with open(modules_txt_filename, "w") as f: | ||
| 621 | f.write(stderr) | ||
| 622 | |||
| 623 | extravalues['extrafiles'][modules_txt_basename] = modules_txt_filename | ||
| 624 | |||
| 625 | licenses = [] | ||
| 626 | lic_files_chksum = [] | ||
| 627 | licvalues = find_licenses(tmp_vendor_dir, d) | ||
| 628 | shutil.rmtree(tmp_vendor_dir) | ||
| 629 | |||
| 630 | if licvalues: | ||
| 631 | for licvalue in licvalues: | ||
| 632 | license = licvalue[0] | ||
| 633 | lics = tidy_licenses(fixup_license(license)) | ||
| 634 | lics = [lic for lic in lics if lic not in licenses] | ||
| 635 | if len(lics): | ||
| 636 | licenses.extend(lics) | ||
| 637 | lic_files_chksum.append( | ||
| 638 | 'file://src/${GO_IMPORT}/vendor/%s;md5=%s' % (licvalue[1], licvalue[2])) | ||
| 639 | |||
| 640 | # strip version part from module URL /vXX | ||
| 641 | baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path']) | ||
| 642 | pn, _ = determine_from_url(baseurl) | ||
| 643 | licenses_basename = "%s-licenses.inc" % pn | ||
| 644 | |||
| 645 | licenses_filename = os.path.join(localfilesdir, licenses_basename) | ||
| 646 | with open(licenses_filename, "w") as f: | ||
| 647 | f.write("GO_MOD_LICENSES = \"%s\"\n\n" % | ||
| 648 | ' & '.join(sorted(licenses, key=str.casefold))) | ||
| 649 | # We introduce this indirection to make the tests a little easier | ||
| 650 | f.write("LIC_FILES_CHKSUM += \"${VENDORED_LIC_FILES_CHKSUM}\"\n") | ||
| 651 | f.write("VENDORED_LIC_FILES_CHKSUM = \"\\\n") | ||
| 652 | for lic in lic_files_chksum: | ||
| 653 | f.write(" " + lic + " \\\n") | ||
| 654 | f.write("\"\n") | ||
| 655 | |||
| 656 | extravalues['extrafiles'][licenses_basename] = licenses_filename | ||
| 657 | |||
| 658 | def process(self, srctree, classes, lines_before, | ||
| 659 | lines_after, handled, extravalues): | ||
| 660 | |||
| 661 | if 'buildsystem' in handled: | ||
| 662 | return False | ||
| 663 | |||
| 664 | files = RecipeHandler.checkfiles(srctree, ['go.mod']) | ||
| 665 | if not files: | ||
| 666 | return False | ||
| 667 | |||
| 668 | d = bb.data.createCopy(tinfoil.config_data) | ||
| 669 | go_bindir = self.__ensure_go() | ||
| 670 | if not go_bindir: | ||
| 671 | sys.exit(14) | ||
| 672 | |||
| 673 | d.prependVar('PATH', '%s:' % go_bindir) | ||
| 674 | handled.append('buildsystem') | ||
| 675 | classes.append("go-vendor") | ||
| 676 | |||
| 677 | stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d) | ||
| 678 | |||
| 679 | go_mod = json.loads(stdout) | ||
| 680 | go_import = go_mod['Module']['Path'] | ||
| 681 | go_version_match = re.match("([0-9]+).([0-9]+)", go_mod['Go']) | ||
| 682 | go_version_major = int(go_version_match.group(1)) | ||
| 683 | go_version_minor = int(go_version_match.group(2)) | ||
| 684 | src_uris = [] | ||
| 685 | |||
| 686 | localfilesdir = tempfile.mkdtemp(prefix='recipetool-go-') | ||
| 687 | extravalues.setdefault('extrafiles', {}) | ||
| 688 | |||
| 689 | # Use an explicit name determined from the module name because it | ||
| 690 | # might differ from the actual URL for replaced modules | ||
| 691 | # strip version part from module URL /vXX | ||
| 692 | baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path']) | ||
| 693 | pn, _ = determine_from_url(baseurl) | ||
| 694 | |||
| 695 | # go.mod files with version < 1.17 may not include all indirect | ||
| 696 | # dependencies. Thus, we have to upgrade the go version. | ||
| 697 | if go_version_major == 1 and go_version_minor < 17: | ||
| 698 | logger.warning( | ||
| 699 | "go.mod files generated by Go < 1.17 might have incomplete indirect dependencies.") | ||
| 700 | go_mod, patchfilename = self.__go_mod_patch(srctree, localfilesdir, | ||
| 701 | extravalues, d) | ||
| 702 | src_uris.append( | ||
| 703 | "file://%s;patchdir=src/${GO_IMPORT}" % (patchfilename)) | ||
| 704 | |||
| 705 | # Check whether the module is vendored. If so, we have nothing to do. | ||
| 706 | # Otherwise we gather all dependencies and add them to the recipe | ||
| 707 | if not os.path.exists(os.path.join(srctree, "vendor")): | ||
| 708 | |||
| 709 | # Write additional $BPN-modules.inc file | ||
| 710 | self.__go_mod_vendor(go_mod, srctree, localfilesdir, extravalues, d) | ||
| 711 | lines_before.append("LICENSE += \" & ${GO_MOD_LICENSES}\"") | ||
| 712 | lines_before.append("require %s-licenses.inc" % (pn)) | ||
| 713 | |||
| 714 | self.__rewrite_src_uri(lines_before, ["file://modules.txt"]) | ||
| 715 | |||
| 716 | self.__go_handle_dependencies(go_mod, srctree, localfilesdir, extravalues, d) | ||
| 717 | lines_before.append("require %s-modules.inc" % (pn)) | ||
| 718 | |||
| 719 | # Do generic license handling | ||
| 720 | handle_license_vars(srctree, lines_before, handled, extravalues, d) | ||
| 721 | self.__rewrite_lic_uri(lines_before) | ||
| 722 | |||
| 723 | lines_before.append("GO_IMPORT = \"{}\"".format(baseurl)) | ||
| 724 | lines_before.append("SRCREV_FORMAT = \"${BPN}\"") | ||
| 725 | |||
| 726 | def __update_lines_before(self, updated, newlines, lines_before): | ||
| 727 | if updated: | ||
| 728 | del lines_before[:] | ||
| 729 | for line in newlines: | ||
| 730 | # Hack to avoid newlines that edit_metadata inserts | ||
| 731 | if line.endswith('\n'): | ||
| 732 | line = line[:-1] | ||
| 733 | lines_before.append(line) | ||
| 734 | return updated | ||
| 735 | |||
| 736 | def __rewrite_lic_uri(self, lines_before): | ||
| 737 | |||
| 738 | def varfunc(varname, origvalue, op, newlines): | ||
| 739 | if varname == 'LIC_FILES_CHKSUM': | ||
| 740 | new_licenses = [] | ||
| 741 | licenses = origvalue.split('\\') | ||
| 742 | for license in licenses: | ||
| 743 | if not license: | ||
| 744 | logger.warning("No license file was detected for the main module!") | ||
| 745 | # the license list of the main recipe must be empty | ||
| 746 | # this can happen for example in case of CLOSED license | ||
| 747 | # Fall through to complete recipe generation | ||
| 748 | continue | ||
| 749 | license = license.strip() | ||
| 750 | uri, chksum = license.split(';', 1) | ||
| 751 | url = urllib.parse.urlparse(uri) | ||
| 752 | new_uri = os.path.join( | ||
| 753 | url.scheme + "://", "src", "${GO_IMPORT}", url.netloc + url.path) + ";" + chksum | ||
| 754 | new_licenses.append(new_uri) | ||
| 755 | |||
| 756 | return new_licenses, None, -1, True | ||
| 757 | return origvalue, None, 0, True | ||
| 758 | |||
| 759 | updated, newlines = bb.utils.edit_metadata( | ||
| 760 | lines_before, ['LIC_FILES_CHKSUM'], varfunc) | ||
| 761 | return self.__update_lines_before(updated, newlines, lines_before) | ||
| 762 | |||
| 763 | def __rewrite_src_uri(self, lines_before, additional_uris = []): | ||
| 764 | |||
| 765 | def varfunc(varname, origvalue, op, newlines): | ||
| 766 | if varname == 'SRC_URI': | ||
| 767 | src_uri = ["git://${GO_IMPORT};destsuffix=git/src/${GO_IMPORT};nobranch=1;name=${BPN};protocol=https"] | ||
| 768 | src_uri.extend(additional_uris) | ||
| 769 | return src_uri, None, -1, True | ||
| 770 | return origvalue, None, 0, True | ||
| 771 | |||
| 772 | updated, newlines = bb.utils.edit_metadata(lines_before, ['SRC_URI'], varfunc) | ||
| 773 | return self.__update_lines_before(updated, newlines, lines_before) | ||
| 774 | |||
| 775 | |||
| 776 | def register_recipe_handlers(handlers): | ||
| 777 | handlers.append((GoRecipeHandler(), 60)) | ||
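To illustrate the version resolution above: a Go pseudo-version embeds a UTC timestamp and an abbreviated commit hash, which __resolve_version matches against the repository's refs. A small self-contained sketch using the same re_pseudo_semver pattern (the version string is an illustrative example, not taken from a real module):

    import re

    # Same pattern as re_pseudo_semver above; see https://go.dev/ref/mod#pseudo-versions
    re_pseudo_semver = re.compile(
        r"^v[0-9]+\.(0\.0-|\d+\.\d+-([^+]*\.)?0\.)(?P<utc>\d{14})-(?P<commithash>[A-Za-z0-9]+)(\+[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?$")

    m = re_pseudo_semver.match("v0.0.0-20230522175439-e0fa539c2385")
    if m:
        print(m.group("utc"))         # 20230522175439
        print(m.group("commithash"))  # e0fa539c2385, matched against 'git log --all' output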
diff --git a/scripts/lib/recipetool/create_kernel.py b/scripts/lib/recipetool/create_kernel.py deleted file mode 100644 index 5740589a68..0000000000 --- a/scripts/lib/recipetool/create_kernel.py +++ /dev/null | |||
| @@ -1,89 +0,0 @@ | |||
| 1 | # Recipe creation tool - kernel support plugin | ||
| 2 | # | ||
| 3 | # Copyright (C) 2016 Intel Corporation | ||
| 4 | # | ||
| 5 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 6 | # | ||
| 7 | |||
| 8 | import os, re | ||
| 9 | import logging | ||
| 10 | from recipetool.create import RecipeHandler, read_pkgconfig_provides, validate_pv | ||
| 11 | |||
| 12 | logger = logging.getLogger('recipetool') | ||
| 13 | |||
| 14 | tinfoil = None | ||
| 15 | |||
| 16 | def tinfoil_init(instance): | ||
| 17 | global tinfoil | ||
| 18 | tinfoil = instance | ||
| 19 | |||
| 20 | |||
| 21 | class KernelRecipeHandler(RecipeHandler): | ||
| 22 | def process(self, srctree, classes, lines_before, lines_after, handled, extravalues): | ||
| 23 | import bb.process | ||
| 24 | if 'buildsystem' in handled: | ||
| 25 | return False | ||
| 26 | |||
| 27 | for tell in ['arch', 'firmware', 'Kbuild', 'Kconfig']: | ||
| 28 | if not os.path.exists(os.path.join(srctree, tell)): | ||
| 29 | return False | ||
| 30 | |||
| 31 | handled.append('buildsystem') | ||
| 32 | del lines_after[:] | ||
| 33 | del classes[:] | ||
| 34 | template = os.path.join(tinfoil.config_data.getVar('COREBASE'), 'meta-skeleton', 'recipes-kernel', 'linux', 'linux-yocto-custom.bb') | ||
| 35 | def handle_var(varname, origvalue, op, newlines): | ||
| 36 | if varname in ['SRCREV', 'SRCREV_machine']: | ||
| 37 | while newlines[-1].startswith('#'): | ||
| 38 | del newlines[-1] | ||
| 39 | try: | ||
| 40 | stdout, _ = bb.process.run('git rev-parse HEAD', cwd=srctree, shell=True) | ||
| 41 | except bb.process.ExecutionError as e: | ||
| 42 | stdout = None | ||
| 43 | if stdout: | ||
| 44 | return stdout.strip(), op, 0, True | ||
| 45 | elif varname == 'LINUX_VERSION': | ||
| 46 | makefile = os.path.join(srctree, 'Makefile') | ||
| 47 | if os.path.exists(makefile): | ||
| 48 | kversion = -1 | ||
| 49 | kpatchlevel = -1 | ||
| 50 | ksublevel = -1 | ||
| 51 | kextraversion = '' | ||
| 52 | with open(makefile, 'r', errors='surrogateescape') as f: | ||
| 53 | for i, line in enumerate(f): | ||
| 54 | if i > 10: | ||
| 55 | break | ||
| 56 | if line.startswith('VERSION ='): | ||
| 57 | kversion = int(line.split('=')[1].strip()) | ||
| 58 | elif line.startswith('PATCHLEVEL ='): | ||
| 59 | kpatchlevel = int(line.split('=')[1].strip()) | ||
| 60 | elif line.startswith('SUBLEVEL ='): | ||
| 61 | ksublevel = int(line.split('=')[1].strip()) | ||
| 62 | elif line.startswith('EXTRAVERSION ='): | ||
| 63 | kextraversion = line.split('=')[1].strip() | ||
| 64 | version = '' | ||
| 65 | if kversion > -1 and kpatchlevel > -1: | ||
| 66 | version = '%d.%d' % (kversion, kpatchlevel) | ||
| 67 | if ksublevel > -1: | ||
| 68 | version += '.%d' % ksublevel | ||
| 69 | version += kextraversion | ||
| 70 | if version: | ||
| 71 | return version, op, 0, True | ||
| 72 | elif varname == 'SRC_URI': | ||
| 73 | while newlines[-1].startswith('#'): | ||
| 74 | del newlines[-1] | ||
| 75 | elif varname == 'COMPATIBLE_MACHINE': | ||
| 76 | while newlines[-1].startswith('#'): | ||
| 77 | del newlines[-1] | ||
| 78 | machine = tinfoil.config_data.getVar('MACHINE') | ||
| 79 | return machine, op, 0, True | ||
| 80 | return origvalue, op, 0, True | ||
| 81 | with open(template, 'r') as f: | ||
| 82 | varlist = ['SRCREV', 'SRCREV_machine', 'SRC_URI', 'LINUX_VERSION', 'COMPATIBLE_MACHINE'] | ||
| 83 | (_, newlines) = bb.utils.edit_metadata(f, varlist, handle_var) | ||
| 84 | lines_before[:] = [line.rstrip('\n') for line in newlines] | ||
| 85 | |||
| 86 | return True | ||
| 87 | |||
| 88 | def register_recipe_handlers(handlers): | ||
| 89 | handlers.append((KernelRecipeHandler(), 100)) | ||
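As a standalone illustration of the LINUX_VERSION extraction that handle_var performs above, a sketch reading the VERSION/PATCHLEVEL/SUBLEVEL/EXTRAVERSION fields from the top of a kernel Makefile (the helper name and sample input are illustrative):

    import re

    def kernel_version_from_makefile(text):
        # The kernel's top-level Makefile starts with VERSION/PATCHLEVEL/SUBLEVEL/EXTRAVERSION
        fields = dict(re.findall(r"^(VERSION|PATCHLEVEL|SUBLEVEL|EXTRAVERSION)\s*=\s*(.*)$",
                                 text, re.MULTILINE))
        version = "%s.%s" % (fields["VERSION"], fields["PATCHLEVEL"])
        if fields.get("SUBLEVEL"):
            version += ".%s" % fields["SUBLEVEL"]
        return version + fields.get("EXTRAVERSION", "")

    print(kernel_version_from_makefile("VERSION = 6\nPATCHLEVEL = 1\nSUBLEVEL = 38\nEXTRAVERSION =\n"))
    # -> 6.1.38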
diff --git a/scripts/lib/recipetool/create_kmod.py b/scripts/lib/recipetool/create_kmod.py deleted file mode 100644 index cc00106961..0000000000 --- a/scripts/lib/recipetool/create_kmod.py +++ /dev/null | |||
| @@ -1,142 +0,0 @@ | |||
| 1 | # Recipe creation tool - kernel module support plugin | ||
| 2 | # | ||
| 3 | # Copyright (C) 2016 Intel Corporation | ||
| 4 | # | ||
| 5 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 6 | # | ||
| 7 | |||
| 8 | import os, re | ||
| 9 | import logging | ||
| 10 | from recipetool.create import RecipeHandler, read_pkgconfig_provides, validate_pv | ||
| 11 | |||
| 12 | logger = logging.getLogger('recipetool') | ||
| 13 | |||
| 14 | tinfoil = None | ||
| 15 | |||
| 16 | def tinfoil_init(instance): | ||
| 17 | global tinfoil | ||
| 18 | tinfoil = instance | ||
| 19 | |||
| 20 | |||
| 21 | class KernelModuleRecipeHandler(RecipeHandler): | ||
| 22 | def process(self, srctree, classes, lines_before, lines_after, handled, extravalues): | ||
| 23 | import bb.process | ||
| 24 | if 'buildsystem' in handled: | ||
| 25 | return False | ||
| 26 | |||
| 27 | module_inc_re = re.compile(r'^#include\s+<linux/module.h>$') | ||
| 28 | makefiles = [] | ||
| 29 | is_module = False | ||
| 30 | |||
| 32 | |||
| 33 | files = RecipeHandler.checkfiles(srctree, ['*.c', '*.h'], recursive=True, excludedirs=['contrib', 'test', 'examples']) | ||
| 34 | if files: | ||
| 35 | for cfile in files: | ||
| 36 | # Look in same dir or parent for Makefile | ||
| 37 | for makefile in [os.path.join(os.path.dirname(cfile), 'Makefile'), os.path.join(os.path.dirname(os.path.dirname(cfile)), 'Makefile')]: | ||
| 38 | if makefile in makefiles: | ||
| 39 | break | ||
| 40 | else: | ||
| 41 | if os.path.exists(makefile): | ||
| 42 | makefiles.append(makefile) | ||
| 43 | break | ||
| 44 | else: | ||
| 45 | continue | ||
| 46 | with open(cfile, 'r', errors='surrogateescape') as f: | ||
| 47 | for line in f: | ||
| 48 | if module_inc_re.match(line.strip()): | ||
| 49 | is_module = True | ||
| 50 | break | ||
| 51 | if is_module: | ||
| 52 | break | ||
| 53 | |||
| 54 | if is_module: | ||
| 55 | classes.append('module') | ||
| 56 | handled.append('buildsystem') | ||
| 57 | # module.bbclass and the classes it inherits do most of the hard | ||
| 58 | # work, but we need to tweak it slightly depending on what the | ||
| 59 | # Makefile does (and there is a range of those) | ||
| 60 | # Check the makefile for the appropriate install target | ||
| 61 | install_lines = [] | ||
| 62 | compile_lines = [] | ||
| 63 | in_install = False | ||
| 64 | in_compile = False | ||
| 65 | install_target = None | ||
| 66 | with open(makefile, 'r', errors='surrogateescape') as f: | ||
| 67 | for line in f: | ||
| 68 | if line.startswith('install:'): | ||
| 69 | if not install_lines: | ||
| 70 | in_install = True | ||
| 71 | install_target = 'install' | ||
| 72 | elif line.startswith('modules_install:'): | ||
| 73 | install_lines = [] | ||
| 74 | in_install = True | ||
| 75 | install_target = 'modules_install' | ||
| 76 | elif line.startswith('modules:'): | ||
| 77 | compile_lines = [] | ||
| 78 | in_compile = True | ||
| 79 | elif line.startswith(('all:', 'default:')): | ||
| 80 | if not compile_lines: | ||
| 81 | in_compile = True | ||
| 82 | elif line: | ||
| 83 | if line[0] == '\t': | ||
| 84 | if in_install: | ||
| 85 | install_lines.append(line) | ||
| 86 | elif in_compile: | ||
| 87 | compile_lines.append(line) | ||
| 88 | elif ':' in line: | ||
| 89 | in_install = False | ||
| 90 | in_compile = False | ||
| 91 | |||
| 92 | def check_target(lines, install): | ||
| 93 | kdirpath = '' | ||
| 94 | manual_install = False | ||
| 95 | for line in lines: | ||
| 96 | splitline = line.split() | ||
| 97 | if splitline[0] in ['make', 'gmake', '$(MAKE)']: | ||
| 98 | if '-C' in splitline: | ||
| 99 | idx = splitline.index('-C') + 1 | ||
| 100 | if idx < len(splitline): | ||
| 101 | kdirpath = splitline[idx] | ||
| 102 | break | ||
| 103 | elif install and splitline[0] == 'install': | ||
| 104 | if '.ko' in line: | ||
| 105 | manual_install = True | ||
| 106 | return kdirpath, manual_install | ||
| 107 | |||
| 108 | kdirpath = None | ||
| 109 | manual_install = False | ||
| 110 | if install_lines: | ||
| 111 | kdirpath, manual_install = check_target(install_lines, install=True) | ||
| 112 | if compile_lines and not kdirpath: | ||
| 113 | kdirpath, _ = check_target(compile_lines, install=False) | ||
| 114 | |||
| 115 | if manual_install or not install_lines: | ||
| 116 | lines_after.append('EXTRA_OEMAKE:append:task-install = " -C ${STAGING_KERNEL_DIR} M=${S}"') | ||
| 117 | elif install_target and install_target != 'modules_install': | ||
| 118 | lines_after.append('MODULES_INSTALL_TARGET = "install"') | ||
| 119 | |||
| 120 | warnmsg = None | ||
| 121 | kdirvar = None | ||
| 122 | if kdirpath: | ||
| 123 | res = re.match(r'\$\(([^$)]+)\)', kdirpath) | ||
| 124 | if res: | ||
| 125 | kdirvar = res.group(1) | ||
| 126 | if kdirvar != 'KERNEL_SRC': | ||
| 127 | lines_after.append('EXTRA_OEMAKE += "%s=${STAGING_KERNEL_DIR}"' % kdirvar) | ||
| 128 | elif kdirpath.startswith('/lib/'): | ||
| 129 | warnmsg = 'Kernel path in install makefile is hardcoded - you will need to patch the makefile' | ||
| 130 | if not kdirvar and not warnmsg: | ||
| 131 | warnmsg = 'Unable to find means of passing kernel path into install makefile - if kernel path is hardcoded you will need to patch the makefile' | ||
| 132 | if warnmsg: | ||
| 133 | warnmsg += '. Note that the variable KERNEL_SRC will be passed in as the kernel source path.' | ||
| 134 | logger.warning(warnmsg) | ||
| 135 | lines_after.append('# %s' % warnmsg) | ||
| 136 | |||
| 137 | return True | ||
| 138 | |||
| 139 | return False | ||
| 140 | |||
| 141 | def register_recipe_handlers(handlers): | ||
| 142 | handlers.append((KernelModuleRecipeHandler(), 15)) | ||
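A minimal sketch of the detection heuristic this handler applies above: a source tree is treated as out-of-tree kernel module code when one of its C files includes <linux/module.h> (the helper name and sample input are illustrative):

    import re

    module_inc_re = re.compile(r'^#include\s+<linux/module\.h>$')

    def looks_like_kernel_module(source_text):
        # A line that is exactly an include of linux/module.h marks kernel module code
        return any(module_inc_re.match(line.strip()) for line in source_text.splitlines())

    print(looks_like_kernel_module('#include <linux/module.h>\nMODULE_LICENSE("GPL");\n'))  # True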
diff --git a/scripts/lib/recipetool/create_npm.py b/scripts/lib/recipetool/create_npm.py deleted file mode 100644 index 3363a0e7ee..0000000000 --- a/scripts/lib/recipetool/create_npm.py +++ /dev/null | |||
| @@ -1,299 +0,0 @@ | |||
| 1 | # Copyright (C) 2016 Intel Corporation | ||
| 2 | # Copyright (C) 2020 Savoir-Faire Linux | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 5 | # | ||
| 6 | """Recipe creation tool - npm module support plugin""" | ||
| 7 | |||
| 8 | import json | ||
| 9 | import logging | ||
| 10 | import os | ||
| 11 | import re | ||
| 12 | import sys | ||
| 13 | import tempfile | ||
| 14 | import bb | ||
| 15 | from bb.fetch2.npm import NpmEnvironment | ||
| 16 | from bb.fetch2.npm import npm_package | ||
| 17 | from bb.fetch2.npmsw import foreach_dependencies | ||
| 18 | from recipetool.create import RecipeHandler | ||
| 19 | from recipetool.create import match_licenses, find_license_files, generate_common_licenses_chksums | ||
| 20 | from recipetool.create import split_pkg_licenses | ||
| 21 | logger = logging.getLogger('recipetool') | ||
| 22 | |||
| 23 | TINFOIL = None | ||
| 24 | |||
| 25 | def tinfoil_init(instance): | ||
| 26 | """Initialize tinfoil""" | ||
| 27 | global TINFOIL | ||
| 28 | TINFOIL = instance | ||
| 29 | |||
| 30 | class NpmRecipeHandler(RecipeHandler): | ||
| 31 | """Class to handle the npm recipe creation""" | ||
| 32 | |||
| 33 | @staticmethod | ||
| 34 | def _get_registry(lines): | ||
| 35 | """Get the registry value from the 'npm://registry' url""" | ||
| 36 | registry = None | ||
| 37 | |||
| 38 | def _handle_registry(varname, origvalue, op, newlines): | ||
| 39 | nonlocal registry | ||
| 40 | if origvalue.startswith("npm://"): | ||
| 41 | registry = re.sub(r"^npm://", "http://", origvalue.split(";")[0]) | ||
| 42 | return origvalue, None, 0, True | ||
| 43 | |||
| 44 | bb.utils.edit_metadata(lines, ["SRC_URI"], _handle_registry) | ||
| 45 | |||
| 46 | return registry | ||
| 47 | |||
| 48 | @staticmethod | ||
| 49 | def _ensure_npm(): | ||
| 50 | """Check if the 'npm' command is available in the recipes""" | ||
| 51 | if not TINFOIL.recipes_parsed: | ||
| 52 | TINFOIL.parse_recipes() | ||
| 53 | |||
| 54 | try: | ||
| 55 | d = TINFOIL.parse_recipe("nodejs-native") | ||
| 56 | except bb.providers.NoProvider: | ||
| 57 | bb.error("Nothing provides 'nodejs-native' which is required for the build") | ||
| 58 | bb.note("You will likely need to add a layer that provides nodejs") | ||
| 59 | sys.exit(14) | ||
| 60 | |||
| 61 | bindir = d.getVar("STAGING_BINDIR_NATIVE") | ||
| 62 | npmpath = os.path.join(bindir, "npm") | ||
| 63 | |||
| 64 | if not os.path.exists(npmpath): | ||
| 65 | TINFOIL.build_targets("nodejs-native", "addto_recipe_sysroot") | ||
| 66 | |||
| 67 | if not os.path.exists(npmpath): | ||
| 68 | bb.error("Failed to add 'npm' to sysroot") | ||
| 69 | sys.exit(14) | ||
| 70 | |||
| 71 | return bindir | ||
| 72 | |||
| 73 | @staticmethod | ||
| 74 | def _npm_global_configs(dev): | ||
| 75 | """Get the npm global configuration""" | ||
| 76 | configs = [] | ||
| 77 | |||
| 78 | if dev: | ||
| 79 | configs.append(("also", "development")) | ||
| 80 | else: | ||
| 81 | configs.append(("only", "production")) | ||
| 82 | |||
| 83 | configs.append(("save", "false")) | ||
| 84 | configs.append(("package-lock", "false")) | ||
| 85 | configs.append(("shrinkwrap", "false")) | ||
| 86 | return configs | ||
| 87 | |||
| 88 | def _run_npm_install(self, d, srctree, registry, dev): | ||
| 89 | """Run the 'npm install' command without building the addons""" | ||
| 90 | configs = self._npm_global_configs(dev) | ||
| 91 | configs.append(("ignore-scripts", "true")) | ||
| 92 | |||
| 93 | if registry: | ||
| 94 | configs.append(("registry", registry)) | ||
| 95 | |||
| 96 | bb.utils.remove(os.path.join(srctree, "node_modules"), recurse=True) | ||
| 97 | |||
| 98 | env = NpmEnvironment(d, configs=configs) | ||
| 99 | env.run("npm install", workdir=srctree) | ||
| 100 | |||
| 101 | def _generate_shrinkwrap(self, d, srctree, dev): | ||
| 102 | """Check and generate the 'npm-shrinkwrap.json' file if needed""" | ||
| 103 | configs = self._npm_global_configs(dev) | ||
| 104 | |||
| 105 | env = NpmEnvironment(d, configs=configs) | ||
| 106 | env.run("npm shrinkwrap", workdir=srctree) | ||
| 107 | |||
| 108 | return os.path.join(srctree, "npm-shrinkwrap.json") | ||
| 109 | |||
| 110 | def _handle_licenses(self, srctree, shrinkwrap_file, dev): | ||
| 111 | """Return the extra license files and the list of packages""" | ||
| 112 | licfiles = [] | ||
| 113 | packages = {} | ||
| 114 | # Licenses from package.json will point to COMMON_LICENSE_DIR so we need | ||
| 115 | # to associate them explicitly with packages for split_pkg_licenses() | ||
| 116 | fallback_licenses = dict() | ||
| 117 | |||
| 118 | def _find_package_licenses(destdir): | ||
| 119 | """Either find license files, or use package.json metadata""" | ||
| 120 | def _get_licenses_from_package_json(package_json): | ||
| 121 | with open(os.path.join(srctree, package_json), "r") as f: | ||
| 122 | data = json.load(f) | ||
| 123 | if "license" in data: | ||
| 124 | licenses = data["license"].split(" ") | ||
| 125 | licenses = [lic.strip("()") for lic in licenses if lic not in ("OR", "AND")] | ||
| 126 | return [], licenses | ||
| 127 | else: | ||
| 128 | return [package_json], None | ||
| 129 | |||
| 130 | basedir = os.path.join(srctree, destdir) | ||
| 131 | licfiles = find_license_files(basedir) | ||
| 132 | if len(licfiles) > 0: | ||
| 133 | return licfiles, None | ||
| 134 | else: | ||
| 135 | # A license wasn't found in the package directory, so we'll use the package.json metadata | ||
| 136 | pkg_json = os.path.join(basedir, "package.json") | ||
| 137 | return _get_licenses_from_package_json(pkg_json) | ||
| 138 | |||
| 139 | def _get_package_licenses(destdir, package): | ||
| 140 | (package_licfiles, package_licenses) = _find_package_licenses(destdir) | ||
| 141 | if package_licfiles: | ||
| 142 | licfiles.extend(package_licfiles) | ||
| 143 | else: | ||
| 144 | fallback_licenses[package] = package_licenses | ||
| 145 | |||
| 146 | # Handle the dependencies | ||
| 147 | def _handle_dependency(name, params, destdir): | ||
| 148 | deptree = destdir.split('node_modules/') | ||
| 149 | suffix = "-".join([npm_package(dep) for dep in deptree]) | ||
| 150 | packages["${PN}" + suffix] = destdir | ||
| 151 | _get_package_licenses(destdir, "${PN}" + suffix) | ||
| 152 | |||
| 153 | with open(shrinkwrap_file, "r") as f: | ||
| 154 | shrinkwrap = json.load(f) | ||
| 155 | foreach_dependencies(shrinkwrap, _handle_dependency, dev) | ||
| 156 | |||
| 157 | # Handle the parent package | ||
| 158 | packages["${PN}"] = "" | ||
| 159 | _get_package_licenses(srctree, "${PN}") | ||
| 160 | |||
| 161 | return licfiles, packages, fallback_licenses | ||
| 162 | |||
| 163 | # Handle the peer dependencies | ||
| 164 | def _handle_peer_dependency(self, shrinkwrap_file): | ||
| 165 | """Check if package has peer dependencies and show warning if it is the case""" | ||
| 166 | with open(shrinkwrap_file, "r") as f: | ||
| 167 | shrinkwrap = json.load(f) | ||
| 168 | |||
| 169 | packages = shrinkwrap.get("packages", {}) | ||
| 170 | peer_deps = packages.get("", {}).get("peerDependencies", {}) | ||
| 171 | |||
| 172 | for peer_dep in peer_deps: | ||
| 173 | peer_dep_yocto_name = npm_package(peer_dep) | ||
| 174 | bb.warn(peer_dep + " is a peer dependency of the current package. " + | ||
| 175 | "Please add this peer dependency to the RDEPENDS variable as %s and generate its recipe with devtool" | ||
| 176 | % peer_dep_yocto_name) | ||
| 177 | |||
| 178 | |||
| 179 | |||
| 180 | def process(self, srctree, classes, lines_before, lines_after, handled, extravalues): | ||
| 181 | """Handle the npm recipe creation""" | ||
| 182 | |||
| 183 | if "buildsystem" in handled: | ||
| 184 | return False | ||
| 185 | |||
| 186 | files = RecipeHandler.checkfiles(srctree, ["package.json"]) | ||
| 187 | |||
| 188 | if not files: | ||
| 189 | return False | ||
| 190 | |||
| 191 | with open(files[0], "r") as f: | ||
| 192 | data = json.load(f) | ||
| 193 | |||
| 194 | if "name" not in data or "version" not in data: | ||
| 195 | return False | ||
| 196 | |||
| 197 | extravalues["PN"] = npm_package(data["name"]) | ||
| 198 | extravalues["PV"] = data["version"] | ||
| 199 | |||
| 200 | if "description" in data: | ||
| 201 | extravalues["SUMMARY"] = data["description"] | ||
| 202 | |||
| 203 | if "homepage" in data: | ||
| 204 | extravalues["HOMEPAGE"] = data["homepage"] | ||
| 205 | |||
| 206 | dev = bb.utils.to_boolean(str(extravalues.get("NPM_INSTALL_DEV", "0")), False) | ||
| 207 | registry = self._get_registry(lines_before) | ||
| 208 | |||
| 209 | bb.note("Checking if npm is available ...") | ||
| 210 | # The native npm is used here (and not the host one) to ensure that the | ||
| 211 | # npm version is high enough to ensure an efficient dependency tree | ||
| 212 | # resolution and avoid issues with the shrinkwrap file format. | ||
| 213 | # Moreover, the native npm is mandatory for the build. | ||
| 214 | bindir = self._ensure_npm() | ||
| 215 | |||
| 216 | d = bb.data.createCopy(TINFOIL.config_data) | ||
| 217 | d.prependVar("PATH", bindir + ":") | ||
| 218 | d.setVar("S", srctree) | ||
| 219 | |||
| 220 | bb.note("Generating shrinkwrap file ...") | ||
| 221 | # To generate the shrinkwrap file, the dependencies have to be installed | ||
| 222 | # first. During the generation process some files may be updated / | ||
| 223 | # deleted. By default devtool tracks the diffs in the srctree and raises | ||
| 224 | # errors when finishing the recipe if some diffs are found. | ||
| 225 | git_exclude_file = os.path.join(srctree, ".git", "info", "exclude") | ||
| 226 | if os.path.exists(git_exclude_file): | ||
| 227 | with open(git_exclude_file, "r+") as f: | ||
| 228 | lines = f.readlines() | ||
| 229 | for line in ["/node_modules/", "/npm-shrinkwrap.json"]: | ||
| 230 | if line + "\n" not in lines: | ||
| 231 | f.write(line + "\n") | ||
| 232 | |||
| 233 | lock_file = os.path.join(srctree, "package-lock.json") | ||
| 234 | lock_copy = lock_file + ".copy" | ||
| 235 | if os.path.exists(lock_file): | ||
| 236 | bb.utils.copyfile(lock_file, lock_copy) | ||
| 237 | |||
| 238 | self._run_npm_install(d, srctree, registry, dev) | ||
| 239 | shrinkwrap_file = self._generate_shrinkwrap(d, srctree, dev) | ||
| 240 | |||
| 241 | with open(shrinkwrap_file, "r") as f: | ||
| 242 | shrinkwrap = json.load(f) | ||
| 243 | |||
| 244 | if os.path.exists(lock_copy): | ||
| 245 | bb.utils.movefile(lock_copy, lock_file) | ||
| 246 | |||
| 247 | # Add the shrinkwrap file as 'extrafiles' | ||
| 248 | shrinkwrap_copy = shrinkwrap_file + ".copy" | ||
| 249 | bb.utils.copyfile(shrinkwrap_file, shrinkwrap_copy) | ||
| 250 | extravalues.setdefault("extrafiles", {}) | ||
| 251 | extravalues["extrafiles"]["npm-shrinkwrap.json"] = shrinkwrap_copy | ||
| 252 | |||
| 253 | url_local = "npmsw://%s" % shrinkwrap_file | ||
| 254 | url_recipe= "npmsw://${THISDIR}/${BPN}/npm-shrinkwrap.json" | ||
| 255 | |||
| 256 | if dev: | ||
| 257 | url_local += ";dev=1" | ||
| 258 | url_recipe += ";dev=1" | ||
| 259 | |||
| 260 | # Add the npmsw url in the SRC_URI of the generated recipe | ||
| 261 | def _handle_srcuri(varname, origvalue, op, newlines): | ||
| 262 | """Update the version value and add the 'npmsw://' url""" | ||
| 263 | value = origvalue.replace("version=" + data["version"], "version=${PV}") | ||
| 264 | value = value.replace("version=latest", "version=${PV}") | ||
| 265 | values = [line.strip() for line in value.strip('\n').splitlines()] | ||
| 266 | if "dependencies" in shrinkwrap.get("packages", {}).get("", {}): | ||
| 267 | values.append(url_recipe) | ||
| 268 | return values, None, 4, False | ||
| 269 | |||
| 270 | (_, newlines) = bb.utils.edit_metadata(lines_before, ["SRC_URI"], _handle_srcuri) | ||
| 271 | lines_before[:] = [line.rstrip('\n') for line in newlines] | ||
| 272 | |||
| 273 | # In order to generate correct license checksums in the recipe, the | ||
| 274 | # dependencies have to be fetched again using the npmsw url | ||
| 275 | bb.note("Fetching npm dependencies ...") | ||
| 276 | bb.utils.remove(os.path.join(srctree, "node_modules"), recurse=True) | ||
| 277 | fetcher = bb.fetch2.Fetch([url_local], d) | ||
| 278 | fetcher.download() | ||
| 279 | fetcher.unpack(srctree) | ||
| 280 | |||
| 281 | bb.note("Handling licences ...") | ||
| 282 | (licfiles, packages, fallback_licenses) = self._handle_licenses(srctree, shrinkwrap_file, dev) | ||
| 283 | licvalues = match_licenses(licfiles, srctree, d) | ||
| 284 | split_pkg_licenses(licvalues, packages, lines_after, fallback_licenses) | ||
| 285 | fallback_licenses_flat = [lic for sublist in fallback_licenses.values() for lic in sublist] | ||
| 286 | extravalues["LIC_FILES_CHKSUM"] = generate_common_licenses_chksums(fallback_licenses_flat, d) | ||
| 287 | extravalues["LICENSE"] = fallback_licenses_flat | ||
| 288 | |||
| 289 | classes.append("npm") | ||
| 290 | handled.append("buildsystem") | ||
| 291 | |||
| 292 | # Check if package has peer dependencies and inform the user | ||
| 293 | self._handle_peer_dependency(shrinkwrap_file) | ||
| 294 | |||
| 295 | return True | ||
| 296 | |||
| 297 | def register_recipe_handlers(handlers): | ||
| 298 | """Register the npm handler""" | ||
| 299 | handlers.append((NpmRecipeHandler(), 60)) | ||
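A detail of _handle_dependency above worth spelling out: each npm dependency becomes its own binary package, named by splitting the install directory on 'node_modules/' and joining the converted components as a ${PN} suffix. A self-contained sketch, with a simplified stand-in for bb.fetch2.npm.npm_package (the real conversion may differ in detail):

    def npm_package(name):
        # simplified stand-in for bb.fetch2.npm.npm_package (assumption)
        return name.strip('/').replace('@', '').replace('/', '-').lower()

    def package_name(destdir):
        deptree = destdir.split('node_modules/')
        return '${PN}' + '-'.join(npm_package(dep) for dep in deptree)

    print(package_name('node_modules/array-flatten'))  # ${PN}-array-flatten
    print(package_name('node_modules/@babel/core'))    # ${PN}-babel-core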
diff --git a/scripts/lib/recipetool/edit.py b/scripts/lib/recipetool/edit.py deleted file mode 100644 index d5b980a1c0..0000000000 --- a/scripts/lib/recipetool/edit.py +++ /dev/null | |||
| @@ -1,44 +0,0 @@ | |||
| 1 | # Recipe creation tool - edit plugin | ||
| 2 | # | ||
| 3 | # This sub-command edits the recipe and appends for the specified target | ||
| 4 | # | ||
| 5 | # Example: recipetool edit busybox | ||
| 6 | # | ||
| 7 | # Copyright (C) 2018 Mentor Graphics Corporation | ||
| 8 | # | ||
| 9 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 10 | # | ||
| 11 | |||
| 12 | import argparse | ||
| 13 | import errno | ||
| 14 | import logging | ||
| 15 | import os | ||
| 16 | import re | ||
| 17 | import subprocess | ||
| 18 | import sys | ||
| 19 | import scriptutils | ||
| 20 | |||
| 21 | |||
| 22 | logger = logging.getLogger('recipetool') | ||
| 23 | tinfoil = None | ||
| 24 | |||
| 25 | |||
| 26 | def tinfoil_init(instance): | ||
| 27 | global tinfoil | ||
| 28 | tinfoil = instance | ||
| 29 | |||
| 30 | |||
| 31 | def edit(args): | ||
| 32 | import oe.recipeutils | ||
| 33 | |||
| 34 | recipe_path = tinfoil.get_recipe_file(args.target) | ||
| 35 | appends = tinfoil.get_file_appends(recipe_path) | ||
| 36 | |||
| 37 | return scriptutils.run_editor([recipe_path] + list(appends), logger) | ||
| 38 | |||
| 39 | |||
| 40 | def register_commands(subparsers): | ||
| 41 | parser = subparsers.add_parser('edit', | ||
| 42 | help='Edit the recipe and appends for the specified target. This obeys $VISUAL if set, otherwise $EDITOR, otherwise vi.') | ||
| 43 | parser.add_argument('target', help='Target recipe/provide to edit') | ||
| 44 | parser.set_defaults(func=edit, parserecipes=True) | ||
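The help text above promises a $VISUAL / $EDITOR / vi fallback; the launching itself is delegated to scriptutils.run_editor. A minimal sketch of that documented behaviour (the real helper also deals with logging and error reporting):

    import os
    import shlex
    import subprocess

    def run_editor_sketch(files):
        # $VISUAL wins, then $EDITOR, then plain vi
        editor = os.environ.get('VISUAL') or os.environ.get('EDITOR') or 'vi'
        return subprocess.call(shlex.split(editor) + list(files))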
diff --git a/scripts/lib/recipetool/licenses.csv b/scripts/lib/recipetool/licenses.csv deleted file mode 100644 index 80851111b3..0000000000 --- a/scripts/lib/recipetool/licenses.csv +++ /dev/null | |||
| @@ -1,37 +0,0 @@ | |||
| 1 | 0636e73ff0215e8d672dc4c32c317bb3,GPL-2.0-only | ||
| 2 | 12f884d2ae1ff87c09e5b7ccc2c4ca7e,GPL-2.0-only | ||
| 3 | 18810669f13b87348459e611d31ab760,GPL-2.0-only | ||
| 4 | 252890d9eee26aab7b432e8b8a616475,LGPL-2.0-only | ||
| 5 | 2d5025d4aa3495befef8f17206a5b0a1,LGPL-2.1-only | ||
| 6 | 3214f080875748938ba060314b4f727d,LGPL-2.0-only | ||
| 7 | 385c55653886acac3821999a3ccd17b3,Artistic-1.0 | GPL-2.0-only | ||
| 8 | 393a5ca445f6965873eca0259a17f833,GPL-2.0-only | ||
| 9 | 3b83ef96387f14655fc854ddc3c6bd57,Apache-2.0 | ||
| 10 | 3bf50002aefd002f49e7bb854063f7e7,LGPL-2.0-only | ||
| 11 | 4325afd396febcb659c36b49533135d4,GPL-2.0-only | ||
| 12 | 4fbd65380cdd255951079008b364516c,LGPL-2.1-only | ||
| 13 | 54c7042be62e169199200bc6477f04d1,BSD-3-Clause | ||
| 14 | 55ca817ccb7d5b5b66355690e9abc605,LGPL-2.0-only | ||
| 15 | 59530bdf33659b29e73d4adb9f9f6552,GPL-2.0-only | ||
| 16 | 5f30f0716dfdd0d91eb439ebec522ec2,LGPL-2.0-only | ||
| 17 | 6a6a8e020838b23406c81b19c1d46df6,LGPL-3.0-only | ||
| 18 | 751419260aa954499f7abaabaa882bbe,GPL-2.0-only | ||
| 19 | 7fbc338309ac38fefcd64b04bb903e34,LGPL-2.1-only | ||
| 20 | 8ca43cbc842c2336e835926c2166c28b,GPL-2.0-only | ||
| 21 | 94d55d512a9ba36caa9b7df079bae19f,GPL-2.0-only | ||
| 22 | 9ac2e7cff1ddaf48b6eab6028f23ef88,GPL-2.0-only | ||
| 23 | 9f604d8a4f8e74f4f5140845a21b6674,LGPL-2.0-only | ||
| 24 | a6f89e2100d9b6cdffcea4f398e37343,LGPL-2.1-only | ||
| 25 | b234ee4d69f5fce4486a80fdaf4a4263,GPL-2.0-only | ||
| 26 | bbb461211a33b134d42ed5ee802b37ff,LGPL-2.1-only | ||
| 27 | bfe1f75d606912a4111c90743d6c7325,MPL-1.1-only | ||
| 28 | c93c0550bd3173f4504b2cbd8991e50b,GPL-2.0-only | ||
| 29 | d32239bcb673463ab874e80d47fae504,GPL-3.0-only | ||
| 30 | d7810fab7487fb0aad327b76f1be7cd7,GPL-2.0-only | ||
| 31 | d8045f3b8f929c1cb29a1e3fd737b499,LGPL-2.1-only | ||
| 32 | db979804f025cf55aabec7129cb671ed,LGPL-2.0-only | ||
| 33 | eb723b61539feef013de476e68b5c50a,GPL-2.0-only | ||
| 34 | ebb5c50ab7cab4baeffba14977030c07,GPL-2.0-only | ||
| 35 | f27defe1e96c2e1ecd4e0c9be8967949,GPL-3.0-only | ||
| 36 | fad9b3332be894bab9bc501572864b29,LGPL-2.1-only | ||
| 37 | fbc093901857fcd118f065f900982c24,LGPL-2.1-only | ||
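Each row above maps the md5 checksum of a well-known license text to its SPDX identifier. A sketch of how such a table can be consumed (illustrative; the real matching logic lives in recipetool's create.py):

    import csv
    import hashlib

    def guess_license(licfile, table='licenses.csv'):
        with open(licfile, 'rb') as f:
            md5 = hashlib.md5(f.read()).hexdigest()
        with open(table, newline='') as f:
            for row in csv.reader(f):
                if row and row[0] == md5:
                    return row[1]
        return None

    # The classic GPLv2 COPYING text hashes to
    # b234ee4d69f5fce4486a80fdaf4a4263, which the table maps to GPL-2.0-only.
    print(guess_license('COPYING'))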
diff --git a/scripts/lib/recipetool/newappend.py b/scripts/lib/recipetool/newappend.py deleted file mode 100644 index 08e2474dc4..0000000000 --- a/scripts/lib/recipetool/newappend.py +++ /dev/null | |||
| @@ -1,79 +0,0 @@ | |||
| 1 | # Recipe creation tool - newappend plugin | ||
| 2 | # | ||
| 3 | # This sub-command creates a bbappend for the specified target and prints the | ||
| 4 | # path to the bbappend. | ||
| 5 | # | ||
| 6 | # Example: recipetool newappend meta-mylayer busybox | ||
| 7 | # | ||
| 8 | # Copyright (C) 2015 Christopher Larson <kergoth@gmail.com> | ||
| 9 | # | ||
| 10 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 11 | # | ||
| 12 | |||
| 13 | import argparse | ||
| 14 | import errno | ||
| 15 | import logging | ||
| 16 | import os | ||
| 17 | import re | ||
| 18 | import subprocess | ||
| 19 | import sys | ||
| 20 | import scriptutils | ||
| 21 | import bb | ||
| 21 | |||
| 22 | |||
| 23 | logger = logging.getLogger('recipetool') | ||
| 24 | tinfoil = None | ||
| 25 | |||
| 26 | |||
| 27 | def tinfoil_init(instance): | ||
| 28 | global tinfoil | ||
| 29 | tinfoil = instance | ||
| 30 | |||
| 31 | |||
| 32 | def layer(layerpath): | ||
| 33 | if not os.path.exists(os.path.join(layerpath, 'conf', 'layer.conf')): | ||
| 34 | raise argparse.ArgumentTypeError('{0!r} must be a path to a valid layer'.format(layerpath)) | ||
| 35 | return layerpath | ||
| 36 | |||
| 37 | |||
| 38 | def newappend(args): | ||
| 39 | import oe.recipeutils | ||
| 40 | |||
| 41 | recipe_path = tinfoil.get_recipe_file(args.target) | ||
| 42 | |||
| 43 | rd = tinfoil.config_data.createCopy() | ||
| 44 | rd.setVar('FILE', recipe_path) | ||
| 45 | append_path, path_ok = oe.recipeutils.get_bbappend_path(rd, args.destlayer, args.wildcard_version) | ||
| 46 | if not append_path: | ||
| 47 | logger.error('Unable to determine layer directory containing %s', recipe_path) | ||
| 48 | return 1 | ||
| 49 | |||
| 50 | if not path_ok: | ||
| 51 | logger.warning('Unable to determine correct subdirectory path for bbappend file - check that what %s adds to BBFILES also matches .bbappend files. Using %s for now, but until you fix this the bbappend will not be applied.', os.path.join(args.destlayer, 'conf', 'layer.conf'), os.path.dirname(append_path)) | ||
| 52 | |||
| 53 | layerdirs = [os.path.abspath(layerdir) for layerdir in rd.getVar('BBLAYERS').split()] | ||
| 54 | if os.path.abspath(args.destlayer) not in layerdirs: | ||
| 55 | logger.warning('Specified layer is not currently enabled in bblayers.conf, you will need to add it before this bbappend will be active') | ||
| 56 | |||
| 57 | if not os.path.exists(append_path): | ||
| 58 | bb.utils.mkdirhier(os.path.dirname(append_path)) | ||
| 59 | |||
| 60 | try: | ||
| 61 | open(append_path, 'a').close() | ||
| 62 | except (OSError, IOError) as exc: | ||
| 63 | logger.critical(str(exc)) | ||
| 64 | return 1 | ||
| 65 | |||
| 66 | if args.edit: | ||
| 67 | return scriptutils.run_editor([append_path, recipe_path], logger) | ||
| 68 | else: | ||
| 69 | print(append_path) | ||
| 70 | |||
| 71 | |||
| 72 | def register_commands(subparsers): | ||
| 73 | parser = subparsers.add_parser('newappend', | ||
| 74 | help='Create a bbappend for the specified target in the specified layer') | ||
| 75 | parser.add_argument('-e', '--edit', help='Edit the new append. This obeys $VISUAL if set, otherwise $EDITOR, otherwise vi.', action='store_true') | ||
| 76 | parser.add_argument('-w', '--wildcard-version', help='Use wildcard to make the bbappend apply to any recipe version', action='store_true') | ||
| 77 | parser.add_argument('destlayer', help='Base directory of the destination layer to write the bbappend to', type=layer) | ||
| 78 | parser.add_argument('target', help='Target recipe/provide to append') | ||
| 79 | parser.set_defaults(func=newappend, parserecipes=True) | ||
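oe.recipeutils.get_bbappend_path derives the bbappend file name from the recipe file name, substituting '%' for the version when -w/--wildcard-version is given. A simplified sketch of that naming convention (the subdirectory layout inside the layer is ignored here):

    import os

    def bbappend_name(recipe_path, wildcard_version=False):
        stem = os.path.splitext(os.path.basename(recipe_path))[0]  # busybox_1.36.1
        if wildcard_version and '_' in stem:
            stem = stem.rsplit('_', 1)[0] + '_%'                   # busybox_%
        return stem + '.bbappend'

    print(bbappend_name('recipes-core/busybox/busybox_1.36.1.bb', True))
    # -> busybox_%.bbappend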
diff --git a/scripts/lib/recipetool/setvar.py b/scripts/lib/recipetool/setvar.py deleted file mode 100644 index b5ad335cae..0000000000 --- a/scripts/lib/recipetool/setvar.py +++ /dev/null | |||
| @@ -1,66 +0,0 @@ | |||
| 1 | # Recipe creation tool - set variable plugin | ||
| 2 | # | ||
| 3 | # Copyright (C) 2015 Intel Corporation | ||
| 4 | # | ||
| 5 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 6 | # | ||
| 7 | |||
| 8 | import sys | ||
| 9 | import os | ||
| 10 | import argparse | ||
| 11 | import glob | ||
| 12 | import fnmatch | ||
| 13 | import re | ||
| 14 | import logging | ||
| 15 | import scriptutils | ||
| 16 | |||
| 17 | logger = logging.getLogger('recipetool') | ||
| 18 | |||
| 19 | tinfoil = None | ||
| 20 | plugins = None | ||
| 21 | |||
| 22 | def tinfoil_init(instance): | ||
| 23 | global tinfoil | ||
| 24 | tinfoil = instance | ||
| 25 | |||
| 26 | def setvar(args): | ||
| 27 | import oe.recipeutils | ||
| 28 | |||
| 29 | if args.delete: | ||
| 30 | if args.value: | ||
| 31 | logger.error('-D/--delete and specifying a value are mutually exclusive') | ||
| 32 | return 1 | ||
| 33 | value = None | ||
| 34 | else: | ||
| 35 | if args.value is None: | ||
| 36 | logger.error('You must specify a value if not using -D/--delete') | ||
| 37 | return 1 | ||
| 38 | value = args.value | ||
| 39 | varvalues = {args.varname: value} | ||
| 40 | |||
| 41 | if args.recipe_only: | ||
| 42 | patches = [oe.recipeutils.patch_recipe_file(args.recipefile, varvalues, patch=args.patch)] | ||
| 43 | else: | ||
| 44 | rd = tinfoil.parse_recipe_file(args.recipefile, False) | ||
| 45 | if not rd: | ||
| 46 | return 1 | ||
| 47 | patches = oe.recipeutils.patch_recipe(rd, args.recipefile, varvalues, patch=args.patch) | ||
| 48 | if args.patch: | ||
| 49 | for patch in patches: | ||
| 50 | for line in patch: | ||
| 51 | sys.stdout.write(line) | ||
| 52 | tinfoil.modified_files() | ||
| 53 | return 0 | ||
| 54 | |||
| 55 | |||
| 56 | def register_commands(subparsers): | ||
| 57 | parser_setvar = subparsers.add_parser('setvar', | ||
| 58 | help='Set a variable within a recipe', | ||
| 59 | description='Adds/updates the value that a variable is set to in a recipe') | ||
| 60 | parser_setvar.add_argument('recipefile', help='Recipe file to update') | ||
| 61 | parser_setvar.add_argument('varname', help='Variable name to set') | ||
| 62 | parser_setvar.add_argument('value', nargs='?', help='New value to set the variable to') | ||
| 63 | parser_setvar.add_argument('--recipe-only', '-r', help='Only set the variable in the recipe file itself, not in any include file where it is currently set', action='store_true') | ||
| 64 | parser_setvar.add_argument('--patch', '-p', help='Create a patch to make the change instead of modifying the recipe', action='store_true') | ||
| 65 | parser_setvar.add_argument('--delete', '-D', help='Delete the specified variable instead of setting it', action='store_true') | ||
| 66 | parser_setvar.set_defaults(func=setvar) | ||
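The -D/value conflict that setvar() checks by hand could equally be expressed with an argparse mutually exclusive group; a sketch of that alternative wiring (illustrative only, not how recipetool actually registers its commands):

    import argparse

    parser = argparse.ArgumentParser(prog='setvar')
    parser.add_argument('recipefile')
    parser.add_argument('varname')
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument('value', nargs='?')  # positionals need nargs='?' in a group
    group.add_argument('-D', '--delete', action='store_true')

    args = parser.parse_args(['zlib_1.3.bb', 'LICENSE', 'Zlib'])
    print(args.value, args.delete)  # Zlib False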
