Diffstat (limited to 'scripts/lib/recipetool')
-rw-r--r--  scripts/lib/recipetool/append.py                    80
-rw-r--r--  scripts/lib/recipetool/create.py                   368
-rw-r--r--  scripts/lib/recipetool/create_buildsys.py           40
-rw-r--r--  scripts/lib/recipetool/create_buildsys_python.py  1090
-rw-r--r--  scripts/lib/recipetool/create_go.py                174
-rw-r--r--  scripts/lib/recipetool/create_npm.py               139
-rw-r--r--  scripts/lib/recipetool/licenses.csv                 37
-rw-r--r--  scripts/lib/recipetool/setvar.py                     1
8 files changed, 1169 insertions(+), 760 deletions(-)
diff --git a/scripts/lib/recipetool/append.py b/scripts/lib/recipetool/append.py
index 88ed8c5f01..041d79f162 100644
--- a/scripts/lib/recipetool/append.py
+++ b/scripts/lib/recipetool/append.py
@@ -18,6 +18,7 @@ import shutil
 import scriptutils
 import errno
 from collections import defaultdict
+import difflib
 
 logger = logging.getLogger('recipetool')
 
@@ -100,7 +101,7 @@ def determine_file_source(targetpath, rd):
     import oe.recipeutils
 
     # See if it's in do_install for the recipe
-    workdir = rd.getVar('WORKDIR')
+    unpackdir = rd.getVar('UNPACKDIR')
     src_uri = rd.getVar('SRC_URI')
     srcfile = ''
     modpatches = []
@@ -112,9 +113,9 @@ def determine_file_source(targetpath, rd):
         if not srcpath.startswith('/'):
             # Handle non-absolute path
             srcpath = os.path.abspath(os.path.join(rd.getVarFlag('do_install', 'dirs').split()[-1], srcpath))
-        if srcpath.startswith(workdir):
+        if srcpath.startswith(unpackdir):
             # OK, now we have the source file name, look for it in SRC_URI
-            workdirfile = os.path.relpath(srcpath, workdir)
+            workdirfile = os.path.relpath(srcpath, unpackdir)
             # FIXME this is where we ought to have some code in the fetcher, because this is naive
             for item in src_uri.split():
                 localpath = bb.fetch2.localpath(item, rd)
@@ -299,7 +300,10 @@ def appendfile(args):
             if st.st_mode & stat.S_IXUSR:
                 perms = '0755'
             install = {args.newfile: (args.targetpath, perms)}
-            oe.recipeutils.bbappend_recipe(rd, args.destlayer, {args.newfile: sourcepath}, install, wildcardver=args.wildcard_version, machine=args.machine)
+            if sourcepath:
+                sourcepath = os.path.basename(sourcepath)
+            oe.recipeutils.bbappend_recipe(rd, args.destlayer, {args.newfile: {'newname' : sourcepath}}, install, wildcardver=args.wildcard_version, machine=args.machine)
+            tinfoil.modified_files()
             return 0
         else:
             if alternative_pns:
@@ -313,7 +317,7 @@ def appendsrc(args, files, rd, extralines=None):
     import oe.recipeutils
 
     srcdir = rd.getVar('S')
-    workdir = rd.getVar('WORKDIR')
+    unpackdir = rd.getVar('UNPACKDIR')
 
     import bb.fetch
     simplified = {}
@@ -327,35 +331,57 @@ def appendsrc(args, files, rd, extralines=None):
 
     copyfiles = {}
     extralines = extralines or []
+    params = []
     for newfile, srcfile in files.items():
         src_destdir = os.path.dirname(srcfile)
         if not args.use_workdir:
             if rd.getVar('S') == rd.getVar('STAGING_KERNEL_DIR'):
-                srcdir = os.path.join(workdir, 'git')
+                srcdir = os.path.join(unpackdir, rd.getVar('BB_GIT_DEFAULT_DESTSUFFIX'))
                 if not bb.data.inherits_class('kernel-yocto', rd):
-                    logger.warning('S == STAGING_KERNEL_DIR and non-kernel-yocto, unable to determine path to srcdir, defaulting to ${WORKDIR}/git')
+                    logger.warning('S == STAGING_KERNEL_DIR and non-kernel-yocto, unable to determine path to srcdir, defaulting to ${UNPACKDIR}/${BB_GIT_DEFAULT_DESTSUFFIX}')
-            src_destdir = os.path.join(os.path.relpath(srcdir, workdir), src_destdir)
+            src_destdir = os.path.join(os.path.relpath(srcdir, unpackdir), src_destdir)
         src_destdir = os.path.normpath(src_destdir)
 
-        source_uri = 'file://{0}'.format(os.path.basename(srcfile))
         if src_destdir and src_destdir != '.':
-            source_uri += ';subdir={0}'.format(src_destdir)
-
-        simple = bb.fetch.URI(source_uri)
-        simple.params = {}
-        simple_str = str(simple)
-        if simple_str in simplified:
-            existing = simplified[simple_str]
-            if source_uri != existing:
-                logger.warning('{0!r} is already in SRC_URI, with different parameters: {1!r}, not adding'.format(source_uri, existing))
-            else:
-                logger.warning('{0!r} is already in SRC_URI, not adding'.format(source_uri))
+            params.append({'subdir': src_destdir})
         else:
-            extralines.append('SRC_URI += {0}'.format(source_uri))
-        copyfiles[newfile] = srcfile
-
-    oe.recipeutils.bbappend_recipe(rd, args.destlayer, copyfiles, None, wildcardver=args.wildcard_version, machine=args.machine, extralines=extralines)
+            params.append({})
+
+        copyfiles[newfile] = {'newname' : os.path.basename(srcfile)}
+
+    dry_run_output = None
+    dry_run_outdir = None
+    if args.dry_run:
+        import tempfile
+        dry_run_output = tempfile.TemporaryDirectory(prefix='devtool')
+        dry_run_outdir = dry_run_output.name
+
+    appendfile, _ = oe.recipeutils.bbappend_recipe(rd, args.destlayer, copyfiles, None, wildcardver=args.wildcard_version, machine=args.machine, extralines=extralines, params=params,
+                                                   redirect_output=dry_run_outdir, update_original_recipe=args.update_recipe)
+    if not appendfile:
+        return
+    if args.dry_run:
+        output = ''
+        appendfilename = os.path.basename(appendfile)
+        newappendfile = appendfile
+        if appendfile and os.path.exists(appendfile):
+            with open(appendfile, 'r') as f:
+                oldlines = f.readlines()
+        else:
+            appendfile = '/dev/null'
+            oldlines = []
+
+        with open(os.path.join(dry_run_outdir, appendfilename), 'r') as f:
+            newlines = f.readlines()
+        diff = difflib.unified_diff(oldlines, newlines, appendfile, newappendfile)
+        difflines = list(diff)
+        if difflines:
+            output += ''.join(difflines)
+        if output:
+            logger.info('Diff of changed files:\n%s' % output)
+        else:
+            logger.info('No changed files')
+    tinfoil.modified_files()
 
 def appendsrcfiles(parser, args):
     recipedata = _parse_recipe(args.recipe, tinfoil)
@@ -435,6 +461,8 @@ def register_commands(subparsers):
                                    help='Create/update a bbappend to add or replace source files',
                                    description='Creates a bbappend (or updates an existing one) to add or replace the specified file in the recipe sources, either those in WORKDIR or those in the source tree. This command lets you specify multiple files with a destination directory, so cannot specify the destination filename. See the `appendsrcfile` command for the other behavior.')
     parser.add_argument('-D', '--destdir', help='Destination directory (relative to S or WORKDIR, defaults to ".")', default='', type=destination_path)
+    parser.add_argument('-u', '--update-recipe', help='Update recipe instead of creating (or updating) a bbapend file. DESTLAYER must contains the recipe to update', action='store_true')
+    parser.add_argument('-n', '--dry-run', help='Dry run mode', action='store_true')
     parser.add_argument('files', nargs='+', metavar='FILE', help='File(s) to be added to the recipe sources (WORKDIR or S)', type=existing_path)
     parser.set_defaults(func=lambda a: appendsrcfiles(parser, a), parserecipes=True)
 
@@ -442,6 +470,8 @@ def register_commands(subparsers):
                                    parents=[common_src],
                                    help='Create/update a bbappend to add or replace a source file',
                                    description='Creates a bbappend (or updates an existing one) to add or replace the specified files in the recipe sources, either those in WORKDIR or those in the source tree. This command lets you specify the destination filename, not just destination directory, but only works for one file. See the `appendsrcfiles` command for the other behavior.')
+    parser.add_argument('-u', '--update-recipe', help='Update recipe instead of creating (or updating) a bbapend file. DESTLAYER must contains the recipe to update', action='store_true')
+    parser.add_argument('-n', '--dry-run', help='Dry run mode', action='store_true')
     parser.add_argument('file', metavar='FILE', help='File to be added to the recipe sources (WORKDIR or S)', type=existing_path)
     parser.add_argument('destfile', metavar='DESTFILE', nargs='?', help='Destination path (relative to S or WORKDIR, optional)', type=destination_path)
     parser.set_defaults(func=lambda a: appendsrcfile(parser, a), parserecipes=True)
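
The --dry-run path added to appendsrc() above writes the generated bbappend into a temporary
directory and prints a unified diff instead of modifying the destination layer. As a rough
standalone sketch of that preview step (file paths here are hypothetical; only the Python
standard library is used):

    import difflib
    import os

    def preview_append(existing_append, generated_append):
        # Diff against the current .bbappend if present, otherwise against /dev/null
        if os.path.exists(existing_append):
            with open(existing_append) as f:
                oldlines = f.readlines()
            fromfile = existing_append
        else:
            oldlines = []
            fromfile = '/dev/null'
        with open(generated_append) as f:
            newlines = f.readlines()
        # Same stdlib call the dry-run code uses: difflib.unified_diff() over line lists
        output = ''.join(difflib.unified_diff(oldlines, newlines, fromfile, existing_append))
        print(output if output else 'No changed files')
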
diff --git a/scripts/lib/recipetool/create.py b/scripts/lib/recipetool/create.py
index 4f6e01c639..ef0ba974a9 100644
--- a/scripts/lib/recipetool/create.py
+++ b/scripts/lib/recipetool/create.py
@@ -18,6 +18,8 @@ from urllib.parse import urlparse, urldefrag, urlsplit
 import hashlib
 import bb.fetch2
 logger = logging.getLogger('recipetool')
+from oe.license import tidy_licenses
+from oe.license_finder import find_licenses
 
 tinfoil = None
 plugins = None
@@ -389,9 +391,6 @@ def reformat_git_uri(uri):
                 parms.update({('protocol', 'ssh')})
         elif (scheme == "http" or scheme == 'https' or scheme == 'ssh') and not ('protocol' in parms):
             parms.update({('protocol', scheme)})
-        # We assume 'master' branch if not set
-        if not 'branch' in parms:
-            parms.update({('branch', 'master')})
         # Always append 'git://'
         fUrl = bb.fetch2.encodeurl(('git', host, path, user, pswd, parms))
         return fUrl
@@ -426,6 +425,36 @@ def create_recipe(args):
     storeTagName = ''
     pv_srcpv = False
 
+    handled = []
+    classes = []
+
+    # Find all plugins that want to register handlers
+    logger.debug('Loading recipe handlers')
+    raw_handlers = []
+    for plugin in plugins:
+        if hasattr(plugin, 'register_recipe_handlers'):
+            plugin.register_recipe_handlers(raw_handlers)
+    # Sort handlers by priority
+    handlers = []
+    for i, handler in enumerate(raw_handlers):
+        if isinstance(handler, tuple):
+            handlers.append((handler[0], handler[1], i))
+        else:
+            handlers.append((handler, 0, i))
+    handlers.sort(key=lambda item: (item[1], -item[2]), reverse=True)
+    for handler, priority, _ in handlers:
+        logger.debug('Handler: %s (priority %d)' % (handler.__class__.__name__, priority))
+        setattr(handler, '_devtool', args.devtool)
+    handlers = [item[0] for item in handlers]
+
+    fetchuri = None
+    for handler in handlers:
+        if hasattr(handler, 'process_url'):
+            ret = handler.process_url(args, classes, handled, extravalues)
+            if 'url' in handled and ret:
+                fetchuri = ret
+                break
+
     if os.path.isfile(source):
         source = 'file://%s' % os.path.abspath(source)
 
@@ -434,11 +463,12 @@ def create_recipe(args):
         if re.match(r'https?://github.com/[^/]+/[^/]+/archive/.+(\.tar\..*|\.zip)$', source):
             logger.warning('github archive files are not guaranteed to be stable and may be re-generated over time. If the latter occurs, the checksums will likely change and the recipe will fail at do_fetch. It is recommended that you point to an actual commit or tag in the repository instead (using the repository URL in conjunction with the -S/--srcrev option).')
         # Fetch a URL
-        fetchuri = reformat_git_uri(urldefrag(source)[0])
+        if not fetchuri:
+            fetchuri = reformat_git_uri(urldefrag(source)[0])
         if args.binary:
             # Assume the archive contains the directory structure verbatim
             # so we need to extract to a subdirectory
-            fetchuri += ';subdir=${BP}'
+            fetchuri += ';subdir=${BPN}'
         srcuri = fetchuri
         rev_re = re.compile(';rev=([^;]+)')
         res = rev_re.search(srcuri)
@@ -481,6 +511,9 @@ def create_recipe(args):
             storeTagName = params['tag']
             params['nobranch'] = '1'
             del params['tag']
+        # Assume 'master' branch if not set
+        if scheme in ['git', 'gitsm'] and 'branch' not in params and 'nobranch' not in params:
+            params['branch'] = 'master'
         fetchuri = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params))
 
         tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR')
@@ -497,7 +530,7 @@ def create_recipe(args):
         if ftmpdir and args.keep_temp:
             logger.info('Fetch temp directory is %s' % ftmpdir)
 
-        dirlist = scriptutils.filter_src_subdirs(srctree)
+        dirlist = os.listdir(srctree)
         logger.debug('Directory listing (excluding filtered out):\n %s' % '\n '.join(dirlist))
         if len(dirlist) == 1:
             singleitem = os.path.join(srctree, dirlist[0])
@@ -530,10 +563,9 @@ def create_recipe(args):
             # Remove HEAD reference point and drop remote prefix
             get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')]
             if 'master' in get_branch:
-                # If it is master, we do not need to append 'branch=master' as this is default.
                 # Even with the case where get_branch has multiple objects, if 'master' is one
                 # of them, we should default take from 'master'
-                srcbranch = ''
+                srcbranch = 'master'
             elif len(get_branch) == 1:
                 # If 'master' isn't in get_branch and get_branch contains only ONE object, then store result into 'srcbranch'
                 srcbranch = get_branch[0]
@@ -546,8 +578,8 @@ def create_recipe(args):
         # Since we might have a value in srcbranch, we need to
         # recontruct the srcuri to include 'branch' in params.
         scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(srcuri)
-        if srcbranch:
-            params['branch'] = srcbranch
+        if scheme in ['git', 'gitsm']:
+            params['branch'] = srcbranch or 'master'
 
         if storeTagName and scheme in ['git', 'gitsm']:
             # Check srcrev using tag and check validity of the tag
@@ -606,8 +638,7 @@ def create_recipe(args):
             splitline = line.split()
             if len(splitline) > 1:
                 if splitline[0] == 'origin' and scriptutils.is_src_url(splitline[1]):
-                    srcuri = reformat_git_uri(splitline[1])
+                    srcuri = reformat_git_uri(splitline[1]) + ';branch=master'
-                    srcsubdir = 'git'
                     break
 
     if args.src_subdir:
@@ -639,8 +670,6 @@ def create_recipe(args):
     # We'll come back and replace this later in handle_license_vars()
     lines_before.append('##LICENSE_PLACEHOLDER##')
 
-    handled = []
-    classes = []
 
     # FIXME This is kind of a hack, we probably ought to be using bitbake to do this
     pn = None
@@ -678,8 +707,10 @@ def create_recipe(args):
     if not srcuri:
         lines_before.append('# No information for SRC_URI yet (only an external source tree was specified)')
     lines_before.append('SRC_URI = "%s"' % srcuri)
+    shown_checksums = ["%ssum" % s for s in bb.fetch2.SHOWN_CHECKSUM_LIST]
     for key, value in sorted(checksums.items()):
-        lines_before.append('SRC_URI[%s] = "%s"' % (key, value))
+        if key in shown_checksums:
+            lines_before.append('SRC_URI[%s] = "%s"' % (key, value))
     if srcuri and supports_srcrev(srcuri):
         lines_before.append('')
         lines_before.append('# Modify these as desired')
@@ -691,7 +722,7 @@ def create_recipe(args):
             srcpvprefix = 'svnr'
         else:
             srcpvprefix = scheme
-        lines_before.append('PV = "%s+%s${SRCPV}"' % (realpv or '1.0', srcpvprefix))
+        lines_before.append('PV = "%s+%s"' % (realpv or '1.0', srcpvprefix))
         pv_srcpv = True
         if not args.autorev and srcrev == '${AUTOREV}':
             if os.path.exists(os.path.join(srctree, '.git')):
@@ -705,7 +736,7 @@ def create_recipe(args):
     if srcsubdir and not args.binary:
         # (for binary packages we explicitly specify subdir= when fetching to
         # match the default value of S, so we don't need to set it in that case)
-        lines_before.append('S = "${WORKDIR}/%s"' % srcsubdir)
+        lines_before.append('S = "${UNPACKDIR}/%s"' % srcsubdir)
     lines_before.append('')
 
711 if pkgarch: 742 if pkgarch:
@@ -719,25 +750,6 @@ def create_recipe(args):
719 if args.npm_dev: 750 if args.npm_dev:
720 extravalues['NPM_INSTALL_DEV'] = 1 751 extravalues['NPM_INSTALL_DEV'] = 1
721 752
722 # Find all plugins that want to register handlers
723 logger.debug('Loading recipe handlers')
724 raw_handlers = []
725 for plugin in plugins:
726 if hasattr(plugin, 'register_recipe_handlers'):
727 plugin.register_recipe_handlers(raw_handlers)
728 # Sort handlers by priority
729 handlers = []
730 for i, handler in enumerate(raw_handlers):
731 if isinstance(handler, tuple):
732 handlers.append((handler[0], handler[1], i))
733 else:
734 handlers.append((handler, 0, i))
735 handlers.sort(key=lambda item: (item[1], -item[2]), reverse=True)
736 for handler, priority, _ in handlers:
737 logger.debug('Handler: %s (priority %d)' % (handler.__class__.__name__, priority))
738 setattr(handler, '_devtool', args.devtool)
739 handlers = [item[0] for item in handlers]
740
741 # Apply the handlers 753 # Apply the handlers
742 if args.binary: 754 if args.binary:
743 classes.append('bin_package') 755 classes.append('bin_package')
@@ -746,9 +758,14 @@ def create_recipe(args):
     for handler in handlers:
         handler.process(srctree_use, classes, lines_before, lines_after, handled, extravalues)
 
+    # native and nativesdk classes are special and must be inherited last
+    # If present, put them at the end of the classes list
+    classes.sort(key=lambda c: c in ("native", "nativesdk"))
+
     extrafiles = extravalues.pop('extrafiles', {})
     extra_pn = extravalues.pop('PN', None)
     extra_pv = extravalues.pop('PV', None)
+    run_tasks = extravalues.pop('run_tasks', "").split()
 
     if extra_pv and not realpv:
         realpv = extra_pv
@@ -809,7 +826,8 @@ def create_recipe(args):
         extraoutdir = os.path.join(os.path.dirname(outfile), pn)
         bb.utils.mkdirhier(extraoutdir)
         for destfn, extrafile in extrafiles.items():
-            shutil.move(extrafile, os.path.join(extraoutdir, destfn))
+            fn = destfn.format(pn=pn, pv=realpv)
+            shutil.move(extrafile, os.path.join(extraoutdir, fn))
 
     lines = lines_before
     lines_before = []
@@ -824,7 +842,7 @@ def create_recipe(args):
                 line = line.replace(realpv, '${PV}')
             if pn:
                 line = line.replace(pn, '${BPN}')
-            if line == 'S = "${WORKDIR}/${BPN}-${PV}"':
+            if line == 'S = "${UNPACKDIR}/${BPN}-${PV}"' or 'tmp-recipetool-' in line:
                 skipblank = True
                 continue
         elif line.startswith('SRC_URI = '):
@@ -870,8 +888,10 @@ def create_recipe(args):
         outlines.append('')
     outlines.extend(lines_after)
 
+    outlines = [ line.rstrip('\n') +"\n" for line in outlines]
+
     if extravalues:
-        _, outlines = oe.recipeutils.patch_recipe_lines(outlines, extravalues, trailing_newline=False)
+        _, outlines = oe.recipeutils.patch_recipe_lines(outlines, extravalues, trailing_newline=True)
 
     if args.extract_to:
         scriptutils.git_convert_standalone_clone(srctree)
@@ -887,7 +907,7 @@ def create_recipe(args):
         log_info_cond('Source extracted to %s' % args.extract_to, args.devtool)
 
     if outfile == '-':
-        sys.stdout.write('\n'.join(outlines) + '\n')
+        sys.stdout.write(''.join(outlines) + '\n')
     else:
         with open(outfile, 'w') as f:
             lastline = None
@@ -895,9 +915,14 @@ def create_recipe(args):
                 if not lastline and not line:
                     # Skip extra blank lines
                     continue
-                f.write('%s\n' % line)
+                f.write('%s' % line)
                 lastline = line
         log_info_cond('Recipe %s has been created; further editing may be required to make it fully functional' % outfile, args.devtool)
+        tinfoil.modified_files()
+
+        for task in run_tasks:
+            logger.info("Running task %s" % task)
+            tinfoil.build_file_sync(outfile, task)
 
     if tempsrc:
         if args.keep_temp:
@@ -920,23 +945,32 @@ def split_value(value):
     else:
         return value
 
+def fixup_license(value):
+    # Ensure licenses with OR starts and ends with brackets
+    if '|' in value:
+        return '(' + value + ')'
+    return value
+
 def handle_license_vars(srctree, lines_before, handled, extravalues, d):
     lichandled = [x for x in handled if x[0] == 'license']
     if lichandled:
         # Someone else has already handled the license vars, just return their value
         return lichandled[0][1]
 
-    licvalues = guess_license(srctree, d)
+    licvalues = find_licenses(srctree, d)
     licenses = []
     lic_files_chksum = []
     lic_unknown = []
     lines = []
     if licvalues:
         for licvalue in licvalues:
-            if not licvalue[0] in licenses:
-                licenses.append(licvalue[0])
+            license = licvalue[0]
+            lics = tidy_licenses(fixup_license(license))
+            lics = [lic for lic in lics if lic not in licenses]
+            if len(lics):
+                licenses.extend(lics)
             lic_files_chksum.append('file://%s;md5=%s' % (licvalue[1], licvalue[2]))
-            if licvalue[0] == 'Unknown':
+            if license == 'Unknown':
                 lic_unknown.append(licvalue[1])
         if lic_unknown:
             lines.append('#')
@@ -945,9 +979,7 @@ def handle_license_vars(srctree, lines_before, handled, extravalues, d):
             for licfile in lic_unknown:
                 lines.append('# %s' % licfile)
 
-    extra_license = split_value(extravalues.pop('LICENSE', []))
-    if '&' in extra_license:
-        extra_license.remove('&')
+    extra_license = tidy_licenses(extravalues.pop('LICENSE', ''))
     if extra_license:
         if licenses == ['Unknown']:
             licenses = extra_license
@@ -988,7 +1020,7 @@ def handle_license_vars(srctree, lines_before, handled, extravalues, d):
         lines.append('# instead of &. If there is any doubt, check the accompanying documentation')
         lines.append('# to determine which situation is applicable.')
 
-    lines.append('LICENSE = "%s"' % ' & '.join(licenses))
+    lines.append('LICENSE = "%s"' % ' & '.join(sorted(licenses, key=str.casefold)))
     lines.append('LIC_FILES_CHKSUM = "%s"' % ' \\\n '.join(lic_files_chksum))
     lines.append('')
 
@@ -1005,228 +1037,15 @@ def handle_license_vars(srctree, lines_before, handled, extravalues, d):
     handled.append(('license', licvalues))
     return licvalues
 
-def get_license_md5sums(d, static_only=False, linenumbers=False):
-    import bb.utils
-    import csv
-    md5sums = {}
-    if not static_only and not linenumbers:
-        # Gather md5sums of license files in common license dir
-        commonlicdir = d.getVar('COMMON_LICENSE_DIR')
-        for fn in os.listdir(commonlicdir):
-            md5value = bb.utils.md5_file(os.path.join(commonlicdir, fn))
-            md5sums[md5value] = fn
-
-    # The following were extracted from common values in various recipes
-    # (double checking the license against the license file itself, not just
-    # the LICENSE value in the recipe)
-
-    # Read license md5sums from csv file
-    scripts_path = os.path.dirname(os.path.realpath(__file__))
-    for path in (d.getVar('BBPATH').split(':')
-            + [os.path.join(scripts_path, '..', '..')]):
-        csv_path = os.path.join(path, 'lib', 'recipetool', 'licenses.csv')
-        if os.path.isfile(csv_path):
-            with open(csv_path, newline='') as csv_file:
-                fieldnames = ['md5sum', 'license', 'beginline', 'endline', 'md5']
-                reader = csv.DictReader(csv_file, delimiter=',', fieldnames=fieldnames)
-                for row in reader:
-                    if linenumbers:
-                        md5sums[row['md5sum']] = (
-                            row['license'], row['beginline'], row['endline'], row['md5'])
-                    else:
-                        md5sums[row['md5sum']] = row['license']
-
-    return md5sums
-
-def crunch_license(licfile):
-    '''
-    Remove non-material text from a license file and then check
-    its md5sum against a known list. This works well for licenses
-    which contain a copyright statement, but is also a useful way
-    to handle people's insistence upon reformatting the license text
-    slightly (with no material difference to the text of the
-    license).
-    '''
-
-    import oe.utils
-
-    # Note: these are carefully constructed!
-    license_title_re = re.compile(r'^#*\(? *(This is )?([Tt]he )?.{0,15} ?[Ll]icen[sc]e( \(.{1,10}\))?\)?[:\.]? ?#*$')
-    license_statement_re = re.compile(r'^((This (project|software)|.{1,10}) is( free software)? (released|licen[sc]ed)|(Released|Licen[cs]ed)) under the .{1,10} [Ll]icen[sc]e:?$')
-    copyright_re = re.compile('^ *[#\*]* *(Modified work |MIT LICENSED )?Copyright ?(\([cC]\))? .*$')
-    disclaimer_re = re.compile('^ *\*? ?All [Rr]ights [Rr]eserved\.$')
-    email_re = re.compile('^.*<[\w\.-]*@[\w\.\-]*>$')
-    header_re = re.compile('^(\/\**!?)? ?[\-=\*]* ?(\*\/)?$')
-    tag_re = re.compile('^ *@?\(?([Ll]icense|MIT)\)?$')
-    url_re = re.compile('^ *[#\*]* *https?:\/\/[\w\.\/\-]+$')
-
-    crunched_md5sums = {}
-
-    # common licenses
-    crunched_md5sums['89f3bf322f30a1dcfe952e09945842f0'] = 'Apache-2.0'
-    crunched_md5sums['13b6fe3075f8f42f2270a748965bf3a1'] = 'BSD-0-Clause'
-    crunched_md5sums['ba87a7d7c20719c8df4b8beed9b78c43'] = 'BSD-2-Clause'
-    crunched_md5sums['7f8892c03b72de419c27be4ebfa253f8'] = 'BSD-3-Clause'
-    crunched_md5sums['21128c0790b23a8a9f9e260d5f6b3619'] = 'BSL-1.0'
-    crunched_md5sums['975742a59ae1b8abdea63a97121f49f4'] = 'EDL-1.0'
-    crunched_md5sums['5322cee4433d84fb3aafc9e253116447'] = 'EPL-1.0'
-    crunched_md5sums['6922352e87de080f42419bed93063754'] = 'EPL-2.0'
-    crunched_md5sums['793475baa22295cae1d3d4046a3a0ceb'] = 'GPL-2.0-only'
-    crunched_md5sums['ff9047f969b02c20f0559470df5cb433'] = 'GPL-2.0-or-later'
-    crunched_md5sums['ea6de5453fcadf534df246e6cdafadcd'] = 'GPL-3.0-only'
-    crunched_md5sums['b419257d4d153a6fde92ddf96acf5b67'] = 'GPL-3.0-or-later'
-    crunched_md5sums['228737f4c49d3ee75b8fb3706b090b84'] = 'ISC'
-    crunched_md5sums['c6a782e826ca4e85bf7f8b89435a677d'] = 'LGPL-2.0-only'
-    crunched_md5sums['32d8f758a066752f0db09bd7624b8090'] = 'LGPL-2.0-or-later'
-    crunched_md5sums['4820937eb198b4f84c52217ed230be33'] = 'LGPL-2.1-only'
-    crunched_md5sums['db13fe9f3a13af7adab2dc7a76f9e44a'] = 'LGPL-2.1-or-later'
-    crunched_md5sums['d7a0f2e4e0950e837ac3eabf5bd1d246'] = 'LGPL-3.0-only'
-    crunched_md5sums['abbf328e2b434f9153351f06b9f79d02'] = 'LGPL-3.0-or-later'
-    crunched_md5sums['eecf6429523cbc9693547cf2db790b5c'] = 'MIT'
-    crunched_md5sums['b218b0e94290b9b818c4be67c8e1cc82'] = 'MIT-0'
-    crunched_md5sums['ddc18131d6748374f0f35a621c245b49'] = 'Unlicense'
-    crunched_md5sums['51f9570ff32571fc0a443102285c5e33'] = 'WTFPL'
-
-    # The following two were gleaned from the "forever" npm package
-    crunched_md5sums['0a97f8e4cbaf889d6fa51f84b89a79f6'] = 'ISC'
-    # https://github.com/waffle-gl/waffle/blob/master/LICENSE.txt
-    crunched_md5sums['50fab24ce589d69af8964fdbfe414c60'] = 'BSD-2-Clause'
-    # https://github.com/spigwitmer/fakeds1963s/blob/master/LICENSE
-    crunched_md5sums['88a4355858a1433fea99fae34a44da88'] = 'GPLv2'
-    # http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt
-    crunched_md5sums['063b5c3ebb5f3aa4c85a2ed18a31fbe7'] = 'GPLv2'
-    # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv2.1
-    crunched_md5sums['7f5202f4d44ed15dcd4915f5210417d8'] = 'LGPLv2.1'
-    # unixODBC-2.3.4 COPYING
-    crunched_md5sums['3debde09238a8c8e1f6a847e1ec9055b'] = 'LGPLv2.1'
-    # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv3
-    crunched_md5sums['f90c613c51aa35da4d79dd55fc724ceb'] = 'LGPLv3'
-    # https://raw.githubusercontent.com/eclipse/mosquitto/v1.4.14/epl-v10
-    crunched_md5sums['efe2cb9a35826992b9df68224e3c2628'] = 'EPL-1.0'
-
-    # https://raw.githubusercontent.com/jquery/esprima/3.1.3/LICENSE.BSD
-    crunched_md5sums['80fa7b56a28e8c902e6af194003220a5'] = 'BSD-2-Clause'
-    # https://raw.githubusercontent.com/npm/npm-install-checks/master/LICENSE
-    crunched_md5sums['e659f77bfd9002659e112d0d3d59b2c1'] = 'BSD-2-Clause'
-    # https://raw.githubusercontent.com/silverwind/default-gateway/4.2.0/LICENSE
-    crunched_md5sums['4c641f2d995c47f5cb08bdb4b5b6ea05'] = 'BSD-2-Clause'
-    # https://raw.githubusercontent.com/tad-lispy/node-damerau-levenshtein/v1.0.5/LICENSE
-    crunched_md5sums['2b8c039b2b9a25f0feb4410c4542d346'] = 'BSD-2-Clause'
-    # https://raw.githubusercontent.com/terser/terser/v3.17.0/LICENSE
-    crunched_md5sums['8bd23871802951c9ad63855151204c2c'] = 'BSD-2-Clause'
-    # https://raw.githubusercontent.com/alexei/sprintf.js/1.0.3/LICENSE
-    crunched_md5sums['008c22318c8ea65928bf730ddd0273e3'] = 'BSD-3-Clause'
-    # https://raw.githubusercontent.com/Caligatio/jsSHA/v3.2.0/LICENSE
-    crunched_md5sums['0e46634a01bfef056892949acaea85b1'] = 'BSD-3-Clause'
-    # https://raw.githubusercontent.com/d3/d3-path/v1.0.9/LICENSE
-    crunched_md5sums['b5f72aef53d3b2b432702c30b0215666'] = 'BSD-3-Clause'
-    # https://raw.githubusercontent.com/feross/ieee754/v1.1.13/LICENSE
-    crunched_md5sums['a39327c997c20da0937955192d86232d'] = 'BSD-3-Clause'
-    # https://raw.githubusercontent.com/joyent/node-extsprintf/v1.3.0/LICENSE
-    crunched_md5sums['721f23a96ff4161ca3a5f071bbe18108'] = 'MIT'
-    # https://raw.githubusercontent.com/pvorb/clone/v0.2.0/LICENSE
-    crunched_md5sums['b376d29a53c9573006b9970709231431'] = 'MIT'
-    # https://raw.githubusercontent.com/andris9/encoding/v0.1.12/LICENSE
-    crunched_md5sums['85d8a977ee9d7c5ab4ac03c9b95431c4'] = 'MIT-0'
-    # https://raw.githubusercontent.com/faye/websocket-driver-node/0.7.3/LICENSE.md
-    crunched_md5sums['b66384e7137e41a9b1904ef4d39703b6'] = 'Apache-2.0'
-    # https://raw.githubusercontent.com/less/less.js/v4.1.1/LICENSE
-    crunched_md5sums['b27575459e02221ccef97ec0bfd457ae'] = 'Apache-2.0'
-    # https://raw.githubusercontent.com/microsoft/TypeScript/v3.5.3/LICENSE.txt
-    crunched_md5sums['a54a1a6a39e7f9dbb4a23a42f5c7fd1c'] = 'Apache-2.0'
-    # https://raw.githubusercontent.com/request/request/v2.87.0/LICENSE
-    crunched_md5sums['1034431802e57486b393d00c5d262b8a'] = 'Apache-2.0'
-    # https://raw.githubusercontent.com/dchest/tweetnacl-js/v0.14.5/LICENSE
-    crunched_md5sums['75605e6bdd564791ab698fca65c94a4f'] = 'Unlicense'
-    # https://raw.githubusercontent.com/stackgl/gl-mat3/v2.0.0/LICENSE.md
-    crunched_md5sums['75512892d6f59dddb6d1c7e191957e9c'] = 'Zlib'
-
-    lictext = []
-    with open(licfile, 'r', errors='surrogateescape') as f:
-        for line in f:
-            # Drop opening statements
-            if copyright_re.match(line):
-                continue
-            elif disclaimer_re.match(line):
-                continue
-            elif email_re.match(line):
-                continue
-            elif header_re.match(line):
-                continue
-            elif tag_re.match(line):
-                continue
-            elif url_re.match(line):
-                continue
-            elif license_title_re.match(line):
-                continue
-            elif license_statement_re.match(line):
-                continue
-            # Strip comment symbols
-            line = line.replace('*', '') \
-                       .replace('#', '')
-            # Unify spelling
-            line = line.replace('sub-license', 'sublicense')
-            # Squash spaces
-            line = oe.utils.squashspaces(line.strip())
-            # Replace smart quotes, double quotes and backticks with single quotes
-            line = line.replace(u"\u2018", "'").replace(u"\u2019", "'").replace(u"\u201c","'").replace(u"\u201d", "'").replace('"', '\'').replace('`', '\'')
-            # Unify brackets
-            line = line.replace("{", "[").replace("}", "]")
-            if line:
-                lictext.append(line)
-
-    m = hashlib.md5()
-    try:
-        m.update(' '.join(lictext).encode('utf-8'))
-        md5val = m.hexdigest()
-    except UnicodeEncodeError:
-        md5val = None
-        lictext = ''
-    license = crunched_md5sums.get(md5val, None)
-    return license, md5val, lictext
-
-def guess_license(srctree, d):
-    import bb
-    md5sums = get_license_md5sums(d)
-
-    licenses = []
-    licspecs = ['*LICEN[CS]E*', 'COPYING*', '*[Ll]icense*', 'LEGAL*', '[Ll]egal*', '*GPL*', 'README.lic*', 'COPYRIGHT*', '[Cc]opyright*', 'e[dp]l-v10']
-    skip_extensions = (".html", ".js", ".json", ".svg", ".ts")
-    licfiles = []
-    for root, dirs, files in os.walk(srctree):
-        for fn in files:
-            if fn.endswith(skip_extensions):
-                continue
-            for spec in licspecs:
-                if fnmatch.fnmatch(fn, spec):
-                    fullpath = os.path.join(root, fn)
-                    if not fullpath in licfiles:
-                        licfiles.append(fullpath)
-    for licfile in licfiles:
-        md5value = bb.utils.md5_file(licfile)
-        license = md5sums.get(md5value, None)
-        if not license:
-            license, crunched_md5, lictext = crunch_license(licfile)
-            if lictext and not license:
-                license = 'Unknown'
-                logger.info("Please add the following line for '%s' to a 'lib/recipetool/licenses.csv' " \
-                    "and replace `Unknown` with the license:\n" \
-                    "%s,Unknown" % (os.path.relpath(licfile, srctree), md5value))
-        if license:
-            licenses.append((license, os.path.relpath(licfile, srctree), md5value))
-
-    # FIXME should we grab at least one source file with a license header and add that too?
-
-    return licenses
-
-def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=[], pn='${PN}'):
+def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn='${PN}'):
     """
-    Given a list of (license, path, md5sum) as returned by guess_license(),
+    Given a list of (license, path, md5sum) as returned by match_licenses(),
     a dict of package name to path mappings, write out a set of
     package-specific LICENSE values.
     """
     pkglicenses = {pn: []}
     for license, licpath, _ in licvalues:
+        license = fixup_license(license)
         for pkgname, pkgpath in packages.items():
             if licpath.startswith(pkgpath + '/'):
                 if pkgname in pkglicenses:
@@ -1239,13 +1058,24 @@ def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=[], pn='
             pkglicenses[pn].append(license)
     outlicenses = {}
     for pkgname in packages:
-        license = ' '.join(list(set(pkglicenses.get(pkgname, ['Unknown'])))) or 'Unknown'
-        if license == 'Unknown' and pkgname in fallback_licenses:
+        # Assume AND operator between license files
+        license = ' & '.join(list(set(pkglicenses.get(pkgname, ['Unknown'])))) or 'Unknown'
+        if license == 'Unknown' and fallback_licenses and pkgname in fallback_licenses:
             license = fallback_licenses[pkgname]
+        licenses = tidy_licenses(license)
+        license = ' & '.join(licenses)
         outlines.append('LICENSE:%s = "%s"' % (pkgname, license))
-        outlicenses[pkgname] = license.split()
+        outlicenses[pkgname] = licenses
     return outlicenses
 
+def generate_common_licenses_chksums(common_licenses, d):
+    lic_files_chksums = []
+    for license in tidy_licenses(common_licenses):
+        licfile = '${COMMON_LICENSE_DIR}/' + license
+        md5value = bb.utils.md5_file(d.expand(licfile))
+        lic_files_chksums.append('file://%s;md5=%s' % (licfile, md5value))
+    return lic_files_chksums
+
 def read_pkgconfig_provides(d):
     pkgdatadir = d.getVar('PKGDATA_DIR')
     pkgmap = {}
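
A note on the reworked per-package license handling above: each matched license file is now
treated as an AND term, and fixup_license() wraps any expression that already contains an OR
choice in brackets so the combined string stays unambiguous. A small illustration of that
joining step (the detected values are invented, and tidy_licenses() from oe.license is left
out of this sketch):

    def fixup_license(value):
        # Same helper as in the patch: ensure OR expressions are bracketed
        if '|' in value:
            return '(' + value + ')'
        return value

    # Licenses detected from two different license files of one package
    detected = ['MIT', 'GPL-2.0-only | BSD-3-Clause']

    # Assume AND between license files, as split_pkg_licenses() now does
    combined = ' & '.join(fixup_license(lic) for lic in detected)
    print(combined)   # MIT & (GPL-2.0-only | BSD-3-Clause)
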
@@ -1376,7 +1206,7 @@ def register_commands(subparsers):
     parser_create.add_argument('-B', '--srcbranch', help='Branch in source repository if fetching from an SCM such as git (default master)')
     parser_create.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
     parser_create.add_argument('--npm-dev', action="store_true", help='For npm, also fetch devDependencies')
+    parser_create.add_argument('--no-pypi', action="store_true", help='Do not inherit pypi class')
     parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS)
     parser_create.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).')
     parser_create.set_defaults(func=create_recipe)
 
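
For reference, the handler registration block that create.py now runs before URL processing
sorts plugin handlers by an optional priority while keeping registration order as the
tie-breaker. A minimal sketch of just that ordering logic (the handler names are made up;
real handlers are objects, not strings):

    # Handlers may be registered as a bare object or as an (object, priority) tuple
    raw_handlers = ['cmake', ('npm', 10), 'autotools', ('python', 10)]

    handlers = []
    for i, handler in enumerate(raw_handlers):
        if isinstance(handler, tuple):
            handlers.append((handler[0], handler[1], i))
        else:
            handlers.append((handler, 0, i))

    # Highest priority first; for equal priority the earlier registration wins,
    # because the negated index combined with reverse=True preserves insertion order
    handlers.sort(key=lambda item: (item[1], -item[2]), reverse=True)
    print([h[0] for h in handlers])   # ['npm', 'python', 'cmake', 'autotools']
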
diff --git a/scripts/lib/recipetool/create_buildsys.py b/scripts/lib/recipetool/create_buildsys.py
index 5015634476..ec9d510e23 100644
--- a/scripts/lib/recipetool/create_buildsys.py
+++ b/scripts/lib/recipetool/create_buildsys.py
@@ -5,9 +5,9 @@
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
+import os
 import re
 import logging
-import glob
 from recipetool.create import RecipeHandler, validate_pv
 
 logger = logging.getLogger('recipetool')
@@ -137,15 +137,15 @@ class CmakeRecipeHandler(RecipeHandler):
         deps = []
         unmappedpkgs = []
 
-        proj_re = re.compile('project\s*\(([^)]*)\)', re.IGNORECASE)
+        proj_re = re.compile(r'project\s*\(([^)]*)\)', re.IGNORECASE)
-        pkgcm_re = re.compile('pkg_check_modules\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?\s+([^)\s]+)\s*\)', re.IGNORECASE)
+        pkgcm_re = re.compile(r'pkg_check_modules\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?\s+([^)\s]+)\s*\)', re.IGNORECASE)
-        pkgsm_re = re.compile('pkg_search_module\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?((\s+[^)\s]+)+)\s*\)', re.IGNORECASE)
+        pkgsm_re = re.compile(r'pkg_search_module\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?((\s+[^)\s]+)+)\s*\)', re.IGNORECASE)
-        findpackage_re = re.compile('find_package\s*\(\s*([a-zA-Z0-9-_]+)\s*.*', re.IGNORECASE)
+        findpackage_re = re.compile(r'find_package\s*\(\s*([a-zA-Z0-9-_]+)\s*.*', re.IGNORECASE)
-        findlibrary_re = re.compile('find_library\s*\(\s*[a-zA-Z0-9-_]+\s*(NAMES\s+)?([a-zA-Z0-9-_ ]+)\s*.*')
+        findlibrary_re = re.compile(r'find_library\s*\(\s*[a-zA-Z0-9-_]+\s*(NAMES\s+)?([a-zA-Z0-9-_ ]+)\s*.*')
-        checklib_re = re.compile('check_library_exists\s*\(\s*([^\s)]+)\s*.*', re.IGNORECASE)
+        checklib_re = re.compile(r'check_library_exists\s*\(\s*([^\s)]+)\s*.*', re.IGNORECASE)
-        include_re = re.compile('include\s*\(\s*([^)\s]*)\s*\)', re.IGNORECASE)
+        include_re = re.compile(r'include\s*\(\s*([^)\s]*)\s*\)', re.IGNORECASE)
-        subdir_re = re.compile('add_subdirectory\s*\(\s*([^)\s]*)\s*([^)\s]*)\s*\)', re.IGNORECASE)
+        subdir_re = re.compile(r'add_subdirectory\s*\(\s*([^)\s]*)\s*([^)\s]*)\s*\)', re.IGNORECASE)
-        dep_re = re.compile('([^ ><=]+)( *[<>=]+ *[^ ><=]+)?')
+        dep_re = re.compile(r'([^ ><=]+)( *[<>=]+ *[^ ><=]+)?')
 
         def find_cmake_package(pkg):
             RecipeHandler.load_devel_filemap(tinfoil.config_data)
@@ -423,16 +423,16 @@ class AutotoolsRecipeHandler(RecipeHandler):
             'makeinfo': 'texinfo',
             }
 
-        pkg_re = re.compile('PKG_CHECK_MODULES\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*')
+        pkg_re = re.compile(r'PKG_CHECK_MODULES\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*')
-        pkgce_re = re.compile('PKG_CHECK_EXISTS\(\s*\[?([^,\]]*)\]?[),].*')
+        pkgce_re = re.compile(r'PKG_CHECK_EXISTS\(\s*\[?([^,\]]*)\]?[),].*')
-        lib_re = re.compile('AC_CHECK_LIB\(\s*\[?([^,\]]*)\]?,.*')
+        lib_re = re.compile(r'AC_CHECK_LIB\(\s*\[?([^,\]]*)\]?,.*')
-        libx_re = re.compile('AX_CHECK_LIBRARY\(\s*\[?[^,\]]*\]?,\s*\[?([^,\]]*)\]?,\s*\[?([a-zA-Z0-9-]*)\]?,.*')
+        libx_re = re.compile(r'AX_CHECK_LIBRARY\(\s*\[?[^,\]]*\]?,\s*\[?([^,\]]*)\]?,\s*\[?([a-zA-Z0-9-]*)\]?,.*')
-        progs_re = re.compile('_PROGS?\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*')
+        progs_re = re.compile(r'_PROGS?\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*')
-        dep_re = re.compile('([^ ><=]+)( [<>=]+ [^ ><=]+)?')
+        dep_re = re.compile(r'([^ ><=]+)( [<>=]+ [^ ><=]+)?')
-        ac_init_re = re.compile('AC_INIT\(\s*([^,]+),\s*([^,]+)[,)].*')
+        ac_init_re = re.compile(r'AC_INIT\(\s*([^,]+),\s*([^,]+)[,)].*')
-        am_init_re = re.compile('AM_INIT_AUTOMAKE\(\s*([^,]+),\s*([^,]+)[,)].*')
+        am_init_re = re.compile(r'AM_INIT_AUTOMAKE\(\s*([^,]+),\s*([^,]+)[,)].*')
-        define_re = re.compile('\s*(m4_)?define\(\s*([^,]+),\s*([^,]+)\)')
+        define_re = re.compile(r'\s*(m4_)?define\(\s*([^,]+),\s*([^,]+)\)')
-        version_re = re.compile('([0-9.]+)')
+        version_re = re.compile(r'([0-9.]+)')
 
         defines = {}
         def subst_defines(value):
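
The create_buildsys.py hunks above only add the r'' prefix to the existing regular
expressions: sequences such as \s or \( are not valid Python string escapes, and in ordinary
string literals they trigger a DeprecationWarning since Python 3.6 (a SyntaxWarning from
3.12), whereas a raw string passes the backslashes through to the re module unchanged. A
quick illustration:

    import re

    # Raw string: the backslash in \s reaches the regex engine intact, no warning emitted
    proj_re = re.compile(r'project\s*\(([^)]*)\)', re.IGNORECASE)

    m = proj_re.search('PROJECT (example VERSION 1.0)')
    print(m.group(1))   # 'example VERSION 1.0'
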
diff --git a/scripts/lib/recipetool/create_buildsys_python.py b/scripts/lib/recipetool/create_buildsys_python.py
index 0b6b042ed1..a807dafae5 100644
--- a/scripts/lib/recipetool/create_buildsys_python.py
+++ b/scripts/lib/recipetool/create_buildsys_python.py
@@ -8,9 +8,9 @@
 import ast
 import codecs
 import collections
-import distutils.command.build_py
+import setuptools.command.build_py
 import email
-import imp
+import importlib
 import glob
 import itertools
 import logging
@@ -18,7 +18,11 @@ import os
 import re
 import sys
 import subprocess
+import json
+import urllib.request
 from recipetool.create import RecipeHandler
+from urllib.parse import urldefrag
+from recipetool.create import determine_from_url
 
 logger = logging.getLogger('recipetool')
 
@@ -37,7 +41,334 @@ class PythonRecipeHandler(RecipeHandler):
37 assume_provided = ['builtins', 'os.path'] 41 assume_provided = ['builtins', 'os.path']
38 # Assumes that the host python3 builtin_module_names is sane for target too 42 # Assumes that the host python3 builtin_module_names is sane for target too
39 assume_provided = assume_provided + list(sys.builtin_module_names) 43 assume_provided = assume_provided + list(sys.builtin_module_names)
44 excluded_fields = []
40 45
46
47 classifier_license_map = {
48 'License :: OSI Approved :: Academic Free License (AFL)': 'AFL',
49 'License :: OSI Approved :: Apache Software License': 'Apache',
50 'License :: OSI Approved :: Apple Public Source License': 'APSL',
51 'License :: OSI Approved :: Artistic License': 'Artistic',
52 'License :: OSI Approved :: Attribution Assurance License': 'AAL',
53 'License :: OSI Approved :: BSD License': 'BSD-3-Clause',
54 'License :: OSI Approved :: Boost Software License 1.0 (BSL-1.0)': 'BSL-1.0',
55 'License :: OSI Approved :: CEA CNRS Inria Logiciel Libre License, version 2.1 (CeCILL-2.1)': 'CECILL-2.1',
56 'License :: OSI Approved :: Common Development and Distribution License 1.0 (CDDL-1.0)': 'CDDL-1.0',
57 'License :: OSI Approved :: Common Public License': 'CPL',
58 'License :: OSI Approved :: Eclipse Public License 1.0 (EPL-1.0)': 'EPL-1.0',
59 'License :: OSI Approved :: Eclipse Public License 2.0 (EPL-2.0)': 'EPL-2.0',
60 'License :: OSI Approved :: Eiffel Forum License': 'EFL',
61 'License :: OSI Approved :: European Union Public Licence 1.0 (EUPL 1.0)': 'EUPL-1.0',
62 'License :: OSI Approved :: European Union Public Licence 1.1 (EUPL 1.1)': 'EUPL-1.1',
63 'License :: OSI Approved :: European Union Public Licence 1.2 (EUPL 1.2)': 'EUPL-1.2',
64 'License :: OSI Approved :: GNU Affero General Public License v3': 'AGPL-3.0-only',
65 'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)': 'AGPL-3.0-or-later',
66 'License :: OSI Approved :: GNU Free Documentation License (FDL)': 'GFDL',
67 'License :: OSI Approved :: GNU General Public License (GPL)': 'GPL',
68 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)': 'GPL-2.0-only',
69 'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)': 'GPL-2.0-or-later',
70 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)': 'GPL-3.0-only',
71 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)': 'GPL-3.0-or-later',
72 'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)': 'LGPL-2.0-only',
73 'License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+)': 'LGPL-2.0-or-later',
74 'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)': 'LGPL-3.0-only',
75 'License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)': 'LGPL-3.0-or-later',
76 'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)': 'LGPL',
77 'License :: OSI Approved :: Historical Permission Notice and Disclaimer (HPND)': 'HPND',
78 'License :: OSI Approved :: IBM Public License': 'IPL',
79 'License :: OSI Approved :: ISC License (ISCL)': 'ISC',
80 'License :: OSI Approved :: Intel Open Source License': 'Intel',
81 'License :: OSI Approved :: Jabber Open Source License': 'Jabber',
82 'License :: OSI Approved :: MIT License': 'MIT',
83 'License :: OSI Approved :: MIT No Attribution License (MIT-0)': 'MIT-0',
84 'License :: OSI Approved :: MITRE Collaborative Virtual Workspace License (CVW)': 'CVWL',
85 'License :: OSI Approved :: MirOS License (MirOS)': 'MirOS',
86 'License :: OSI Approved :: Motosoto License': 'Motosoto',
87 'License :: OSI Approved :: Mozilla Public License 1.0 (MPL)': 'MPL-1.0',
88 'License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)': 'MPL-1.1',
89 'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)': 'MPL-2.0',
90 'License :: OSI Approved :: Nethack General Public License': 'NGPL',
91 'License :: OSI Approved :: Nokia Open Source License': 'Nokia',
92 'License :: OSI Approved :: Open Group Test Suite License': 'OGTSL',
93 'License :: OSI Approved :: Open Software License 3.0 (OSL-3.0)': 'OSL-3.0',
94 'License :: OSI Approved :: PostgreSQL License': 'PostgreSQL',
95 'License :: OSI Approved :: Python License (CNRI Python License)': 'CNRI-Python',
96 'License :: OSI Approved :: Python Software Foundation License': 'PSF-2.0',
97 'License :: OSI Approved :: Qt Public License (QPL)': 'QPL',
98 'License :: OSI Approved :: Ricoh Source Code Public License': 'RSCPL',
99 'License :: OSI Approved :: SIL Open Font License 1.1 (OFL-1.1)': 'OFL-1.1',
100 'License :: OSI Approved :: Sleepycat License': 'Sleepycat',
101 'License :: OSI Approved :: Sun Industry Standards Source License (SISSL)': 'SISSL',
102 'License :: OSI Approved :: Sun Public License': 'SPL',
103 'License :: OSI Approved :: The Unlicense (Unlicense)': 'Unlicense',
104 'License :: OSI Approved :: Universal Permissive License (UPL)': 'UPL-1.0',
105 'License :: OSI Approved :: University of Illinois/NCSA Open Source License': 'NCSA',
106 'License :: OSI Approved :: Vovida Software License 1.0': 'VSL-1.0',
107 'License :: OSI Approved :: W3C License': 'W3C',
108 'License :: OSI Approved :: X.Net License': 'Xnet',
109 'License :: OSI Approved :: Zope Public License': 'ZPL',
110 'License :: OSI Approved :: zlib/libpng License': 'Zlib',
111 'License :: Other/Proprietary License': 'Proprietary',
112 'License :: Public Domain': 'PD',
113 }
114
115 def __init__(self):
116 pass
117
118 def process_url(self, args, classes, handled, extravalues):
119 """
120 Convert any pypi url https://pypi.org/project/<package>/<version> into https://files.pythonhosted.org/packages/source/...
121 which corresponds to the archive location, and add pypi class
122 """
123
124 if 'url' in handled:
125 return None
126
127 fetch_uri = None
128 source = args.source
129 required_version = args.version if args.version else None
130 match = re.match(r'https?://pypi.org/project/([^/]+)(?:/([^/]+))?/?$', urldefrag(source)[0])
131 if match:
132 package = match.group(1)
133 version = match.group(2) if match.group(2) else required_version
134
135 json_url = "https://pypi.org/pypi/%s/json" % package
136 response = urllib.request.urlopen(json_url)
137 if response.status == 200:
138 data = json.loads(response.read())
139 if not version:
140 # grab latest version
141 version = data["info"]["version"]
142 pypi_package = data["info"]["name"]
143 for release in reversed(data["releases"][version]):
144 if release["packagetype"] == "sdist":
145 fetch_uri = release["url"]
146 break
147 else:
148 logger.warning("Cannot handle pypi url %s: cannot fetch package information using %s", source, json_url)
149 return None
150 else:
151 match = re.match(r'^https?://files.pythonhosted.org/packages.*/(.*)-.*$', source)
152 if match:
153 fetch_uri = source
154 pypi_package = match.group(1)
155 _, version = determine_from_url(fetch_uri)
156
157 if match and not args.no_pypi:
158 if required_version and version != required_version:
159 raise Exception("Version specified using --version/-V (%s) and version specified in the url (%s) do not match" % (required_version, version))
160 # This is optional if BPN looks like "python-<pypi_package>" or "python3-<pypi_package>" (see pypi.bbclass)
161 # but at this point we cannot know because the user can specify the output name of the recipe on the command line
162 extravalues["PYPI_PACKAGE"] = pypi_package
163 # If the tarball extension is not 'tar.gz' (default value in pypi.bbclass) we should set PYPI_PACKAGE_EXT in the recipe
164 pypi_package_ext = re.match(r'.*%s-%s\.(.*)$' % (pypi_package, version), fetch_uri)
165 if pypi_package_ext:
166 pypi_package_ext = pypi_package_ext.group(1)
167 if pypi_package_ext != "tar.gz":
168 extravalues["PYPI_PACKAGE_EXT"] = pypi_package_ext
169
170 # Pypi class will handle S and SRC_URI variables, so remove them
171 # TODO: allow oe.recipeutils.patch_recipe_lines() to accept regexp so we can simplify the following to:
172 # extravalues['SRC_URI(?:\[.*?\])?'] = None
173 extravalues['S'] = None
174 extravalues['SRC_URI'] = None
175
176 classes.append('pypi')
177
178 handled.append('url')
179 return fetch_uri
180
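For illustration only (not part of the patch), a minimal standalone sketch of the PyPI JSON lookup that process_url() performs above; the helper name and the sample package are assumptions made for the example:

    # Sketch of the PyPI JSON API lookup used to locate the sdist archive.
    import json
    import urllib.request

    def pypi_sdist_url(package, version=None):
        """Return (version, sdist_url) for a PyPI package, or (version, None)."""
        with urllib.request.urlopen("https://pypi.org/pypi/%s/json" % package) as response:
            data = json.loads(response.read())
        version = version or data["info"]["version"]
        # Prefer the source distribution, since the recipe needs a source archive
        for release in reversed(data["releases"][version]):
            if release["packagetype"] == "sdist":
                return version, release["url"]
        return version, None

    print(pypi_sdist_url("requests"))
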
181 def handle_classifier_license(self, classifiers, existing_licenses=""):
182
183 licenses = []
184 for classifier in classifiers:
185 if classifier in self.classifier_license_map:
186 license = self.classifier_license_map[classifier]
187 if license == 'Apache' and 'Apache-2.0' in existing_licenses:
188 license = 'Apache-2.0'
189 elif license == 'GPL':
190 if 'GPL-2.0' in existing_licenses or 'GPLv2' in existing_licenses:
191 license = 'GPL-2.0'
192 elif 'GPL-3.0' in existing_licenses or 'GPLv3' in existing_licenses:
193 license = 'GPL-3.0'
194 elif license == 'LGPL':
195 if 'LGPL-2.1' in existing_licenses or 'LGPLv2.1' in existing_licenses:
196 license = 'LGPL-2.1'
197 elif 'LGPL-2.0' in existing_licenses or 'LGPLv2' in existing_licenses:
198 license = 'LGPL-2.0'
199 elif 'LGPL-3.0' in existing_licenses or 'LGPLv3' in existing_licenses:
200 license = 'LGPL-3.0'
201 licenses.append(license)
202
203 if licenses:
204 return ' & '.join(licenses)
205
206 return None
207
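A trimmed sketch of how handle_classifier_license() above turns Trove classifiers into a LICENSE string, including the narrowing of generic entries against what the License field already says; the two-entry map and the sample input are illustrative only:

    # Illustration of classifier-to-SPDX mapping with GPL disambiguation.
    classifier_map = {
        'License :: OSI Approved :: MIT License': 'MIT',
        'License :: OSI Approved :: GNU General Public License (GPL)': 'GPL',
    }

    def licenses_from_classifiers(classifiers, existing_licenses=""):
        licenses = []
        for classifier in classifiers:
            license = classifier_map.get(classifier)
            if not license:
                continue
            # Generic 'GPL' is narrowed using the declared License field, if any
            if license == 'GPL' and 'GPLv2' in existing_licenses:
                license = 'GPL-2.0'
            licenses.append(license)
        return ' & '.join(licenses) if licenses else None

    print(licenses_from_classifiers(['License :: OSI Approved :: MIT License']))  # -> MIT
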
208 def map_info_to_bbvar(self, info, extravalues):
209
210 # Map PKG-INFO & setup.py fields to bitbake variables
211 for field, values in info.items():
212 if field in self.excluded_fields:
213 continue
214
215 if field not in self.bbvar_map:
216 continue
217
218 if isinstance(values, str):
219 value = values
220 else:
221 value = ' '.join(str(v) for v in values if v)
222
223 bbvar = self.bbvar_map[field]
224 if bbvar == "PN":
225 # by convention python recipes start with "python3-"
226 if not value.startswith('python'):
227 value = 'python3-' + value
228
229 if bbvar not in extravalues and value:
230 extravalues[bbvar] = value
231
232 def apply_info_replacements(self, info):
233 if not self.replacements:
234 return
235
236 for variable, search, replace in self.replacements:
237 if variable not in info:
238 continue
239
240 def replace_value(search, replace, value):
241 if replace is None:
242 if re.search(search, value):
243 return None
244 else:
245 new_value = re.sub(search, replace, value)
246 if value != new_value:
247 return new_value
248 return value
249
250 value = info[variable]
251 if isinstance(value, str):
252 new_value = replace_value(search, replace, value)
253 if new_value is None:
254 del info[variable]
255 elif new_value != value:
256 info[variable] = new_value
257 elif hasattr(value, 'items'):
258 for dkey, dvalue in list(value.items()):
259 new_list = []
260 for pos, a_value in enumerate(dvalue):
261 new_value = replace_value(search, replace, a_value)
262 if new_value is not None and new_value != value:
263 new_list.append(new_value)
264
265 if value != new_list:
266 value[dkey] = new_list
267 else:
268 new_list = []
269 for pos, a_value in enumerate(value):
270 new_value = replace_value(search, replace, a_value)
271 if new_value is not None and new_value != value:
272 new_list.append(new_value)
273
274 if value != new_list:
275 info[variable] = new_list
276
277
278 def scan_python_dependencies(self, paths):
279 deps = set()
280 try:
281 dep_output = self.run_command(['pythondeps', '-d'] + paths)
282 except (OSError, subprocess.CalledProcessError):
283 pass
284 else:
285 for line in dep_output.splitlines():
286 line = line.rstrip()
287 dep, filename = line.split('\t', 1)
288 if filename.endswith('/setup.py'):
289 continue
290 deps.add(dep)
291
292 try:
293 provides_output = self.run_command(['pythondeps', '-p'] + paths)
294 except (OSError, subprocess.CalledProcessError):
295 pass
296 else:
297 provides_lines = (l.rstrip() for l in provides_output.splitlines())
298 provides = set(l for l in provides_lines if l and l != 'setup')
299 deps -= provides
300
301 return deps
302
303 def parse_pkgdata_for_python_packages(self):
304 pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')
305
306 ldata = tinfoil.config_data.createCopy()
307 bb.parse.handle('classes-recipe/python3-dir.bbclass', ldata, True)
308 python_sitedir = ldata.getVar('PYTHON_SITEPACKAGES_DIR')
309
310 dynload_dir = os.path.join(os.path.dirname(python_sitedir), 'lib-dynload')
311 python_dirs = [python_sitedir + os.sep,
312 os.path.join(os.path.dirname(python_sitedir), 'dist-packages') + os.sep,
313 os.path.dirname(python_sitedir) + os.sep]
314 packages = {}
315 for pkgdatafile in glob.glob('{}/runtime/*'.format(pkgdata_dir)):
316 files_info = None
317 with open(pkgdatafile, 'r') as f:
318 for line in f.readlines():
319 field, value = line.split(': ', 1)
320 if field.startswith('FILES_INFO'):
321 files_info = ast.literal_eval(value)
322 break
323 else:
324 continue
325
326 for fn in files_info:
327 for suffix in importlib.machinery.all_suffixes():
328 if fn.endswith(suffix):
329 break
330 else:
331 continue
332
333 if fn.startswith(dynload_dir + os.sep):
334 if '/.debug/' in fn:
335 continue
336 base = os.path.basename(fn)
337 provided = base.split('.', 1)[0]
338 packages[provided] = os.path.basename(pkgdatafile)
339 continue
340
341 for python_dir in python_dirs:
342 if fn.startswith(python_dir):
343 relpath = fn[len(python_dir):]
344 relstart, _, relremaining = relpath.partition(os.sep)
345 if relstart.endswith('.egg'):
346 relpath = relremaining
347 base, _ = os.path.splitext(relpath)
348
349 if '/.debug/' in base:
350 continue
351 if os.path.basename(base) == '__init__':
352 base = os.path.dirname(base)
353 base = base.replace(os.sep + os.sep, os.sep)
354 provided = base.replace(os.sep, '.')
355 packages[provided] = os.path.basename(pkgdatafile)
356 return packages
357
358 @classmethod
359 def run_command(cls, cmd, **popenargs):
360 if 'stderr' not in popenargs:
361 popenargs['stderr'] = subprocess.STDOUT
362 try:
363 return subprocess.check_output(cmd, **popenargs).decode('utf-8')
364 except OSError as exc:
365 logger.error('Unable to run `{}`: {}', ' '.join(cmd), exc)
366 raise
367 except subprocess.CalledProcessError as exc:
368 logger.error('Unable to run `{}`: {}', ' '.join(cmd), exc.output)
369 raise
370
371class PythonSetupPyRecipeHandler(PythonRecipeHandler):
41 bbvar_map = { 372 bbvar_map = {
42 'Name': 'PN', 373 'Name': 'PN',
43 'Version': 'PV', 374 'Version': 'PV',
@@ -75,6 +406,7 @@ class PythonRecipeHandler(RecipeHandler):
75 'Supported-Platform', 406 'Supported-Platform',
76 ] 407 ]
77 setuparg_multi_line_values = ['Description'] 408 setuparg_multi_line_values = ['Description']
409
78 replacements = [ 410 replacements = [
79 ('License', r' +$', ''), 411 ('License', r' +$', ''),
80 ('License', r'^ +', ''), 412 ('License', r'^ +', ''),
@@ -95,71 +427,161 @@ class PythonRecipeHandler(RecipeHandler):
95 ('Install-requires', r'\[[^\]]+\]$', ''), 427 ('Install-requires', r'\[[^\]]+\]$', ''),
96 ] 428 ]
97 429
98 classifier_license_map = {
99 'License :: OSI Approved :: Academic Free License (AFL)': 'AFL',
100 'License :: OSI Approved :: Apache Software License': 'Apache',
101 'License :: OSI Approved :: Apple Public Source License': 'APSL',
102 'License :: OSI Approved :: Artistic License': 'Artistic',
103 'License :: OSI Approved :: Attribution Assurance License': 'AAL',
104 'License :: OSI Approved :: BSD License': 'BSD-3-Clause',
105 'License :: OSI Approved :: Common Public License': 'CPL',
106 'License :: OSI Approved :: Eiffel Forum License': 'EFL',
107 'License :: OSI Approved :: European Union Public Licence 1.0 (EUPL 1.0)': 'EUPL-1.0',
108 'License :: OSI Approved :: European Union Public Licence 1.1 (EUPL 1.1)': 'EUPL-1.1',
109 'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)': 'AGPL-3.0+',
110 'License :: OSI Approved :: GNU Affero General Public License v3': 'AGPL-3.0',
111 'License :: OSI Approved :: GNU Free Documentation License (FDL)': 'GFDL',
112 'License :: OSI Approved :: GNU General Public License (GPL)': 'GPL',
113 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)': 'GPL-2.0',
114 'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)': 'GPL-2.0+',
115 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)': 'GPL-3.0',
116 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)': 'GPL-3.0+',
117 'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)': 'LGPL-2.0',
118 'License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+)': 'LGPL-2.0+',
119 'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)': 'LGPL-3.0',
120 'License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)': 'LGPL-3.0+',
121 'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)': 'LGPL',
122 'License :: OSI Approved :: IBM Public License': 'IPL',
123 'License :: OSI Approved :: ISC License (ISCL)': 'ISC',
124 'License :: OSI Approved :: Intel Open Source License': 'Intel',
125 'License :: OSI Approved :: Jabber Open Source License': 'Jabber',
126 'License :: OSI Approved :: MIT License': 'MIT',
127 'License :: OSI Approved :: MITRE Collaborative Virtual Workspace License (CVW)': 'CVWL',
128 'License :: OSI Approved :: Motosoto License': 'Motosoto',
129 'License :: OSI Approved :: Mozilla Public License 1.0 (MPL)': 'MPL-1.0',
130 'License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)': 'MPL-1.1',
131 'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)': 'MPL-2.0',
132 'License :: OSI Approved :: Nethack General Public License': 'NGPL',
133 'License :: OSI Approved :: Nokia Open Source License': 'Nokia',
134 'License :: OSI Approved :: Open Group Test Suite License': 'OGTSL',
135 'License :: OSI Approved :: Python License (CNRI Python License)': 'CNRI-Python',
136 'License :: OSI Approved :: Python Software Foundation License': 'PSF',
137 'License :: OSI Approved :: Qt Public License (QPL)': 'QPL',
138 'License :: OSI Approved :: Ricoh Source Code Public License': 'RSCPL',
139 'License :: OSI Approved :: Sleepycat License': 'Sleepycat',
140 'License :: OSI Approved :: Sun Industry Standards Source License (SISSL)': '-- Sun Industry Standards Source License (SISSL)',
141 'License :: OSI Approved :: Sun Public License': 'SPL',
142 'License :: OSI Approved :: University of Illinois/NCSA Open Source License': 'NCSA',
143 'License :: OSI Approved :: Vovida Software License 1.0': 'VSL-1.0',
144 'License :: OSI Approved :: W3C License': 'W3C',
145 'License :: OSI Approved :: X.Net License': 'Xnet',
146 'License :: OSI Approved :: Zope Public License': 'ZPL',
147 'License :: OSI Approved :: zlib/libpng License': 'Zlib',
148 }
149
150 def __init__(self): 430 def __init__(self):
151 pass 431 pass
152 432
433 def parse_setup_py(self, setupscript='./setup.py'):
434 with codecs.open(setupscript) as f:
435 info, imported_modules, non_literals, extensions = gather_setup_info(f)
436
437 def _map(key):
438 key = key.replace('_', '-')
439 key = key[0].upper() + key[1:]
440 if key in self.setup_parse_map:
441 key = self.setup_parse_map[key]
442 return key
443
444 # Naive mapping of setup() arguments to PKG-INFO field names
445 for d in [info, non_literals]:
446 for key, value in list(d.items()):
447 if key is None:
448 continue
449 new_key = _map(key)
450 if new_key != key:
451 del d[key]
452 d[new_key] = value
453
454 return info, 'setuptools' in imported_modules, non_literals, extensions
455
456 def get_setup_args_info(self, setupscript='./setup.py'):
457 cmd = ['python3', setupscript]
458 info = {}
459 keys = set(self.bbvar_map.keys())
460 keys |= set(self.setuparg_list_fields)
461 keys |= set(self.setuparg_multi_line_values)
462 grouped_keys = itertools.groupby(keys, lambda k: (k in self.setuparg_list_fields, k in self.setuparg_multi_line_values))
463 for index, keys in grouped_keys:
464 if index == (True, False):
465 # Splitlines output for each arg as a list value
466 for key in keys:
467 arg = self.setuparg_map.get(key, key.lower())
468 try:
469 arg_info = self.run_command(cmd + ['--' + arg], cwd=os.path.dirname(setupscript))
470 except (OSError, subprocess.CalledProcessError):
471 pass
472 else:
473 info[key] = [l.rstrip() for l in arg_info.splitlines()]
474 elif index == (False, True):
475 # Entire output for each arg
476 for key in keys:
477 arg = self.setuparg_map.get(key, key.lower())
478 try:
479 arg_info = self.run_command(cmd + ['--' + arg], cwd=os.path.dirname(setupscript))
480 except (OSError, subprocess.CalledProcessError):
481 pass
482 else:
483 info[key] = arg_info
484 else:
485 info.update(self.get_setup_byline(list(keys), setupscript))
486 return info
487
488 def get_setup_byline(self, fields, setupscript='./setup.py'):
489 info = {}
490
491 cmd = ['python3', setupscript]
492 cmd.extend('--' + self.setuparg_map.get(f, f.lower()) for f in fields)
493 try:
494 info_lines = self.run_command(cmd, cwd=os.path.dirname(setupscript)).splitlines()
495 except (OSError, subprocess.CalledProcessError):
496 pass
497 else:
498 if len(fields) != len(info_lines):
499 logger.error('Mismatch between setup.py output lines and number of fields')
500 sys.exit(1)
501
502 for lineno, line in enumerate(info_lines):
503 line = line.rstrip()
504 info[fields[lineno]] = line
505 return info
506
507 def get_pkginfo(self, pkginfo_fn):
508 msg = email.message_from_file(open(pkginfo_fn, 'r'))
509 msginfo = {}
510 for field in msg.keys():
511 values = msg.get_all(field)
512 if len(values) == 1:
513 msginfo[field] = values[0]
514 else:
515 msginfo[field] = values
516 return msginfo
517
518 def scan_setup_python_deps(self, srctree, setup_info, setup_non_literals):
519 if 'Package-dir' in setup_info:
520 package_dir = setup_info['Package-dir']
521 else:
522 package_dir = {}
523
524 dist = setuptools.Distribution()
525
526 class PackageDir(setuptools.command.build_py.build_py):
527 def __init__(self, package_dir):
528 self.package_dir = package_dir
529 self.dist = dist
530 super().__init__(self.dist)
531
532 pd = PackageDir(package_dir)
533 to_scan = []
534 if not any(v in setup_non_literals for v in ['Py-modules', 'Scripts', 'Packages']):
535 if 'Py-modules' in setup_info:
536 for module in setup_info['Py-modules']:
537 try:
538 package, module = module.rsplit('.', 1)
539 except ValueError:
540 package, module = '.', module
541 module_path = os.path.join(pd.get_package_dir(package), module + '.py')
542 to_scan.append(module_path)
543
544 if 'Packages' in setup_info:
545 for package in setup_info['Packages']:
546 to_scan.append(pd.get_package_dir(package))
547
548 if 'Scripts' in setup_info:
549 to_scan.extend(setup_info['Scripts'])
550 else:
551 logger.info("Scanning the entire source tree, as one or more of the following setup keywords are non-literal: py_modules, scripts, packages.")
552
553 if not to_scan:
554 to_scan = ['.']
555
556 logger.info("Scanning paths for packages & dependencies: %s", ', '.join(to_scan))
557
558 provided_packages = self.parse_pkgdata_for_python_packages()
559 scanned_deps = self.scan_python_dependencies([os.path.join(srctree, p) for p in to_scan])
560 mapped_deps, unmapped_deps = set(self.base_pkgdeps), set()
561 for dep in scanned_deps:
562 mapped = provided_packages.get(dep)
563 if mapped:
564 logger.debug('Mapped %s to %s' % (dep, mapped))
565 mapped_deps.add(mapped)
566 else:
567 logger.debug('Could not map %s' % dep)
568 unmapped_deps.add(dep)
569 return mapped_deps, unmapped_deps
570
153 def process(self, srctree, classes, lines_before, lines_after, handled, extravalues): 571 def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
572
154 if 'buildsystem' in handled: 573 if 'buildsystem' in handled:
155 return False 574 return False
156 575
576 logger.debug("Trying setup.py parser")
577
157 # Check for non-zero size setup.py files 578 # Check for non-zero size setup.py files
158 setupfiles = RecipeHandler.checkfiles(srctree, ['setup.py']) 579 setupfiles = RecipeHandler.checkfiles(srctree, ['setup.py'])
159 for fn in setupfiles: 580 for fn in setupfiles:
160 if os.path.getsize(fn): 581 if os.path.getsize(fn):
161 break 582 break
162 else: 583 else:
584 logger.debug("No setup.py found")
163 return False 585 return False
164 586
165 # setup.py is always parsed to get at certain required information, such as 587 # setup.py is always parsed to get at certain required information, such as
@@ -193,6 +615,18 @@ class PythonRecipeHandler(RecipeHandler):
193 continue 615 continue
194 616
195 if line.startswith('['): 617 if line.startswith('['):
618 # PACKAGECONFIG must not contain expressions or whitespace
619 line = line.replace(" ", "")
620 line = line.replace(':', "")
621 line = line.replace('.', "-dot-")
622 line = line.replace('"', "")
623 line = line.replace('<', "-smaller-")
624 line = line.replace('>', "-bigger-")
625 line = line.replace('_', "-")
626 line = line.replace('(', "")
627 line = line.replace(')', "")
628 line = line.replace('!', "-not-")
629 line = line.replace('=', "-equals-")
196 current_feature = line[1:-1] 630 current_feature = line[1:-1]
197 elif current_feature: 631 elif current_feature:
198 extras_req[current_feature].append(line) 632 extras_req[current_feature].append(line)
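The chained replace() calls added above turn a requires.txt section name into something PACKAGECONFIG accepts (no whitespace or expression characters); a standalone sketch, where the helper name and the sample section are made up:

    # Sketch of the extras_require section-name sanitisation shown above.
    def sanitize(line):
        for old, new in ((" ", ""), (":", ""), (".", "-dot-"), ('"', ""),
                         ("<", "-smaller-"), (">", "-bigger-"), ("_", "-"),
                         ("(", ""), (")", ""), ("!", "-not-"), ("=", "-equals-")):
            line = line.replace(old, new)
        return line

    # A requires.txt section such as [test:python_version < "3.8"] becomes:
    print(sanitize('[test:python_version < "3.8"]')[1:-1])
    # -> testpython-version-smaller-3-dot-8
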
@@ -226,51 +660,16 @@ class PythonRecipeHandler(RecipeHandler):
226 660
227 if license_str: 661 if license_str:
228 for i, line in enumerate(lines_before): 662 for i, line in enumerate(lines_before):
229 if line.startswith('LICENSE = '): 663 if line.startswith('##LICENSE_PLACEHOLDER##'):
230 lines_before.insert(i, '# NOTE: License in setup.py/PKGINFO is: %s' % license_str) 664 lines_before.insert(i, '# NOTE: License in setup.py/PKGINFO is: %s' % license_str)
231 break 665 break
232 666
233 if 'Classifier' in info: 667 if 'Classifier' in info:
234 existing_licenses = info.get('License', '') 668 license = self.handle_classifier_license(info['Classifier'], info.get('License', ''))
235 licenses = [] 669 if license:
236 for classifier in info['Classifier']: 670 info['License'] = license
237 if classifier in self.classifier_license_map:
238 license = self.classifier_license_map[classifier]
239 if license == 'Apache' and 'Apache-2.0' in existing_licenses:
240 license = 'Apache-2.0'
241 elif license == 'GPL':
242 if 'GPL-2.0' in existing_licenses or 'GPLv2' in existing_licenses:
243 license = 'GPL-2.0'
244 elif 'GPL-3.0' in existing_licenses or 'GPLv3' in existing_licenses:
245 license = 'GPL-3.0'
246 elif license == 'LGPL':
247 if 'LGPL-2.1' in existing_licenses or 'LGPLv2.1' in existing_licenses:
248 license = 'LGPL-2.1'
249 elif 'LGPL-2.0' in existing_licenses or 'LGPLv2' in existing_licenses:
250 license = 'LGPL-2.0'
251 elif 'LGPL-3.0' in existing_licenses or 'LGPLv3' in existing_licenses:
252 license = 'LGPL-3.0'
253 licenses.append(license)
254
255 if licenses:
256 info['License'] = ' & '.join(licenses)
257 671
258 # Map PKG-INFO & setup.py fields to bitbake variables 672 self.map_info_to_bbvar(info, extravalues)
259 for field, values in info.items():
260 if field in self.excluded_fields:
261 continue
262
263 if field not in self.bbvar_map:
264 continue
265
266 if isinstance(values, str):
267 value = values
268 else:
269 value = ' '.join(str(v) for v in values if v)
270
271 bbvar = self.bbvar_map[field]
272 if bbvar not in extravalues and value:
273 extravalues[bbvar] = value
274 673
275 mapped_deps, unmapped_deps = self.scan_setup_python_deps(srctree, setup_info, setup_non_literals) 674 mapped_deps, unmapped_deps = self.scan_setup_python_deps(srctree, setup_info, setup_non_literals)
276 675
@@ -281,6 +680,7 @@ class PythonRecipeHandler(RecipeHandler):
281 lines_after.append('# The following configs & dependencies are from setuptools extras_require.') 680 lines_after.append('# The following configs & dependencies are from setuptools extras_require.')
282 lines_after.append('# These dependencies are optional, hence can be controlled via PACKAGECONFIG.') 681 lines_after.append('# These dependencies are optional, hence can be controlled via PACKAGECONFIG.')
283 lines_after.append('# The upstream names may not correspond exactly to bitbake package names.') 682 lines_after.append('# The upstream names may not correspond exactly to bitbake package names.')
683 lines_after.append('# The configs might not be correct, since PACKAGECONFIG does not support expressions as may be used in requires.txt - they are just replaced by text.')
284 lines_after.append('#') 684 lines_after.append('#')
285 lines_after.append('# Uncomment this line to enable all the optional features.') 685 lines_after.append('# Uncomment this line to enable all the optional features.')
286 lines_after.append('#PACKAGECONFIG ?= "{}"'.format(' '.join(k.lower() for k in extras_req))) 686 lines_after.append('#PACKAGECONFIG ?= "{}"'.format(' '.join(k.lower() for k in extras_req)))
@@ -326,275 +726,283 @@ class PythonRecipeHandler(RecipeHandler):
326 726
327 handled.append('buildsystem') 727 handled.append('buildsystem')
328 728
329 def get_pkginfo(self, pkginfo_fn): 729class PythonPyprojectTomlRecipeHandler(PythonRecipeHandler):
330 msg = email.message_from_file(open(pkginfo_fn, 'r')) 730 """Base class to support PEP517 and PEP518
331 msginfo = {} 731
332 for field in msg.keys(): 732 PEP517 https://peps.python.org/pep-0517/#source-trees
333 values = msg.get_all(field) 733 PEP518 https://peps.python.org/pep-0518/#build-system-table
334 if len(values) == 1: 734 """
335 msginfo[field] = values[0] 735 # bitbake currently supports the 4 following backends
336 else: 736 build_backend_map = {
337 msginfo[field] = values 737 "setuptools.build_meta": "python_setuptools_build_meta",
338 return msginfo 738 "poetry.core.masonry.api": "python_poetry_core",
739 "flit_core.buildapi": "python_flit_core",
740 "hatchling.build": "python_hatchling",
741 "maturin": "python_maturin",
742 "mesonpy": "python_mesonpy",
743 }
339 744
340 def parse_setup_py(self, setupscript='./setup.py'): 745 # setuptools.build_meta and flit declare project metadata into the "project" section of pyproject.toml
341 with codecs.open(setupscript) as f: 746 # according to PEP-621: https://packaging.python.org/en/latest/specifications/declaring-project-metadata/#declaring-project-metadata
342 info, imported_modules, non_literals, extensions = gather_setup_info(f) 747 # while poetry uses the "tool.poetry" section according to its official documentation: https://python-poetry.org/docs/pyproject/
748 # keys from "project" and "tool.poetry" sections are almost the same except for the HOMEPAGE which is "homepage" for tool.poetry
749 # and "Homepage" for "project" section. So keep both
750 bbvar_map = {
751 "name": "PN",
752 "version": "PV",
753 "Homepage": "HOMEPAGE",
754 "homepage": "HOMEPAGE",
755 "description": "SUMMARY",
756 "license": "LICENSE",
757 "dependencies": "RDEPENDS:${PN}",
758 "requires": "DEPENDS",
759 }
343 760
344 def _map(key): 761 replacements = [
345 key = key.replace('_', '-') 762 ("license", r" +$", ""),
346 key = key[0].upper() + key[1:] 763 ("license", r"^ +", ""),
347 if key in self.setup_parse_map: 764 ("license", r" ", "-"),
348 key = self.setup_parse_map[key] 765 ("license", r"^GNU-", ""),
349 return key 766 ("license", r"-[Ll]icen[cs]e(,?-[Vv]ersion)?", ""),
767 ("license", r"^UNKNOWN$", ""),
768 # Remove currently unhandled version numbers from these variables
769 ("requires", r"\[[^\]]+\]$", ""),
770 ("requires", r"^([^><= ]+).*", r"\1"),
771 ("dependencies", r"\[[^\]]+\]$", ""),
772 ("dependencies", r"^([^><= ]+).*", r"\1"),
773 ]
350 774
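The last four replacement rules above strip extras markers and version constraints from requirement strings so only bare package names remain; a short illustration with made-up requirement strings:

    # How the 'requires'/'dependencies' replacements reduce a requirement
    # string to a bare package name; the sample inputs are made up.
    import re

    for req in ("requests[socks]", "tomli>=1.1.0; python_version < '3.11'"):
        req = re.sub(r"\[[^\]]+\]$", "", req)       # drop extras such as [socks]
        req = re.sub(r"^([^><= ]+).*", r"\1", req)  # drop version constraints
        print(req)
    # -> requests
    # -> tomli
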
351 # Naive mapping of setup() arguments to PKG-INFO field names 775 excluded_native_pkgdeps = [
352 for d in [info, non_literals]: 776 # already provided by python_setuptools_build_meta.bbclass
353 for key, value in list(d.items()): 777 "python3-setuptools-native",
354 if key is None: 778 "python3-wheel-native",
355 continue 779 # already provided by python_poetry_core.bbclass
356 new_key = _map(key) 780 "python3-poetry-core-native",
357 if new_key != key: 781 # already provided by python_flit_core.bbclass
358 del d[key] 782 "python3-flit-core-native",
359 d[new_key] = value 783 # already provided by python_mesonpy
784 "python3-meson-python-native",
785 ]
360 786
361 return info, 'setuptools' in imported_modules, non_literals, extensions 787 # add here a list of known and often used packages and the corresponding bitbake package
788 known_deps_map = {
789 "setuptools": "python3-setuptools",
790 "wheel": "python3-wheel",
791 "poetry-core": "python3-poetry-core",
792 "flit_core": "python3-flit-core",
793 "setuptools-scm": "python3-setuptools-scm",
794 "hatchling": "python3-hatchling",
795 "hatch-vcs": "python3-hatch-vcs",
796 "meson-python" : "python3-meson-python",
797 }
362 798
363 def get_setup_args_info(self, setupscript='./setup.py'): 799 def __init__(self):
364 cmd = ['python3', setupscript] 800 pass
365 info = {}
366 keys = set(self.bbvar_map.keys())
367 keys |= set(self.setuparg_list_fields)
368 keys |= set(self.setuparg_multi_line_values)
369 grouped_keys = itertools.groupby(keys, lambda k: (k in self.setuparg_list_fields, k in self.setuparg_multi_line_values))
370 for index, keys in grouped_keys:
371 if index == (True, False):
372 # Splitlines output for each arg as a list value
373 for key in keys:
374 arg = self.setuparg_map.get(key, key.lower())
375 try:
376 arg_info = self.run_command(cmd + ['--' + arg], cwd=os.path.dirname(setupscript))
377 except (OSError, subprocess.CalledProcessError):
378 pass
379 else:
380 info[key] = [l.rstrip() for l in arg_info.splitlines()]
381 elif index == (False, True):
382 # Entire output for each arg
383 for key in keys:
384 arg = self.setuparg_map.get(key, key.lower())
385 try:
386 arg_info = self.run_command(cmd + ['--' + arg], cwd=os.path.dirname(setupscript))
387 except (OSError, subprocess.CalledProcessError):
388 pass
389 else:
390 info[key] = arg_info
391 else:
392 info.update(self.get_setup_byline(list(keys), setupscript))
393 return info
394 801
395 def get_setup_byline(self, fields, setupscript='./setup.py'): 802 def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
396 info = {} 803 info = {}
804 metadata = {}
397 805
398 cmd = ['python3', setupscript] 806 if 'buildsystem' in handled:
399 cmd.extend('--' + self.setuparg_map.get(f, f.lower()) for f in fields) 807 return False
400 try:
401 info_lines = self.run_command(cmd, cwd=os.path.dirname(setupscript)).splitlines()
402 except (OSError, subprocess.CalledProcessError):
403 pass
404 else:
405 if len(fields) != len(info_lines):
406 logger.error('Mismatch between setup.py output lines and number of fields')
407 sys.exit(1)
408
409 for lineno, line in enumerate(info_lines):
410 line = line.rstrip()
411 info[fields[lineno]] = line
412 return info
413
414 def apply_info_replacements(self, info):
415 for variable, search, replace in self.replacements:
416 if variable not in info:
417 continue
418
419 def replace_value(search, replace, value):
420 if replace is None:
421 if re.search(search, value):
422 return None
423 else:
424 new_value = re.sub(search, replace, value)
425 if value != new_value:
426 return new_value
427 return value
428
429 value = info[variable]
430 if isinstance(value, str):
431 new_value = replace_value(search, replace, value)
432 if new_value is None:
433 del info[variable]
434 elif new_value != value:
435 info[variable] = new_value
436 elif hasattr(value, 'items'):
437 for dkey, dvalue in list(value.items()):
438 new_list = []
439 for pos, a_value in enumerate(dvalue):
440 new_value = replace_value(search, replace, a_value)
441 if new_value is not None and new_value != value:
442 new_list.append(new_value)
443
444 if value != new_list:
445 value[dkey] = new_list
446 else:
447 new_list = []
448 for pos, a_value in enumerate(value):
449 new_value = replace_value(search, replace, a_value)
450 if new_value is not None and new_value != value:
451 new_list.append(new_value)
452
453 if value != new_list:
454 info[variable] = new_list
455
456 def scan_setup_python_deps(self, srctree, setup_info, setup_non_literals):
457 if 'Package-dir' in setup_info:
458 package_dir = setup_info['Package-dir']
459 else:
460 package_dir = {}
461
462 class PackageDir(distutils.command.build_py.build_py):
463 def __init__(self, package_dir):
464 self.package_dir = package_dir
465
466 pd = PackageDir(package_dir)
467 to_scan = []
468 if not any(v in setup_non_literals for v in ['Py-modules', 'Scripts', 'Packages']):
469 if 'Py-modules' in setup_info:
470 for module in setup_info['Py-modules']:
471 try:
472 package, module = module.rsplit('.', 1)
473 except ValueError:
474 package, module = '.', module
475 module_path = os.path.join(pd.get_package_dir(package), module + '.py')
476 to_scan.append(module_path)
477 808
478 if 'Packages' in setup_info: 809 logger.debug("Trying pyproject.toml parser")
479 for package in setup_info['Packages']:
480 to_scan.append(pd.get_package_dir(package))
481 810
482 if 'Scripts' in setup_info: 811 # Check for non-zero size pyproject.toml files
483 to_scan.extend(setup_info['Scripts']) 812 setupfiles = RecipeHandler.checkfiles(srctree, ["pyproject.toml"])
813 for fn in setupfiles:
814 if os.path.getsize(fn):
815 break
484 else: 816 else:
485 logger.info("Scanning the entire source tree, as one or more of the following setup keywords are non-literal: py_modules, scripts, packages.") 817 logger.debug("No pyproject.toml found")
486 818 return False
487 if not to_scan:
488 to_scan = ['.']
489
490 logger.info("Scanning paths for packages & dependencies: %s", ', '.join(to_scan))
491 819
492 provided_packages = self.parse_pkgdata_for_python_packages() 820 setupscript = os.path.join(srctree, "pyproject.toml")
493 scanned_deps = self.scan_python_dependencies([os.path.join(srctree, p) for p in to_scan])
494 mapped_deps, unmapped_deps = set(self.base_pkgdeps), set()
495 for dep in scanned_deps:
496 mapped = provided_packages.get(dep)
497 if mapped:
498 logger.debug('Mapped %s to %s' % (dep, mapped))
499 mapped_deps.add(mapped)
500 else:
501 logger.debug('Could not map %s' % dep)
502 unmapped_deps.add(dep)
503 return mapped_deps, unmapped_deps
504 821
505 def scan_python_dependencies(self, paths):
506 deps = set()
507 try: 822 try:
508 dep_output = self.run_command(['pythondeps', '-d'] + paths) 823 try:
509 except (OSError, subprocess.CalledProcessError): 824 import tomllib
510 pass 825 except ImportError:
511 else: 826 try:
512 for line in dep_output.splitlines(): 827 import tomli as tomllib
513 line = line.rstrip() 828 except ImportError:
514 dep, filename = line.split('\t', 1) 829 logger.error("Neither 'tomllib' nor 'tomli' could be imported, cannot scan pyproject.toml.")
515 if filename.endswith('/setup.py'): 830 return False
516 continue 831
517 deps.add(dep) 832 try:
833 with open(setupscript, "rb") as f:
834 config = tomllib.load(f)
835 except Exception:
836 logger.exception("Failed to parse pyproject.toml")
837 return False
838
839 build_backend = config["build-system"]["build-backend"]
840 if build_backend in self.build_backend_map:
841 classes.append(self.build_backend_map[build_backend])
842 else:
843 logger.error(
844 "Unsupported build-backend: %s, cannot use pyproject.toml. Will try to use legacy setup.py"
845 % build_backend
846 )
847 return False
518 848
519 try: 849 licfile = ""
520 provides_output = self.run_command(['pythondeps', '-p'] + paths)
521 except (OSError, subprocess.CalledProcessError):
522 pass
523 else:
524 provides_lines = (l.rstrip() for l in provides_output.splitlines())
525 provides = set(l for l in provides_lines if l and l != 'setup')
526 deps -= provides
527 850
528 return deps 851 if build_backend == "poetry.core.masonry.api":
852 if "tool" in config and "poetry" in config["tool"]:
853 metadata = config["tool"]["poetry"]
854 else:
855 if "project" in config:
856 metadata = config["project"]
857
858 if metadata:
859 for field, values in metadata.items():
860 if field == "license":
861 # For setuptools.build_meta and flit, license is a table
862 # but for poetry license is a string
863 # for hatchling, both table (jsonschema) and string (iniconfig) have been used
864 if build_backend == "poetry.core.masonry.api":
865 value = values
866 else:
867 value = values.get("text", "")
868 if not value:
869 licfile = values.get("file", "")
870 continue
871 elif field == "dependencies" and build_backend == "poetry.core.masonry.api":
872 # For poetry backend, "dependencies" section looks like:
873 # [tool.poetry.dependencies]
874 # requests = "^2.13.0"
875 # requests = { version = "^2.13.0", source = "private" }
876 # See https://python-poetry.org/docs/master/pyproject/#dependencies-and-dependency-groups for more details
877 # This class doesn't handle versions anyway, so we just get the dependency names here and construct a list
878 value = []
879 for k in values.keys():
880 value.append(k)
881 elif isinstance(values, dict):
882 for k, v in values.items():
883 info[k] = v
884 continue
885 else:
886 value = values
529 887
530 def parse_pkgdata_for_python_packages(self): 888 info[field] = value
531 suffixes = [t[0] for t in imp.get_suffixes()]
532 pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')
533 889
534 ldata = tinfoil.config_data.createCopy() 890 # Grab the license value before applying replacements
535 bb.parse.handle('classes/python3-dir.bbclass', ldata, True) 891 license_str = info.get("license", "").strip()
536 python_sitedir = ldata.getVar('PYTHON_SITEPACKAGES_DIR')
537 892
538 dynload_dir = os.path.join(os.path.dirname(python_sitedir), 'lib-dynload') 893 if license_str:
539 python_dirs = [python_sitedir + os.sep, 894 for i, line in enumerate(lines_before):
540 os.path.join(os.path.dirname(python_sitedir), 'dist-packages') + os.sep, 895 if line.startswith("##LICENSE_PLACEHOLDER##"):
541 os.path.dirname(python_sitedir) + os.sep] 896 lines_before.insert(
542 packages = {} 897 i, "# NOTE: License in pyproject.toml is: %s" % license_str
543 for pkgdatafile in glob.glob('{}/runtime/*'.format(pkgdata_dir)): 898 )
544 files_info = None
545 with open(pkgdatafile, 'r') as f:
546 for line in f.readlines():
547 field, value = line.split(': ', 1)
548 if field.startswith('FILES_INFO'):
549 files_info = ast.literal_eval(value)
550 break 899 break
551 else:
552 continue
553 900
554 for fn in files_info: 901 info["requires"] = config["build-system"]["requires"]
555 for suffix in suffixes: 902
556 if fn.endswith(suffix): 903 self.apply_info_replacements(info)
557 break 904
558 else: 905 if "classifiers" in info:
559 continue 906 license = self.handle_classifier_license(
907 info["classifiers"], info.get("license", "")
908 )
909 if license:
910 if licfile:
911 lines = []
912 md5value = bb.utils.md5_file(os.path.join(srctree, licfile))
913 lines.append('LICENSE = "%s"' % license)
914 lines.append(
915 'LIC_FILES_CHKSUM = "file://%s;md5=%s"'
916 % (licfile, md5value)
917 )
918 lines.append("")
919
920 # Replace the placeholder so we get the values in the right place in the recipe file
921 try:
922 pos = lines_before.index("##LICENSE_PLACEHOLDER##")
923 except ValueError:
924 pos = -1
925 if pos == -1:
926 lines_before.extend(lines)
927 else:
928 lines_before[pos : pos + 1] = lines
560 929
561 if fn.startswith(dynload_dir + os.sep): 930 handled.append(("license", [license, licfile, md5value]))
562 if '/.debug/' in fn: 931 else:
563 continue 932 info["license"] = license
564 base = os.path.basename(fn)
565 provided = base.split('.', 1)[0]
566 packages[provided] = os.path.basename(pkgdatafile)
567 continue
568 933
569 for python_dir in python_dirs: 934 provided_packages = self.parse_pkgdata_for_python_packages()
570 if fn.startswith(python_dir): 935 provided_packages.update(self.known_deps_map)
571 relpath = fn[len(python_dir):] 936 native_mapped_deps, native_unmapped_deps = set(), set()
572 relstart, _, relremaining = relpath.partition(os.sep) 937 mapped_deps, unmapped_deps = set(), set()
573 if relstart.endswith('.egg'):
574 relpath = relremaining
575 base, _ = os.path.splitext(relpath)
576 938
577 if '/.debug/' in base: 939 if "requires" in info:
578 continue 940 for require in info["requires"]:
579 if os.path.basename(base) == '__init__': 941 mapped = provided_packages.get(require)
580 base = os.path.dirname(base)
581 base = base.replace(os.sep + os.sep, os.sep)
582 provided = base.replace(os.sep, '.')
583 packages[provided] = os.path.basename(pkgdatafile)
584 return packages
585 942
586 @classmethod 943 if mapped:
587 def run_command(cls, cmd, **popenargs): 944 logger.debug("Mapped %s to %s" % (require, mapped))
588 if 'stderr' not in popenargs: 945 native_mapped_deps.add(mapped)
589 popenargs['stderr'] = subprocess.STDOUT 946 else:
590 try: 947 logger.debug("Could not map %s" % require)
591 return subprocess.check_output(cmd, **popenargs).decode('utf-8') 948 native_unmapped_deps.add(require)
592 except OSError as exc: 949
593 logger.error('Unable to run `{}`: {}', ' '.join(cmd), exc) 950 info.pop("requires")
594 raise 951
595 except subprocess.CalledProcessError as exc: 952 if native_mapped_deps != set():
596 logger.error('Unable to run `{}`: {}', ' '.join(cmd), exc.output) 953 native_mapped_deps = {
597 raise 954 item + "-native" for item in native_mapped_deps
955 }
956 native_mapped_deps -= set(self.excluded_native_pkgdeps)
957 if native_mapped_deps != set():
958 info["requires"] = " ".join(sorted(native_mapped_deps))
959
960 if native_unmapped_deps:
961 lines_after.append("")
962 lines_after.append(
963 "# WARNING: We were unable to map the following python package/module"
964 )
965 lines_after.append(
966 "# dependencies to the bitbake packages which include them:"
967 )
968 lines_after.extend(
969 "# {}".format(d) for d in sorted(native_unmapped_deps)
970 )
971
972 if "dependencies" in info:
973 for dependency in info["dependencies"]:
974 mapped = provided_packages.get(dependency)
975 if mapped:
976 logger.debug("Mapped %s to %s" % (dependency, mapped))
977 mapped_deps.add(mapped)
978 else:
979 logger.debug("Could not map %s" % dependency)
980 unmapped_deps.add(dependency)
981
982 info.pop("dependencies")
983
984 if mapped_deps != set():
985 # join the mapped names into a space-separated value (a single check suffices)
986 info["dependencies"] = " ".join(sorted(mapped_deps))
987
988 if unmapped_deps:
989 lines_after.append("")
990 lines_after.append(
991 "# WARNING: We were unable to map the following python package/module"
992 )
993 lines_after.append(
994 "# runtime dependencies to the bitbake packages which include them:"
995 )
996 lines_after.extend(
997 "# {}".format(d) for d in sorted(unmapped_deps)
998 )
999
1000 self.map_info_to_bbvar(info, extravalues)
1001
1002 handled.append("buildsystem")
1003 except Exception:
1004 logger.exception("Failed to correctly handle pyproject.toml, falling back to another method")
1005 return False
598 1006
599 1007
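For reference, a minimal standalone sketch of the backend detection done by PythonPyprojectTomlRecipeHandler.process() above; the inline TOML document describes a hypothetical project and only a subset of the backend map is shown:

    # Sketch of pyproject.toml backend detection and bbclass mapping.
    import tomllib  # Python >= 3.11; the handler falls back to 'tomli' on older hosts

    PYPROJECT = """
    [build-system]
    requires = ["flit_core >=3.2"]
    build-backend = "flit_core.buildapi"
    """

    build_backend_map = {
        "setuptools.build_meta": "python_setuptools_build_meta",
        "poetry.core.masonry.api": "python_poetry_core",
        "flit_core.buildapi": "python_flit_core",
    }

    config = tomllib.loads(PYPROJECT)
    backend = config["build-system"]["build-backend"]
    # The generated recipe inherits the bbclass mapped from the declared backend
    print(build_backend_map.get(backend, "unsupported"))  # -> python_flit_core
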
600def gather_setup_info(fileobj): 1008def gather_setup_info(fileobj):
@@ -710,5 +1118,7 @@ def has_non_literals(value):
710 1118
711 1119
712def register_recipe_handlers(handlers): 1120def register_recipe_handlers(handlers):
713 # We need to make sure this is ahead of the makefile fallback handler 1121 # We need to make sure these are ahead of the makefile fallback handler
714 handlers.append((PythonRecipeHandler(), 70)) 1122 # and the pyproject.toml handler ahead of the setup.py handler
1123 handlers.append((PythonPyprojectTomlRecipeHandler(), 75))
1124 handlers.append((PythonSetupPyRecipeHandler(), 70))
diff --git a/scripts/lib/recipetool/create_go.py b/scripts/lib/recipetool/create_go.py
new file mode 100644
index 0000000000..4b1fa39d13
--- /dev/null
+++ b/scripts/lib/recipetool/create_go.py
@@ -0,0 +1,174 @@
1# Recipe creation tool - go support plugin
2#
3# The code is based on golang internals. See the affected
4# methods for further reference and information.
5#
6# Copyright (C) 2023 Weidmueller GmbH & Co KG
7# Author: Lukas Funke <lukas.funke@weidmueller.com>
8#
9# SPDX-License-Identifier: GPL-2.0-only
10#
11
12
13from recipetool.create import RecipeHandler, handle_license_vars
14
15import bb.utils
16import json
17import logging
18import os
19import re
20import subprocess
21import sys
22import tempfile
23
24
25logger = logging.getLogger('recipetool')
26
27tinfoil = None
28
29
30def tinfoil_init(instance):
31 global tinfoil
32 tinfoil = instance
33
34
35
36class GoRecipeHandler(RecipeHandler):
37 """Class to handle the go recipe creation"""
38
39 @staticmethod
40 def __ensure_go():
41 """Check if the 'go' command is available in the recipes"""
42 recipe = "go-native"
43 if not tinfoil.recipes_parsed:
44 tinfoil.parse_recipes()
45 try:
46 rd = tinfoil.parse_recipe(recipe)
47 except bb.providers.NoProvider:
48 bb.error(
49 "Nothing provides '%s' which is required for the build" % (recipe))
50 bb.note(
51 "You will likely need to add a layer that provides '%s'" % (recipe))
52 return None
53
54 bindir = rd.getVar('STAGING_BINDIR_NATIVE')
55 gopath = os.path.join(bindir, 'go')
56
57 if not os.path.exists(gopath):
58 tinfoil.build_targets(recipe, 'addto_recipe_sysroot')
59
60 if not os.path.exists(gopath):
61 logger.error(
62 '%s required to process specified source, but %s did not seem to populate it' % ('go', recipe))
63 return None
64
65 return bindir
66
67 def process(self, srctree, classes, lines_before,
68 lines_after, handled, extravalues):
69
70 if 'buildsystem' in handled:
71 return False
72
73 files = RecipeHandler.checkfiles(srctree, ['go.mod'])
74 if not files:
75 return False
76
77 go_bindir = self.__ensure_go()
78 if not go_bindir:
79 sys.exit(14)
80
81 handled.append('buildsystem')
82 classes.append("go-mod")
83
84 # Use go-mod-update-modules to set the full SRC_URI and LICENSE
85 classes.append("go-mod-update-modules")
86 extravalues["run_tasks"] = "update_modules"
87
88 with tempfile.TemporaryDirectory(prefix="go-mod-") as tmp_mod_dir:
89 env = dict(os.environ)
90 env["PATH"] += f":{go_bindir}"
91 env['GOMODCACHE'] = tmp_mod_dir
92
93 stdout = subprocess.check_output(["go", "mod", "edit", "-json"], cwd=srctree, env=env, text=True)
94 go_mod = json.loads(stdout)
95 go_import = re.sub(r'/v([0-9]+)$', '', go_mod['Module']['Path'])
96
97 localfilesdir = tempfile.mkdtemp(prefix='recipetool-go-')
98 extravalues.setdefault('extrafiles', {})
99
100 # Write the stub ${BPN}-licenses.inc and ${BPN}-go-mods.inc files
101 basename = "{pn}-licenses.inc"
102 filename = os.path.join(localfilesdir, basename)
103 with open(filename, "w") as f:
104 f.write("# FROM RECIPETOOL\n")
105 extravalues['extrafiles'][f"../{basename}"] = filename
106
107 basename = "{pn}-go-mods.inc"
108 filename = os.path.join(localfilesdir, basename)
109 with open(filename, "w") as f:
110 f.write("# FROM RECIPETOOL\n")
111 extravalues['extrafiles'][f"../{basename}"] = filename
112
113 # Do generic license handling
114 d = bb.data.createCopy(tinfoil.config_data)
115 handle_license_vars(srctree, lines_before, handled, extravalues, d)
116 self.__rewrite_lic_vars(lines_before)
117
118 self.__rewrite_src_uri(lines_before)
119
120 lines_before.append('require ${BPN}-licenses.inc')
121 lines_before.append('require ${BPN}-go-mods.inc')
122 lines_before.append(f'GO_IMPORT = "{go_import}"')
123
124 def __update_lines_before(self, updated, newlines, lines_before):
125 if updated:
126 del lines_before[:]
127 for line in newlines:
128 # Hack to avoid newlines that edit_metadata inserts
129 if line.endswith('\n'):
130 line = line[:-1]
131 lines_before.append(line)
132 return updated
133
134 def __rewrite_lic_vars(self, lines_before):
135 def varfunc(varname, origvalue, op, newlines):
136 import urllib.parse
137 if varname == 'LIC_FILES_CHKSUM':
138 new_licenses = []
139 licenses = origvalue.split('\\')
140 for license in licenses:
141 if not license:
142 logger.warning("No license file was detected for the main module!")
143 # the license list of the main recipe must be empty
144 # this can happen for example in case of CLOSED license
145 # Fall through to complete recipe generation
146 continue
147 license = license.strip()
148 uri, chksum = license.split(';', 1)
149 url = urllib.parse.urlparse(uri)
150 new_uri = os.path.join(
151 url.scheme + "://", "src", "${GO_IMPORT}", url.netloc + url.path) + ";" + chksum
152 new_licenses.append(new_uri)
153
154 return new_licenses, None, -1, True
155 return origvalue, None, 0, True
156
157 updated, newlines = bb.utils.edit_metadata(
158 lines_before, ['LIC_FILES_CHKSUM'], varfunc)
159 return self.__update_lines_before(updated, newlines, lines_before)
160
161 def __rewrite_src_uri(self, lines_before):
162
163 def varfunc(varname, origvalue, op, newlines):
164 if varname == 'SRC_URI':
165 src_uri = ['git://${GO_IMPORT};protocol=https;nobranch=1;destsuffix=${GO_SRCURI_DESTSUFFIX}']
166 return src_uri, None, -1, True
167 return origvalue, None, 0, True
168
169 updated, newlines = bb.utils.edit_metadata(lines_before, ['SRC_URI'], varfunc)
170 return self.__update_lines_before(updated, newlines, lines_before)
171
172
173def register_recipe_handlers(handlers):
174 handlers.append((GoRecipeHandler(), 60))
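A standalone sketch of how the go handler above derives GO_IMPORT: 'go mod edit -json' reports the module path, and any major-version suffix (/v2, /v3, ...) is stripped. The module path below is hypothetical, so a go binary is not needed to run the sketch:

    # Sketch of the GO_IMPORT derivation from 'go mod edit -json' output.
    import json
    import re

    go_mod_json = '{"Module": {"Path": "github.com/example/project/v2"}}'
    go_mod = json.loads(go_mod_json)
    go_import = re.sub(r'/v([0-9]+)$', '', go_mod['Module']['Path'])
    print(go_import)  # -> github.com/example/project
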
diff --git a/scripts/lib/recipetool/create_npm.py b/scripts/lib/recipetool/create_npm.py
index 3394a89970..8c4cdd5234 100644
--- a/scripts/lib/recipetool/create_npm.py
+++ b/scripts/lib/recipetool/create_npm.py
@@ -13,10 +13,11 @@ import sys
13import tempfile 13import tempfile
14import bb 14import bb
15from bb.fetch2.npm import NpmEnvironment 15from bb.fetch2.npm import NpmEnvironment
16from bb.fetch2.npm import npm_package
16from bb.fetch2.npmsw import foreach_dependencies 17from bb.fetch2.npmsw import foreach_dependencies
18from oe.license_finder import match_licenses, find_license_files
17from recipetool.create import RecipeHandler 19from recipetool.create import RecipeHandler
18from recipetool.create import get_license_md5sums 20from recipetool.create import generate_common_licenses_chksums
19from recipetool.create import guess_license
20from recipetool.create import split_pkg_licenses 21from recipetool.create import split_pkg_licenses
21logger = logging.getLogger('recipetool') 22logger = logging.getLogger('recipetool')
22 23
@@ -31,15 +32,6 @@ class NpmRecipeHandler(RecipeHandler):
31 """Class to handle the npm recipe creation""" 32 """Class to handle the npm recipe creation"""
32 33
33 @staticmethod 34 @staticmethod
34 def _npm_name(name):
35 """Generate a Yocto friendly npm name"""
36 name = re.sub("/", "-", name)
37 name = name.lower()
38 name = re.sub(r"[^\-a-z0-9]", "", name)
39 name = name.strip("-")
40 return name
41
42 @staticmethod
43 def _get_registry(lines): 35 def _get_registry(lines):
44 """Get the registry value from the 'npm://registry' url""" 36 """Get the registry value from the 'npm://registry' url"""
45 registry = None 37 registry = None
@@ -120,41 +112,71 @@ class NpmRecipeHandler(RecipeHandler):
120 """Return the extra license files and the list of packages""" 112 """Return the extra license files and the list of packages"""
121 licfiles = [] 113 licfiles = []
122 packages = {} 114 packages = {}
115 # Licenses from package.json will point to COMMON_LICENSE_DIR so we need
116 # to associate them explicitly with packages for split_pkg_licenses()
117 fallback_licenses = dict()
118
119 def _find_package_licenses(destdir):
120 """Either find license files, or use package.json metadata"""
121 def _get_licenses_from_package_json(package_json):
122 with open(os.path.join(srctree, package_json), "r") as f:
123 data = json.load(f)
124 if "license" in data:
125 licenses = data["license"].split(" ")
126 licenses = [license.strip("()") for license in licenses if license != "OR" and license != "AND"]
127 return [], licenses
128 else:
129 return [package_json], None
123 130
124 # Handle the parent package
125 packages["${PN}"] = ""
126
127 def _licfiles_append_fallback_readme_files(destdir):
128 """Append README files as fallback to license files if a license files is missing"""
129
130 fallback = True
131 readmes = []
132 basedir = os.path.join(srctree, destdir) 131 basedir = os.path.join(srctree, destdir)
133 for fn in os.listdir(basedir): 132 licfiles = find_license_files(basedir)
134 upper = fn.upper() 133 if len(licfiles) > 0:
135 if upper.startswith("README"): 134 return licfiles, None
136 fullpath = os.path.join(basedir, fn) 135 else:
137 readmes.append(fullpath) 136 # A license wasn't found in the package directory, so we'll use the package.json metadata
138 if upper.startswith("COPYING") or "LICENCE" in upper or "LICENSE" in upper: 137 pkg_json = os.path.join(basedir, "package.json")
139 fallback = False 138 return _get_licenses_from_package_json(pkg_json)
140 if fallback: 139
141 for readme in readmes: 140 def _get_package_licenses(destdir, package):
142 licfiles.append(os.path.relpath(readme, srctree)) 141 (package_licfiles, package_licenses) = _find_package_licenses(destdir)
142 if package_licfiles:
143 licfiles.extend(package_licfiles)
144 else:
145 fallback_licenses[package] = package_licenses
143 146
144 # Handle the dependencies 147 # Handle the dependencies
145 def _handle_dependency(name, params, deptree): 148 def _handle_dependency(name, params, destdir):
146 suffix = "-".join([self._npm_name(dep) for dep in deptree]) 149 deptree = destdir.split('node_modules/')
147 destdirs = [os.path.join("node_modules", dep) for dep in deptree] 150 suffix = "-".join([npm_package(dep) for dep in deptree])
148 destdir = os.path.join(*destdirs) 151 packages["${PN}" + suffix] = destdir
149 packages["${PN}-" + suffix] = destdir 152 _get_package_licenses(destdir, "${PN}" + suffix)
150 _licfiles_append_fallback_readme_files(destdir)
151 153
152 with open(shrinkwrap_file, "r") as f: 154 with open(shrinkwrap_file, "r") as f:
153 shrinkwrap = json.load(f) 155 shrinkwrap = json.load(f)
154
155 foreach_dependencies(shrinkwrap, _handle_dependency, dev) 156 foreach_dependencies(shrinkwrap, _handle_dependency, dev)
156 157
157 return licfiles, packages 158 # Handle the parent package
159 packages["${PN}"] = ""
160 _get_package_licenses(srctree, "${PN}")
161
162 return licfiles, packages, fallback_licenses
163
164 # Handle the peer dependencies
165 def _handle_peer_dependency(self, shrinkwrap_file):
166 """Check if package has peer dependencies and show warning if it is the case"""
167 with open(shrinkwrap_file, "r") as f:
168 shrinkwrap = json.load(f)
169
170 packages = shrinkwrap.get("packages", {})
171 peer_deps = packages.get("", {}).get("peerDependencies", {})
172
173 for peer_dep in peer_deps:
174 peer_dep_yocto_name = npm_package(peer_dep)
175 bb.warn(peer_dep + " is a peer dependency of the current package. " +
176 "Please add this peer dependency to the RDEPENDS variable as %s and generate its recipe with devtool"
177 % peer_dep_yocto_name)
178
179
158 180
159 def process(self, srctree, classes, lines_before, lines_after, handled, extravalues): 181 def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
160 """Handle the npm recipe creation""" 182 """Handle the npm recipe creation"""
@@ -173,7 +195,7 @@ class NpmRecipeHandler(RecipeHandler):
173 if "name" not in data or "version" not in data: 195 if "name" not in data or "version" not in data:
174 return False 196 return False
175 197
176 extravalues["PN"] = self._npm_name(data["name"]) 198 extravalues["PN"] = npm_package(data["name"])
177 extravalues["PV"] = data["version"] 199 extravalues["PV"] = data["version"]
178 200
179 if "description" in data: 201 if "description" in data:
@@ -242,7 +264,7 @@ class NpmRecipeHandler(RecipeHandler):
242 value = origvalue.replace("version=" + data["version"], "version=${PV}") 264 value = origvalue.replace("version=" + data["version"], "version=${PV}")
243 value = value.replace("version=latest", "version=${PV}") 265 value = value.replace("version=latest", "version=${PV}")
244 values = [line.strip() for line in value.strip('\n').splitlines()] 266 values = [line.strip() for line in value.strip('\n').splitlines()]
245 if "dependencies" in shrinkwrap: 267 if "dependencies" in shrinkwrap.get("packages", {}).get("", {}):
246 values.append(url_recipe) 268 values.append(url_recipe)
247 return values, None, 4, False 269 return values, None, 4, False
248 270
@@ -258,40 +280,19 @@ class NpmRecipeHandler(RecipeHandler):
258 fetcher.unpack(srctree) 280 fetcher.unpack(srctree)
259 281
260 bb.note("Handling licences ...") 282 bb.note("Handling licences ...")
261 (licfiles, packages) = self._handle_licenses(srctree, shrinkwrap_file, dev) 283 (licfiles, packages, fallback_licenses) = self._handle_licenses(srctree, shrinkwrap_file, dev)
262 284 licvalues = match_licenses(licfiles, srctree, d)
263 def _guess_odd_license(licfiles): 285 split_pkg_licenses(licvalues, packages, lines_after, fallback_licenses)
264 import bb 286 fallback_licenses_flat = [license for sublist in fallback_licenses.values() for license in sublist]
265 287 extravalues["LIC_FILES_CHKSUM"] = generate_common_licenses_chksums(fallback_licenses_flat, d)
266 md5sums = get_license_md5sums(d, linenumbers=True) 288 extravalues["LICENSE"] = fallback_licenses_flat
267
268 chksums = []
269 licenses = []
270 for licfile in licfiles:
271 f = os.path.join(srctree, licfile)
272 md5value = bb.utils.md5_file(f)
273 (license, beginline, endline, md5) = md5sums.get(md5value,
274 (None, "", "", ""))
275 if not license:
276 license = "Unknown"
277 logger.info("Please add the following line for '%s' to a "
278 "'lib/recipetool/licenses.csv' and replace `Unknown`, "
279 "`X`, `Y` and `MD5` with the license, begin line, "
280 "end line and partial MD5 checksum:\n" \
281 "%s,Unknown,X,Y,MD5" % (licfile, md5value))
282 chksums.append("file://%s%s%s;md5=%s" % (licfile,
283 ";beginline=%s" % (beginline) if beginline else "",
284 ";endline=%s" % (endline) if endline else "",
285 md5 if md5 else md5value))
286 licenses.append((license, licfile, md5value))
287 return (licenses, chksums)
288
289 (licenses, extravalues["LIC_FILES_CHKSUM"]) = _guess_odd_license(licfiles)
290 split_pkg_licenses([*licenses, *guess_license(srctree, d)], packages, lines_after)
291 289
292 classes.append("npm") 290 classes.append("npm")
293 handled.append("buildsystem") 291 handled.append("buildsystem")
294 292
293 # Check if package has peer dependencies and inform the user
294 self._handle_peer_dependency(shrinkwrap_file)
295
295 return True 296 return True
296 297
297def register_recipe_handlers(handlers): 298def register_recipe_handlers(handlers):
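A standalone sketch of the package.json fallback used by _get_licenses_from_package_json() above when a module ships no license file; the sample metadata is made up:

    # Sketch of extracting license names from package.json metadata.
    import json

    package_json = '{"name": "example", "license": "(MIT OR Apache-2.0)"}'
    data = json.loads(package_json)
    licenses = [l.strip("()") for l in data["license"].split(" ")
                if l not in ("OR", "AND")]
    print(licenses)  # -> ['MIT', 'Apache-2.0']
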
diff --git a/scripts/lib/recipetool/licenses.csv b/scripts/lib/recipetool/licenses.csv
deleted file mode 100644
index 0d3fb0607b..0000000000
--- a/scripts/lib/recipetool/licenses.csv
+++ /dev/null
@@ -1,37 +0,0 @@
10636e73ff0215e8d672dc4c32c317bb3,GPLv2
212f884d2ae1ff87c09e5b7ccc2c4ca7e,GPLv2
318810669f13b87348459e611d31ab760,GPLv2
4252890d9eee26aab7b432e8b8a616475,LGPLv2
52d5025d4aa3495befef8f17206a5b0a1,LGPLv2.1
63214f080875748938ba060314b4f727d,LGPLv2
7385c55653886acac3821999a3ccd17b3,Artistic-1.0 | GPL-2.0
8393a5ca445f6965873eca0259a17f833,GPLv2
93b83ef96387f14655fc854ddc3c6bd57,Apache-2.0
103bf50002aefd002f49e7bb854063f7e7,LGPLv2
114325afd396febcb659c36b49533135d4,GPLv2
124fbd65380cdd255951079008b364516c,LGPLv2.1
1354c7042be62e169199200bc6477f04d1,BSD-3-Clause
1455ca817ccb7d5b5b66355690e9abc605,LGPLv2
1559530bdf33659b29e73d4adb9f9f6552,GPLv2
165f30f0716dfdd0d91eb439ebec522ec2,LGPLv2
176a6a8e020838b23406c81b19c1d46df6,LGPLv3
18751419260aa954499f7abaabaa882bbe,GPLv2
197fbc338309ac38fefcd64b04bb903e34,LGPLv2.1
208ca43cbc842c2336e835926c2166c28b,GPLv2
2194d55d512a9ba36caa9b7df079bae19f,GPLv2
229ac2e7cff1ddaf48b6eab6028f23ef88,GPLv2
239f604d8a4f8e74f4f5140845a21b6674,LGPLv2
24a6f89e2100d9b6cdffcea4f398e37343,LGPLv2.1
25b234ee4d69f5fce4486a80fdaf4a4263,GPLv2
26bbb461211a33b134d42ed5ee802b37ff,LGPLv2.1
27bfe1f75d606912a4111c90743d6c7325,MPL-1.1
28c93c0550bd3173f4504b2cbd8991e50b,GPLv2
29d32239bcb673463ab874e80d47fae504,GPLv3
30d7810fab7487fb0aad327b76f1be7cd7,GPLv2
31d8045f3b8f929c1cb29a1e3fd737b499,LGPLv2.1
32db979804f025cf55aabec7129cb671ed,LGPLv2
33eb723b61539feef013de476e68b5c50a,GPLv2
34ebb5c50ab7cab4baeffba14977030c07,GPLv2
35f27defe1e96c2e1ecd4e0c9be8967949,GPLv3
36fad9b3332be894bab9bc501572864b29,LGPLv2.1
37fbc093901857fcd118f065f900982c24,LGPLv2.1
diff --git a/scripts/lib/recipetool/setvar.py b/scripts/lib/recipetool/setvar.py
index f8e2ee75fb..b5ad335cae 100644
--- a/scripts/lib/recipetool/setvar.py
+++ b/scripts/lib/recipetool/setvar.py
@@ -49,6 +49,7 @@ def setvar(args):
49 for patch in patches: 49 for patch in patches:
50 for line in patch: 50 for line in patch:
51 sys.stdout.write(line) 51 sys.stdout.write(line)
52 tinfoil.modified_files()
52 return 0 53 return 0
53 54
54 55