Diffstat (limited to 'scripts/lib/recipetool')
 -rw-r--r--  scripts/lib/recipetool/append.py                    92
 -rw-r--r--  scripts/lib/recipetool/create.py                   311
 -rw-r--r--  scripts/lib/recipetool/create_buildsys.py           43
 -rw-r--r--  scripts/lib/recipetool/create_buildsys_python.py  1100
 -rw-r--r--  scripts/lib/recipetool/create_go.py                174
 -rw-r--r--  scripts/lib/recipetool/create_kmod.py                2
 -rw-r--r--  scripts/lib/recipetool/create_npm.py               103
 -rw-r--r--  scripts/lib/recipetool/setvar.py                     1
8 files changed, 1189 insertions(+), 637 deletions(-)
diff --git a/scripts/lib/recipetool/append.py b/scripts/lib/recipetool/append.py
index e9d52bb67b..041d79f162 100644
--- a/scripts/lib/recipetool/append.py
+++ b/scripts/lib/recipetool/append.py
@@ -18,6 +18,7 @@ import shutil
18import scriptutils 18import scriptutils
19import errno 19import errno
20from collections import defaultdict 20from collections import defaultdict
21import difflib
21 22
22logger = logging.getLogger('recipetool') 23logger = logging.getLogger('recipetool')
23 24
@@ -49,7 +50,7 @@ def find_target_file(targetpath, d, pkglist=None):
49 '/etc/group': '/etc/group should be managed through the useradd and extrausers classes', 50 '/etc/group': '/etc/group should be managed through the useradd and extrausers classes',
50 '/etc/shadow': '/etc/shadow should be managed through the useradd and extrausers classes', 51 '/etc/shadow': '/etc/shadow should be managed through the useradd and extrausers classes',
51 '/etc/gshadow': '/etc/gshadow should be managed through the useradd and extrausers classes', 52 '/etc/gshadow': '/etc/gshadow should be managed through the useradd and extrausers classes',
52 '${sysconfdir}/hostname': '${sysconfdir}/hostname contents should be set by setting hostname_pn-base-files = "value" in configuration',} 53 '${sysconfdir}/hostname': '${sysconfdir}/hostname contents should be set by setting hostname:pn-base-files = "value" in configuration',}
53 54
54 for pthspec, message in invalidtargets.items(): 55 for pthspec, message in invalidtargets.items():
55 if fnmatch.fnmatchcase(targetpath, d.expand(pthspec)): 56 if fnmatch.fnmatchcase(targetpath, d.expand(pthspec)):
@@ -72,15 +73,15 @@ def find_target_file(targetpath, d, pkglist=None):
72 # This does assume that PN comes before other values, but that's a fairly safe assumption 73 # This does assume that PN comes before other values, but that's a fairly safe assumption
73 for line in f: 74 for line in f:
74 if line.startswith('PN:'): 75 if line.startswith('PN:'):
75 pn = line.split(':', 1)[1].strip() 76 pn = line.split(': ', 1)[1].strip()
76 elif line.startswith('FILES_INFO:'): 77 elif line.startswith('FILES_INFO'):
77 val = line.split(':', 1)[1].strip() 78 val = line.split(': ', 1)[1].strip()
78 dictval = json.loads(val) 79 dictval = json.loads(val)
79 for fullpth in dictval.keys(): 80 for fullpth in dictval.keys():
80 if fnmatch.fnmatchcase(fullpth, targetpath): 81 if fnmatch.fnmatchcase(fullpth, targetpath):
81 recipes[targetpath].append(pn) 82 recipes[targetpath].append(pn)
82 elif line.startswith('pkg_preinst_') or line.startswith('pkg_postinst_'): 83 elif line.startswith('pkg_preinst:') or line.startswith('pkg_postinst:'):
83 scriptval = line.split(':', 1)[1].strip().encode('utf-8').decode('unicode_escape') 84 scriptval = line.split(': ', 1)[1].strip().encode('utf-8').decode('unicode_escape')
84 if 'update-alternatives --install %s ' % targetpath in scriptval: 85 if 'update-alternatives --install %s ' % targetpath in scriptval:
85 recipes[targetpath].append('?%s' % pn) 86 recipes[targetpath].append('?%s' % pn)
86 elif targetpath_re.search(scriptval): 87 elif targetpath_re.search(scriptval):
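The hunk above follows the switch to colon-based override syntax in pkgdata (FILES_INFO:<pkg>:, pkg_preinst:/pkg_postinst:) and now splits fields on ': '. As a rough, self-contained illustration of that parsing (the sample lines below are made up, not taken from real pkgdata):

    import json

    # Hypothetical pkgdata lines in the new override syntax (illustrative only)
    pkgdata_lines = [
        'PN: base-files',
        'FILES_INFO:base-files: {"/etc/hostname": 10, "/etc/profile": 734}',
    ]

    pn = None
    for line in pkgdata_lines:
        if line.startswith('PN:'):
            pn = line.split(': ', 1)[1].strip()
        elif line.startswith('FILES_INFO'):
            files_info = json.loads(line.split(': ', 1)[1].strip())
            for path in files_info:
                print('%s ships %s' % (pn, path))
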
@@ -100,7 +101,7 @@ def determine_file_source(targetpath, rd):
100 import oe.recipeutils 101 import oe.recipeutils
101 102
102 # See if it's in do_install for the recipe 103 # See if it's in do_install for the recipe
103 workdir = rd.getVar('WORKDIR') 104 unpackdir = rd.getVar('UNPACKDIR')
104 src_uri = rd.getVar('SRC_URI') 105 src_uri = rd.getVar('SRC_URI')
105 srcfile = '' 106 srcfile = ''
106 modpatches = [] 107 modpatches = []
@@ -112,9 +113,9 @@ def determine_file_source(targetpath, rd):
112 if not srcpath.startswith('/'): 113 if not srcpath.startswith('/'):
113 # Handle non-absolute path 114 # Handle non-absolute path
114 srcpath = os.path.abspath(os.path.join(rd.getVarFlag('do_install', 'dirs').split()[-1], srcpath)) 115 srcpath = os.path.abspath(os.path.join(rd.getVarFlag('do_install', 'dirs').split()[-1], srcpath))
115 if srcpath.startswith(workdir): 116 if srcpath.startswith(unpackdir):
116 # OK, now we have the source file name, look for it in SRC_URI 117 # OK, now we have the source file name, look for it in SRC_URI
117 workdirfile = os.path.relpath(srcpath, workdir) 118 workdirfile = os.path.relpath(srcpath, unpackdir)
118 # FIXME this is where we ought to have some code in the fetcher, because this is naive 119 # FIXME this is where we ought to have some code in the fetcher, because this is naive
119 for item in src_uri.split(): 120 for item in src_uri.split():
120 localpath = bb.fetch2.localpath(item, rd) 121 localpath = bb.fetch2.localpath(item, rd)
@@ -299,7 +300,10 @@ def appendfile(args):
299 if st.st_mode & stat.S_IXUSR: 300 if st.st_mode & stat.S_IXUSR:
300 perms = '0755' 301 perms = '0755'
301 install = {args.newfile: (args.targetpath, perms)} 302 install = {args.newfile: (args.targetpath, perms)}
302 oe.recipeutils.bbappend_recipe(rd, args.destlayer, {args.newfile: sourcepath}, install, wildcardver=args.wildcard_version, machine=args.machine) 303 if sourcepath:
304 sourcepath = os.path.basename(sourcepath)
305 oe.recipeutils.bbappend_recipe(rd, args.destlayer, {args.newfile: {'newname' : sourcepath}}, install, wildcardver=args.wildcard_version, machine=args.machine)
306 tinfoil.modified_files()
303 return 0 307 return 0
304 else: 308 else:
305 if alternative_pns: 309 if alternative_pns:
@@ -313,7 +317,7 @@ def appendsrc(args, files, rd, extralines=None):
313 import oe.recipeutils 317 import oe.recipeutils
314 318
315 srcdir = rd.getVar('S') 319 srcdir = rd.getVar('S')
316 workdir = rd.getVar('WORKDIR') 320 unpackdir = rd.getVar('UNPACKDIR')
317 321
318 import bb.fetch 322 import bb.fetch
319 simplified = {} 323 simplified = {}
@@ -327,35 +331,57 @@ def appendsrc(args, files, rd, extralines=None):
327 331
328 copyfiles = {} 332 copyfiles = {}
329 extralines = extralines or [] 333 extralines = extralines or []
334 params = []
330 for newfile, srcfile in files.items(): 335 for newfile, srcfile in files.items():
331 src_destdir = os.path.dirname(srcfile) 336 src_destdir = os.path.dirname(srcfile)
332 if not args.use_workdir: 337 if not args.use_workdir:
333 if rd.getVar('S') == rd.getVar('STAGING_KERNEL_DIR'): 338 if rd.getVar('S') == rd.getVar('STAGING_KERNEL_DIR'):
334 srcdir = os.path.join(workdir, 'git') 339 srcdir = os.path.join(unpackdir, rd.getVar('BB_GIT_DEFAULT_DESTSUFFIX'))
335 if not bb.data.inherits_class('kernel-yocto', rd): 340 if not bb.data.inherits_class('kernel-yocto', rd):
336 logger.warning('S == STAGING_KERNEL_DIR and non-kernel-yocto, unable to determine path to srcdir, defaulting to ${WORKDIR}/git') 341 logger.warning('S == STAGING_KERNEL_DIR and non-kernel-yocto, unable to determine path to srcdir, defaulting to ${UNPACKDIR}/${BB_GIT_DEFAULT_DESTSUFFIX}')
337 src_destdir = os.path.join(os.path.relpath(srcdir, workdir), src_destdir) 342 src_destdir = os.path.join(os.path.relpath(srcdir, unpackdir), src_destdir)
338 src_destdir = os.path.normpath(src_destdir) 343 src_destdir = os.path.normpath(src_destdir)
339 344
340 source_uri = 'file://{0}'.format(os.path.basename(srcfile))
341 if src_destdir and src_destdir != '.': 345 if src_destdir and src_destdir != '.':
342 source_uri += ';subdir={0}'.format(src_destdir) 346 params.append({'subdir': src_destdir})
343
344 simple = bb.fetch.URI(source_uri)
345 simple.params = {}
346 simple_str = str(simple)
347 if simple_str in simplified:
348 existing = simplified[simple_str]
349 if source_uri != existing:
350 logger.warning('{0!r} is already in SRC_URI, with different parameters: {1!r}, not adding'.format(source_uri, existing))
351 else:
352 logger.warning('{0!r} is already in SRC_URI, not adding'.format(source_uri))
353 else: 347 else:
354 extralines.append('SRC_URI += {0}'.format(source_uri)) 348 params.append({})
355 copyfiles[newfile] = srcfile 349
356 350 copyfiles[newfile] = {'newname' : os.path.basename(srcfile)}
357 oe.recipeutils.bbappend_recipe(rd, args.destlayer, copyfiles, None, wildcardver=args.wildcard_version, machine=args.machine, extralines=extralines) 351
358 352 dry_run_output = None
353 dry_run_outdir = None
354 if args.dry_run:
355 import tempfile
356 dry_run_output = tempfile.TemporaryDirectory(prefix='devtool')
357 dry_run_outdir = dry_run_output.name
358
359 appendfile, _ = oe.recipeutils.bbappend_recipe(rd, args.destlayer, copyfiles, None, wildcardver=args.wildcard_version, machine=args.machine, extralines=extralines, params=params,
360 redirect_output=dry_run_outdir, update_original_recipe=args.update_recipe)
361 if not appendfile:
362 return
363 if args.dry_run:
364 output = ''
365 appendfilename = os.path.basename(appendfile)
366 newappendfile = appendfile
367 if appendfile and os.path.exists(appendfile):
368 with open(appendfile, 'r') as f:
369 oldlines = f.readlines()
370 else:
371 appendfile = '/dev/null'
372 oldlines = []
373
374 with open(os.path.join(dry_run_outdir, appendfilename), 'r') as f:
375 newlines = f.readlines()
376 diff = difflib.unified_diff(oldlines, newlines, appendfile, newappendfile)
377 difflines = list(diff)
378 if difflines:
379 output += ''.join(difflines)
380 if output:
381 logger.info('Diff of changed files:\n%s' % output)
382 else:
383 logger.info('No changed files')
384 tinfoil.modified_files()
359 385
360def appendsrcfiles(parser, args): 386def appendsrcfiles(parser, args):
361 recipedata = _parse_recipe(args.recipe, tinfoil) 387 recipedata = _parse_recipe(args.recipe, tinfoil)
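The dry-run path added above renders the would-be bbappend into a temporary directory and prints a unified diff against whatever is on disk. A minimal stand-alone sketch of that difflib pattern, with made-up file contents in place of the real bbappend:

    import difflib

    # Hypothetical old/new bbappend contents standing in for the on-disk file
    # and the file written to the temporary dry-run output directory
    oldlines = ['SRC_URI += "file://defconfig"\n']
    newlines = ['SRC_URI += "file://defconfig"\n',
                'SRC_URI += "file://0001-fix.patch;subdir=src"\n']

    diff = difflib.unified_diff(oldlines, newlines,
                                'my-recipe.bbappend', 'my-recipe.bbappend')
    output = ''.join(diff)
    print(output if output else 'No changed files')
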
@@ -435,6 +461,8 @@ def register_commands(subparsers):
435 help='Create/update a bbappend to add or replace source files', 461 help='Create/update a bbappend to add or replace source files',
436 description='Creates a bbappend (or updates an existing one) to add or replace the specified file in the recipe sources, either those in WORKDIR or those in the source tree. This command lets you specify multiple files with a destination directory, so cannot specify the destination filename. See the `appendsrcfile` command for the other behavior.') 462 description='Creates a bbappend (or updates an existing one) to add or replace the specified file in the recipe sources, either those in WORKDIR or those in the source tree. This command lets you specify multiple files with a destination directory, so cannot specify the destination filename. See the `appendsrcfile` command for the other behavior.')
437 parser.add_argument('-D', '--destdir', help='Destination directory (relative to S or WORKDIR, defaults to ".")', default='', type=destination_path) 463 parser.add_argument('-D', '--destdir', help='Destination directory (relative to S or WORKDIR, defaults to ".")', default='', type=destination_path)
464 parser.add_argument('-u', '--update-recipe', help='Update recipe instead of creating (or updating) a bbapend file. DESTLAYER must contains the recipe to update', action='store_true')
465 parser.add_argument('-n', '--dry-run', help='Dry run mode', action='store_true')
438 parser.add_argument('files', nargs='+', metavar='FILE', help='File(s) to be added to the recipe sources (WORKDIR or S)', type=existing_path) 466 parser.add_argument('files', nargs='+', metavar='FILE', help='File(s) to be added to the recipe sources (WORKDIR or S)', type=existing_path)
439 parser.set_defaults(func=lambda a: appendsrcfiles(parser, a), parserecipes=True) 467 parser.set_defaults(func=lambda a: appendsrcfiles(parser, a), parserecipes=True)
440 468
@@ -442,6 +470,8 @@ def register_commands(subparsers):
442 parents=[common_src], 470 parents=[common_src],
443 help='Create/update a bbappend to add or replace a source file', 471 help='Create/update a bbappend to add or replace a source file',
444 description='Creates a bbappend (or updates an existing one) to add or replace the specified files in the recipe sources, either those in WORKDIR or those in the source tree. This command lets you specify the destination filename, not just destination directory, but only works for one file. See the `appendsrcfiles` command for the other behavior.') 472 description='Creates a bbappend (or updates an existing one) to add or replace the specified files in the recipe sources, either those in WORKDIR or those in the source tree. This command lets you specify the destination filename, not just destination directory, but only works for one file. See the `appendsrcfiles` command for the other behavior.')
473 parser.add_argument('-u', '--update-recipe', help='Update recipe instead of creating (or updating) a bbapend file. DESTLAYER must contains the recipe to update', action='store_true')
474 parser.add_argument('-n', '--dry-run', help='Dry run mode', action='store_true')
445 parser.add_argument('file', metavar='FILE', help='File to be added to the recipe sources (WORKDIR or S)', type=existing_path) 475 parser.add_argument('file', metavar='FILE', help='File to be added to the recipe sources (WORKDIR or S)', type=existing_path)
446 parser.add_argument('destfile', metavar='DESTFILE', nargs='?', help='Destination path (relative to S or WORKDIR, optional)', type=destination_path) 476 parser.add_argument('destfile', metavar='DESTFILE', nargs='?', help='Destination path (relative to S or WORKDIR, optional)', type=destination_path)
447 parser.set_defaults(func=lambda a: appendsrcfile(parser, a), parserecipes=True) 477 parser.set_defaults(func=lambda a: appendsrcfile(parser, a), parserecipes=True)
diff --git a/scripts/lib/recipetool/create.py b/scripts/lib/recipetool/create.py
index 566c75369a..ef0ba974a9 100644
--- a/scripts/lib/recipetool/create.py
+++ b/scripts/lib/recipetool/create.py
@@ -18,6 +18,8 @@ from urllib.parse import urlparse, urldefrag, urlsplit
18import hashlib 18import hashlib
19import bb.fetch2 19import bb.fetch2
20logger = logging.getLogger('recipetool') 20logger = logging.getLogger('recipetool')
21from oe.license import tidy_licenses
22from oe.license_finder import find_licenses
21 23
22tinfoil = None 24tinfoil = None
23plugins = None 25plugins = None
@@ -115,8 +117,8 @@ class RecipeHandler(object):
115 for line in f: 117 for line in f:
116 if line.startswith('PN:'): 118 if line.startswith('PN:'):
117 pn = line.split(':', 1)[-1].strip() 119 pn = line.split(':', 1)[-1].strip()
118 elif line.startswith('FILES_INFO:'): 120 elif line.startswith('FILES_INFO:%s:' % pkg):
119 val = line.split(':', 1)[1].strip() 121 val = line.split(': ', 1)[1].strip()
120 dictval = json.loads(val) 122 dictval = json.loads(val)
121 for fullpth in sorted(dictval): 123 for fullpth in sorted(dictval):
122 if fullpth.startswith(includedir) and fullpth.endswith('.h'): 124 if fullpth.startswith(includedir) and fullpth.endswith('.h'):
@@ -366,7 +368,7 @@ def supports_srcrev(uri):
366def reformat_git_uri(uri): 368def reformat_git_uri(uri):
367 '''Convert any http[s]://....git URI into git://...;protocol=http[s]''' 369 '''Convert any http[s]://....git URI into git://...;protocol=http[s]'''
368 checkuri = uri.split(';', 1)[0] 370 checkuri = uri.split(';', 1)[0]
369 if checkuri.endswith('.git') or '/git/' in checkuri or re.match('https?://github.com/[^/]+/[^/]+/?$', checkuri): 371 if checkuri.endswith('.git') or '/git/' in checkuri or re.match('https?://git(hub|lab).com/[^/]+/[^/]+/?$', checkuri):
370 # Appends scheme if the scheme is missing 372 # Appends scheme if the scheme is missing
371 if not '://' in uri: 373 if not '://' in uri:
372 uri = 'git://' + uri 374 uri = 'git://' + uri
@@ -423,6 +425,36 @@ def create_recipe(args):
423 storeTagName = '' 425 storeTagName = ''
424 pv_srcpv = False 426 pv_srcpv = False
425 427
428 handled = []
429 classes = []
430
431 # Find all plugins that want to register handlers
432 logger.debug('Loading recipe handlers')
433 raw_handlers = []
434 for plugin in plugins:
435 if hasattr(plugin, 'register_recipe_handlers'):
436 plugin.register_recipe_handlers(raw_handlers)
437 # Sort handlers by priority
438 handlers = []
439 for i, handler in enumerate(raw_handlers):
440 if isinstance(handler, tuple):
441 handlers.append((handler[0], handler[1], i))
442 else:
443 handlers.append((handler, 0, i))
444 handlers.sort(key=lambda item: (item[1], -item[2]), reverse=True)
445 for handler, priority, _ in handlers:
446 logger.debug('Handler: %s (priority %d)' % (handler.__class__.__name__, priority))
447 setattr(handler, '_devtool', args.devtool)
448 handlers = [item[0] for item in handlers]
449
450 fetchuri = None
451 for handler in handlers:
452 if hasattr(handler, 'process_url'):
453 ret = handler.process_url(args, classes, handled, extravalues)
454 if 'url' in handled and ret:
455 fetchuri = ret
456 break
457
426 if os.path.isfile(source): 458 if os.path.isfile(source):
427 source = 'file://%s' % os.path.abspath(source) 459 source = 'file://%s' % os.path.abspath(source)
428 460
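The block relocated above sorts recipe handlers by their registered priority (ties broken by registration order) and then lets each handler's process_url() claim the source URL; the first one to mark 'url' as handled and return a URI short-circuits the loop. A compact sketch of that ordering rule with stand-in (name, priority) pairs instead of real handler objects:

    # Stand-ins for what plugin register_recipe_handlers() would append
    raw_handlers = [('buildsys-handler', 0), ('pypi-handler', 1), ('npm-handler', 1)]

    handlers = []
    for i, handler in enumerate(raw_handlers):
        if isinstance(handler, tuple):
            handlers.append((handler[0], handler[1], i))
        else:
            handlers.append((handler, 0, i))
    # Highest priority first; for equal priority, earlier registration wins
    handlers.sort(key=lambda item: (item[1], -item[2]), reverse=True)
    print([name for name, _, _ in handlers])
    # ['pypi-handler', 'npm-handler', 'buildsys-handler']
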
@@ -431,11 +463,12 @@ def create_recipe(args):
431 if re.match(r'https?://github.com/[^/]+/[^/]+/archive/.+(\.tar\..*|\.zip)$', source): 463 if re.match(r'https?://github.com/[^/]+/[^/]+/archive/.+(\.tar\..*|\.zip)$', source):
432 logger.warning('github archive files are not guaranteed to be stable and may be re-generated over time. If the latter occurs, the checksums will likely change and the recipe will fail at do_fetch. It is recommended that you point to an actual commit or tag in the repository instead (using the repository URL in conjunction with the -S/--srcrev option).') 464 logger.warning('github archive files are not guaranteed to be stable and may be re-generated over time. If the latter occurs, the checksums will likely change and the recipe will fail at do_fetch. It is recommended that you point to an actual commit or tag in the repository instead (using the repository URL in conjunction with the -S/--srcrev option).')
433 # Fetch a URL 465 # Fetch a URL
434 fetchuri = reformat_git_uri(urldefrag(source)[0]) 466 if not fetchuri:
467 fetchuri = reformat_git_uri(urldefrag(source)[0])
435 if args.binary: 468 if args.binary:
436 # Assume the archive contains the directory structure verbatim 469 # Assume the archive contains the directory structure verbatim
437 # so we need to extract to a subdirectory 470 # so we need to extract to a subdirectory
438 fetchuri += ';subdir=${BP}' 471 fetchuri += ';subdir=${BPN}'
439 srcuri = fetchuri 472 srcuri = fetchuri
440 rev_re = re.compile(';rev=([^;]+)') 473 rev_re = re.compile(';rev=([^;]+)')
441 res = rev_re.search(srcuri) 474 res = rev_re.search(srcuri)
@@ -478,6 +511,9 @@ def create_recipe(args):
478 storeTagName = params['tag'] 511 storeTagName = params['tag']
479 params['nobranch'] = '1' 512 params['nobranch'] = '1'
480 del params['tag'] 513 del params['tag']
514 # Assume 'master' branch if not set
515 if scheme in ['git', 'gitsm'] and 'branch' not in params and 'nobranch' not in params:
516 params['branch'] = 'master'
481 fetchuri = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params)) 517 fetchuri = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params))
482 518
483 tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR') 519 tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR')
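The addition above makes the URL rewriting assume a 'master' branch for git/gitsm URLs that carry neither a branch nor a nobranch parameter. A tiny sketch of that defaulting rule using a plain parameter dict rather than bb.fetch2.decodeurl()/encodeurl():

    def default_git_branch(scheme, params):
        # Mirrors the patch: assume 'master' when no branch information is given
        if scheme in ('git', 'gitsm') and 'branch' not in params and 'nobranch' not in params:
            params['branch'] = 'master'
        return params

    print(default_git_branch('git', {'protocol': 'https'}))
    # {'protocol': 'https', 'branch': 'master'}
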
@@ -494,7 +530,7 @@ def create_recipe(args):
494 if ftmpdir and args.keep_temp: 530 if ftmpdir and args.keep_temp:
495 logger.info('Fetch temp directory is %s' % ftmpdir) 531 logger.info('Fetch temp directory is %s' % ftmpdir)
496 532
497 dirlist = scriptutils.filter_src_subdirs(srctree) 533 dirlist = os.listdir(srctree)
498 logger.debug('Directory listing (excluding filtered out):\n %s' % '\n '.join(dirlist)) 534 logger.debug('Directory listing (excluding filtered out):\n %s' % '\n '.join(dirlist))
499 if len(dirlist) == 1: 535 if len(dirlist) == 1:
500 singleitem = os.path.join(srctree, dirlist[0]) 536 singleitem = os.path.join(srctree, dirlist[0])
@@ -527,10 +563,9 @@ def create_recipe(args):
527 # Remove HEAD reference point and drop remote prefix 563 # Remove HEAD reference point and drop remote prefix
528 get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')] 564 get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')]
529 if 'master' in get_branch: 565 if 'master' in get_branch:
530 # If it is master, we do not need to append 'branch=master' as this is default.
531 # Even with the case where get_branch has multiple objects, if 'master' is one 566 # Even with the case where get_branch has multiple objects, if 'master' is one
532 # of them, we should default take from 'master' 567 # of them, we should default take from 'master'
533 srcbranch = '' 568 srcbranch = 'master'
534 elif len(get_branch) == 1: 569 elif len(get_branch) == 1:
535 # If 'master' isn't in get_branch and get_branch contains only ONE object, then store result into 'srcbranch' 570 # If 'master' isn't in get_branch and get_branch contains only ONE object, then store result into 'srcbranch'
536 srcbranch = get_branch[0] 571 srcbranch = get_branch[0]
@@ -543,8 +578,8 @@ def create_recipe(args):
543 # Since we might have a value in srcbranch, we need to 578 # Since we might have a value in srcbranch, we need to
544 # recontruct the srcuri to include 'branch' in params. 579 # recontruct the srcuri to include 'branch' in params.
545 scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(srcuri) 580 scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(srcuri)
546 if srcbranch: 581 if scheme in ['git', 'gitsm']:
547 params['branch'] = srcbranch 582 params['branch'] = srcbranch or 'master'
548 583
549 if storeTagName and scheme in ['git', 'gitsm']: 584 if storeTagName and scheme in ['git', 'gitsm']:
550 # Check srcrev using tag and check validity of the tag 585 # Check srcrev using tag and check validity of the tag
@@ -603,8 +638,7 @@ def create_recipe(args):
603 splitline = line.split() 638 splitline = line.split()
604 if len(splitline) > 1: 639 if len(splitline) > 1:
605 if splitline[0] == 'origin' and scriptutils.is_src_url(splitline[1]): 640 if splitline[0] == 'origin' and scriptutils.is_src_url(splitline[1]):
606 srcuri = reformat_git_uri(splitline[1]) 641 srcuri = reformat_git_uri(splitline[1]) + ';branch=master'
607 srcsubdir = 'git'
608 break 642 break
609 643
610 if args.src_subdir: 644 if args.src_subdir:
@@ -636,8 +670,6 @@ def create_recipe(args):
636 # We'll come back and replace this later in handle_license_vars() 670 # We'll come back and replace this later in handle_license_vars()
637 lines_before.append('##LICENSE_PLACEHOLDER##') 671 lines_before.append('##LICENSE_PLACEHOLDER##')
638 672
639 handled = []
640 classes = []
641 673
642 # FIXME This is kind of a hack, we probably ought to be using bitbake to do this 674 # FIXME This is kind of a hack, we probably ought to be using bitbake to do this
643 pn = None 675 pn = None
@@ -675,8 +707,10 @@ def create_recipe(args):
675 if not srcuri: 707 if not srcuri:
676 lines_before.append('# No information for SRC_URI yet (only an external source tree was specified)') 708 lines_before.append('# No information for SRC_URI yet (only an external source tree was specified)')
677 lines_before.append('SRC_URI = "%s"' % srcuri) 709 lines_before.append('SRC_URI = "%s"' % srcuri)
710 shown_checksums = ["%ssum" % s for s in bb.fetch2.SHOWN_CHECKSUM_LIST]
678 for key, value in sorted(checksums.items()): 711 for key, value in sorted(checksums.items()):
679 lines_before.append('SRC_URI[%s] = "%s"' % (key, value)) 712 if key in shown_checksums:
713 lines_before.append('SRC_URI[%s] = "%s"' % (key, value))
680 if srcuri and supports_srcrev(srcuri): 714 if srcuri and supports_srcrev(srcuri):
681 lines_before.append('') 715 lines_before.append('')
682 lines_before.append('# Modify these as desired') 716 lines_before.append('# Modify these as desired')
@@ -688,7 +722,7 @@ def create_recipe(args):
688 srcpvprefix = 'svnr' 722 srcpvprefix = 'svnr'
689 else: 723 else:
690 srcpvprefix = scheme 724 srcpvprefix = scheme
691 lines_before.append('PV = "%s+%s${SRCPV}"' % (realpv or '1.0', srcpvprefix)) 725 lines_before.append('PV = "%s+%s"' % (realpv or '1.0', srcpvprefix))
692 pv_srcpv = True 726 pv_srcpv = True
693 if not args.autorev and srcrev == '${AUTOREV}': 727 if not args.autorev and srcrev == '${AUTOREV}':
694 if os.path.exists(os.path.join(srctree, '.git')): 728 if os.path.exists(os.path.join(srctree, '.git')):
@@ -702,7 +736,7 @@ def create_recipe(args):
702 if srcsubdir and not args.binary: 736 if srcsubdir and not args.binary:
703 # (for binary packages we explicitly specify subdir= when fetching to 737 # (for binary packages we explicitly specify subdir= when fetching to
704 # match the default value of S, so we don't need to set it in that case) 738 # match the default value of S, so we don't need to set it in that case)
705 lines_before.append('S = "${WORKDIR}/%s"' % srcsubdir) 739 lines_before.append('S = "${UNPACKDIR}/%s"' % srcsubdir)
706 lines_before.append('') 740 lines_before.append('')
707 741
708 if pkgarch: 742 if pkgarch:
@@ -710,31 +744,12 @@ def create_recipe(args):
710 lines_after.append('') 744 lines_after.append('')
711 745
712 if args.binary: 746 if args.binary:
713 lines_after.append('INSANE_SKIP_${PN} += "already-stripped"') 747 lines_after.append('INSANE_SKIP:${PN} += "already-stripped"')
714 lines_after.append('') 748 lines_after.append('')
715 749
716 if args.npm_dev: 750 if args.npm_dev:
717 extravalues['NPM_INSTALL_DEV'] = 1 751 extravalues['NPM_INSTALL_DEV'] = 1
718 752
719 # Find all plugins that want to register handlers
720 logger.debug('Loading recipe handlers')
721 raw_handlers = []
722 for plugin in plugins:
723 if hasattr(plugin, 'register_recipe_handlers'):
724 plugin.register_recipe_handlers(raw_handlers)
725 # Sort handlers by priority
726 handlers = []
727 for i, handler in enumerate(raw_handlers):
728 if isinstance(handler, tuple):
729 handlers.append((handler[0], handler[1], i))
730 else:
731 handlers.append((handler, 0, i))
732 handlers.sort(key=lambda item: (item[1], -item[2]), reverse=True)
733 for handler, priority, _ in handlers:
734 logger.debug('Handler: %s (priority %d)' % (handler.__class__.__name__, priority))
735 setattr(handler, '_devtool', args.devtool)
736 handlers = [item[0] for item in handlers]
737
738 # Apply the handlers 753 # Apply the handlers
739 if args.binary: 754 if args.binary:
740 classes.append('bin_package') 755 classes.append('bin_package')
@@ -743,9 +758,14 @@ def create_recipe(args):
743 for handler in handlers: 758 for handler in handlers:
744 handler.process(srctree_use, classes, lines_before, lines_after, handled, extravalues) 759 handler.process(srctree_use, classes, lines_before, lines_after, handled, extravalues)
745 760
761 # native and nativesdk classes are special and must be inherited last
762 # If present, put them at the end of the classes list
763 classes.sort(key=lambda c: c in ("native", "nativesdk"))
764
746 extrafiles = extravalues.pop('extrafiles', {}) 765 extrafiles = extravalues.pop('extrafiles', {})
747 extra_pn = extravalues.pop('PN', None) 766 extra_pn = extravalues.pop('PN', None)
748 extra_pv = extravalues.pop('PV', None) 767 extra_pv = extravalues.pop('PV', None)
768 run_tasks = extravalues.pop('run_tasks', "").split()
749 769
750 if extra_pv and not realpv: 770 if extra_pv and not realpv:
751 realpv = extra_pv 771 realpv = extra_pv
@@ -806,7 +826,8 @@ def create_recipe(args):
806 extraoutdir = os.path.join(os.path.dirname(outfile), pn) 826 extraoutdir = os.path.join(os.path.dirname(outfile), pn)
807 bb.utils.mkdirhier(extraoutdir) 827 bb.utils.mkdirhier(extraoutdir)
808 for destfn, extrafile in extrafiles.items(): 828 for destfn, extrafile in extrafiles.items():
809 shutil.move(extrafile, os.path.join(extraoutdir, destfn)) 829 fn = destfn.format(pn=pn, pv=realpv)
830 shutil.move(extrafile, os.path.join(extraoutdir, fn))
810 831
811 lines = lines_before 832 lines = lines_before
812 lines_before = [] 833 lines_before = []
@@ -821,7 +842,7 @@ def create_recipe(args):
821 line = line.replace(realpv, '${PV}') 842 line = line.replace(realpv, '${PV}')
822 if pn: 843 if pn:
823 line = line.replace(pn, '${BPN}') 844 line = line.replace(pn, '${BPN}')
824 if line == 'S = "${WORKDIR}/${BPN}-${PV}"': 845 if line == 'S = "${UNPACKDIR}/${BPN}-${PV}"' or 'tmp-recipetool-' in line:
825 skipblank = True 846 skipblank = True
826 continue 847 continue
827 elif line.startswith('SRC_URI = '): 848 elif line.startswith('SRC_URI = '):
@@ -867,8 +888,10 @@ def create_recipe(args):
867 outlines.append('') 888 outlines.append('')
868 outlines.extend(lines_after) 889 outlines.extend(lines_after)
869 890
891 outlines = [ line.rstrip('\n') +"\n" for line in outlines]
892
870 if extravalues: 893 if extravalues:
871 _, outlines = oe.recipeutils.patch_recipe_lines(outlines, extravalues, trailing_newline=False) 894 _, outlines = oe.recipeutils.patch_recipe_lines(outlines, extravalues, trailing_newline=True)
872 895
873 if args.extract_to: 896 if args.extract_to:
874 scriptutils.git_convert_standalone_clone(srctree) 897 scriptutils.git_convert_standalone_clone(srctree)
@@ -884,7 +907,7 @@ def create_recipe(args):
884 log_info_cond('Source extracted to %s' % args.extract_to, args.devtool) 907 log_info_cond('Source extracted to %s' % args.extract_to, args.devtool)
885 908
886 if outfile == '-': 909 if outfile == '-':
887 sys.stdout.write('\n'.join(outlines) + '\n') 910 sys.stdout.write(''.join(outlines) + '\n')
888 else: 911 else:
889 with open(outfile, 'w') as f: 912 with open(outfile, 'w') as f:
890 lastline = None 913 lastline = None
@@ -892,9 +915,14 @@ def create_recipe(args):
892 if not lastline and not line: 915 if not lastline and not line:
893 # Skip extra blank lines 916 # Skip extra blank lines
894 continue 917 continue
895 f.write('%s\n' % line) 918 f.write('%s' % line)
896 lastline = line 919 lastline = line
897 log_info_cond('Recipe %s has been created; further editing may be required to make it fully functional' % outfile, args.devtool) 920 log_info_cond('Recipe %s has been created; further editing may be required to make it fully functional' % outfile, args.devtool)
921 tinfoil.modified_files()
922
923 for task in run_tasks:
924 logger.info("Running task %s" % task)
925 tinfoil.build_file_sync(outfile, task)
898 926
899 if tempsrc: 927 if tempsrc:
900 if args.keep_temp: 928 if args.keep_temp:
@@ -917,23 +945,32 @@ def split_value(value):
917 else: 945 else:
918 return value 946 return value
919 947
948def fixup_license(value):
949 # Ensure licenses with OR starts and ends with brackets
950 if '|' in value:
951 return '(' + value + ')'
952 return value
953
920def handle_license_vars(srctree, lines_before, handled, extravalues, d): 954def handle_license_vars(srctree, lines_before, handled, extravalues, d):
921 lichandled = [x for x in handled if x[0] == 'license'] 955 lichandled = [x for x in handled if x[0] == 'license']
922 if lichandled: 956 if lichandled:
923 # Someone else has already handled the license vars, just return their value 957 # Someone else has already handled the license vars, just return their value
924 return lichandled[0][1] 958 return lichandled[0][1]
925 959
926 licvalues = guess_license(srctree, d) 960 licvalues = find_licenses(srctree, d)
927 licenses = [] 961 licenses = []
928 lic_files_chksum = [] 962 lic_files_chksum = []
929 lic_unknown = [] 963 lic_unknown = []
930 lines = [] 964 lines = []
931 if licvalues: 965 if licvalues:
932 for licvalue in licvalues: 966 for licvalue in licvalues:
933 if not licvalue[0] in licenses: 967 license = licvalue[0]
934 licenses.append(licvalue[0]) 968 lics = tidy_licenses(fixup_license(license))
969 lics = [lic for lic in lics if lic not in licenses]
970 if len(lics):
971 licenses.extend(lics)
935 lic_files_chksum.append('file://%s;md5=%s' % (licvalue[1], licvalue[2])) 972 lic_files_chksum.append('file://%s;md5=%s' % (licvalue[1], licvalue[2]))
936 if licvalue[0] == 'Unknown': 973 if license == 'Unknown':
937 lic_unknown.append(licvalue[1]) 974 lic_unknown.append(licvalue[1])
938 if lic_unknown: 975 if lic_unknown:
939 lines.append('#') 976 lines.append('#')
@@ -942,9 +979,7 @@ def handle_license_vars(srctree, lines_before, handled, extravalues, d):
942 for licfile in lic_unknown: 979 for licfile in lic_unknown:
943 lines.append('# %s' % licfile) 980 lines.append('# %s' % licfile)
944 981
945 extra_license = split_value(extravalues.pop('LICENSE', [])) 982 extra_license = tidy_licenses(extravalues.pop('LICENSE', ''))
946 if '&' in extra_license:
947 extra_license.remove('&')
948 if extra_license: 983 if extra_license:
949 if licenses == ['Unknown']: 984 if licenses == ['Unknown']:
950 licenses = extra_license 985 licenses = extra_license
@@ -985,7 +1020,7 @@ def handle_license_vars(srctree, lines_before, handled, extravalues, d):
985 lines.append('# instead of &. If there is any doubt, check the accompanying documentation') 1020 lines.append('# instead of &. If there is any doubt, check the accompanying documentation')
986 lines.append('# to determine which situation is applicable.') 1021 lines.append('# to determine which situation is applicable.')
987 1022
988 lines.append('LICENSE = "%s"' % ' & '.join(licenses)) 1023 lines.append('LICENSE = "%s"' % ' & '.join(sorted(licenses, key=str.casefold)))
989 lines.append('LIC_FILES_CHKSUM = "%s"' % ' \\\n '.join(lic_files_chksum)) 1024 lines.append('LIC_FILES_CHKSUM = "%s"' % ' \\\n '.join(lic_files_chksum))
990 lines.append('') 1025 lines.append('')
991 1026
@@ -1002,166 +1037,15 @@ def handle_license_vars(srctree, lines_before, handled, extravalues, d):
1002 handled.append(('license', licvalues)) 1037 handled.append(('license', licvalues))
1003 return licvalues 1038 return licvalues
1004 1039
1005def get_license_md5sums(d, static_only=False):
1006 import bb.utils
1007 md5sums = {}
1008 if not static_only:
1009 # Gather md5sums of license files in common license dir
1010 commonlicdir = d.getVar('COMMON_LICENSE_DIR')
1011 for fn in os.listdir(commonlicdir):
1012 md5value = bb.utils.md5_file(os.path.join(commonlicdir, fn))
1013 md5sums[md5value] = fn
1014 # The following were extracted from common values in various recipes
1015 # (double checking the license against the license file itself, not just
1016 # the LICENSE value in the recipe)
1017 md5sums['94d55d512a9ba36caa9b7df079bae19f'] = 'GPLv2'
1018 md5sums['b234ee4d69f5fce4486a80fdaf4a4263'] = 'GPLv2'
1019 md5sums['59530bdf33659b29e73d4adb9f9f6552'] = 'GPLv2'
1020 md5sums['0636e73ff0215e8d672dc4c32c317bb3'] = 'GPLv2'
1021 md5sums['eb723b61539feef013de476e68b5c50a'] = 'GPLv2'
1022 md5sums['751419260aa954499f7abaabaa882bbe'] = 'GPLv2'
1023 md5sums['393a5ca445f6965873eca0259a17f833'] = 'GPLv2'
1024 md5sums['12f884d2ae1ff87c09e5b7ccc2c4ca7e'] = 'GPLv2'
1025 md5sums['8ca43cbc842c2336e835926c2166c28b'] = 'GPLv2'
1026 md5sums['ebb5c50ab7cab4baeffba14977030c07'] = 'GPLv2'
1027 md5sums['c93c0550bd3173f4504b2cbd8991e50b'] = 'GPLv2'
1028 md5sums['9ac2e7cff1ddaf48b6eab6028f23ef88'] = 'GPLv2'
1029 md5sums['4325afd396febcb659c36b49533135d4'] = 'GPLv2'
1030 md5sums['18810669f13b87348459e611d31ab760'] = 'GPLv2'
1031 md5sums['d7810fab7487fb0aad327b76f1be7cd7'] = 'GPLv2' # the Linux kernel's COPYING file
1032 md5sums['bbb461211a33b134d42ed5ee802b37ff'] = 'LGPLv2.1'
1033 md5sums['7fbc338309ac38fefcd64b04bb903e34'] = 'LGPLv2.1'
1034 md5sums['4fbd65380cdd255951079008b364516c'] = 'LGPLv2.1'
1035 md5sums['2d5025d4aa3495befef8f17206a5b0a1'] = 'LGPLv2.1'
1036 md5sums['fbc093901857fcd118f065f900982c24'] = 'LGPLv2.1'
1037 md5sums['a6f89e2100d9b6cdffcea4f398e37343'] = 'LGPLv2.1'
1038 md5sums['d8045f3b8f929c1cb29a1e3fd737b499'] = 'LGPLv2.1'
1039 md5sums['fad9b3332be894bab9bc501572864b29'] = 'LGPLv2.1'
1040 md5sums['3bf50002aefd002f49e7bb854063f7e7'] = 'LGPLv2'
1041 md5sums['9f604d8a4f8e74f4f5140845a21b6674'] = 'LGPLv2'
1042 md5sums['5f30f0716dfdd0d91eb439ebec522ec2'] = 'LGPLv2'
1043 md5sums['55ca817ccb7d5b5b66355690e9abc605'] = 'LGPLv2'
1044 md5sums['252890d9eee26aab7b432e8b8a616475'] = 'LGPLv2'
1045 md5sums['3214f080875748938ba060314b4f727d'] = 'LGPLv2'
1046 md5sums['db979804f025cf55aabec7129cb671ed'] = 'LGPLv2'
1047 md5sums['d32239bcb673463ab874e80d47fae504'] = 'GPLv3'
1048 md5sums['f27defe1e96c2e1ecd4e0c9be8967949'] = 'GPLv3'
1049 md5sums['6a6a8e020838b23406c81b19c1d46df6'] = 'LGPLv3'
1050 md5sums['3b83ef96387f14655fc854ddc3c6bd57'] = 'Apache-2.0'
1051 md5sums['385c55653886acac3821999a3ccd17b3'] = 'Artistic-1.0 | GPL-2.0' # some perl modules
1052 md5sums['54c7042be62e169199200bc6477f04d1'] = 'BSD-3-Clause'
1053 md5sums['bfe1f75d606912a4111c90743d6c7325'] = 'MPL-1.1'
1054 return md5sums
1055
1056def crunch_license(licfile):
1057 '''
1058 Remove non-material text from a license file and then check
1059 its md5sum against a known list. This works well for licenses
1060 which contain a copyright statement, but is also a useful way
1061 to handle people's insistence upon reformatting the license text
1062 slightly (with no material difference to the text of the
1063 license).
1064 '''
1065
1066 import oe.utils
1067
1068 # Note: these are carefully constructed!
1069 license_title_re = re.compile(r'^\(?(#+ *)?(The )?.{1,10} [Ll]icen[sc]e( \(.{1,10}\))?\)?:?$')
1070 license_statement_re = re.compile(r'^(This (project|software) is( free software)? (released|licen[sc]ed)|(Released|Licen[cs]ed)) under the .{1,10} [Ll]icen[sc]e:?$')
1071 copyright_re = re.compile('^(#+)? *Copyright .*$')
1072
1073 crunched_md5sums = {}
1074 # The following two were gleaned from the "forever" npm package
1075 crunched_md5sums['0a97f8e4cbaf889d6fa51f84b89a79f6'] = 'ISC'
1076 crunched_md5sums['eecf6429523cbc9693547cf2db790b5c'] = 'MIT'
1077 # https://github.com/vasi/pixz/blob/master/LICENSE
1078 crunched_md5sums['2f03392b40bbe663597b5bd3cc5ebdb9'] = 'BSD-2-Clause'
1079 # https://github.com/waffle-gl/waffle/blob/master/LICENSE.txt
1080 crunched_md5sums['e72e5dfef0b1a4ca8a3d26a60587db66'] = 'BSD-2-Clause'
1081 # https://github.com/spigwitmer/fakeds1963s/blob/master/LICENSE
1082 crunched_md5sums['8be76ac6d191671f347ee4916baa637e'] = 'GPLv2'
1083 # https://github.com/datto/dattobd/blob/master/COPYING
1084 # http://git.savannah.gnu.org/cgit/freetype/freetype2.git/tree/docs/GPLv2.TXT
1085 crunched_md5sums['1d65c5ad4bf6489f85f4812bf08ae73d'] = 'GPLv2'
1086 # http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt
1087 # http://git.neil.brown.name/?p=mdadm.git;a=blob;f=COPYING;h=d159169d1050894d3ea3b98e1c965c4058208fe1;hb=HEAD
1088 crunched_md5sums['fb530f66a7a89ce920f0e912b5b66d4b'] = 'GPLv2'
1089 # https://github.com/gkos/nrf24/blob/master/COPYING
1090 crunched_md5sums['7b6aaa4daeafdfa6ed5443fd2684581b'] = 'GPLv2'
1091 # https://github.com/josch09/resetusb/blob/master/COPYING
1092 crunched_md5sums['8b8ac1d631a4d220342e83bcf1a1fbc3'] = 'GPLv3'
1093 # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv2.1
1094 crunched_md5sums['2ea316ed973ae176e502e2297b574bb3'] = 'LGPLv2.1'
1095 # unixODBC-2.3.4 COPYING
1096 crunched_md5sums['1daebd9491d1e8426900b4fa5a422814'] = 'LGPLv2.1'
1097 # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv3
1098 crunched_md5sums['2ebfb3bb49b9a48a075cc1425e7f4129'] = 'LGPLv3'
1099 # https://raw.githubusercontent.com/eclipse/mosquitto/v1.4.14/epl-v10
1100 crunched_md5sums['efe2cb9a35826992b9df68224e3c2628'] = 'EPL-1.0'
1101 # https://raw.githubusercontent.com/eclipse/mosquitto/v1.4.14/edl-v10
1102 crunched_md5sums['0a9c78c0a398d1bbce4a166757d60387'] = 'EDL-1.0'
1103 lictext = []
1104 with open(licfile, 'r', errors='surrogateescape') as f:
1105 for line in f:
1106 # Drop opening statements
1107 if copyright_re.match(line):
1108 continue
1109 elif license_title_re.match(line):
1110 continue
1111 elif license_statement_re.match(line):
1112 continue
1113 # Squash spaces, and replace smart quotes, double quotes
1114 # and backticks with single quotes
1115 line = oe.utils.squashspaces(line.strip())
1116 line = line.replace(u"\u2018", "'").replace(u"\u2019", "'").replace(u"\u201c","'").replace(u"\u201d", "'").replace('"', '\'').replace('`', '\'')
1117 if line:
1118 lictext.append(line)
1119
1120 m = hashlib.md5()
1121 try:
1122 m.update(' '.join(lictext).encode('utf-8'))
1123 md5val = m.hexdigest()
1124 except UnicodeEncodeError:
1125 md5val = None
1126 lictext = ''
1127 license = crunched_md5sums.get(md5val, None)
1128 return license, md5val, lictext
1129
1130def guess_license(srctree, d):
1131 import bb
1132 md5sums = get_license_md5sums(d)
1133
1134 licenses = []
1135 licspecs = ['*LICEN[CS]E*', 'COPYING*', '*[Ll]icense*', 'LEGAL*', '[Ll]egal*', '*GPL*', 'README.lic*', 'COPYRIGHT*', '[Cc]opyright*', 'e[dp]l-v10']
1136 licfiles = []
1137 for root, dirs, files in os.walk(srctree):
1138 for fn in files:
1139 for spec in licspecs:
1140 if fnmatch.fnmatch(fn, spec):
1141 fullpath = os.path.join(root, fn)
1142 if not fullpath in licfiles:
1143 licfiles.append(fullpath)
1144 for licfile in licfiles:
1145 md5value = bb.utils.md5_file(licfile)
1146 license = md5sums.get(md5value, None)
1147 if not license:
1148 license, crunched_md5, lictext = crunch_license(licfile)
1149 if not license:
1150 license = 'Unknown'
1151 licenses.append((license, os.path.relpath(licfile, srctree), md5value))
1152
1153 # FIXME should we grab at least one source file with a license header and add that too?
1154
1155 return licenses
1156
1157def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn='${PN}'): 1040def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn='${PN}'):
1158 """ 1041 """
1159 Given a list of (license, path, md5sum) as returned by guess_license(), 1042 Given a list of (license, path, md5sum) as returned by match_licenses(),
1160 a dict of package name to path mappings, write out a set of 1043 a dict of package name to path mappings, write out a set of
1161 package-specific LICENSE values. 1044 package-specific LICENSE values.
1162 """ 1045 """
1163 pkglicenses = {pn: []} 1046 pkglicenses = {pn: []}
1164 for license, licpath, _ in licvalues: 1047 for license, licpath, _ in licvalues:
1048 license = fixup_license(license)
1165 for pkgname, pkgpath in packages.items(): 1049 for pkgname, pkgpath in packages.items():
1166 if licpath.startswith(pkgpath + '/'): 1050 if licpath.startswith(pkgpath + '/'):
1167 if pkgname in pkglicenses: 1051 if pkgname in pkglicenses:
@@ -1174,13 +1058,24 @@ def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn
1174 pkglicenses[pn].append(license) 1058 pkglicenses[pn].append(license)
1175 outlicenses = {} 1059 outlicenses = {}
1176 for pkgname in packages: 1060 for pkgname in packages:
1177 license = ' '.join(list(set(pkglicenses.get(pkgname, ['Unknown'])))) or 'Unknown' 1061 # Assume AND operator between license files
1178 if license == 'Unknown' and pkgname in fallback_licenses: 1062 license = ' & '.join(list(set(pkglicenses.get(pkgname, ['Unknown'])))) or 'Unknown'
1063 if license == 'Unknown' and fallback_licenses and pkgname in fallback_licenses:
1179 license = fallback_licenses[pkgname] 1064 license = fallback_licenses[pkgname]
1180 outlines.append('LICENSE_%s = "%s"' % (pkgname, license)) 1065 licenses = tidy_licenses(license)
1181 outlicenses[pkgname] = license.split() 1066 license = ' & '.join(licenses)
1067 outlines.append('LICENSE:%s = "%s"' % (pkgname, license))
1068 outlicenses[pkgname] = licenses
1182 return outlicenses 1069 return outlicenses
1183 1070
1071def generate_common_licenses_chksums(common_licenses, d):
1072 lic_files_chksums = []
1073 for license in tidy_licenses(common_licenses):
1074 licfile = '${COMMON_LICENSE_DIR}/' + license
1075 md5value = bb.utils.md5_file(d.expand(licfile))
1076 lic_files_chksums.append('file://%s;md5=%s' % (licfile, md5value))
1077 return lic_files_chksums
1078
1184def read_pkgconfig_provides(d): 1079def read_pkgconfig_provides(d):
1185 pkgdatadir = d.getVar('PKGDATA_DIR') 1080 pkgdatadir = d.getVar('PKGDATA_DIR')
1186 pkgmap = {} 1081 pkgmap = {}
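The new generate_common_licenses_chksums() above builds LIC_FILES_CHKSUM entries that point into ${COMMON_LICENSE_DIR} rather than at files in the source tree. A rough sketch of the string it assembles, with the md5 computation stubbed out since it depends on the local common-licenses files:

    def common_license_chksums(licenses, md5_for):
        # md5_for stands in for bb.utils.md5_file() on the expanded licfile path
        entries = []
        for license in licenses:
            licfile = '${COMMON_LICENSE_DIR}/' + license
            entries.append('file://%s;md5=%s' % (licfile, md5_for(licfile)))
        return entries

    print(common_license_chksums(['MIT'], lambda path: '<md5 of %s>' % path))
    # ['file://${COMMON_LICENSE_DIR}/MIT;md5=<md5 of ${COMMON_LICENSE_DIR}/MIT>']
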
@@ -1311,7 +1206,7 @@ def register_commands(subparsers):
1311 parser_create.add_argument('-B', '--srcbranch', help='Branch in source repository if fetching from an SCM such as git (default master)') 1206 parser_create.add_argument('-B', '--srcbranch', help='Branch in source repository if fetching from an SCM such as git (default master)')
1312 parser_create.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)') 1207 parser_create.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
1313 parser_create.add_argument('--npm-dev', action="store_true", help='For npm, also fetch devDependencies') 1208 parser_create.add_argument('--npm-dev', action="store_true", help='For npm, also fetch devDependencies')
1209 parser_create.add_argument('--no-pypi', action="store_true", help='Do not inherit pypi class')
1314 parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS) 1210 parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS)
1315 parser_create.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).') 1211 parser_create.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).')
1316 parser_create.set_defaults(func=create_recipe) 1212 parser_create.set_defaults(func=create_recipe)
1317
diff --git a/scripts/lib/recipetool/create_buildsys.py b/scripts/lib/recipetool/create_buildsys.py
index 35a97c9345..ec9d510e23 100644
--- a/scripts/lib/recipetool/create_buildsys.py
+++ b/scripts/lib/recipetool/create_buildsys.py
@@ -5,9 +5,9 @@
5# SPDX-License-Identifier: GPL-2.0-only 5# SPDX-License-Identifier: GPL-2.0-only
6# 6#
7 7
8import os
8import re 9import re
9import logging 10import logging
10import glob
11from recipetool.create import RecipeHandler, validate_pv 11from recipetool.create import RecipeHandler, validate_pv
12 12
13logger = logging.getLogger('recipetool') 13logger = logging.getLogger('recipetool')
@@ -137,15 +137,15 @@ class CmakeRecipeHandler(RecipeHandler):
137 deps = [] 137 deps = []
138 unmappedpkgs = [] 138 unmappedpkgs = []
139 139
140 proj_re = re.compile('project\s*\(([^)]*)\)', re.IGNORECASE) 140 proj_re = re.compile(r'project\s*\(([^)]*)\)', re.IGNORECASE)
141 pkgcm_re = re.compile('pkg_check_modules\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?\s+([^)\s]+)\s*\)', re.IGNORECASE) 141 pkgcm_re = re.compile(r'pkg_check_modules\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?\s+([^)\s]+)\s*\)', re.IGNORECASE)
142 pkgsm_re = re.compile('pkg_search_module\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?((\s+[^)\s]+)+)\s*\)', re.IGNORECASE) 142 pkgsm_re = re.compile(r'pkg_search_module\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?((\s+[^)\s]+)+)\s*\)', re.IGNORECASE)
143 findpackage_re = re.compile('find_package\s*\(\s*([a-zA-Z0-9-_]+)\s*.*', re.IGNORECASE) 143 findpackage_re = re.compile(r'find_package\s*\(\s*([a-zA-Z0-9-_]+)\s*.*', re.IGNORECASE)
144 findlibrary_re = re.compile('find_library\s*\(\s*[a-zA-Z0-9-_]+\s*(NAMES\s+)?([a-zA-Z0-9-_ ]+)\s*.*') 144 findlibrary_re = re.compile(r'find_library\s*\(\s*[a-zA-Z0-9-_]+\s*(NAMES\s+)?([a-zA-Z0-9-_ ]+)\s*.*')
145 checklib_re = re.compile('check_library_exists\s*\(\s*([^\s)]+)\s*.*', re.IGNORECASE) 145 checklib_re = re.compile(r'check_library_exists\s*\(\s*([^\s)]+)\s*.*', re.IGNORECASE)
146 include_re = re.compile('include\s*\(\s*([^)\s]*)\s*\)', re.IGNORECASE) 146 include_re = re.compile(r'include\s*\(\s*([^)\s]*)\s*\)', re.IGNORECASE)
147 subdir_re = re.compile('add_subdirectory\s*\(\s*([^)\s]*)\s*([^)\s]*)\s*\)', re.IGNORECASE) 147 subdir_re = re.compile(r'add_subdirectory\s*\(\s*([^)\s]*)\s*([^)\s]*)\s*\)', re.IGNORECASE)
148 dep_re = re.compile('([^ ><=]+)( *[<>=]+ *[^ ><=]+)?') 148 dep_re = re.compile(r'([^ ><=]+)( *[<>=]+ *[^ ><=]+)?')
149 149
150 def find_cmake_package(pkg): 150 def find_cmake_package(pkg):
151 RecipeHandler.load_devel_filemap(tinfoil.config_data) 151 RecipeHandler.load_devel_filemap(tinfoil.config_data)
@@ -423,16 +423,16 @@ class AutotoolsRecipeHandler(RecipeHandler):
423 'makeinfo': 'texinfo', 423 'makeinfo': 'texinfo',
424 } 424 }
425 425
426 pkg_re = re.compile('PKG_CHECK_MODULES\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*') 426 pkg_re = re.compile(r'PKG_CHECK_MODULES\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*')
427 pkgce_re = re.compile('PKG_CHECK_EXISTS\(\s*\[?([^,\]]*)\]?[),].*') 427 pkgce_re = re.compile(r'PKG_CHECK_EXISTS\(\s*\[?([^,\]]*)\]?[),].*')
428 lib_re = re.compile('AC_CHECK_LIB\(\s*\[?([^,\]]*)\]?,.*') 428 lib_re = re.compile(r'AC_CHECK_LIB\(\s*\[?([^,\]]*)\]?,.*')
429 libx_re = re.compile('AX_CHECK_LIBRARY\(\s*\[?[^,\]]*\]?,\s*\[?([^,\]]*)\]?,\s*\[?([a-zA-Z0-9-]*)\]?,.*') 429 libx_re = re.compile(r'AX_CHECK_LIBRARY\(\s*\[?[^,\]]*\]?,\s*\[?([^,\]]*)\]?,\s*\[?([a-zA-Z0-9-]*)\]?,.*')
430 progs_re = re.compile('_PROGS?\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*') 430 progs_re = re.compile(r'_PROGS?\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*')
431 dep_re = re.compile('([^ ><=]+)( [<>=]+ [^ ><=]+)?') 431 dep_re = re.compile(r'([^ ><=]+)( [<>=]+ [^ ><=]+)?')
432 ac_init_re = re.compile('AC_INIT\(\s*([^,]+),\s*([^,]+)[,)].*') 432 ac_init_re = re.compile(r'AC_INIT\(\s*([^,]+),\s*([^,]+)[,)].*')
433 am_init_re = re.compile('AM_INIT_AUTOMAKE\(\s*([^,]+),\s*([^,]+)[,)].*') 433 am_init_re = re.compile(r'AM_INIT_AUTOMAKE\(\s*([^,]+),\s*([^,]+)[,)].*')
434 define_re = re.compile('\s*(m4_)?define\(\s*([^,]+),\s*([^,]+)\)') 434 define_re = re.compile(r'\s*(m4_)?define\(\s*([^,]+),\s*([^,]+)\)')
435 version_re = re.compile('([0-9.]+)') 435 version_re = re.compile(r'([0-9.]+)')
436 436
437 defines = {} 437 defines = {}
438 def subst_defines(value): 438 def subst_defines(value):
@@ -545,7 +545,7 @@ class AutotoolsRecipeHandler(RecipeHandler):
545 deps.append('zlib') 545 deps.append('zlib')
546 elif keyword in ('AX_CHECK_OPENSSL', 'AX_LIB_CRYPTO'): 546 elif keyword in ('AX_CHECK_OPENSSL', 'AX_LIB_CRYPTO'):
547 deps.append('openssl') 547 deps.append('openssl')
548 elif keyword == 'AX_LIB_CURL': 548 elif keyword in ('AX_LIB_CURL', 'LIBCURL_CHECK_CONFIG'):
549 deps.append('curl') 549 deps.append('curl')
550 elif keyword == 'AX_LIB_BEECRYPT': 550 elif keyword == 'AX_LIB_BEECRYPT':
551 deps.append('beecrypt') 551 deps.append('beecrypt')
@@ -624,6 +624,7 @@ class AutotoolsRecipeHandler(RecipeHandler):
624 'AX_CHECK_OPENSSL', 624 'AX_CHECK_OPENSSL',
625 'AX_LIB_CRYPTO', 625 'AX_LIB_CRYPTO',
626 'AX_LIB_CURL', 626 'AX_LIB_CURL',
627 'LIBCURL_CHECK_CONFIG',
627 'AX_LIB_BEECRYPT', 628 'AX_LIB_BEECRYPT',
628 'AX_LIB_EXPAT', 629 'AX_LIB_EXPAT',
629 'AX_LIB_GCRYPT', 630 'AX_LIB_GCRYPT',
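Most of the create_buildsys.py hunks above simply convert the regex literals to raw strings; escapes such as \s and \( then reach the regex engine unchanged instead of triggering Python's invalid-escape warnings, and the patterns match exactly as before. A quick check of one of the converted patterns:

    import re

    proj_re = re.compile(r'project\s*\(([^)]*)\)', re.IGNORECASE)
    m = proj_re.search('project(myapp VERSION 1.2)')
    print(m.group(1))   # myapp VERSION 1.2
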
diff --git a/scripts/lib/recipetool/create_buildsys_python.py b/scripts/lib/recipetool/create_buildsys_python.py
index adfa377956..a807dafae5 100644
--- a/scripts/lib/recipetool/create_buildsys_python.py
+++ b/scripts/lib/recipetool/create_buildsys_python.py
@@ -8,9 +8,9 @@
8import ast 8import ast
9import codecs 9import codecs
10import collections 10import collections
11import distutils.command.build_py 11import setuptools.command.build_py
12import email 12import email
13import imp 13import importlib
14import glob 14import glob
15import itertools 15import itertools
16import logging 16import logging
@@ -18,7 +18,11 @@ import os
18import re 18import re
19import sys 19import sys
20import subprocess 20import subprocess
21import json
22import urllib.request
21from recipetool.create import RecipeHandler 23from recipetool.create import RecipeHandler
24from urllib.parse import urldefrag
25from recipetool.create import determine_from_url
22 26
23logger = logging.getLogger('recipetool') 27logger = logging.getLogger('recipetool')
24 28
@@ -37,7 +41,334 @@ class PythonRecipeHandler(RecipeHandler):
37 assume_provided = ['builtins', 'os.path'] 41 assume_provided = ['builtins', 'os.path']
38 # Assumes that the host python3 builtin_module_names is sane for target too 42 # Assumes that the host python3 builtin_module_names is sane for target too
39 assume_provided = assume_provided + list(sys.builtin_module_names) 43 assume_provided = assume_provided + list(sys.builtin_module_names)
44 excluded_fields = []
40 45
46
47 classifier_license_map = {
48 'License :: OSI Approved :: Academic Free License (AFL)': 'AFL',
49 'License :: OSI Approved :: Apache Software License': 'Apache',
50 'License :: OSI Approved :: Apple Public Source License': 'APSL',
51 'License :: OSI Approved :: Artistic License': 'Artistic',
52 'License :: OSI Approved :: Attribution Assurance License': 'AAL',
53 'License :: OSI Approved :: BSD License': 'BSD-3-Clause',
54 'License :: OSI Approved :: Boost Software License 1.0 (BSL-1.0)': 'BSL-1.0',
55 'License :: OSI Approved :: CEA CNRS Inria Logiciel Libre License, version 2.1 (CeCILL-2.1)': 'CECILL-2.1',
56 'License :: OSI Approved :: Common Development and Distribution License 1.0 (CDDL-1.0)': 'CDDL-1.0',
57 'License :: OSI Approved :: Common Public License': 'CPL',
58 'License :: OSI Approved :: Eclipse Public License 1.0 (EPL-1.0)': 'EPL-1.0',
59 'License :: OSI Approved :: Eclipse Public License 2.0 (EPL-2.0)': 'EPL-2.0',
60 'License :: OSI Approved :: Eiffel Forum License': 'EFL',
61 'License :: OSI Approved :: European Union Public Licence 1.0 (EUPL 1.0)': 'EUPL-1.0',
62 'License :: OSI Approved :: European Union Public Licence 1.1 (EUPL 1.1)': 'EUPL-1.1',
63 'License :: OSI Approved :: European Union Public Licence 1.2 (EUPL 1.2)': 'EUPL-1.2',
64 'License :: OSI Approved :: GNU Affero General Public License v3': 'AGPL-3.0-only',
65 'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)': 'AGPL-3.0-or-later',
66 'License :: OSI Approved :: GNU Free Documentation License (FDL)': 'GFDL',
67 'License :: OSI Approved :: GNU General Public License (GPL)': 'GPL',
68 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)': 'GPL-2.0-only',
69 'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)': 'GPL-2.0-or-later',
70 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)': 'GPL-3.0-only',
71 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)': 'GPL-3.0-or-later',
72 'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)': 'LGPL-2.0-only',
73 'License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+)': 'LGPL-2.0-or-later',
74 'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)': 'LGPL-3.0-only',
75 'License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)': 'LGPL-3.0-or-later',
76 'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)': 'LGPL',
77 'License :: OSI Approved :: Historical Permission Notice and Disclaimer (HPND)': 'HPND',
78 'License :: OSI Approved :: IBM Public License': 'IPL',
79 'License :: OSI Approved :: ISC License (ISCL)': 'ISC',
80 'License :: OSI Approved :: Intel Open Source License': 'Intel',
81 'License :: OSI Approved :: Jabber Open Source License': 'Jabber',
82 'License :: OSI Approved :: MIT License': 'MIT',
83 'License :: OSI Approved :: MIT No Attribution License (MIT-0)': 'MIT-0',
84 'License :: OSI Approved :: MITRE Collaborative Virtual Workspace License (CVW)': 'CVWL',
85 'License :: OSI Approved :: MirOS License (MirOS)': 'MirOS',
86 'License :: OSI Approved :: Motosoto License': 'Motosoto',
87 'License :: OSI Approved :: Mozilla Public License 1.0 (MPL)': 'MPL-1.0',
88 'License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)': 'MPL-1.1',
89 'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)': 'MPL-2.0',
90 'License :: OSI Approved :: Nethack General Public License': 'NGPL',
91 'License :: OSI Approved :: Nokia Open Source License': 'Nokia',
92 'License :: OSI Approved :: Open Group Test Suite License': 'OGTSL',
93 'License :: OSI Approved :: Open Software License 3.0 (OSL-3.0)': 'OSL-3.0',
94 'License :: OSI Approved :: PostgreSQL License': 'PostgreSQL',
95 'License :: OSI Approved :: Python License (CNRI Python License)': 'CNRI-Python',
96 'License :: OSI Approved :: Python Software Foundation License': 'PSF-2.0',
97 'License :: OSI Approved :: Qt Public License (QPL)': 'QPL',
98 'License :: OSI Approved :: Ricoh Source Code Public License': 'RSCPL',
99 'License :: OSI Approved :: SIL Open Font License 1.1 (OFL-1.1)': 'OFL-1.1',
100 'License :: OSI Approved :: Sleepycat License': 'Sleepycat',
101 'License :: OSI Approved :: Sun Industry Standards Source License (SISSL)': 'SISSL',
102 'License :: OSI Approved :: Sun Public License': 'SPL',
103 'License :: OSI Approved :: The Unlicense (Unlicense)': 'Unlicense',
104 'License :: OSI Approved :: Universal Permissive License (UPL)': 'UPL-1.0',
105 'License :: OSI Approved :: University of Illinois/NCSA Open Source License': 'NCSA',
106 'License :: OSI Approved :: Vovida Software License 1.0': 'VSL-1.0',
107 'License :: OSI Approved :: W3C License': 'W3C',
108 'License :: OSI Approved :: X.Net License': 'Xnet',
109 'License :: OSI Approved :: Zope Public License': 'ZPL',
110 'License :: OSI Approved :: zlib/libpng License': 'Zlib',
111 'License :: Other/Proprietary License': 'Proprietary',
112 'License :: Public Domain': 'PD',
113 }
114
115 def __init__(self):
116 pass
117
118 def process_url(self, args, classes, handled, extravalues):
119 """
120 Convert any pypi url https://pypi.org/project/<package>/<version> into https://files.pythonhosted.org/packages/source/...
121 which corresponds to the archive location, and add pypi class
122 """
123
124 if 'url' in handled:
125 return None
126
127 fetch_uri = None
128 source = args.source
129 required_version = args.version if args.version else None
130 match = re.match(r'https?://pypi.org/project/([^/]+)(?:/([^/]+))?/?$', urldefrag(source)[0])
131 if match:
132 package = match.group(1)
133 version = match.group(2) if match.group(2) else required_version
134
135                json_url = "https://pypi.org/pypi/%s/json" % package
136 response = urllib.request.urlopen(json_url)
137 if response.status == 200:
138 data = json.loads(response.read())
139 if not version:
140 # grab latest version
141 version = data["info"]["version"]
142 pypi_package = data["info"]["name"]
143 for release in reversed(data["releases"][version]):
144 if release["packagetype"] == "sdist":
145 fetch_uri = release["url"]
146 break
147 else:
148 logger.warning("Cannot handle pypi url %s: cannot fetch package information using %s", source, json_url)
149 return None
150 else:
151 match = re.match(r'^https?://files.pythonhosted.org/packages.*/(.*)-.*$', source)
152 if match:
153 fetch_uri = source
154 pypi_package = match.group(1)
155 _, version = determine_from_url(fetch_uri)
156
157 if match and not args.no_pypi:
158 if required_version and version != required_version:
159 raise Exception("Version specified using --version/-V (%s) and version specified in the url (%s) do not match" % (required_version, version))
160            # This is optional if BPN looks like "python-<pypi_package>" or "python3-<pypi_package>" (see pypi.bbclass)
161            # but at this point we cannot know because the user can specify the output name of the recipe on the command line
162            extravalues["PYPI_PACKAGE"] = pypi_package
163            # If the tarball extension is not 'tar.gz' (default value in pypi.bbclass) we should set PYPI_PACKAGE_EXT in the recipe
164 pypi_package_ext = re.match(r'.*%s-%s\.(.*)$' % (pypi_package, version), fetch_uri)
165 if pypi_package_ext:
166 pypi_package_ext = pypi_package_ext.group(1)
167 if pypi_package_ext != "tar.gz":
168 extravalues["PYPI_PACKAGE_EXT"] = pypi_package_ext
169
170 # Pypi class will handle S and SRC_URI variables, so remove them
171 # TODO: allow oe.recipeutils.patch_recipe_lines() to accept regexp so we can simplify the following to:
172 # extravalues['SRC_URI(?:\[.*?\])?'] = None
173 extravalues['S'] = None
174 extravalues['SRC_URI'] = None
175
176 classes.append('pypi')
177
178 handled.append('url')
179 return fetch_uri
180
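
As a hedged illustration of the resolution process_url() performs above (not part of the patch), the PyPI JSON API at https://pypi.org/pypi/<package>/json is queried and the sdist release URL is selected; the helper name below is hypothetical:

    # Standalone sketch of the pypi URL -> sdist URL resolution, assuming the
    # public PyPI JSON API; mirrors the packagetype == "sdist" selection above.
    import json
    import urllib.request

    def pypi_sdist_url(package, version=None):
        with urllib.request.urlopen("https://pypi.org/pypi/%s/json" % package) as response:
            data = json.loads(response.read())
        version = version or data["info"]["version"]   # latest release if unspecified
        for release in reversed(data["releases"][version]):
            if release["packagetype"] == "sdist":
                return release["url"]
        return None
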
181 def handle_classifier_license(self, classifiers, existing_licenses=""):
182
183 licenses = []
184 for classifier in classifiers:
185 if classifier in self.classifier_license_map:
186 license = self.classifier_license_map[classifier]
187 if license == 'Apache' and 'Apache-2.0' in existing_licenses:
188 license = 'Apache-2.0'
189 elif license == 'GPL':
190 if 'GPL-2.0' in existing_licenses or 'GPLv2' in existing_licenses:
191 license = 'GPL-2.0'
192 elif 'GPL-3.0' in existing_licenses or 'GPLv3' in existing_licenses:
193 license = 'GPL-3.0'
194 elif license == 'LGPL':
195 if 'LGPL-2.1' in existing_licenses or 'LGPLv2.1' in existing_licenses:
196 license = 'LGPL-2.1'
197 elif 'LGPL-2.0' in existing_licenses or 'LGPLv2' in existing_licenses:
198 license = 'LGPL-2.0'
199 elif 'LGPL-3.0' in existing_licenses or 'LGPLv3' in existing_licenses:
200 license = 'LGPL-3.0'
201 licenses.append(license)
202
203 if licenses:
204 return ' & '.join(licenses)
205
206 return None
207
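
A minimal, hypothetical usage sketch of handle_classifier_license() as defined above; the classifier strings come from classifier_license_map and the expected result assumes the new SPDX-style identifiers:

    # Hypothetical call, not part of the patch.
    handler = PythonSetupPyRecipeHandler()
    classifiers = [
        'License :: OSI Approved :: MIT License',
        'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)',
    ]
    # Maps each classifier through classifier_license_map and joins with ' & ':
    # expected result: "MIT & GPL-2.0-or-later"
    print(handler.handle_classifier_license(classifiers))
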
208 def map_info_to_bbvar(self, info, extravalues):
209
210 # Map PKG-INFO & setup.py fields to bitbake variables
211 for field, values in info.items():
212 if field in self.excluded_fields:
213 continue
214
215 if field not in self.bbvar_map:
216 continue
217
218 if isinstance(values, str):
219 value = values
220 else:
221 value = ' '.join(str(v) for v in values if v)
222
223 bbvar = self.bbvar_map[field]
224 if bbvar == "PN":
225 # by convention python recipes start with "python3-"
226 if not value.startswith('python'):
227 value = 'python3-' + value
228
229 if bbvar not in extravalues and value:
230 extravalues[bbvar] = value
231
232 def apply_info_replacements(self, info):
233 if not self.replacements:
234 return
235
236 for variable, search, replace in self.replacements:
237 if variable not in info:
238 continue
239
240 def replace_value(search, replace, value):
241 if replace is None:
242 if re.search(search, value):
243 return None
244 else:
245 new_value = re.sub(search, replace, value)
246 if value != new_value:
247 return new_value
248 return value
249
250 value = info[variable]
251 if isinstance(value, str):
252 new_value = replace_value(search, replace, value)
253 if new_value is None:
254 del info[variable]
255 elif new_value != value:
256 info[variable] = new_value
257 elif hasattr(value, 'items'):
258 for dkey, dvalue in list(value.items()):
259 new_list = []
260 for pos, a_value in enumerate(dvalue):
261 new_value = replace_value(search, replace, a_value)
262 if new_value is not None and new_value != value:
263 new_list.append(new_value)
264
265 if value != new_list:
266 value[dkey] = new_list
267 else:
268 new_list = []
269 for pos, a_value in enumerate(value):
270 new_value = replace_value(search, replace, a_value)
271 if new_value is not None and new_value != value:
272 new_list.append(new_value)
273
274 if value != new_list:
275 info[variable] = new_list
276
277
278 def scan_python_dependencies(self, paths):
279 deps = set()
280 try:
281 dep_output = self.run_command(['pythondeps', '-d'] + paths)
282 except (OSError, subprocess.CalledProcessError):
283 pass
284 else:
285 for line in dep_output.splitlines():
286 line = line.rstrip()
287 dep, filename = line.split('\t', 1)
288 if filename.endswith('/setup.py'):
289 continue
290 deps.add(dep)
291
292 try:
293 provides_output = self.run_command(['pythondeps', '-p'] + paths)
294 except (OSError, subprocess.CalledProcessError):
295 pass
296 else:
297 provides_lines = (l.rstrip() for l in provides_output.splitlines())
298 provides = set(l for l in provides_lines if l and l != 'setup')
299 deps -= provides
300
301 return deps
302
303 def parse_pkgdata_for_python_packages(self):
304 pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')
305
306 ldata = tinfoil.config_data.createCopy()
307 bb.parse.handle('classes-recipe/python3-dir.bbclass', ldata, True)
308 python_sitedir = ldata.getVar('PYTHON_SITEPACKAGES_DIR')
309
310 dynload_dir = os.path.join(os.path.dirname(python_sitedir), 'lib-dynload')
311 python_dirs = [python_sitedir + os.sep,
312 os.path.join(os.path.dirname(python_sitedir), 'dist-packages') + os.sep,
313 os.path.dirname(python_sitedir) + os.sep]
314 packages = {}
315 for pkgdatafile in glob.glob('{}/runtime/*'.format(pkgdata_dir)):
316 files_info = None
317 with open(pkgdatafile, 'r') as f:
318 for line in f.readlines():
319 field, value = line.split(': ', 1)
320 if field.startswith('FILES_INFO'):
321 files_info = ast.literal_eval(value)
322 break
323 else:
324 continue
325
326 for fn in files_info:
327 for suffix in importlib.machinery.all_suffixes():
328 if fn.endswith(suffix):
329 break
330 else:
331 continue
332
333 if fn.startswith(dynload_dir + os.sep):
334 if '/.debug/' in fn:
335 continue
336 base = os.path.basename(fn)
337 provided = base.split('.', 1)[0]
338 packages[provided] = os.path.basename(pkgdatafile)
339 continue
340
341 for python_dir in python_dirs:
342 if fn.startswith(python_dir):
343 relpath = fn[len(python_dir):]
344 relstart, _, relremaining = relpath.partition(os.sep)
345 if relstart.endswith('.egg'):
346 relpath = relremaining
347 base, _ = os.path.splitext(relpath)
348
349 if '/.debug/' in base:
350 continue
351 if os.path.basename(base) == '__init__':
352 base = os.path.dirname(base)
353 base = base.replace(os.sep + os.sep, os.sep)
354 provided = base.replace(os.sep, '.')
355 packages[provided] = os.path.basename(pkgdatafile)
356 return packages
357
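
The FILES_INFO scan above reduces installed file paths to importable module names; a simplified sketch of that mapping (hypothetical helper, assuming a typical site-packages layout) is:

    # Simplified sketch of the path -> dotted module mapping used in
    # parse_pkgdata_for_python_packages(); python_sitedir is an assumed example.
    import os

    def module_from_path(fn, python_sitedir="/usr/lib/python3/site-packages"):
        relpath = fn[len(python_sitedir + os.sep):]
        base, _ = os.path.splitext(relpath)
        if os.path.basename(base) == '__init__':
            base = os.path.dirname(base)          # package __init__ -> package name
        return base.replace(os.sep, '.')

    # module_from_path("/usr/lib/python3/site-packages/requests/__init__.py") -> "requests"
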
358 @classmethod
359 def run_command(cls, cmd, **popenargs):
360 if 'stderr' not in popenargs:
361 popenargs['stderr'] = subprocess.STDOUT
362 try:
363 return subprocess.check_output(cmd, **popenargs).decode('utf-8')
364 except OSError as exc:
365 logger.error('Unable to run `{}`: {}', ' '.join(cmd), exc)
366 raise
367 except subprocess.CalledProcessError as exc:
368 logger.error('Unable to run `{}`: {}', ' '.join(cmd), exc.output)
369 raise
370
371class PythonSetupPyRecipeHandler(PythonRecipeHandler):
41 bbvar_map = { 372 bbvar_map = {
42 'Name': 'PN', 373 'Name': 'PN',
43 'Version': 'PV', 374 'Version': 'PV',
@@ -45,9 +376,9 @@ class PythonRecipeHandler(RecipeHandler):
45 'Summary': 'SUMMARY', 376 'Summary': 'SUMMARY',
46 'Description': 'DESCRIPTION', 377 'Description': 'DESCRIPTION',
47 'License': 'LICENSE', 378 'License': 'LICENSE',
48 'Requires': 'RDEPENDS_${PN}', 379 'Requires': 'RDEPENDS:${PN}',
49 'Provides': 'RPROVIDES_${PN}', 380 'Provides': 'RPROVIDES:${PN}',
50 'Obsoletes': 'RREPLACES_${PN}', 381 'Obsoletes': 'RREPLACES:${PN}',
51 } 382 }
52 # PN/PV are already set by recipetool core & desc can be extremely long 383 # PN/PV are already set by recipetool core & desc can be extremely long
53 excluded_fields = [ 384 excluded_fields = [
@@ -75,6 +406,7 @@ class PythonRecipeHandler(RecipeHandler):
75 'Supported-Platform', 406 'Supported-Platform',
76 ] 407 ]
77 setuparg_multi_line_values = ['Description'] 408 setuparg_multi_line_values = ['Description']
409
78 replacements = [ 410 replacements = [
79 ('License', r' +$', ''), 411 ('License', r' +$', ''),
80 ('License', r'^ +', ''), 412 ('License', r'^ +', ''),
@@ -95,71 +427,161 @@ class PythonRecipeHandler(RecipeHandler):
95 ('Install-requires', r'\[[^\]]+\]$', ''), 427 ('Install-requires', r'\[[^\]]+\]$', ''),
96 ] 428 ]
97 429
98 classifier_license_map = {
99 'License :: OSI Approved :: Academic Free License (AFL)': 'AFL',
100 'License :: OSI Approved :: Apache Software License': 'Apache',
101 'License :: OSI Approved :: Apple Public Source License': 'APSL',
102 'License :: OSI Approved :: Artistic License': 'Artistic',
103 'License :: OSI Approved :: Attribution Assurance License': 'AAL',
104 'License :: OSI Approved :: BSD License': 'BSD',
105 'License :: OSI Approved :: Common Public License': 'CPL',
106 'License :: OSI Approved :: Eiffel Forum License': 'EFL',
107 'License :: OSI Approved :: European Union Public Licence 1.0 (EUPL 1.0)': 'EUPL-1.0',
108 'License :: OSI Approved :: European Union Public Licence 1.1 (EUPL 1.1)': 'EUPL-1.1',
109 'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)': 'AGPL-3.0+',
110 'License :: OSI Approved :: GNU Affero General Public License v3': 'AGPL-3.0',
111 'License :: OSI Approved :: GNU Free Documentation License (FDL)': 'GFDL',
112 'License :: OSI Approved :: GNU General Public License (GPL)': 'GPL',
113 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)': 'GPL-2.0',
114 'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)': 'GPL-2.0+',
115 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)': 'GPL-3.0',
116 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)': 'GPL-3.0+',
117 'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)': 'LGPL-2.0',
118 'License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+)': 'LGPL-2.0+',
119 'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)': 'LGPL-3.0',
120 'License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)': 'LGPL-3.0+',
121 'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)': 'LGPL',
122 'License :: OSI Approved :: IBM Public License': 'IPL',
123 'License :: OSI Approved :: ISC License (ISCL)': 'ISC',
124 'License :: OSI Approved :: Intel Open Source License': 'Intel',
125 'License :: OSI Approved :: Jabber Open Source License': 'Jabber',
126 'License :: OSI Approved :: MIT License': 'MIT',
127 'License :: OSI Approved :: MITRE Collaborative Virtual Workspace License (CVW)': 'CVWL',
128 'License :: OSI Approved :: Motosoto License': 'Motosoto',
129 'License :: OSI Approved :: Mozilla Public License 1.0 (MPL)': 'MPL-1.0',
130 'License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)': 'MPL-1.1',
131 'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)': 'MPL-2.0',
132 'License :: OSI Approved :: Nethack General Public License': 'NGPL',
133 'License :: OSI Approved :: Nokia Open Source License': 'Nokia',
134 'License :: OSI Approved :: Open Group Test Suite License': 'OGTSL',
135 'License :: OSI Approved :: Python License (CNRI Python License)': 'CNRI-Python',
136 'License :: OSI Approved :: Python Software Foundation License': 'PSF',
137 'License :: OSI Approved :: Qt Public License (QPL)': 'QPL',
138 'License :: OSI Approved :: Ricoh Source Code Public License': 'RSCPL',
139 'License :: OSI Approved :: Sleepycat License': 'Sleepycat',
140 'License :: OSI Approved :: Sun Industry Standards Source License (SISSL)': '-- Sun Industry Standards Source License (SISSL)',
141 'License :: OSI Approved :: Sun Public License': 'SPL',
142 'License :: OSI Approved :: University of Illinois/NCSA Open Source License': 'NCSA',
143 'License :: OSI Approved :: Vovida Software License 1.0': 'VSL-1.0',
144 'License :: OSI Approved :: W3C License': 'W3C',
145 'License :: OSI Approved :: X.Net License': 'Xnet',
146 'License :: OSI Approved :: Zope Public License': 'ZPL',
147 'License :: OSI Approved :: zlib/libpng License': 'Zlib',
148 }
149
150 def __init__(self): 430 def __init__(self):
151 pass 431 pass
152 432
433 def parse_setup_py(self, setupscript='./setup.py'):
434 with codecs.open(setupscript) as f:
435 info, imported_modules, non_literals, extensions = gather_setup_info(f)
436
437 def _map(key):
438 key = key.replace('_', '-')
439 key = key[0].upper() + key[1:]
440 if key in self.setup_parse_map:
441 key = self.setup_parse_map[key]
442 return key
443
444 # Naive mapping of setup() arguments to PKG-INFO field names
445 for d in [info, non_literals]:
446 for key, value in list(d.items()):
447 if key is None:
448 continue
449 new_key = _map(key)
450 if new_key != key:
451 del d[key]
452 d[new_key] = value
453
454 return info, 'setuptools' in imported_modules, non_literals, extensions
455
456 def get_setup_args_info(self, setupscript='./setup.py'):
457 cmd = ['python3', setupscript]
458 info = {}
459 keys = set(self.bbvar_map.keys())
460 keys |= set(self.setuparg_list_fields)
461 keys |= set(self.setuparg_multi_line_values)
462 grouped_keys = itertools.groupby(keys, lambda k: (k in self.setuparg_list_fields, k in self.setuparg_multi_line_values))
463 for index, keys in grouped_keys:
464 if index == (True, False):
465 # Splitlines output for each arg as a list value
466 for key in keys:
467 arg = self.setuparg_map.get(key, key.lower())
468 try:
469 arg_info = self.run_command(cmd + ['--' + arg], cwd=os.path.dirname(setupscript))
470 except (OSError, subprocess.CalledProcessError):
471 pass
472 else:
473 info[key] = [l.rstrip() for l in arg_info.splitlines()]
474 elif index == (False, True):
475 # Entire output for each arg
476 for key in keys:
477 arg = self.setuparg_map.get(key, key.lower())
478 try:
479 arg_info = self.run_command(cmd + ['--' + arg], cwd=os.path.dirname(setupscript))
480 except (OSError, subprocess.CalledProcessError):
481 pass
482 else:
483 info[key] = arg_info
484 else:
485 info.update(self.get_setup_byline(list(keys), setupscript))
486 return info
487
488 def get_setup_byline(self, fields, setupscript='./setup.py'):
489 info = {}
490
491 cmd = ['python3', setupscript]
492 cmd.extend('--' + self.setuparg_map.get(f, f.lower()) for f in fields)
493 try:
494 info_lines = self.run_command(cmd, cwd=os.path.dirname(setupscript)).splitlines()
495 except (OSError, subprocess.CalledProcessError):
496 pass
497 else:
498 if len(fields) != len(info_lines):
499 logger.error('Mismatch between setup.py output lines and number of fields')
500 sys.exit(1)
501
502 for lineno, line in enumerate(info_lines):
503 line = line.rstrip()
504 info[fields[lineno]] = line
505 return info
506
507 def get_pkginfo(self, pkginfo_fn):
508 msg = email.message_from_file(open(pkginfo_fn, 'r'))
509 msginfo = {}
510 for field in msg.keys():
511 values = msg.get_all(field)
512 if len(values) == 1:
513 msginfo[field] = values[0]
514 else:
515 msginfo[field] = values
516 return msginfo
517
518 def scan_setup_python_deps(self, srctree, setup_info, setup_non_literals):
519 if 'Package-dir' in setup_info:
520 package_dir = setup_info['Package-dir']
521 else:
522 package_dir = {}
523
524 dist = setuptools.Distribution()
525
526 class PackageDir(setuptools.command.build_py.build_py):
527 def __init__(self, package_dir):
528 self.package_dir = package_dir
529 self.dist = dist
530 super().__init__(self.dist)
531
532 pd = PackageDir(package_dir)
533 to_scan = []
534 if not any(v in setup_non_literals for v in ['Py-modules', 'Scripts', 'Packages']):
535 if 'Py-modules' in setup_info:
536 for module in setup_info['Py-modules']:
537 try:
538 package, module = module.rsplit('.', 1)
539 except ValueError:
540 package, module = '.', module
541 module_path = os.path.join(pd.get_package_dir(package), module + '.py')
542 to_scan.append(module_path)
543
544 if 'Packages' in setup_info:
545 for package in setup_info['Packages']:
546 to_scan.append(pd.get_package_dir(package))
547
548 if 'Scripts' in setup_info:
549 to_scan.extend(setup_info['Scripts'])
550 else:
551 logger.info("Scanning the entire source tree, as one or more of the following setup keywords are non-literal: py_modules, scripts, packages.")
552
553 if not to_scan:
554 to_scan = ['.']
555
556 logger.info("Scanning paths for packages & dependencies: %s", ', '.join(to_scan))
557
558 provided_packages = self.parse_pkgdata_for_python_packages()
559 scanned_deps = self.scan_python_dependencies([os.path.join(srctree, p) for p in to_scan])
560 mapped_deps, unmapped_deps = set(self.base_pkgdeps), set()
561 for dep in scanned_deps:
562 mapped = provided_packages.get(dep)
563 if mapped:
564 logger.debug('Mapped %s to %s' % (dep, mapped))
565 mapped_deps.add(mapped)
566 else:
567 logger.debug('Could not map %s' % dep)
568 unmapped_deps.add(dep)
569 return mapped_deps, unmapped_deps
570
153 def process(self, srctree, classes, lines_before, lines_after, handled, extravalues): 571 def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
572
154 if 'buildsystem' in handled: 573 if 'buildsystem' in handled:
155 return False 574 return False
156 575
576 logger.debug("Trying setup.py parser")
577
157 # Check for non-zero size setup.py files 578 # Check for non-zero size setup.py files
158 setupfiles = RecipeHandler.checkfiles(srctree, ['setup.py']) 579 setupfiles = RecipeHandler.checkfiles(srctree, ['setup.py'])
159 for fn in setupfiles: 580 for fn in setupfiles:
160 if os.path.getsize(fn): 581 if os.path.getsize(fn):
161 break 582 break
162 else: 583 else:
584 logger.debug("No setup.py found")
163 return False 585 return False
164 586
165 # setup.py is always parsed to get at certain required information, such as 587 # setup.py is always parsed to get at certain required information, such as
@@ -193,6 +615,18 @@ class PythonRecipeHandler(RecipeHandler):
193 continue 615 continue
194 616
195 if line.startswith('['): 617 if line.startswith('['):
618 # PACKAGECONFIG must not contain expressions or whitespace
619 line = line.replace(" ", "")
620 line = line.replace(':', "")
621 line = line.replace('.', "-dot-")
622 line = line.replace('"', "")
623 line = line.replace('<', "-smaller-")
624 line = line.replace('>', "-bigger-")
625 line = line.replace('_', "-")
626 line = line.replace('(', "")
627 line = line.replace(')', "")
628 line = line.replace('!', "-not-")
629 line = line.replace('=', "-equals-")
196 current_feature = line[1:-1] 630 current_feature = line[1:-1]
197 elif current_feature: 631 elif current_feature:
198 extras_req[current_feature].append(line) 632 extras_req[current_feature].append(line)
@@ -226,51 +660,16 @@ class PythonRecipeHandler(RecipeHandler):
226 660
227 if license_str: 661 if license_str:
228 for i, line in enumerate(lines_before): 662 for i, line in enumerate(lines_before):
229 if line.startswith('LICENSE = '): 663 if line.startswith('##LICENSE_PLACEHOLDER##'):
230 lines_before.insert(i, '# NOTE: License in setup.py/PKGINFO is: %s' % license_str) 664 lines_before.insert(i, '# NOTE: License in setup.py/PKGINFO is: %s' % license_str)
231 break 665 break
232 666
233 if 'Classifier' in info: 667 if 'Classifier' in info:
234 existing_licenses = info.get('License', '') 668 license = self.handle_classifier_license(info['Classifier'], info.get('License', ''))
235 licenses = [] 669 if license:
236 for classifier in info['Classifier']: 670 info['License'] = license
237 if classifier in self.classifier_license_map:
238 license = self.classifier_license_map[classifier]
239 if license == 'Apache' and 'Apache-2.0' in existing_licenses:
240 license = 'Apache-2.0'
241 elif license == 'GPL':
242 if 'GPL-2.0' in existing_licenses or 'GPLv2' in existing_licenses:
243 license = 'GPL-2.0'
244 elif 'GPL-3.0' in existing_licenses or 'GPLv3' in existing_licenses:
245 license = 'GPL-3.0'
246 elif license == 'LGPL':
247 if 'LGPL-2.1' in existing_licenses or 'LGPLv2.1' in existing_licenses:
248 license = 'LGPL-2.1'
249 elif 'LGPL-2.0' in existing_licenses or 'LGPLv2' in existing_licenses:
250 license = 'LGPL-2.0'
251 elif 'LGPL-3.0' in existing_licenses or 'LGPLv3' in existing_licenses:
252 license = 'LGPL-3.0'
253 licenses.append(license)
254
255 if licenses:
256 info['License'] = ' & '.join(licenses)
257 671
258 # Map PKG-INFO & setup.py fields to bitbake variables 672 self.map_info_to_bbvar(info, extravalues)
259 for field, values in info.items():
260 if field in self.excluded_fields:
261 continue
262
263 if field not in self.bbvar_map:
264 continue
265
266 if isinstance(values, str):
267 value = values
268 else:
269 value = ' '.join(str(v) for v in values if v)
270
271 bbvar = self.bbvar_map[field]
272 if bbvar not in extravalues and value:
273 extravalues[bbvar] = value
274 673
275 mapped_deps, unmapped_deps = self.scan_setup_python_deps(srctree, setup_info, setup_non_literals) 674 mapped_deps, unmapped_deps = self.scan_setup_python_deps(srctree, setup_info, setup_non_literals)
276 675
@@ -281,6 +680,7 @@ class PythonRecipeHandler(RecipeHandler):
281 lines_after.append('# The following configs & dependencies are from setuptools extras_require.') 680 lines_after.append('# The following configs & dependencies are from setuptools extras_require.')
282 lines_after.append('# These dependencies are optional, hence can be controlled via PACKAGECONFIG.') 681 lines_after.append('# These dependencies are optional, hence can be controlled via PACKAGECONFIG.')
283 lines_after.append('# The upstream names may not correspond exactly to bitbake package names.') 682 lines_after.append('# The upstream names may not correspond exactly to bitbake package names.')
683 lines_after.append('# The configs might not be correct, since PACKAGECONFIG does not support expressions as may be used in requires.txt - they are just replaced by text.')
284 lines_after.append('#') 684 lines_after.append('#')
285 lines_after.append('# Uncomment this line to enable all the optional features.') 685 lines_after.append('# Uncomment this line to enable all the optional features.')
286 lines_after.append('#PACKAGECONFIG ?= "{}"'.format(' '.join(k.lower() for k in extras_req))) 686 lines_after.append('#PACKAGECONFIG ?= "{}"'.format(' '.join(k.lower() for k in extras_req)))
@@ -301,7 +701,7 @@ class PythonRecipeHandler(RecipeHandler):
301 inst_req_deps = ('python3-' + r.replace('.', '-').lower() for r in sorted(inst_reqs)) 701 inst_req_deps = ('python3-' + r.replace('.', '-').lower() for r in sorted(inst_reqs))
302 lines_after.append('# WARNING: the following rdepends are from setuptools install_requires. These') 702 lines_after.append('# WARNING: the following rdepends are from setuptools install_requires. These')
303 lines_after.append('# upstream names may not correspond exactly to bitbake package names.') 703 lines_after.append('# upstream names may not correspond exactly to bitbake package names.')
304 lines_after.append('RDEPENDS_${{PN}} += "{}"'.format(' '.join(inst_req_deps))) 704 lines_after.append('RDEPENDS:${{PN}} += "{}"'.format(' '.join(inst_req_deps)))
305 705
306 if mapped_deps: 706 if mapped_deps:
307 name = info.get('Name') 707 name = info.get('Name')
@@ -313,7 +713,7 @@ class PythonRecipeHandler(RecipeHandler):
313 lines_after.append('') 713 lines_after.append('')
314 lines_after.append('# WARNING: the following rdepends are determined through basic analysis of the') 714 lines_after.append('# WARNING: the following rdepends are determined through basic analysis of the')
315 lines_after.append('# python sources, and might not be 100% accurate.') 715 lines_after.append('# python sources, and might not be 100% accurate.')
316 lines_after.append('RDEPENDS_${{PN}} += "{}"'.format(' '.join(sorted(mapped_deps)))) 716 lines_after.append('RDEPENDS:${{PN}} += "{}"'.format(' '.join(sorted(mapped_deps))))
317 717
318 unmapped_deps -= set(extensions) 718 unmapped_deps -= set(extensions)
319 unmapped_deps -= set(self.assume_provided) 719 unmapped_deps -= set(self.assume_provided)
@@ -326,275 +726,283 @@ class PythonRecipeHandler(RecipeHandler):
326 726
327 handled.append('buildsystem') 727 handled.append('buildsystem')
328 728
329 def get_pkginfo(self, pkginfo_fn): 729class PythonPyprojectTomlRecipeHandler(PythonRecipeHandler):
330 msg = email.message_from_file(open(pkginfo_fn, 'r')) 730 """Base class to support PEP517 and PEP518
331 msginfo = {} 731
332 for field in msg.keys(): 732 PEP517 https://peps.python.org/pep-0517/#source-trees
333 values = msg.get_all(field) 733 PEP518 https://peps.python.org/pep-0518/#build-system-table
334 if len(values) == 1: 734 """
335 msginfo[field] = values[0] 735 # bitbake currently supports the 4 following backends
336 else: 736 build_backend_map = {
337 msginfo[field] = values 737 "setuptools.build_meta": "python_setuptools_build_meta",
338 return msginfo 738 "poetry.core.masonry.api": "python_poetry_core",
739 "flit_core.buildapi": "python_flit_core",
740 "hatchling.build": "python_hatchling",
741 "maturin": "python_maturin",
742 "mesonpy": "python_mesonpy",
743 }
339 744
340 def parse_setup_py(self, setupscript='./setup.py'): 745 # setuptools.build_meta and flit declare project metadata into the "project" section of pyproject.toml
341 with codecs.open(setupscript) as f: 746 # according to PEP-621: https://packaging.python.org/en/latest/specifications/declaring-project-metadata/#declaring-project-metadata
342 info, imported_modules, non_literals, extensions = gather_setup_info(f) 747 # while poetry uses the "tool.poetry" section according to its official documentation: https://python-poetry.org/docs/pyproject/
748 # keys from "project" and "tool.poetry" sections are almost the same except for the HOMEPAGE which is "homepage" for tool.poetry
749 # and "Homepage" for "project" section. So keep both
750 bbvar_map = {
751 "name": "PN",
752 "version": "PV",
753 "Homepage": "HOMEPAGE",
754 "homepage": "HOMEPAGE",
755 "description": "SUMMARY",
756 "license": "LICENSE",
757 "dependencies": "RDEPENDS:${PN}",
758 "requires": "DEPENDS",
759 }
343 760
344 def _map(key): 761 replacements = [
345 key = key.replace('_', '-') 762 ("license", r" +$", ""),
346 key = key[0].upper() + key[1:] 763 ("license", r"^ +", ""),
347 if key in self.setup_parse_map: 764 ("license", r" ", "-"),
348 key = self.setup_parse_map[key] 765 ("license", r"^GNU-", ""),
349 return key 766 ("license", r"-[Ll]icen[cs]e(,?-[Vv]ersion)?", ""),
767 ("license", r"^UNKNOWN$", ""),
768 # Remove currently unhandled version numbers from these variables
769 ("requires", r"\[[^\]]+\]$", ""),
770 ("requires", r"^([^><= ]+).*", r"\1"),
771 ("dependencies", r"\[[^\]]+\]$", ""),
772 ("dependencies", r"^([^><= ]+).*", r"\1"),
773 ]
350 774
351 # Naive mapping of setup() arguments to PKG-INFO field names 775 excluded_native_pkgdeps = [
352 for d in [info, non_literals]: 776 # already provided by python_setuptools_build_meta.bbclass
353 for key, value in list(d.items()): 777 "python3-setuptools-native",
354 if key is None: 778 "python3-wheel-native",
355 continue 779 # already provided by python_poetry_core.bbclass
356 new_key = _map(key) 780 "python3-poetry-core-native",
357 if new_key != key: 781 # already provided by python_flit_core.bbclass
358 del d[key] 782 "python3-flit-core-native",
359 d[new_key] = value 783 # already provided by python_mesonpy
784 "python3-meson-python-native",
785 ]
360 786
361 return info, 'setuptools' in imported_modules, non_literals, extensions 787 # add here a list of known and often used packages and the corresponding bitbake package
788 known_deps_map = {
789 "setuptools": "python3-setuptools",
790 "wheel": "python3-wheel",
791 "poetry-core": "python3-poetry-core",
792 "flit_core": "python3-flit-core",
793 "setuptools-scm": "python3-setuptools-scm",
794 "hatchling": "python3-hatchling",
795 "hatch-vcs": "python3-hatch-vcs",
796 "meson-python" : "python3-meson-python",
797 }
362 798
363 def get_setup_args_info(self, setupscript='./setup.py'): 799 def __init__(self):
364 cmd = ['python3', setupscript] 800 pass
365 info = {}
366 keys = set(self.bbvar_map.keys())
367 keys |= set(self.setuparg_list_fields)
368 keys |= set(self.setuparg_multi_line_values)
369 grouped_keys = itertools.groupby(keys, lambda k: (k in self.setuparg_list_fields, k in self.setuparg_multi_line_values))
370 for index, keys in grouped_keys:
371 if index == (True, False):
372 # Splitlines output for each arg as a list value
373 for key in keys:
374 arg = self.setuparg_map.get(key, key.lower())
375 try:
376 arg_info = self.run_command(cmd + ['--' + arg], cwd=os.path.dirname(setupscript))
377 except (OSError, subprocess.CalledProcessError):
378 pass
379 else:
380 info[key] = [l.rstrip() for l in arg_info.splitlines()]
381 elif index == (False, True):
382 # Entire output for each arg
383 for key in keys:
384 arg = self.setuparg_map.get(key, key.lower())
385 try:
386 arg_info = self.run_command(cmd + ['--' + arg], cwd=os.path.dirname(setupscript))
387 except (OSError, subprocess.CalledProcessError):
388 pass
389 else:
390 info[key] = arg_info
391 else:
392 info.update(self.get_setup_byline(list(keys), setupscript))
393 return info
394 801
395 def get_setup_byline(self, fields, setupscript='./setup.py'): 802 def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
396 info = {} 803 info = {}
804 metadata = {}
397 805
398 cmd = ['python3', setupscript] 806 if 'buildsystem' in handled:
399 cmd.extend('--' + self.setuparg_map.get(f, f.lower()) for f in fields) 807 return False
400 try:
401 info_lines = self.run_command(cmd, cwd=os.path.dirname(setupscript)).splitlines()
402 except (OSError, subprocess.CalledProcessError):
403 pass
404 else:
405 if len(fields) != len(info_lines):
406 logger.error('Mismatch between setup.py output lines and number of fields')
407 sys.exit(1)
408
409 for lineno, line in enumerate(info_lines):
410 line = line.rstrip()
411 info[fields[lineno]] = line
412 return info
413
414 def apply_info_replacements(self, info):
415 for variable, search, replace in self.replacements:
416 if variable not in info:
417 continue
418
419 def replace_value(search, replace, value):
420 if replace is None:
421 if re.search(search, value):
422 return None
423 else:
424 new_value = re.sub(search, replace, value)
425 if value != new_value:
426 return new_value
427 return value
428
429 value = info[variable]
430 if isinstance(value, str):
431 new_value = replace_value(search, replace, value)
432 if new_value is None:
433 del info[variable]
434 elif new_value != value:
435 info[variable] = new_value
436 elif hasattr(value, 'items'):
437 for dkey, dvalue in list(value.items()):
438 new_list = []
439 for pos, a_value in enumerate(dvalue):
440 new_value = replace_value(search, replace, a_value)
441 if new_value is not None and new_value != value:
442 new_list.append(new_value)
443
444 if value != new_list:
445 value[dkey] = new_list
446 else:
447 new_list = []
448 for pos, a_value in enumerate(value):
449 new_value = replace_value(search, replace, a_value)
450 if new_value is not None and new_value != value:
451 new_list.append(new_value)
452
453 if value != new_list:
454 info[variable] = new_list
455
456 def scan_setup_python_deps(self, srctree, setup_info, setup_non_literals):
457 if 'Package-dir' in setup_info:
458 package_dir = setup_info['Package-dir']
459 else:
460 package_dir = {}
461
462 class PackageDir(distutils.command.build_py.build_py):
463 def __init__(self, package_dir):
464 self.package_dir = package_dir
465
466 pd = PackageDir(package_dir)
467 to_scan = []
468 if not any(v in setup_non_literals for v in ['Py-modules', 'Scripts', 'Packages']):
469 if 'Py-modules' in setup_info:
470 for module in setup_info['Py-modules']:
471 try:
472 package, module = module.rsplit('.', 1)
473 except ValueError:
474 package, module = '.', module
475 module_path = os.path.join(pd.get_package_dir(package), module + '.py')
476 to_scan.append(module_path)
477 808
478 if 'Packages' in setup_info: 809 logger.debug("Trying pyproject.toml parser")
479 for package in setup_info['Packages']:
480 to_scan.append(pd.get_package_dir(package))
481 810
482 if 'Scripts' in setup_info: 811 # Check for non-zero size setup.py files
483 to_scan.extend(setup_info['Scripts']) 812 setupfiles = RecipeHandler.checkfiles(srctree, ["pyproject.toml"])
813 for fn in setupfiles:
814 if os.path.getsize(fn):
815 break
484 else: 816 else:
485 logger.info("Scanning the entire source tree, as one or more of the following setup keywords are non-literal: py_modules, scripts, packages.") 817 logger.debug("No pyproject.toml found")
486 818 return False
487 if not to_scan:
488 to_scan = ['.']
489
490 logger.info("Scanning paths for packages & dependencies: %s", ', '.join(to_scan))
491 819
492 provided_packages = self.parse_pkgdata_for_python_packages() 820 setupscript = os.path.join(srctree, "pyproject.toml")
493 scanned_deps = self.scan_python_dependencies([os.path.join(srctree, p) for p in to_scan])
494 mapped_deps, unmapped_deps = set(self.base_pkgdeps), set()
495 for dep in scanned_deps:
496 mapped = provided_packages.get(dep)
497 if mapped:
498 logger.debug('Mapped %s to %s' % (dep, mapped))
499 mapped_deps.add(mapped)
500 else:
501 logger.debug('Could not map %s' % dep)
502 unmapped_deps.add(dep)
503 return mapped_deps, unmapped_deps
504 821
505 def scan_python_dependencies(self, paths):
506 deps = set()
507 try: 822 try:
508 dep_output = self.run_command(['pythondeps', '-d'] + paths) 823 try:
509 except (OSError, subprocess.CalledProcessError): 824 import tomllib
510 pass 825 except ImportError:
511 else: 826 try:
512 for line in dep_output.splitlines(): 827 import tomli as tomllib
513 line = line.rstrip() 828 except ImportError:
514 dep, filename = line.split('\t', 1) 829 logger.error("Neither 'tomllib' nor 'tomli' could be imported, cannot scan pyproject.toml.")
515 if filename.endswith('/setup.py'): 830 return False
516 continue 831
517 deps.add(dep) 832 try:
833 with open(setupscript, "rb") as f:
834 config = tomllib.load(f)
835 except Exception:
836 logger.exception("Failed to parse pyproject.toml")
837 return False
838
839 build_backend = config["build-system"]["build-backend"]
840 if build_backend in self.build_backend_map:
841 classes.append(self.build_backend_map[build_backend])
842 else:
843 logger.error(
844 "Unsupported build-backend: %s, cannot use pyproject.toml. Will try to use legacy setup.py"
845 % build_backend
846 )
847 return False
518 848
519 try: 849 licfile = ""
520 provides_output = self.run_command(['pythondeps', '-p'] + paths)
521 except (OSError, subprocess.CalledProcessError):
522 pass
523 else:
524 provides_lines = (l.rstrip() for l in provides_output.splitlines())
525 provides = set(l for l in provides_lines if l and l != 'setup')
526 deps -= provides
527 850
528 return deps 851 if build_backend == "poetry.core.masonry.api":
852 if "tool" in config and "poetry" in config["tool"]:
853 metadata = config["tool"]["poetry"]
854 else:
855 if "project" in config:
856 metadata = config["project"]
857
858 if metadata:
859 for field, values in metadata.items():
860 if field == "license":
861                        # For setuptools.build_meta and flit, license is a table
862                        # but for poetry license is a string
863                        # for hatchling, both table (jsonschema) and string (iniconfig) have been used
864 if build_backend == "poetry.core.masonry.api":
865 value = values
866 else:
867 value = values.get("text", "")
868 if not value:
869 licfile = values.get("file", "")
870 continue
871 elif field == "dependencies" and build_backend == "poetry.core.masonry.api":
872 # For poetry backend, "dependencies" section looks like:
873 # [tool.poetry.dependencies]
874 # requests = "^2.13.0"
875 # requests = { version = "^2.13.0", source = "private" }
876 # See https://python-poetry.org/docs/master/pyproject/#dependencies-and-dependency-groups for more details
877 # This class doesn't handle versions anyway, so we just get the dependencies name here and construct a list
878 value = []
879 for k in values.keys():
880 value.append(k)
881 elif isinstance(values, dict):
882 for k, v in values.items():
883 info[k] = v
884 continue
885 else:
886 value = values
529 887
530 def parse_pkgdata_for_python_packages(self): 888 info[field] = value
531 suffixes = [t[0] for t in imp.get_suffixes()]
532 pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')
533 889
534 ldata = tinfoil.config_data.createCopy() 890 # Grab the license value before applying replacements
535 bb.parse.handle('classes/python3-dir.bbclass', ldata, True) 891 license_str = info.get("license", "").strip()
536 python_sitedir = ldata.getVar('PYTHON_SITEPACKAGES_DIR')
537 892
538 dynload_dir = os.path.join(os.path.dirname(python_sitedir), 'lib-dynload') 893 if license_str:
539 python_dirs = [python_sitedir + os.sep, 894 for i, line in enumerate(lines_before):
540 os.path.join(os.path.dirname(python_sitedir), 'dist-packages') + os.sep, 895 if line.startswith("##LICENSE_PLACEHOLDER##"):
541 os.path.dirname(python_sitedir) + os.sep] 896 lines_before.insert(
542 packages = {} 897 i, "# NOTE: License in pyproject.toml is: %s" % license_str
543 for pkgdatafile in glob.glob('{}/runtime/*'.format(pkgdata_dir)): 898 )
544 files_info = None
545 with open(pkgdatafile, 'r') as f:
546 for line in f.readlines():
547 field, value = line.split(': ', 1)
548 if field == 'FILES_INFO':
549 files_info = ast.literal_eval(value)
550 break 899 break
551 else:
552 continue
553 900
554 for fn in files_info: 901 info["requires"] = config["build-system"]["requires"]
555 for suffix in suffixes: 902
556 if fn.endswith(suffix): 903 self.apply_info_replacements(info)
557 break 904
558 else: 905 if "classifiers" in info:
559 continue 906 license = self.handle_classifier_license(
907 info["classifiers"], info.get("license", "")
908 )
909 if license:
910 if licfile:
911 lines = []
912 md5value = bb.utils.md5_file(os.path.join(srctree, licfile))
913 lines.append('LICENSE = "%s"' % license)
914 lines.append(
915 'LIC_FILES_CHKSUM = "file://%s;md5=%s"'
916 % (licfile, md5value)
917 )
918 lines.append("")
919
920 # Replace the placeholder so we get the values in the right place in the recipe file
921 try:
922 pos = lines_before.index("##LICENSE_PLACEHOLDER##")
923 except ValueError:
924 pos = -1
925 if pos == -1:
926 lines_before.extend(lines)
927 else:
928 lines_before[pos : pos + 1] = lines
560 929
561 if fn.startswith(dynload_dir + os.sep): 930 handled.append(("license", [license, licfile, md5value]))
562 if '/.debug/' in fn: 931 else:
563 continue 932 info["license"] = license
564 base = os.path.basename(fn)
565 provided = base.split('.', 1)[0]
566 packages[provided] = os.path.basename(pkgdatafile)
567 continue
568 933
569 for python_dir in python_dirs: 934 provided_packages = self.parse_pkgdata_for_python_packages()
570 if fn.startswith(python_dir): 935 provided_packages.update(self.known_deps_map)
571 relpath = fn[len(python_dir):] 936 native_mapped_deps, native_unmapped_deps = set(), set()
572 relstart, _, relremaining = relpath.partition(os.sep) 937 mapped_deps, unmapped_deps = set(), set()
573 if relstart.endswith('.egg'):
574 relpath = relremaining
575 base, _ = os.path.splitext(relpath)
576 938
577 if '/.debug/' in base: 939 if "requires" in info:
578 continue 940 for require in info["requires"]:
579 if os.path.basename(base) == '__init__': 941 mapped = provided_packages.get(require)
580 base = os.path.dirname(base)
581 base = base.replace(os.sep + os.sep, os.sep)
582 provided = base.replace(os.sep, '.')
583 packages[provided] = os.path.basename(pkgdatafile)
584 return packages
585 942
586 @classmethod 943 if mapped:
587 def run_command(cls, cmd, **popenargs): 944 logger.debug("Mapped %s to %s" % (require, mapped))
588 if 'stderr' not in popenargs: 945 native_mapped_deps.add(mapped)
589 popenargs['stderr'] = subprocess.STDOUT 946 else:
590 try: 947 logger.debug("Could not map %s" % require)
591 return subprocess.check_output(cmd, **popenargs).decode('utf-8') 948 native_unmapped_deps.add(require)
592 except OSError as exc: 949
593 logger.error('Unable to run `{}`: {}', ' '.join(cmd), exc) 950 info.pop("requires")
594 raise 951
595 except subprocess.CalledProcessError as exc: 952 if native_mapped_deps != set():
596 logger.error('Unable to run `{}`: {}', ' '.join(cmd), exc.output) 953 native_mapped_deps = {
597 raise 954 item + "-native" for item in native_mapped_deps
955 }
956 native_mapped_deps -= set(self.excluded_native_pkgdeps)
957 if native_mapped_deps != set():
958 info["requires"] = " ".join(sorted(native_mapped_deps))
959
960 if native_unmapped_deps:
961 lines_after.append("")
962 lines_after.append(
963 "# WARNING: We were unable to map the following python package/module"
964 )
965 lines_after.append(
966 "# dependencies to the bitbake packages which include them:"
967 )
968 lines_after.extend(
969 "# {}".format(d) for d in sorted(native_unmapped_deps)
970 )
971
972 if "dependencies" in info:
973 for dependency in info["dependencies"]:
974 mapped = provided_packages.get(dependency)
975 if mapped:
976 logger.debug("Mapped %s to %s" % (dependency, mapped))
977 mapped_deps.add(mapped)
978 else:
979 logger.debug("Could not map %s" % dependency)
980 unmapped_deps.add(dependency)
981
982 info.pop("dependencies")
983
984 if mapped_deps != set():
985 if mapped_deps != set():
986 info["dependencies"] = " ".join(sorted(mapped_deps))
987
988 if unmapped_deps:
989 lines_after.append("")
990 lines_after.append(
991 "# WARNING: We were unable to map the following python package/module"
992 )
993 lines_after.append(
994 "# runtime dependencies to the bitbake packages which include them:"
995 )
996 lines_after.extend(
997 "# {}".format(d) for d in sorted(unmapped_deps)
998 )
999
1000 self.map_info_to_bbvar(info, extravalues)
1001
1002 handled.append("buildsystem")
1003 except Exception:
1004 logger.exception("Failed to correctly handle pyproject.toml, falling back to another method")
1005 return False
598 1006
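
To make the flow of PythonPyprojectTomlRecipeHandler.process() above easier to follow, here is a minimal sketch (assuming Python 3.11+ tomllib and a hypothetical helper name) of how the build-backend value is looked up in build_backend_map to pick the inherited bbclass:

    # Minimal sketch, assuming tomllib (Python 3.11+); backend_map would be
    # PythonPyprojectTomlRecipeHandler.build_backend_map.
    import tomllib

    def detect_pep517_bbclass(pyproject_path, backend_map):
        with open(pyproject_path, "rb") as f:
            config = tomllib.load(f)
        backend = config["build-system"]["build-backend"]
        # e.g. "flit_core.buildapi" -> "python_flit_core"; None means unsupported
        return backend_map.get(backend)
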
599 1007
600def gather_setup_info(fileobj): 1008def gather_setup_info(fileobj):
@@ -710,5 +1118,7 @@ def has_non_literals(value):
710 1118
711 1119
712def register_recipe_handlers(handlers): 1120def register_recipe_handlers(handlers):
713 # We need to make sure this is ahead of the makefile fallback handler 1121 # We need to make sure these are ahead of the makefile fallback handler
714 handlers.append((PythonRecipeHandler(), 70)) 1122 # and the pyproject.toml handler ahead of the setup.py handler
1123 handlers.append((PythonPyprojectTomlRecipeHandler(), 75))
1124 handlers.append((PythonSetupPyRecipeHandler(), 70))
diff --git a/scripts/lib/recipetool/create_go.py b/scripts/lib/recipetool/create_go.py
new file mode 100644
index 0000000000..4b1fa39d13
--- /dev/null
+++ b/scripts/lib/recipetool/create_go.py
@@ -0,0 +1,174 @@
1# Recipe creation tool - go support plugin
2#
3# The code is based on golang internals. See the affected
4# methods for further reference and information.
5#
6# Copyright (C) 2023 Weidmueller GmbH & Co KG
7# Author: Lukas Funke <lukas.funke@weidmueller.com>
8#
9# SPDX-License-Identifier: GPL-2.0-only
10#
11
12
13from recipetool.create import RecipeHandler, handle_license_vars
14
15import bb.utils
16import json
17import logging
18import os
19import re
20import subprocess
21import sys
22import tempfile
23
24
25logger = logging.getLogger('recipetool')
26
27tinfoil = None
28
29
30def tinfoil_init(instance):
31 global tinfoil
32 tinfoil = instance
33
34
35
36class GoRecipeHandler(RecipeHandler):
37 """Class to handle the go recipe creation"""
38
39 @staticmethod
40 def __ensure_go():
41 """Check if the 'go' command is available in the recipes"""
42 recipe = "go-native"
43 if not tinfoil.recipes_parsed:
44 tinfoil.parse_recipes()
45 try:
46 rd = tinfoil.parse_recipe(recipe)
47 except bb.providers.NoProvider:
48 bb.error(
49 "Nothing provides '%s' which is required for the build" % (recipe))
50 bb.note(
51 "You will likely need to add a layer that provides '%s'" % (recipe))
52 return None
53
54 bindir = rd.getVar('STAGING_BINDIR_NATIVE')
55 gopath = os.path.join(bindir, 'go')
56
57 if not os.path.exists(gopath):
58 tinfoil.build_targets(recipe, 'addto_recipe_sysroot')
59
60 if not os.path.exists(gopath):
61            logger.error(
62                '%s required to process specified source, but %s did not seem to populate it' % ('go', recipe))
63 return None
64
65 return bindir
66
67 def process(self, srctree, classes, lines_before,
68 lines_after, handled, extravalues):
69
70 if 'buildsystem' in handled:
71 return False
72
73 files = RecipeHandler.checkfiles(srctree, ['go.mod'])
74 if not files:
75 return False
76
77 go_bindir = self.__ensure_go()
78 if not go_bindir:
79 sys.exit(14)
80
81 handled.append('buildsystem')
82 classes.append("go-mod")
83
84 # Use go-mod-update-modules to set the full SRC_URI and LICENSE
85 classes.append("go-mod-update-modules")
86 extravalues["run_tasks"] = "update_modules"
87
88 with tempfile.TemporaryDirectory(prefix="go-mod-") as tmp_mod_dir:
89 env = dict(os.environ)
90 env["PATH"] += f":{go_bindir}"
91 env['GOMODCACHE'] = tmp_mod_dir
92
93 stdout = subprocess.check_output(["go", "mod", "edit", "-json"], cwd=srctree, env=env, text=True)
94 go_mod = json.loads(stdout)
95 go_import = re.sub(r'/v([0-9]+)$', '', go_mod['Module']['Path'])
96
97 localfilesdir = tempfile.mkdtemp(prefix='recipetool-go-')
98 extravalues.setdefault('extrafiles', {})
99
100 # Write the stub ${BPN}-licenses.inc and ${BPN}-go-mods.inc files
101 basename = "{pn}-licenses.inc"
102 filename = os.path.join(localfilesdir, basename)
103 with open(filename, "w") as f:
104 f.write("# FROM RECIPETOOL\n")
105 extravalues['extrafiles'][f"../{basename}"] = filename
106
107 basename = "{pn}-go-mods.inc"
108 filename = os.path.join(localfilesdir, basename)
109 with open(filename, "w") as f:
110 f.write("# FROM RECIPETOOL\n")
111 extravalues['extrafiles'][f"../{basename}"] = filename
112
113 # Do generic license handling
114 d = bb.data.createCopy(tinfoil.config_data)
115 handle_license_vars(srctree, lines_before, handled, extravalues, d)
116 self.__rewrite_lic_vars(lines_before)
117
118 self.__rewrite_src_uri(lines_before)
119
120 lines_before.append('require ${BPN}-licenses.inc')
121 lines_before.append('require ${BPN}-go-mods.inc')
122 lines_before.append(f'GO_IMPORT = "{go_import}"')
123
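
For reference (a hedged sketch, not part of the patch), the GO_IMPORT value appended above is derived from the module path reported by `go mod edit -json`, with any /vN major-version suffix stripped:

    # Sketch of the GO_IMPORT derivation; assumes 'go' is on PATH.
    import json
    import re
    import subprocess

    def go_import_from_srctree(srctree):
        out = subprocess.check_output(["go", "mod", "edit", "-json"], cwd=srctree, text=True)
        module_path = json.loads(out)["Module"]["Path"]
        return re.sub(r'/v([0-9]+)$', '', module_path)

    # e.g. "github.com/example/project/v3" -> "github.com/example/project"
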
124 def __update_lines_before(self, updated, newlines, lines_before):
125 if updated:
126 del lines_before[:]
127 for line in newlines:
128 # Hack to avoid newlines that edit_metadata inserts
129 if line.endswith('\n'):
130 line = line[:-1]
131 lines_before.append(line)
132 return updated
133
134 def __rewrite_lic_vars(self, lines_before):
135 def varfunc(varname, origvalue, op, newlines):
136 import urllib.parse
137 if varname == 'LIC_FILES_CHKSUM':
138 new_licenses = []
139 licenses = origvalue.split('\\')
140 for license in licenses:
141 if not license:
142 logger.warning("No license file was detected for the main module!")
143 # the license list of the main recipe must be empty
144 # this can happen for example in case of CLOSED license
145 # Fall through to complete recipe generation
146 continue
147 license = license.strip()
148 uri, chksum = license.split(';', 1)
149 url = urllib.parse.urlparse(uri)
150 new_uri = os.path.join(
151 url.scheme + "://", "src", "${GO_IMPORT}", url.netloc + url.path) + ";" + chksum
152 new_licenses.append(new_uri)
153
154 return new_licenses, None, -1, True
155 return origvalue, None, 0, True
156
157 updated, newlines = bb.utils.edit_metadata(
158 lines_before, ['LIC_FILES_CHKSUM'], varfunc)
159 return self.__update_lines_before(updated, newlines, lines_before)
160
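
A hedged example of the per-entry rewrite performed by __rewrite_lic_vars() above: each LIC_FILES_CHKSUM URI is re-rooted under src/${GO_IMPORT} while the checksum suffix is preserved (the input entry below is hypothetical):

    # Mirrors the urlparse/os.path.join rewrite in varfunc() above.
    import os
    import urllib.parse

    def rewrite_license_uri(entry):
        uri, chksum = entry.strip().split(';', 1)
        url = urllib.parse.urlparse(uri)
        return os.path.join(url.scheme + "://", "src", "${GO_IMPORT}",
                            url.netloc + url.path) + ";" + chksum

    # rewrite_license_uri("file://LICENSE;md5=<hash>")
    #   -> "file://src/${GO_IMPORT}/LICENSE;md5=<hash>"
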
161 def __rewrite_src_uri(self, lines_before):
162
163 def varfunc(varname, origvalue, op, newlines):
164 if varname == 'SRC_URI':
165 src_uri = ['git://${GO_IMPORT};protocol=https;nobranch=1;destsuffix=${GO_SRCURI_DESTSUFFIX}']
166 return src_uri, None, -1, True
167 return origvalue, None, 0, True
168
169 updated, newlines = bb.utils.edit_metadata(lines_before, ['SRC_URI'], varfunc)
170 return self.__update_lines_before(updated, newlines, lines_before)
171
172
173def register_recipe_handlers(handlers):
174 handlers.append((GoRecipeHandler(), 60))
diff --git a/scripts/lib/recipetool/create_kmod.py b/scripts/lib/recipetool/create_kmod.py
index 85b5c48e53..cc00106961 100644
--- a/scripts/lib/recipetool/create_kmod.py
+++ b/scripts/lib/recipetool/create_kmod.py
@@ -113,7 +113,7 @@ class KernelModuleRecipeHandler(RecipeHandler):
113 kdirpath, _ = check_target(compile_lines, install=False) 113 kdirpath, _ = check_target(compile_lines, install=False)
114 114
115 if manual_install or not install_lines: 115 if manual_install or not install_lines:
116 lines_after.append('EXTRA_OEMAKE_append_task-install = " -C ${STAGING_KERNEL_DIR} M=${S}"') 116 lines_after.append('EXTRA_OEMAKE:append:task-install = " -C ${STAGING_KERNEL_DIR} M=${S}"')
117 elif install_target and install_target != 'modules_install': 117 elif install_target and install_target != 'modules_install':
118 lines_after.append('MODULES_INSTALL_TARGET = "install"') 118 lines_after.append('MODULES_INSTALL_TARGET = "install"')
119 119
diff --git a/scripts/lib/recipetool/create_npm.py b/scripts/lib/recipetool/create_npm.py
index 2bcae91dfa..8c4cdd5234 100644
--- a/scripts/lib/recipetool/create_npm.py
+++ b/scripts/lib/recipetool/create_npm.py
@@ -6,16 +6,20 @@
6"""Recipe creation tool - npm module support plugin""" 6"""Recipe creation tool - npm module support plugin"""
7 7
8import json 8import json
9import logging
9import os 10import os
10import re 11import re
11import sys 12import sys
12import tempfile 13import tempfile
13import bb 14import bb
14from bb.fetch2.npm import NpmEnvironment 15from bb.fetch2.npm import NpmEnvironment
16from bb.fetch2.npm import npm_package
15from bb.fetch2.npmsw import foreach_dependencies 17from bb.fetch2.npmsw import foreach_dependencies
18from oe.license_finder import match_licenses, find_license_files
16from recipetool.create import RecipeHandler 19from recipetool.create import RecipeHandler
17from recipetool.create import guess_license 20from recipetool.create import generate_common_licenses_chksums
18from recipetool.create import split_pkg_licenses 21from recipetool.create import split_pkg_licenses
22logger = logging.getLogger('recipetool')
19 23
20TINFOIL = None 24TINFOIL = None
21 25
@@ -28,15 +32,6 @@ class NpmRecipeHandler(RecipeHandler):
28 """Class to handle the npm recipe creation""" 32 """Class to handle the npm recipe creation"""
29 33
30 @staticmethod 34 @staticmethod
31 def _npm_name(name):
32 """Generate a Yocto friendly npm name"""
33 name = re.sub("/", "-", name)
34 name = name.lower()
35 name = re.sub(r"[^\-a-z0-9]", "", name)
36 name = name.strip("-")
37 return name
38
39 @staticmethod
40 def _get_registry(lines): 35 def _get_registry(lines):
41 """Get the registry value from the 'npm://registry' url""" 36 """Get the registry value from the 'npm://registry' url"""
42 registry = None 37 registry = None
@@ -117,31 +112,71 @@ class NpmRecipeHandler(RecipeHandler):
117 """Return the extra license files and the list of packages""" 112 """Return the extra license files and the list of packages"""
118 licfiles = [] 113 licfiles = []
119 packages = {} 114 packages = {}
115 # Licenses from package.json will point to COMMON_LICENSE_DIR so we need
116 # to associate them explicitely to packages for split_pkg_licenses()
117 fallback_licenses = dict()
118
119 def _find_package_licenses(destdir):
120 """Either find license files, or use package.json metadata"""
121 def _get_licenses_from_package_json(package_json):
122 with open(os.path.join(srctree, package_json), "r") as f:
123 data = json.load(f)
124 if "license" in data:
125 licenses = data["license"].split(" ")
126 licenses = [license.strip("()") for license in licenses if license != "OR" and license != "AND"]
127 return [], licenses
128 else:
129 return [package_json], None
130
131 basedir = os.path.join(srctree, destdir)
132 licfiles = find_license_files(basedir)
133 if len(licfiles) > 0:
134 return licfiles, None
135 else:
136 # A license wasn't found in the package directory, so we'll use the package.json metadata
137 pkg_json = os.path.join(basedir, "package.json")
138 return _get_licenses_from_package_json(pkg_json)
139
140 def _get_package_licenses(destdir, package):
141 (package_licfiles, package_licenses) = _find_package_licenses(destdir)
142 if package_licfiles:
143 licfiles.extend(package_licfiles)
144 else:
145 fallback_licenses[package] = package_licenses
120 146
121 def _licfiles_append(licfile): 147 # Handle the dependencies
122 """Append 'licfile' to the license files list""" 148 def _handle_dependency(name, params, destdir):
123 licfilepath = os.path.join(srctree, licfile) 149 deptree = destdir.split('node_modules/')
124 licmd5 = bb.utils.md5_file(licfilepath) 150 suffix = "-".join([npm_package(dep) for dep in deptree])
125 licfiles.append("file://%s;md5=%s" % (licfile, licmd5)) 151 packages["${PN}" + suffix] = destdir
152 _get_package_licenses(destdir, "${PN}" + suffix)
153
154 with open(shrinkwrap_file, "r") as f:
155 shrinkwrap = json.load(f)
156 foreach_dependencies(shrinkwrap, _handle_dependency, dev)
126 157
127 # Handle the parent package 158 # Handle the parent package
128 _licfiles_append("package.json")
129 packages["${PN}"] = "" 159 packages["${PN}"] = ""
160 _get_package_licenses(srctree, "${PN}")
130 161
131 # Handle the dependencies 162 return licfiles, packages, fallback_licenses
132 def _handle_dependency(name, params, deptree): 163
133 suffix = "-".join([self._npm_name(dep) for dep in deptree]) 164 # Handle the peer dependencies
134 destdirs = [os.path.join("node_modules", dep) for dep in deptree] 165 def _handle_peer_dependency(self, shrinkwrap_file):
135 destdir = os.path.join(*destdirs) 166 """Check if package has peer dependencies and show warning if it is the case"""
136 _licfiles_append(os.path.join(destdir, "package.json"))
137 packages["${PN}-" + suffix] = destdir
138
139 with open(shrinkwrap_file, "r") as f: 167 with open(shrinkwrap_file, "r") as f:
140 shrinkwrap = json.load(f) 168 shrinkwrap = json.load(f)
169
170 packages = shrinkwrap.get("packages", {})
171 peer_deps = packages.get("", {}).get("peerDependencies", {})
172
173 for peer_dep in peer_deps:
174 peer_dep_yocto_name = npm_package(peer_dep)
175            bb.warn(peer_dep + " is a peer dependency of the current package. " +
176                    "Please add this peer dependency to the RDEPENDS variable as %s and generate its recipe with devtool"
177 % peer_dep_yocto_name)
141 178
142 foreach_dependencies(shrinkwrap, _handle_dependency, dev)
143 179
144 return licfiles, packages
145 180
146 def process(self, srctree, classes, lines_before, lines_after, handled, extravalues): 181 def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
147 """Handle the npm recipe creation""" 182 """Handle the npm recipe creation"""
@@ -160,7 +195,7 @@ class NpmRecipeHandler(RecipeHandler):
160 if "name" not in data or "version" not in data: 195 if "name" not in data or "version" not in data:
161 return False 196 return False
162 197
163 extravalues["PN"] = self._npm_name(data["name"]) 198 extravalues["PN"] = npm_package(data["name"])
164 extravalues["PV"] = data["version"] 199 extravalues["PV"] = data["version"]
165 200
166 if "description" in data: 201 if "description" in data:
@@ -229,7 +264,7 @@ class NpmRecipeHandler(RecipeHandler):
229 value = origvalue.replace("version=" + data["version"], "version=${PV}") 264 value = origvalue.replace("version=" + data["version"], "version=${PV}")
230 value = value.replace("version=latest", "version=${PV}") 265 value = value.replace("version=latest", "version=${PV}")
231 values = [line.strip() for line in value.strip('\n').splitlines()] 266 values = [line.strip() for line in value.strip('\n').splitlines()]
232 if "dependencies" in shrinkwrap: 267 if "dependencies" in shrinkwrap.get("packages", {}).get("", {}):
233 values.append(url_recipe) 268 values.append(url_recipe)
234 return values, None, 4, False 269 return values, None, 4, False
235 270
@@ -245,13 +280,19 @@ class NpmRecipeHandler(RecipeHandler):
245 fetcher.unpack(srctree) 280 fetcher.unpack(srctree)
246 281
247 bb.note("Handling licences ...") 282 bb.note("Handling licences ...")
248 (licfiles, packages) = self._handle_licenses(srctree, shrinkwrap_file, dev) 283 (licfiles, packages, fallback_licenses) = self._handle_licenses(srctree, shrinkwrap_file, dev)
249 extravalues["LIC_FILES_CHKSUM"] = licfiles 284 licvalues = match_licenses(licfiles, srctree, d)
250 split_pkg_licenses(guess_license(srctree, d), packages, lines_after, []) 285 split_pkg_licenses(licvalues, packages, lines_after, fallback_licenses)
286 fallback_licenses_flat = [license for sublist in fallback_licenses.values() for license in sublist]
287 extravalues["LIC_FILES_CHKSUM"] = generate_common_licenses_chksums(fallback_licenses_flat, d)
288 extravalues["LICENSE"] = fallback_licenses_flat
251 289
252 classes.append("npm") 290 classes.append("npm")
253 handled.append("buildsystem") 291 handled.append("buildsystem")
254 292
293 # Check if package has peer dependencies and inform the user
294 self._handle_peer_dependency(shrinkwrap_file)
295
255 return True 296 return True
256 297
257def register_recipe_handlers(handlers): 298def register_recipe_handlers(handlers):
diff --git a/scripts/lib/recipetool/setvar.py b/scripts/lib/recipetool/setvar.py
index f8e2ee75fb..b5ad335cae 100644
--- a/scripts/lib/recipetool/setvar.py
+++ b/scripts/lib/recipetool/setvar.py
@@ -49,6 +49,7 @@ def setvar(args):
49 for patch in patches: 49 for patch in patches:
50 for line in patch: 50 for line in patch:
51 sys.stdout.write(line) 51 sys.stdout.write(line)
52 tinfoil.modified_files()
52 return 0 53 return 0
53 54
54 55