Diffstat (limited to 'scripts/lib/devtool/standard.py')
-rw-r--r--    scripts/lib/devtool/standard.py    751
1 file changed, 411 insertions(+), 340 deletions(-)
diff --git a/scripts/lib/devtool/standard.py b/scripts/lib/devtool/standard.py
index 01fb5ad96f..1fd5947c41 100644
--- a/scripts/lib/devtool/standard.py
+++ b/scripts/lib/devtool/standard.py
@@ -18,11 +18,13 @@ import argparse_oe
 import scriptutils
 import errno
 import glob
-import filecmp
 from collections import OrderedDict
+
 from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, use_external_build, setup_git_repo, recipe_to_append, get_bbclassextend_targets, update_unlockedsigs, check_prerelease_version, check_git_repo_dirty, check_git_repo_op, DevtoolError
 from devtool import parse_recipe
 
+import bb.utils
+
 logger = logging.getLogger('devtool')
 
 override_branch_prefix = 'devtool-override-'
@@ -30,7 +32,8 @@ override_branch_prefix = 'devtool-override-'
 
 def add(args, config, basepath, workspace):
     """Entry point for the devtool 'add' subcommand"""
-    import bb
+    import bb.data
+    import bb.process
     import oe.recipeutils
 
     if not args.recipename and not args.srctree and not args.fetch and not args.fetchuri:
@@ -147,6 +150,8 @@ def add(args, config, basepath, workspace):
         extracmdopts += ' -a'
     if args.npm_dev:
         extracmdopts += ' --npm-dev'
+    if args.no_pypi:
+        extracmdopts += ' --no-pypi'
     if args.mirrors:
         extracmdopts += ' --mirrors'
     if args.srcrev:
@@ -204,7 +209,7 @@ def add(args, config, basepath, workspace):
             for fn in os.listdir(tempdir):
                 shutil.move(os.path.join(tempdir, fn), recipedir)
         else:
-            raise DevtoolError('Command \'%s\' did not create any recipe file:\n%s' % (e.command, e.stdout))
+            raise DevtoolError(f'Failed to create a recipe file for source {source}')
         attic_recipe = os.path.join(config.workspace_path, 'attic', recipename, os.path.basename(recipefile))
         if os.path.exists(attic_recipe):
             logger.warning('A modified recipe from a previous invocation exists in %s - you may wish to move this over the top of the new recipe if you had changes in it that you want to continue with' % attic_recipe)
@@ -234,10 +239,14 @@ def add(args, config, basepath, workspace):
     if args.fetchuri and not args.no_git:
         setup_git_repo(srctree, args.version, 'devtool', d=tinfoil.config_data)
 
-    initial_rev = None
+    initial_rev = {}
     if os.path.exists(os.path.join(srctree, '.git')):
         (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree)
-        initial_rev = stdout.rstrip()
+        initial_rev["."] = stdout.rstrip()
+        (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse HEAD` $PWD\'', cwd=srctree)
+        for line in stdout.splitlines():
+            (rev, submodule) = line.split()
+            initial_rev[os.path.relpath(submodule, srctree)] = rev
 
     if args.src_subdir:
         srctree = os.path.join(srctree, args.src_subdir)
@@ -251,7 +260,8 @@ def add(args, config, basepath, workspace):
             if b_is_s:
                 f.write('EXTERNALSRC_BUILD = "%s"\n' % srctree)
             if initial_rev:
-                f.write('\n# initial_rev: %s\n' % initial_rev)
+                for key, value in initial_rev.items():
+                    f.write('\n# initial_rev %s: %s\n' % (key, value))
 
             if args.binary:
                 f.write('do_install:append() {\n')
@@ -298,6 +308,7 @@ def add(args, config, basepath, workspace):
 
 def _check_compatible_recipe(pn, d):
     """Check if the recipe is supported by devtool"""
+    import bb.data
     if pn == 'perf':
         raise DevtoolError("The perf recipe does not actually check out "
                            "source and thus cannot be supported by this tool",
@@ -353,7 +364,7 @@ def _move_file(src, dst, dry_run_outdir=None, base_outdir=None):
             bb.utils.mkdirhier(dst_d)
         shutil.move(src, dst)
 
-def _copy_file(src, dst, dry_run_outdir=None):
+def _copy_file(src, dst, dry_run_outdir=None, base_outdir=None):
     """Copy a file. Creates all the directory components of destination path."""
     dry_run_suffix = ' (dry-run)' if dry_run_outdir else ''
     logger.debug('Copying %s to %s%s' % (src, dst, dry_run_suffix))
@@ -367,7 +378,7 @@ def _copy_file(src, dst, dry_run_outdir=None):
 
 def _git_ls_tree(repodir, treeish='HEAD', recursive=False):
     """List contents of a git treeish"""
-    import bb
+    import bb.process
     cmd = ['git', 'ls-tree', '-z', treeish]
     if recursive:
         cmd.append('-r')
@@ -380,6 +391,19 @@ def _git_ls_tree(repodir, treeish='HEAD', recursive=False):
                 ret[split[3]] = split[0:3]
     return ret
 
+def _git_modified(repodir):
+    """List the difference between HEAD and the index"""
+    import bb.process
+    cmd = ['git', 'status', '--porcelain']
+    out, _ = bb.process.run(cmd, cwd=repodir)
+    ret = []
+    if out:
+        for line in out.split("\n"):
+            if line and not line.startswith('??'):
+                ret.append(line[3:])
+    return ret
+
+
 def _git_exclude_path(srctree, path):
     """Return pathspec (list of paths) that excludes certain path"""
     # NOTE: "Filtering out" files/paths in this way is not entirely reliable -
@@ -407,8 +431,6 @@ def _ls_tree(directory):
 
 def extract(args, config, basepath, workspace):
     """Entry point for the devtool 'extract' subcommand"""
-    import bb
-
     tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
     if not tinfoil:
         # Error already shown
@@ -431,8 +453,6 @@ def extract(args, config, basepath, workspace):
 
 def sync(args, config, basepath, workspace):
     """Entry point for the devtool 'sync' subcommand"""
-    import bb
-
     tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
     if not tinfoil:
         # Error already shown
@@ -453,41 +473,11 @@ def sync(args, config, basepath, workspace):
     finally:
         tinfoil.shutdown()
 
-def symlink_oelocal_files_srctree(rd,srctree):
-    import oe.patch
-    if os.path.abspath(rd.getVar('S')) == os.path.abspath(rd.getVar('WORKDIR')):
-        # If recipe extracts to ${WORKDIR}, symlink the files into the srctree
-        # (otherwise the recipe won't build as expected)
-        local_files_dir = os.path.join(srctree, 'oe-local-files')
-        addfiles = []
-        for root, _, files in os.walk(local_files_dir):
-            relpth = os.path.relpath(root, local_files_dir)
-            if relpth != '.':
-                bb.utils.mkdirhier(os.path.join(srctree, relpth))
-            for fn in files:
-                if fn == '.gitignore':
-                    continue
-                destpth = os.path.join(srctree, relpth, fn)
-                if os.path.exists(destpth):
-                    os.unlink(destpth)
-                if relpth != '.':
-                    back_relpth = os.path.relpath(local_files_dir, root)
-                    os.symlink('%s/oe-local-files/%s/%s' % (back_relpth, relpth, fn), destpth)
-                else:
-                    os.symlink('oe-local-files/%s' % fn, destpth)
-                addfiles.append(os.path.join(relpth, fn))
-        if addfiles:
-            bb.process.run('git add %s' % ' '.join(addfiles), cwd=srctree)
-        useroptions = []
-        oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=rd)
-        bb.process.run('git %s commit -m "Committing local file symlinks\n\n%s"' % (' '.join(useroptions), oe.patch.GitApplyTree.ignore_commit_prefix), cwd=srctree)
-
-
 def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil, no_overrides=False):
     """Extract sources of a recipe"""
-    import oe.recipeutils
-    import oe.patch
     import oe.path
+    import bb.data
+    import bb.process
 
     pn = d.getVar('PN')
 
@@ -520,7 +510,9 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
         for event in history:
             if not 'flag' in event:
                 if event['op'].startswith((':append[', ':prepend[')):
-                    extra_overrides.append(event['op'].split('[')[1].split(']')[0])
+                    override = event['op'].split('[')[1].split(']')[0]
+                    if not override.startswith('pn-'):
+                        extra_overrides.append(override)
         # We want to remove duplicate overrides. If a recipe had multiple
         # SRC_URI_override += values it would cause mulitple instances of
         # overrides. This doesn't play nicely with things like creating a
@@ -550,6 +542,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
     tempbasedir = d.getVar('WORKDIR')
     bb.utils.mkdirhier(tempbasedir)
     tempdir = tempfile.mkdtemp(prefix='devtooltmp-', dir=tempbasedir)
+    appendbackup = None
     try:
         tinfoil.logger.setLevel(logging.WARNING)
 
@@ -560,11 +553,13 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
             appendbackup = os.path.join(tempdir, os.path.basename(appendfile) + '.bak')
             shutil.copyfile(appendfile, appendbackup)
         else:
-            appendbackup = None
             bb.utils.mkdirhier(os.path.dirname(appendfile))
         logger.debug('writing append file %s' % appendfile)
         with open(appendfile, 'a') as f:
             f.write('###--- _extract_source\n')
+            f.write('deltask do_recipe_qa\n')
+            f.write('deltask do_recipe_qa_setscene\n')
+            f.write('ERROR_QA:remove = "patch-fuzz"\n')
             f.write('DEVTOOL_TEMPDIR = "%s"\n' % tempdir)
             f.write('DEVTOOL_DEVBRANCH = "%s"\n' % devbranch)
             if not is_kernel_yocto:
@@ -582,6 +577,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
         preservestampfile = os.path.join(sstate_manifests, 'preserve-stamps')
         with open(preservestampfile, 'w') as f:
             f.write(d.getVar('STAMP'))
+        tinfoil.modified_files()
         try:
             if is_kernel_yocto:
                 # We need to generate the kernel config
@@ -629,7 +625,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
                 srcsubdir = f.read()
         except FileNotFoundError as e:
             raise DevtoolError('Something went wrong with source extraction - the devtool-source class was not active or did not function correctly:\n%s' % str(e))
-        srcsubdir_rel = os.path.relpath(srcsubdir, os.path.join(tempdir, 'workdir'))
+        srcsubdir_rel = os.path.relpath(srcsubdir, os.path.join(tempdir, 'workdir', os.path.relpath(d.getVar('UNPACKDIR'), d.getVar('WORKDIR'))))
 
         # Check if work-shared is empty, if yes
        # find source and copy to work-shared
@@ -644,39 +640,26 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
 
             if os.path.exists(workshareddir) and (not os.listdir(workshareddir) or kernelVersion != staging_kerVer):
                 shutil.rmtree(workshareddir)
-                oe.path.copyhardlinktree(srcsubdir,workshareddir)
+                oe.path.copyhardlinktree(srcsubdir, workshareddir)
             elif not os.path.exists(workshareddir):
-                oe.path.copyhardlinktree(srcsubdir,workshareddir)
-
-        tempdir_localdir = os.path.join(tempdir, 'oe-local-files')
-        srctree_localdir = os.path.join(srctree, 'oe-local-files')
+                oe.path.copyhardlinktree(srcsubdir, workshareddir)
 
         if sync:
-            bb.process.run('git fetch file://' + srcsubdir + ' ' + devbranch + ':' + devbranch, cwd=srctree)
-
-            # Move oe-local-files directory to srctree
-            # As the oe-local-files is not part of the constructed git tree,
-            # remove them directly during the synchrounizating might surprise
-            # the users. Instead, we move it to oe-local-files.bak and remind
-            # user in the log message.
-            if os.path.exists(srctree_localdir + '.bak'):
-                shutil.rmtree(srctree_localdir, srctree_localdir + '.bak')
-
-            if os.path.exists(srctree_localdir):
-                logger.info('Backing up current local file directory %s' % srctree_localdir)
-                shutil.move(srctree_localdir, srctree_localdir + '.bak')
-
-            if os.path.exists(tempdir_localdir):
-                logger.info('Syncing local source files to srctree...')
-                shutil.copytree(tempdir_localdir, srctree_localdir)
-        else:
-            # Move oe-local-files directory to srctree
-            if os.path.exists(tempdir_localdir):
-                logger.info('Adding local source files to srctree...')
-                shutil.move(tempdir_localdir, srcsubdir)
+            try:
+                logger.info('Backing up current %s branch as branch: %s.bak' % (devbranch, devbranch))
+                bb.process.run('git branch -f ' + devbranch + '.bak', cwd=srctree)
+
+                # Use git fetch to update the source with the current recipe
+                # To be able to update the currently checked out branch with
+                # possibly new history (no fast-forward) git needs to be told
+                # that's ok
+                logger.info('Syncing source files including patches to git branch: %s' % devbranch)
+                bb.process.run('git fetch --update-head-ok --force file://' + srcsubdir + ' ' + devbranch + ':' + devbranch, cwd=srctree)
+            except bb.process.ExecutionError as e:
+                raise DevtoolError("Error when syncing source files to local checkout: %s" % str(e))
 
+        else:
             shutil.move(srcsubdir, srctree)
-            symlink_oelocal_files_srctree(d,srctree)
 
         if is_kernel_yocto:
             logger.info('Copying kernel config to srctree')
@@ -695,8 +678,6 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
 
 def _add_md5(config, recipename, filename):
     """Record checksum of a file (or recursively for a directory) to the md5-file of the workspace"""
-    import bb.utils
-
     def addfile(fn):
         md5 = bb.utils.md5_file(fn)
         with open(os.path.join(config.workspace_path, '.devtool_md5'), 'a+') as f:
@@ -715,7 +696,6 @@ def _add_md5(config, recipename, filename):
 def _check_preserve(config, recipename):
     """Check if a file was manually changed and needs to be saved in 'attic'
        directory"""
-    import bb.utils
     origfile = os.path.join(config.workspace_path, '.devtool_md5')
     newfile = os.path.join(config.workspace_path, '.devtool_md5_new')
     preservepath = os.path.join(config.workspace_path, 'attic', recipename)
@@ -746,26 +726,36 @@ def _check_preserve(config, recipename):
 
 def get_staging_kver(srcdir):
     # Kernel version from work-shared
-    kerver = []
-    staging_kerVer=""
-    if os.path.exists(srcdir) and os.listdir(srcdir):
-        with open(os.path.join(srcdir,"Makefile")) as f:
-            version = [next(f) for x in range(5)][1:4]
-        for word in version:
-            kerver.append(word.split('= ')[1].split('\n')[0])
-        staging_kerVer = ".".join(kerver)
-    return staging_kerVer
+    import itertools
+    try:
+        with open(os.path.join(srcdir, "Makefile")) as f:
+            # Take VERSION, PATCHLEVEL, SUBLEVEL from lines 1, 2, 3
+            return ".".join(line.rstrip().split('= ')[1] for line in itertools.islice(f, 1, 4))
+    except FileNotFoundError:
+        return ""
 
 def get_staging_kbranch(srcdir):
+    import bb.process
     staging_kbranch = ""
     if os.path.exists(srcdir) and os.listdir(srcdir):
-        (branch, _) = bb.process.run('git branch | grep \* | cut -d \' \' -f2', cwd=srcdir)
+        (branch, _) = bb.process.run('git branch | grep \\* | cut -d \' \' -f2', cwd=srcdir)
         staging_kbranch = "".join(branch.split('\n')[0])
     return staging_kbranch
 
+def get_real_srctree(srctree, s, unpackdir):
+    # Check that recipe isn't using a shared workdir
+    s = os.path.abspath(s)
+    unpackdir = os.path.abspath(unpackdir)
+    if s.startswith(unpackdir) and s != unpackdir and os.path.dirname(s) != unpackdir:
+        # Handle if S is set to a subdirectory of the source
+        srcsubdir = os.path.relpath(s, unpackdir).split(os.sep, 1)[1]
+        srctree = os.path.join(srctree, srcsubdir)
+    return srctree
+
 def modify(args, config, basepath, workspace):
     """Entry point for the devtool 'modify' subcommand"""
-    import bb
+    import bb.data
+    import bb.process
     import oe.recipeutils
     import oe.patch
     import oe.path
@@ -807,8 +797,8 @@ def modify(args, config, basepath, workspace):
 
     _check_compatible_recipe(pn, rd)
 
-    initial_rev = None
-    commits = []
+    initial_revs = {}
+    commits = {}
     check_commits = False
 
     if bb.data.inherits_class('kernel-yocto', rd):
@@ -820,36 +810,22 @@ def modify(args, config, basepath, workspace):
         staging_kerVer = get_staging_kver(srcdir)
         staging_kbranch = get_staging_kbranch(srcdir)
         if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch):
-            oe.path.copyhardlinktree(srcdir,srctree)
-            workdir = rd.getVar('WORKDIR')
+            oe.path.copyhardlinktree(srcdir, srctree)
+            unpackdir = rd.getVar('UNPACKDIR')
             srcsubdir = rd.getVar('S')
-            localfilesdir = os.path.join(srctree,'oe-local-files')
-            # Move local source files into separate subdir
-            recipe_patches = [os.path.basename(patch) for patch in oe.recipeutils.get_recipe_patches(rd)]
-            local_files = oe.recipeutils.get_recipe_local_files(rd)
 
-            for key in local_files.copy():
-                if key.endswith('scc'):
-                    sccfile = open(local_files[key], 'r')
-                    for l in sccfile:
-                        line = l.split()
-                        if line and line[0] in ('kconf', 'patch'):
-                            cfg = os.path.join(os.path.dirname(local_files[key]), line[-1])
-                            if not cfg in local_files.values():
-                                local_files[line[-1]] = cfg
-                                shutil.copy2(cfg, workdir)
-                    sccfile.close()
-
-            # Ignore local files with subdir={BP}
+            # Add locally copied files to gitignore as we add back to the metadata directly
+            local_files = oe.recipeutils.get_recipe_local_files(rd)
             srcabspath = os.path.abspath(srcsubdir)
-            local_files = [fname for fname in local_files if os.path.exists(os.path.join(workdir, fname)) and (srcabspath == workdir or not os.path.join(workdir, fname).startswith(srcabspath + os.sep))]
+            local_files = [fname for fname in local_files if
                           os.path.exists(os.path.join(unpackdir, fname)) and
                           srcabspath == unpackdir]
             if local_files:
-                for fname in local_files:
-                    _move_file(os.path.join(workdir, fname), os.path.join(srctree, 'oe-local-files', fname))
-                with open(os.path.join(srctree, 'oe-local-files', '.gitignore'), 'w') as f:
-                    f.write('# Ignore local files, by default. Remove this file ''if you want to commit the directory to Git\n*\n')
-
-            symlink_oelocal_files_srctree(rd,srctree)
+                with open(os.path.join(srctree, '.gitignore'), 'a+') as f:
+                    f.write('# Ignore local files, by default. Remove following lines'
+                            'if you want to commit the directory to Git\n')
+                    for fname in local_files:
+                        f.write('%s\n' % fname)
 
             task = 'do_configure'
             res = tinfoil.build_targets(pn, task, handle_events=True)
@@ -857,22 +833,33 @@ def modify(args, config, basepath, workspace):
             # Copy .config to workspace
             kconfpath = rd.getVar('B')
             logger.info('Copying kernel config to workspace')
-            shutil.copy2(os.path.join(kconfpath, '.config'),srctree)
+            shutil.copy2(os.path.join(kconfpath, '.config'), srctree)
 
             # Set this to true, we still need to get initial_rev
             # by parsing the git repo
             args.no_extract = True
 
     if not args.no_extract:
-        initial_rev, _ = _extract_source(srctree, args.keep_temp, args.branch, False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides)
-        if not initial_rev:
+        initial_revs["."], _ = _extract_source(srctree, args.keep_temp, args.branch, False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides)
+        if not initial_revs["."]:
             return 1
         logger.info('Source tree extracted to %s' % srctree)
+
         if os.path.exists(os.path.join(srctree, '.git')):
             # Get list of commits since this revision
-            (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_rev, cwd=srctree)
-            commits = stdout.split()
+            (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_revs["."], cwd=srctree)
+            commits["."] = stdout.split()
             check_commits = True
+            try:
+                (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse devtool-base` $PWD\'', cwd=srctree)
+            except bb.process.ExecutionError:
+                stdout = ""
+            for line in stdout.splitlines():
+                (rev, submodule_path) = line.split()
+                submodule = os.path.relpath(submodule_path, srctree)
+                initial_revs[submodule] = rev
+                (stdout, _) = bb.process.run('git rev-list --reverse devtool-base..HEAD', cwd=submodule_path)
+                commits[submodule] = stdout.split()
     else:
         if os.path.exists(os.path.join(srctree, '.git')):
             # Check if it's a tree previously extracted by us. This is done
@@ -889,11 +876,11 @@ def modify(args, config, basepath, workspace):
             for line in stdout.splitlines():
                 if line.startswith('*'):
                     (stdout, _) = bb.process.run('git rev-parse devtool-base', cwd=srctree)
-                    initial_rev = stdout.rstrip()
-            if not initial_rev:
+                    initial_revs["."] = stdout.rstrip()
+            if "." not in initial_revs:
                 # Otherwise, just grab the head revision
                 (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree)
-                initial_rev = stdout.rstrip()
+                initial_revs["."] = stdout.rstrip()
 
     branch_patches = {}
     if check_commits:
@@ -910,28 +897,40 @@ def modify(args, config, basepath, workspace):
         seen_patches = []
         for branch in branches:
             branch_patches[branch] = []
-            (stdout, _) = bb.process.run('git log devtool-base..%s' % branch, cwd=srctree)
-            for line in stdout.splitlines():
-                line = line.strip()
-                if line.startswith(oe.patch.GitApplyTree.patch_line_prefix):
-                    origpatch = line[len(oe.patch.GitApplyTree.patch_line_prefix):].split(':', 1)[-1].strip()
-                    if not origpatch in seen_patches:
-                        seen_patches.append(origpatch)
-                        branch_patches[branch].append(origpatch)
+            (stdout, _) = bb.process.run('git rev-list devtool-base..%s' % branch, cwd=srctree)
+            for sha1 in stdout.splitlines():
+                notes = oe.patch.GitApplyTree.getNotes(srctree, sha1.strip())
+                origpatch = notes.get(oe.patch.GitApplyTree.original_patch)
+                if origpatch and origpatch not in seen_patches:
+                    seen_patches.append(origpatch)
+                    branch_patches[branch].append(origpatch)
 
     # Need to grab this here in case the source is within a subdirectory
     srctreebase = srctree
-
-    # Check that recipe isn't using a shared workdir
-    s = os.path.abspath(rd.getVar('S'))
-    workdir = os.path.abspath(rd.getVar('WORKDIR'))
-    if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir:
-        # Handle if S is set to a subdirectory of the source
-        srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1]
-        srctree = os.path.join(srctree, srcsubdir)
+    srctree = get_real_srctree(srctree, rd.getVar('S'), rd.getVar('UNPACKDIR'))
 
     bb.utils.mkdirhier(os.path.dirname(appendfile))
     with open(appendfile, 'w') as f:
+        # if not present, add type=git-dependency to the secondary sources
+        # (non local files) so they can be extracted correctly when building a recipe after
+        # doing a devtool modify on it
+        src_uri = rd.getVar('SRC_URI').split()
+        src_uri_append = []
+        src_uri_remove = []
+
+        # Assume first entry is main source extracted in ${S} so skip it
+        src_uri = src_uri[1::]
+
+        # Add "type=git-dependency" to all non local sources
+        for url in src_uri:
+            if not url.startswith('file://') and not 'type=' in url:
+                src_uri_remove.append(url)
+                src_uri_append.append('%s;type=git-dependency' % url)
+
+        if src_uri_remove:
+            f.write('SRC_URI:remove = "%s"\n' % ' '.join(src_uri_remove))
+            f.write('SRC_URI:append = " %s"\n\n' % ' '.join(src_uri_append))
+
         f.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n')
         # Local files can be modified/tracked in separate subdir under srctree
         # Mostly useful for packages with S != WORKDIR
@@ -948,34 +947,31 @@ def modify(args, config, basepath, workspace):
         f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree))
 
         if bb.data.inherits_class('kernel', rd):
-            f.write('SRCTREECOVEREDTASKS = "do_validate_branches do_kernel_checkout '
-                    'do_fetch do_unpack do_kernel_configcheck"\n')
-            f.write('\ndo_patch[noexec] = "1"\n')
-            f.write('\ndo_configure:append() {\n'
-                    ' cp ${B}/.config ${S}/.config.baseline\n'
-                    ' ln -sfT ${B}/.config ${S}/.config.new\n'
-                    '}\n')
             f.write('\ndo_kernel_configme:prepend() {\n'
                     ' if [ -e ${S}/.config ]; then\n'
                     ' mv ${S}/.config ${S}/.config.old\n'
                     ' fi\n'
                     '}\n')
-        if rd.getVarFlag('do_menuconfig','task'):
+        if rd.getVarFlag('do_menuconfig', 'task'):
             f.write('\ndo_configure:append() {\n'
-                    ' if [ ! ${DEVTOOL_DISABLE_MENUCONFIG} ]; then\n'
-                    ' cp ${B}/.config ${S}/.config.baseline\n'
-                    ' ln -sfT ${B}/.config ${S}/.config.new\n'
+                    ' if [ ${@oe.types.boolean(d.getVar("KCONFIG_CONFIG_ENABLE_MENUCONFIG"))} = True ]; then\n'
+                    ' cp ${KCONFIG_CONFIG_ROOTDIR}/.config ${S}/.config.baseline\n'
+                    ' ln -sfT ${KCONFIG_CONFIG_ROOTDIR}/.config ${S}/.config.new\n'
                     ' fi\n'
                    '}\n')
-        if initial_rev:
-            f.write('\n# initial_rev: %s\n' % initial_rev)
-            for commit in commits:
-                f.write('# commit: %s\n' % commit)
+        if initial_revs:
+            for name, rev in initial_revs.items():
+                f.write('\n# initial_rev %s: %s\n' % (name, rev))
+                if name in commits:
+                    for commit in commits[name]:
+                        f.write('# commit %s: %s\n' % (name, commit))
         if branch_patches:
             for branch in branch_patches:
                 if branch == args.branch:
                     continue
                 f.write('# patches_%s: %s\n' % (branch, ','.join(branch_patches[branch])))
+        if args.debug_build:
+            f.write('\nDEBUG_BUILD = "1"\n')
 
     update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn])
 
@@ -1020,6 +1016,7 @@ def rename(args, config, basepath, workspace):
         origfnver = ''
 
     recipefilemd5 = None
+    newrecipefilemd5 = None
     tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
     try:
         rd = parse_recipe(config, tinfoil, args.recipename, True)
@@ -1097,6 +1094,7 @@ def rename(args, config, basepath, workspace):
 
     # Rename source tree if it's the default path
     appendmd5 = None
+    newappendmd5 = None
     if not args.no_srctree:
         srctree = workspace[args.recipename]['srctree']
         if os.path.abspath(srctree) == os.path.join(config.workspace_path, 'sources', args.recipename):
@@ -1185,7 +1183,7 @@ def _get_patchset_revs(srctree, recipe_path, initial_rev=None, force_patch_refre
     """Get initial and update rev of a recipe. These are the start point of the
     whole patchset and start point for the patches to be re-generated/updated.
     """
-    import bb
+    import bb.process
 
     # Get current branch
     stdout, _ = bb.process.run('git rev-parse --abbrev-ref HEAD',
@@ -1193,44 +1191,56 @@ def _get_patchset_revs(srctree, recipe_path, initial_rev=None, force_patch_refre
     branchname = stdout.rstrip()
 
     # Parse initial rev from recipe if not specified
-    commits = []
+    commits = {}
     patches = []
+    initial_revs = {}
     with open(recipe_path, 'r') as f:
         for line in f:
-            if line.startswith('# initial_rev:'):
-                if not initial_rev:
-                    initial_rev = line.split(':')[-1].strip()
-            elif line.startswith('# commit:') and not force_patch_refresh:
-                commits.append(line.split(':')[-1].strip())
-            elif line.startswith('# patches_%s:' % branchname):
-                patches = line.split(':')[-1].strip().split(',')
-
-    update_rev = initial_rev
-    changed_revs = None
-    if initial_rev:
+            pattern = r'^#\s.*\s(.*):\s([0-9a-fA-F]+)$'
+            match = re.search(pattern, line)
+            if match:
+                name = match.group(1)
+                rev = match.group(2)
+                if line.startswith('# initial_rev'):
+                    if not (name == "." and initial_rev):
+                        initial_revs[name] = rev
+                elif line.startswith('# commit') and not force_patch_refresh:
+                    if name not in commits:
+                        commits[name] = [rev]
+                    else:
+                        commits[name].append(rev)
+                elif line.startswith('# patches_%s:' % branchname):
+                    patches = line.split(':')[-1].strip().split(',')
+
+    update_revs = dict(initial_revs)
+    changed_revs = {}
+    for name, rev in initial_revs.items():
         # Find first actually changed revision
         stdout, _ = bb.process.run('git rev-list --reverse %s..HEAD' %
-                                   initial_rev, cwd=srctree)
+                                   rev, cwd=os.path.join(srctree, name))
         newcommits = stdout.split()
-        for i in range(min(len(commits), len(newcommits))):
-            if newcommits[i] == commits[i]:
-                update_rev = commits[i]
+        if name in commits:
+            for i in range(min(len(commits[name]), len(newcommits))):
+                if newcommits[i] == commits[name][i]:
+                    update_revs[name] = commits[name][i]
 
         try:
             stdout, _ = bb.process.run('git cherry devtool-patched',
-                                       cwd=srctree)
+                                       cwd=os.path.join(srctree, name))
         except bb.process.ExecutionError as err:
             stdout = None
 
         if stdout is not None and not force_patch_refresh:
-            changed_revs = []
            for line in stdout.splitlines():
                 if line.startswith('+ '):
                     rev = line.split()[1]
                     if rev in newcommits:
-                        changed_revs.append(rev)
+                        if name not in changed_revs:
+                            changed_revs[name] = [rev]
+                        else:
+                            changed_revs[name].append(rev)
 
-    return initial_rev, update_rev, changed_revs, patches
+    return initial_revs, update_revs, changed_revs, patches
 
 def _remove_file_entries(srcuri, filelist):
     """Remove file:// entries from SRC_URI"""
@@ -1285,17 +1295,21 @@ def _remove_source_files(append, files, destpath, no_report_remove=False, dry_ru | |||
1285 | raise | 1295 | raise |
1286 | 1296 | ||
1287 | 1297 | ||
1288 | def _export_patches(srctree, rd, start_rev, destdir, changed_revs=None): | 1298 | def _export_patches(srctree, rd, start_revs, destdir, changed_revs=None): |
1289 | """Export patches from srctree to given location. | 1299 | """Export patches from srctree to given location. |
1290 | Returns three-tuple of dicts: | 1300 | Returns three-tuple of dicts: |
1291 | 1. updated - patches that already exist in SRCURI | 1301 | 1. updated - patches that already exist in SRCURI |
1292 | 2. added - new patches that don't exist in SRCURI | 1302 | 2. added - new patches that don't exist in SRCURI |
1293 | 3 removed - patches that exist in SRCURI but not in exported patches | 1303 | 3 removed - patches that exist in SRCURI but not in exported patches |
1294 | In each dict the key is the 'basepath' of the URI and value is the | 1304 | In each dict the key is the 'basepath' of the URI and value is: |
1295 | absolute path to the existing file in recipe space (if any). | 1305 | - for updated and added dicts, a dict with 2 optionnal keys: |
1306 | - 'path': the absolute path to the existing file in recipe space (if any) | ||
1307 | - 'patchdir': the directory in wich the patch should be applied (if any) | ||
1308 | - for removed dict, the absolute path to the existing file in recipe space | ||
1296 | """ | 1309 | """ |
1297 | import oe.recipeutils | 1310 | import oe.recipeutils |
1298 | from oe.patch import GitApplyTree | 1311 | from oe.patch import GitApplyTree |
1312 | import bb.process | ||
1299 | updated = OrderedDict() | 1313 | updated = OrderedDict() |
1300 | added = OrderedDict() | 1314 | added = OrderedDict() |
1301 | seqpatch_re = re.compile('^([0-9]{4}-)?(.+)') | 1315 | seqpatch_re = re.compile('^([0-9]{4}-)?(.+)') |
@@ -1306,59 +1320,67 @@ def _export_patches(srctree, rd, start_rev, destdir, changed_revs=None): | |||
1306 | 1320 | ||
1307 | # Generate patches from Git, exclude local files directory | 1321 | # Generate patches from Git, exclude local files directory |
1308 | patch_pathspec = _git_exclude_path(srctree, 'oe-local-files') | 1322 | patch_pathspec = _git_exclude_path(srctree, 'oe-local-files') |
1309 | GitApplyTree.extractPatches(srctree, start_rev, destdir, patch_pathspec) | 1323 | GitApplyTree.extractPatches(srctree, start_revs, destdir, patch_pathspec) |
1310 | 1324 | for dirpath, dirnames, filenames in os.walk(destdir): | |
1311 | new_patches = sorted(os.listdir(destdir)) | 1325 | new_patches = filenames |
1312 | for new_patch in new_patches: | 1326 | reldirpath = os.path.relpath(dirpath, destdir) |
1313 | # Strip numbering from patch names. If it's a git sequence named patch, | 1327 | for new_patch in new_patches: |
1314 | # the numbers might not match up since we are starting from a different | 1328 | # Strip numbering from patch names. If it's a git sequence named patch, |
1315 | # revision This does assume that people are using unique shortlog | 1329 | # the numbers might not match up since we are starting from a different |
1316 | # values, but they ought to be anyway... | 1330 | # revision This does assume that people are using unique shortlog |
1317 | new_basename = seqpatch_re.match(new_patch).group(2) | 1331 | # values, but they ought to be anyway... |
1318 | match_name = None | 1332 | new_basename = seqpatch_re.match(new_patch).group(2) |
1319 | for old_patch in existing_patches: | 1333 | match_name = None |
1320 | old_basename = seqpatch_re.match(old_patch).group(2) | 1334 | old_patch = None |
1321 | old_basename_splitext = os.path.splitext(old_basename) | 1335 | for old_patch in existing_patches: |
1322 | if old_basename.endswith(('.gz', '.bz2', '.Z')) and old_basename_splitext[0] == new_basename: | 1336 | old_basename = seqpatch_re.match(old_patch).group(2) |
1323 | old_patch_noext = os.path.splitext(old_patch)[0] | 1337 | old_basename_splitext = os.path.splitext(old_basename) |
1324 | match_name = old_patch_noext | 1338 | if old_basename.endswith(('.gz', '.bz2', '.Z')) and old_basename_splitext[0] == new_basename: |
1325 | break | 1339 | old_patch_noext = os.path.splitext(old_patch)[0] |
1326 | elif new_basename == old_basename: | 1340 | match_name = old_patch_noext |
1327 | match_name = old_patch | 1341 | break |
1328 | break | 1342 | elif new_basename == old_basename: |
1329 | if match_name: | 1343 | match_name = old_patch |
1330 | # Rename patch files | 1344 | break |
1331 | if new_patch != match_name: | 1345 | if match_name: |
1332 | bb.utils.rename(os.path.join(destdir, new_patch), | 1346 | # Rename patch files |
1333 | os.path.join(destdir, match_name)) | 1347 | if new_patch != match_name: |
1334 | # Need to pop it off the list now before checking changed_revs | 1348 | bb.utils.rename(os.path.join(destdir, new_patch), |
1335 | oldpath = existing_patches.pop(old_patch) | 1349 | os.path.join(destdir, match_name)) |
1336 | if changed_revs is not None: | 1350 | # Need to pop it off the list now before checking changed_revs |
1337 | # Avoid updating patches that have not actually changed | 1351 | oldpath = existing_patches.pop(old_patch) |
1338 | with open(os.path.join(destdir, match_name), 'r') as f: | 1352 | if changed_revs is not None and dirpath in changed_revs: |
1339 | firstlineitems = f.readline().split() | 1353 | # Avoid updating patches that have not actually changed |
1340 | # Looking for "From <hash>" line | 1354 | with open(os.path.join(dirpath, match_name), 'r') as f: |
1341 | if len(firstlineitems) > 1 and len(firstlineitems[1]) == 40: | 1355 | firstlineitems = f.readline().split() |
1342 | if not firstlineitems[1] in changed_revs: | 1356 | # Looking for "From <hash>" line |
1343 | continue | 1357 | if len(firstlineitems) > 1 and len(firstlineitems[1]) == 40: |
1344 | # Recompress if necessary | 1358 | if not firstlineitems[1] in changed_revs[dirpath]: |
1345 | if oldpath.endswith(('.gz', '.Z')): | 1359 | continue |
1346 | bb.process.run(['gzip', match_name], cwd=destdir) | 1360 | # Recompress if necessary |
1347 | if oldpath.endswith('.gz'): | 1361 | if oldpath.endswith(('.gz', '.Z')): |
1348 | match_name += '.gz' | 1362 | bb.process.run(['gzip', match_name], cwd=destdir) |
1349 | else: | 1363 | if oldpath.endswith('.gz'): |
1350 | match_name += '.Z' | 1364 | match_name += '.gz' |
1351 | elif oldpath.endswith('.bz2'): | 1365 | else: |
1352 | bb.process.run(['bzip2', match_name], cwd=destdir) | 1366 | match_name += '.Z' |
1353 | match_name += '.bz2' | 1367 | elif oldpath.endswith('.bz2'): |
1354 | updated[match_name] = oldpath | 1368 | bb.process.run(['bzip2', match_name], cwd=destdir) |
1355 | else: | 1369 | match_name += '.bz2' |
1356 | added[new_patch] = None | 1370 | updated[match_name] = {'path' : oldpath} |
1371 | if reldirpath != ".": | ||
1372 | updated[match_name]['patchdir'] = reldirpath | ||
1373 | else: | ||
1374 | added[new_patch] = {} | ||
1375 | if reldirpath != ".": | ||
1376 | added[new_patch]['patchdir'] = reldirpath | ||
1377 | |||
1357 | return (updated, added, existing_patches) | 1378 | return (updated, added, existing_patches) |
1358 | 1379 | ||
1359 | 1380 | ||
1360 | def _create_kconfig_diff(srctree, rd, outfile): | 1381 | def _create_kconfig_diff(srctree, rd, outfile): |
1361 | """Create a kconfig fragment""" | 1382 | """Create a kconfig fragment""" |
1383 | import bb.process | ||
1362 | # Only update config fragment if both config files exist | 1384 | # Only update config fragment if both config files exist |
1363 | orig_config = os.path.join(srctree, '.config.baseline') | 1385 | orig_config = os.path.join(srctree, '.config.baseline') |
1364 | new_config = os.path.join(srctree, '.config.new') | 1386 | new_config = os.path.join(srctree, '.config.new') |
@@ -1390,38 +1412,59 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
1390 | 1. updated - files that already exist in SRCURI | 1412 | 1. updated - files that already exist in SRCURI |
1391 | 2. added - new files files that don't exist in SRCURI | 1413 | 2. added - new files files that don't exist in SRCURI |
1392 | 3 removed - files that exist in SRCURI but not in exported files | 1414 | 3 removed - files that exist in SRCURI but not in exported files |
1393 | In each dict the key is the 'basepath' of the URI and value is the | 1415 | In each dict the key is the 'basepath' of the URI and value is: |
1394 | absolute path to the existing file in recipe space (if any). | 1416 | - for updated and added dicts, a dict with 1 optionnal key: |
1417 | - 'path': the absolute path to the existing file in recipe space (if any) | ||
1418 | - for removed dict, the absolute path to the existing file in recipe space | ||
1395 | """ | 1419 | """ |
1396 | import oe.recipeutils | 1420 | import oe.recipeutils |
1421 | import bb.data | ||
1422 | import bb.process | ||
1397 | 1423 | ||
1398 | # Find out local files (SRC_URI files that exist in the "recipe space"). | 1424 | # Find out local files (SRC_URI files that exist in the "recipe space"). |
1399 | # Local files that reside in srctree are not included in patch generation. | 1425 | # Local files that reside in srctree are not included in patch generation. |
1400 | # Instead they are directly copied over the original source files (in | 1426 | # Instead they are directly copied over the original source files (in |
1401 | # recipe space). | 1427 | # recipe space). |
1402 | existing_files = oe.recipeutils.get_recipe_local_files(rd) | 1428 | existing_files = oe.recipeutils.get_recipe_local_files(rd) |
1429 | |||
1403 | new_set = None | 1430 | new_set = None |
1404 | updated = OrderedDict() | 1431 | updated = OrderedDict() |
1405 | added = OrderedDict() | 1432 | added = OrderedDict() |
1406 | removed = OrderedDict() | 1433 | removed = OrderedDict() |
1407 | local_files_dir = os.path.join(srctreebase, 'oe-local-files') | 1434 | |
1408 | git_files = _git_ls_tree(srctree) | 1435 | # Get current branch and return early with empty lists |
1409 | if 'oe-local-files' in git_files: | 1436 | # if on one of the override branches |
1410 | # If tracked by Git, take the files from srctree HEAD. First get | 1437 | # (local files are provided only for the main branch and processing |
1411 | # the tree object of the directory | 1438 | # them against lists from recipe overrides will result in mismatches |
1412 | tmp_index = os.path.join(srctree, '.git', 'index.tmp.devtool') | 1439 | # and broken modifications to recipes). |
1413 | tree = git_files['oe-local-files'][2] | 1440 | stdout, _ = bb.process.run('git rev-parse --abbrev-ref HEAD', |
1414 | bb.process.run(['git', 'checkout', tree, '--', '.'], cwd=srctree, | 1441 | cwd=srctree) |
1415 | env=dict(os.environ, GIT_WORK_TREE=destdir, | 1442 | branchname = stdout.rstrip() |
1416 | GIT_INDEX_FILE=tmp_index)) | 1443 | if branchname.startswith(override_branch_prefix): |
1417 | new_set = list(_git_ls_tree(srctree, tree, True).keys()) | 1444 | return (updated, added, removed) |
1418 | elif os.path.isdir(local_files_dir): | 1445 | |
1419 | # If not tracked by Git, just copy from working copy | 1446 | files = _git_modified(srctree) |
1420 | new_set = _ls_tree(local_files_dir) | 1447 | #if not files: |
1421 | bb.process.run(['cp', '-ax', | 1448 | # files = _ls_tree(srctree) |
1422 | os.path.join(local_files_dir, '.'), destdir]) | 1449 | for f in files: |
1423 | else: | 1450 | fullfile = os.path.join(srctree, f) |
1424 | new_set = [] | 1451 | if os.path.exists(os.path.join(fullfile, ".git")): |
1452 | # submodules handled elsewhere | ||
1453 | continue | ||
1454 | if f not in existing_files: | ||
1455 | added[f] = {} | ||
1456 | if os.path.isdir(os.path.join(srctree, f)): | ||
1457 | shutil.copytree(fullfile, os.path.join(destdir, f)) | ||
1458 | else: | ||
1459 | shutil.copy2(fullfile, os.path.join(destdir, f)) | ||
1460 | elif not os.path.exists(fullfile): | ||
1461 | removed[f] = existing_files[f] | ||
1462 | elif f in existing_files: | ||
1463 | updated[f] = {'path' : existing_files[f]} | ||
1464 | if os.path.isdir(os.path.join(srctree, f)): | ||
1465 | shutil.copytree(fullfile, os.path.join(destdir, f)) | ||
1466 | else: | ||
1467 | shutil.copy2(fullfile, os.path.join(destdir, f)) | ||
1425 | 1468 | ||
1426 | # Special handling for kernel config | 1469 | # Special handling for kernel config |
1427 | if bb.data.inherits_class('kernel-yocto', rd): | 1470 | if bb.data.inherits_class('kernel-yocto', rd): |
@@ -1429,17 +1472,14 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
1429 | fragment_path = os.path.join(destdir, fragment_fn) | 1472 | fragment_path = os.path.join(destdir, fragment_fn) |
1430 | if _create_kconfig_diff(srctree, rd, fragment_path): | 1473 | if _create_kconfig_diff(srctree, rd, fragment_path): |
1431 | if os.path.exists(fragment_path): | 1474 | if os.path.exists(fragment_path): |
1432 | if fragment_fn not in new_set: | 1475 | if fragment_fn in removed: |
1433 | new_set.append(fragment_fn) | 1476 | del removed[fragment_fn] |
1434 | # Copy fragment to local-files | 1477 | if fragment_fn not in updated and fragment_fn not in added: |
1435 | if os.path.isdir(local_files_dir): | 1478 | added[fragment_fn] = {} |
1436 | shutil.copy2(fragment_path, local_files_dir) | ||
1437 | else: | 1479 | else: |
1438 | if fragment_fn in new_set: | 1480 | if fragment_fn in updated: |
1439 | new_set.remove(fragment_fn) | 1481 | removed[fragment_fn] = updated[fragment_fn] |
1440 | # Remove fragment from local-files | 1482 | del updated[fragment_fn] |
1441 | if os.path.exists(os.path.join(local_files_dir, fragment_fn)): | ||
1442 | os.unlink(os.path.join(local_files_dir, fragment_fn)) | ||
1443 | 1483 | ||
1444 | # Special handling for cml1, ccmake, etc. bbclasses that generate | 1484 | # Special handling for cml1, ccmake, etc. bbclasses that generate |
1445 | # configuration fragment files that are consumed as source files | 1485 | # configuration fragment files that are consumed as source files |
@@ -1447,42 +1487,13 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
1447 | if bb.data.inherits_class(frag_class, rd): | 1487 | if bb.data.inherits_class(frag_class, rd): |
1448 | srcpath = os.path.join(rd.getVar('WORKDIR'), frag_name) | 1488 | srcpath = os.path.join(rd.getVar('WORKDIR'), frag_name) |
1449 | if os.path.exists(srcpath): | 1489 | if os.path.exists(srcpath): |
1450 | if frag_name not in new_set: | 1490 | if frag_name in removed: |
1451 | new_set.append(frag_name) | 1491 | del removed[frag_name] |
1492 | if frag_name not in updated: | ||
1493 | added[frag_name] = {} | ||
1452 | # copy fragment into destdir | 1494 | # copy fragment into destdir |
1453 | shutil.copy2(srcpath, destdir) | 1495 | shutil.copy2(srcpath, destdir) |
1454 | # copy fragment into local files if exists | 1496 | |
1455 | if os.path.isdir(local_files_dir): | ||
1456 | shutil.copy2(srcpath, local_files_dir) | ||
1457 | |||
1458 | if new_set is not None: | ||
1459 | for fname in new_set: | ||
1460 | if fname in existing_files: | ||
1461 | origpath = existing_files.pop(fname) | ||
1462 | workpath = os.path.join(local_files_dir, fname) | ||
1463 | if not filecmp.cmp(origpath, workpath): | ||
1464 | updated[fname] = origpath | ||
1465 | elif fname != '.gitignore': | ||
1466 | added[fname] = None | ||
1467 | |||
1468 | workdir = rd.getVar('WORKDIR') | ||
1469 | s = rd.getVar('S') | ||
1470 | if not s.endswith(os.sep): | ||
1471 | s += os.sep | ||
1472 | |||
1473 | if workdir != s: | ||
1474 | # Handle files where subdir= was specified | ||
1475 | for fname in list(existing_files.keys()): | ||
1476 | # FIXME handle both subdir starting with BP and not? | ||
1477 | fworkpath = os.path.join(workdir, fname) | ||
1478 | if fworkpath.startswith(s): | ||
1479 | fpath = os.path.join(srctree, os.path.relpath(fworkpath, s)) | ||
1480 | if os.path.exists(fpath): | ||
1481 | origpath = existing_files.pop(fname) | ||
1482 | if not filecmp.cmp(origpath, fpath): | ||
1483 | updated[fpath] = origpath | ||
1484 | |||
1485 | removed = existing_files | ||
1486 | return (updated, added, removed) | 1497 | return (updated, added, removed) |
1487 | 1498 | ||
1488 | 1499 | ||
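For orientation, the reworked _export_local_files() above now returns three dictionaries rather than flat lists: updated and added map file basenames to parameter dicts, while removed maps basenames to the original paths taken from existing_files. A minimal sketch of what the callers further down receive (all names and paths here are invented for illustration, not taken from the commit):

    updated = {'defconfig': {'path': '/layer/recipes-kernel/linux/files/defconfig'}}
    added   = {'devtool-fragment.cfg': {}}   # newly created file, no original path yet
    removed = {'obsolete.cfg': '/layer/recipes-kernel/linux/files/obsolete.cfg'}
    # The update code reads param['path'] from the updated entries and uses
    # removed.values() directly when deleting SRC_URI entries.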
@@ -1500,7 +1511,7 @@ def _determine_files_dir(rd): | |||
1500 | 1511 | ||
1501 | def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, dry_run_outdir=None): | 1512 | def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, dry_run_outdir=None): |
1502 | """Implement the 'srcrev' mode of update-recipe""" | 1513 | """Implement the 'srcrev' mode of update-recipe""" |
1503 | import bb | 1514 | import bb.process |
1504 | import oe.recipeutils | 1515 | import oe.recipeutils |
1505 | 1516 | ||
1506 | dry_run_suffix = ' (dry-run)' if dry_run_outdir else '' | 1517 | dry_run_suffix = ' (dry-run)' if dry_run_outdir else '' |
@@ -1509,6 +1520,12 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi | |||
1509 | recipedir = os.path.basename(recipefile) | 1520 | recipedir = os.path.basename(recipefile) |
1510 | logger.info('Updating SRCREV in recipe %s%s' % (recipedir, dry_run_suffix)) | 1521 | logger.info('Updating SRCREV in recipe %s%s' % (recipedir, dry_run_suffix)) |
1511 | 1522 | ||
1523 | # Get original SRCREV | ||
1524 | old_srcrev = rd.getVar('SRCREV') or '' | ||
1525 | if old_srcrev == "INVALID": | ||
1526 | raise DevtoolError('Update mode srcrev is only valid for recipes fetched from an SCM repository') | ||
1527 | old_srcrev = {'.': old_srcrev} | ||
1528 | |||
1512 | # Get HEAD revision | 1529 | # Get HEAD revision |
1513 | try: | 1530 | try: |
1514 | stdout, _ = bb.process.run('git rev-parse HEAD', cwd=srctree) | 1531 | stdout, _ = bb.process.run('git rev-parse HEAD', cwd=srctree) |
@@ -1532,16 +1549,16 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi | |||
1532 | local_files_dir = tempfile.mkdtemp(dir=tempdir) | 1549 | local_files_dir = tempfile.mkdtemp(dir=tempdir) |
1533 | srctreebase = workspace[recipename]['srctreebase'] | 1550 | srctreebase = workspace[recipename]['srctreebase'] |
1534 | upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase) | 1551 | upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase) |
1552 | removedentries = {} | ||
1535 | if not no_remove: | 1553 | if not no_remove: |
1536 | # Find list of existing patches in recipe file | 1554 | # Find list of existing patches in recipe file |
1537 | patches_dir = tempfile.mkdtemp(dir=tempdir) | 1555 | patches_dir = tempfile.mkdtemp(dir=tempdir) |
1538 | old_srcrev = rd.getVar('SRCREV') or '' | ||
1539 | upd_p, new_p, del_p = _export_patches(srctree, rd, old_srcrev, | 1556 | upd_p, new_p, del_p = _export_patches(srctree, rd, old_srcrev, |
1540 | patches_dir) | 1557 | patches_dir) |
1541 | logger.debug('Patches: update %s, new %s, delete %s' % (dict(upd_p), dict(new_p), dict(del_p))) | 1558 | logger.debug('Patches: update %s, new %s, delete %s' % (dict(upd_p), dict(new_p), dict(del_p))) |
1542 | 1559 | ||
1543 | # Remove deleted local files and "overlapping" patches | 1560 | # Remove deleted local files and "overlapping" patches |
1544 | remove_files = list(del_f.values()) + list(upd_p.values()) + list(del_p.values()) | 1561 | remove_files = list(del_f.values()) + [value["path"] for value in upd_p.values() if "path" in value] + [value["path"] for value in del_p.values() if "path" in value] |
1545 | if remove_files: | 1562 | if remove_files: |
1546 | removedentries = _remove_file_entries(srcuri, remove_files)[0] | 1563 | removedentries = _remove_file_entries(srcuri, remove_files)[0] |
1547 | update_srcuri = True | 1564 | update_srcuri = True |
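A toy, self-contained illustration of how remove_files is assembled above now that the exported patch dictionaries carry their data in parameter dicts; every name and path below is made up for the example:

    del_f = {'obsolete.cfg': 'files/obsolete.cfg'}
    upd_p = {'0001-fix.patch': {'path': 'files/0001-fix.patch'}}
    del_p = {'0002-old.patch': {'path': 'files/0002-old.patch'}, 'generated.patch': {}}
    remove_files = (list(del_f.values())
                    + [value["path"] for value in upd_p.values() if "path" in value]
                    + [value["path"] for value in del_p.values() if "path" in value])
    # -> ['files/obsolete.cfg', 'files/0001-fix.patch', 'files/0002-old.patch']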
@@ -1555,14 +1572,14 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi | |||
1555 | patchfields['SRC_URI'] = '\\\n '.join(srcuri) | 1572 | patchfields['SRC_URI'] = '\\\n '.join(srcuri) |
1556 | if dry_run_outdir: | 1573 | if dry_run_outdir: |
1557 | logger.info('Creating bbappend (dry-run)') | 1574 | logger.info('Creating bbappend (dry-run)') |
1558 | else: | 1575 | appendfile, destpath = oe.recipeutils.bbappend_recipe( |
1559 | appendfile, destpath = oe.recipeutils.bbappend_recipe( | 1576 | rd, appendlayerdir, files, wildcardver=wildcard_version, |
1560 | rd, appendlayerdir, files, wildcardver=wildcard_version, | 1577 | extralines=patchfields, removevalues=removevalues, |
1561 | extralines=patchfields, removevalues=removevalues, | 1578 | redirect_output=dry_run_outdir) |
1562 | redirect_output=dry_run_outdir) | ||
1563 | else: | 1579 | else: |
1564 | files_dir = _determine_files_dir(rd) | 1580 | files_dir = _determine_files_dir(rd) |
1565 | for basepath, path in upd_f.items(): | 1581 | for basepath, param in upd_f.items(): |
1582 | path = param['path'] | ||
1566 | logger.info('Updating file %s%s' % (basepath, dry_run_suffix)) | 1583 | logger.info('Updating file %s%s' % (basepath, dry_run_suffix)) |
1567 | if os.path.isabs(basepath): | 1584 | if os.path.isabs(basepath): |
1568 | # Original file (probably with subdir pointing inside source tree) | 1585 | # Original file (probably with subdir pointing inside source tree) |
@@ -1572,7 +1589,8 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi | |||
1572 | _move_file(os.path.join(local_files_dir, basepath), path, | 1589 | _move_file(os.path.join(local_files_dir, basepath), path, |
1573 | dry_run_outdir=dry_run_outdir, base_outdir=recipedir) | 1590 | dry_run_outdir=dry_run_outdir, base_outdir=recipedir) |
1574 | update_srcuri = True | 1591 | update_srcuri = True |
1575 | for basepath, path in new_f.items(): | 1592 | for basepath, param in new_f.items(): |
1593 | path = param['path'] | ||
1576 | logger.info('Adding new file %s%s' % (basepath, dry_run_suffix)) | 1594 | logger.info('Adding new file %s%s' % (basepath, dry_run_suffix)) |
1577 | _move_file(os.path.join(local_files_dir, basepath), | 1595 | _move_file(os.path.join(local_files_dir, basepath), |
1578 | os.path.join(files_dir, basepath), | 1596 | os.path.join(files_dir, basepath), |
@@ -1595,7 +1613,6 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi | |||
1595 | 1613 | ||
1596 | def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, initial_rev, dry_run_outdir=None, force_patch_refresh=False): | 1614 | def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, initial_rev, dry_run_outdir=None, force_patch_refresh=False): |
1597 | """Implement the 'patch' mode of update-recipe""" | 1615 | """Implement the 'patch' mode of update-recipe""" |
1598 | import bb | ||
1599 | import oe.recipeutils | 1616 | import oe.recipeutils |
1600 | 1617 | ||
1601 | recipefile = rd.getVar('FILE') | 1618 | recipefile = rd.getVar('FILE') |
@@ -1604,9 +1621,22 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil | |||
1604 | if not os.path.exists(append): | 1621 | if not os.path.exists(append): |
1605 | raise DevtoolError('unable to find workspace bbappend for recipe %s' % | 1622 | raise DevtoolError('unable to find workspace bbappend for recipe %s' % |
1606 | recipename) | 1623 | recipename) |
1624 | srctreebase = workspace[recipename]['srctreebase'] | ||
1625 | relpatchdir = os.path.relpath(srctreebase, srctree) | ||
1626 | if relpatchdir == '.': | ||
1627 | patchdir_params = {} | ||
1628 | else: | ||
1629 | patchdir_params = {'patchdir': relpatchdir} | ||
1630 | |||
1631 | def srcuri_entry(basepath, patchdir_params): | ||
1632 | if patchdir_params: | ||
1633 | paramstr = ';' + ';'.join('%s=%s' % (k,v) for k,v in patchdir_params.items()) | ||
1634 | else: | ||
1635 | paramstr = '' | ||
1636 | return 'file://%s%s' % (basepath, paramstr) | ||
1607 | 1637 | ||
1608 | initial_rev, update_rev, changed_revs, filter_patches = _get_patchset_revs(srctree, append, initial_rev, force_patch_refresh) | 1638 | initial_revs, update_revs, changed_revs, filter_patches = _get_patchset_revs(srctree, append, initial_rev, force_patch_refresh) |
1609 | if not initial_rev: | 1639 | if not initial_revs: |
1610 | raise DevtoolError('Unable to find initial revision - please specify ' | 1640 | raise DevtoolError('Unable to find initial revision - please specify ' |
1611 | 'it with --initial-rev') | 1641 | 'it with --initial-rev') |
1612 | 1642 | ||
@@ -1620,61 +1650,69 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil | |||
1620 | tempdir = tempfile.mkdtemp(prefix='devtool') | 1650 | tempdir = tempfile.mkdtemp(prefix='devtool') |
1621 | try: | 1651 | try: |
1622 | local_files_dir = tempfile.mkdtemp(dir=tempdir) | 1652 | local_files_dir = tempfile.mkdtemp(dir=tempdir) |
1623 | if filter_patches: | 1653 | upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase) |
1624 | upd_f = {} | ||
1625 | new_f = {} | ||
1626 | del_f = {} | ||
1627 | else: | ||
1628 | srctreebase = workspace[recipename]['srctreebase'] | ||
1629 | upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase) | ||
1630 | |||
1631 | remove_files = [] | ||
1632 | if not no_remove: | ||
1633 | # Get all patches from source tree and check if any should be removed | ||
1634 | all_patches_dir = tempfile.mkdtemp(dir=tempdir) | ||
1635 | _, _, del_p = _export_patches(srctree, rd, initial_rev, | ||
1636 | all_patches_dir) | ||
1637 | # Remove deleted local files and patches | ||
1638 | remove_files = list(del_f.values()) + list(del_p.values()) | ||
1639 | 1654 | ||
1640 | # Get updated patches from source tree | 1655 | # Get updated patches from source tree |
1641 | patches_dir = tempfile.mkdtemp(dir=tempdir) | 1656 | patches_dir = tempfile.mkdtemp(dir=tempdir) |
1642 | upd_p, new_p, _ = _export_patches(srctree, rd, update_rev, | 1657 | upd_p, new_p, _ = _export_patches(srctree, rd, update_revs, |
1643 | patches_dir, changed_revs) | 1658 | patches_dir, changed_revs) |
1659 | # Get all patches from source tree and check if any should be removed | ||
1660 | all_patches_dir = tempfile.mkdtemp(dir=tempdir) | ||
1661 | _, _, del_p = _export_patches(srctree, rd, initial_revs, | ||
1662 | all_patches_dir) | ||
1644 | logger.debug('Pre-filtering: update: %s, new: %s' % (dict(upd_p), dict(new_p))) | 1663 | logger.debug('Pre-filtering: update: %s, new: %s' % (dict(upd_p), dict(new_p))) |
1645 | if filter_patches: | 1664 | if filter_patches: |
1646 | new_p = OrderedDict() | 1665 | new_p = OrderedDict() |
1647 | upd_p = OrderedDict((k,v) for k,v in upd_p.items() if k in filter_patches) | 1666 | upd_p = OrderedDict((k,v) for k,v in upd_p.items() if k in filter_patches) |
1648 | remove_files = [f for f in remove_files if f in filter_patches] | 1667 | del_p = OrderedDict((k,v) for k,v in del_p.items() if k in filter_patches) |
1668 | remove_files = [] | ||
1669 | if not no_remove: | ||
1670 | # Remove deleted local files and patches | ||
1671 | remove_files = list(del_f.values()) + list(del_p.values()) | ||
1649 | updatefiles = False | 1672 | updatefiles = False |
1650 | updaterecipe = False | 1673 | updaterecipe = False |
1651 | destpath = None | 1674 | destpath = None |
1652 | srcuri = (rd.getVar('SRC_URI', False) or '').split() | 1675 | srcuri = (rd.getVar('SRC_URI', False) or '').split() |
1676 | |||
1653 | if appendlayerdir: | 1677 | if appendlayerdir: |
1654 | files = OrderedDict((os.path.join(local_files_dir, key), val) for | 1678 | files = OrderedDict((os.path.join(local_files_dir, key), val) for |
1655 | key, val in list(upd_f.items()) + list(new_f.items())) | 1679 | key, val in list(upd_f.items()) + list(new_f.items())) |
1656 | files.update(OrderedDict((os.path.join(patches_dir, key), val) for | 1680 | files.update(OrderedDict((os.path.join(patches_dir, key), val) for |
1657 | key, val in list(upd_p.items()) + list(new_p.items()))) | 1681 | key, val in list(upd_p.items()) + list(new_p.items()))) |
1682 | |||
1683 | params = [] | ||
1684 | for file, param in files.items(): | ||
1685 | patchdir_param = dict(patchdir_params) | ||
1686 | patchdir = param.get('patchdir', ".") | ||
1687 | if patchdir != "." : | ||
1688 | if patchdir != ".": | ||
1689 | patchdir_param['patchdir'] += patchdir | ||
1690 | else: | ||
1691 | patchdir_param['patchdir'] = patchdir | ||
1692 | params.append(patchdir_param) | ||
1693 | |||
1658 | if files or remove_files: | 1694 | if files or remove_files: |
1659 | removevalues = None | 1695 | removevalues = None |
1660 | if remove_files: | 1696 | if remove_files: |
1661 | removedentries, remaining = _remove_file_entries( | 1697 | removedentries, remaining = _remove_file_entries( |
1662 | srcuri, remove_files) | 1698 | srcuri, remove_files) |
1663 | if removedentries or remaining: | 1699 | if removedentries or remaining: |
1664 | remaining = ['file://' + os.path.basename(item) for | 1700 | remaining = [srcuri_entry(os.path.basename(item), patchdir_params) for |
1665 | item in remaining] | 1701 | item in remaining] |
1666 | removevalues = {'SRC_URI': removedentries + remaining} | 1702 | removevalues = {'SRC_URI': removedentries + remaining} |
1667 | appendfile, destpath = oe.recipeutils.bbappend_recipe( | 1703 | appendfile, destpath = oe.recipeutils.bbappend_recipe( |
1668 | rd, appendlayerdir, files, | 1704 | rd, appendlayerdir, files, |
1669 | wildcardver=wildcard_version, | 1705 | wildcardver=wildcard_version, |
1670 | removevalues=removevalues, | 1706 | removevalues=removevalues, |
1671 | redirect_output=dry_run_outdir) | 1707 | redirect_output=dry_run_outdir, |
1708 | params=params) | ||
1672 | else: | 1709 | else: |
1673 | logger.info('No patches or local source files needed updating') | 1710 | logger.info('No patches or local source files needed updating') |
1674 | else: | 1711 | else: |
1675 | # Update existing files | 1712 | # Update existing files |
1676 | files_dir = _determine_files_dir(rd) | 1713 | files_dir = _determine_files_dir(rd) |
1677 | for basepath, path in upd_f.items(): | 1714 | for basepath, param in upd_f.items(): |
1715 | path = param['path'] | ||
1678 | logger.info('Updating file %s' % basepath) | 1716 | logger.info('Updating file %s' % basepath) |
1679 | if os.path.isabs(basepath): | 1717 | if os.path.isabs(basepath): |
1680 | # Original file (probably with subdir pointing inside source tree) | 1718 | # Original file (probably with subdir pointing inside source tree) |
@@ -1685,14 +1723,23 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil | |||
1685 | _move_file(os.path.join(local_files_dir, basepath), path, | 1723 | _move_file(os.path.join(local_files_dir, basepath), path, |
1686 | dry_run_outdir=dry_run_outdir, base_outdir=recipedir) | 1724 | dry_run_outdir=dry_run_outdir, base_outdir=recipedir) |
1687 | updatefiles = True | 1725 | updatefiles = True |
1688 | for basepath, path in upd_p.items(): | 1726 | for basepath, param in upd_p.items(): |
1689 | patchfn = os.path.join(patches_dir, basepath) | 1727 | path = param['path'] |
1728 | patchdir = param.get('patchdir', ".") | ||
1729 | patchdir_param = {} | ||
1730 | if patchdir != ".": | ||
1731 | patchdir_param = dict(patchdir_params) | ||
1732 | if patchdir_param: | ||
1733 | patchdir_param['patchdir'] += patchdir | ||
1734 | else: | ||
1735 | patchdir_param['patchdir'] = patchdir | ||
1736 | patchfn = os.path.join(patches_dir, patchdir, basepath) | ||
1690 | if os.path.dirname(path) + '/' == dl_dir: | 1737 | if os.path.dirname(path) + '/' == dl_dir: |
1691 | # This is a a downloaded patch file - we now need to | 1738 | # This is a a downloaded patch file - we now need to |
1692 | # replace the entry in SRC_URI with our local version | 1739 | # replace the entry in SRC_URI with our local version |
1693 | logger.info('Replacing remote patch %s with updated local version' % basepath) | 1740 | logger.info('Replacing remote patch %s with updated local version' % basepath) |
1694 | path = os.path.join(files_dir, basepath) | 1741 | path = os.path.join(files_dir, basepath) |
1695 | _replace_srcuri_entry(srcuri, basepath, 'file://%s' % basepath) | 1742 | _replace_srcuri_entry(srcuri, basepath, srcuri_entry(basepath, patchdir_param)) |
1696 | updaterecipe = True | 1743 | updaterecipe = True |
1697 | else: | 1744 | else: |
1698 | logger.info('Updating patch %s%s' % (basepath, dry_run_suffix)) | 1745 | logger.info('Updating patch %s%s' % (basepath, dry_run_suffix)) |
@@ -1700,21 +1747,29 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil | |||
1700 | dry_run_outdir=dry_run_outdir, base_outdir=recipedir) | 1747 | dry_run_outdir=dry_run_outdir, base_outdir=recipedir) |
1701 | updatefiles = True | 1748 | updatefiles = True |
1702 | # Add any new files | 1749 | # Add any new files |
1703 | for basepath, path in new_f.items(): | 1750 | for basepath, param in new_f.items(): |
1704 | logger.info('Adding new file %s%s' % (basepath, dry_run_suffix)) | 1751 | logger.info('Adding new file %s%s' % (basepath, dry_run_suffix)) |
1705 | _move_file(os.path.join(local_files_dir, basepath), | 1752 | _move_file(os.path.join(local_files_dir, basepath), |
1706 | os.path.join(files_dir, basepath), | 1753 | os.path.join(files_dir, basepath), |
1707 | dry_run_outdir=dry_run_outdir, | 1754 | dry_run_outdir=dry_run_outdir, |
1708 | base_outdir=recipedir) | 1755 | base_outdir=recipedir) |
1709 | srcuri.append('file://%s' % basepath) | 1756 | srcuri.append(srcuri_entry(basepath, patchdir_params)) |
1710 | updaterecipe = True | 1757 | updaterecipe = True |
1711 | for basepath, path in new_p.items(): | 1758 | for basepath, param in new_p.items(): |
1759 | patchdir = param.get('patchdir', ".") | ||
1712 | logger.info('Adding new patch %s%s' % (basepath, dry_run_suffix)) | 1760 | logger.info('Adding new patch %s%s' % (basepath, dry_run_suffix)) |
1713 | _move_file(os.path.join(patches_dir, basepath), | 1761 | _move_file(os.path.join(patches_dir, patchdir, basepath), |
1714 | os.path.join(files_dir, basepath), | 1762 | os.path.join(files_dir, basepath), |
1715 | dry_run_outdir=dry_run_outdir, | 1763 | dry_run_outdir=dry_run_outdir, |
1716 | base_outdir=recipedir) | 1764 | base_outdir=recipedir) |
1717 | srcuri.append('file://%s' % basepath) | 1765 | params = dict(patchdir_params) |
1766 | if patchdir != ".": | ||
1767 | if params: | ||
1768 | params['patchdir'] += patchdir | ||
1769 | else: | ||
1770 | params['patchdir'] = patchdir | ||
1771 | |||
1772 | srcuri.append(srcuri_entry(basepath, params)) | ||
1718 | updaterecipe = True | 1773 | updaterecipe = True |
1719 | # Update recipe, if needed | 1774 | # Update recipe, if needed |
1720 | if _remove_file_entries(srcuri, remove_files)[0]: | 1775 | if _remove_file_entries(srcuri, remove_files)[0]: |
@@ -1737,6 +1792,7 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil | |||
1737 | 1792 | ||
1738 | def _guess_recipe_update_mode(srctree, rdata): | 1793 | def _guess_recipe_update_mode(srctree, rdata): |
1739 | """Guess the recipe update mode to use""" | 1794 | """Guess the recipe update mode to use""" |
1795 | import bb.process | ||
1740 | src_uri = (rdata.getVar('SRC_URI') or '').split() | 1796 | src_uri = (rdata.getVar('SRC_URI') or '').split() |
1741 | git_uris = [uri for uri in src_uri if uri.startswith('git://')] | 1797 | git_uris = [uri for uri in src_uri if uri.startswith('git://')] |
1742 | if not git_uris: | 1798 | if not git_uris: |
@@ -1758,6 +1814,8 @@ def _guess_recipe_update_mode(srctree, rdata): | |||
1758 | return 'patch' | 1814 | return 'patch' |
1759 | 1815 | ||
1760 | def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_version, no_remove, initial_rev, no_report_remove=False, dry_run_outdir=None, no_overrides=False, force_patch_refresh=False): | 1816 | def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_version, no_remove, initial_rev, no_report_remove=False, dry_run_outdir=None, no_overrides=False, force_patch_refresh=False): |
1817 | import bb.data | ||
1818 | import bb.process | ||
1761 | srctree = workspace[recipename]['srctree'] | 1819 | srctree = workspace[recipename]['srctree'] |
1762 | if mode == 'auto': | 1820 | if mode == 'auto': |
1763 | mode = _guess_recipe_update_mode(srctree, rd) | 1821 | mode = _guess_recipe_update_mode(srctree, rd) |
@@ -1771,6 +1829,8 @@ def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_ver | |||
1771 | for line in stdout.splitlines(): | 1829 | for line in stdout.splitlines(): |
1772 | branchname = line[2:] | 1830 | branchname = line[2:] |
1773 | if line.startswith('* '): | 1831 | if line.startswith('* '): |
1832 | if 'HEAD' in line: | ||
1833 | raise DevtoolError('Detached HEAD - please check out a branch, e.g., "devtool"') | ||
1774 | startbranch = branchname | 1834 | startbranch = branchname |
1775 | if branchname.startswith(override_branch_prefix): | 1835 | if branchname.startswith(override_branch_prefix): |
1776 | override_branches.append(branchname) | 1836 | override_branches.append(branchname) |
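The detached-HEAD guard added above relies on the plain "git branch" listing, where the current entry is prefixed with "* " and a detached checkout shows up as "* (HEAD detached at ...)". A standalone sketch of that scan, with a made-up listing and a plain Exception standing in for DevtoolError:

    sample = ('  devtool-override-qemuarm\n'
              '* (HEAD detached at 1a2b3c4)\n'
              '  devtool\n')
    override_branch_prefix = 'devtool-override-'
    override_branches = []
    startbranch = None
    for line in sample.splitlines():
        branchname = line[2:]
        if line.startswith('* '):
            if 'HEAD' in line:
                raise Exception('Detached HEAD - please check out a branch, e.g., "devtool"')
            startbranch = branchname
        if branchname.startswith(override_branch_prefix):
            override_branches.append(branchname)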
@@ -1878,6 +1938,7 @@ def status(args, config, basepath, workspace): | |||
1878 | 1938 | ||
1879 | def _reset(recipes, no_clean, remove_work, config, basepath, workspace): | 1939 | def _reset(recipes, no_clean, remove_work, config, basepath, workspace): |
1880 | """Reset one or more recipes""" | 1940 | """Reset one or more recipes""" |
1941 | import bb.process | ||
1881 | import oe.path | 1942 | import oe.path |
1882 | 1943 | ||
1883 | def clean_preferred_provider(pn, layerconf_path): | 1944 | def clean_preferred_provider(pn, layerconf_path): |
@@ -1890,7 +1951,7 @@ def _reset(recipes, no_clean, remove_work, config, basepath, workspace): | |||
1890 | lines = f.readlines() | 1951 | lines = f.readlines() |
1891 | with open(new_layerconf_file, 'a') as nf: | 1952 | with open(new_layerconf_file, 'a') as nf: |
1892 | for line in lines: | 1953 | for line in lines: |
1893 | pprovider_exp = r'^PREFERRED_PROVIDER_.*? = "' + pn + r'"$' | 1954 | pprovider_exp = r'^PREFERRED_PROVIDER_.*? = "' + re.escape(pn) + r'"$' |
1894 | if not re.match(pprovider_exp, line): | 1955 | if not re.match(pprovider_exp, line): |
1895 | nf.write(line) | 1956 | nf.write(line) |
1896 | else: | 1957 | else: |
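The switch to re.escape() above matters because recipe names can contain regex metacharacters such as "+". A small, hypothetical example of the escaped pattern (the recipe name and layer.conf line are invented for illustration):

    import re
    pn = 'libsigc++-3'
    line = 'PREFERRED_PROVIDER_virtual/libsigc = "libsigc++-3"'
    pprovider_exp = r'^PREFERRED_PROVIDER_.*? = "' + re.escape(pn) + r'"$'
    print(bool(re.match(pprovider_exp, line)))   # True
    # Without re.escape() the '+' characters would be treated as repeat
    # operators, so the pattern would either fail to compile or not match.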
@@ -1960,9 +2021,19 @@ def _reset(recipes, no_clean, remove_work, config, basepath, workspace): | |||
1960 | shutil.rmtree(srctreebase) | 2021 | shutil.rmtree(srctreebase) |
1961 | else: | 2022 | else: |
1962 | # We don't want to risk wiping out any work in progress | 2023 | # We don't want to risk wiping out any work in progress |
1963 | logger.info('Leaving source tree %s as-is; if you no ' | 2024 | if srctreebase.startswith(os.path.join(config.workspace_path, 'sources')): |
1964 | 'longer need it then please delete it manually' | 2025 | from datetime import datetime |
1965 | % srctreebase) | 2026 | preservesrc = os.path.join(config.workspace_path, 'attic', 'sources', "{}.{}".format(pn, datetime.now().strftime("%Y%m%d%H%M%S"))) |
2027 | logger.info('Preserving source tree in %s\nIf you no ' | ||
2028 | 'longer need it then please delete it manually.\n' | ||
2029 | 'It is also possible to reuse it via the devtool source tree argument.' | ||
2030 | % preservesrc) | ||
2031 | bb.utils.mkdirhier(os.path.dirname(preservesrc)) | ||
2032 | shutil.move(srctreebase, preservesrc) | ||
2033 | else: | ||
2034 | logger.info('Leaving source tree %s as-is; if you no ' | ||
2035 | 'longer need it then please delete it manually' | ||
2036 | % srctreebase) | ||
1966 | else: | 2037 | else: |
1967 | # This is unlikely, but if it's empty we can just remove it | 2038 | # This is unlikely, but if it's empty we can just remove it |
1968 | os.rmdir(srctreebase) | 2039 | os.rmdir(srctreebase) |
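For reference, a quick sketch of the timestamped attic location constructed above when a source tree inside the workspace is preserved; the workspace path and recipe name are assumptions for the example:

    import os
    from datetime import datetime
    workspace_path = '/home/user/build/workspace'   # assumption for the example
    pn = 'zlib'                                      # assumption for the example
    preservesrc = os.path.join(workspace_path, 'attic', 'sources',
                               "{}.{}".format(pn, datetime.now().strftime("%Y%m%d%H%M%S")))
    # e.g. /home/user/build/workspace/attic/sources/zlib.20240514103015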
@@ -1971,8 +2042,6 @@ def _reset(recipes, no_clean, remove_work, config, basepath, workspace): | |||
1971 | 2042 | ||
1972 | def reset(args, config, basepath, workspace): | 2043 | def reset(args, config, basepath, workspace): |
1973 | """Entry point for the devtool 'reset' subcommand""" | 2044 | """Entry point for the devtool 'reset' subcommand""" |
1974 | import bb | ||
1975 | import shutil | ||
1976 | 2045 | ||
1977 | recipes = "" | 2046 | recipes = "" |
1978 | 2047 | ||
@@ -2222,6 +2291,7 @@ def register_commands(subparsers, context): | |||
2222 | group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true") | 2291 | group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true") |
2223 | parser_add.add_argument('--fetch', '-f', help='Fetch the specified URI and extract it to create the source tree (deprecated - pass as positional argument instead)', metavar='URI') | 2292 | parser_add.add_argument('--fetch', '-f', help='Fetch the specified URI and extract it to create the source tree (deprecated - pass as positional argument instead)', metavar='URI') |
2224 | parser_add.add_argument('--npm-dev', help='For npm, also fetch devDependencies', action="store_true") | 2293 | parser_add.add_argument('--npm-dev', help='For npm, also fetch devDependencies', action="store_true") |
2294 | parser_add.add_argument('--no-pypi', help='Do not inherit pypi class', action="store_true") | ||
2225 | parser_add.add_argument('--version', '-V', help='Version to use within recipe (PV)') | 2295 | parser_add.add_argument('--version', '-V', help='Version to use within recipe (PV)') |
2226 | parser_add.add_argument('--no-git', '-g', help='If fetching source, do not set up source tree as a git repository', action="store_true") | 2296 | parser_add.add_argument('--no-git', '-g', help='If fetching source, do not set up source tree as a git repository', action="store_true") |
2227 | group = parser_add.add_mutually_exclusive_group() | 2297 | group = parser_add.add_mutually_exclusive_group() |
@@ -2250,6 +2320,7 @@ def register_commands(subparsers, context): | |||
2250 | parser_modify.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (when not using -n/--no-extract) (default "%(default)s")') | 2320 | parser_modify.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (when not using -n/--no-extract) (default "%(default)s")') |
2251 | parser_modify.add_argument('--no-overrides', '-O', action="store_true", help='Do not create branches for other override configurations') | 2321 | parser_modify.add_argument('--no-overrides', '-O', action="store_true", help='Do not create branches for other override configurations') |
2252 | parser_modify.add_argument('--keep-temp', help='Keep temporary directory (for debugging)', action="store_true") | 2322 | parser_modify.add_argument('--keep-temp', help='Keep temporary directory (for debugging)', action="store_true") |
2323 | parser_modify.add_argument('--debug-build', action="store_true", help='Add DEBUG_BUILD = "1" to the modified recipe') | ||
2253 | parser_modify.set_defaults(func=modify, fixed_setup=context.fixed_setup) | 2324 | parser_modify.set_defaults(func=modify, fixed_setup=context.fixed_setup) |
2254 | 2325 | ||
2255 | parser_extract = subparsers.add_parser('extract', help='Extract the source for an existing recipe', | 2326 | parser_extract = subparsers.add_parser('extract', help='Extract the source for an existing recipe', |