diff options
Diffstat (limited to 'scripts/lib/devtool/standard.py')
-rw-r--r-- | scripts/lib/devtool/standard.py | 540 |
1 file changed, 342 insertions, 198 deletions
diff --git a/scripts/lib/devtool/standard.py b/scripts/lib/devtool/standard.py index 7b62b7e7b8..bd009f44b1 100644 --- a/scripts/lib/devtool/standard.py +++ b/scripts/lib/devtool/standard.py | |||
@@ -147,6 +147,8 @@ def add(args, config, basepath, workspace): | |||
147 | extracmdopts += ' -a' | 147 | extracmdopts += ' -a' |
148 | if args.npm_dev: | 148 | if args.npm_dev: |
149 | extracmdopts += ' --npm-dev' | 149 | extracmdopts += ' --npm-dev' |
150 | if args.no_pypi: | ||
151 | extracmdopts += ' --no-pypi' | ||
150 | if args.mirrors: | 152 | if args.mirrors: |
151 | extracmdopts += ' --mirrors' | 153 | extracmdopts += ' --mirrors' |
152 | if args.srcrev: | 154 | if args.srcrev: |
@@ -234,10 +236,14 @@ def add(args, config, basepath, workspace): | |||
234 | if args.fetchuri and not args.no_git: | 236 | if args.fetchuri and not args.no_git: |
235 | setup_git_repo(srctree, args.version, 'devtool', d=tinfoil.config_data) | 237 | setup_git_repo(srctree, args.version, 'devtool', d=tinfoil.config_data) |
236 | 238 | ||
237 | initial_rev = None | 239 | initial_rev = {} |
238 | if os.path.exists(os.path.join(srctree, '.git')): | 240 | if os.path.exists(os.path.join(srctree, '.git')): |
239 | (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree) | 241 | (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree) |
240 | initial_rev = stdout.rstrip() | 242 | initial_rev["."] = stdout.rstrip() |
243 | (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse HEAD` $PWD\'', cwd=srctree) | ||
244 | for line in stdout.splitlines(): | ||
245 | (rev, submodule) = line.split() | ||
246 | initial_rev[os.path.relpath(submodule, srctree)] = rev | ||
241 | 247 | ||
242 | if args.src_subdir: | 248 | if args.src_subdir: |
243 | srctree = os.path.join(srctree, args.src_subdir) | 249 | srctree = os.path.join(srctree, args.src_subdir) |
@@ -251,16 +257,17 @@ def add(args, config, basepath, workspace): | |||
251 | if b_is_s: | 257 | if b_is_s: |
252 | f.write('EXTERNALSRC_BUILD = "%s"\n' % srctree) | 258 | f.write('EXTERNALSRC_BUILD = "%s"\n' % srctree) |
253 | if initial_rev: | 259 | if initial_rev: |
254 | f.write('\n# initial_rev: %s\n' % initial_rev) | 260 | for key, value in initial_rev.items(): |
261 | f.write('\n# initial_rev %s: %s\n' % (key, value)) | ||
255 | 262 | ||
256 | if args.binary: | 263 | if args.binary: |
257 | f.write('do_install_append() {\n') | 264 | f.write('do_install:append() {\n') |
258 | f.write(' rm -rf ${D}/.git\n') | 265 | f.write(' rm -rf ${D}/.git\n') |
259 | f.write(' rm -f ${D}/singletask.lock\n') | 266 | f.write(' rm -f ${D}/singletask.lock\n') |
260 | f.write('}\n') | 267 | f.write('}\n') |
261 | 268 | ||
262 | if bb.data.inherits_class('npm', rd): | 269 | if bb.data.inherits_class('npm', rd): |
263 | f.write('python do_configure_append() {\n') | 270 | f.write('python do_configure:append() {\n') |
264 | f.write(' pkgdir = d.getVar("NPM_PACKAGE")\n') | 271 | f.write(' pkgdir = d.getVar("NPM_PACKAGE")\n') |
265 | f.write(' lockfile = os.path.join(pkgdir, "singletask.lock")\n') | 272 | f.write(' lockfile = os.path.join(pkgdir, "singletask.lock")\n') |
266 | f.write(' bb.utils.remove(lockfile)\n') | 273 | f.write(' bb.utils.remove(lockfile)\n') |
@@ -318,10 +325,6 @@ def _check_compatible_recipe(pn, d): | |||
318 | raise DevtoolError("The %s recipe is a packagegroup, and therefore is " | 325 | raise DevtoolError("The %s recipe is a packagegroup, and therefore is " |
319 | "not supported by this tool" % pn, 4) | 326 | "not supported by this tool" % pn, 4) |
320 | 327 | ||
321 | if bb.data.inherits_class('meta', d): | ||
322 | raise DevtoolError("The %s recipe is a meta-recipe, and therefore is " | ||
323 | "not supported by this tool" % pn, 4) | ||
324 | |||
325 | if bb.data.inherits_class('externalsrc', d) and d.getVar('EXTERNALSRC'): | 328 | if bb.data.inherits_class('externalsrc', d) and d.getVar('EXTERNALSRC'): |
326 | # Not an incompatibility error per se, so we don't pass the error code | 329 | # Not an incompatibility error per se, so we don't pass the error code |
327 | raise DevtoolError("externalsrc is currently enabled for the %s " | 330 | raise DevtoolError("externalsrc is currently enabled for the %s " |
@@ -357,7 +360,7 @@ def _move_file(src, dst, dry_run_outdir=None, base_outdir=None): | |||
357 | bb.utils.mkdirhier(dst_d) | 360 | bb.utils.mkdirhier(dst_d) |
358 | shutil.move(src, dst) | 361 | shutil.move(src, dst) |
359 | 362 | ||
360 | def _copy_file(src, dst, dry_run_outdir=None): | 363 | def _copy_file(src, dst, dry_run_outdir=None, base_outdir=None): |
361 | """Copy a file. Creates all the directory components of destination path.""" | 364 | """Copy a file. Creates all the directory components of destination path.""" |
362 | dry_run_suffix = ' (dry-run)' if dry_run_outdir else '' | 365 | dry_run_suffix = ' (dry-run)' if dry_run_outdir else '' |
363 | logger.debug('Copying %s to %s%s' % (src, dst, dry_run_suffix)) | 366 | logger.debug('Copying %s to %s%s' % (src, dst, dry_run_suffix)) |
@@ -457,7 +460,7 @@ def sync(args, config, basepath, workspace): | |||
457 | finally: | 460 | finally: |
458 | tinfoil.shutdown() | 461 | tinfoil.shutdown() |
459 | 462 | ||
460 | def symlink_oelocal_files_srctree(rd,srctree): | 463 | def symlink_oelocal_files_srctree(rd, srctree): |
461 | import oe.patch | 464 | import oe.patch |
462 | if os.path.abspath(rd.getVar('S')) == os.path.abspath(rd.getVar('WORKDIR')): | 465 | if os.path.abspath(rd.getVar('S')) == os.path.abspath(rd.getVar('WORKDIR')): |
463 | # If recipe extracts to ${WORKDIR}, symlink the files into the srctree | 466 | # If recipe extracts to ${WORKDIR}, symlink the files into the srctree |
@@ -481,11 +484,7 @@ def symlink_oelocal_files_srctree(rd,srctree): | |||
481 | os.symlink('oe-local-files/%s' % fn, destpth) | 484 | os.symlink('oe-local-files/%s' % fn, destpth) |
482 | addfiles.append(os.path.join(relpth, fn)) | 485 | addfiles.append(os.path.join(relpth, fn)) |
483 | if addfiles: | 486 | if addfiles: |
484 | bb.process.run('git add %s' % ' '.join(addfiles), cwd=srctree) | 487 | oe.patch.GitApplyTree.commitIgnored("Add local file symlinks", dir=srctree, files=addfiles, d=rd) |
485 | useroptions = [] | ||
486 | oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=rd) | ||
487 | bb.process.run('git %s commit -m "Committing local file symlinks\n\n%s"' % (' '.join(useroptions), oe.patch.GitApplyTree.ignore_commit_prefix), cwd=srctree) | ||
488 | |||
489 | 488 | ||
490 | def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil, no_overrides=False): | 489 | def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil, no_overrides=False): |
491 | """Extract sources of a recipe""" | 490 | """Extract sources of a recipe""" |
@@ -523,8 +522,10 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
523 | history = d.varhistory.variable('SRC_URI') | 522 | history = d.varhistory.variable('SRC_URI') |
524 | for event in history: | 523 | for event in history: |
525 | if not 'flag' in event: | 524 | if not 'flag' in event: |
526 | if event['op'].startswith(('_append[', '_prepend[')): | 525 | if event['op'].startswith((':append[', ':prepend[')): |
527 | extra_overrides.append(event['op'].split('[')[1].split(']')[0]) | 526 | override = event['op'].split('[')[1].split(']')[0] |
527 | if not override.startswith('pn-'): | ||
528 | extra_overrides.append(override) | ||
528 | # We want to remove duplicate overrides. If a recipe had multiple | 529 | # We want to remove duplicate overrides. If a recipe had multiple |
529 | # SRC_URI_override += values it would cause mulitple instances of | 530 | # SRC_URI_override += values it would cause mulitple instances of |
530 | # overrides. This doesn't play nicely with things like creating a | 531 | # overrides. This doesn't play nicely with things like creating a |
@@ -569,6 +570,9 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
569 | logger.debug('writing append file %s' % appendfile) | 570 | logger.debug('writing append file %s' % appendfile) |
570 | with open(appendfile, 'a') as f: | 571 | with open(appendfile, 'a') as f: |
571 | f.write('###--- _extract_source\n') | 572 | f.write('###--- _extract_source\n') |
573 | f.write('deltask do_recipe_qa\n') | ||
574 | f.write('deltask do_recipe_qa_setscene\n') | ||
575 | f.write('ERROR_QA:remove = "patch-fuzz"\n') | ||
572 | f.write('DEVTOOL_TEMPDIR = "%s"\n' % tempdir) | 576 | f.write('DEVTOOL_TEMPDIR = "%s"\n' % tempdir) |
573 | f.write('DEVTOOL_DEVBRANCH = "%s"\n' % devbranch) | 577 | f.write('DEVTOOL_DEVBRANCH = "%s"\n' % devbranch) |
574 | if not is_kernel_yocto: | 578 | if not is_kernel_yocto: |
@@ -586,6 +590,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
586 | preservestampfile = os.path.join(sstate_manifests, 'preserve-stamps') | 590 | preservestampfile = os.path.join(sstate_manifests, 'preserve-stamps') |
587 | with open(preservestampfile, 'w') as f: | 591 | with open(preservestampfile, 'w') as f: |
588 | f.write(d.getVar('STAMP')) | 592 | f.write(d.getVar('STAMP')) |
593 | tinfoil.modified_files() | ||
589 | try: | 594 | try: |
590 | if is_kernel_yocto: | 595 | if is_kernel_yocto: |
591 | # We need to generate the kernel config | 596 | # We need to generate the kernel config |
@@ -648,23 +653,34 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
648 | 653 | ||
649 | if os.path.exists(workshareddir) and (not os.listdir(workshareddir) or kernelVersion != staging_kerVer): | 654 | if os.path.exists(workshareddir) and (not os.listdir(workshareddir) or kernelVersion != staging_kerVer): |
650 | shutil.rmtree(workshareddir) | 655 | shutil.rmtree(workshareddir) |
651 | oe.path.copyhardlinktree(srcsubdir,workshareddir) | 656 | oe.path.copyhardlinktree(srcsubdir, workshareddir) |
652 | elif not os.path.exists(workshareddir): | 657 | elif not os.path.exists(workshareddir): |
653 | oe.path.copyhardlinktree(srcsubdir,workshareddir) | 658 | oe.path.copyhardlinktree(srcsubdir, workshareddir) |
654 | 659 | ||
655 | tempdir_localdir = os.path.join(tempdir, 'oe-local-files') | 660 | tempdir_localdir = os.path.join(tempdir, 'oe-local-files') |
656 | srctree_localdir = os.path.join(srctree, 'oe-local-files') | 661 | srctree_localdir = os.path.join(srctree, 'oe-local-files') |
657 | 662 | ||
658 | if sync: | 663 | if sync: |
659 | bb.process.run('git fetch file://' + srcsubdir + ' ' + devbranch + ':' + devbranch, cwd=srctree) | 664 | try: |
660 | 665 | logger.info('Backing up current %s branch as branch: %s.bak' % (devbranch, devbranch)) | |
661 | # Move oe-local-files directory to srctree | 666 | bb.process.run('git branch -f ' + devbranch + '.bak', cwd=srctree) |
662 | # As the oe-local-files is not part of the constructed git tree, | 667 | |
663 | # remove them directly during the synchrounizating might surprise | 668 | # Use git fetch to update the source with the current recipe |
664 | # the users. Instead, we move it to oe-local-files.bak and remind | 669 | # To be able to update the currently checked out branch with |
665 | # user in the log message. | 670 | # possibly new history (no fast-forward) git needs to be told |
671 | # that's ok | ||
672 | logger.info('Syncing source files including patches to git branch: %s' % devbranch) | ||
673 | bb.process.run('git fetch --update-head-ok --force file://' + srcsubdir + ' ' + devbranch + ':' + devbranch, cwd=srctree) | ||
674 | except bb.process.ExecutionError as e: | ||
675 | raise DevtoolError("Error when syncing source files to local checkout: %s" % str(e)) | ||
676 | |||
677 | # Move the oe-local-files directory to srctree. | ||
678 | # As oe-local-files is not part of the constructed git tree, | ||
679 | # removing it directly during the synchronization might surprise | ||
680 | # the user. Instead, we move it to oe-local-files.bak and remind | ||
681 | # the user in the log message. | ||
666 | if os.path.exists(srctree_localdir + '.bak'): | 682 | if os.path.exists(srctree_localdir + '.bak'): |
667 | shutil.rmtree(srctree_localdir, srctree_localdir + '.bak') | 683 | shutil.rmtree(srctree_localdir + '.bak') |
668 | 684 | ||
669 | if os.path.exists(srctree_localdir): | 685 | if os.path.exists(srctree_localdir): |
670 | logger.info('Backing up current local file directory %s' % srctree_localdir) | 686 | logger.info('Backing up current local file directory %s' % srctree_localdir) |
@@ -680,7 +696,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works | |||
680 | shutil.move(tempdir_localdir, srcsubdir) | 696 | shutil.move(tempdir_localdir, srcsubdir) |
681 | 697 | ||
682 | shutil.move(srcsubdir, srctree) | 698 | shutil.move(srcsubdir, srctree) |
683 | symlink_oelocal_files_srctree(d,srctree) | 699 | symlink_oelocal_files_srctree(d, srctree) |
684 | 700 | ||
685 | if is_kernel_yocto: | 701 | if is_kernel_yocto: |
686 | logger.info('Copying kernel config to srctree') | 702 | logger.info('Copying kernel config to srctree') |
@@ -746,14 +762,14 @@ def _check_preserve(config, recipename): | |||
746 | os.remove(removefile) | 762 | os.remove(removefile) |
747 | else: | 763 | else: |
748 | tf.write(line) | 764 | tf.write(line) |
749 | os.rename(newfile, origfile) | 765 | bb.utils.rename(newfile, origfile) |
750 | 766 | ||
751 | def get_staging_kver(srcdir): | 767 | def get_staging_kver(srcdir): |
752 | # Kernel version from work-shared | 768 | # Kernel version from work-shared |
753 | kerver = [] | 769 | kerver = [] |
754 | staging_kerVer="" | 770 | staging_kerVer="" |
755 | if os.path.exists(srcdir) and os.listdir(srcdir): | 771 | if os.path.exists(srcdir) and os.listdir(srcdir): |
756 | with open(os.path.join(srcdir,"Makefile")) as f: | 772 | with open(os.path.join(srcdir, "Makefile")) as f: |
757 | version = [next(f) for x in range(5)][1:4] | 773 | version = [next(f) for x in range(5)][1:4] |
758 | for word in version: | 774 | for word in version: |
759 | kerver.append(word.split('= ')[1].split('\n')[0]) | 775 | kerver.append(word.split('= ')[1].split('\n')[0]) |
@@ -763,10 +779,20 @@ def get_staging_kver(srcdir): | |||
763 | def get_staging_kbranch(srcdir): | 779 | def get_staging_kbranch(srcdir): |
764 | staging_kbranch = "" | 780 | staging_kbranch = "" |
765 | if os.path.exists(srcdir) and os.listdir(srcdir): | 781 | if os.path.exists(srcdir) and os.listdir(srcdir): |
766 | (branch, _) = bb.process.run('git branch | grep \* | cut -d \' \' -f2', cwd=srcdir) | 782 | (branch, _) = bb.process.run('git branch | grep \\* | cut -d \' \' -f2', cwd=srcdir) |
767 | staging_kbranch = "".join(branch.split('\n')[0]) | 783 | staging_kbranch = "".join(branch.split('\n')[0]) |
768 | return staging_kbranch | 784 | return staging_kbranch |
769 | 785 | ||
786 | def get_real_srctree(srctree, s, workdir): | ||
787 | # Check that recipe isn't using a shared workdir | ||
788 | s = os.path.abspath(s) | ||
789 | workdir = os.path.abspath(workdir) | ||
790 | if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir: | ||
791 | # Handle if S is set to a subdirectory of the source | ||
792 | srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1] | ||
793 | srctree = os.path.join(srctree, srcsubdir) | ||
794 | return srctree | ||
795 | |||
770 | def modify(args, config, basepath, workspace): | 796 | def modify(args, config, basepath, workspace): |
771 | """Entry point for the devtool 'modify' subcommand""" | 797 | """Entry point for the devtool 'modify' subcommand""" |
772 | import bb | 798 | import bb |
@@ -811,8 +837,8 @@ def modify(args, config, basepath, workspace): | |||
811 | 837 | ||
812 | _check_compatible_recipe(pn, rd) | 838 | _check_compatible_recipe(pn, rd) |
813 | 839 | ||
814 | initial_rev = None | 840 | initial_revs = {} |
815 | commits = [] | 841 | commits = {} |
816 | check_commits = False | 842 | check_commits = False |
817 | 843 | ||
818 | if bb.data.inherits_class('kernel-yocto', rd): | 844 | if bb.data.inherits_class('kernel-yocto', rd): |
@@ -824,10 +850,10 @@ def modify(args, config, basepath, workspace): | |||
824 | staging_kerVer = get_staging_kver(srcdir) | 850 | staging_kerVer = get_staging_kver(srcdir) |
825 | staging_kbranch = get_staging_kbranch(srcdir) | 851 | staging_kbranch = get_staging_kbranch(srcdir) |
826 | if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch): | 852 | if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch): |
827 | oe.path.copyhardlinktree(srcdir,srctree) | 853 | oe.path.copyhardlinktree(srcdir, srctree) |
828 | workdir = rd.getVar('WORKDIR') | 854 | workdir = rd.getVar('WORKDIR') |
829 | srcsubdir = rd.getVar('S') | 855 | srcsubdir = rd.getVar('S') |
830 | localfilesdir = os.path.join(srctree,'oe-local-files') | 856 | localfilesdir = os.path.join(srctree, 'oe-local-files') |
831 | # Move local source files into separate subdir | 857 | # Move local source files into separate subdir |
832 | recipe_patches = [os.path.basename(patch) for patch in oe.recipeutils.get_recipe_patches(rd)] | 858 | recipe_patches = [os.path.basename(patch) for patch in oe.recipeutils.get_recipe_patches(rd)] |
833 | local_files = oe.recipeutils.get_recipe_local_files(rd) | 859 | local_files = oe.recipeutils.get_recipe_local_files(rd) |
@@ -851,9 +877,9 @@ def modify(args, config, basepath, workspace): | |||
851 | for fname in local_files: | 877 | for fname in local_files: |
852 | _move_file(os.path.join(workdir, fname), os.path.join(srctree, 'oe-local-files', fname)) | 878 | _move_file(os.path.join(workdir, fname), os.path.join(srctree, 'oe-local-files', fname)) |
853 | with open(os.path.join(srctree, 'oe-local-files', '.gitignore'), 'w') as f: | 879 | with open(os.path.join(srctree, 'oe-local-files', '.gitignore'), 'w') as f: |
854 | f.write('# Ignore local files, by default. Remove this file ''if you want to commit the directory to Git\n*\n') | 880 | f.write('# Ignore local files, by default. Remove this file if you want to commit the directory to Git\n*\n') |
855 | 881 | ||
856 | symlink_oelocal_files_srctree(rd,srctree) | 882 | symlink_oelocal_files_srctree(rd, srctree) |
857 | 883 | ||
858 | task = 'do_configure' | 884 | task = 'do_configure' |
859 | res = tinfoil.build_targets(pn, task, handle_events=True) | 885 | res = tinfoil.build_targets(pn, task, handle_events=True) |
@@ -861,22 +887,30 @@ def modify(args, config, basepath, workspace): | |||
861 | # Copy .config to workspace | 887 | # Copy .config to workspace |
862 | kconfpath = rd.getVar('B') | 888 | kconfpath = rd.getVar('B') |
863 | logger.info('Copying kernel config to workspace') | 889 | logger.info('Copying kernel config to workspace') |
864 | shutil.copy2(os.path.join(kconfpath, '.config'),srctree) | 890 | shutil.copy2(os.path.join(kconfpath, '.config'), srctree) |
865 | 891 | ||
866 | # Set this to true, we still need to get initial_rev | 892 | # Set this to true, we still need to get initial_rev |
867 | # by parsing the git repo | 893 | # by parsing the git repo |
868 | args.no_extract = True | 894 | args.no_extract = True |
869 | 895 | ||
870 | if not args.no_extract: | 896 | if not args.no_extract: |
871 | initial_rev, _ = _extract_source(srctree, args.keep_temp, args.branch, False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides) | 897 | initial_revs["."], _ = _extract_source(srctree, args.keep_temp, args.branch, False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides) |
872 | if not initial_rev: | 898 | if not initial_revs["."]: |
873 | return 1 | 899 | return 1 |
874 | logger.info('Source tree extracted to %s' % srctree) | 900 | logger.info('Source tree extracted to %s' % srctree) |
901 | |||
875 | if os.path.exists(os.path.join(srctree, '.git')): | 902 | if os.path.exists(os.path.join(srctree, '.git')): |
876 | # Get list of commits since this revision | 903 | # Get list of commits since this revision |
877 | (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_rev, cwd=srctree) | 904 | (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_revs["."], cwd=srctree) |
878 | commits = stdout.split() | 905 | commits["."] = stdout.split() |
879 | check_commits = True | 906 | check_commits = True |
907 | (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse devtool-base` $PWD\'', cwd=srctree) | ||
908 | for line in stdout.splitlines(): | ||
909 | (rev, submodule_path) = line.split() | ||
910 | submodule = os.path.relpath(submodule_path, srctree) | ||
911 | initial_revs[submodule] = rev | ||
912 | (stdout, _) = bb.process.run('git rev-list --reverse devtool-base..HEAD', cwd=submodule_path) | ||
913 | commits[submodule] = stdout.split() | ||
880 | else: | 914 | else: |
881 | if os.path.exists(os.path.join(srctree, '.git')): | 915 | if os.path.exists(os.path.join(srctree, '.git')): |
882 | # Check if it's a tree previously extracted by us. This is done | 916 | # Check if it's a tree previously extracted by us. This is done |
@@ -893,11 +927,11 @@ def modify(args, config, basepath, workspace): | |||
893 | for line in stdout.splitlines(): | 927 | for line in stdout.splitlines(): |
894 | if line.startswith('*'): | 928 | if line.startswith('*'): |
895 | (stdout, _) = bb.process.run('git rev-parse devtool-base', cwd=srctree) | 929 | (stdout, _) = bb.process.run('git rev-parse devtool-base', cwd=srctree) |
896 | initial_rev = stdout.rstrip() | 930 | initial_revs["."] = stdout.rstrip() |
897 | if not initial_rev: | 931 | if "." not in initial_revs: |
898 | # Otherwise, just grab the head revision | 932 | # Otherwise, just grab the head revision |
899 | (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree) | 933 | (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree) |
900 | initial_rev = stdout.rstrip() | 934 | initial_revs["."] = stdout.rstrip() |
901 | 935 | ||
902 | branch_patches = {} | 936 | branch_patches = {} |
903 | if check_commits: | 937 | if check_commits: |
@@ -914,62 +948,81 @@ def modify(args, config, basepath, workspace): | |||
914 | seen_patches = [] | 948 | seen_patches = [] |
915 | for branch in branches: | 949 | for branch in branches: |
916 | branch_patches[branch] = [] | 950 | branch_patches[branch] = [] |
917 | (stdout, _) = bb.process.run('git log devtool-base..%s' % branch, cwd=srctree) | 951 | (stdout, _) = bb.process.run('git rev-list devtool-base..%s' % branch, cwd=srctree) |
918 | for line in stdout.splitlines(): | 952 | for sha1 in stdout.splitlines(): |
919 | line = line.strip() | 953 | notes = oe.patch.GitApplyTree.getNotes(srctree, sha1.strip()) |
920 | if line.startswith(oe.patch.GitApplyTree.patch_line_prefix): | 954 | origpatch = notes.get(oe.patch.GitApplyTree.original_patch) |
921 | origpatch = line[len(oe.patch.GitApplyTree.patch_line_prefix):].split(':', 1)[-1].strip() | 955 | if origpatch and origpatch not in seen_patches: |
922 | if not origpatch in seen_patches: | 956 | seen_patches.append(origpatch) |
923 | seen_patches.append(origpatch) | 957 | branch_patches[branch].append(origpatch) |
924 | branch_patches[branch].append(origpatch) | ||
925 | 958 | ||
926 | # Need to grab this here in case the source is within a subdirectory | 959 | # Need to grab this here in case the source is within a subdirectory |
927 | srctreebase = srctree | 960 | srctreebase = srctree |
928 | 961 | srctree = get_real_srctree(srctree, rd.getVar('S'), rd.getVar('WORKDIR')) | |
929 | # Check that recipe isn't using a shared workdir | ||
930 | s = os.path.abspath(rd.getVar('S')) | ||
931 | workdir = os.path.abspath(rd.getVar('WORKDIR')) | ||
932 | if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir: | ||
933 | # Handle if S is set to a subdirectory of the source | ||
934 | srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1] | ||
935 | srctree = os.path.join(srctree, srcsubdir) | ||
936 | 962 | ||
937 | bb.utils.mkdirhier(os.path.dirname(appendfile)) | 963 | bb.utils.mkdirhier(os.path.dirname(appendfile)) |
938 | with open(appendfile, 'w') as f: | 964 | with open(appendfile, 'w') as f: |
939 | f.write('FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n') | 965 | # if not present, add type=git-dependency to the secondary sources |
966 | # (non local files) so they can be extracted correctly when building a recipe after | ||
967 | # doing a devtool modify on it | ||
968 | src_uri = rd.getVar('SRC_URI').split() | ||
969 | src_uri_append = [] | ||
970 | src_uri_remove = [] | ||
971 | |||
972 | # Assume first entry is main source extracted in ${S} so skip it | ||
973 | src_uri = src_uri[1::] | ||
974 | |||
975 | # Add "type=git-dependency" to all non local sources | ||
976 | for url in src_uri: | ||
977 | if not url.startswith('file://') and not 'type=' in url: | ||
978 | src_uri_remove.append(url) | ||
979 | src_uri_append.append('%s;type=git-dependency' % url) | ||
980 | |||
981 | if src_uri_remove: | ||
982 | f.write('SRC_URI:remove = "%s"\n' % ' '.join(src_uri_remove)) | ||
983 | f.write('SRC_URI:append = " %s"\n\n' % ' '.join(src_uri_append)) | ||
984 | |||
985 | f.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n') | ||
940 | # Local files can be modified/tracked in separate subdir under srctree | 986 | # Local files can be modified/tracked in separate subdir under srctree |
941 | # Mostly useful for packages with S != WORKDIR | 987 | # Mostly useful for packages with S != WORKDIR |
942 | f.write('FILESPATH_prepend := "%s:"\n' % | 988 | f.write('FILESPATH:prepend := "%s:"\n' % |
943 | os.path.join(srctreebase, 'oe-local-files')) | 989 | os.path.join(srctreebase, 'oe-local-files')) |
944 | f.write('# srctreebase: %s\n' % srctreebase) | 990 | f.write('# srctreebase: %s\n' % srctreebase) |
945 | 991 | ||
946 | f.write('\ninherit externalsrc\n') | 992 | f.write('\ninherit externalsrc\n') |
947 | f.write('# NOTE: We use pn- overrides here to avoid affecting multiple variants in the case where the recipe uses BBCLASSEXTEND\n') | 993 | f.write('# NOTE: We use pn- overrides here to avoid affecting multiple variants in the case where the recipe uses BBCLASSEXTEND\n') |
948 | f.write('EXTERNALSRC_pn-%s = "%s"\n' % (pn, srctree)) | 994 | f.write('EXTERNALSRC:pn-%s = "%s"\n' % (pn, srctree)) |
949 | 995 | ||
950 | b_is_s = use_external_build(args.same_dir, args.no_same_dir, rd) | 996 | b_is_s = use_external_build(args.same_dir, args.no_same_dir, rd) |
951 | if b_is_s: | 997 | if b_is_s: |
952 | f.write('EXTERNALSRC_BUILD_pn-%s = "%s"\n' % (pn, srctree)) | 998 | f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree)) |
953 | 999 | ||
954 | if bb.data.inherits_class('kernel', rd): | 1000 | if bb.data.inherits_class('kernel', rd): |
955 | f.write('SRCTREECOVEREDTASKS = "do_validate_branches do_kernel_checkout ' | 1001 | f.write('SRCTREECOVEREDTASKS = "do_validate_branches do_kernel_checkout ' |
956 | 'do_fetch do_unpack do_kernel_configme do_kernel_configcheck"\n') | 1002 | 'do_fetch do_unpack do_kernel_configcheck"\n') |
957 | f.write('\ndo_patch[noexec] = "1"\n') | 1003 | f.write('\ndo_patch[noexec] = "1"\n') |
958 | f.write('\ndo_configure_append() {\n' | 1004 | f.write('\ndo_configure:append() {\n' |
959 | ' cp ${B}/.config ${S}/.config.baseline\n' | 1005 | ' cp ${B}/.config ${S}/.config.baseline\n' |
960 | ' ln -sfT ${B}/.config ${S}/.config.new\n' | 1006 | ' ln -sfT ${B}/.config ${S}/.config.new\n' |
961 | '}\n') | 1007 | '}\n') |
962 | if rd.getVarFlag('do_menuconfig','task'): | 1008 | f.write('\ndo_kernel_configme:prepend() {\n' |
963 | f.write('\ndo_configure_append() {\n' | 1009 | ' if [ -e ${S}/.config ]; then\n' |
964 | ' if [ ! ${DEVTOOL_DISABLE_MENUCONFIG} ]; then\n' | 1010 | ' mv ${S}/.config ${S}/.config.old\n' |
965 | ' cp ${B}/.config ${S}/.config.baseline\n' | 1011 | ' fi\n' |
966 | ' ln -sfT ${B}/.config ${S}/.config.new\n' | 1012 | '}\n') |
1013 | if rd.getVarFlag('do_menuconfig', 'task'): | ||
1014 | f.write('\ndo_configure:append() {\n' | ||
1015 | ' if [ ${@oe.types.boolean(d.getVar("KCONFIG_CONFIG_ENABLE_MENUCONFIG"))} = True ]; then\n' | ||
1016 | ' cp ${KCONFIG_CONFIG_ROOTDIR}/.config ${S}/.config.baseline\n' | ||
1017 | ' ln -sfT ${KCONFIG_CONFIG_ROOTDIR}/.config ${S}/.config.new\n' | ||
967 | ' fi\n' | 1018 | ' fi\n' |
968 | '}\n') | 1019 | '}\n') |
969 | if initial_rev: | 1020 | if initial_revs: |
970 | f.write('\n# initial_rev: %s\n' % initial_rev) | 1021 | for name, rev in initial_revs.items(): |
971 | for commit in commits: | 1022 | f.write('\n# initial_rev %s: %s\n' % (name, rev)) |
972 | f.write('# commit: %s\n' % commit) | 1023 | if name in commits: |
1024 | for commit in commits[name]: | ||
1025 | f.write('# commit %s: %s\n' % (name, commit)) | ||
973 | if branch_patches: | 1026 | if branch_patches: |
974 | for branch in branch_patches: | 1027 | for branch in branch_patches: |
975 | if branch == args.branch: | 1028 | if branch == args.branch: |
@@ -1089,10 +1142,10 @@ def rename(args, config, basepath, workspace): | |||
1089 | 1142 | ||
1090 | # Rename bbappend | 1143 | # Rename bbappend |
1091 | logger.info('Renaming %s to %s' % (append, newappend)) | 1144 | logger.info('Renaming %s to %s' % (append, newappend)) |
1092 | os.rename(append, newappend) | 1145 | bb.utils.rename(append, newappend) |
1093 | # Rename recipe file | 1146 | # Rename recipe file |
1094 | logger.info('Renaming %s to %s' % (recipefile, newfile)) | 1147 | logger.info('Renaming %s to %s' % (recipefile, newfile)) |
1095 | os.rename(recipefile, newfile) | 1148 | bb.utils.rename(recipefile, newfile) |
1096 | 1149 | ||
1097 | # Rename source tree if it's the default path | 1150 | # Rename source tree if it's the default path |
1098 | appendmd5 = None | 1151 | appendmd5 = None |
@@ -1192,44 +1245,56 @@ def _get_patchset_revs(srctree, recipe_path, initial_rev=None, force_patch_refre | |||
1192 | branchname = stdout.rstrip() | 1245 | branchname = stdout.rstrip() |
1193 | 1246 | ||
1194 | # Parse initial rev from recipe if not specified | 1247 | # Parse initial rev from recipe if not specified |
1195 | commits = [] | 1248 | commits = {} |
1196 | patches = [] | 1249 | patches = [] |
1250 | initial_revs = {} | ||
1197 | with open(recipe_path, 'r') as f: | 1251 | with open(recipe_path, 'r') as f: |
1198 | for line in f: | 1252 | for line in f: |
1199 | if line.startswith('# initial_rev:'): | 1253 | pattern = r'^#\s.*\s(.*):\s([0-9a-fA-F]+)$' |
1200 | if not initial_rev: | 1254 | match = re.search(pattern, line) |
1201 | initial_rev = line.split(':')[-1].strip() | 1255 | if match: |
1202 | elif line.startswith('# commit:') and not force_patch_refresh: | 1256 | name = match.group(1) |
1203 | commits.append(line.split(':')[-1].strip()) | 1257 | rev = match.group(2) |
1204 | elif line.startswith('# patches_%s:' % branchname): | 1258 | if line.startswith('# initial_rev'): |
1205 | patches = line.split(':')[-1].strip().split(',') | 1259 | if not (name == "." and initial_rev): |
1206 | 1260 | initial_revs[name] = rev | |
1207 | update_rev = initial_rev | 1261 | elif line.startswith('# commit') and not force_patch_refresh: |
1208 | changed_revs = None | 1262 | if name not in commits: |
1209 | if initial_rev: | 1263 | commits[name] = [rev] |
1264 | else: | ||
1265 | commits[name].append(rev) | ||
1266 | elif line.startswith('# patches_%s:' % branchname): | ||
1267 | patches = line.split(':')[-1].strip().split(',') | ||
1268 | |||
1269 | update_revs = dict(initial_revs) | ||
1270 | changed_revs = {} | ||
1271 | for name, rev in initial_revs.items(): | ||
1210 | # Find first actually changed revision | 1272 | # Find first actually changed revision |
1211 | stdout, _ = bb.process.run('git rev-list --reverse %s..HEAD' % | 1273 | stdout, _ = bb.process.run('git rev-list --reverse %s..HEAD' % |
1212 | initial_rev, cwd=srctree) | 1274 | rev, cwd=os.path.join(srctree, name)) |
1213 | newcommits = stdout.split() | 1275 | newcommits = stdout.split() |
1214 | for i in range(min(len(commits), len(newcommits))): | 1276 | if name in commits: |
1215 | if newcommits[i] == commits[i]: | 1277 | for i in range(min(len(commits[name]), len(newcommits))): |
1216 | update_rev = commits[i] | 1278 | if newcommits[i] == commits[name][i]: |
1279 | update_revs[name] = commits[name][i] | ||
1217 | 1280 | ||
1218 | try: | 1281 | try: |
1219 | stdout, _ = bb.process.run('git cherry devtool-patched', | 1282 | stdout, _ = bb.process.run('git cherry devtool-patched', |
1220 | cwd=srctree) | 1283 | cwd=os.path.join(srctree, name)) |
1221 | except bb.process.ExecutionError as err: | 1284 | except bb.process.ExecutionError as err: |
1222 | stdout = None | 1285 | stdout = None |
1223 | 1286 | ||
1224 | if stdout is not None and not force_patch_refresh: | 1287 | if stdout is not None and not force_patch_refresh: |
1225 | changed_revs = [] | ||
1226 | for line in stdout.splitlines(): | 1288 | for line in stdout.splitlines(): |
1227 | if line.startswith('+ '): | 1289 | if line.startswith('+ '): |
1228 | rev = line.split()[1] | 1290 | rev = line.split()[1] |
1229 | if rev in newcommits: | 1291 | if rev in newcommits: |
1230 | changed_revs.append(rev) | 1292 | if name not in changed_revs: |
1293 | changed_revs[name] = [rev] | ||
1294 | else: | ||
1295 | changed_revs[name].append(rev) | ||
1231 | 1296 | ||
1232 | return initial_rev, update_rev, changed_revs, patches | 1297 | return initial_revs, update_revs, changed_revs, patches |
1233 | 1298 | ||
1234 | def _remove_file_entries(srcuri, filelist): | 1299 | def _remove_file_entries(srcuri, filelist): |
1235 | """Remove file:// entries from SRC_URI""" | 1300 | """Remove file:// entries from SRC_URI""" |
@@ -1284,14 +1349,17 @@ def _remove_source_files(append, files, destpath, no_report_remove=False, dry_ru | |||
1284 | raise | 1349 | raise |
1285 | 1350 | ||
1286 | 1351 | ||
1287 | def _export_patches(srctree, rd, start_rev, destdir, changed_revs=None): | 1352 | def _export_patches(srctree, rd, start_revs, destdir, changed_revs=None): |
1288 | """Export patches from srctree to given location. | 1353 | """Export patches from srctree to given location. |
1289 | Returns three-tuple of dicts: | 1354 | Returns three-tuple of dicts: |
1290 | 1. updated - patches that already exist in SRCURI | 1355 | 1. updated - patches that already exist in SRCURI |
1291 | 2. added - new patches that don't exist in SRCURI | 1356 | 2. added - new patches that don't exist in SRCURI |
1292 | 3 removed - patches that exist in SRCURI but not in exported patches | 1357 | 3 removed - patches that exist in SRCURI but not in exported patches |
1293 | In each dict the key is the 'basepath' of the URI and value is the | 1358 | In each dict the key is the 'basepath' of the URI and value is: |
1294 | absolute path to the existing file in recipe space (if any). | 1359 | - for updated and added dicts, a dict with 2 optionnal keys: |
1360 | - 'path': the absolute path to the existing file in recipe space (if any) | ||
1361 | - 'patchdir': the directory in wich the patch should be applied (if any) | ||
1362 | - for removed dict, the absolute path to the existing file in recipe space | ||
1295 | """ | 1363 | """ |
1296 | import oe.recipeutils | 1364 | import oe.recipeutils |
1297 | from oe.patch import GitApplyTree | 1365 | from oe.patch import GitApplyTree |
@@ -1305,54 +1373,60 @@ def _export_patches(srctree, rd, start_rev, destdir, changed_revs=None): | |||
1305 | 1373 | ||
1306 | # Generate patches from Git, exclude local files directory | 1374 | # Generate patches from Git, exclude local files directory |
1307 | patch_pathspec = _git_exclude_path(srctree, 'oe-local-files') | 1375 | patch_pathspec = _git_exclude_path(srctree, 'oe-local-files') |
1308 | GitApplyTree.extractPatches(srctree, start_rev, destdir, patch_pathspec) | 1376 | GitApplyTree.extractPatches(srctree, start_revs, destdir, patch_pathspec) |
1309 | 1377 | for dirpath, dirnames, filenames in os.walk(destdir): | |
1310 | new_patches = sorted(os.listdir(destdir)) | 1378 | new_patches = filenames |
1311 | for new_patch in new_patches: | 1379 | reldirpath = os.path.relpath(dirpath, destdir) |
1312 | # Strip numbering from patch names. If it's a git sequence named patch, | 1380 | for new_patch in new_patches: |
1313 | # the numbers might not match up since we are starting from a different | 1381 | # Strip numbering from patch names. If it's a git sequence named patch, |
1314 | # revision This does assume that people are using unique shortlog | 1382 | # the numbers might not match up since we are starting from a different |
1315 | # values, but they ought to be anyway... | 1383 | # revision This does assume that people are using unique shortlog |
1316 | new_basename = seqpatch_re.match(new_patch).group(2) | 1384 | # values, but they ought to be anyway... |
1317 | match_name = None | 1385 | new_basename = seqpatch_re.match(new_patch).group(2) |
1318 | for old_patch in existing_patches: | 1386 | match_name = None |
1319 | old_basename = seqpatch_re.match(old_patch).group(2) | 1387 | for old_patch in existing_patches: |
1320 | old_basename_splitext = os.path.splitext(old_basename) | 1388 | old_basename = seqpatch_re.match(old_patch).group(2) |
1321 | if old_basename.endswith(('.gz', '.bz2', '.Z')) and old_basename_splitext[0] == new_basename: | 1389 | old_basename_splitext = os.path.splitext(old_basename) |
1322 | old_patch_noext = os.path.splitext(old_patch)[0] | 1390 | if old_basename.endswith(('.gz', '.bz2', '.Z')) and old_basename_splitext[0] == new_basename: |
1323 | match_name = old_patch_noext | 1391 | old_patch_noext = os.path.splitext(old_patch)[0] |
1324 | break | 1392 | match_name = old_patch_noext |
1325 | elif new_basename == old_basename: | 1393 | break |
1326 | match_name = old_patch | 1394 | elif new_basename == old_basename: |
1327 | break | 1395 | match_name = old_patch |
1328 | if match_name: | 1396 | break |
1329 | # Rename patch files | 1397 | if match_name: |
1330 | if new_patch != match_name: | 1398 | # Rename patch files |
1331 | os.rename(os.path.join(destdir, new_patch), | 1399 | if new_patch != match_name: |
1332 | os.path.join(destdir, match_name)) | 1400 | bb.utils.rename(os.path.join(destdir, new_patch), |
1333 | # Need to pop it off the list now before checking changed_revs | 1401 | os.path.join(destdir, match_name)) |
1334 | oldpath = existing_patches.pop(old_patch) | 1402 | # Need to pop it off the list now before checking changed_revs |
1335 | if changed_revs is not None: | 1403 | oldpath = existing_patches.pop(old_patch) |
1336 | # Avoid updating patches that have not actually changed | 1404 | if changed_revs is not None and dirpath in changed_revs: |
1337 | with open(os.path.join(destdir, match_name), 'r') as f: | 1405 | # Avoid updating patches that have not actually changed |
1338 | firstlineitems = f.readline().split() | 1406 | with open(os.path.join(dirpath, match_name), 'r') as f: |
1339 | # Looking for "From <hash>" line | 1407 | firstlineitems = f.readline().split() |
1340 | if len(firstlineitems) > 1 and len(firstlineitems[1]) == 40: | 1408 | # Looking for "From <hash>" line |
1341 | if not firstlineitems[1] in changed_revs: | 1409 | if len(firstlineitems) > 1 and len(firstlineitems[1]) == 40: |
1342 | continue | 1410 | if not firstlineitems[1] in changed_revs[dirpath]: |
1343 | # Recompress if necessary | 1411 | continue |
1344 | if oldpath.endswith(('.gz', '.Z')): | 1412 | # Recompress if necessary |
1345 | bb.process.run(['gzip', match_name], cwd=destdir) | 1413 | if oldpath.endswith(('.gz', '.Z')): |
1346 | if oldpath.endswith('.gz'): | 1414 | bb.process.run(['gzip', match_name], cwd=destdir) |
1347 | match_name += '.gz' | 1415 | if oldpath.endswith('.gz'): |
1348 | else: | 1416 | match_name += '.gz' |
1349 | match_name += '.Z' | 1417 | else: |
1350 | elif oldpath.endswith('.bz2'): | 1418 | match_name += '.Z' |
1351 | bb.process.run(['bzip2', match_name], cwd=destdir) | 1419 | elif oldpath.endswith('.bz2'): |
1352 | match_name += '.bz2' | 1420 | bb.process.run(['bzip2', match_name], cwd=destdir) |
1353 | updated[match_name] = oldpath | 1421 | match_name += '.bz2' |
1354 | else: | 1422 | updated[match_name] = {'path' : oldpath} |
1355 | added[new_patch] = None | 1423 | if reldirpath != ".": |
1424 | updated[match_name]['patchdir'] = reldirpath | ||
1425 | else: | ||
1426 | added[new_patch] = {} | ||
1427 | if reldirpath != ".": | ||
1428 | added[new_patch]['patchdir'] = reldirpath | ||
1429 | |||
1356 | return (updated, added, existing_patches) | 1430 | return (updated, added, existing_patches) |
1357 | 1431 | ||
1358 | 1432 | ||
@@ -1389,8 +1463,10 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
1389 | 1. updated - files that already exist in SRCURI | 1463 | 1. updated - files that already exist in SRCURI |
1390 | 2. added - new files files that don't exist in SRCURI | 1464 | 2. added - new files files that don't exist in SRCURI |
1391 | 3 removed - files that exist in SRCURI but not in exported files | 1465 | 3 removed - files that exist in SRCURI but not in exported files |
1392 | In each dict the key is the 'basepath' of the URI and value is the | 1466 | In each dict the key is the 'basepath' of the URI and value is: |
1393 | absolute path to the existing file in recipe space (if any). | 1467 | - for updated and added dicts, a dict with 1 optionnal key: |
1468 | - 'path': the absolute path to the existing file in recipe space (if any) | ||
1469 | - for removed dict, the absolute path to the existing file in recipe space | ||
1394 | """ | 1470 | """ |
1395 | import oe.recipeutils | 1471 | import oe.recipeutils |
1396 | 1472 | ||
@@ -1403,6 +1479,18 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
1403 | updated = OrderedDict() | 1479 | updated = OrderedDict() |
1404 | added = OrderedDict() | 1480 | added = OrderedDict() |
1405 | removed = OrderedDict() | 1481 | removed = OrderedDict() |
1482 | |||
1483 | # Get current branch and return early with empty lists | ||
1484 | # if on one of the override branches | ||
1485 | # (local files are provided only for the main branch and processing | ||
1486 | # them against lists from recipe overrides will result in mismatches | ||
1487 | # and broken modifications to recipes). | ||
1488 | stdout, _ = bb.process.run('git rev-parse --abbrev-ref HEAD', | ||
1489 | cwd=srctree) | ||
1490 | branchname = stdout.rstrip() | ||
1491 | if branchname.startswith(override_branch_prefix): | ||
1492 | return (updated, added, removed) | ||
1493 | |||
1406 | local_files_dir = os.path.join(srctreebase, 'oe-local-files') | 1494 | local_files_dir = os.path.join(srctreebase, 'oe-local-files') |
1407 | git_files = _git_ls_tree(srctree) | 1495 | git_files = _git_ls_tree(srctree) |
1408 | if 'oe-local-files' in git_files: | 1496 | if 'oe-local-files' in git_files: |
@@ -1460,9 +1548,9 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
1460 | origpath = existing_files.pop(fname) | 1548 | origpath = existing_files.pop(fname) |
1461 | workpath = os.path.join(local_files_dir, fname) | 1549 | workpath = os.path.join(local_files_dir, fname) |
1462 | if not filecmp.cmp(origpath, workpath): | 1550 | if not filecmp.cmp(origpath, workpath): |
1463 | updated[fname] = origpath | 1551 | updated[fname] = {'path' : origpath} |
1464 | elif fname != '.gitignore': | 1552 | elif fname != '.gitignore': |
1465 | added[fname] = None | 1553 | added[fname] = {} |
1466 | 1554 | ||
1467 | workdir = rd.getVar('WORKDIR') | 1555 | workdir = rd.getVar('WORKDIR') |
1468 | s = rd.getVar('S') | 1556 | s = rd.getVar('S') |
@@ -1479,7 +1567,7 @@ def _export_local_files(srctree, rd, destdir, srctreebase): | |||
1479 | if os.path.exists(fpath): | 1567 | if os.path.exists(fpath): |
1480 | origpath = existing_files.pop(fname) | 1568 | origpath = existing_files.pop(fname) |
1481 | if not filecmp.cmp(origpath, fpath): | 1569 | if not filecmp.cmp(origpath, fpath): |
1482 | updated[fpath] = origpath | 1570 | updated[fpath] = {'path' : origpath} |
1483 | 1571 | ||
1484 | removed = existing_files | 1572 | removed = existing_files |
1485 | return (updated, added, removed) | 1573 | return (updated, added, removed) |
@@ -1508,6 +1596,12 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi | |||
1508 | recipedir = os.path.basename(recipefile) | 1596 | recipedir = os.path.basename(recipefile) |
1509 | logger.info('Updating SRCREV in recipe %s%s' % (recipedir, dry_run_suffix)) | 1597 | logger.info('Updating SRCREV in recipe %s%s' % (recipedir, dry_run_suffix)) |
1510 | 1598 | ||
1599 | # Get original SRCREV | ||
1600 | old_srcrev = rd.getVar('SRCREV') or '' | ||
1601 | if old_srcrev == "INVALID": | ||
1602 | raise DevtoolError('Update mode srcrev is only valid for recipe fetched from an SCM repository') | ||
1603 | old_srcrev = {'.': old_srcrev} | ||
1604 | |||
1511 | # Get HEAD revision | 1605 | # Get HEAD revision |
1512 | try: | 1606 | try: |
1513 | stdout, _ = bb.process.run('git rev-parse HEAD', cwd=srctree) | 1607 | stdout, _ = bb.process.run('git rev-parse HEAD', cwd=srctree) |
@@ -1534,13 +1628,12 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi | |||
1534 | if not no_remove: | 1628 | if not no_remove: |
1535 | # Find list of existing patches in recipe file | 1629 | # Find list of existing patches in recipe file |
1536 | patches_dir = tempfile.mkdtemp(dir=tempdir) | 1630 | patches_dir = tempfile.mkdtemp(dir=tempdir) |
1537 | old_srcrev = rd.getVar('SRCREV') or '' | ||
1538 | upd_p, new_p, del_p = _export_patches(srctree, rd, old_srcrev, | 1631 | upd_p, new_p, del_p = _export_patches(srctree, rd, old_srcrev, |
1539 | patches_dir) | 1632 | patches_dir) |
1540 | logger.debug('Patches: update %s, new %s, delete %s' % (dict(upd_p), dict(new_p), dict(del_p))) | 1633 | logger.debug('Patches: update %s, new %s, delete %s' % (dict(upd_p), dict(new_p), dict(del_p))) |
1541 | 1634 | ||
1542 | # Remove deleted local files and "overlapping" patches | 1635 | # Remove deleted local files and "overlapping" patches |
1543 | remove_files = list(del_f.values()) + list(upd_p.values()) + list(del_p.values()) | 1636 | remove_files = list(del_f.values()) + [value["path"] for value in upd_p.values() if "path" in value] + [value["path"] for value in del_p.values() if "path" in value] |
1544 | if remove_files: | 1637 | if remove_files: |
1545 | removedentries = _remove_file_entries(srcuri, remove_files)[0] | 1638 | removedentries = _remove_file_entries(srcuri, remove_files)[0] |
1546 | update_srcuri = True | 1639 | update_srcuri = True |
@@ -1554,14 +1647,14 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi | |||
1554 | patchfields['SRC_URI'] = '\\\n '.join(srcuri) | 1647 | patchfields['SRC_URI'] = '\\\n '.join(srcuri) |
1555 | if dry_run_outdir: | 1648 | if dry_run_outdir: |
1556 | logger.info('Creating bbappend (dry-run)') | 1649 | logger.info('Creating bbappend (dry-run)') |
1557 | else: | 1650 | appendfile, destpath = oe.recipeutils.bbappend_recipe( |
1558 | appendfile, destpath = oe.recipeutils.bbappend_recipe( | 1651 | rd, appendlayerdir, files, wildcardver=wildcard_version, |
1559 | rd, appendlayerdir, files, wildcardver=wildcard_version, | 1652 | extralines=patchfields, removevalues=removevalues, |
1560 | extralines=patchfields, removevalues=removevalues, | 1653 | redirect_output=dry_run_outdir) |
1561 | redirect_output=dry_run_outdir) | ||
1562 | else: | 1654 | else: |
1563 | files_dir = _determine_files_dir(rd) | 1655 | files_dir = _determine_files_dir(rd) |
1564 | for basepath, path in upd_f.items(): | 1656 | for basepath, param in upd_f.items(): |
1657 | path = param['path'] | ||
1565 | logger.info('Updating file %s%s' % (basepath, dry_run_suffix)) | 1658 | logger.info('Updating file %s%s' % (basepath, dry_run_suffix)) |
1566 | if os.path.isabs(basepath): | 1659 | if os.path.isabs(basepath): |
1567 | # Original file (probably with subdir pointing inside source tree) | 1660 | # Original file (probably with subdir pointing inside source tree) |
@@ -1571,7 +1664,8 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi | |||
1571 | _move_file(os.path.join(local_files_dir, basepath), path, | 1664 | _move_file(os.path.join(local_files_dir, basepath), path, |
1572 | dry_run_outdir=dry_run_outdir, base_outdir=recipedir) | 1665 | dry_run_outdir=dry_run_outdir, base_outdir=recipedir) |
1573 | update_srcuri= True | 1666 | update_srcuri= True |
1574 | for basepath, path in new_f.items(): | 1667 | for basepath, param in new_f.items(): |
1668 | path = param['path'] | ||
1575 | logger.info('Adding new file %s%s' % (basepath, dry_run_suffix)) | 1669 | logger.info('Adding new file %s%s' % (basepath, dry_run_suffix)) |
1576 | _move_file(os.path.join(local_files_dir, basepath), | 1670 | _move_file(os.path.join(local_files_dir, basepath), |
1577 | os.path.join(files_dir, basepath), | 1671 | os.path.join(files_dir, basepath), |
@@ -1603,9 +1697,22 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil | |||
1603 | if not os.path.exists(append): | 1697 | if not os.path.exists(append): |
1604 | raise DevtoolError('unable to find workspace bbappend for recipe %s' % | 1698 | raise DevtoolError('unable to find workspace bbappend for recipe %s' % |
1605 | recipename) | 1699 | recipename) |
1700 | srctreebase = workspace[recipename]['srctreebase'] | ||
1701 | relpatchdir = os.path.relpath(srctreebase, srctree) | ||
1702 | if relpatchdir == '.': | ||
1703 | patchdir_params = {} | ||
1704 | else: | ||
1705 | patchdir_params = {'patchdir': relpatchdir} | ||
1606 | 1706 | ||
1607 | initial_rev, update_rev, changed_revs, filter_patches = _get_patchset_revs(srctree, append, initial_rev, force_patch_refresh) | 1707 | def srcuri_entry(basepath, patchdir_params): |
1608 | if not initial_rev: | 1708 | if patchdir_params: |
1709 | paramstr = ';' + ';'.join('%s=%s' % (k,v) for k,v in patchdir_params.items()) | ||
1710 | else: | ||
1711 | paramstr = '' | ||
1712 | return 'file://%s%s' % (basepath, paramstr) | ||
1713 | |||
1714 | initial_revs, update_revs, changed_revs, filter_patches = _get_patchset_revs(srctree, append, initial_rev, force_patch_refresh) | ||
1715 | if not initial_revs: | ||
1609 | raise DevtoolError('Unable to find initial revision - please specify ' | 1716 | raise DevtoolError('Unable to find initial revision - please specify ' |
1610 | 'it with --initial-rev') | 1717 | 'it with --initial-rev') |
1611 | 1718 | ||
@@ -1619,61 +1726,69 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil | |||
1619 | tempdir = tempfile.mkdtemp(prefix='devtool') | 1726 | tempdir = tempfile.mkdtemp(prefix='devtool') |
1620 | try: | 1727 | try: |
1621 | local_files_dir = tempfile.mkdtemp(dir=tempdir) | 1728 | local_files_dir = tempfile.mkdtemp(dir=tempdir) |
1622 | if filter_patches: | 1729 | upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase) |
1623 | upd_f = {} | ||
1624 | new_f = {} | ||
1625 | del_f = {} | ||
1626 | else: | ||
1627 | srctreebase = workspace[recipename]['srctreebase'] | ||
1628 | upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase) | ||
1629 | |||
1630 | remove_files = [] | ||
1631 | if not no_remove: | ||
1632 | # Get all patches from source tree and check if any should be removed | ||
1633 | all_patches_dir = tempfile.mkdtemp(dir=tempdir) | ||
1634 | _, _, del_p = _export_patches(srctree, rd, initial_rev, | ||
1635 | all_patches_dir) | ||
1636 | # Remove deleted local files and patches | ||
1637 | remove_files = list(del_f.values()) + list(del_p.values()) | ||
1638 | 1730 | ||
1639 | # Get updated patches from source tree | 1731 | # Get updated patches from source tree |
1640 | patches_dir = tempfile.mkdtemp(dir=tempdir) | 1732 | patches_dir = tempfile.mkdtemp(dir=tempdir) |
1641 | upd_p, new_p, _ = _export_patches(srctree, rd, update_rev, | 1733 | upd_p, new_p, _ = _export_patches(srctree, rd, update_revs, |
1642 | patches_dir, changed_revs) | 1734 | patches_dir, changed_revs) |
1735 | # Get all patches from source tree and check if any should be removed | ||
1736 | all_patches_dir = tempfile.mkdtemp(dir=tempdir) | ||
1737 | _, _, del_p = _export_patches(srctree, rd, initial_revs, | ||
1738 | all_patches_dir) | ||
1643 | logger.debug('Pre-filtering: update: %s, new: %s' % (dict(upd_p), dict(new_p))) | 1739 | logger.debug('Pre-filtering: update: %s, new: %s' % (dict(upd_p), dict(new_p))) |
1644 | if filter_patches: | 1740 | if filter_patches: |
1645 | new_p = OrderedDict() | 1741 | new_p = OrderedDict() |
1646 | upd_p = OrderedDict((k,v) for k,v in upd_p.items() if k in filter_patches) | 1742 | upd_p = OrderedDict((k,v) for k,v in upd_p.items() if k in filter_patches) |
1647 | remove_files = [f for f in remove_files if f in filter_patches] | 1743 | del_p = OrderedDict((k,v) for k,v in del_p.items() if k in filter_patches) |
1744 | remove_files = [] | ||
1745 | if not no_remove: | ||
1746 | # Remove deleted local files and patches | ||
1747 | remove_files = list(del_f.values()) + list(del_p.values()) | ||
1648 | updatefiles = False | 1748 | updatefiles = False |
1649 | updaterecipe = False | 1749 | updaterecipe = False |
1650 | destpath = None | 1750 | destpath = None |
1651 | srcuri = (rd.getVar('SRC_URI', False) or '').split() | 1751 | srcuri = (rd.getVar('SRC_URI', False) or '').split() |
1752 | |||
1652 | if appendlayerdir: | 1753 | if appendlayerdir: |
1653 | files = OrderedDict((os.path.join(local_files_dir, key), val) for | 1754 | files = OrderedDict((os.path.join(local_files_dir, key), val) for |
1654 | key, val in list(upd_f.items()) + list(new_f.items())) | 1755 | key, val in list(upd_f.items()) + list(new_f.items())) |
1655 | files.update(OrderedDict((os.path.join(patches_dir, key), val) for | 1756 | files.update(OrderedDict((os.path.join(patches_dir, key), val) for |
1656 | key, val in list(upd_p.items()) + list(new_p.items()))) | 1757 | key, val in list(upd_p.items()) + list(new_p.items()))) |
1758 | |||
1759 | params = [] | ||
1760 | for file, param in files.items(): | ||
1761 | patchdir_param = dict(patchdir_params) | ||
1762 | patchdir = param.get('patchdir', ".") | ||
1763 | if patchdir != "." : | ||
1764 | if patchdir_param: | ||
1765 | patchdir_param['patchdir'] += patchdir | ||
1766 | else: | ||
1767 | patchdir_param['patchdir'] = patchdir | ||
1768 | params.append(patchdir_param) | ||
1769 | |||
1657 | if files or remove_files: | 1770 | if files or remove_files: |
1658 | removevalues = None | 1771 | removevalues = None |
1659 | if remove_files: | 1772 | if remove_files: |
1660 | removedentries, remaining = _remove_file_entries( | 1773 | removedentries, remaining = _remove_file_entries( |
1661 | srcuri, remove_files) | 1774 | srcuri, remove_files) |
1662 | if removedentries or remaining: | 1775 | if removedentries or remaining: |
1663 | remaining = ['file://' + os.path.basename(item) for | 1776 | remaining = [srcuri_entry(os.path.basename(item), patchdir_params) for |
1664 | item in remaining] | 1777 | item in remaining] |
1665 | removevalues = {'SRC_URI': removedentries + remaining} | 1778 | removevalues = {'SRC_URI': removedentries + remaining} |
1666 | appendfile, destpath = oe.recipeutils.bbappend_recipe( | 1779 | appendfile, destpath = oe.recipeutils.bbappend_recipe( |
1667 | rd, appendlayerdir, files, | 1780 | rd, appendlayerdir, files, |
1668 | wildcardver=wildcard_version, | 1781 | wildcardver=wildcard_version, |
1669 | removevalues=removevalues, | 1782 | removevalues=removevalues, |
1670 | redirect_output=dry_run_outdir) | 1783 | redirect_output=dry_run_outdir, |
1784 | params=params) | ||
1671 | else: | 1785 | else: |
1672 | logger.info('No patches or local source files needed updating') | 1786 | logger.info('No patches or local source files needed updating') |
1673 | else: | 1787 | else: |
1674 | # Update existing files | 1788 | # Update existing files |
1675 | files_dir = _determine_files_dir(rd) | 1789 | files_dir = _determine_files_dir(rd) |
1676 | for basepath, path in upd_f.items(): | 1790 | for basepath, param in upd_f.items(): |
1791 | path = param['path'] | ||
1677 | logger.info('Updating file %s' % basepath) | 1792 | logger.info('Updating file %s' % basepath) |
1678 | if os.path.isabs(basepath): | 1793 | if os.path.isabs(basepath): |
1679 | # Original file (probably with subdir pointing inside source tree) | 1794 | # Original file (probably with subdir pointing inside source tree) |
@@ -1684,14 +1799,22 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil | |||
1684 | _move_file(os.path.join(local_files_dir, basepath), path, | 1799 | _move_file(os.path.join(local_files_dir, basepath), path, |
1685 | dry_run_outdir=dry_run_outdir, base_outdir=recipedir) | 1800 | dry_run_outdir=dry_run_outdir, base_outdir=recipedir) |
1686 | updatefiles = True | 1801 | updatefiles = True |
1687 | for basepath, path in upd_p.items(): | 1802 | for basepath, param in upd_p.items(): |
1688 | patchfn = os.path.join(patches_dir, basepath) | 1803 | path = param['path'] |
1804 | patchdir = param.get('patchdir', ".") | ||
1805 | if patchdir != "." : | ||
1806 | patchdir_param = dict(patchdir_params) | ||
1807 | if patchdir_param: | ||
1808 | patchdir_param['patchdir'] += patchdir | ||
1809 | else: | ||
1810 | patchdir_param['patchdir'] = patchdir | ||
1811 | patchfn = os.path.join(patches_dir, patchdir, basepath) | ||
1689 | if os.path.dirname(path) + '/' == dl_dir: | 1812 | if os.path.dirname(path) + '/' == dl_dir: |
1690 | # This is a a downloaded patch file - we now need to | 1813 | # This is a a downloaded patch file - we now need to |
1691 | # replace the entry in SRC_URI with our local version | 1814 | # replace the entry in SRC_URI with our local version |
1692 | logger.info('Replacing remote patch %s with updated local version' % basepath) | 1815 | logger.info('Replacing remote patch %s with updated local version' % basepath) |
1693 | path = os.path.join(files_dir, basepath) | 1816 | path = os.path.join(files_dir, basepath) |
1694 | _replace_srcuri_entry(srcuri, basepath, 'file://%s' % basepath) | 1817 | _replace_srcuri_entry(srcuri, basepath, srcuri_entry(basepath, patchdir_param)) |
1695 | updaterecipe = True | 1818 | updaterecipe = True |
1696 | else: | 1819 | else: |
1697 | logger.info('Updating patch %s%s' % (basepath, dry_run_suffix)) | 1820 | logger.info('Updating patch %s%s' % (basepath, dry_run_suffix)) |
@@ -1699,21 +1822,29 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil | |||
1699 | dry_run_outdir=dry_run_outdir, base_outdir=recipedir) | 1822 | dry_run_outdir=dry_run_outdir, base_outdir=recipedir) |
1700 | updatefiles = True | 1823 | updatefiles = True |
1701 | # Add any new files | 1824 | # Add any new files |
1702 | for basepath, path in new_f.items(): | 1825 | for basepath, param in new_f.items(): |
1703 | logger.info('Adding new file %s%s' % (basepath, dry_run_suffix)) | 1826 | logger.info('Adding new file %s%s' % (basepath, dry_run_suffix)) |
1704 | _move_file(os.path.join(local_files_dir, basepath), | 1827 | _move_file(os.path.join(local_files_dir, basepath), |
1705 | os.path.join(files_dir, basepath), | 1828 | os.path.join(files_dir, basepath), |
1706 | dry_run_outdir=dry_run_outdir, | 1829 | dry_run_outdir=dry_run_outdir, |
1707 | base_outdir=recipedir) | 1830 | base_outdir=recipedir) |
1708 | srcuri.append('file://%s' % basepath) | 1831 | srcuri.append(srcuri_entry(basepath, patchdir_params)) |
1709 | updaterecipe = True | 1832 | updaterecipe = True |
1710 | for basepath, path in new_p.items(): | 1833 | for basepath, param in new_p.items(): |
1834 | patchdir = param.get('patchdir', ".") | ||
1711 | logger.info('Adding new patch %s%s' % (basepath, dry_run_suffix)) | 1835 | logger.info('Adding new patch %s%s' % (basepath, dry_run_suffix)) |
1712 | _move_file(os.path.join(patches_dir, basepath), | 1836 | _move_file(os.path.join(patches_dir, patchdir, basepath), |
1713 | os.path.join(files_dir, basepath), | 1837 | os.path.join(files_dir, basepath), |
1714 | dry_run_outdir=dry_run_outdir, | 1838 | dry_run_outdir=dry_run_outdir, |
1715 | base_outdir=recipedir) | 1839 | base_outdir=recipedir) |
1716 | srcuri.append('file://%s' % basepath) | 1840 | params = dict(patchdir_params) |
1841 | if patchdir != "." : | ||
1842 | if params: | ||
1843 | params['patchdir'] += patchdir | ||
1844 | else: | ||
1845 | params['patchdir'] = patchdir | ||
1846 | |||
1847 | srcuri.append(srcuri_entry(basepath, params)) | ||
1717 | updaterecipe = True | 1848 | updaterecipe = True |
1718 | # Update recipe, if needed | 1849 | # Update recipe, if needed |
1719 | if _remove_file_entries(srcuri, remove_files)[0]: | 1850 | if _remove_file_entries(srcuri, remove_files)[0]: |
@@ -1770,6 +1901,8 @@ def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_ver | |||
1770 | for line in stdout.splitlines(): | 1901 | for line in stdout.splitlines(): |
1771 | branchname = line[2:] | 1902 | branchname = line[2:] |
1772 | if line.startswith('* '): | 1903 | if line.startswith('* '): |
1904 | if 'HEAD' in line: | ||
1905 | raise DevtoolError('Detached HEAD - please check out a branch, e.g., "devtool"') | ||
1773 | startbranch = branchname | 1906 | startbranch = branchname |
1774 | if branchname.startswith(override_branch_prefix): | 1907 | if branchname.startswith(override_branch_prefix): |
1775 | override_branches.append(branchname) | 1908 | override_branches.append(branchname) |
@@ -1959,9 +2092,19 @@ def _reset(recipes, no_clean, remove_work, config, basepath, workspace): | |||
1959 | shutil.rmtree(srctreebase) | 2092 | shutil.rmtree(srctreebase) |
1960 | else: | 2093 | else: |
1961 | # We don't want to risk wiping out any work in progress | 2094 | # We don't want to risk wiping out any work in progress |
1962 | logger.info('Leaving source tree %s as-is; if you no ' | 2095 | if srctreebase.startswith(os.path.join(config.workspace_path, 'sources')): |
1963 | 'longer need it then please delete it manually' | 2096 | from datetime import datetime |
1964 | % srctreebase) | 2097 | preservesrc = os.path.join(config.workspace_path, 'attic', 'sources', "{}.{}".format(pn, datetime.now().strftime("%Y%m%d%H%M%S"))) |
2098 | logger.info('Preserving source tree in %s\nIf you no ' | ||
2099 | 'longer need it then please delete it manually.\n' | ||
2100 | 'It is also possible to reuse it via devtool source tree argument.' | ||
2101 | % preservesrc) | ||
2102 | bb.utils.mkdirhier(os.path.dirname(preservesrc)) | ||
2103 | shutil.move(srctreebase, preservesrc) | ||
2104 | else: | ||
2105 | logger.info('Leaving source tree %s as-is; if you no ' | ||
2106 | 'longer need it then please delete it manually' | ||
2107 | % srctreebase) | ||
1965 | else: | 2108 | else: |
1966 | # This is unlikely, but if it's empty we can just remove it | 2109 | # This is unlikely, but if it's empty we can just remove it |
1967 | os.rmdir(srctreebase) | 2110 | os.rmdir(srctreebase) |
@@ -2221,6 +2364,7 @@ def register_commands(subparsers, context): | |||
2221 | group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true") | 2364 | group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true") |
2222 | parser_add.add_argument('--fetch', '-f', help='Fetch the specified URI and extract it to create the source tree (deprecated - pass as positional argument instead)', metavar='URI') | 2365 | parser_add.add_argument('--fetch', '-f', help='Fetch the specified URI and extract it to create the source tree (deprecated - pass as positional argument instead)', metavar='URI') |
2223 | parser_add.add_argument('--npm-dev', help='For npm, also fetch devDependencies', action="store_true") | 2366 | parser_add.add_argument('--npm-dev', help='For npm, also fetch devDependencies', action="store_true") |
2367 | parser_add.add_argument('--no-pypi', help='Do not inherit pypi class', action="store_true") | ||
2224 | parser_add.add_argument('--version', '-V', help='Version to use within recipe (PV)') | 2368 | parser_add.add_argument('--version', '-V', help='Version to use within recipe (PV)') |
2225 | parser_add.add_argument('--no-git', '-g', help='If fetching source, do not set up source tree as a git repository', action="store_true") | 2369 | parser_add.add_argument('--no-git', '-g', help='If fetching source, do not set up source tree as a git repository', action="store_true") |
2226 | group = parser_add.add_mutually_exclusive_group() | 2370 | group = parser_add.add_mutually_exclusive_group() |