-rwxr-xr-x  scripts/contrib/list-packageconfig-flags.py       |  6
-rwxr-xr-x  scripts/contrib/verify-homepage.py                 |  2
-rwxr-xr-x  scripts/devtool                                    |  2
-rw-r--r--  scripts/lib/devtool/__init__.py                    |  8
-rw-r--r--  scripts/lib/devtool/build_image.py                 | 10
-rw-r--r--  scripts/lib/devtool/deploy.py                      |  2
-rw-r--r--  scripts/lib/devtool/package.py                     |  2
-rw-r--r--  scripts/lib/devtool/runqemu.py                     |  4
-rw-r--r--  scripts/lib/devtool/sdk.py                         |  8
-rw-r--r--  scripts/lib/devtool/search.py                      |  4
-rw-r--r--  scripts/lib/devtool/standard.py                    | 58
-rw-r--r--  scripts/lib/devtool/upgrade.py                     | 20
-rw-r--r--  scripts/lib/devtool/utilcmds.py                    | 18
-rw-r--r--  scripts/lib/recipetool/append.py                   | 22
-rw-r--r--  scripts/lib/recipetool/create.py                   | 24
-rw-r--r--  scripts/lib/recipetool/create_buildsys_python.py   |  4
-rw-r--r--  scripts/lib/recipetool/create_kernel.py            |  4
-rw-r--r--  scripts/lib/recipetool/create_npm.py               |  6
-rw-r--r--  scripts/lib/recipetool/newappend.py                |  2
-rw-r--r--  scripts/lib/scriptutils.py                         |  2
-rwxr-xr-x  scripts/oe-pkgdata-util                            |  2
-rwxr-xr-x  scripts/recipetool                                 |  2
22 files changed, 106 insertions(+), 106 deletions(-)
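
The change applied throughout is mechanical: the explicit expand argument is dropped from datastore getVar() calls, since bitbake's datastore now expands variables by default. A minimal sketch of the pattern, assuming a parsed recipe datastore d (the names here are illustrative only):

    # old form: expansion requested explicitly
    pn = d.getVar('PN', True)

    # new form: expand defaults to True, so the second argument is redundant
    pn = d.getVar('PN')

Calls that deliberately pass False to suppress expansion, such as rd.getVar('SRC_URI', False), are left unchanged.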
diff --git a/scripts/contrib/list-packageconfig-flags.py b/scripts/contrib/list-packageconfig-flags.py
index 389fb97f67..7ce718624a 100755
--- a/scripts/contrib/list-packageconfig-flags.py
+++ b/scripts/contrib/list-packageconfig-flags.py
@@ -76,7 +76,7 @@ def collect_pkgs(data_dict):
     for fn in data_dict:
         pkgconfigflags = data_dict[fn].getVarFlags("PACKAGECONFIG")
         pkgconfigflags.pop('doc', None)
-        pkgname = data_dict[fn].getVar("P", True)
+        pkgname = data_dict[fn].getVar("P")
         pkg_dict[pkgname] = sorted(pkgconfigflags.keys())
 
     return pkg_dict
@@ -124,9 +124,9 @@ def display_all(data_dict):
     ''' Display all pkgs and PACKAGECONFIG information '''
     print(str("").ljust(50, '='))
     for fn in data_dict:
-        print('%s' % data_dict[fn].getVar("P", True))
+        print('%s' % data_dict[fn].getVar("P"))
         print(fn)
-        packageconfig = data_dict[fn].getVar("PACKAGECONFIG", True) or ''
+        packageconfig = data_dict[fn].getVar("PACKAGECONFIG") or ''
         if packageconfig.strip() == '':
             packageconfig = 'None'
         print('PACKAGECONFIG %s' % packageconfig)
diff --git a/scripts/contrib/verify-homepage.py b/scripts/contrib/verify-homepage.py
index d39dd1d973..76f1749cfa 100755
--- a/scripts/contrib/verify-homepage.py
+++ b/scripts/contrib/verify-homepage.py
@@ -44,7 +44,7 @@ def verifyHomepage(bbhandler):
         if realfn in checked:
             continue
         data = bbhandler.parse_recipe_file(realfn)
-        homepage = data.getVar("HOMEPAGE", True)
+        homepage = data.getVar("HOMEPAGE")
         if homepage:
             try:
                 urllib.request.urlopen(homepage, timeout=5)
diff --git a/scripts/devtool b/scripts/devtool
index 578db5a68b..0866be9d5f 100755
--- a/scripts/devtool
+++ b/scripts/devtool
@@ -291,7 +291,7 @@ def main():
         try:
             tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
             try:
-                global_args.bbpath = tinfoil.config_data.getVar('BBPATH', True)
+                global_args.bbpath = tinfoil.config_data.getVar('BBPATH')
             finally:
                 tinfoil.shutdown()
         except bb.BBHandledException:
diff --git a/scripts/lib/devtool/__init__.py b/scripts/lib/devtool/__init__.py
index 99c5534893..fd2f042ba5 100644
--- a/scripts/lib/devtool/__init__.py
+++ b/scripts/lib/devtool/__init__.py
@@ -87,13 +87,13 @@ def exec_watch(cmd, **options):
 def exec_fakeroot(d, cmd, **kwargs):
     """Run a command under fakeroot (pseudo, in fact) so that it picks up the appropriate file permissions"""
     # Grab the command and check it actually exists
-    fakerootcmd = d.getVar('FAKEROOTCMD', True)
+    fakerootcmd = d.getVar('FAKEROOTCMD')
     if not os.path.exists(fakerootcmd):
         logger.error('pseudo executable %s could not be found - have you run a build yet? pseudo-native should install this and if you have run any build then that should have been built')
         return 2
     # Set up the appropriate environment
     newenv = dict(os.environ)
-    fakerootenv = d.getVar('FAKEROOTENV', True)
+    fakerootenv = d.getVar('FAKEROOTENV')
     for varvalue in fakerootenv.split():
         if '=' in varvalue:
             splitval = varvalue.split('=', 1)
@@ -179,7 +179,7 @@ def use_external_build(same_dir, no_same_dir, d):
         logger.info('Using source tree as build directory since --same-dir specified')
     elif bb.data.inherits_class('autotools-brokensep', d):
         logger.info('Using source tree as build directory since recipe inherits autotools-brokensep')
-    elif d.getVar('B', True) == os.path.abspath(d.getVar('S', True)):
+    elif d.getVar('B') == os.path.abspath(d.getVar('S')):
         logger.info('Using source tree as build directory since that would be the default for this recipe')
     else:
         b_is_s = False
@@ -256,7 +256,7 @@ def ensure_npm(config, basepath, fixed_setup=False):
     """
     tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
     try:
-        nativepath = tinfoil.config_data.getVar('STAGING_BINDIR_NATIVE', True)
+        nativepath = tinfoil.config_data.getVar('STAGING_BINDIR_NATIVE')
     finally:
         tinfoil.shutdown()
 
diff --git a/scripts/lib/devtool/build_image.py b/scripts/lib/devtool/build_image.py
index ae75511dc7..e5810389be 100644
--- a/scripts/lib/devtool/build_image.py
+++ b/scripts/lib/devtool/build_image.py
@@ -34,8 +34,8 @@ def _get_packages(tinfoil, workspace, config):
     result = []
     for recipe in workspace:
         data = parse_recipe(config, tinfoil, recipe, True)
-        if 'class-target' in data.getVar('OVERRIDES', True).split(':'):
-            if recipe in data.getVar('PACKAGES', True).split():
+        if 'class-target' in data.getVar('OVERRIDES').split(':'):
+            if recipe in data.getVar('PACKAGES').split():
                 result.append(recipe)
             else:
                 logger.warning("Skipping recipe %s as it doesn't produce a "
@@ -95,7 +95,7 @@ def build_image_task(config, basepath, workspace, image, add_packages=None, task
         raise TargetNotImageError()
 
     # Get the actual filename used and strip the .bb and full path
-    target_basename = rd.getVar('FILE', True)
+    target_basename = rd.getVar('FILE')
     target_basename = os.path.splitext(os.path.basename(target_basename))[0]
     config.set('SDK', 'target_basename', target_basename)
     config.write()
@@ -132,9 +132,9 @@ def build_image_task(config, basepath, workspace, image, add_packages=None, task
             afile.write('%s\n' % line)
 
     if task in ['populate_sdk', 'populate_sdk_ext']:
-        outputdir = rd.getVar('SDK_DEPLOY', True)
+        outputdir = rd.getVar('SDK_DEPLOY')
     else:
-        outputdir = rd.getVar('DEPLOY_DIR_IMAGE', True)
+        outputdir = rd.getVar('DEPLOY_DIR_IMAGE')
 
     tmp_tinfoil = tinfoil
     tinfoil = None
diff --git a/scripts/lib/devtool/deploy.py b/scripts/lib/devtool/deploy.py
index db7dffa307..9ec04e366a 100644
--- a/scripts/lib/devtool/deploy.py
+++ b/scripts/lib/devtool/deploy.py
@@ -160,7 +160,7 @@ def deploy(args, config, basepath, workspace):
         except Exception as e:
             raise DevtoolError('Exception parsing recipe %s: %s' %
                                (args.recipename, e))
-        recipe_outdir = rd.getVar('D', True)
+        recipe_outdir = rd.getVar('D')
         if not os.path.exists(recipe_outdir) or not os.listdir(recipe_outdir):
             raise DevtoolError('No files to deploy - have you built the %s '
                                'recipe? If so, the install step has not installed '
diff --git a/scripts/lib/devtool/package.py b/scripts/lib/devtool/package.py
index 47640641d1..b4f4720fd3 100644
--- a/scripts/lib/devtool/package.py
+++ b/scripts/lib/devtool/package.py
@@ -32,7 +32,7 @@ def package(args, config, basepath, workspace):
     try:
         image_pkgtype = config.get('Package', 'image_pkgtype', '')
         if not image_pkgtype:
-            image_pkgtype = tinfoil.config_data.getVar('IMAGE_PKGTYPE', True)
+            image_pkgtype = tinfoil.config_data.getVar('IMAGE_PKGTYPE')
 
         deploy_dir_pkg = tinfoil.config_data.getVar('DEPLOY_DIR_%s' % image_pkgtype.upper(), True)
     finally:
diff --git a/scripts/lib/devtool/runqemu.py b/scripts/lib/devtool/runqemu.py
index ae25cee08c..641664e565 100644
--- a/scripts/lib/devtool/runqemu.py
+++ b/scripts/lib/devtool/runqemu.py
@@ -31,8 +31,8 @@ def runqemu(args, config, basepath, workspace):
 
     tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
     try:
-        machine = tinfoil.config_data.getVar('MACHINE', True)
-        bindir_native = tinfoil.config_data.getVar('STAGING_BINDIR_NATIVE', True)
+        machine = tinfoil.config_data.getVar('MACHINE')
+        bindir_native = tinfoil.config_data.getVar('STAGING_BINDIR_NATIVE')
     finally:
         tinfoil.shutdown()
 
diff --git a/scripts/lib/devtool/sdk.py b/scripts/lib/devtool/sdk.py
index 922277b79f..f629db1876 100644
--- a/scripts/lib/devtool/sdk.py
+++ b/scripts/lib/devtool/sdk.py
@@ -132,9 +132,9 @@ def sdk_update(args, config, basepath, workspace):
     # Grab variable values
     tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
     try:
-        stamps_dir = tinfoil.config_data.getVar('STAMPS_DIR', True)
-        sstate_mirrors = tinfoil.config_data.getVar('SSTATE_MIRRORS', True)
-        site_conf_version = tinfoil.config_data.getVar('SITE_CONF_VERSION', True)
+        stamps_dir = tinfoil.config_data.getVar('STAMPS_DIR')
+        sstate_mirrors = tinfoil.config_data.getVar('SSTATE_MIRRORS')
+        site_conf_version = tinfoil.config_data.getVar('SITE_CONF_VERSION')
     finally:
         tinfoil.shutdown()
 
@@ -273,7 +273,7 @@ def sdk_install(args, config, basepath, workspace):
         rd = parse_recipe(config, tinfoil, recipe, True)
         if not rd:
             return 1
-        stampprefixes[recipe] = '%s.%s' % (rd.getVar('STAMP', True), tasks[0])
+        stampprefixes[recipe] = '%s.%s' % (rd.getVar('STAMP'), tasks[0])
         if checkstamp(recipe):
             logger.info('%s is already installed' % recipe)
         else:
diff --git a/scripts/lib/devtool/search.py b/scripts/lib/devtool/search.py
index b44bed7f6f..054985b85d 100644
--- a/scripts/lib/devtool/search.py
+++ b/scripts/lib/devtool/search.py
@@ -31,7 +31,7 @@ def search(args, config, basepath, workspace):
 
     tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
    try:
-        pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR', True)
+        pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')
         defsummary = tinfoil.config_data.getVar('SUMMARY', False) or ''
 
         keyword_rc = re.compile(args.keyword)
@@ -70,7 +70,7 @@ def search(args, config, basepath, workspace):
 
             if match:
                 rd = parse_recipe(config, tinfoil, fn, True)
-                summary = rd.getVar('SUMMARY', True)
+                summary = rd.getVar('SUMMARY')
                 if summary == rd.expand(defsummary):
                     summary = ''
                 print("%s %s" % (fn.ljust(20), summary))
diff --git a/scripts/lib/devtool/standard.py b/scripts/lib/devtool/standard.py
index c52b00678e..e662e3b505 100644
--- a/scripts/lib/devtool/standard.py
+++ b/scripts/lib/devtool/standard.py
@@ -303,7 +303,7 @@ def _check_compatible_recipe(pn, d):
         raise DevtoolError("The %s recipe is a meta-recipe, and therefore is "
                            "not supported by this tool" % pn, 4)
 
-    if bb.data.inherits_class('externalsrc', d) and d.getVar('EXTERNALSRC', True):
+    if bb.data.inherits_class('externalsrc', d) and d.getVar('EXTERNALSRC'):
         # Not an incompatibility error per se, so we don't pass the error code
         raise DevtoolError("externalsrc is currently enabled for the %s "
                            "recipe. This prevents the normal do_patch task "
@@ -439,7 +439,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, d, tinfoil):
    """Extract sources of a recipe"""
    import oe.recipeutils
 
-    pn = d.getVar('PN', True)
+    pn = d.getVar('PN')
 
    _check_compatible_recipe(pn, d)
 
@@ -473,13 +473,13 @@ def _extract_source(srctree, keep_temp, devbranch, sync, d, tinfoil):
         # Make a subdir so we guard against WORKDIR==S
         workdir = os.path.join(tempdir, 'workdir')
         crd.setVar('WORKDIR', workdir)
-        if not crd.getVar('S', True).startswith(workdir):
+        if not crd.getVar('S').startswith(workdir):
             # Usually a shared workdir recipe (kernel, gcc)
             # Try to set a reasonable default
             if bb.data.inherits_class('kernel', d):
                 crd.setVar('S', '${WORKDIR}/source')
             else:
-                crd.setVar('S', '${WORKDIR}/%s' % os.path.basename(d.getVar('S', True)))
+                crd.setVar('S', '${WORKDIR}/%s' % os.path.basename(d.getVar('S')))
         if bb.data.inherits_class('kernel', d):
             # We don't want to move the source to STAGING_KERNEL_DIR here
             crd.setVar('STAGING_KERNEL_DIR', '${S}')
@@ -533,7 +533,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, d, tinfoil):
             # Extra step for kernel to populate the source directory
             runtask(fn, 'kernel_checkout')
 
-        srcsubdir = crd.getVar('S', True)
+        srcsubdir = crd.getVar('S')
 
         # Move local source files into separate subdir
         recipe_patches = [os.path.basename(patch) for patch in
@@ -581,7 +581,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, d, tinfoil):
                                "doesn't use any source or the correct source "
                                "directory could not be determined" % pn)
 
-        setup_git_repo(srcsubdir, crd.getVar('PV', True), devbranch, d=d)
+        setup_git_repo(srcsubdir, crd.getVar('PV'), devbranch, d=d)
 
         (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srcsubdir)
         initial_rev = stdout.rstrip()
@@ -596,7 +596,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, d, tinfoil):
             # Store generate and store kernel config
             logger.info('Generating kernel config')
             runtask(fn, 'configure')
-            kconfig = os.path.join(crd.getVar('B', True), '.config')
+            kconfig = os.path.join(crd.getVar('B'), '.config')
 
 
         tempdir_localdir = os.path.join(tempdir, 'oe-local-files')
@@ -628,7 +628,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, d, tinfoil):
 
         shutil.move(srcsubdir, srctree)
 
-        if os.path.abspath(d.getVar('S', True)) == os.path.abspath(d.getVar('WORKDIR', True)):
+        if os.path.abspath(d.getVar('S')) == os.path.abspath(d.getVar('WORKDIR')):
             # If recipe extracts to ${WORKDIR}, symlink the files into the srctree
             # (otherwise the recipe won't build as expected)
             local_files_dir = os.path.join(srctree, 'oe-local-files')
@@ -725,7 +725,7 @@ def modify(args, config, basepath, workspace):
    if not rd:
        return 1
 
-    pn = rd.getVar('PN', True)
+    pn = rd.getVar('PN')
    if pn != args.recipename:
        logger.info('Mapping %s to %s' % (args.recipename, pn))
    if pn in workspace:
@@ -747,7 +747,7 @@ def modify(args, config, basepath, workspace):
            # Error already shown
            return 1
 
-    recipefile = rd.getVar('FILE', True)
+    recipefile = rd.getVar('FILE')
    appendfile = recipe_to_append(recipefile, config, args.wildcard)
    if os.path.exists(appendfile):
        raise DevtoolError("Another variant of recipe %s is already in your "
@@ -784,8 +784,8 @@ def modify(args, config, basepath, workspace):
            initial_rev = stdout.rstrip()
 
    # Check that recipe isn't using a shared workdir
-    s = os.path.abspath(rd.getVar('S', True))
-    workdir = os.path.abspath(rd.getVar('WORKDIR', True))
+    s = os.path.abspath(rd.getVar('S'))
+    workdir = os.path.abspath(rd.getVar('WORKDIR'))
    if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir:
        # Handle if S is set to a subdirectory of the source
        srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1]
@@ -866,17 +866,17 @@ def rename(args, config, basepath, workspace):
        if not rd:
            return 1
 
-        bp = rd.getVar('BP', True)
-        bpn = rd.getVar('BPN', True)
+        bp = rd.getVar('BP')
+        bpn = rd.getVar('BPN')
        if newname != args.recipename:
            localdata = rd.createCopy()
            localdata.setVar('PN', newname)
-            newbpn = localdata.getVar('BPN', True)
+            newbpn = localdata.getVar('BPN')
        else:
            newbpn = bpn
        s = rd.getVar('S', False)
        src_uri = rd.getVar('SRC_URI', False)
-        pv = rd.getVar('PV', True)
+        pv = rd.getVar('PV')
 
        # Correct variable values that refer to the upstream source - these
        # values must stay the same, so if the name/version are changing then
@@ -1277,8 +1277,8 @@ def _export_local_files(srctree, rd, destdir):
            elif fname != '.gitignore':
                added[fname] = None
 
-    workdir = rd.getVar('WORKDIR', True)
-    s = rd.getVar('S', True)
+    workdir = rd.getVar('WORKDIR')
+    s = rd.getVar('S')
    if not s.endswith(os.sep):
        s += os.sep
 
@@ -1300,14 +1300,14 @@ def _export_local_files(srctree, rd, destdir):
 
 def _determine_files_dir(rd):
    """Determine the appropriate files directory for a recipe"""
-    recipedir = rd.getVar('FILE_DIRNAME', True)
-    for entry in rd.getVar('FILESPATH', True).split(':'):
+    recipedir = rd.getVar('FILE_DIRNAME')
+    for entry in rd.getVar('FILESPATH').split(':'):
        relpth = os.path.relpath(entry, recipedir)
        if not os.sep in relpth:
            # One (or zero) levels below only, so we don't put anything in machine-specific directories
            if os.path.isdir(entry):
                return entry
-    return os.path.join(recipedir, rd.getVar('BPN', True))
+    return os.path.join(recipedir, rd.getVar('BPN'))
 
 
 def _update_recipe_srcrev(srctree, rd, appendlayerdir, wildcard_version, no_remove):
@@ -1315,7 +1315,7 @@ def _update_recipe_srcrev(srctree, rd, appendlayerdir, wildcard_version, no_remo
    import bb
    import oe.recipeutils
 
-    recipefile = rd.getVar('FILE', True)
+    recipefile = rd.getVar('FILE')
    logger.info('Updating SRCREV in recipe %s' % os.path.basename(recipefile))
 
    # Get HEAD revision
@@ -1397,7 +1397,7 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
    import bb
    import oe.recipeutils
 
-    recipefile = rd.getVar('FILE', True)
+    recipefile = rd.getVar('FILE')
    append = workspace[recipename]['bbappend']
    if not os.path.exists(append):
        raise DevtoolError('unable to find workspace bbappend for recipe %s' %
@@ -1408,7 +1408,7 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
        raise DevtoolError('Unable to find initial revision - please specify '
                           'it with --initial-rev')
 
-    dl_dir = rd.getVar('DL_DIR', True)
+    dl_dir = rd.getVar('DL_DIR')
    if not dl_dir.endswith('/'):
        dl_dir += '/'
 
@@ -1567,7 +1567,7 @@ def update_recipe(args, config, basepath, workspace):
         updated = _update_recipe(args.recipename, workspace, rd, args.mode, args.append, args.wildcard_version, args.no_remove, args.initial_rev)
 
         if updated:
-            rf = rd.getVar('FILE', True)
+            rf = rd.getVar('FILE')
             if rf.startswith(config.workspace_path):
                 logger.warn('Recipe file %s has been updated but is inside the workspace - you will need to move it (and any associated files next to it) out to the desired layer before using "devtool reset" in order to keep any changes' % rf)
     finally:
@@ -1671,7 +1671,7 @@ def reset(args, config, basepath, workspace):
 
 def _get_layer(layername, d):
     """Determine the base layer path for the specified layer name/path"""
-    layerdirs = d.getVar('BBLAYERS', True).split()
+    layerdirs = d.getVar('BBLAYERS').split()
     layers = {os.path.basename(p): p for p in layerdirs}
     # Provide some shortcuts
     if layername.lower() in ['oe-core', 'openembedded-core']:
@@ -1697,7 +1697,7 @@ def finish(args, config, basepath, workspace):
            return 1
 
        destlayerdir = _get_layer(args.destination, tinfoil.config_data)
-        origlayerdir = oe.recipeutils.find_layerdir(rd.getVar('FILE', True))
+        origlayerdir = oe.recipeutils.find_layerdir(rd.getVar('FILE'))
 
        if not os.path.isdir(destlayerdir):
            raise DevtoolError('Unable to find layer or directory matching "%s"' % args.destination)
@@ -1728,7 +1728,7 @@ def finish(args, config, basepath, workspace):
        if not destpath:
            raise DevtoolError("Unable to determine destination layer path - check that %s specifies an actual layer and %s/conf/layer.conf specifies BBFILES. You may also need to specify a more complete path." % (args.destination, destlayerdir))
        # Warn if the layer isn't in bblayers.conf (the code to create a bbappend will do this in other cases)
-        layerdirs = [os.path.abspath(layerdir) for layerdir in rd.getVar('BBLAYERS', True).split()]
+        layerdirs = [os.path.abspath(layerdir) for layerdir in rd.getVar('BBLAYERS').split()]
        if not os.path.abspath(destlayerdir) in layerdirs:
            bb.warn('Specified destination layer is not currently enabled in bblayers.conf, so the %s recipe will now be unavailable in your current configuration until you add the layer there' % args.recipename)
 
@@ -1758,7 +1758,7 @@ def finish(args, config, basepath, workspace):
        # associated files to the specified layer
        no_clean = True
        logger.info('Moving recipe file to %s' % destpath)
-        recipedir = os.path.dirname(rd.getVar('FILE', True))
+        recipedir = os.path.dirname(rd.getVar('FILE'))
        for root, _, files in os.walk(recipedir):
            for fn in files:
                srcpath = os.path.join(root, fn)
diff --git a/scripts/lib/devtool/upgrade.py b/scripts/lib/devtool/upgrade.py
index d89e9a23ac..9595f3e7a4 100644
--- a/scripts/lib/devtool/upgrade.py
+++ b/scripts/lib/devtool/upgrade.py
@@ -68,7 +68,7 @@ def _remove_patch_dirs(recipefolder):
            shutil.rmtree(os.path.join(root,d))
 
 def _recipe_contains(rd, var):
-    rf = rd.getVar('FILE', True)
+    rf = rd.getVar('FILE')
    varfiles = oe.recipeutils.get_var_files(rf, [var], rd)
    for var, fn in varfiles.items():
        if fn and fn.startswith(os.path.dirname(rf) + os.sep):
@@ -132,7 +132,7 @@ def _write_append(rc, srctree, same_dir, no_same_dir, rev, copied, workspace, d)
        if rev:
            f.write('# initial_rev: %s\n' % rev)
        if copied:
-            f.write('# original_path: %s\n' % os.path.dirname(d.getVar('FILE', True)))
+            f.write('# original_path: %s\n' % os.path.dirname(d.getVar('FILE')))
            f.write('# original_files: %s\n' % ' '.join(copied))
    return af
 
@@ -154,7 +154,7 @@ def _upgrade_error(e, rf, srctree):
    raise DevtoolError(e)
 
 def _get_uri(rd):
-    srcuris = rd.getVar('SRC_URI', True).split()
+    srcuris = rd.getVar('SRC_URI').split()
    if not len(srcuris):
        raise DevtoolError('SRC_URI not found on recipe')
    # Get first non-local entry in SRC_URI - usually by convention it's
@@ -185,7 +185,7 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, branch, keep_temp, tin
 
    crd = rd.createCopy()
 
-    pv = crd.getVar('PV', True)
+    pv = crd.getVar('PV')
    crd.setVar('PV', newpv)
 
    tmpsrctree = None
@@ -270,15 +270,15 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, branch, keep_temp, tin
 def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, workspace, tinfoil, rd):
    """Creates the new recipe under workspace"""
 
-    bpn = rd.getVar('BPN', True)
+    bpn = rd.getVar('BPN')
    path = os.path.join(workspace, 'recipes', bpn)
    bb.utils.mkdirhier(path)
    copied, _ = oe.recipeutils.copy_recipe_files(rd, path)
 
-    oldpv = rd.getVar('PV', True)
+    oldpv = rd.getVar('PV')
    if not newpv:
        newpv = oldpv
-    origpath = rd.getVar('FILE', True)
+    origpath = rd.getVar('FILE')
    fullpath = _rename_recipe_files(origpath, bpn, oldpv, newpv, path)
    logger.debug('Upgraded %s => %s' % (origpath, fullpath))
 
@@ -341,7 +341,7 @@ def upgrade(args, config, basepath, workspace):
    if not rd:
        return 1
 
-    pn = rd.getVar('PN', True)
+    pn = rd.getVar('PN')
    if pn != args.recipename:
        logger.info('Mapping %s to %s' % (args.recipename, pn))
    if pn in workspace:
@@ -353,12 +353,12 @@ def upgrade(args, config, basepath, workspace):
        srctree = standard.get_default_srctree(config, pn)
 
    standard._check_compatible_recipe(pn, rd)
-    old_srcrev = rd.getVar('SRCREV', True)
+    old_srcrev = rd.getVar('SRCREV')
    if old_srcrev == 'INVALID':
        old_srcrev = None
    if old_srcrev and not args.srcrev:
        raise DevtoolError("Recipe specifies a SRCREV value; you must specify a new one when upgrading")
-    if rd.getVar('PV', True) == args.version and old_srcrev == args.srcrev:
+    if rd.getVar('PV') == args.version and old_srcrev == args.srcrev:
        raise DevtoolError("Current and upgrade versions are the same version")
 
    rf = None
diff --git a/scripts/lib/devtool/utilcmds.py b/scripts/lib/devtool/utilcmds.py
index b761a80f8f..0437e6417c 100644
--- a/scripts/lib/devtool/utilcmds.py
+++ b/scripts/lib/devtool/utilcmds.py
@@ -39,7 +39,7 @@ def edit_recipe(args, config, basepath, workspace):
            rd = parse_recipe(config, tinfoil, args.recipename, True)
            if not rd:
                return 1
-            recipefile = rd.getVar('FILE', True)
+            recipefile = rd.getVar('FILE')
        finally:
            tinfoil.shutdown()
    else:
@@ -62,20 +62,20 @@ def configure_help(args, config, basepath, workspace):
        rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False)
        if not rd:
            return 1
-        b = rd.getVar('B', True)
-        s = rd.getVar('S', True)
+        b = rd.getVar('B')
+        s = rd.getVar('S')
        configurescript = os.path.join(s, 'configure')
        confdisabled = 'noexec' in rd.getVarFlags('do_configure') or 'do_configure' not in (rd.getVar('__BBTASKS', False) or [])
-        configureopts = oe.utils.squashspaces(rd.getVar('CONFIGUREOPTS', True) or '')
-        extra_oeconf = oe.utils.squashspaces(rd.getVar('EXTRA_OECONF', True) or '')
-        extra_oecmake = oe.utils.squashspaces(rd.getVar('EXTRA_OECMAKE', True) or '')
-        do_configure = rd.getVar('do_configure', True) or ''
+        configureopts = oe.utils.squashspaces(rd.getVar('CONFIGUREOPTS') or '')
+        extra_oeconf = oe.utils.squashspaces(rd.getVar('EXTRA_OECONF') or '')
+        extra_oecmake = oe.utils.squashspaces(rd.getVar('EXTRA_OECMAKE') or '')
+        do_configure = rd.getVar('do_configure') or ''
        do_configure_noexpand = rd.getVar('do_configure', False) or ''
        packageconfig = rd.getVarFlags('PACKAGECONFIG') or []
        autotools = bb.data.inherits_class('autotools', rd) and ('oe_runconf' in do_configure or 'autotools_do_configure' in do_configure)
        cmake = bb.data.inherits_class('cmake', rd) and ('cmake_do_configure' in do_configure)
-        cmake_do_configure = rd.getVar('cmake_do_configure', True)
-        pn = rd.getVar('PN', True)
+        cmake_do_configure = rd.getVar('cmake_do_configure')
+        pn = rd.getVar('PN')
    finally:
        tinfoil.shutdown()
 
diff --git a/scripts/lib/recipetool/append.py b/scripts/lib/recipetool/append.py
index 3e85a0cb0f..def4f9027c 100644
--- a/scripts/lib/recipetool/append.py
+++ b/scripts/lib/recipetool/append.py
@@ -48,7 +48,7 @@ def find_target_file(targetpath, d, pkglist=None):
    """Find the recipe installing the specified target path, optionally limited to a select list of packages"""
    import json
 
-    pkgdata_dir = d.getVar('PKGDATA_DIR', True)
+    pkgdata_dir = d.getVar('PKGDATA_DIR')
 
    # The mix between /etc and ${sysconfdir} here may look odd, but it is just
    # being consistent with usage elsewhere
@@ -110,8 +110,8 @@ def determine_file_source(targetpath, rd):
    import oe.recipeutils
 
    # See if it's in do_install for the recipe
-    workdir = rd.getVar('WORKDIR', True)
-    src_uri = rd.getVar('SRC_URI', True)
+    workdir = rd.getVar('WORKDIR')
+    src_uri = rd.getVar('SRC_URI')
    srcfile = ''
    modpatches = []
    elements = check_do_install(rd, targetpath)
@@ -190,7 +190,7 @@ def get_source_path(cmdelements):
 
 def get_func_deps(func, d):
    """Find the function dependencies of a shell function"""
-    deps = bb.codeparser.ShellParser(func, logger).parse_shell(d.getVar(func, True))
+    deps = bb.codeparser.ShellParser(func, logger).parse_shell(d.getVar(func))
    deps |= set((d.getVarFlag(func, "vardeps", True) or "").split())
    funcdeps = []
    for dep in deps:
@@ -200,12 +200,12 @@ def get_func_deps(func, d):
 
 def check_do_install(rd, targetpath):
    """Look at do_install for a command that installs/copies the specified target path"""
-    instpath = os.path.abspath(os.path.join(rd.getVar('D', True), targetpath.lstrip('/')))
-    do_install = rd.getVar('do_install', True)
+    instpath = os.path.abspath(os.path.join(rd.getVar('D'), targetpath.lstrip('/')))
+    do_install = rd.getVar('do_install')
    # Handle where do_install calls other functions (somewhat crudely, but good enough for this purpose)
    deps = get_func_deps('do_install', rd)
    for dep in deps:
-        do_install = do_install.replace(dep, rd.getVar(dep, True))
+        do_install = do_install.replace(dep, rd.getVar(dep))
 
    # Look backwards through do_install as we want to catch where a later line (perhaps
    # from a bbappend) is writing over the top
@@ -322,12 +322,12 @@ def appendfile(args):
 
 def appendsrc(args, files, rd, extralines=None):
    import oe.recipeutils
 
-    srcdir = rd.getVar('S', True)
-    workdir = rd.getVar('WORKDIR', True)
+    srcdir = rd.getVar('S')
+    workdir = rd.getVar('WORKDIR')
 
    import bb.fetch
    simplified = {}
-    src_uri = rd.getVar('SRC_URI', True).split()
+    src_uri = rd.getVar('SRC_URI').split()
    for uri in src_uri:
        if uri.endswith(';'):
            uri = uri[:-1]
@@ -340,7 +340,7 @@ def appendsrc(args, files, rd, extralines=None):
    for newfile, srcfile in files.items():
        src_destdir = os.path.dirname(srcfile)
        if not args.use_workdir:
-            if rd.getVar('S', True) == rd.getVar('STAGING_KERNEL_DIR', True):
+            if rd.getVar('S') == rd.getVar('STAGING_KERNEL_DIR'):
                srcdir = os.path.join(workdir, 'git')
                if not bb.data.inherits_class('kernel-yocto', rd):
                    logger.warn('S == STAGING_KERNEL_DIR and non-kernel-yocto, unable to determine path to srcdir, defaulting to ${WORKDIR}/git')
diff --git a/scripts/lib/recipetool/create.py b/scripts/lib/recipetool/create.py
index ab265318e5..0801223582 100644
--- a/scripts/lib/recipetool/create.py
+++ b/scripts/lib/recipetool/create.py
@@ -68,8 +68,8 @@ class RecipeHandler(object):
            return
        # First build up library->package mapping
        shlib_providers = oe.package.read_shlib_providers(d)
-        libdir = d.getVar('libdir', True)
-        base_libdir = d.getVar('base_libdir', True)
+        libdir = d.getVar('libdir')
+        base_libdir = d.getVar('base_libdir')
        libpaths = list(set([base_libdir, libdir]))
        libname_re = re.compile('^lib(.+)\.so.*$')
        pkglibmap = {}
@@ -85,7 +85,7 @@
                logger.debug('unable to extract library name from %s' % lib)
 
        # Now turn it into a library->recipe mapping
-        pkgdata_dir = d.getVar('PKGDATA_DIR', True)
+        pkgdata_dir = d.getVar('PKGDATA_DIR')
        for libname, pkg in pkglibmap.items():
            try:
                with open(os.path.join(pkgdata_dir, 'runtime', pkg)) as f:
@@ -109,9 +109,9 @@
        '''Build up development file->recipe mapping'''
        if RecipeHandler.recipeheadermap:
            return
-        pkgdata_dir = d.getVar('PKGDATA_DIR', True)
-        includedir = d.getVar('includedir', True)
-        cmakedir = os.path.join(d.getVar('libdir', True), 'cmake')
+        pkgdata_dir = d.getVar('PKGDATA_DIR')
+        includedir = d.getVar('includedir')
+        cmakedir = os.path.join(d.getVar('libdir'), 'cmake')
        for pkg in glob.glob(os.path.join(pkgdata_dir, 'runtime', '*-dev')):
            with open(os.path.join(pkgdata_dir, 'runtime', pkg)) as f:
                pn = None
@@ -140,9 +140,9 @@
        '''Build up native binary->recipe mapping'''
        if RecipeHandler.recipebinmap:
            return
-        sstate_manifests = d.getVar('SSTATE_MANIFESTS', True)
-        staging_bindir_native = d.getVar('STAGING_BINDIR_NATIVE', True)
-        build_arch = d.getVar('BUILD_ARCH', True)
+        sstate_manifests = d.getVar('SSTATE_MANIFESTS')
+        staging_bindir_native = d.getVar('STAGING_BINDIR_NATIVE')
+        build_arch = d.getVar('BUILD_ARCH')
        fileprefix = 'manifest-%s-' % build_arch
        for fn in glob.glob(os.path.join(sstate_manifests, '%s*-native.populate_sysroot' % fileprefix)):
            with open(fn, 'r') as f:
@@ -837,7 +837,7 @@ def get_license_md5sums(d, static_only=False):
    md5sums = {}
    if not static_only:
        # Gather md5sums of license files in common license dir
-        commonlicdir = d.getVar('COMMON_LICENSE_DIR', True)
+        commonlicdir = d.getVar('COMMON_LICENSE_DIR')
        for fn in os.listdir(commonlicdir):
            md5value = bb.utils.md5_file(os.path.join(commonlicdir, fn))
            md5sums[md5value] = fn
@@ -1007,7 +1007,7 @@ def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn
    return outlicenses
 
 def read_pkgconfig_provides(d):
-    pkgdatadir = d.getVar('PKGDATA_DIR', True)
+    pkgdatadir = d.getVar('PKGDATA_DIR')
    pkgmap = {}
    for fn in glob.glob(os.path.join(pkgdatadir, 'shlibs2', '*.pclist')):
        with open(fn, 'r') as f:
@@ -1117,7 +1117,7 @@ def convert_rpm_xml(xmlfile):
 
 
 def check_npm(d, debugonly=False):
-    if not os.path.exists(os.path.join(d.getVar('STAGING_BINDIR_NATIVE', True), 'npm')):
+    if not os.path.exists(os.path.join(d.getVar('STAGING_BINDIR_NATIVE'), 'npm')):
        log_error_cond('npm required to process specified source, but npm is not available - you need to build nodejs-native first', debugonly)
        sys.exit(14)
 
diff --git a/scripts/lib/recipetool/create_buildsys_python.py b/scripts/lib/recipetool/create_buildsys_python.py
index 82a2be1224..ec5449bee9 100644
--- a/scripts/lib/recipetool/create_buildsys_python.py
+++ b/scripts/lib/recipetool/create_buildsys_python.py
@@ -532,11 +532,11 @@ class PythonRecipeHandler(RecipeHandler):
 
    def parse_pkgdata_for_python_packages(self):
        suffixes = [t[0] for t in imp.get_suffixes()]
-        pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR', True)
+        pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')
 
        ldata = tinfoil.config_data.createCopy()
        bb.parse.handle('classes/python-dir.bbclass', ldata, True)
-        python_sitedir = ldata.getVar('PYTHON_SITEPACKAGES_DIR', True)
+        python_sitedir = ldata.getVar('PYTHON_SITEPACKAGES_DIR')
 
        dynload_dir = os.path.join(os.path.dirname(python_sitedir), 'lib-dynload')
        python_dirs = [python_sitedir + os.sep,
diff --git a/scripts/lib/recipetool/create_kernel.py b/scripts/lib/recipetool/create_kernel.py
index 7dac59fd03..ca4996c7ac 100644
--- a/scripts/lib/recipetool/create_kernel.py
+++ b/scripts/lib/recipetool/create_kernel.py
@@ -41,7 +41,7 @@ class KernelRecipeHandler(RecipeHandler):
        handled.append('buildsystem')
        del lines_after[:]
        del classes[:]
-        template = os.path.join(tinfoil.config_data.getVar('COREBASE', True), 'meta-skeleton', 'recipes-kernel', 'linux', 'linux-yocto-custom.bb')
+        template = os.path.join(tinfoil.config_data.getVar('COREBASE'), 'meta-skeleton', 'recipes-kernel', 'linux', 'linux-yocto-custom.bb')
        def handle_var(varname, origvalue, op, newlines):
            if varname in ['SRCREV', 'SRCREV_machine']:
                while newlines[-1].startswith('#'):
@@ -85,7 +85,7 @@
            elif varname == 'COMPATIBLE_MACHINE':
                while newlines[-1].startswith('#'):
                    del newlines[-1]
-                machine = tinfoil.config_data.getVar('MACHINE', True)
+                machine = tinfoil.config_data.getVar('MACHINE')
                return machine, op, 0, True
            return origvalue, op, 0, True
        with open(template, 'r') as f:
diff --git a/scripts/lib/recipetool/create_npm.py b/scripts/lib/recipetool/create_npm.py
index 7bb844cb0c..888aa2b00a 100644
--- a/scripts/lib/recipetool/create_npm.py
+++ b/scripts/lib/recipetool/create_npm.py
@@ -49,7 +49,7 @@ class NpmRecipeHandler(RecipeHandler):
 
    def _shrinkwrap(self, srctree, localfilesdir, extravalues, lines_before):
        try:
-            runenv = dict(os.environ, PATH=tinfoil.config_data.getVar('PATH', True))
+            runenv = dict(os.environ, PATH=tinfoil.config_data.getVar('PATH'))
            bb.process.run('npm shrinkwrap', cwd=srctree, stderr=subprocess.STDOUT, env=runenv, shell=True)
        except bb.process.ExecutionError as e:
            logger.warn('npm shrinkwrap failed:\n%s' % e.stdout)
@@ -62,7 +62,7 @@
        lines_before.append('NPM_SHRINKWRAP := "${THISDIR}/${PN}/npm-shrinkwrap.json"')
 
    def _lockdown(self, srctree, localfilesdir, extravalues, lines_before):
-        runenv = dict(os.environ, PATH=tinfoil.config_data.getVar('PATH', True))
+        runenv = dict(os.environ, PATH=tinfoil.config_data.getVar('PATH'))
        if not NpmRecipeHandler.lockdownpath:
            NpmRecipeHandler.lockdownpath = tempfile.mkdtemp('recipetool-npm-lockdown')
            bb.process.run('npm install lockdown --prefix %s' % NpmRecipeHandler.lockdownpath,
@@ -257,7 +257,7 @@
            if version != '*' and not '/' in version:
                pkgfullname += "@'%s'" % version
            logger.debug(2, "Calling getdeps on %s" % pkg)
-            runenv = dict(os.environ, PATH=d.getVar('PATH', True))
+            runenv = dict(os.environ, PATH=d.getVar('PATH'))
            fetchcmd = "npm view %s --json" % pkgfullname
            output, _ = bb.process.run(fetchcmd, stderr=subprocess.STDOUT, env=runenv, shell=True)
            data = self._parse_view(output)
diff --git a/scripts/lib/recipetool/newappend.py b/scripts/lib/recipetool/newappend.py
index 376084035f..0b63759d8c 100644
--- a/scripts/lib/recipetool/newappend.py
+++ b/scripts/lib/recipetool/newappend.py
@@ -60,7 +60,7 @@ def newappend(args):
    if not path_ok:
        logger.warn('Unable to determine correct subdirectory path for bbappend file - check that what %s adds to BBFILES also matches .bbappend files. Using %s for now, but until you fix this the bbappend will not be applied.', os.path.join(args.destlayer, 'conf', 'layer.conf'), os.path.dirname(append_path))
 
-    layerdirs = [os.path.abspath(layerdir) for layerdir in rd.getVar('BBLAYERS', True).split()]
+    layerdirs = [os.path.abspath(layerdir) for layerdir in rd.getVar('BBLAYERS').split()]
    if not os.path.abspath(args.destlayer) in layerdirs:
        logger.warn('Specified layer is not currently enabled in bblayers.conf, you will need to add it before this bbappend will be active')
 
diff --git a/scripts/lib/scriptutils.py b/scripts/lib/scriptutils.py
index 27d82b62b8..42337830eb 100644
--- a/scripts/lib/scriptutils.py
+++ b/scripts/lib/scriptutils.py
@@ -93,7 +93,7 @@ def fetch_uri(d, uri, destdir, srcrev=None):
        fetcher.download()
        for u in fetcher.ud:
            ud = fetcher.ud[u]
-            if ud.localpath.rstrip(os.sep) == localdata.getVar('DL_DIR', True).rstrip(os.sep):
+            if ud.localpath.rstrip(os.sep) == localdata.getVar('DL_DIR').rstrip(os.sep):
                raise Exception('Local path is download directory - please check that the URI "%s" is correct' % uri)
        fetcher.unpack(destdir)
        for u in fetcher.ud:
diff --git a/scripts/oe-pkgdata-util b/scripts/oe-pkgdata-util
index fe41f41fa3..6255662a4b 100755
--- a/scripts/oe-pkgdata-util
+++ b/scripts/oe-pkgdata-util
@@ -570,7 +570,7 @@ def main():
    logger.debug('Found bitbake path: %s' % bitbakepath)
    tinfoil = tinfoil_init()
    try:
-        args.pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR', True)
+        args.pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')
    finally:
        tinfoil.shutdown()
    logger.debug('Value of PKGDATA_DIR is "%s"' % args.pkgdata_dir)
diff --git a/scripts/recipetool b/scripts/recipetool
index 54cbc857dc..3765ec7cf9 100755
--- a/scripts/recipetool
+++ b/scripts/recipetool
@@ -79,7 +79,7 @@ def main():
 
    tinfoil = tinfoil_init(False)
    try:
-        for path in (tinfoil.config_data.getVar('BBPATH', True).split(':')
+        for path in (tinfoil.config_data.getVar('BBPATH').split(':')
                     + [scripts_path]):
            pluginpath = os.path.join(path, 'lib', 'recipetool')
            scriptutils.load_plugins(logger, plugins, pluginpath)