Diffstat (limited to 'meta/classes')
-rw-r--r-- | meta/classes/autotools.bbclass             | 128
-rw-r--r-- | meta/classes/base.bbclass                  |   2
-rw-r--r-- | meta/classes/ccache.bbclass                |   3
-rw-r--r-- | meta/classes/cross-canadian.bbclass        |   5
-rw-r--r-- | meta/classes/cross.bbclass                 |   2
-rw-r--r-- | meta/classes/crosssdk.bbclass              |   1
-rw-r--r-- | meta/classes/gobject-introspection.bbclass |   2
-rw-r--r-- | meta/classes/image.bbclass                 |   6
-rw-r--r-- | meta/classes/insane.bbclass                |   3
-rw-r--r-- | meta/classes/module.bbclass                |   2
-rw-r--r-- | meta/classes/native.bbclass                |   2
-rw-r--r-- | meta/classes/nativesdk.bbclass             |   4
-rw-r--r-- | meta/classes/package.bbclass               |   3
-rw-r--r-- | meta/classes/pixbufcache.bbclass           |  21
-rw-r--r-- | meta/classes/populate_sdk_ext.bbclass      |   2
-rw-r--r-- | meta/classes/siteconfig.bbclass            |   2
-rw-r--r-- | meta/classes/sstate.bbclass                | 126
-rw-r--r-- | meta/classes/staging.bbclass               | 349
-rw-r--r-- | meta/classes/toolchain-scripts.bbclass     |   1
-rw-r--r-- | meta/classes/uninative.bbclass             |   3
-rw-r--r-- | meta/classes/useradd.bbclass               |  34
21 files changed, 493 insertions, 208 deletions
diff --git a/meta/classes/autotools.bbclass b/meta/classes/autotools.bbclass
index c43531b050..3741d09706 100644
--- a/meta/classes/autotools.bbclass
+++ b/meta/classes/autotools.bbclass
@@ -131,129 +131,15 @@ EXTRACONFFUNCS ??= ""
131 | 131 | ||
132 | EXTRA_OECONF_append = " ${PACKAGECONFIG_CONFARGS}" | 132 | EXTRA_OECONF_append = " ${PACKAGECONFIG_CONFARGS}" |
133 | 133 | ||
134 | do_configure[prefuncs] += "autotools_preconfigure autotools_copy_aclocals ${EXTRACONFFUNCS}" | 134 | do_configure[prefuncs] += "autotools_preconfigure autotools_aclocals ${EXTRACONFFUNCS}" |
135 | do_configure[postfuncs] += "autotools_postconfigure" | 135 | do_configure[postfuncs] += "autotools_postconfigure" |
136 | 136 | ||
137 | ACLOCALDIR = "${WORKDIR}/aclocal-copy" | 137 | ACLOCALDIR = "${STAGING_DATADIR}/aclocal" |
138 | 138 | ACLOCALEXTRAPATH = "" | |
139 | python autotools_copy_aclocals () { | 139 | ACLOCALEXTRAPATH_class-target = " -I ${STAGING_DATADIR_NATIVE}/aclocal/" |
140 | import copy | 140 | ACLOCALEXTRAPATH_class-nativesdk = " -I ${STAGING_DATADIR_NATIVE}/aclocal/" |
141 | |||
142 | s = d.getVar("AUTOTOOLS_SCRIPT_PATH") | ||
143 | if not os.path.exists(s + "/configure.in") and not os.path.exists(s + "/configure.ac"): | ||
144 | if not d.getVar("AUTOTOOLS_COPYACLOCAL", False): | ||
145 | return | ||
146 | |||
147 | taskdepdata = d.getVar("BB_TASKDEPDATA", False) | ||
148 | #bb.warn(str(taskdepdata)) | ||
149 | pn = d.getVar("PN") | ||
150 | aclocaldir = d.getVar("ACLOCALDIR") | ||
151 | oe.path.remove(aclocaldir) | ||
152 | bb.utils.mkdirhier(aclocaldir) | ||
153 | start = None | ||
154 | configuredeps = [] | ||
155 | # Detect bitbake -b usage | ||
156 | # Everything but quilt-native would have dependencies | ||
157 | nodeps = (pn != "quilt-native") | ||
158 | |||
159 | for dep in taskdepdata: | ||
160 | data = taskdepdata[dep] | ||
161 | if data[1] == "do_configure" and data[0] == pn: | ||
162 | start = dep | ||
163 | if not nodeps and start: | ||
164 | break | ||
165 | if nodeps and data[0] != pn: | ||
166 | nodeps = False | ||
167 | if start is None: | ||
168 | bb.fatal("Couldn't find ourself in BB_TASKDEPDATA?") | ||
169 | |||
170 | # We need to figure out which m4 files we need to expose to this do_configure task. | ||
171 | # This needs to match what would get restored from sstate, which is controlled | ||
172 | # ultimately by calls from bitbake to setscene_depvalid(). | ||
173 | # That function expects a setscene dependency tree. We build a dependency tree | ||
174 | # condensed to do_populate_sysroot -> do_populate_sysroot dependencies, similar to | ||
175 | # that used by setscene tasks. We can then call into setscene_depvalid() and decide | ||
176 | # which dependencies we can "see" and should expose the m4 files for. | ||
177 | setscenedeps = copy.deepcopy(taskdepdata) | ||
178 | |||
179 | start = set([start]) | ||
180 | |||
181 | # Create collapsed do_populate_sysroot -> do_populate_sysroot tree | ||
182 | for dep in taskdepdata: | ||
183 | data = setscenedeps[dep] | ||
184 | if data[1] != "do_populate_sysroot": | ||
185 | for dep2 in setscenedeps: | ||
186 | data2 = setscenedeps[dep2] | ||
187 | if dep in data2[3]: | ||
188 | data2[3].update(setscenedeps[dep][3]) | ||
189 | data2[3].remove(dep) | ||
190 | if dep in start: | ||
191 | start.update(setscenedeps[dep][3]) | ||
192 | start.remove(dep) | ||
193 | del setscenedeps[dep] | ||
194 | |||
195 | # Remove circular references | ||
196 | for dep in setscenedeps: | ||
197 | if dep in setscenedeps[dep][3]: | ||
198 | setscenedeps[dep][3].remove(dep) | ||
199 | |||
200 | # Direct dependencies should be present and can be depended upon | ||
201 | for dep in start: | ||
202 | configuredeps.append(setscenedeps[dep][0]) | ||
203 | |||
204 | # Call into setscene_depvalid for each sub-dependency and only copy m4 files | ||
205 | # for ones that would be restored from sstate. | ||
206 | done = list(start) | ||
207 | next = list(start) | ||
208 | while next: | ||
209 | new = [] | ||
210 | for dep in next: | ||
211 | data = setscenedeps[dep] | ||
212 | for datadep in data[3]: | ||
213 | if datadep in done: | ||
214 | continue | ||
215 | taskdeps = {} | ||
216 | taskdeps[dep] = setscenedeps[dep][:2] | ||
217 | taskdeps[datadep] = setscenedeps[datadep][:2] | ||
218 | retval = setscene_depvalid(datadep, taskdeps, [], d) | ||
219 | if retval: | ||
220 | bb.note("Skipping setscene dependency %s for m4 macro copying" % datadep) | ||
221 | continue | ||
222 | done.append(datadep) | ||
223 | new.append(datadep) | ||
224 | configuredeps.append(setscenedeps[datadep][0]) | ||
225 | next = new | ||
226 | |||
227 | cp = [] | ||
228 | if nodeps: | ||
229 | bb.warn("autotools: Unable to find task dependencies, -b being used? Pulling in all m4 files") | ||
230 | for l in [d.expand("${STAGING_DATADIR_NATIVE}/aclocal/"), d.expand("${STAGING_DATADIR}/aclocal/")]: | ||
231 | cp.extend(os.path.join(l, f) for f in os.listdir(l)) | ||
232 | |||
233 | for c in configuredeps: | ||
234 | if c.endswith("-native"): | ||
235 | manifest = d.expand("${SSTATE_MANIFESTS}/manifest-${BUILD_ARCH}-%s.populate_sysroot" % c) | ||
236 | elif c.startswith("nativesdk-"): | ||
237 | manifest = d.expand("${SSTATE_MANIFESTS}/manifest-${SDK_ARCH}_${SDK_OS}-%s.populate_sysroot" % c) | ||
238 | elif "-cross-" in c or "-crosssdk" in c: | ||
239 | continue | ||
240 | else: | ||
241 | manifest = d.expand("${SSTATE_MANIFESTS}/manifest-${MACHINE}-%s.populate_sysroot" % c) | ||
242 | try: | ||
243 | f = open(manifest, "r") | ||
244 | for l in f: | ||
245 | if "/aclocal/" in l and l.strip().endswith(".m4"): | ||
246 | cp.append(l.strip()) | ||
247 | elif "config_site.d/" in l: | ||
248 | cp.append(l.strip()) | ||
249 | except: | ||
250 | bb.warn("%s not found" % manifest) | ||
251 | |||
252 | for c in cp: | ||
253 | t = os.path.join(aclocaldir, os.path.basename(c)) | ||
254 | if not os.path.exists(t): | ||
255 | os.symlink(c, t) | ||
256 | 141 | ||
142 | python autotools_aclocals () { | ||
257 | # Refresh variable with cache files | 143 | # Refresh variable with cache files |
258 | d.setVar("CONFIG_SITE", siteinfo_get_files(d, aclocalcache=True)) | 144 | d.setVar("CONFIG_SITE", siteinfo_get_files(d, aclocalcache=True)) |
259 | } | 145 | } |
@@ -279,6 +165,7 @@ autotools_do_configure() {
279 | if [ -e ${AUTOTOOLS_SCRIPT_PATH}/configure.in -o -e ${AUTOTOOLS_SCRIPT_PATH}/configure.ac ]; then | 165 | if [ -e ${AUTOTOOLS_SCRIPT_PATH}/configure.in -o -e ${AUTOTOOLS_SCRIPT_PATH}/configure.ac ]; then |
280 | olddir=`pwd` | 166 | olddir=`pwd` |
281 | cd ${AUTOTOOLS_SCRIPT_PATH} | 167 | cd ${AUTOTOOLS_SCRIPT_PATH} |
168 | mkdir -p ${ACLOCALDIR} | ||
282 | ACLOCAL="aclocal --system-acdir=${ACLOCALDIR}/" | 169 | ACLOCAL="aclocal --system-acdir=${ACLOCALDIR}/" |
283 | if [ x"${acpaths}" = xdefault ]; then | 170 | if [ x"${acpaths}" = xdefault ]; then |
284 | acpaths= | 171 | acpaths= |
@@ -289,6 +176,7 @@ autotools_do_configure() {
289 | else | 176 | else |
290 | acpaths="${acpaths}" | 177 | acpaths="${acpaths}" |
291 | fi | 178 | fi |
179 | acpaths="$acpaths ${ACLOCALEXTRAPATH}" | ||
292 | AUTOV=`automake --version | sed -e '1{s/.* //;s/\.[0-9]\+$//};q'` | 180 | AUTOV=`automake --version | sed -e '1{s/.* //;s/\.[0-9]\+$//};q'` |
293 | automake --version | 181 | automake --version |
294 | echo "AUTOV is $AUTOV" | 182 | echo "AUTOV is $AUTOV" |
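The removed autotools_copy_aclocals machinery above becomes redundant with recipe-specific sysroots: ACLOCALDIR now points straight at ${STAGING_DATADIR}/aclocal inside the recipe sysroot, and ACLOCALEXTRAPATH adds the native sysroot's aclocal directory for target and nativesdk builds. A minimal sketch of the resulting aclocal invocation, using hypothetical expanded paths rather than real variable values:

# Illustrative sketch only; the two directories stand in for expansions of
# STAGING_DATADIR and STAGING_DATADIR_NATIVE for a target recipe.
staging_datadir = "/build/tmp/work/core2-64-poky-linux/foo/1.0-r0/recipe-sysroot/usr/share"
staging_datadir_native = "/build/tmp/work/core2-64-poky-linux/foo/1.0-r0/recipe-sysroot-native/usr/share"

aclocaldir = staging_datadir + "/aclocal"                   # ACLOCALDIR
acpaths = " -I " + staging_datadir_native + "/aclocal/"     # ACLOCALEXTRAPATH (class-target)

aclocal_cmd = "aclocal --system-acdir=" + aclocaldir + "/" + acpaths
print(aclocal_cmd)
# aclocal --system-acdir=.../recipe-sysroot/usr/share/aclocal/ -I .../recipe-sysroot-native/usr/share/aclocal/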
diff --git a/meta/classes/base.bbclass b/meta/classes/base.bbclass
index 2765ebf61b..965518c733 100644
--- a/meta/classes/base.bbclass
+++ b/meta/classes/base.bbclass
@@ -284,7 +284,7 @@ CLEANBROKEN = "0"
284 | 284 | ||
285 | addtask configure after do_patch | 285 | addtask configure after do_patch |
286 | do_configure[dirs] = "${B}" | 286 | do_configure[dirs] = "${B}" |
287 | do_configure[deptask] = "do_populate_sysroot" | 287 | do_prepare_recipe_sysroot[deptask] = "do_populate_sysroot" |
288 | base_do_configure() { | 288 | base_do_configure() { |
289 | if [ -n "${CONFIGURESTAMPFILE}" -a -e "${CONFIGURESTAMPFILE}" ]; then | 289 | if [ -n "${CONFIGURESTAMPFILE}" -a -e "${CONFIGURESTAMPFILE}" ]; then |
290 | if [ "`cat ${CONFIGURESTAMPFILE}`" != "${BB_TASKHASH}" ]; then | 290 | if [ "`cat ${CONFIGURESTAMPFILE}`" != "${BB_TASKHASH}" ]; then |
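Moving the [deptask] flag from do_configure to do_prepare_recipe_sysroot is what makes every DEPENDS entry populate the new per-recipe sysroot before the build proper starts. A rough illustration of the flag's semantics, with made-up recipe names:

# bitbake's [deptask] flag: for each item in DEPENDS, add a dependency on
# that recipe's do_populate_sysroot task; this change hangs those
# dependencies off do_prepare_recipe_sysroot rather than do_configure.
depends = ["zlib", "openssl", "quilt-native"]          # hypothetical DEPENDS
task_deps = ["%s:do_populate_sysroot" % dep for dep in depends]
print(task_deps)
# ['zlib:do_populate_sysroot', 'openssl:do_populate_sysroot', 'quilt-native:do_populate_sysroot']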
diff --git a/meta/classes/ccache.bbclass b/meta/classes/ccache.bbclass
index 93fcacaf1a..76c8828619 100644
--- a/meta/classes/ccache.bbclass
+++ b/meta/classes/ccache.bbclass
@@ -2,5 +2,8 @@ CCACHE = "${@bb.utils.which(d.getVar('PATH'), 'ccache') and 'ccache '}"
2 | export CCACHE_DIR ?= "${TMPDIR}/ccache/${MULTIMACH_HOST_SYS}/${PN}" | 2 | export CCACHE_DIR ?= "${TMPDIR}/ccache/${MULTIMACH_HOST_SYS}/${PN}" |
3 | CCACHE_DISABLE[unexport] = "1" | 3 | CCACHE_DISABLE[unexport] = "1" |
4 | 4 | ||
5 | DEPENDS_append_class-target = " ccache-native" | ||
6 | DEPENDS[vardepvalueexclude] = " ccache-native" | ||
7 | |||
5 | do_configure[dirs] =+ "${CCACHE_DIR}" | 8 | do_configure[dirs] =+ "${CCACHE_DIR}" |
6 | do_kernel_configme[dirs] =+ "${CCACHE_DIR}" | 9 | do_kernel_configme[dirs] =+ "${CCACHE_DIR}" |
diff --git a/meta/classes/cross-canadian.bbclass b/meta/classes/cross-canadian.bbclass
index 64db1134f5..3120f46a14 100644
--- a/meta/classes/cross-canadian.bbclass
+++ b/meta/classes/cross-canadian.bbclass
@@ -100,9 +100,9 @@ MULTIMACH_TARGET_SYS = "${PACKAGE_ARCH}${HOST_VENDOR}-${HOST_OS}"
100 | 100 | ||
101 | INHIBIT_DEFAULT_DEPS = "1" | 101 | INHIBIT_DEFAULT_DEPS = "1" |
102 | 102 | ||
103 | STAGING_DIR_HOST = "${STAGING_DIR}/${HOST_ARCH}-${SDKPKGSUFFIX}${HOST_VENDOR}-${HOST_OS}" | 103 | STAGING_DIR_HOST = "${RECIPE_SYSROOT}" |
104 | 104 | ||
105 | TOOLCHAIN_OPTIONS = " --sysroot=${STAGING_DIR}/${HOST_ARCH}-${SDKPKGSUFFIX}${HOST_VENDOR}-${HOST_OS}" | 105 | TOOLCHAIN_OPTIONS = " --sysroot=${RECIPE_SYSROOT}" |
106 | 106 | ||
107 | PATH_append = ":${TMPDIR}/sysroots/${HOST_ARCH}/${bindir_cross}" | 107 | PATH_append = ":${TMPDIR}/sysroots/${HOST_ARCH}/${bindir_cross}" |
108 | PKGHIST_DIR = "${TMPDIR}/pkghistory/${HOST_ARCH}-${SDKPKGSUFFIX}${HOST_VENDOR}-${HOST_OS}/" | 108 | PKGHIST_DIR = "${TMPDIR}/pkghistory/${HOST_ARCH}-${SDKPKGSUFFIX}${HOST_VENDOR}-${HOST_OS}/" |
@@ -169,6 +169,7 @@ USE_NLS = "${SDKUSE_NLS}"
169 | # and not any particular tune that is enabled. | 169 | # and not any particular tune that is enabled. |
170 | TARGET_ARCH[vardepsexclude] = "TUNE_ARCH" | 170 | TARGET_ARCH[vardepsexclude] = "TUNE_ARCH" |
171 | 171 | ||
172 | PKGDATA_DIR = "${TMPDIR}/pkgdata/${SDK_SYS}" | ||
172 | # If MLPREFIX is set by multilib code, shlibs | 173 | # If MLPREFIX is set by multilib code, shlibs |
173 | # points to the wrong place so force it | 174 | # points to the wrong place so force it |
174 | SHLIBSDIRS = "${PKGDATA_DIR}/nativesdk-shlibs2" | 175 | SHLIBSDIRS = "${PKGDATA_DIR}/nativesdk-shlibs2" |
diff --git a/meta/classes/cross.bbclass b/meta/classes/cross.bbclass
index 01b09337ae..d125f10d01 100644
--- a/meta/classes/cross.bbclass
+++ b/meta/classes/cross.bbclass
@@ -19,7 +19,7 @@ HOST_AS_ARCH = "${BUILD_AS_ARCH}"
19 | 19 | ||
20 | export lt_cv_sys_lib_dlsearch_path_spec = "${libdir} ${base_libdir} /lib /lib64 /usr/lib /usr/lib64" | 20 | export lt_cv_sys_lib_dlsearch_path_spec = "${libdir} ${base_libdir} /lib /lib64 /usr/lib /usr/lib64" |
21 | 21 | ||
22 | STAGING_DIR_HOST = "${STAGING_DIR}/${HOST_ARCH}${HOST_VENDOR}-${HOST_OS}" | 22 | STAGING_DIR_HOST = "${RECIPE_SYSROOT_NATIVE}" |
23 | 23 | ||
24 | PACKAGE_ARCH = "${BUILD_ARCH}" | 24 | PACKAGE_ARCH = "${BUILD_ARCH}" |
25 | 25 | ||
diff --git a/meta/classes/crosssdk.bbclass b/meta/classes/crosssdk.bbclass
index eaf2beb94d..c5c29ea3da 100644
--- a/meta/classes/crosssdk.bbclass
+++ b/meta/classes/crosssdk.bbclass
@@ -8,7 +8,6 @@ python () {
8 | d.setVar('TUNE_PKGARCH', d.getVar('SDK_ARCH')) | 8 | d.setVar('TUNE_PKGARCH', d.getVar('SDK_ARCH')) |
9 | } | 9 | } |
10 | 10 | ||
11 | STAGING_DIR_TARGET = "${STAGING_DIR}/${SDK_ARCH}-${SDKPKGSUFFIX}${SDK_VENDOR}-${SDK_OS}" | ||
12 | STAGING_BINDIR_TOOLCHAIN = "${STAGING_DIR_NATIVE}${bindir_native}/${TARGET_ARCH}${TARGET_VENDOR}-${TARGET_OS}" | 11 | STAGING_BINDIR_TOOLCHAIN = "${STAGING_DIR_NATIVE}${bindir_native}/${TARGET_ARCH}${TARGET_VENDOR}-${TARGET_OS}" |
13 | 12 | ||
14 | TARGET_ARCH = "${SDK_ARCH}" | 13 | TARGET_ARCH = "${SDK_ARCH}" |
diff --git a/meta/classes/gobject-introspection.bbclass b/meta/classes/gobject-introspection.bbclass
index 37389cbc8b..b6160b88b6 100644
--- a/meta/classes/gobject-introspection.bbclass
+++ b/meta/classes/gobject-introspection.bbclass
@@ -17,7 +17,7 @@ UNKNOWN_CONFIGURE_WHITELIST_append = " --enable-introspection --disable-introspe
17 | 17 | ||
18 | # Generating introspection data depends on a combination of native and target | 18 | # Generating introspection data depends on a combination of native and target |
19 | # introspection tools, and qemu to run the target tools. | 19 | # introspection tools, and qemu to run the target tools. |
20 | DEPENDS_append_class-target = " gobject-introspection gobject-introspection-native qemu-native" | 20 | DEPENDS_append_class-target = " gobject-introspection gobject-introspection-native qemu-native prelink-native" |
21 | 21 | ||
22 | # Even though introspection is disabled on -native, gobject-introspection package is still | 22 | # Even though introspection is disabled on -native, gobject-introspection package is still |
23 | # needed for m4 macros. | 23 | # needed for m4 macros. |
diff --git a/meta/classes/image.bbclass b/meta/classes/image.bbclass
index 6b0864913b..5fc7385d79 100644
--- a/meta/classes/image.bbclass
+++ b/meta/classes/image.bbclass
@@ -13,7 +13,7 @@ inherit gzipnative
13 | 13 | ||
14 | LICENSE = "MIT" | 14 | LICENSE = "MIT" |
15 | PACKAGES = "" | 15 | PACKAGES = "" |
16 | DEPENDS += "${MLPREFIX}qemuwrapper-cross ${MLPREFIX}depmodwrapper-cross" | 16 | DEPENDS += "${MLPREFIX}qemuwrapper-cross depmodwrapper-cross" |
17 | RDEPENDS += "${PACKAGE_INSTALL} ${LINGUAS_INSTALL}" | 17 | RDEPENDS += "${PACKAGE_INSTALL} ${LINGUAS_INSTALL}" |
18 | RRECOMMENDS += "${PACKAGE_INSTALL_ATTEMPTONLY}" | 18 | RRECOMMENDS += "${PACKAGE_INSTALL_ATTEMPTONLY}" |
19 | 19 | ||
@@ -164,7 +164,7 @@ python () {
164 | deps = "" | 164 | deps = "" |
165 | for dep in (d.getVar('EXTRA_IMAGEDEPENDS') or "").split(): | 165 | for dep in (d.getVar('EXTRA_IMAGEDEPENDS') or "").split(): |
166 | deps += " %s:do_populate_sysroot" % dep | 166 | deps += " %s:do_populate_sysroot" % dep |
167 | d.appendVarFlag('do_build', 'depends', deps) | 167 | d.appendVarFlag('do_image_complete', 'depends', deps) |
168 | 168 | ||
169 | #process IMAGE_FEATURES, we must do this before runtime_mapping_rename | 169 | #process IMAGE_FEATURES, we must do this before runtime_mapping_rename |
170 | #Check for replaces image features | 170 | #Check for replaces image features |
@@ -274,7 +274,7 @@ fakeroot python do_rootfs () {
274 | do_rootfs[dirs] = "${TOPDIR}" | 274 | do_rootfs[dirs] = "${TOPDIR}" |
275 | do_rootfs[cleandirs] += "${S} ${IMGDEPLOYDIR}" | 275 | do_rootfs[cleandirs] += "${S} ${IMGDEPLOYDIR}" |
276 | do_rootfs[umask] = "022" | 276 | do_rootfs[umask] = "022" |
277 | addtask rootfs before do_build | 277 | addtask rootfs before do_build after do_prepare_recipe_sysroot |
278 | 278 | ||
279 | fakeroot python do_image () { | 279 | fakeroot python do_image () { |
280 | from oe.utils import execute_pre_post_process | 280 | from oe.utils import execute_pre_post_process |
diff --git a/meta/classes/insane.bbclass b/meta/classes/insane.bbclass
index 7332e45453..3cf86c12c3 100644
--- a/meta/classes/insane.bbclass
+++ b/meta/classes/insane.bbclass
@@ -827,6 +827,7 @@ def package_qa_check_staged(path,d):
827 | sane = True | 827 | sane = True |
828 | tmpdir = d.getVar('TMPDIR') | 828 | tmpdir = d.getVar('TMPDIR') |
829 | workdir = os.path.join(tmpdir, "work") | 829 | workdir = os.path.join(tmpdir, "work") |
830 | recipesysroot = d.getVar("RECIPE_SYSROOT") | ||
830 | 831 | ||
831 | if bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d): | 832 | if bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d): |
832 | pkgconfigcheck = workdir | 833 | pkgconfigcheck = workdir |
@@ -842,12 +843,14 @@ def package_qa_check_staged(path,d):
842 | if file.endswith(".la"): | 843 | if file.endswith(".la"): |
843 | with open(path) as f: | 844 | with open(path) as f: |
844 | file_content = f.read() | 845 | file_content = f.read() |
846 | file_content = file_content.replace(recipesysroot, "") | ||
845 | if workdir in file_content: | 847 | if workdir in file_content: |
846 | error_msg = "%s failed sanity test (workdir) in path %s" % (file,root) | 848 | error_msg = "%s failed sanity test (workdir) in path %s" % (file,root) |
847 | sane = package_qa_handle_error("la", error_msg, d) | 849 | sane = package_qa_handle_error("la", error_msg, d) |
848 | elif file.endswith(".pc"): | 850 | elif file.endswith(".pc"): |
849 | with open(path) as f: | 851 | with open(path) as f: |
850 | file_content = f.read() | 852 | file_content = f.read() |
853 | file_content = file_content.replace(recipesysroot, "") | ||
851 | if pkgconfigcheck in file_content: | 854 | if pkgconfigcheck in file_content: |
852 | error_msg = "%s failed sanity test (tmpdir) in path %s" % (file,root) | 855 | error_msg = "%s failed sanity test (tmpdir) in path %s" % (file,root) |
853 | sane = package_qa_handle_error("pkgconfig", error_msg, d) | 856 | sane = package_qa_handle_error("pkgconfig", error_msg, d) |
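The two file_content.replace() calls added above mean a .la or .pc file is only flagged if it still references WORKDIR/TMPDIR after every occurrence of the recipe sysroot path has been stripped. A small sketch of the effect on a typical libdir entry (paths are hypothetical):

workdir = "/build/tmp/work"
recipesysroot = "/build/tmp/work/core2-64-poky-linux/foo/1.0-r0/recipe-sysroot"

# libdir pointing into the per-recipe sysroot: no longer reported
content = "libdir='" + recipesysroot + "/usr/lib'\n"
assert workdir not in content.replace(recipesysroot, "")

# a genuine WORKDIR leak is still caught
leaky = "libdir='/build/tmp/work/core2-64-poky-linux/foo/1.0-r0/image/usr/lib'\n"
assert workdir in leaky.replace(recipesysroot, "")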
diff --git a/meta/classes/module.bbclass b/meta/classes/module.bbclass
index 58470474cc..a5888736f2 100644
--- a/meta/classes/module.bbclass
+++ b/meta/classes/module.bbclass
@@ -1,6 +1,6 @@
1 | inherit module-base kernel-module-split | 1 | inherit module-base kernel-module-split |
2 | 2 | ||
3 | addtask make_scripts after do_patch before do_compile | 3 | addtask make_scripts after do_prepare_recipe_sysroot before do_compile |
4 | do_make_scripts[lockfiles] = "${TMPDIR}/kernel-scripts.lock" | 4 | do_make_scripts[lockfiles] = "${TMPDIR}/kernel-scripts.lock" |
5 | do_make_scripts[depends] += "virtual/kernel:do_shared_workdir" | 5 | do_make_scripts[depends] += "virtual/kernel:do_shared_workdir" |
6 | 6 | ||
diff --git a/meta/classes/native.bbclass b/meta/classes/native.bbclass
index 5592e1d1c1..4d4aaadc02 100644
--- a/meta/classes/native.bbclass
+++ b/meta/classes/native.bbclass
@@ -95,7 +95,7 @@ libdir .= "${NATIVE_PACKAGE_PATH_SUFFIX}"
95 | libexecdir .= "${NATIVE_PACKAGE_PATH_SUFFIX}" | 95 | libexecdir .= "${NATIVE_PACKAGE_PATH_SUFFIX}" |
96 | 96 | ||
97 | do_populate_sysroot[sstate-inputdirs] = "${SYSROOT_DESTDIR}/${STAGING_DIR_NATIVE}/" | 97 | do_populate_sysroot[sstate-inputdirs] = "${SYSROOT_DESTDIR}/${STAGING_DIR_NATIVE}/" |
98 | do_populate_sysroot[sstate-outputdirs] = "${STAGING_DIR_NATIVE}/" | 98 | do_populate_sysroot[sstate-outputdirs] = "${STAGING_DIR}-components/${PACKAGE_ARCH}/${PN}" |
99 | 99 | ||
100 | # Since we actually install these into situ there is no staging prefix | 100 | # Since we actually install these into situ there is no staging prefix |
101 | STAGING_DIR_HOST = "" | 101 | STAGING_DIR_HOST = "" |
diff --git a/meta/classes/nativesdk.bbclass b/meta/classes/nativesdk.bbclass
index 2ac8fd82ed..a97f5a7114 100644
--- a/meta/classes/nativesdk.bbclass
+++ b/meta/classes/nativesdk.bbclass
@@ -25,9 +25,7 @@ PACKAGE_ARCHS = "${SDK_PACKAGE_ARCHS}"
25 | DEPENDS_append = " chrpath-replacement-native" | 25 | DEPENDS_append = " chrpath-replacement-native" |
26 | EXTRANATIVEPATH += "chrpath-native" | 26 | EXTRANATIVEPATH += "chrpath-native" |
27 | 27 | ||
28 | STAGING_DIR_HOST = "${STAGING_DIR}/${MULTIMACH_HOST_SYS}" | 28 | PKGDATA_DIR = "${TMPDIR}/pkgdata/${SDK_SYS}" |
29 | STAGING_DIR_TARGET = "${STAGING_DIR}/${MULTIMACH_TARGET_SYS}" | ||
30 | PKGDATA_DIR = "${STAGING_DIR_HOST}/pkgdata" | ||
31 | 29 | ||
32 | HOST_ARCH = "${SDK_ARCH}" | 30 | HOST_ARCH = "${SDK_ARCH}" |
33 | HOST_VENDOR = "${SDK_VENDOR}" | 31 | HOST_VENDOR = "${SDK_VENDOR}" |
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass
index 0068a50258..ac2d71cfa4 100644
--- a/meta/classes/package.bbclass
+++ b/meta/classes/package.bbclass
@@ -414,7 +414,8 @@ def copydebugsources(debugsrcdir, d):
414 | bb.utils.mkdirhier(basepath) | 414 | bb.utils.mkdirhier(basepath) |
415 | cpath.updatecache(basepath) | 415 | cpath.updatecache(basepath) |
416 | 416 | ||
417 | processdebugsrc = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '(<internal>|<built-in>)$' | " | 417 | # Ignore files from the recipe sysroots (target and native) |
418 | processdebugsrc = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '((<internal>|<built-in>)$|/.*recipe-sysroot.*/)' | " | ||
418 | # We need to ignore files that are not actually ours | 419 | # We need to ignore files that are not actually ours |
419 | # we do this by only paying attention to items from this package | 420 | # we do this by only paying attention to items from this package |
420 | processdebugsrc += "fgrep -zw '%s' | " | 421 | processdebugsrc += "fgrep -zw '%s' | " |
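The extended egrep pattern drops any debug-source entry that passes through a recipe sysroot, in addition to the existing <internal>/<built-in> entries. The same filter expressed in Python, with hypothetical example paths:

import re

# Same expression as the egrep -v pattern above (minus the -z NUL handling)
pattern = re.compile(r"(<internal>|<built-in>)$|/.*recipe-sysroot.*/")

paths = [
    "/usr/src/debug/foo/1.0-r0/foo-1.0/main.c",                              # kept
    "/build/tmp/work/core2-64/foo/1.0-r0/recipe-sysroot/usr/include/bar.h",  # dropped
    "<built-in>",                                                            # dropped
]
kept = [p for p in paths if not pattern.search(p)]
assert kept == ["/usr/src/debug/foo/1.0-r0/foo-1.0/main.c"]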
diff --git a/meta/classes/pixbufcache.bbclass b/meta/classes/pixbufcache.bbclass
index 633a8757ed..aa9815cc6f 100644
--- a/meta/classes/pixbufcache.bbclass
+++ b/meta/classes/pixbufcache.bbclass
@@ -48,24 +48,17 @@ python populate_packages_append() {
48 | } | 48 | } |
49 | 49 | ||
50 | gdkpixbuf_complete() { | 50 | gdkpixbuf_complete() { |
51 | GDK_PIXBUF_FATAL_LOADER=1 ${STAGING_LIBDIR_NATIVE}/gdk-pixbuf-2.0/gdk-pixbuf-query-loaders --update-cache || exit 1 | 51 | GDK_PIXBUF_FATAL_LOADER=1 ${STAGING_LIBDIR_NATIVE}/gdk-pixbuf-2.0/gdk-pixbuf-query-loaders --update-cache || exit 1 |
52 | } | 52 | } |
53 | 53 | ||
54 | # | ||
55 | # Add an sstate postinst hook to update the cache for native packages. | ||
56 | # An error exit during populate_sysroot_setscene allows bitbake to | ||
57 | # try to recover by re-building the package. | ||
58 | # | ||
59 | DEPENDS_append_class-native = " gdk-pixbuf-native" | 54 | DEPENDS_append_class-native = " gdk-pixbuf-native" |
60 | SSTATEPOSTINSTFUNCS_append_class-native = " pixbufcache_sstate_postinst" | 55 | SYSROOT_PREPROCESS_FUNCS_append_class-native = " pixbufcache_sstate_postinst" |
61 | 56 | ||
62 | # See base.bbclass for the other half of this | 57 | # See base.bbclass for the other half of this |
63 | pixbufcache_sstate_postinst() { | 58 | pixbufcache_sstate_postinst() { |
64 | if [ "${BB_CURRENTTASK}" = "populate_sysroot" ]; then | 59 | mkdir -p ${SYSROOT_DESTDIR}${bindir} |
65 | ${gdkpixbuf_complete} | 60 | dest=${SYSROOT_DESTDIR}${bindir}/postinst-useradd-${PN} |
66 | elif [ "${BB_CURRENTTASK}" = "populate_sysroot_setscene" ]; then | 61 | echo '#!/bin/sh' > $dest |
67 | if [ -x ${STAGING_LIBDIR_NATIVE}/gdk-pixbuf-2.0/gdk-pixbuf-query-loaders ]; then | 62 | echo "${gdkpixbuf_complete}" >> $dest |
68 | echo "${gdkpixbuf_complete}" >> ${STAGING_DIR}/sstatecompletions | 63 | chmod 0755 $dest |
69 | fi | ||
70 | fi | ||
71 | } | 64 | } |
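Rather than hooking populate_sysroot_setscene, the loader-cache update is now shipped as an ordinary postinst script under ${bindir} in the native sysroot; the staging code later in this commit collects anything matching /usr/bin/postinst-* from an installed manifest and runs it once the recipe sysroot has been assembled (see staging_copyfile() and staging_populate_sysroot_dir() below). A rough sketch of that pick-up-and-run step, with made-up paths:

import os
import subprocess

postinsts = []

def stage_file(src, dest):
    # mirrors the check in staging_copyfile(): remember postinst scripts so
    # they can be executed after the sysroot has been populated
    if "/usr/bin/postinst-" in src:
        postinsts.append(dest)
    # ... hardlink or copy src to dest ...

stage_file("/sysroot-components/x86_64/gdk-pixbuf-native/usr/bin/postinst-gdk-pixbuf-native",
           "/work/foo/1.0-r0/recipe-sysroot-native/usr/bin/postinst-gdk-pixbuf-native")

for p in postinsts:
    if os.path.exists(p):                    # guard only needed in this sketch
        subprocess.check_call(p, shell=True)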
diff --git a/meta/classes/populate_sdk_ext.bbclass b/meta/classes/populate_sdk_ext.bbclass
index 7c9666c162..39e0c83fe4 100644
--- a/meta/classes/populate_sdk_ext.bbclass
+++ b/meta/classes/populate_sdk_ext.bbclass
@@ -404,7 +404,7 @@ python copy_buildsystem () {
404 | 404 | ||
405 | if sdk_include_toolchain: | 405 | if sdk_include_toolchain: |
406 | lockedsigs_base = d.getVar('WORKDIR') + '/locked-sigs-base2.inc' | 406 | lockedsigs_base = d.getVar('WORKDIR') + '/locked-sigs-base2.inc' |
407 | lockedsigs_toolchain = d.getVar('STAGING_DIR_HOST') + '/locked-sigs/locked-sigs-extsdk-toolchain.inc' | 407 | lockedsigs_toolchain = d.expand("${STAGING_DIR}/${TUNE_PKGARCH}/meta-extsdk-toolchain/locked-sigs/locked-sigs-extsdk-toolchain.inc") |
408 | shutil.move(lockedsigs_pruned, lockedsigs_base) | 408 | shutil.move(lockedsigs_pruned, lockedsigs_base) |
409 | oe.copy_buildsystem.merge_lockedsigs([], | 409 | oe.copy_buildsystem.merge_lockedsigs([], |
410 | lockedsigs_base, | 410 | lockedsigs_base, |
diff --git a/meta/classes/siteconfig.bbclass b/meta/classes/siteconfig.bbclass
index 9d51a0266a..bb491d2994 100644
--- a/meta/classes/siteconfig.bbclass
+++ b/meta/classes/siteconfig.bbclass
@@ -5,9 +5,9 @@ python siteconfig_do_siteconfig () {
5 | if not os.path.isdir(os.path.join(d.getVar('FILE_DIRNAME'), 'site_config')): | 5 | if not os.path.isdir(os.path.join(d.getVar('FILE_DIRNAME'), 'site_config')): |
6 | bb.debug(1, "No site_config directory, skipping do_siteconfig") | 6 | bb.debug(1, "No site_config directory, skipping do_siteconfig") |
7 | return | 7 | return |
8 | sstate_install(shared_state, d) | ||
8 | bb.build.exec_func('do_siteconfig_gencache', d) | 9 | bb.build.exec_func('do_siteconfig_gencache', d) |
9 | sstate_clean(shared_state, d) | 10 | sstate_clean(shared_state, d) |
10 | sstate_install(shared_state, d) | ||
11 | } | 11 | } |
12 | 12 | ||
13 | EXTRASITECONFIG ?= "" | 13 | EXTRASITECONFIG ?= "" |
diff --git a/meta/classes/sstate.bbclass b/meta/classes/sstate.bbclass
index f1faf4848e..a767a0203b 100644
--- a/meta/classes/sstate.bbclass
+++ b/meta/classes/sstate.bbclass
@@ -97,7 +97,7 @@ python () {
97 | # These classes encode staging paths into their scripts data so can only be | 97 | # These classes encode staging paths into their scripts data so can only be |
98 | # reused if we manipulate the paths | 98 | # reused if we manipulate the paths |
99 | if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d) or bb.data.inherits_class('sdk', d) or bb.data.inherits_class('crosssdk', d): | 99 | if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d) or bb.data.inherits_class('sdk', d) or bb.data.inherits_class('crosssdk', d): |
100 | scan_cmd = "grep -Irl ${STAGING_DIR} ${SSTATE_BUILDDIR}" | 100 | scan_cmd = "grep -Irl -e ${RECIPE_SYSROOT} -e ${RECIPE_SYSROOT_NATIVE} ${SSTATE_BUILDDIR}" |
101 | d.setVar('SSTATE_SCAN_CMD', scan_cmd) | 101 | d.setVar('SSTATE_SCAN_CMD', scan_cmd) |
102 | 102 | ||
103 | unique_tasks = sorted(set((d.getVar('SSTATETASKS') or "").split())) | 103 | unique_tasks = sorted(set((d.getVar('SSTATETASKS') or "").split())) |
@@ -131,6 +131,7 @@ def sstate_state_fromvars(d, task = None):
131 | lockfiles = (d.getVarFlag("do_" + task, 'sstate-lockfile') or "").split() | 131 | lockfiles = (d.getVarFlag("do_" + task, 'sstate-lockfile') or "").split() |
132 | lockfilesshared = (d.getVarFlag("do_" + task, 'sstate-lockfile-shared') or "").split() | 132 | lockfilesshared = (d.getVarFlag("do_" + task, 'sstate-lockfile-shared') or "").split() |
133 | interceptfuncs = (d.getVarFlag("do_" + task, 'sstate-interceptfuncs') or "").split() | 133 | interceptfuncs = (d.getVarFlag("do_" + task, 'sstate-interceptfuncs') or "").split() |
134 | fixmedir = d.getVarFlag("do_" + task, 'sstate-fixmedir') or "" | ||
134 | if not task or len(inputs) != len(outputs): | 135 | if not task or len(inputs) != len(outputs): |
135 | bb.fatal("sstate variables not setup correctly?!") | 136 | bb.fatal("sstate variables not setup correctly?!") |
136 | 137 | ||
@@ -146,6 +147,7 @@ def sstate_state_fromvars(d, task = None):
146 | ss['lockfiles-shared'] = lockfilesshared | 147 | ss['lockfiles-shared'] = lockfilesshared |
147 | ss['plaindirs'] = plaindirs | 148 | ss['plaindirs'] = plaindirs |
148 | ss['interceptfuncs'] = interceptfuncs | 149 | ss['interceptfuncs'] = interceptfuncs |
150 | ss['fixmedir'] = fixmedir | ||
149 | return ss | 151 | return ss |
150 | 152 | ||
151 | def sstate_add(ss, source, dest, d): | 153 | def sstate_add(ss, source, dest, d): |
@@ -195,6 +197,9 @@ def sstate_install(ss, d):
195 | srcdir = os.path.join(walkroot, dir) | 197 | srcdir = os.path.join(walkroot, dir) |
196 | dstdir = srcdir.replace(state[1], state[2]) | 198 | dstdir = srcdir.replace(state[1], state[2]) |
197 | #bb.debug(2, "Staging %s to %s" % (srcdir, dstdir)) | 199 | #bb.debug(2, "Staging %s to %s" % (srcdir, dstdir)) |
200 | if os.path.islink(srcdir): | ||
201 | sharedfiles.append(dstdir) | ||
202 | continue | ||
198 | if not dstdir.endswith("/"): | 203 | if not dstdir.endswith("/"): |
199 | dstdir = dstdir + "/" | 204 | dstdir = dstdir + "/" |
200 | shareddirs.append(dstdir) | 205 | shareddirs.append(dstdir) |
@@ -203,7 +208,7 @@ def sstate_install(ss, d):
203 | whitelist = (d.getVar("SSTATE_DUPWHITELIST") or "").split() | 208 | whitelist = (d.getVar("SSTATE_DUPWHITELIST") or "").split() |
204 | match = [] | 209 | match = [] |
205 | for f in sharedfiles: | 210 | for f in sharedfiles: |
206 | if os.path.exists(f): | 211 | if os.path.exists(f) and not os.path.islink(f): |
207 | f = os.path.normpath(f) | 212 | f = os.path.normpath(f) |
208 | realmatch = True | 213 | realmatch = True |
209 | for w in whitelist: | 214 | for w in whitelist: |
@@ -244,6 +249,10 @@ def sstate_install(ss, d):
244 | (d.getVar('PN'), "\n ".join(match))) | 249 | (d.getVar('PN'), "\n ".join(match))) |
245 | bb.fatal("If the above message is too much, the simpler version is you're advised to wipe out tmp and rebuild (reusing sstate is fine). That will likely fix things in most (but not all) cases.") | 250 | bb.fatal("If the above message is too much, the simpler version is you're advised to wipe out tmp and rebuild (reusing sstate is fine). That will likely fix things in most (but not all) cases.") |
246 | 251 | ||
252 | if ss['fixmedir'] and os.path.exists(ss['fixmedir'] + "/fixmepath.cmd"): | ||
253 | sharedfiles.append(ss['fixmedir'] + "/fixmepath.cmd") | ||
254 | sharedfiles.append(ss['fixmedir'] + "/fixmepath") | ||
255 | |||
247 | # Write out the manifest | 256 | # Write out the manifest |
248 | f = open(manifest, "w") | 257 | f = open(manifest, "w") |
249 | for file in sharedfiles: | 258 | for file in sharedfiles: |
@@ -288,17 +297,8 @@ sstate_install[vardepsexclude] += "SSTATE_DUPWHITELIST STATE_MANMACH SSTATE_MANF
288 | sstate_install[vardeps] += "${SSTATEPOSTINSTFUNCS}" | 297 | sstate_install[vardeps] += "${SSTATEPOSTINSTFUNCS}" |
289 | 298 | ||
290 | def sstate_installpkg(ss, d): | 299 | def sstate_installpkg(ss, d): |
291 | import oe.path | ||
292 | import subprocess | ||
293 | from oe.gpg_sign import get_signer | 300 | from oe.gpg_sign import get_signer |
294 | 301 | ||
295 | def prepdir(dir): | ||
296 | # remove dir if it exists, ensure any parent directories do exist | ||
297 | if os.path.exists(dir): | ||
298 | oe.path.remove(dir) | ||
299 | bb.utils.mkdirhier(dir) | ||
300 | oe.path.remove(dir) | ||
301 | |||
302 | sstateinst = d.expand("${WORKDIR}/sstate-install-%s/" % ss['task']) | 302 | sstateinst = d.expand("${WORKDIR}/sstate-install-%s/" % ss['task']) |
303 | sstatefetch = d.getVar('SSTATE_PKGNAME') + '_' + ss['task'] + ".tgz" | 303 | sstatefetch = d.getVar('SSTATE_PKGNAME') + '_' + ss['task'] + ".tgz" |
304 | sstatepkg = d.getVar('SSTATE_PKG') + '_' + ss['task'] + ".tgz" | 304 | sstatepkg = d.getVar('SSTATE_PKG') + '_' + ss['task'] + ".tgz" |
@@ -320,11 +320,36 @@ def sstate_installpkg(ss, d):
320 | if not signer.verify(sstatepkg + '.sig'): | 320 | if not signer.verify(sstatepkg + '.sig'): |
321 | bb.warn("Cannot verify signature on sstate package %s" % sstatepkg) | 321 | bb.warn("Cannot verify signature on sstate package %s" % sstatepkg) |
322 | 322 | ||
323 | for f in (d.getVar('SSTATEPREINSTFUNCS') or '').split() + ['sstate_unpack_package'] + (d.getVar('SSTATEPOSTUNPACKFUNCS') or '').split(): | 323 | sstateinst = d.getVar("SSTATE_INSTDIR") |
324 | d.setVar('SSTATE_FIXMEDIR', ss['fixmedir']) | ||
325 | |||
326 | for f in (d.getVar('SSTATEPREINSTFUNCS') or '').split() + ['sstate_unpack_package']: | ||
324 | # All hooks should run in the SSTATE_INSTDIR | 327 | # All hooks should run in the SSTATE_INSTDIR |
325 | bb.build.exec_func(f, d, (sstateinst,)) | 328 | bb.build.exec_func(f, d, (sstateinst,)) |
326 | 329 | ||
330 | return sstate_installpkgdir(ss, d) | ||
331 | |||
332 | def sstate_installpkgdir(ss, d): | ||
333 | import oe.path | ||
334 | import subprocess | ||
335 | |||
336 | sstateinst = d.getVar("SSTATE_INSTDIR") | ||
337 | d.setVar('SSTATE_FIXMEDIR', ss['fixmedir']) | ||
338 | |||
339 | for f in (d.getVar('SSTATEPOSTUNPACKFUNCS') or '').split(): | ||
340 | # All hooks should run in the SSTATE_INSTDIR | ||
341 | bb.build.exec_func(f, d, (sstateinst,)) | ||
342 | |||
343 | def prepdir(dir): | ||
344 | # remove dir if it exists, ensure any parent directories do exist | ||
345 | if os.path.exists(dir): | ||
346 | oe.path.remove(dir) | ||
347 | bb.utils.mkdirhier(dir) | ||
348 | oe.path.remove(dir) | ||
349 | |||
327 | for state in ss['dirs']: | 350 | for state in ss['dirs']: |
351 | if d.getVar('SSTATE_SKIP_CREATION') == '1': | ||
352 | continue | ||
328 | prepdir(state[1]) | 353 | prepdir(state[1]) |
329 | os.rename(sstateinst + state[0], state[1]) | 354 | os.rename(sstateinst + state[0], state[1]) |
330 | sstate_install(ss, d) | 355 | sstate_install(ss, d) |
@@ -347,18 +372,18 @@ python sstate_hardcode_path_unpack () {
347 | import subprocess | 372 | import subprocess |
348 | 373 | ||
349 | sstateinst = d.getVar('SSTATE_INSTDIR') | 374 | sstateinst = d.getVar('SSTATE_INSTDIR') |
350 | fixmefn = sstateinst + "fixmepath" | 375 | sstatefixmedir = d.getVar('SSTATE_FIXMEDIR') |
376 | fixmefn = sstateinst + "fixmepath" | ||
351 | if os.path.isfile(fixmefn): | 377 | if os.path.isfile(fixmefn): |
352 | staging = d.getVar('STAGING_DIR') | 378 | staging_target = d.getVar('RECIPE_SYSROOT') |
353 | staging_target = d.getVar('STAGING_DIR_TARGET') | 379 | staging_host = d.getVar('RECIPE_SYSROOT_NATIVE') |
354 | staging_host = d.getVar('STAGING_DIR_HOST') | 380 | |
355 | 381 | if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross-canadian', d): | |
356 | if bb.data.inherits_class('native', d) or bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross-canadian', d): | ||
357 | sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIR:%s:g'" % (staging) | ||
358 | elif bb.data.inherits_class('cross', d): | ||
359 | sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIRTARGET:%s:g; s:FIXMESTAGINGDIR:%s:g'" % (staging_target, staging) | ||
360 | else: | ||
361 | sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIRHOST:%s:g'" % (staging_host) | 382 | sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIRHOST:%s:g'" % (staging_host) |
383 | elif bb.data.inherits_class('cross', d) or bb.data.inherits_class('crosssdk', d): | ||
384 | sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIRTARGET:%s:g; s:FIXMESTAGINGDIRHOST:%s:g'" % (staging_target, staging_host) | ||
385 | else: | ||
386 | sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIRTARGET:%s:g'" % (staging_target) | ||
362 | 387 | ||
363 | extra_staging_fixmes = d.getVar('EXTRA_STAGING_FIXMES') or '' | 388 | extra_staging_fixmes = d.getVar('EXTRA_STAGING_FIXMES') or '' |
364 | for fixmevar in extra_staging_fixmes.split(): | 389 | for fixmevar in extra_staging_fixmes.split(): |
@@ -368,6 +393,18 @@ python sstate_hardcode_path_unpack () {
368 | # Add sstateinst to each filename in fixmepath, use xargs to efficiently call sed | 393 | # Add sstateinst to each filename in fixmepath, use xargs to efficiently call sed |
369 | sstate_hardcode_cmd = "sed -e 's:^:%s:g' %s | xargs %s" % (sstateinst, fixmefn, sstate_sed_cmd) | 394 | sstate_hardcode_cmd = "sed -e 's:^:%s:g' %s | xargs %s" % (sstateinst, fixmefn, sstate_sed_cmd) |
370 | 395 | ||
396 | # Defer do_populate_sysroot relocation command | ||
397 | if sstatefixmedir: | ||
398 | bb.utils.mkdirhier(sstatefixmedir) | ||
399 | with open(sstatefixmedir + "/fixmepath.cmd", "w") as f: | ||
400 | sstate_hardcode_cmd = sstate_hardcode_cmd.replace(fixmefn, sstatefixmedir + "/fixmepath") | ||
401 | sstate_hardcode_cmd = sstate_hardcode_cmd.replace(sstateinst, "FIXMEFINALSSTATEINST") | ||
402 | sstate_hardcode_cmd = sstate_hardcode_cmd.replace(staging_host, "FIXMEFINALSSTATEHOST") | ||
403 | sstate_hardcode_cmd = sstate_hardcode_cmd.replace(staging_target, "FIXMEFINALSSTATETARGET") | ||
404 | f.write(sstate_hardcode_cmd) | ||
405 | bb.utils.copyfile(fixmefn, sstatefixmedir + "/fixmepath") | ||
406 | return | ||
407 | |||
371 | bb.note("Replacing fixme paths in sstate package: %s" % (sstate_hardcode_cmd)) | 408 | bb.note("Replacing fixme paths in sstate package: %s" % (sstate_hardcode_cmd)) |
372 | subprocess.call(sstate_hardcode_cmd, shell=True) | 409 | subprocess.call(sstate_hardcode_cmd, shell=True) |
373 | 410 | ||
@@ -499,20 +536,19 @@ python sstate_hardcode_path () {
499 | # Note: the logic in this function needs to match the reverse logic | 536 | # Note: the logic in this function needs to match the reverse logic |
500 | # in sstate_installpkg(ss, d) | 537 | # in sstate_installpkg(ss, d) |
501 | 538 | ||
502 | staging = d.getVar('STAGING_DIR') | 539 | staging_target = d.getVar('RECIPE_SYSROOT') |
503 | staging_target = d.getVar('STAGING_DIR_TARGET') | 540 | staging_host = d.getVar('RECIPE_SYSROOT_NATIVE') |
504 | staging_host = d.getVar('STAGING_DIR_HOST') | ||
505 | sstate_builddir = d.getVar('SSTATE_BUILDDIR') | 541 | sstate_builddir = d.getVar('SSTATE_BUILDDIR') |
506 | 542 | ||
507 | if bb.data.inherits_class('native', d) or bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross-canadian', d): | 543 | if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross-canadian', d): |
508 | sstate_grep_cmd = "grep -l -e '%s'" % (staging) | ||
509 | sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIR:g'" % (staging) | ||
510 | elif bb.data.inherits_class('cross', d): | ||
511 | sstate_grep_cmd = "grep -l -e '%s' -e '%s'" % (staging_target, staging) | ||
512 | sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIRTARGET:g; s:%s:FIXMESTAGINGDIR:g'" % (staging_target, staging) | ||
513 | else: | ||
514 | sstate_grep_cmd = "grep -l -e '%s'" % (staging_host) | 544 | sstate_grep_cmd = "grep -l -e '%s'" % (staging_host) |
515 | sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIRHOST:g'" % (staging_host) | 545 | sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIRHOST:g'" % (staging_host) |
546 | elif bb.data.inherits_class('cross', d) or bb.data.inherits_class('crosssdk', d): | ||
547 | sstate_grep_cmd = "grep -l -e '%s' -e '%s'" % (staging_target, staging_host) | ||
548 | sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIRTARGET:g; s:%s:FIXMESTAGINGDIRHOST:g'" % (staging_target, staging_host) | ||
549 | else: | ||
550 | sstate_grep_cmd = "grep -l -e '%s'" % (staging_target) | ||
551 | sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIRTARGET:g'" % (staging_target) | ||
516 | 552 | ||
517 | extra_staging_fixmes = d.getVar('EXTRA_STAGING_FIXMES') or '' | 553 | extra_staging_fixmes = d.getVar('EXTRA_STAGING_FIXMES') or '' |
518 | for fixmevar in extra_staging_fixmes.split(): | 554 | for fixmevar in extra_staging_fixmes.split(): |
@@ -537,14 +573,14 @@ python sstate_hardcode_path () {
537 | sstate_hardcode_cmd = "%s | xargs %s | %s | xargs %s %s" % (sstate_scan_cmd, sstate_grep_cmd, sstate_filelist_cmd, xargs_no_empty_run_cmd, sstate_sed_cmd) | 573 | sstate_hardcode_cmd = "%s | xargs %s | %s | xargs %s %s" % (sstate_scan_cmd, sstate_grep_cmd, sstate_filelist_cmd, xargs_no_empty_run_cmd, sstate_sed_cmd) |
538 | 574 | ||
539 | bb.note("Removing hardcoded paths from sstate package: '%s'" % (sstate_hardcode_cmd)) | 575 | bb.note("Removing hardcoded paths from sstate package: '%s'" % (sstate_hardcode_cmd)) |
540 | subprocess.call(sstate_hardcode_cmd, shell=True) | 576 | subprocess.check_call(sstate_hardcode_cmd, shell=True, cwd=sstate_builddir) |
541 | 577 | ||
542 | # If the fixmefn is empty, remove it.. | 578 | # If the fixmefn is empty, remove it.. |
543 | if os.stat(fixmefn).st_size == 0: | 579 | if os.stat(fixmefn).st_size == 0: |
544 | os.remove(fixmefn) | 580 | os.remove(fixmefn) |
545 | else: | 581 | else: |
546 | bb.note("Replacing absolute paths in fixmepath file: '%s'" % (sstate_filelist_relative_cmd)) | 582 | bb.note("Replacing absolute paths in fixmepath file: '%s'" % (sstate_filelist_relative_cmd)) |
547 | subprocess.call(sstate_filelist_relative_cmd, shell=True) | 583 | subprocess.check_call(sstate_filelist_relative_cmd, shell=True) |
548 | } | 584 | } |
549 | 585 | ||
550 | def sstate_package(ss, d): | 586 | def sstate_package(ss, d): |
@@ -560,6 +596,8 @@ def sstate_package(ss, d):
560 | if not link.startswith(tmpdir): | 596 | if not link.startswith(tmpdir): |
561 | return | 597 | return |
562 | 598 | ||
599 | #base = os.path.relpath(link, os.path.dirname(path)) | ||
600 | |||
563 | depth = outputpath.rpartition(tmpdir)[2].count('/') | 601 | depth = outputpath.rpartition(tmpdir)[2].count('/') |
564 | base = link.partition(tmpdir)[2].strip() | 602 | base = link.partition(tmpdir)[2].strip() |
565 | while depth > 1: | 603 | while depth > 1: |
@@ -594,14 +632,14 @@ def sstate_package(ss, d):
594 | dstpath = srcpath.replace(state[1], state[2]) | 632 | dstpath = srcpath.replace(state[1], state[2]) |
595 | make_relative_symlink(srcpath, dstpath, d) | 633 | make_relative_symlink(srcpath, dstpath, d) |
596 | bb.debug(2, "Preparing tree %s for packaging at %s" % (state[1], sstatebuild + state[0])) | 634 | bb.debug(2, "Preparing tree %s for packaging at %s" % (state[1], sstatebuild + state[0])) |
597 | oe.path.copyhardlinktree(state[1], sstatebuild + state[0]) | 635 | os.rename(state[1], sstatebuild + state[0]) |
598 | 636 | ||
599 | workdir = d.getVar('WORKDIR') | 637 | workdir = d.getVar('WORKDIR') |
600 | for plain in ss['plaindirs']: | 638 | for plain in ss['plaindirs']: |
601 | pdir = plain.replace(workdir, sstatebuild) | 639 | pdir = plain.replace(workdir, sstatebuild) |
602 | bb.utils.mkdirhier(plain) | 640 | bb.utils.mkdirhier(plain) |
603 | bb.utils.mkdirhier(pdir) | 641 | bb.utils.mkdirhier(pdir) |
604 | oe.path.copyhardlinktree(plain, pdir) | 642 | os.rename(plain, pdir) |
605 | 643 | ||
606 | d.setVar('SSTATE_BUILDDIR', sstatebuild) | 644 | d.setVar('SSTATE_BUILDDIR', sstatebuild) |
607 | d.setVar('SSTATE_PKG', sstatepkg) | 645 | d.setVar('SSTATE_PKG', sstatepkg) |
@@ -614,6 +652,8 @@ def sstate_package(ss, d):
614 | 652 | ||
615 | bb.siggen.dump_this_task(sstatepkg + ".siginfo", d) | 653 | bb.siggen.dump_this_task(sstatepkg + ".siginfo", d) |
616 | 654 | ||
655 | d.setVar('SSTATE_INSTDIR', sstatebuild) | ||
656 | |||
617 | return | 657 | return |
618 | 658 | ||
619 | def pstaging_fetch(sstatefetch, sstatepkg, d): | 659 | def pstaging_fetch(sstatefetch, sstatepkg, d): |
@@ -672,14 +712,21 @@ sstate_task_prefunc[dirs] = "${WORKDIR}"
672 | python sstate_task_postfunc () { | 712 | python sstate_task_postfunc () { |
673 | shared_state = sstate_state_fromvars(d) | 713 | shared_state = sstate_state_fromvars(d) |
674 | 714 | ||
675 | sstate_install(shared_state, d) | ||
676 | for intercept in shared_state['interceptfuncs']: | 715 | for intercept in shared_state['interceptfuncs']: |
677 | bb.build.exec_func(intercept, d, (d.getVar("WORKDIR"),)) | 716 | bb.build.exec_func(intercept, d, (d.getVar("WORKDIR"),)) |
717 | |||
678 | omask = os.umask(0o002) | 718 | omask = os.umask(0o002) |
679 | if omask != 0o002: | 719 | if omask != 0o002: |
680 | bb.note("Using umask 0o002 (not %0o) for sstate packaging" % omask) | 720 | bb.note("Using umask 0o002 (not %0o) for sstate packaging" % omask) |
681 | sstate_package(shared_state, d) | 721 | sstate_package(shared_state, d) |
682 | os.umask(omask) | 722 | os.umask(omask) |
723 | |||
724 | sstateinst = d.getVar("SSTATE_INSTDIR") | ||
725 | d.setVar('SSTATE_FIXMEDIR', shared_state['fixmedir']) | ||
726 | |||
727 | sstate_installpkgdir(shared_state, d) | ||
728 | |||
729 | bb.utils.remove(d.getVar("SSTATE_BUILDDIR"), recurse=True) | ||
683 | } | 730 | } |
684 | sstate_task_postfunc[dirs] = "${WORKDIR}" | 731 | sstate_task_postfunc[dirs] = "${WORKDIR}" |
685 | 732 | ||
@@ -704,9 +751,6 @@ sstate_create_package () {
704 | fi | 751 | fi |
705 | chmod 0664 $TFILE | 752 | chmod 0664 $TFILE |
706 | mv -f $TFILE ${SSTATE_PKG} | 753 | mv -f $TFILE ${SSTATE_PKG} |
707 | |||
708 | cd ${WORKDIR} | ||
709 | rm -rf ${SSTATE_BUILDDIR} | ||
710 | } | 754 | } |
711 | 755 | ||
712 | python sstate_sign_package () { | 756 | python sstate_sign_package () { |
@@ -877,7 +921,7 @@ def setscene_depvalid(task, taskdependees, notneeded, d):
877 | bb.debug(2, "Considering setscene task: %s" % (str(taskdependees[task]))) | 921 | bb.debug(2, "Considering setscene task: %s" % (str(taskdependees[task]))) |
878 | 922 | ||
879 | def isNativeCross(x): | 923 | def isNativeCross(x): |
880 | return x.endswith("-native") or "-cross-" in x or "-crosssdk" in x | 924 | return x.endswith("-native") or "-cross-" in x or "-crosssdk" in x or x.endswith("-cross") |
881 | 925 | ||
882 | # We only need to trigger populate_lic through direct dependencies | 926 | # We only need to trigger populate_lic through direct dependencies |
883 | if taskdependees[task][1] == "do_populate_lic": | 927 | if taskdependees[task][1] == "do_populate_lic": |
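The sstate-fixmedir / fixmepath.cmd plumbing above defers relocation of a do_populate_sysroot archive: the fixmepath file (the list of files containing placeholder paths) and the prepared sed command are kept alongside the unpacked component instead of being applied at unpack time, and are replayed when the component is installed into a particular recipe sysroot. A simplified Python sketch of what that replay amounts to, assuming a hypothetical component layout; the real classes drive this through sed and xargs as shown above:

import os

def relocate_component(component_dir, target_sysroot, native_sysroot):
    """Rewrite the FIXMESTAGINGDIR* placeholders in every file listed in the
    component's fixmepath, once the final recipe sysroot paths are known."""
    fixmepath = os.path.join(component_dir, "fixmepath")
    if not os.path.isfile(fixmepath):
        return
    with open(fixmepath) as f:
        relpaths = [line.strip() for line in f if line.strip()]
    for rel in relpaths:
        path = os.path.join(component_dir, rel)
        with open(path) as f:
            data = f.read()
        data = data.replace("FIXMESTAGINGDIRTARGET", target_sysroot)
        data = data.replace("FIXMESTAGINGDIRHOST", native_sysroot)
        with open(path, "w") as f:
            f.write(data)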
diff --git a/meta/classes/staging.bbclass b/meta/classes/staging.bbclass
index 2512ae6f5d..1a4668e5d3 100644
--- a/meta/classes/staging.bbclass
+++ b/meta/classes/staging.bbclass
@@ -235,12 +235,357 @@ do_populate_sysroot[depends] += "${POPULATESYSROOTDEPS}"
235 | SSTATETASKS += "do_populate_sysroot" | 235 | SSTATETASKS += "do_populate_sysroot" |
236 | do_populate_sysroot[cleandirs] = "${SYSROOT_DESTDIR}" | 236 | do_populate_sysroot[cleandirs] = "${SYSROOT_DESTDIR}" |
237 | do_populate_sysroot[sstate-inputdirs] = "${SYSROOT_DESTDIR}" | 237 | do_populate_sysroot[sstate-inputdirs] = "${SYSROOT_DESTDIR}" |
238 | do_populate_sysroot[sstate-outputdirs] = "${STAGING_DIR_HOST}/" | 238 | do_populate_sysroot[sstate-outputdirs] = "${STAGING_DIR}-components/${PACKAGE_ARCH}/${PN}" |
239 | do_populate_sysroot[stamp-extra-info] = "${MACHINE}" | 239 | do_populate_sysroot[sstate-fixmedir] = "${STAGING_DIR}-components/${PACKAGE_ARCH}/${PN}" |
240 | 240 | ||
241 | python do_populate_sysroot_setscene () { | 241 | python do_populate_sysroot_setscene () { |
242 | sstate_setscene(d) | 242 | sstate_setscene(d) |
243 | } | 243 | } |
244 | addtask do_populate_sysroot_setscene | 244 | addtask do_populate_sysroot_setscene |
245 | 245 | ||
246 | def staging_copyfile(c, target, fixme, postinsts, stagingdir): | ||
247 | import errno | ||
248 | |||
249 | if c.endswith("/fixmepath"): | ||
250 | fixme.append(c) | ||
251 | return None | ||
252 | if c.endswith("/fixmepath.cmd"): | ||
253 | return None | ||
254 | #bb.warn(c) | ||
255 | dest = c.replace(stagingdir, "") | ||
256 | dest = target + "/" + "/".join(dest.split("/")[3:]) | ||
257 | bb.utils.mkdirhier(os.path.dirname(dest)) | ||
258 | if "/usr/bin/postinst-" in c: | ||
259 | postinsts.append(dest) | ||
260 | if os.path.islink(c): | ||
261 | linkto = os.readlink(c) | ||
262 | if os.path.lexists(dest): | ||
263 | if os.readlink(dest) == linkto: | ||
264 | return dest | ||
265 | bb.fatal("Link %s already exists to a different location?" % dest) | ||
266 | os.symlink(linkto, dest) | ||
267 | #bb.warn(c) | ||
268 | else: | ||
269 | try: | ||
270 | os.link(c, dest) | ||
271 | except OSError as err: | ||
272 | if err.errno == errno.EXDEV: | ||
273 | bb.utils.copyfile(c, dest) | ||
274 | else: | ||
275 | raise | ||
276 | return dest | ||
277 | |||
278 | def staging_copydir(c, target, stagingdir): | ||
279 | dest = c.replace(stagingdir, "") | ||
280 | dest = target + "/" + "/".join(dest.split("/")[3:]) | ||
281 | bb.utils.mkdirhier(dest) | ||
282 | |||
283 | def staging_processfixme(fixme, target, recipesysroot, recipesysrootnative, d): | ||
284 | import subprocess | ||
285 | |||
286 | if not fixme: | ||
287 | return | ||
288 | cmd = "sed -e 's:^[^/]*/:%s/:g' %s | xargs sed -i -e 's:FIXMESTAGINGDIRTARGET:%s:g; s:FIXMESTAGINGDIRHOST:%s:g'" % (target, " ".join(fixme), recipesysroot, recipesysrootnative) | ||
289 | for fixmevar in ['PKGDATA_DIR']: | ||
290 | fixme_path = d.getVar(fixmevar) | ||
291 | cmd += " -e 's:FIXME_%s:%s:g'" % (fixmevar, fixme_path) | ||
292 | bb.note(cmd) | ||
293 | subprocess.check_call(cmd, shell=True) | ||
294 | |||
295 | |||
296 | def staging_populate_sysroot_dir(targetsysroot, nativesysroot, native, d): | ||
297 | import glob | ||
298 | import subprocess | ||
299 | |||
300 | fixme = [] | ||
301 | postinsts = [] | ||
302 | stagingdir = d.getVar("STAGING_DIR") | ||
303 | if native: | ||
304 | pkgarchs = ['${BUILD_ARCH}', '${BUILD_ARCH}_*'] | ||
305 | targetdir = nativesysroot | ||
306 | else: | ||
307 | pkgarchs = ['${MACHINE_ARCH}', '${TUNE_PKGARCH}', 'allarch'] | ||
308 | targetdir = targetsysroot | ||
309 | |||
310 | bb.utils.mkdirhier(targetdir) | ||
311 | for pkgarch in pkgarchs: | ||
312 | for manifest in glob.glob(d.expand("${SSTATE_MANIFESTS}/manifest-%s-*.populate_sysroot" % pkgarch)): | ||
313 | if manifest.endswith("-initial.populate_sysroot"): | ||
314 | # skip glibc-initial and libgcc-initial due to file overlap | ||
315 | continue | ||
316 | tmanifest = targetdir + "/" + os.path.basename(manifest) | ||
317 | if os.path.exists(tmanifest): | ||
318 | continue | ||
319 | try: | ||
320 | os.link(manifest, tmanifest) | ||
321 | except OSError as err: | ||
322 | if err.errno == errno.EXDEV: | ||
323 | bb.utils.copyfile(manifest, tmanifest) | ||
324 | else: | ||
325 | raise | ||
326 | with open(manifest, "r") as f: | ||
327 | for l in f: | ||
328 | l = l.strip() | ||
329 | if l.endswith("/"): | ||
330 | staging_copydir(l, targetdir, stagingdir) | ||
331 | continue | ||
332 | staging_copyfile(l, targetdir, fixme, postinsts, stagingdir) | ||
333 | |||
334 | staging_processfixme(fixme, targetdir, targetsysroot, nativesysroot, d) | ||
335 | for p in postinsts: | ||
336 | subprocess.check_call(p, shell=True) | ||
337 | |||
338 | # | ||
339 | # Manifests here are complicated. The main sysroot area has the unpacked sstate | ||
340 | # which is unrelocated and tracked by the main sstate manifests. Each recipe | ||
341 | # specific sysroot has manifests for each dependency that is installed there. | ||
342 | # The task hash is used to tell whether the data needs to be reinstalled. We | ||
343 | # use a symlink to point to the currently installed hash. There is also a | ||
344 | # "complete" stamp file which is used to mark if installation completed. If | ||
345 | # something fails (e.g. a postinst), this won't get written and we would | ||
346 | # remove and reinstall the dependency. This also means partially installed | ||
347 | # dependencies should get cleaned up correctly. | ||
348 | # | ||
349 | |||
350 | python extend_recipe_sysroot() { | ||
351 | import copy | ||
352 | import subprocess | ||
353 | |||
354 | taskdepdata = d.getVar("BB_TASKDEPDATA", False) | ||
355 | mytaskname = d.getVar("BB_RUNTASK") | ||
356 | #bb.warn(str(taskdepdata)) | ||
357 | pn = d.getVar("PN") | ||
358 | |||
359 | if mytaskname.endswith("_setscene"): | ||
360 | mytaskname = mytaskname.replace("_setscene", "") | ||
361 | |||
362 | start = None | ||
363 | configuredeps = [] | ||
364 | for dep in taskdepdata: | ||
365 | data = taskdepdata[dep] | ||
366 | if data[1] == mytaskname and data[0] == pn: | ||
367 | start = dep | ||
368 | break | ||
369 | if start is None: | ||
370 | bb.fatal("Couldn't find ourself in BB_TASKDEPDATA?") | ||
371 | |||
372 | # We need to figure out which sysroot files we need to expose to this task. | ||
373 | # This needs to match what would get restored from sstate, which is controlled | ||
374 | # ultimately by calls from bitbake to setscene_depvalid(). | ||
375 | # That function expects a setscene dependency tree. We build a dependency tree | ||
376 | # condensed to inter-sstate task dependencies, similar to that used by setscene | ||
377 | # tasks. We can then call into setscene_depvalid() and decide | ||
378 | # which dependencies we can "see" and should expose in the recipe specific sysroot. | ||
379 | setscenedeps = copy.deepcopy(taskdepdata) | ||
380 | |||
381 | start = set([start]) | ||
382 | |||
383 | sstatetasks = d.getVar("SSTATETASKS").split() | ||
384 | |||
385 | def print_dep_tree(deptree): | ||
386 | data = "" | ||
387 | for dep in deptree: | ||
388 | deps = " " + "\n ".join(deptree[dep][3]) + "\n" | ||
389 | data = "%s:\n %s\n %s\n%s %s\n %s\n" % (deptree[dep][0], deptree[dep][1], deptree[dep][2], deps, deptree[dep][4], deptree[dep][5]) | ||
390 | return data | ||
391 | |||
392 | #bb.note("Full dep tree is:\n%s" % print_dep_tree(taskdepdata)) | ||
393 | |||
394 | #bb.note(" start2 is %s" % str(start)) | ||
395 | |||
396 | # If start is an sstate task (like do_package) we need to add in its direct dependencies | ||
397 | # else the code below won't recurse into them. | ||
398 | for dep in set(start): | ||
399 | for dep2 in setscenedeps[dep][3]: | ||
400 | start.add(dep2) | ||
401 | start.remove(dep) | ||
402 | |||
403 | #bb.note(" start3 is %s" % str(start)) | ||
404 | |||
405 | # Create collapsed do_populate_sysroot -> do_populate_sysroot tree | ||
406 | for dep in taskdepdata: | ||
407 | data = setscenedeps[dep] | ||
408 | if data[1] not in sstatetasks: | ||
409 | for dep2 in setscenedeps: | ||
410 | data2 = setscenedeps[dep2] | ||
411 | if dep in data2[3]: | ||
412 | data2[3].update(setscenedeps[dep][3]) | ||
413 | data2[3].remove(dep) | ||
414 | if dep in start: | ||
415 | start.update(setscenedeps[dep][3]) | ||
416 | start.remove(dep) | ||
417 | del setscenedeps[dep] | ||
418 | |||
419 | # Remove circular references | ||
420 | for dep in setscenedeps: | ||
421 | if dep in setscenedeps[dep][3]: | ||
422 | setscenedeps[dep][3].remove(dep) | ||
423 | |||
424 | #bb.note("Computed dep tree is:\n%s" % print_dep_tree(setscenedeps)) | ||
425 | #bb.note(" start is %s" % str(start)) | ||
426 | |||
427 | # Direct dependencies should be present and can be depended upon | ||
428 | for dep in set(start): | ||
429 | if setscenedeps[dep][1] == "do_populate_sysroot": | ||
430 | if dep not in configuredeps: | ||
431 | configuredeps.append(dep) | ||
432 | bb.note("Direct dependencies are %s" % str(configuredeps)) | ||
433 | #bb.note(" or %s" % str(start)) | ||
434 | |||
435 | # Call into setscene_depvalid for each sub-dependency and only copy sysroot files | ||
436 | # for ones that would be restored from sstate. | ||
437 | done = list(start) | ||
438 | next = list(start) | ||
439 | while next: | ||
440 | new = [] | ||
441 | for dep in next: | ||
442 | data = setscenedeps[dep] | ||
443 | for datadep in data[3]: | ||
444 | if datadep in done: | ||
445 | continue | ||
446 | taskdeps = {} | ||
447 | taskdeps[dep] = setscenedeps[dep][:2] | ||
448 | taskdeps[datadep] = setscenedeps[datadep][:2] | ||
449 | retval = setscene_depvalid(datadep, taskdeps, [], d) | ||
450 | if retval: | ||
451 | bb.note("Skipping setscene dependency %s for installation into the sysroot" % datadep) | ||
452 | continue | ||
453 | done.append(datadep) | ||
454 | new.append(datadep) | ||
455 | if datadep not in configuredeps and setscenedeps[datadep][1] == "do_populate_sysroot": | ||
456 | configuredeps.append(datadep) | ||
457 | bb.note("Adding dependency on %s" % setscenedeps[datadep][0]) | ||
458 | else: | ||
459 | bb.note("Following dependency on %s" % setscenedeps[datadep][0]) | ||
460 | next = new | ||
461 | |||
462 | stagingdir = d.getVar("STAGING_DIR") | ||
463 | recipesysroot = d.getVar("RECIPE_SYSROOT") | ||
464 | recipesysrootnative = d.getVar("RECIPE_SYSROOT_NATIVE") | ||
465 | current_variant = d.getVar("BBEXTENDVARIANT") | ||
466 | |||
467 | # Detect bitbake -b usage | ||
468 | nodeps = d.getVar("BB_LIMITEDDEPS") or False | ||
469 | if nodeps: | ||
470 | lock = bb.utils.lockfile(recipesysroot + "/sysroot.lock") | ||
471 | staging_populate_sysroot_dir(recipesysroot, recipesysrootnative, True, d) | ||
472 | staging_populate_sysroot_dir(recipesysroot, recipesysrootnative, False, d) | ||
473 | bb.utils.unlockfile(lock) | ||
474 | |||
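For the bitbake -b case the whole sysroot is populated in one pass, serialised by a lock file so concurrent tasks cannot trample each other's copies. The class uses bb.utils.lockfile()/unlockfile(); the sketch below shows the same pattern with plain fcntl and invented paths:

    import fcntl, os, tempfile

    def with_sysroot_lock(sysroot, action):
        os.makedirs(sysroot, exist_ok=True)
        with open(os.path.join(sysroot, "sysroot.lock"), "w") as lock:
            fcntl.flock(lock, fcntl.LOCK_EX)      # blocks until no other holder
            try:
                return action()
            finally:
                fcntl.flock(lock, fcntl.LOCK_UN)

    with_sysroot_lock(tempfile.mkdtemp(), lambda: print("populate sysroot here"))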
475 | depdir = recipesysrootnative + "/installeddeps" | ||
476 | bb.utils.mkdirhier(depdir) | ||
477 | |||
478 | lock = bb.utils.lockfile(recipesysroot + "/sysroot.lock") | ||
479 | |||
480 | fixme = {} | ||
481 | fixme[''] = [] | ||
482 | fixme['native'] = [] | ||
483 | postinsts = [] | ||
484 | multilibs = {} | ||
485 | |||
486 | for dep in configuredeps: | ||
487 | c = setscenedeps[dep][0] | ||
488 | taskhash = setscenedeps[dep][5] | ||
489 | taskmanifest = depdir + "/" + c + "." + taskhash | ||
490 | if mytaskname in ["do_sdk_depends", "do_populate_sdk_ext"] and c.endswith("-initial"): | ||
491 | bb.note("Skipping initial setscene dependency %s for installation into the sysroot" % c) | ||
492 | continue | ||
493 | if os.path.exists(depdir + "/" + c): | ||
494 | lnk = os.readlink(depdir + "/" + c) | ||
495 | if lnk == c + "." + taskhash and os.path.exists(depdir + "/" + c + ".complete"): | ||
496 | bb.note("%s exists in sysroot, skipping" % c) | ||
497 | continue | ||
498 | else: | ||
499 | bb.note("%s exists in sysroot, but is stale (%s vs. %s), removing." % (c, lnk, c + "." + taskhash)) | ||
500 | sstate_clean_manifest(depdir + "/" + lnk, d) | ||
501 | os.unlink(depdir + "/" + c) | ||
502 | elif os.path.lexists(depdir + "/" + c): | ||
503 | os.unlink(depdir + "/" + c) | ||
504 | |||
505 | os.symlink(c + "." + taskhash, depdir + "/" + c) | ||
506 | |||
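Each dependency is tracked in installeddeps via a symlink named after the recipe and pointing at <recipe>.<taskhash>, plus a separate ".complete" file written only once installation finished; a hash mismatch means the old manifest is cleaned out and the files are copied afresh. A condensed sketch of that marker idiom (it omits the sstate_clean_manifest() step, and all names are illustrative):

    import os

    def needs_install(depdir, pn, taskhash):
        marker = os.path.join(depdir, pn)
        wanted = "%s.%s" % (pn, taskhash)
        if os.path.islink(marker) and os.readlink(marker) == wanted \
                and os.path.exists(marker + ".complete"):
            return False              # same hash and fully installed: skip
        if os.path.lexists(marker):
            os.unlink(marker)         # stale or dangling marker: start over
        os.symlink(wanted, marker)    # record what is about to be installed
        return True

    def mark_complete(depdir, pn):
        # written only after every file and postinst has been handled
        open(os.path.join(depdir, pn + ".complete"), "w").close()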
507 | d2 = d | ||
508 | destsysroot = recipesysroot | ||
509 | variant = '' | ||
510 | if setscenedeps[dep][2].startswith("virtual:multilib"): | ||
511 | variant = setscenedeps[dep][2].split(":")[2] | ||
512 | if variant != current_variant: | ||
513 | if variant not in multilibs: | ||
514 | multilibs[variant] = get_multilib_datastore(variant, d) | ||
515 | d2 = multilibs[variant] | ||
516 | destsysroot = d2.getVar("RECIPE_SYSROOT") | ||
517 | |||
518 | native = False | ||
519 | if c.endswith("-native"): | ||
520 | manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-${BUILD_ARCH}-%s.populate_sysroot" % c) | ||
521 | native = True | ||
522 | elif c.startswith("nativesdk-"): | ||
523 | manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-${SDK_ARCH}_${SDK_OS}-%s.populate_sysroot" % c) | ||
524 | elif "-cross-" in c: | ||
525 | manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-${BUILD_ARCH}_${TARGET_ARCH}-%s.populate_sysroot" % c) | ||
526 | native = True | ||
527 | elif "-crosssdk" in c: | ||
528 | manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-${BUILD_ARCH}_${SDK_ARCH}_${SDK_OS}-%s.populate_sysroot" % c) | ||
529 | native = True | ||
530 | else: | ||
531 | manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-${MACHINE_ARCH}-%s.populate_sysroot" % c) | ||
532 | if not os.path.exists(manifest): | ||
533 | manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-${TUNE_PKGARCH}-%s.populate_sysroot" % c) | ||
534 | if not os.path.exists(manifest): | ||
535 | manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-allarch-%s.populate_sysroot" % c) | ||
536 | if not os.path.exists(manifest): | ||
537 | bb.warn("Manifest %s not found?" % manifest) | ||
538 | else: | ||
539 | with open(manifest, "r") as f, open(taskmanifest, 'w') as m: | ||
540 | for l in f: | ||
541 | l = l.strip() | ||
542 | if l.endswith("/"): | ||
543 | if native: | ||
544 | dest = staging_copydir(l, recipesysrootnative, stagingdir) | ||
545 | else: | ||
546 | dest = staging_copydir(l, destsysroot, stagingdir) | ||
547 | continue | ||
548 | if native: | ||
549 | dest = staging_copyfile(l, recipesysrootnative, fixme['native'], postinsts, stagingdir) | ||
550 | else: | ||
551 | dest = staging_copyfile(l, destsysroot, fixme[''], postinsts, stagingdir) | ||
552 | if dest: | ||
553 | m.write(dest + "\n") | ||
554 | |||
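The manifests replayed here are plain text, one path per line, with directories marked by a trailing slash; every file actually copied is also recorded in a per-taskhash manifest so it can be removed again later. A small sketch of handling that format, with copy_dir/copy_file standing in for staging_copydir()/staging_copyfile() and the native-vs-target destination choice left out:

    def replay_manifest(manifest, copy_dir, copy_file):
        installed = []
        with open(manifest) as f:
            for line in f:
                path = line.strip()
                if not path:
                    continue
                if path.endswith("/"):
                    copy_dir(path)              # directory entry: just create it
                else:
                    dest = copy_file(path)
                    if dest:
                        installed.append(dest)  # remember for the new manifest
        return installed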
555 | for f in fixme: | ||
556 | if f == '': | ||
557 | staging_processfixme(fixme[f], recipesysroot, recipesysroot, recipesysrootnative, d) | ||
558 | elif f == 'native': | ||
559 | staging_processfixme(fixme[f], recipesysrootnative, recipesysroot, recipesysrootnative, d) | ||
560 | else: | ||
561 | staging_processfixme(fixme[f], multilibs[f].getVar("RECIPE_SYSROOT"), recipesysroot, recipesysrootnative, d) | ||
562 | |||
563 | for p in postinsts: | ||
564 | subprocess.check_call(p, shell=True) | ||
565 | |||
566 | for dep in configuredeps: | ||
567 | c = setscenedeps[dep][0] | ||
568 | open(depdir + "/" + c + ".complete", "w").close() | ||
569 | |||
570 | bb.utils.unlockfile(lock) | ||
571 | } | ||
572 | extend_recipe_sysroot[vardepsexclude] += "MACHINE SDK_ARCH BUILD_ARCH SDK_OS BB_TASKDEPDATA" | ||
573 | |||
574 | python do_prepare_recipe_sysroot () { | ||
575 | bb.build.exec_func("extend_recipe_sysroot", d) | ||
576 | } | ||
577 | addtask do_prepare_recipe_sysroot before do_configure after do_fetch | ||
578 | |||
579 | # Clean out the recipe specific sysroots before do_fetch | ||
580 | do_fetch[cleandirs] += "${RECIPE_SYSROOT} ${RECIPE_SYSROOT_NATIVE}" | ||
581 | |||
582 | python staging_taskhandler() { | ||
583 | bbtasks = e.tasklist | ||
584 | for task in bbtasks: | ||
585 | deps = d.getVarFlag(task, "depends") | ||
586 | if deps and "populate_sysroot" in deps: | ||
587 | d.appendVarFlag(task, "prefuncs", " extend_recipe_sysroot") | ||
588 | } | ||
589 | staging_taskhandler[eventmask] = "bb.event.RecipeTaskPreProcess" | ||
590 | addhandler staging_taskhandler | ||
246 | 591 | ||
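staging_taskhandler is what makes the sysroot extension automatic: at recipe parse time it inspects every task's depends flag and, if anything there provides do_populate_sysroot, prepends extend_recipe_sysroot as a prefunc. A pure-Python sketch of that decision, with an invented dict standing in for the task flags the handler reads from the datastore:

    taskflags = {
        "do_configure": "autoconf-native:do_populate_sysroot gcc-cross:do_populate_sysroot",
        "do_compile":   "",
        "do_fetch":     None,
    }
    prefuncs = {task: "extend_recipe_sysroot"
                for task, deps in taskflags.items()
                if deps and "populate_sysroot" in deps}
    print(prefuncs)   # {'do_configure': 'extend_recipe_sysroot'}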
diff --git a/meta/classes/toolchain-scripts.bbclass b/meta/classes/toolchain-scripts.bbclass index 44b4e24255..7b68d20a15 100644 --- a/meta/classes/toolchain-scripts.bbclass +++ b/meta/classes/toolchain-scripts.bbclass | |||
@@ -108,6 +108,7 @@ EOF | |||
108 | TOOLCHAIN_CONFIGSITE_NOCACHE = "${@siteinfo_get_files(d)}" | 108 | TOOLCHAIN_CONFIGSITE_NOCACHE = "${@siteinfo_get_files(d)}" |
109 | TOOLCHAIN_CONFIGSITE_SYSROOTCACHE = "${STAGING_DIR}/${MLPREFIX}${MACHINE}/${target_datadir}/${TARGET_SYS}_config_site.d" | 109 | TOOLCHAIN_CONFIGSITE_SYSROOTCACHE = "${STAGING_DIR}/${MLPREFIX}${MACHINE}/${target_datadir}/${TARGET_SYS}_config_site.d" |
110 | TOOLCHAIN_NEED_CONFIGSITE_CACHE ??= "virtual/${MLPREFIX}libc ncurses" | 110 | TOOLCHAIN_NEED_CONFIGSITE_CACHE ??= "virtual/${MLPREFIX}libc ncurses" |
111 | DEPENDS += "${TOOLCHAIN_NEED_CONFIGSITE_CACHE}" | ||
111 | 112 | ||
112 | #This function creates a site config file | 113 | #This function creates a site config file |
113 | toolchain_create_sdk_siteconfig () { | 114 | toolchain_create_sdk_siteconfig () { |
diff --git a/meta/classes/uninative.bbclass b/meta/classes/uninative.bbclass index 54f63afad1..ba7ca63b8f 100644 --- a/meta/classes/uninative.bbclass +++ b/meta/classes/uninative.bbclass | |||
@@ -91,7 +91,8 @@ def enable_uninative(d): | |||
91 | bb.debug(2, "Enabling uninative") | 91 | bb.debug(2, "Enabling uninative") |
92 | d.setVar("NATIVELSBSTRING", "universal%s" % oe.utils.host_gcc_version(d)) | 92 | d.setVar("NATIVELSBSTRING", "universal%s" % oe.utils.host_gcc_version(d)) |
93 | d.appendVar("SSTATEPOSTUNPACKFUNCS", " uninative_changeinterp") | 93 | d.appendVar("SSTATEPOSTUNPACKFUNCS", " uninative_changeinterp") |
94 | d.prependVar("PATH", "${UNINATIVE_STAGING_DIR}-uninative/${BUILD_ARCH}-linux${bindir_native}:") | 94 | d.setVarFlag("SSTATEPOSTUNPACKFUNCS", "vardepvalueexclude", " uninative_changeinterp") |
95 | d.prependVar("PATH", "${STAGING_DIR}-uninative/${BUILD_ARCH}-linux${bindir_native}:") | ||
95 | 96 | ||
96 | python uninative_changeinterp () { | 97 | python uninative_changeinterp () { |
97 | import subprocess | 98 | import subprocess |
diff --git a/meta/classes/useradd.bbclass b/meta/classes/useradd.bbclass index 82f4d52758..326c04d0b1 100644 --- a/meta/classes/useradd.bbclass +++ b/meta/classes/useradd.bbclass | |||
@@ -3,7 +3,7 @@ inherit useradd_base | |||
3 | # base-passwd-cross provides the default passwd and group files in the | 3 | # base-passwd-cross provides the default passwd and group files in the |
4 | # target sysroot, and shadow -native and -sysroot provide the utilities | 4 | # target sysroot, and shadow -native and -sysroot provide the utilities |
5 | # and support files needed to add and modify user and group accounts | 5 | # and support files needed to add and modify user and group accounts |
6 | DEPENDS_append_class-target = " base-files shadow-native shadow-sysroot shadow" | 6 | DEPENDS_append_class-target = " base-files shadow-native shadow-sysroot shadow base-passwd" |
7 | PACKAGE_WRITE_DEPS += "shadow-native" | 7 | PACKAGE_WRITE_DEPS += "shadow-native" |
8 | 8 | ||
9 | # This preinstall function can be run in four different contexts: | 9 | # This preinstall function can be run in four different contexts: |
@@ -97,13 +97,13 @@ fi | |||
97 | } | 97 | } |
98 | 98 | ||
99 | useradd_sysroot () { | 99 | useradd_sysroot () { |
100 | # Pseudo may (do_install) or may not (do_populate_sysroot_setscene) be running | 100 | # Pseudo may (do_prepare_recipe_sysroot) or may not (do_populate_sysroot_setscene) be running |
101 | # at this point so we're explicit about the environment so pseudo can load if | 101 | # at this point so we're explicit about the environment so pseudo can load if |
102 | # not already present. | 102 | # not already present. |
103 | export PSEUDO="${FAKEROOTENV} PSEUDO_LOCALSTATEDIR=${STAGING_DIR_TARGET}${localstatedir}/pseudo ${STAGING_DIR_NATIVE}${bindir_native}/pseudo" | 103 | export PSEUDO="${FAKEROOTENV} PSEUDO_LOCALSTATEDIR=${STAGING_DIR_TARGET}${localstatedir}/pseudo ${PSEUDO_SYSROOT}${bindir_native}/pseudo" |
104 | 104 | ||
105 | # Explicitly set $D since it isn't set to anything | 105 | # Explicitly set $D since it isn't set to anything |
106 | # before do_install | 106 | # before do_prepare_recipe_sysroot |
107 | D=${STAGING_DIR_TARGET} | 107 | D=${STAGING_DIR_TARGET} |
108 | 108 | ||
109 | # Add groups and users defined for all recipe packages | 109 | # Add groups and users defined for all recipe packages |
@@ -117,17 +117,25 @@ useradd_sysroot () { | |||
117 | useradd_preinst | 117 | useradd_preinst |
118 | } | 118 | } |
119 | 119 | ||
120 | useradd_sysroot_sstate () { | 120 | python useradd_sysroot_sstate () { |
121 | if [ "${BB_CURRENTTASK}" = "package_setscene" -o "${BB_CURRENTTASK}" = "populate_sysroot_setscene" ] | 121 | task = d.getVar("BB_CURRENTTASK") |
122 | then | 122 | if task == "package_setscene": |
123 | useradd_sysroot | 123 | bb.build.exec_func("useradd_sysroot", d) |
124 | fi | 124 | elif task == "prepare_recipe_sysroot": |
125 | scriptfile = d.expand("${RECIPE_SYSROOT}${bindir}/postinst-useradd-${PN}") | ||
126 | bb.utils.mkdirhier(os.path.dirname(scriptfile)) | ||
127 | with open(scriptfile, 'w') as script: | ||
128 | script.write("#!/bin/sh\n") | ||
129 | bb.data.emit_func("useradd_sysroot", script, d) | ||
130 | script.write("useradd_sysroot\n") | ||
131 | os.chmod(scriptfile, 0o755) | ||
132 | bb.build.exec_func("useradd_sysroot", d) | ||
125 | } | 133 | } |
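In the prepare_recipe_sysroot case the shell function is both run immediately and emitted into a postinst-useradd-<PN> script under the recipe sysroot, so the same user/group creation can be replayed later against a freshly staged sysroot. A stand-alone sketch of the script-generation step (write_postinst and its arguments are invented; the class itself relies on bb.data.emit_func()):

    import os

    def write_postinst(scriptfile, emitted_shell_func):
        # emitted_shell_func is the full text of a shell function definition
        os.makedirs(os.path.dirname(scriptfile) or ".", exist_ok=True)
        with open(scriptfile, "w") as script:
            script.write("#!/bin/sh\n")
            script.write(emitted_shell_func)    # define useradd_sysroot()
            script.write("useradd_sysroot\n")   # ...then call it
        os.chmod(scriptfile, 0o755)

    write_postinst("/tmp/postinst-useradd-example",
                   "useradd_sysroot () {\n    echo would add users and groups here\n}\n")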
126 | 134 | ||
127 | userdel_sysroot_sstate () { | 135 | userdel_sysroot_sstate () { |
128 | if test "x${STAGING_DIR_TARGET}" != "x"; then | 136 | if test "x${STAGING_DIR_TARGET}" != "x"; then |
129 | if [ "${BB_CURRENTTASK}" = "clean" ]; then | 137 | if [ "${BB_CURRENTTASK}" = "clean" ]; then |
130 | export PSEUDO="${FAKEROOTENV} PSEUDO_LOCALSTATEDIR=${STAGING_DIR_TARGET}${localstatedir}/pseudo ${STAGING_DIR_NATIVE}${bindir_native}/pseudo" | 138 | export PSEUDO="${FAKEROOTENV} PSEUDO_LOCALSTATEDIR=${STAGING_DIR_TARGET}${localstatedir}/pseudo ${PSEUDO_SYSROOT}${bindir_native}/pseudo" |
131 | OPT="--root ${STAGING_DIR_TARGET}" | 139 | OPT="--root ${STAGING_DIR_TARGET}" |
132 | 140 | ||
133 | # Remove groups and users defined for package | 141 | # Remove groups and users defined for package |
@@ -154,10 +162,10 @@ if test "x${STAGING_DIR_TARGET}" != "x"; then | |||
154 | fi | 162 | fi |
155 | } | 163 | } |
156 | 164 | ||
157 | SSTATECLEANFUNCS_append_class-target = " userdel_sysroot_sstate" | 165 | #SSTATECLEANFUNCS_append_class-target = " userdel_sysroot_sstate" |
158 | 166 | ||
159 | do_install[prefuncs] += "${SYSROOTFUNC}" | 167 | do_prepare_recipe_sysroot[postfuncs] += "${SYSROOTFUNC}" |
160 | SYSROOTFUNC_class-target = "useradd_sysroot" | 168 | SYSROOTFUNC_class-target = "useradd_sysroot_sstate" |
161 | SYSROOTFUNC = "" | 169 | SYSROOTFUNC = "" |
162 | 170 | ||
163 | SSTATEPREINSTFUNCS_append_class-target = " useradd_sysroot_sstate" | 171 | SSTATEPREINSTFUNCS_append_class-target = " useradd_sysroot_sstate" |