author     Richard Purdie <richard.purdie@linuxfoundation.org>    2019-06-28 16:45:53 +0100
committer  Richard Purdie <richard.purdie@linuxfoundation.org>    2019-06-30 22:40:52 +0100
commit     99206cb7b41be7a124ea8339bc891077f02128a2 (patch)
tree       ffbcbd35a6d56ecb5b731cb7438c188a1bb8e919
parent     b9bf7d26827fc12d94981efc4920ac367c3c9fce (diff)
download   poky-99206cb7b41be7a124ea8339bc891077f02128a2.tar.gz
package: Build pkgdata specific to the current recipe
This switches the code to build pkgdata specific to the current recipe, which means it is filtered to the recipe's dependencies and can perform better as we can drop the lockfile.

It uses a similar method to the staging code to do this, using BB_TASKDEPDATA to construct a list of packagedata task output which this recipe should "see". The original pkgdata store is left unaltered so existing code continues to work.

The lock file was there to prevent files disappearing as they were read or as directories were listed. Since we have a copy of the data and only access output from completed tasks (as per their manifests), we can remove the lock. The lock was causing starvation issues on systems with high parallelism.

There was also a potential determinism problem, as the current code could "see" data from recipes which it doesn't depend upon.

[YOCTO #13412]

(From OE-Core rev: 1951132576bfb95675b4879287f8b3b7c47524fa)

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
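For illustration only, here is a minimal standalone sketch of the idea described above: walk a BB_TASKDEPDATA-style graph from this recipe's do_package task and collect the do_packagedata tasks reachable from it, i.e. the pkgdata this recipe is allowed to "see". The task IDs and graph below are made up, only the indices the patch itself uses (0 = PN, 1 = task name, 3 = dependency task IDs) are relied upon, and the real class added below (package_pkgdata.bbclass) additionally collapses the graph to sstate tasks and consults setscene_depvalid() before copying manifests.

    # Minimal sketch: a made-up stand-in for BB_TASKDEPDATA. Each entry is keyed by a
    # task ID; only index 0 (PN), 1 (task name) and 3 (dependency task IDs) are used,
    # everything else is omitted (None).
    taskdepdata = {
        "app.bb:do_package":           ["app",  "do_package",          None, {"liba.bb:do_packagedata", "libb.bb:do_populate_sysroot"}],
        "liba.bb:do_packagedata":      ["liba", "do_packagedata",      None, set()],
        "libb.bb:do_populate_sysroot": ["libb", "do_populate_sysroot", None, {"libc.bb:do_packagedata"}],
        "libc.bb:do_packagedata":      ["libc", "do_packagedata",      None, set()],
    }

    def visible_packagedata(taskdepdata, pn, taskname="do_package"):
        # Find our own task, then walk its dependency graph and record every
        # do_packagedata task we can reach -- the pkgdata this recipe may "see".
        start = [tid for tid, data in taskdepdata.items()
                 if data[0] == pn and data[1] == taskname]
        seen = set(start)
        queue = list(start)
        found = []
        while queue:
            tid = queue.pop()
            for dep in taskdepdata[tid][3]:
                if dep in seen:
                    continue
                seen.add(dep)
                queue.append(dep)
                if taskdepdata[dep][1] == "do_packagedata":
                    found.append(taskdepdata[dep][0])
        return found

    print(visible_packagedata(taskdepdata, "app"))    # e.g. ['liba', 'libc']

In the class below, the same reachability question is answered via setscene_depvalid(), so only dependencies whose packagedata would actually be restored from sstate are copied into ${WORKDIR}/pkgdata-sysroot.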
-rw-r--r--  meta/classes/package.bbclass           16
-rw-r--r--  meta/classes/package_pkgdata.bbclass  167
-rw-r--r--  scripts/lib/recipetool/create.py        4
3 files changed, 173 insertions, 14 deletions
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass
index 70babb3812..8adf6e1650 100644
--- a/meta/classes/package.bbclass
+++ b/meta/classes/package.bbclass
@@ -40,6 +40,7 @@
 
 inherit packagedata
 inherit chrpath
+inherit package_pkgdata
 
 # Need the package_qa_handle_error() in insane.bbclass
 inherit insane
@@ -1571,7 +1572,7 @@ python package_do_filedeps() {
         d.setVar("FILERPROVIDESFLIST_" + pkg, " ".join(provides_files[pkg]))
 }
 
-SHLIBSDIRS = "${PKGDATA_DIR}/${MLPREFIX}shlibs2"
+SHLIBSDIRS = "${WORKDIR_PKGDATA}/${MLPREFIX}shlibs2"
 SHLIBSWORKDIR = "${PKGDESTWORK}/${MLPREFIX}shlibs2"
 
 python package_do_shlibs() {
@@ -1729,10 +1730,7 @@ python package_do_shlibs() {
 
     needed = {}
 
-    # Take shared lock since we're only reading, not writing
-    lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"), True)
     shlib_provider = oe.package.read_shlib_providers(d)
-    bb.utils.unlockfile(lf)
 
     for pkg in shlib_pkgs:
         private_libs = d.getVar('PRIVATE_LIBS_' + pkg) or d.getVar('PRIVATE_LIBS') or ""
@@ -1918,9 +1916,6 @@ python package_do_pkgconfig () {
                 f.write('%s\n' % p)
             f.close()
 
-    # Take shared lock since we're only reading, not writing
-    lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"), True)
-
     # Go from least to most specific since the last one found wins
     for dir in reversed(shlibs_dirs):
         if not os.path.exists(dir):
@@ -1936,8 +1931,6 @@ python package_do_pkgconfig () {
                 for l in lines:
                     pkgconfig_provided[pkg].append(l.rstrip())
 
-    bb.utils.unlockfile(lf)
-
     for pkg in packages.split():
         deps = []
         for n in pkgconfig_needed[pkg]:
@@ -2134,6 +2127,7 @@ def gen_packagevar(d):
 PACKAGE_PREPROCESS_FUNCS ?= ""
 # Functions for setting up PKGD
 PACKAGEBUILDPKGD ?= " \
+                package_prepare_pkgdata \
                 perform_packagecopy \
                 ${PACKAGE_PREPROCESS_FUNCS} \
                 split_and_strip_files \
@@ -2261,12 +2255,8 @@ do_packagedata () {
 addtask packagedata before do_build after do_package
 
 SSTATETASKS += "do_packagedata"
-# PACKAGELOCK protects readers of PKGDATA_DIR against writes
-# whilst code is reading in do_package
-PACKAGELOCK = "${STAGING_DIR}/package-output.lock"
 do_packagedata[sstate-inputdirs] = "${PKGDESTWORK}"
 do_packagedata[sstate-outputdirs] = "${PKGDATA_DIR}"
-do_packagedata[sstate-lockfile] = "${PACKAGELOCK}"
 do_packagedata[stamp-extra-info] = "${MACHINE_ARCH}"
 
 python do_packagedata_setscene () {
diff --git a/meta/classes/package_pkgdata.bbclass b/meta/classes/package_pkgdata.bbclass
new file mode 100644
index 0000000000..18b7ed62e0
--- /dev/null
+++ b/meta/classes/package_pkgdata.bbclass
@@ -0,0 +1,167 @@
+WORKDIR_PKGDATA = "${WORKDIR}/pkgdata-sysroot"
+
+def package_populate_pkgdata_dir(pkgdatadir, d):
+    import glob
+
+    postinsts = []
+    seendirs = set()
+    stagingdir = d.getVar("PKGDATA_DIR")
+    pkgarchs = ['${MACHINE_ARCH}']
+    pkgarchs = pkgarchs + list(reversed(d.getVar("PACKAGE_EXTRA_ARCHS").split()))
+    pkgarchs.append('allarch')
+
+    bb.utils.mkdirhier(pkgdatadir)
+    for pkgarch in pkgarchs:
+        for manifest in glob.glob(d.expand("${SSTATE_MANIFESTS}/manifest-%s-*.packagedata" % pkgarch)):
+            with open(manifest, "r") as f:
+                for l in f:
+                    l = l.strip()
+                    dest = l.replace(stagingdir, "")
+                    if l.endswith("/"):
+                        staging_copydir(l, pkgdatadir, dest, seendirs)
+                        continue
+                    try:
+                        staging_copyfile(l, pkgdatadir, dest, postinsts, seendirs)
+                    except FileExistsError:
+                        continue
+
+python package_prepare_pkgdata() {
+    import copy
+    import glob
+
+    taskdepdata = d.getVar("BB_TASKDEPDATA", False)
+    mytaskname = d.getVar("BB_RUNTASK")
+    if mytaskname.endswith("_setscene"):
+        mytaskname = mytaskname.replace("_setscene", "")
+    workdir = d.getVar("WORKDIR")
+    pn = d.getVar("PN")
+    stagingdir = d.getVar("PKGDATA_DIR")
+    pkgdatadir = d.getVar("WORKDIR_PKGDATA")
+
+    # Detect bitbake -b usage
+    nodeps = d.getVar("BB_LIMITEDDEPS") or False
+    if nodeps:
+        staging_package_populate_pkgdata_dir(pkgdatadir, d)
+        return
+
+    start = None
+    configuredeps = []
+    for dep in taskdepdata:
+        data = taskdepdata[dep]
+        if data[1] == mytaskname and data[0] == pn:
+            start = dep
+            break
+    if start is None:
+        bb.fatal("Couldn't find ourself in BB_TASKDEPDATA?")
+
+    # We need to figure out which sysroot files we need to expose to this task.
+    # This needs to match what would get restored from sstate, which is controlled
+    # ultimately by calls from bitbake to setscene_depvalid().
+    # That function expects a setscene dependency tree. We build a dependency tree
+    # condensed to inter-sstate task dependencies, similar to that used by setscene
+    # tasks. We can then call into setscene_depvalid() and decide
+    # which dependencies we can "see" and should expose in the recipe specific sysroot.
+    setscenedeps = copy.deepcopy(taskdepdata)
+
+    start = set([start])
+
+    sstatetasks = d.getVar("SSTATETASKS").split()
+    # Add recipe specific tasks referenced by setscene_depvalid()
+    sstatetasks.append("do_stash_locale")
+
+    # If start is an sstate task (like do_package) we need to add in its direct dependencies
+    # else the code below won't recurse into them.
+    for dep in set(start):
+        for dep2 in setscenedeps[dep][3]:
+            start.add(dep2)
+        start.remove(dep)
+
+    # Create collapsed do_populate_sysroot -> do_populate_sysroot tree
+    for dep in taskdepdata:
+        data = setscenedeps[dep]
+        if data[1] not in sstatetasks:
+            for dep2 in setscenedeps:
+                data2 = setscenedeps[dep2]
+                if dep in data2[3]:
+                    data2[3].update(setscenedeps[dep][3])
+                    data2[3].remove(dep)
+            if dep in start:
+                start.update(setscenedeps[dep][3])
+                start.remove(dep)
+            del setscenedeps[dep]
+
+    # Remove circular references
+    for dep in setscenedeps:
+        if dep in setscenedeps[dep][3]:
+            setscenedeps[dep][3].remove(dep)
+
+    # Direct dependencies should be present and can be depended upon
+    for dep in set(start):
+        if setscenedeps[dep][1] == "do_packagedata":
+            if dep not in configuredeps:
+                configuredeps.append(dep)
+
+    msgbuf = []
+    # Call into setscene_depvalid for each sub-dependency and only copy sysroot files
+    # for ones that would be restored from sstate.
+    done = list(start)
+    next = list(start)
+    while next:
+        new = []
+        for dep in next:
+            data = setscenedeps[dep]
+            for datadep in data[3]:
+                if datadep in done:
+                    continue
+                taskdeps = {}
+                taskdeps[dep] = setscenedeps[dep][:2]
+                taskdeps[datadep] = setscenedeps[datadep][:2]
+                retval = setscene_depvalid(datadep, taskdeps, [], d, msgbuf)
+                done.append(datadep)
+                new.append(datadep)
+                if retval:
+                    msgbuf.append("Skipping setscene dependency %s" % datadep)
+                    continue
+                if datadep not in configuredeps and setscenedeps[datadep][1] == "do_packagedata":
+                    configuredeps.append(datadep)
+                    msgbuf.append("Adding dependency on %s" % setscenedeps[datadep][0])
+                else:
+                    msgbuf.append("Following dependency on %s" % setscenedeps[datadep][0])
+        next = new
+
+    # This logging is too verbose for day to day use sadly
+    #bb.debug(2, "\n".join(msgbuf))
+
+    seendirs = set()
+    postinsts = []
+    multilibs = {}
+    manifests = {}
+
+    msg_adding = []
+
+    for dep in configuredeps:
+        c = setscenedeps[dep][0]
+        msg_adding.append(c)
+
+        manifest, d2 = oe.sstatesig.find_sstate_manifest(c, setscenedeps[dep][2], "packagedata", d, multilibs)
+        destsysroot = pkgdatadir
+
+        if manifest:
+            targetdir = destsysroot
+            with open(manifest, "r") as f:
+                manifests[dep] = manifest
+                for l in f:
+                    l = l.strip()
+                    dest = targetdir + l.replace(stagingdir, "")
+                    if l.endswith("/"):
+                        staging_copydir(l, targetdir, dest, seendirs)
+                        continue
+                    staging_copyfile(l, targetdir, dest, postinsts, seendirs)
+
+    bb.note("Installed into pkgdata-sysroot: %s" % str(msg_adding))
+
+}
+package_prepare_pkgdata[cleandirs] = "${WORKDIR_PKGDATA}"
+package_prepare_pkgdata[vardepsexclude] += "MACHINE_ARCH PACKAGE_EXTRA_ARCHS SDK_ARCH BUILD_ARCH SDK_OS BB_TASKDEPDATA"
+
+
diff --git a/scripts/lib/recipetool/create.py b/scripts/lib/recipetool/create.py
index 98277f74c9..f2c671b0f5 100644
--- a/scripts/lib/recipetool/create.py
+++ b/scripts/lib/recipetool/create.py
@@ -60,7 +60,9 @@ class RecipeHandler(object):
         if RecipeHandler.recipelibmap:
             return
         # First build up library->package mapping
-        shlib_providers = oe.package.read_shlib_providers(d)
+        d2 = bb.data.createCopy(d)
+        d2.setVar("WORKDIR_PKGDATA", "${PKGDATA_DIR}")
+        shlib_providers = oe.package.read_shlib_providers(d2)
         libdir = d.getVar('libdir')
         base_libdir = d.getVar('base_libdir')
         libpaths = list(set([base_libdir, libdir]))