Diffstat (limited to 'meta/lib/oe/package_manager.py')
-rw-r--r--  meta/lib/oe/package_manager.py | 425
1 file changed, 425 insertions, 0 deletions
diff --git a/meta/lib/oe/package_manager.py b/meta/lib/oe/package_manager.py
new file mode 100644
index 0000000000..93ca78acb2
--- /dev/null
+++ b/meta/lib/oe/package_manager.py
@@ -0,0 +1,425 @@
from abc import ABCMeta, abstractmethod
import os
import subprocess
import multiprocessing
import re


# This can be used by all PM backends to create the index files in parallel.
def create_index(arg):
    index_cmd = arg

    try:
        bb.note("Executing '%s' ..." % index_cmd)
        subprocess.check_output(index_cmd, shell=True)
    except subprocess.CalledProcessError as e:
        return("Index creation command %s failed with return code %d!" %
               (e.cmd, e.returncode))

    return None

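# Illustrative usage (mirroring write_index() below): a backend builds a list
# of index commands and feeds them to a worker pool, e.g.
#   pool = bb.utils.multiprocessingpool(multiprocessing.cpu_count())
#   results = list(pool.imap(create_index, index_cmds))
# Any non-None result is the error message of a failed command.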

class PackageManager(object):
    """
    This is an abstract class. Do not instantiate this directly.
    """
    __metaclass__ = ABCMeta

    def __init__(self, d):
        self.d = d

    """
    Update the package manager package database.
    """
    @abstractmethod
    def update(self):
        pass

    """
    Install a list of packages. 'pkgs' is a list object. If 'attempt_only' is
    True, installation failures are ignored.
    """
    @abstractmethod
    def install(self, pkgs, attempt_only=False):
        pass

    """
    Remove a list of packages. 'pkgs' is a list object. If 'with_dependencies'
    is False, any dependencies are left in place.
    """
    @abstractmethod
    def remove(self, pkgs, with_dependencies=True):
        pass

    """
    This function creates the index files for the package feed(s), e.g. the
    Packages.gz file in each arch directory under DEPLOY_DIR_DEB for the
    dpkg backend.
    """
    @abstractmethod
    def write_index(self):
        pass

    @abstractmethod
    def remove_packaging_data(self):
        pass

    @abstractmethod
    def list_installed(self, format=None):
        pass

    """
    Install complementary packages based upon the list of currently installed
    packages, e.g. locales, *-dev, *-dbg, etc. Installation of these packages
    is only attempted; if a package does not exist, no error is raised.
    Note: every backend needs to call this function explicitly after the
    normal package installation.
    """
    def install_complementary(self, globs=None):
        # we need to write the list of installed packages to a file because
        # oe-pkgdata-util reads it from a file
        installed_pkgs_file = os.path.join(self.d.getVar('WORKDIR', True),
                                           "installed_pkgs.txt")
        with open(installed_pkgs_file, "w+") as installed_pkgs:
            installed_pkgs.write(self.list_installed("arch"))

        if globs is None:
            globs = self.d.getVar('IMAGE_INSTALL_COMPLEMENTARY', True)

        # Nothing to do if no complementary globs are configured.
        if globs is None:
            return

        split_linguas = set()

        for translation in self.d.getVar('IMAGE_LINGUAS', True).split():
            split_linguas.add(translation)
            split_linguas.add(translation.split('-')[0])

        split_linguas = sorted(split_linguas)

        for lang in split_linguas:
            globs += " *-locale-%s" % lang

        cmd = [bb.utils.which(os.getenv('PATH'), "oe-pkgdata-util"),
               "glob", self.d.getVar('PKGDATA_DIR', True), installed_pkgs_file,
               globs]
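        # The resulting command is roughly (paths illustrative):
        #   oe-pkgdata-util glob <PKGDATA_DIR> <WORKDIR>/installed_pkgs.txt \
        #       "*-dev *-dbg *-locale-en-gb"
        # and it prints the matching installable package names on stdout.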
        try:
            bb.note("Installing complementary packages ...")
            complementary_pkgs = subprocess.check_output(cmd)
        except subprocess.CalledProcessError as e:
            bb.fatal("Could not compute complementary packages list. Command "
                     "%s returned %d!" % (' '.join(cmd), e.returncode))

        self.install(complementary_pkgs.split(), attempt_only=True)


class RpmPM(PackageManager):
    def __init__(self, d):
        super(RpmPM, self).__init__(d)

    """
    TBD
    """


class OpkgPM(PackageManager):
    def __init__(self, d):
        super(OpkgPM, self).__init__(d)

    """
    TBD
    """


class DpkgPM(PackageManager):
    def __init__(self, d, target_rootfs, archs, dpkg_arch):
        super(DpkgPM, self).__init__(d)
        self.target_rootfs = target_rootfs
        self.deploy_dir = self.d.getVar('DEPLOY_DIR_DEB', True)
        self.apt_conf_dir = self.d.expand("${APTCONF_TARGET}/apt")
        self.apt_conf_file = os.path.join(self.apt_conf_dir, "apt.conf")
        self.apt_get_cmd = bb.utils.which(os.getenv('PATH'), "apt-get")

        if d.getVar("NO_RECOMMENDATIONS", True) == "1":
            self.apt_args = '--no-install-recommends'
        else:
            self.apt_args = ''

        os.environ['APT_CONFIG'] = self.apt_conf_file
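        # apt reads APT_CONFIG from the environment, so the apt-get calls
        # below use the generated configuration instead of the build host's
        # /etc/apt.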

        self._create_configs(archs, dpkg_arch)

    """
    This function will change a package's status in the /var/lib/dpkg/status
    file. If 'packages' is None, the new 'status_tag' is applied to all
    packages.
    """
    def mark_packages(self, status_tag, packages=None):
        status_file = self.target_rootfs + "/var/lib/dpkg/status"
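        # For example, with status_tag="installed" a stanza such as
        #   Package: base-files
        #   Status: install ok unpacked
        # ends up as "Status: install ok installed" (and the reverse for an
        # "unpacked" tag). The package name here is just illustrative.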

        with open(status_file, "r") as sf:
            with open(status_file + ".tmp", "w+") as tmp_sf:
                if packages is None:
                    tmp_sf.write(re.sub(r"Package: (.*)\nStatus: (.*)(unpacked|installed)",
                                        r"Package: \1\nStatus: \2%s" % status_tag,
                                        sf.read()))
                else:
                    if not isinstance(packages, list):
                        raise TypeError("'packages' should be a list object")

                    status = sf.read()
                    for pkg in packages:
                        status = re.sub(r"Package: %s\nStatus: (.*)(unpacked|installed)" % pkg,
                                        r"Package: %s\nStatus: \1%s" % (pkg, status_tag),
                                        status)

                    tmp_sf.write(status)

        os.rename(status_file + ".tmp", status_file)

    """
    Run the pre/post installs for package 'package_name'. If 'package_name' is
    None, then run all pre/post install scriptlets.
    """
    def run_pre_post_installs(self, package_name=None):
        info_dir = self.target_rootfs + "/var/lib/dpkg/info"
        suffixes = [(".preinst", "Preinstall"), (".postinst", "Postinstall")]
        status_file = self.target_rootfs + "/var/lib/dpkg/status"
        installed_pkgs = []

        with open(status_file, "r") as status:
            for line in status.read().split('\n'):
                m = re.match("^Package: (.*)", line)
                if m is not None:
                    installed_pkgs.append(m.group(1))

        if package_name is not None and package_name not in installed_pkgs:
            return

        os.environ['D'] = self.target_rootfs
        os.environ['OFFLINE_ROOT'] = self.target_rootfs
        os.environ['IPKG_OFFLINE_ROOT'] = self.target_rootfs
        os.environ['OPKG_OFFLINE_ROOT'] = self.target_rootfs
        os.environ['INTERCEPT_DIR'] = os.path.join(self.d.getVar('WORKDIR', True),
                                                   "intercept_scripts")
        os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE', True)
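        # D/OFFLINE_ROOT/*_OFFLINE_ROOT let the maintainer scripts detect that
        # they are running against an offline rootfs rather than on the
        # target, while INTERCEPT_DIR and NATIVE_ROOT are consumed by the
        # postinstall intercept hooks.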

        failed_pkgs = []
        for pkg_name in installed_pkgs:
            for suffix in suffixes:
                p_full = os.path.join(info_dir, pkg_name + suffix[0])
                if os.path.exists(p_full):
                    try:
                        bb.note("Executing %s for package: %s ..." %
                                (suffix[1].lower(), pkg_name))
                        subprocess.check_output(p_full)
                    except subprocess.CalledProcessError as e:
                        bb.note("%s for package %s failed with %d" %
                                (suffix[1], pkg_name, e.returncode))
                        failed_pkgs.append(pkg_name)
                        break

        if failed_pkgs:
            self.mark_packages("unpacked", failed_pkgs)

    def update(self):
        cmd = "%s update" % self.apt_get_cmd

        try:
            subprocess.check_output(cmd.split())
        except subprocess.CalledProcessError as e:
            bb.fatal("Unable to update the package index files. Command %s "
                     "returned %d" % (e.cmd, e.returncode))

    def install(self, pkgs, attempt_only=False):
        cmd = "%s %s install --force-yes --allow-unauthenticated %s" % \
              (self.apt_get_cmd, self.apt_args, ' '.join(pkgs))

        try:
            bb.note("Installing the following packages: %s" % ' '.join(pkgs))
            subprocess.check_output(cmd.split())
        except subprocess.CalledProcessError as e:
            notify = bb.note if attempt_only else bb.fatal
            notify("Unable to install packages. Command %s returned %d" %
                   (cmd, e.returncode))

        # rename *.dpkg-new files/dirs
        for root, dirs, files in os.walk(self.target_rootfs):
            for dir in dirs:
                new_dir = re.sub(r"\.dpkg-new", "", dir)
                if dir != new_dir:
                    os.rename(os.path.join(root, dir),
                              os.path.join(root, new_dir))

            for file in files:
                new_file = re.sub(r"\.dpkg-new", "", file)
                if file != new_file:
                    os.rename(os.path.join(root, file),
                              os.path.join(root, new_file))


    def remove(self, pkgs, with_dependencies=True):
        if with_dependencies:
            cmd = "%s remove %s" % (self.apt_get_cmd, ' '.join(pkgs))
        else:
            cmd = "%s --admindir=%s/var/lib/dpkg --instdir=%s" \
                  " -r --force-depends %s" % \
                  (bb.utils.which(os.getenv('PATH'), "dpkg"),
                   self.target_rootfs, self.target_rootfs, ' '.join(pkgs))

        try:
            subprocess.check_output(cmd.split())
        except subprocess.CalledProcessError as e:
            bb.fatal("Unable to remove packages. Command %s "
                     "returned %d" % (e.cmd, e.returncode))

    def write_index(self):
        tmpdir = self.d.getVar('TMPDIR', True)
        if os.path.exists(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN")):
            return

        arch_list = []
        pkg_archs = self.d.getVar('PACKAGE_ARCHS', True)
        if pkg_archs is not None:
            arch_list = pkg_archs.split()
        sdk_pkg_archs = self.d.getVar('SDK_PACKAGE_ARCHS', True)
        if sdk_pkg_archs is not None:
            arch_list += sdk_pkg_archs.split()

        dpkg_scanpackages = bb.utils.which(os.getenv('PATH'), "dpkg-scanpackages")
        gzip = bb.utils.which(os.getenv('PATH'), "gzip")

        index_cmds = []
        deb_dirs_found = False
        for arch in arch_list:
            arch_dir = os.path.join(self.deploy_dir, arch)
            if not os.path.isdir(arch_dir):
                continue

            with open(os.path.join(arch_dir, "Release"), "w+") as release:
                release.write("Label: %s\n" % arch)

            index_cmds.append("cd %s; %s . | %s > Packages.gz" %
                              (arch_dir, dpkg_scanpackages, gzip))
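            # Each queued command is roughly (paths illustrative):
            #   cd <DEPLOY_DIR_DEB>/<arch>; dpkg-scanpackages . | gzip > Packages.gz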

            deb_dirs_found = True

        if not deb_dirs_found:
            bb.fatal("There are no packages in %s" % self.deploy_dir)

        nproc = multiprocessing.cpu_count()
        pool = bb.utils.multiprocessingpool(nproc)
        results = list(pool.imap(create_index, index_cmds))
        pool.close()
        pool.join()

        for result in results:
            if result is not None:
                bb.fatal(result)

        open(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN"), "w+").close()

    def _create_configs(self, archs, dpkg_arch):
        dpkg_arch = re.sub("_", "-", dpkg_arch)

        if os.path.exists(self.apt_conf_dir):
            bb.utils.remove(self.apt_conf_dir, True)

        bb.utils.mkdirhier(self.apt_conf_dir)

        arch_list = []
        for arch in archs.split():
            if not os.path.exists(os.path.join(self.deploy_dir, arch)):
                continue
            arch_list.append(arch)

        with open(os.path.join(self.apt_conf_dir, "preferences"), "w+") as prefs_file:
            priority = 801
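            # 'archs' is normally ordered from least to most specific, so the
            # more machine-specific feeds end up with a higher Pin-Priority.
            # A generated stanza looks like (arch and priority illustrative):
            #   Package: *
            #   Pin: release l=armv5te
            #   Pin-Priority: 806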
            for arch in arch_list:
                prefs_file.write(
                    "Package: *\n"
                    "Pin: release l=%s\n"
                    "Pin-Priority: %d\n\n" % (arch, priority))

                priority += 5

            for pkg in self.d.getVar('PACKAGE_EXCLUDE', True).split():
                prefs_file.write(
                    "Package: %s\n"
                    "Pin: release *\n"
                    "Pin-Priority: -1\n\n" % pkg)

        arch_list.reverse()

        with open(os.path.join(self.apt_conf_dir, "sources.list"), "w+") as sources_file:
            for arch in arch_list:
                sources_file.write("deb file:%s/ ./\n" %
                                   os.path.join(self.deploy_dir, arch))
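        # Each feed directory becomes a flat local repository; a resulting
        # line looks like (path and arch illustrative):
        #   deb file:<DEPLOY_DIR_DEB>/armv5te/ ./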

        with open(self.apt_conf_file, "w+") as apt_conf:
            with open(self.d.expand("${STAGING_ETCDIR_NATIVE}/apt/apt.conf.sample")) as apt_conf_sample:
                for line in apt_conf_sample.read().split("\n"):
                    line = re.sub("Architecture \".*\";",
                                  "Architecture \"%s\";" % dpkg_arch, line)
                    line = re.sub("#ROOTFS#", self.target_rootfs, line)
                    line = re.sub("#APTCONF#", self.apt_conf_dir, line)

                    apt_conf.write(line + "\n")

        target_dpkg_dir = "%s/var/lib/dpkg" % self.target_rootfs
        bb.utils.mkdirhier(os.path.join(target_dpkg_dir, "info"))

        bb.utils.mkdirhier(os.path.join(target_dpkg_dir, "updates"))

        open(os.path.join(target_dpkg_dir, "status"), "w+").close()
        open(os.path.join(target_dpkg_dir, "available"), "w+").close()

    def remove_packaging_data(self):
        bb.utils.remove(os.path.join(self.target_rootfs,
                                     self.d.getVar('opkglibdir', True)), True)
        bb.utils.remove(self.target_rootfs + "/var/lib/dpkg/", True)

    def fix_broken_dependencies(self):
        cmd = "%s %s -f install" % (self.apt_get_cmd, self.apt_args)

        try:
            subprocess.check_output(cmd.split())
        except subprocess.CalledProcessError as e:
            bb.fatal("Cannot fix broken dependencies. Command %s "
                     "returned %d" % (cmd, e.returncode))

    def list_installed(self, format=None):
        cmd = [bb.utils.which(os.getenv('PATH'), "dpkg-query"),
               "--admindir=%s/var/lib/dpkg" % self.target_rootfs,
               "-W"]

        if format == "arch":
            cmd.append("-f=${Package} ${PackageArch}\n")
        elif format == "file":
            cmd.append("-f=${Package} ${Package}_${Version}_${Architecture}.deb ${PackageArch}\n")
        elif format == "ver":
            cmd.append("-f=${Package} ${PackageArch} ${Version}\n")
        else:
            cmd.append("-f=${Package}")
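        # With format="arch" each output line is "<package> <arch>", e.g.
        # "base-files armv5te"; "file" additionally lists the expected .deb
        # file name and "ver" the version (example values are illustrative).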

        try:
            output = subprocess.check_output(cmd).strip()
        except subprocess.CalledProcessError as e:
            bb.fatal("Cannot get the installed packages list. Command %s "
                     "returned %d" % (' '.join(cmd), e.returncode))

        if format == "file":
            tmp_output = ""
            for line in output.split('\n'):
                pkg, pkg_file, pkg_arch = line.split()
                full_path = os.path.join(self.deploy_dir, pkg_arch, pkg_file)
                if os.path.exists(full_path):
                    tmp_output += "%s %s %s\n" % (pkg, full_path, pkg_arch)
                else:
                    tmp_output += "%s %s %s\n" % (pkg, pkg_file, pkg_arch)

            output = tmp_output

        return output

if __name__ == "__main__":
    """
    We should be able to run this as a standalone script, from outside the
    bitbake environment.
    """
    """
    TBD
    """