author:    Fredrik Gustafsson <fredrik.gustafsson@axis.com>  2020-07-24 16:42:29 +0200
committer: Richard Purdie <richard.purdie@linuxfoundation.org>  2020-07-27 19:58:09 +0100
commit:    34dc8571d2d7c2bb9b5f92e157ab91e9d4f9e992 (patch)
tree:      de10e02d583cd12b2b319b7fdaf2951244fc3695 /meta/lib/oe/package_manager.py
parent:    7b78e1b4de0421de9f876aeb4ff11d6d017a4b7b (diff)
download:  poky-34dc8571d2d7c2bb9b5f92e157ab91e9d4f9e992.tar.gz
package_manager: Move to package_manager/__init__.py
This is part of a refactor that will split the package manager code so
that it's possible to use other package managers in other layers.

(From OE-Core rev: 66ccc7a228bf73df0a4dd846bf2c8e99eaa79580)

Signed-off-by: Fredrik Gustafsson <fredrigu@axis.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Diffstat (limited to 'meta/lib/oe/package_manager.py')
-rw-r--r--  meta/lib/oe/package_manager.py | 1863 ----------------
1 file changed, 0 insertions(+), 1863 deletions(-)
diff --git a/meta/lib/oe/package_manager.py b/meta/lib/oe/package_manager.py
deleted file mode 100644
index 35e5cff073..0000000000
--- a/meta/lib/oe/package_manager.py
+++ /dev/null
@@ -1,1863 +0,0 @@
1#
2# SPDX-License-Identifier: GPL-2.0-only
3#
4
5from abc import ABCMeta, abstractmethod
6import os
7import glob
8import subprocess
9import shutil
10import re
11import collections
12import bb
13import tempfile
14import oe.utils
15import oe.path
16import string
17from oe.gpg_sign import get_signer
18import hashlib
19import fnmatch
20
21# this can be used by all PM backends to create the index files in parallel
22def create_index(arg):
23 index_cmd = arg
24
25 bb.note("Executing '%s' ..." % index_cmd)
26 result = subprocess.check_output(index_cmd, stderr=subprocess.STDOUT, shell=True).decode("utf-8")
27 if result:
28 bb.note(result)
29
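# A minimal usage sketch (command strings hypothetical): a backend collects
# one index command per feed directory and fans them out in parallel, e.g.
#   index_cmds = ["createrepo_c --update -q /path/to/repo-%s" % a
#                 for a in ("noarch", "armv7a")]
#   oe.utils.multiprocess_launch(create_index, index_cmds, d)
# as the Indexer subclasses below do.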
30def opkg_query(cmd_output):
31 """
32 This method parses the output from the package manager and returns
33 a dictionary with information about the packages. This is used
34 when the packages are in deb or ipk format.
35 """
36 verregex = re.compile(r' \([=<>]* [^ )]*\)')
37 output = dict()
38 pkg = ""
39 arch = ""
40 ver = ""
41 filename = ""
42 dep = []
43 prov = []
44 pkgarch = ""
45 for line in cmd_output.splitlines()+['']:
46 line = line.rstrip()
47 if ':' in line:
48 if line.startswith("Package: "):
49 pkg = line.split(": ")[1]
50 elif line.startswith("Architecture: "):
51 arch = line.split(": ")[1]
52 elif line.startswith("Version: "):
53 ver = line.split(": ")[1]
54 elif line.startswith("File: ") or line.startswith("Filename:"):
55 filename = line.split(": ")[1]
56 if "/" in filename:
57 filename = os.path.basename(filename)
58 elif line.startswith("Depends: "):
59 depends = verregex.sub('', line.split(": ")[1])
60 for depend in depends.split(", "):
61 dep.append(depend)
62 elif line.startswith("Recommends: "):
63 recommends = verregex.sub('', line.split(": ")[1])
64 for recommend in recommends.split(", "):
65 dep.append("%s [REC]" % recommend)
66 elif line.startswith("PackageArch: "):
67 pkgarch = line.split(": ")[1]
68 elif line.startswith("Provides: "):
69 provides = verregex.sub('', line.split(": ")[1])
70 for provide in provides.split(", "):
71 prov.append(provide)
72
73 # When there is a blank line, save the package information
74 elif not line:
75 # IPK doesn't include the filename
76 if not filename:
77 filename = "%s_%s_%s.ipk" % (pkg, ver, arch)
78 if pkg:
79 output[pkg] = {"arch":arch, "ver":ver,
80 "filename":filename, "deps": dep, "pkgarch":pkgarch, "provs": prov}
81 pkg = ""
82 arch = ""
83 ver = ""
84 filename = ""
85 dep = []
86 prov = []
87 pkgarch = ""
88
89 return output
90
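# A sketch of the parsing above on one hypothetical 'opkg status' stanza
# (the trailing blank line is what triggers the save):
#   sample = ("Package: libfoo\n"
#             "Version: 1.0-r0\n"
#             "Architecture: armv7a\n"
#             "Depends: libc6 (>= 2.31), libbar\n\n")
#   opkg_query(sample)
#   -> {'libfoo': {'arch': 'armv7a', 'ver': '1.0-r0',
#                  'filename': 'libfoo_1.0-r0_armv7a.ipk',
#                  'deps': ['libc6', 'libbar'], 'pkgarch': '', 'provs': []}}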
91def failed_postinsts_abort(pkgs, log_path):
92 bb.fatal("""Postinstall scriptlets of %s have failed. If the intention is to defer them to first boot,
93then please place them into pkg_postinst_ontarget_${PN}().
94Deferring to first boot via 'exit 1' is no longer supported.
95Details of the failure are in %s.""" %(pkgs, log_path))
96
97def generate_locale_archive(d, rootfs, target_arch, localedir):
98 # Pretty sure we don't need this for locale archive generation but
99 # keeping it to be safe...
100 locale_arch_options = { \
101 "arc": ["--uint32-align=4", "--little-endian"],
102 "arceb": ["--uint32-align=4", "--big-endian"],
103 "arm": ["--uint32-align=4", "--little-endian"],
104 "armeb": ["--uint32-align=4", "--big-endian"],
105 "aarch64": ["--uint32-align=4", "--little-endian"],
106 "aarch64_be": ["--uint32-align=4", "--big-endian"],
107 "sh4": ["--uint32-align=4", "--big-endian"],
108 "powerpc": ["--uint32-align=4", "--big-endian"],
109 "powerpc64": ["--uint32-align=4", "--big-endian"],
110 "powerpc64le": ["--uint32-align=4", "--little-endian"],
111 "mips": ["--uint32-align=4", "--big-endian"],
112 "mipsisa32r6": ["--uint32-align=4", "--big-endian"],
113 "mips64": ["--uint32-align=4", "--big-endian"],
114 "mipsisa64r6": ["--uint32-align=4", "--big-endian"],
115 "mipsel": ["--uint32-align=4", "--little-endian"],
116 "mipsisa32r6el": ["--uint32-align=4", "--little-endian"],
117 "mips64el": ["--uint32-align=4", "--little-endian"],
118 "mipsisa64r6el": ["--uint32-align=4", "--little-endian"],
119 "riscv64": ["--uint32-align=4", "--little-endian"],
120 "riscv32": ["--uint32-align=4", "--little-endian"],
121 "i586": ["--uint32-align=4", "--little-endian"],
122 "i686": ["--uint32-align=4", "--little-endian"],
123 "x86_64": ["--uint32-align=4", "--little-endian"]
124 }
125 if target_arch in locale_arch_options:
126 arch_options = locale_arch_options[target_arch]
127 else:
128 bb.error("locale_arch_options not found for target_arch=" + target_arch)
129 bb.fatal("unknown arch:" + target_arch + " for locale_arch_options")
130
131 # Need to set this so cross-localedef knows where the archive is
132 env = dict(os.environ)
133 env["LOCALEARCHIVE"] = oe.path.join(localedir, "locale-archive")
134
135 for name in sorted(os.listdir(localedir)):
136 path = os.path.join(localedir, name)
137 if os.path.isdir(path):
138 cmd = ["cross-localedef", "--verbose"]
139 cmd += arch_options
140 cmd += ["--add-to-archive", path]
141 subprocess.check_output(cmd, env=env, stderr=subprocess.STDOUT)
142
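# For an aarch64 rootfs the loop above runs, per locale subdirectory
# (paths illustrative):
#   LOCALEARCHIVE=<rootfs>/usr/lib/locale/locale-archive \
#   cross-localedef --verbose --uint32-align=4 --little-endian \
#       --add-to-archive <rootfs>/usr/lib/locale/en_GB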
143class Indexer(object, metaclass=ABCMeta):
144 def __init__(self, d, deploy_dir):
145 self.d = d
146 self.deploy_dir = deploy_dir
147
148 @abstractmethod
149 def write_index(self):
150 pass
151
152
153class RpmIndexer(Indexer):
154 def write_index(self):
155 self.do_write_index(self.deploy_dir)
156
157 def do_write_index(self, deploy_dir):
158 if self.d.getVar('PACKAGE_FEED_SIGN') == '1':
159 signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND'))
160 else:
161 signer = None
162
163 createrepo_c = bb.utils.which(os.environ['PATH'], "createrepo_c")
164 result = create_index("%s --update -q %s" % (createrepo_c, deploy_dir))
165 if result:
166 bb.fatal(result)
167
168 # Sign repomd
169 if signer:
170 sig_type = self.d.getVar('PACKAGE_FEED_GPG_SIGNATURE_TYPE')
171 is_ascii_sig = (sig_type.upper() != "BIN")
172 signer.detach_sign(os.path.join(deploy_dir, 'repodata', 'repomd.xml'),
173 self.d.getVar('PACKAGE_FEED_GPG_NAME'),
174 self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE'),
175 armor=is_ascii_sig)
176
177class RpmSubdirIndexer(RpmIndexer):
178 def write_index(self):
179 bb.note("Generating package index for %s" %(self.deploy_dir))
180 self.do_write_index(self.deploy_dir)
181 for entry in os.walk(self.deploy_dir):
182 if os.path.samefile(self.deploy_dir, entry[0]):
183 for dir in entry[1]:
184 if dir != 'repodata':
185 dir_path = oe.path.join(self.deploy_dir, dir)
186 bb.note("Generating package index for %s" %(dir_path))
187 self.do_write_index(dir_path)
188
189class OpkgIndexer(Indexer):
190 def write_index(self):
191 arch_vars = ["ALL_MULTILIB_PACKAGE_ARCHS",
192 "SDK_PACKAGE_ARCHS",
193 ]
194
195 opkg_index_cmd = bb.utils.which(os.getenv('PATH'), "opkg-make-index")
196 if self.d.getVar('PACKAGE_FEED_SIGN') == '1':
197 signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND'))
198 else:
199 signer = None
200
201 if not os.path.exists(os.path.join(self.deploy_dir, "Packages")):
202 open(os.path.join(self.deploy_dir, "Packages"), "w").close()
203
204 index_cmds = set()
205 index_sign_files = set()
206 for arch_var in arch_vars:
207 archs = self.d.getVar(arch_var)
208 if archs is None:
209 continue
210
211 for arch in archs.split():
212 pkgs_dir = os.path.join(self.deploy_dir, arch)
213 pkgs_file = os.path.join(pkgs_dir, "Packages")
214
215 if not os.path.isdir(pkgs_dir):
216 continue
217
218 if not os.path.exists(pkgs_file):
219 open(pkgs_file, "w").close()
220
221 index_cmds.add('%s --checksum md5 --checksum sha256 -r %s -p %s -m %s' %
222 (opkg_index_cmd, pkgs_file, pkgs_file, pkgs_dir))
223
224 index_sign_files.add(pkgs_file)
225
226 if len(index_cmds) == 0:
227 bb.note("There are no packages in %s!" % self.deploy_dir)
228 return
229
230 oe.utils.multiprocess_launch(create_index, index_cmds, self.d)
231
232 if signer:
233 feed_sig_type = self.d.getVar('PACKAGE_FEED_GPG_SIGNATURE_TYPE')
234 is_ascii_sig = (feed_sig_type.upper() != "BIN")
235 for f in index_sign_files:
236 signer.detach_sign(f,
237 self.d.getVar('PACKAGE_FEED_GPG_NAME'),
238 self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE'),
239 armor=is_ascii_sig)
240
241
242class DpkgIndexer(Indexer):
243 def _create_configs(self):
244 bb.utils.mkdirhier(self.apt_conf_dir)
245 bb.utils.mkdirhier(os.path.join(self.apt_conf_dir, "lists", "partial"))
246 bb.utils.mkdirhier(os.path.join(self.apt_conf_dir, "apt.conf.d"))
247 bb.utils.mkdirhier(os.path.join(self.apt_conf_dir, "preferences.d"))
248
249 with open(os.path.join(self.apt_conf_dir, "preferences"),
250 "w") as prefs_file:
251 pass
252 with open(os.path.join(self.apt_conf_dir, "sources.list"),
253 "w+") as sources_file:
254 pass
255
256 with open(self.apt_conf_file, "w") as apt_conf:
257 with open(os.path.join(self.d.expand("${STAGING_ETCDIR_NATIVE}"),
258 "apt", "apt.conf.sample")) as apt_conf_sample:
259 for line in apt_conf_sample.read().split("\n"):
260 line = re.sub(r"#ROOTFS#", "/dev/null", line)
261 line = re.sub(r"#APTCONF#", self.apt_conf_dir, line)
262 apt_conf.write(line + "\n")
263
264 def write_index(self):
265 self.apt_conf_dir = os.path.join(self.d.expand("${APTCONF_TARGET}"),
266 "apt-ftparchive")
267 self.apt_conf_file = os.path.join(self.apt_conf_dir, "apt.conf")
268 self._create_configs()
269
270 os.environ['APT_CONFIG'] = self.apt_conf_file
271
272 pkg_archs = self.d.getVar('PACKAGE_ARCHS')
273 if pkg_archs is not None:
274 arch_list = pkg_archs.split()
275 sdk_pkg_archs = self.d.getVar('SDK_PACKAGE_ARCHS')
276 if sdk_pkg_archs is not None:
277 for a in sdk_pkg_archs.split():
278 if a not in pkg_archs:
279 arch_list.append(a)
280
281 all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS') or "").split()
282 arch_list.extend(arch for arch in all_mlb_pkg_arch_list if arch not in arch_list)
283
284 apt_ftparchive = bb.utils.which(os.getenv('PATH'), "apt-ftparchive")
285 gzip = bb.utils.which(os.getenv('PATH'), "gzip")
286
287 index_cmds = []
288 deb_dirs_found = False
289 for arch in arch_list:
290 arch_dir = os.path.join(self.deploy_dir, arch)
291 if not os.path.isdir(arch_dir):
292 continue
293
294 cmd = "cd %s; PSEUDO_UNLOAD=1 %s packages . > Packages;" % (arch_dir, apt_ftparchive)
295
296 cmd += "%s -fcn Packages > Packages.gz;" % gzip
297
298 with open(os.path.join(arch_dir, "Release"), "w+") as release:
299 release.write("Label: %s\n" % arch)
300
301 cmd += "PSEUDO_UNLOAD=1 %s release . >> Release" % apt_ftparchive
302
303 index_cmds.append(cmd)
304
305 deb_dirs_found = True
306
307 if not deb_dirs_found:
308 bb.note("There are no packages in %s" % self.deploy_dir)
309 return
310
311 oe.utils.multiprocess_launch(create_index, index_cmds, self.d)
312 if self.d.getVar('PACKAGE_FEED_SIGN') == '1':
313 raise NotImplementedError('Package feed signing not implemented for dpkg')
314
315
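# Each per-arch command built in write_index() above is a small shell
# pipeline of the form (paths illustrative):
#   cd DEPLOY_DIR_DEB/armv7a; PSEUDO_UNLOAD=1 apt-ftparchive packages . > Packages;
#   gzip -fcn Packages > Packages.gz;
#   PSEUDO_UNLOAD=1 apt-ftparchive release . >> Release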
316
317class PkgsList(object, metaclass=ABCMeta):
318 def __init__(self, d, rootfs_dir):
319 self.d = d
320 self.rootfs_dir = rootfs_dir
321
322 @abstractmethod
323 def list_pkgs(self):
324 pass
325
326class RpmPkgsList(PkgsList):
327 def list_pkgs(self):
328 return RpmPM(self.d, self.rootfs_dir, self.d.getVar('TARGET_VENDOR'), needfeed=False).list_installed()
329
330class OpkgPkgsList(PkgsList):
331 def __init__(self, d, rootfs_dir, config_file):
332 super(OpkgPkgsList, self).__init__(d, rootfs_dir)
333
334 self.opkg_cmd = bb.utils.which(os.getenv('PATH'), "opkg")
335 self.opkg_args = "-f %s -o %s " % (config_file, rootfs_dir)
336 self.opkg_args += self.d.getVar("OPKG_ARGS")
337
338 def list_pkgs(self, format=None):
339 cmd = "%s %s status" % (self.opkg_cmd, self.opkg_args)
340
341 # opkg returns success even when it printed some
342 # "Collected errors:" report to stderr. Mixing stderr into
343 # stdout then leads to random failures later on when
344 # parsing the output. To avoid this we need to collect both
345 # output streams separately and check for empty stderr.
346 p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
347 cmd_output, cmd_stderr = p.communicate()
348 cmd_output = cmd_output.decode("utf-8")
349 cmd_stderr = cmd_stderr.decode("utf-8")
350 if p.returncode or cmd_stderr:
351 bb.fatal("Cannot get the installed packages list. Command '%s' "
352 "returned %d and stderr:\n%s" % (cmd, p.returncode, cmd_stderr))
353
354 return opkg_query(cmd_output)
355
356
357class DpkgPkgsList(PkgsList):
358
359 def list_pkgs(self):
360 cmd = [bb.utils.which(os.getenv('PATH'), "dpkg-query"),
361 "--admindir=%s/var/lib/dpkg" % self.rootfs_dir,
362 "-W"]
363
364 cmd.append("-f=Package: ${Package}\nArchitecture: ${PackageArch}\nVersion: ${Version}\nFile: ${Package}_${Version}_${Architecture}.deb\nDepends: ${Depends}\nRecommends: ${Recommends}\nProvides: ${Provides}\n\n")
365
366 try:
367 cmd_output = subprocess.check_output(cmd, stderr=subprocess.STDOUT).strip().decode("utf-8")
368 except subprocess.CalledProcessError as e:
369 bb.fatal("Cannot get the installed packages list. Command '%s' "
370 "returned %d:\n%s" % (' '.join(cmd), e.returncode, e.output.decode("utf-8")))
371
372 return opkg_query(cmd_output)
373
374
375class PackageManager(object, metaclass=ABCMeta):
376 """
377 This is an abstract class. Do not instantiate this directly.
378 """
379
380 def __init__(self, d, target_rootfs):
381 self.d = d
382 self.target_rootfs = target_rootfs
383 self.deploy_dir = None
384 self.deploy_lock = None
385 self._initialize_intercepts()
386
387 def _initialize_intercepts(self):
388 bb.note("Initializing intercept dir for %s" % self.target_rootfs)
389 # As there might be more than one instance of PackageManager operating at the same time
390 # we need to isolate the intercept_scripts directories from each other,
391 # hence the ugly hash digest in dir name.
392 self.intercepts_dir = os.path.join(self.d.getVar('WORKDIR'), "intercept_scripts-%s" %
393 (hashlib.sha256(self.target_rootfs.encode()).hexdigest()))
394
395 postinst_intercepts = (self.d.getVar("POSTINST_INTERCEPTS") or "").split()
396 if not postinst_intercepts:
397 postinst_intercepts_path = self.d.getVar("POSTINST_INTERCEPTS_PATH")
398 if not postinst_intercepts_path:
399 postinst_intercepts_path = self.d.getVar("POSTINST_INTERCEPTS_DIR") or self.d.expand("${COREBASE}/scripts/postinst-intercepts")
400 postinst_intercepts = oe.path.which_wild('*', postinst_intercepts_path)
401
402 bb.debug(1, 'Collected intercepts:\n%s' % ''.join(' %s\n' % i for i in postinst_intercepts))
403 bb.utils.remove(self.intercepts_dir, True)
404 bb.utils.mkdirhier(self.intercepts_dir)
405 for intercept in postinst_intercepts:
406 bb.utils.copyfile(intercept, os.path.join(self.intercepts_dir, os.path.basename(intercept)))
407
408 @abstractmethod
409 def _handle_intercept_failure(self, failed_script):
410 pass
411
412 def _postpone_to_first_boot(self, postinst_intercept_hook):
413 with open(postinst_intercept_hook) as intercept:
414 registered_pkgs = None
415 for line in intercept.read().split("\n"):
416 m = re.match(r"^##PKGS:(.*)", line)
417 if m is not None:
418 registered_pkgs = m.group(1).strip()
419 break
420
421 if registered_pkgs is not None:
422 bb.note("If an image is being built, the postinstalls for the following packages "
423 "will be postponed for first boot: %s" %
424 registered_pkgs)
425
426 # call the backend dependent handler
427 self._handle_intercept_failure(registered_pkgs)
428
429
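# The intercept hooks scanned above declare their owning packages in a
# marker line; a minimal (hypothetical) delayed hook looks like:
#   #!/bin/sh
#   ##PKGS: pkg-a pkg-b
#   update-some-cache "$D"
# so m.group(1).strip() yields "pkg-a pkg-b" for the bb.note() message.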
430 def run_intercepts(self, populate_sdk=None):
431 intercepts_dir = self.intercepts_dir
432
433 bb.note("Running intercept scripts:")
434 os.environ['D'] = self.target_rootfs
435 os.environ['STAGING_DIR_NATIVE'] = self.d.getVar('STAGING_DIR_NATIVE')
436 for script in os.listdir(intercepts_dir):
437 script_full = os.path.join(intercepts_dir, script)
438
439 if script == "postinst_intercept" or not os.access(script_full, os.X_OK):
440 continue
441
442 # we do not want to run any multilib variant of this
443 if script.startswith("delay_to_first_boot"):
444 self._postpone_to_first_boot(script_full)
445 continue
446
447 if populate_sdk == 'host' and self.d.getVar('SDK_OS') == 'mingw32':
448 bb.note("The postinstall intercept hook '%s' could not be executed due to missing wine support, details in %s/log.do_%s"
449 % (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK')))
450 continue
451
452 bb.note("> Executing %s intercept ..." % script)
453
454 try:
455 output = subprocess.check_output(script_full, stderr=subprocess.STDOUT)
456 if output: bb.note(output.decode("utf-8"))
457 except subprocess.CalledProcessError as e:
458 bb.note("Exit code %d. Output:\n%s" % (e.returncode, e.output.decode("utf-8")))
459 if populate_sdk == 'host':
460 bb.fatal("The postinstall intercept hook '%s' failed, details in %s/log.do_%s" % (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK')))
461 elif populate_sdk == 'target':
462 if "qemuwrapper: qemu usermode is not supported" in e.output.decode("utf-8"):
463 bb.note("The postinstall intercept hook '%s' could not be executed due to missing qemu usermode support, details in %s/log.do_%s"
464 % (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK')))
465 else:
466 bb.fatal("The postinstall intercept hook '%s' failed, details in %s/log.do_%s" % (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK')))
467 else:
468 if "qemuwrapper: qemu usermode is not supported" in e.output.decode("utf-8"):
469 bb.note("The postinstall intercept hook '%s' could not be executed due to missing qemu usermode support, details in %s/log.do_%s"
470 % (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK')))
471 self._postpone_to_first_boot(script_full)
472 else:
473 bb.fatal("The postinstall intercept hook '%s' failed, details in %s/log.do_%s" % (script, self.d.getVar('T'), self.d.getVar('BB_CURRENTTASK')))
474
475 @abstractmethod
476 def update(self):
477 """
478 Update the package manager package database.
479 """
480 pass
481
482 @abstractmethod
483 def install(self, pkgs, attempt_only=False):
484 """
485 Install a list of packages. 'pkgs' is a list object. If 'attempt_only' is
486 True, installation failures are ignored.
487 """
488 pass
489
490 @abstractmethod
491 def remove(self, pkgs, with_dependencies=True):
492 """
493 Remove a list of packages. 'pkgs' is a list object. If 'with_dependencies'
494 is False, then any dependencies are left in place.
495 """
496 pass
497
498 @abstractmethod
499 def write_index(self):
500 """
501 This function creates the index files.
502 """
503 pass
504
505 @abstractmethod
506 def remove_packaging_data(self):
507 pass
508
509 @abstractmethod
510 def list_installed(self):
511 pass
512
513 @abstractmethod
514 def extract(self, pkg):
515 """
516 Returns the path to a tmpdir where the contents of a package reside.
517 Deleting the tmpdir is the responsibility of the caller.
518 """
519 pass
520
521 @abstractmethod
522 def insert_feeds_uris(self, feed_uris, feed_base_paths, feed_archs):
523 """
524 Add remote package feeds into repository manager configuration. The parameters
525 for the feeds are set by feed_uris, feed_base_paths and feed_archs.
526 See http://www.yoctoproject.org/docs/current/ref-manual/ref-manual.html#var-PACKAGE_FEED_URIS
527 for their description.
528 """
529 pass
530
531 def install_glob(self, globs, sdk=False):
532 """
533 Install all packages that match a glob.
534 """
535 # TODO don't have sdk here but have a property on the superclass
536 # (and respect in install_complementary)
537 if sdk:
538 pkgdatadir = self.d.expand("${TMPDIR}/pkgdata/${SDK_SYS}")
539 else:
540 pkgdatadir = self.d.getVar("PKGDATA_DIR")
541
542 try:
543 bb.note("Installing globbed packages...")
544 cmd = ["oe-pkgdata-util", "-p", pkgdatadir, "list-pkgs", globs]
545 pkgs = subprocess.check_output(cmd, stderr=subprocess.STDOUT).decode("utf-8")
546 self.install(pkgs.split(), attempt_only=True)
547 except subprocess.CalledProcessError as e:
548 # Return code 1 means no packages matched
549 if e.returncode != 1:
550 bb.fatal("Could not compute globbed packages list. Command "
551 "'%s' returned %d:\n%s" %
552 (' '.join(cmd), e.returncode, e.output.decode("utf-8")))
553
554 def install_complementary(self, globs=None):
555 """
556 Install complementary packages based upon the list of currently installed
557 packages, e.g. locales, *-dev, *-dbg, etc. This will only attempt to install
558 these packages; if they don't exist, no error will occur. Note: every
559 backend needs to call this function explicitly after the normal package
560 installation.
561 """
562 if globs is None:
563 globs = self.d.getVar('IMAGE_INSTALL_COMPLEMENTARY')
564 split_linguas = set()
565
566 for translation in self.d.getVar('IMAGE_LINGUAS').split():
567 split_linguas.add(translation)
568 split_linguas.add(translation.split('-')[0])
569
570 split_linguas = sorted(split_linguas)
571
572 for lang in split_linguas:
573 globs += " *-locale-%s" % lang
574 for complementary_linguas in (self.d.getVar('IMAGE_LINGUAS_COMPLEMENTARY') or "").split():
575 globs += (" " + complementary_linguas) % lang
576
577 if globs is None:
578 return
579
580 # We need to write the list of installed packages to a file because
581 # oe-pkgdata-util reads it from a file
582 with tempfile.NamedTemporaryFile(mode="w+", prefix="installed-pkgs") as installed_pkgs:
583 pkgs = self.list_installed()
584
585 provided_pkgs = set()
586 for pkg in pkgs.values():
587 provided_pkgs |= set(pkg.get('provs', []))
588
589 output = oe.utils.format_pkg_list(pkgs, "arch")
590 installed_pkgs.write(output)
591 installed_pkgs.flush()
592
593 cmd = ["oe-pkgdata-util",
594 "-p", self.d.getVar('PKGDATA_DIR'), "glob", installed_pkgs.name,
595 globs]
596 exclude = self.d.getVar('PACKAGE_EXCLUDE_COMPLEMENTARY')
597 if exclude:
598 cmd.extend(['--exclude=' + '|'.join(exclude.split())])
599 try:
600 bb.note('Running %s' % cmd)
601 complementary_pkgs = subprocess.check_output(cmd, stderr=subprocess.STDOUT).decode("utf-8")
602 complementary_pkgs = set(complementary_pkgs.split())
603 skip_pkgs = sorted(complementary_pkgs & provided_pkgs)
604 install_pkgs = sorted(complementary_pkgs - provided_pkgs)
605 bb.note("Installing complementary packages ... %s (skipped already provided packages %s)" % (
606 ' '.join(install_pkgs),
607 ' '.join(skip_pkgs)))
608 self.install(install_pkgs, attempt_only=True)
609 except subprocess.CalledProcessError as e:
610 bb.fatal("Could not compute complementary packages list. Command "
611 "'%s' returned %d:\n%s" %
612 (' '.join(cmd), e.returncode, e.output.decode("utf-8")))
613
614 target_arch = self.d.getVar('TARGET_ARCH')
615 localedir = oe.path.join(self.target_rootfs, self.d.getVar("libdir"), "locale")
616 if os.path.exists(localedir) and os.listdir(localedir):
617 generate_locale_archive(self.d, self.target_rootfs, target_arch, localedir)
618 # And now delete the binary locales
619 self.remove(fnmatch.filter(self.list_installed(), "glibc-binary-localedata-*"), False)
620
621 def deploy_dir_lock(self):
622 if self.deploy_dir is None:
623 raise RuntimeError("deploy_dir is not set!")
624
625 lock_file_name = os.path.join(self.deploy_dir, "deploy.lock")
626
627 self.deploy_lock = bb.utils.lockfile(lock_file_name)
628
629 def deploy_dir_unlock(self):
630 if self.deploy_lock is None:
631 return
632
633 bb.utils.unlockfile(self.deploy_lock)
634
635 self.deploy_lock = None
636
637 def construct_uris(self, uris, base_paths):
638 """
639 Construct URIs based on the pattern uri/base_path, where 'uri' and
640 'base_path' correspond to each element of the corresponding array
641 argument, leading to len(uris) x len(base_paths) elements in the
642 returned array.
643 """
644 def _append(arr1, arr2, sep='/'):
645 res = []
646 narr1 = [a.rstrip(sep) for a in arr1]
647 narr2 = [a.rstrip(sep).lstrip(sep) for a in arr2]
648 for a1 in narr1:
649 if arr2:
650 for a2 in narr2:
651 res.append("%s%s%s" % (a1, sep, a2))
652 else:
653 res.append(a1)
654 return res
655 return _append(uris, base_paths)
656
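# A quick worked example of the cross product (values hypothetical):
#   construct_uris(["http://feed.example.com/repo/"],
#                  ["ipk/all", "ipk/armv7a"])
#   -> ["http://feed.example.com/repo/ipk/all",
#       "http://feed.example.com/repo/ipk/armv7a"]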
657def create_packages_dir(d, subrepo_dir, deploydir, taskname, filterbydependencies):
658 """
659 Go through our do_package_write_X dependencies and hardlink the packages we depend
660 upon into the repo directory. This prevents us from seeing other packages
661 that may have been built that we don't depend upon, as well as packages for
662 architectures we don't support.
663 """
664 import errno
665
666 taskdepdata = d.getVar("BB_TASKDEPDATA", False)
667 mytaskname = d.getVar("BB_RUNTASK")
668 pn = d.getVar("PN")
669 seendirs = set()
670 multilibs = {}
671
672 bb.utils.remove(subrepo_dir, recurse=True)
673 bb.utils.mkdirhier(subrepo_dir)
674
675 # Detect bitbake -b usage
676 nodeps = d.getVar("BB_LIMITEDDEPS") or False
677 if nodeps or not filterbydependencies:
678 oe.path.symlink(deploydir, subrepo_dir, True)
679 return
680
681 start = None
682 for dep in taskdepdata:
683 data = taskdepdata[dep]
684 if data[1] == mytaskname and data[0] == pn:
685 start = dep
686 break
687 if start is None:
688 bb.fatal("Couldn't find ourself in BB_TASKDEPDATA?")
689 pkgdeps = set()
690 start = [start]
691 seen = set(start)
692 # Support direct dependencies (do_rootfs -> do_package_write_X)
693 # or indirect dependencies within PN (do_populate_sdk_ext -> do_rootfs -> do_package_write_X)
694 while start:
695 next = []
696 for dep2 in start:
697 for dep in taskdepdata[dep2][3]:
698 if taskdepdata[dep][0] != pn:
699 if "do_" + taskname in dep:
700 pkgdeps.add(dep)
701 elif dep not in seen:
702 next.append(dep)
703 seen.add(dep)
704 start = next
705
706 for dep in pkgdeps:
707 c = taskdepdata[dep][0]
708 manifest, d2 = oe.sstatesig.find_sstate_manifest(c, taskdepdata[dep][2], taskname, d, multilibs)
709 if not manifest:
710 bb.fatal("No manifest generated from: %s in %s" % (c, taskdepdata[dep][2]))
711 if not os.path.exists(manifest):
712 continue
713 with open(manifest, "r") as f:
714 for l in f:
715 l = l.strip()
716 deploydir = os.path.normpath(deploydir)
717 if bb.data.inherits_class('packagefeed-stability', d):
718 dest = l.replace(deploydir + "-prediff", "")
719 else:
720 dest = l.replace(deploydir, "")
721 dest = subrepo_dir + dest
722 if l.endswith("/"):
723 if dest not in seendirs:
724 bb.utils.mkdirhier(dest)
725 seendirs.add(dest)
726 continue
727 # Try to hardlink the file, copy if that fails
728 destdir = os.path.dirname(dest)
729 if destdir not in seendirs:
730 bb.utils.mkdirhier(destdir)
731 seendirs.add(destdir)
732 try:
733 os.link(l, dest)
734 except OSError as err:
735 if err.errno == errno.EXDEV:
736 bb.utils.copyfile(l, dest)
737 else:
738 raise
739
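# A hedged sketch of the walk above on toy data; as used here, each
# BB_TASKDEPDATA record is [PN, taskname, recipe file, list of dep ids]:
#   taskdepdata = {
#       "img.bb:do_rootfs": ["img", "do_rootfs", "img.bb",
#                            ["libfoo.bb:do_package_write_rpm"]],
#       "libfoo.bb:do_package_write_rpm":
#           ["libfoo", "do_package_write_rpm", "libfoo.bb", []],
#   }
# Starting from our own task, the breadth-first loop collects every
# out-of-PN "do_package_write_rpm" task into pkgdeps; each one's sstate
# manifest then lists the files hardlinked into subrepo_dir.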
740class RpmPM(PackageManager):
741 def __init__(self,
742 d,
743 target_rootfs,
744 target_vendor,
745 task_name='target',
746 arch_var=None,
747 os_var=None,
748 rpm_repo_workdir="oe-rootfs-repo",
749 filterbydependencies=True,
750 needfeed=True):
751 super(RpmPM, self).__init__(d, target_rootfs)
752 self.target_vendor = target_vendor
753 self.task_name = task_name
754 if arch_var is None:
755 self.archs = self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS').replace("-","_")
756 else:
757 self.archs = self.d.getVar(arch_var).replace("-","_")
758 if task_name == "host":
759 self.primary_arch = self.d.getVar('SDK_ARCH')
760 else:
761 self.primary_arch = self.d.getVar('MACHINE_ARCH')
762
763 if needfeed:
764 self.rpm_repo_dir = oe.path.join(self.d.getVar('WORKDIR'), rpm_repo_workdir)
765 create_packages_dir(self.d, oe.path.join(self.rpm_repo_dir, "rpm"), d.getVar("DEPLOY_DIR_RPM"), "package_write_rpm", filterbydependencies)
766
767 self.saved_packaging_data = self.d.expand('${T}/saved_packaging_data/%s' % self.task_name)
768 if not os.path.exists(self.d.expand('${T}/saved_packaging_data')):
769 bb.utils.mkdirhier(self.d.expand('${T}/saved_packaging_data'))
770 self.packaging_data_dirs = ['etc/rpm', 'etc/rpmrc', 'etc/dnf', 'var/lib/rpm', 'var/lib/dnf', 'var/cache/dnf']
771 self.solution_manifest = self.d.expand('${T}/saved/%s_solution' %
772 self.task_name)
773 if not os.path.exists(self.d.expand('${T}/saved')):
774 bb.utils.mkdirhier(self.d.expand('${T}/saved'))
775
776 def _configure_dnf(self):
777 # libsolv handles 'noarch' internally, we don't need to specify it explicitly
778 archs = [i for i in reversed(self.archs.split()) if i not in ["any", "all", "noarch"]]
779 # This prevents accidental matching against libsolv's built-in policies
780 if len(archs) <= 1:
781 archs = archs + ["bogusarch"]
782 # This architecture needs to be upfront so that packages using it are properly prioritized
783 archs = ["sdk_provides_dummy_target"] + archs
784 confdir = "%s/%s" %(self.target_rootfs, "etc/dnf/vars/")
785 bb.utils.mkdirhier(confdir)
786 open(confdir + "arch", 'w').write(":".join(archs))
787 distro_codename = self.d.getVar('DISTRO_CODENAME')
788 open(confdir + "releasever", 'w').write(distro_codename if distro_codename is not None else '')
789
790 open(oe.path.join(self.target_rootfs, "etc/dnf/dnf.conf"), 'w').write("")
791
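# For self.archs = "all any noarch armv7a cortexa8hf_neon" (hypothetical),
# etc/dnf/vars/arch ends up containing:
#   sdk_provides_dummy_target:cortexa8hf_neon:armv7a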
792
793 def _configure_rpm(self):
794 # We need to configure rpm to use our primary package architecture as the installation architecture,
795 # and to make it compatible with other package architectures that we use.
796 # Otherwise it will refuse to proceed with package installation.
797 platformconfdir = "%s/%s" %(self.target_rootfs, "etc/rpm/")
798 rpmrcconfdir = "%s/%s" %(self.target_rootfs, "etc/")
799 bb.utils.mkdirhier(platformconfdir)
800 open(platformconfdir + "platform", 'w').write("%s-pc-linux" % self.primary_arch)
801 with open(rpmrcconfdir + "rpmrc", 'w') as f:
802 f.write("arch_compat: %s: %s\n" % (self.primary_arch, self.archs if len(self.archs) > 0 else self.primary_arch))
803 f.write("buildarch_compat: %s: noarch\n" % self.primary_arch)
804
805 open(platformconfdir + "macros", 'w').write("%_transaction_color 7\n")
806 if self.d.getVar('RPM_PREFER_ELF_ARCH'):
807 open(platformconfdir + "macros", 'a').write("%%_prefer_color %s" % (self.d.getVar('RPM_PREFER_ELF_ARCH')))
808
809 if self.d.getVar('RPM_SIGN_PACKAGES') == '1':
810 signer = get_signer(self.d, self.d.getVar('RPM_GPG_BACKEND'))
811 pubkey_path = oe.path.join(self.d.getVar('B'), 'rpm-key')
812 signer.export_pubkey(pubkey_path, self.d.getVar('RPM_GPG_NAME'))
813 rpm_bin = bb.utils.which(os.getenv('PATH'), "rpmkeys")
814 cmd = [rpm_bin, '--root=%s' % self.target_rootfs, '--import', pubkey_path]
815 try:
816 subprocess.check_output(cmd, stderr=subprocess.STDOUT)
817 except subprocess.CalledProcessError as e:
818 bb.fatal("Importing GPG key failed. Command '%s' "
819 "returned %d:\n%s" % (' '.join(cmd), e.returncode, e.output.decode("utf-8")))
820
821 def create_configs(self):
822 self._configure_dnf()
823 self._configure_rpm()
824
825 def write_index(self):
826 lockfilename = self.d.getVar('DEPLOY_DIR_RPM') + "/rpm.lock"
827 lf = bb.utils.lockfile(lockfilename, False)
828 RpmIndexer(self.d, self.rpm_repo_dir).write_index()
829 bb.utils.unlockfile(lf)
830
831 def insert_feeds_uris(self, feed_uris, feed_base_paths, feed_archs):
832 from urllib.parse import urlparse
833
834 if feed_uris == "":
835 return
836
837 gpg_opts = ''
838 if self.d.getVar('PACKAGE_FEED_SIGN') == '1':
839 gpg_opts += 'repo_gpgcheck=1\n'
840 gpg_opts += 'gpgkey=file://%s/pki/packagefeed-gpg/PACKAGEFEED-GPG-KEY-%s-%s\n' % (self.d.getVar('sysconfdir'), self.d.getVar('DISTRO'), self.d.getVar('DISTRO_CODENAME'))
841
842 if self.d.getVar('RPM_SIGN_PACKAGES') != '1':
843 gpg_opts += 'gpgcheck=0\n'
844
845 bb.utils.mkdirhier(oe.path.join(self.target_rootfs, "etc", "yum.repos.d"))
846 remote_uris = self.construct_uris(feed_uris.split(), feed_base_paths.split())
847 for uri in remote_uris:
848 repo_base = "oe-remote-repo" + "-".join(urlparse(uri).path.split("/"))
849 if feed_archs is not None:
850 for arch in feed_archs.split():
851 repo_uri = uri + "/" + arch
852 repo_id = "oe-remote-repo" + "-".join(urlparse(repo_uri).path.split("/"))
853 repo_name = "OE Remote Repo:" + " ".join(urlparse(repo_uri).path.split("/"))
854 open(oe.path.join(self.target_rootfs, "etc", "yum.repos.d", repo_base + ".repo"), 'a').write(
855 "[%s]\nname=%s\nbaseurl=%s\n%s\n" % (repo_id, repo_name, repo_uri, gpg_opts))
856 else:
857 repo_name = "OE Remote Repo:" + " ".join(urlparse(uri).path.split("/"))
858 repo_uri = uri
859 open(oe.path.join(self.target_rootfs, "etc", "yum.repos.d", repo_base + ".repo"), 'w').write(
860 "[%s]\nname=%s\nbaseurl=%s\n%s" % (repo_base, repo_name, repo_uri, gpg_opts))
861
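# For feed_uris = "http://example.com/feed" and feed_archs = "noarch"
# (values hypothetical, unsigned feed), the loop writes
# etc/yum.repos.d/oe-remote-repo-feed.repo containing:
#   [oe-remote-repo-feed-noarch]
#   name=OE Remote Repo: feed noarch
#   baseurl=http://example.com/feed/noarch
#   gpgcheck=0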
862 def _prepare_pkg_transaction(self):
863 os.environ['D'] = self.target_rootfs
864 os.environ['OFFLINE_ROOT'] = self.target_rootfs
865 os.environ['IPKG_OFFLINE_ROOT'] = self.target_rootfs
866 os.environ['OPKG_OFFLINE_ROOT'] = self.target_rootfs
867 os.environ['INTERCEPT_DIR'] = self.intercepts_dir
868 os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE')
869
870
871 def install(self, pkgs, attempt_only = False):
872 if len(pkgs) == 0:
873 return
874 self._prepare_pkg_transaction()
875
876 bad_recommendations = self.d.getVar('BAD_RECOMMENDATIONS')
877 package_exclude = self.d.getVar('PACKAGE_EXCLUDE')
878 exclude_pkgs = (bad_recommendations.split() if bad_recommendations else []) + (package_exclude.split() if package_exclude else [])
879
880 output = self._invoke_dnf((["--skip-broken"] if attempt_only else []) +
881 (["-x", ",".join(exclude_pkgs)] if len(exclude_pkgs) > 0 else []) +
882 (["--setopt=install_weak_deps=False"] if self.d.getVar('NO_RECOMMENDATIONS') == "1" else []) +
883 (["--nogpgcheck"] if self.d.getVar('RPM_SIGN_PACKAGES') != '1' else ["--setopt=gpgcheck=True"]) +
884 ["install"] +
885 pkgs)
886
887 failed_scriptlets_pkgnames = collections.OrderedDict()
888 for line in output.splitlines():
889 if line.startswith("Error in POSTIN scriptlet in rpm package"):
890 failed_scriptlets_pkgnames[line.split()[-1]] = True
891
892 if len(failed_scriptlets_pkgnames) > 0:
893 failed_postinsts_abort(list(failed_scriptlets_pkgnames.keys()), self.d.expand("${T}/log.do_${BB_CURRENTTASK}"))
894
895 def remove(self, pkgs, with_dependencies = True):
896 if not pkgs:
897 return
898
899 self._prepare_pkg_transaction()
900
901 if with_dependencies:
902 self._invoke_dnf(["remove"] + pkgs)
903 else:
904 cmd = bb.utils.which(os.getenv('PATH'), "rpm")
905 args = ["-e", "-v", "--nodeps", "--root=%s" %self.target_rootfs]
906
907 try:
908 bb.note("Running %s" % ' '.join([cmd] + args + pkgs))
909 output = subprocess.check_output([cmd] + args + pkgs, stderr=subprocess.STDOUT).decode("utf-8")
910 bb.note(output)
911 except subprocess.CalledProcessError as e:
912 bb.fatal("Could not invoke rpm. Command "
913 "'%s' returned %d:\n%s" % (' '.join([cmd] + args + pkgs), e.returncode, e.output.decode("utf-8")))
914
915 def upgrade(self):
916 self._prepare_pkg_transaction()
917 self._invoke_dnf(["upgrade"])
918
919 def autoremove(self):
920 self._prepare_pkg_transaction()
921 self._invoke_dnf(["autoremove"])
922
923 def remove_packaging_data(self):
924 self._invoke_dnf(["clean", "all"])
925 for dir in self.packaging_data_dirs:
926 bb.utils.remove(oe.path.join(self.target_rootfs, dir), True)
927
928 def backup_packaging_data(self):
929 # Save the packaging dirs for incremental rpm image generation
930 if os.path.exists(self.saved_packaging_data):
931 bb.utils.remove(self.saved_packaging_data, True)
932 for i in self.packaging_data_dirs:
933 source_dir = oe.path.join(self.target_rootfs, i)
934 target_dir = oe.path.join(self.saved_packaging_data, i)
935 if os.path.isdir(source_dir):
936 shutil.copytree(source_dir, target_dir, symlinks=True)
937 elif os.path.isfile(source_dir):
938 shutil.copy2(source_dir, target_dir)
939
940 def recovery_packaging_data(self):
941 # Restore the saved packaging data
942 if os.path.exists(self.saved_packaging_data):
943 for i in self.packaging_data_dirs:
944 target_dir = oe.path.join(self.target_rootfs, i)
945 if os.path.exists(target_dir):
946 bb.utils.remove(target_dir, True)
947 source_dir = oe.path.join(self.saved_packaging_data, i)
948 if os.path.isdir(source_dir):
949 shutil.copytree(source_dir, target_dir, symlinks=True)
950 elif os.path.isfile(source_dir):
951 shutil.copy2(source_dir, target_dir)
952
953 def list_installed(self):
954 output = self._invoke_dnf(["repoquery", "--installed", "--queryformat", "Package: %{name} %{arch} %{version} %{name}-%{version}-%{release}.%{arch}.rpm\nDependencies:\n%{requires}\nRecommendations:\n%{recommends}\nDependenciesEndHere:\n"],
955 print_output = False)
956 packages = {}
957 current_package = None
958 current_deps = None
959 current_state = "initial"
960 for line in output.splitlines():
961 if line.startswith("Package:"):
962 package_info = line.split(" ")[1:]
963 current_package = package_info[0]
964 package_arch = package_info[1]
965 package_version = package_info[2]
966 package_rpm = package_info[3]
967 packages[current_package] = {"arch":package_arch, "ver":package_version, "filename":package_rpm}
968 current_deps = []
969 elif line.startswith("Dependencies:"):
970 current_state = "dependencies"
971 elif line.startswith("Recommendations"):
972 current_state = "recommendations"
973 elif line.startswith("DependenciesEndHere:"):
974 current_state = "initial"
975 packages[current_package]["deps"] = current_deps
976 elif len(line) > 0:
977 if current_state == "dependencies":
978 current_deps.append(line)
979 elif current_state == "recommendations":
980 current_deps.append("%s [REC]" % line)
981
982 return packages
983
984 def update(self):
985 self._invoke_dnf(["makecache", "--refresh"])
986
987 def _invoke_dnf(self, dnf_args, fatal = True, print_output = True ):
988 os.environ['RPM_ETCCONFIGDIR'] = self.target_rootfs
989
990 dnf_cmd = bb.utils.which(os.getenv('PATH'), "dnf")
991 standard_dnf_args = ["-v", "--rpmverbosity=info", "-y",
992 "-c", oe.path.join(self.target_rootfs, "etc/dnf/dnf.conf"),
993 "--setopt=reposdir=%s" %(oe.path.join(self.target_rootfs, "etc/yum.repos.d")),
994 "--installroot=%s" % (self.target_rootfs),
995 "--setopt=logdir=%s" % (self.d.getVar('T'))
996 ]
997 if hasattr(self, "rpm_repo_dir"):
998 standard_dnf_args.append("--repofrompath=oe-repo,%s" % (self.rpm_repo_dir))
999 cmd = [dnf_cmd] + standard_dnf_args + dnf_args
1000 bb.note('Running %s' % ' '.join(cmd))
1001 try:
1002 output = subprocess.check_output(cmd,stderr=subprocess.STDOUT).decode("utf-8")
1003 if print_output:
1004 bb.debug(1, output)
1005 return output
1006 except subprocess.CalledProcessError as e:
1007 if print_output:
1008 (bb.note, bb.fatal)[fatal]("Could not invoke dnf. Command "
1009 "'%s' returned %d:\n%s" % (' '.join(cmd), e.returncode, e.output.decode("utf-8")))
1010 else:
1011 (bb.note, bb.fatal)[fatal]("Could not invoke dnf. Command "
1012 "'%s' returned %d:" % (' '.join(cmd), e.returncode))
1013 return e.output.decode("utf-8")
1014
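# A representative assembled command line (paths hypothetical):
#   dnf -v --rpmverbosity=info -y -c <rootfs>/etc/dnf/dnf.conf \
#       --setopt=reposdir=<rootfs>/etc/yum.repos.d --installroot=<rootfs> \
#       --setopt=logdir=<T> --repofrompath=oe-repo,<repo_dir> install pkg-a
# Note the (bb.note, bb.fatal)[fatal] idiom above: indexing the tuple with
# the boolean picks the logger, so fatal=True aborts the build.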
1015 def dump_install_solution(self, pkgs):
1016 open(self.solution_manifest, 'w').write(" ".join(pkgs))
1017 return pkgs
1018
1019 def load_old_install_solution(self):
1020 if not os.path.exists(self.solution_manifest):
1021 return []
1022 with open(self.solution_manifest, 'r') as fd:
1023 return fd.read().split()
1024
1025 def _script_num_prefix(self, path):
1026 files = os.listdir(path)
1027 numbers = set()
1028 numbers.add(99)
1029 for f in files:
1030 numbers.add(int(f.split("-")[0]))
1031 return max(numbers) + 1
1032
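# e.g. with "99-init" and "100-pkg-a" already saved, numbers == {99, 100}
# and the next prefix is 101; seeding the set with 99 guarantees the first
# saved script starts at 100.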
1033 def save_rpmpostinst(self, pkg):
1034 bb.note("Saving postinstall script of %s" % (pkg))
1035 cmd = bb.utils.which(os.getenv('PATH'), "rpm")
1036 args = ["-q", "--root=%s" % self.target_rootfs, "--queryformat", "%{postin}", pkg]
1037
1038 try:
1039 output = subprocess.check_output([cmd] + args,stderr=subprocess.STDOUT).decode("utf-8")
1040 except subprocess.CalledProcessError as e:
1041 bb.fatal("Could not invoke rpm. Command "
1042 "'%s' returned %d:\n%s" % (' '.join([cmd] + args), e.returncode, e.output.decode("utf-8")))
1043
1044 # may need to prepend #!/bin/sh to output
1045
1046 target_path = oe.path.join(self.target_rootfs, self.d.expand('${sysconfdir}/rpm-postinsts/'))
1047 bb.utils.mkdirhier(target_path)
1048 num = self._script_num_prefix(target_path)
1049 saved_script_name = oe.path.join(target_path, "%d-%s" % (num, pkg))
1050 open(saved_script_name, 'w').write(output)
1051 os.chmod(saved_script_name, 0o755)
1052
1053 def _handle_intercept_failure(self, registered_pkgs):
1054 rpm_postinsts_dir = self.target_rootfs + self.d.expand('${sysconfdir}/rpm-postinsts/')
1055 bb.utils.mkdirhier(rpm_postinsts_dir)
1056
1057 # Save the package postinstalls in /etc/rpm-postinsts
1058 for pkg in registered_pkgs.split():
1059 self.save_rpmpostinst(pkg)
1060
1061 def extract(self, pkg):
1062 output = self._invoke_dnf(["repoquery", "--queryformat", "%{location}", pkg])
1063 pkg_name = output.splitlines()[-1]
1064 if not pkg_name.endswith(".rpm"):
1065 bb.fatal("dnf could not find package %s in repository: %s" %(pkg, output))
1066 pkg_path = oe.path.join(self.rpm_repo_dir, pkg_name)
1067
1068 cpio_cmd = bb.utils.which(os.getenv("PATH"), "cpio")
1069 rpm2cpio_cmd = bb.utils.which(os.getenv("PATH"), "rpm2cpio")
1070
1071 if not os.path.isfile(pkg_path):
1072 bb.fatal("Unable to extract package for '%s'."
1073 "File %s doesn't exists" % (pkg, pkg_path))
1074
1075 tmp_dir = tempfile.mkdtemp()
1076 current_dir = os.getcwd()
1077 os.chdir(tmp_dir)
1078
1079 try:
1080 cmd = "%s %s | %s -idmv" % (rpm2cpio_cmd, pkg_path, cpio_cmd)
1081 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
1082 except subprocess.CalledProcessError as e:
1083 bb.utils.remove(tmp_dir, recurse=True)
1084 bb.fatal("Unable to extract %s package. Command '%s' "
1085 "returned %d:\n%s" % (pkg_path, cmd, e.returncode, e.output.decode("utf-8")))
1086 except OSError as e:
1087 bb.utils.remove(tmp_dir, recurse=True)
1088 bb.fatal("Unable to extract %s package. Command '%s' "
1089 "returned %d:\n%s at %s" % (pkg_path, cmd, e.errno, e.strerror, e.filename))
1090
1091 bb.note("Extracted %s to %s" % (pkg_path, tmp_dir))
1092 os.chdir(current_dir)
1093
1094 return tmp_dir
1095
1096
1097class OpkgDpkgPM(PackageManager):
1098 def __init__(self, d, target_rootfs):
1099 """
1100 This is an abstract class. Do not instantiate this directly.
1101 """
1102 super(OpkgDpkgPM, self).__init__(d, target_rootfs)
1103
1104 def package_info(self, pkg, cmd):
1105 """
1106 Returns a dictionary with the package info.
1107
1108 This method extracts the common parts for Opkg and Dpkg
1109 """
1110
1111 try:
1112 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).decode("utf-8")
1113 except subprocess.CalledProcessError as e:
1114 bb.fatal("Unable to list available packages. Command '%s' "
1115 "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8")))
1116 return opkg_query(output)
1117
1118 def extract(self, pkg, pkg_info):
1119 """
1120 Returns the path to a tmpdir where the contents of a package reside.
1121
1122 Deleting the tmpdir is the responsibility of the caller.
1123
1124 This method extracts the common parts for Opkg and Dpkg
1125 """
1126
1127 ar_cmd = bb.utils.which(os.getenv("PATH"), "ar")
1128 tar_cmd = bb.utils.which(os.getenv("PATH"), "tar")
1129 pkg_path = pkg_info[pkg]["filepath"]
1130
1131 if not os.path.isfile(pkg_path):
1132 bb.fatal("Unable to extract package for '%s'."
1133 "File %s doesn't exists" % (pkg, pkg_path))
1134
1135 tmp_dir = tempfile.mkdtemp()
1136 current_dir = os.getcwd()
1137 os.chdir(tmp_dir)
1138 data_tar = 'data.tar.xz'
1139
1140 try:
1141 cmd = [ar_cmd, 'x', pkg_path]
1142 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
1143 cmd = [tar_cmd, 'xf', data_tar]
1144 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
1145 except subprocess.CalledProcessError as e:
1146 bb.utils.remove(tmp_dir, recurse=True)
1147 bb.fatal("Unable to extract %s package. Command '%s' "
1148 "returned %d:\n%s" % (pkg_path, ' '.join(cmd), e.returncode, e.output.decode("utf-8")))
1149 except OSError as e:
1150 bb.utils.remove(tmp_dir, recurse=True)
1151 bb.fatal("Unable to extract %s package. Command '%s' "
1152 "returned %d:\n%s at %s" % (pkg_path, ' '.join(cmd), e.errno, e.strerror, e.filename))
1153
1154 bb.note("Extracted %s to %s" % (pkg_path, tmp_dir))
1155 bb.utils.remove(os.path.join(tmp_dir, "debian-binary"))
1156 bb.utils.remove(os.path.join(tmp_dir, "control.tar.gz"))
1157 os.chdir(current_dir)
1158
1159 return tmp_dir
1160
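# Both .deb and .ipk files are 'ar' archives containing debian-binary,
# control.tar.gz and (for this build) data.tar.xz; 'ar x' unpacks the
# members and the data tarball is extracted in place, which is why the
# two control members are removed again above.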
1161 def _handle_intercept_failure(self, registered_pkgs):
1162 self.mark_packages("unpacked", registered_pkgs.split())
1163
1164class OpkgPM(OpkgDpkgPM):
1165 def __init__(self, d, target_rootfs, config_file, archs, task_name='target', ipk_repo_workdir="oe-rootfs-repo", filterbydependencies=True, prepare_index=True):
1166 super(OpkgPM, self).__init__(d, target_rootfs)
1167
1168 self.config_file = config_file
1169 self.pkg_archs = archs
1170 self.task_name = task_name
1171
1172 self.deploy_dir = oe.path.join(self.d.getVar('WORKDIR'), ipk_repo_workdir)
1173 self.deploy_lock_file = os.path.join(self.deploy_dir, "deploy.lock")
1174 self.opkg_cmd = bb.utils.which(os.getenv('PATH'), "opkg")
1175 self.opkg_args = "--volatile-cache -f %s -t %s -o %s " % (self.config_file, self.d.expand('${T}/ipktemp/') ,target_rootfs)
1176 self.opkg_args += self.d.getVar("OPKG_ARGS")
1177
1178 if prepare_index:
1179 create_packages_dir(self.d, self.deploy_dir, d.getVar("DEPLOY_DIR_IPK"), "package_write_ipk", filterbydependencies)
1180
1181 opkg_lib_dir = self.d.getVar('OPKGLIBDIR')
1182 if opkg_lib_dir[0] == "/":
1183 opkg_lib_dir = opkg_lib_dir[1:]
1184
1185 self.opkg_dir = os.path.join(target_rootfs, opkg_lib_dir, "opkg")
1186
1187 bb.utils.mkdirhier(self.opkg_dir)
1188
1189 self.saved_opkg_dir = self.d.expand('${T}/saved/%s' % self.task_name)
1190 if not os.path.exists(self.d.expand('${T}/saved')):
1191 bb.utils.mkdirhier(self.d.expand('${T}/saved'))
1192
1193 self.from_feeds = (self.d.getVar('BUILD_IMAGES_FROM_FEEDS') or "") == "1"
1194 if self.from_feeds:
1195 self._create_custom_config()
1196 else:
1197 self._create_config()
1198
1199 self.indexer = OpkgIndexer(self.d, self.deploy_dir)
1200
1201 def mark_packages(self, status_tag, packages=None):
1202 """
1203 This function will change a package's status in the /var/lib/opkg/status
1204 file. If 'packages' is None then the new status will be applied to all
1205 packages.
1206 """
1207 status_file = os.path.join(self.opkg_dir, "status")
1208
1209 with open(status_file, "r") as sf:
1210 with open(status_file + ".tmp", "w+") as tmp_sf:
1211 if packages is None:
1212 tmp_sf.write(re.sub(r"Package: (.*?)\n((?:[^\n]+\n)*?)Status: (.*)(?:unpacked|installed)",
1213 r"Package: \1\n\2Status: \3%s" % status_tag,
1214 sf.read()))
1215 else:
1216 if type(packages).__name__ != "list":
1217 raise TypeError("'packages' should be a list object")
1218
1219 status = sf.read()
1220 for pkg in packages:
1221 status = re.sub(r"Package: %s\n((?:[^\n]+\n)*?)Status: (.*)(?:unpacked|installed)" % pkg,
1222 r"Package: %s\n\1Status: \2%s" % (pkg, status_tag),
1223 status)
1224
1225 tmp_sf.write(status)
1226
1227 os.rename(status_file + ".tmp", status_file)
1228
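# A sketch of the rewrite on one hypothetical status stanza with
# status_tag = "unpacked": the line "Status: install ok installed"
# becomes "Status: install ok unpacked"; only the trailing state word
# ("unpacked" or "installed") is swapped, the rest of the stanza is kept.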
1229 def _create_custom_config(self):
1230 bb.note("Building from feeds activated!")
1231
1232 with open(self.config_file, "w+") as config_file:
1233 priority = 1
1234 for arch in self.pkg_archs.split():
1235 config_file.write("arch %s %d\n" % (arch, priority))
1236 priority += 5
1237
1238 for line in (self.d.getVar('IPK_FEED_URIS') or "").split():
1239 feed_match = re.match(r"^[ \t]*(.*)##([^ \t]*)[ \t]*$", line)
1240
1241 if feed_match is not None:
1242 feed_name = feed_match.group(1)
1243 feed_uri = feed_match.group(2)
1244
1245 bb.note("Add %s feed with URL %s" % (feed_name, feed_uri))
1246
1247 config_file.write("src/gz %s %s\n" % (feed_name, feed_uri))
1248
1249 """
1250 Allow using the package deploy directory contents as a quick devel-testing
1251 feed. This creates individual feed configs for each arch subdir of those
1252 specified as compatible for the current machine.
1253 NOTE: Development-helper feature, NOT a full-fledged feed.
1254 """
1255 if (self.d.getVar('FEED_DEPLOYDIR_BASE_URI') or "") != "":
1256 for arch in self.pkg_archs.split():
1257 cfg_file_name = os.path.join(self.target_rootfs,
1258 self.d.getVar("sysconfdir"),
1259 "opkg",
1260 "local-%s-feed.conf" % arch)
1261
1262 with open(cfg_file_name, "w+") as cfg_file:
1263 cfg_file.write("src/gz local-%s %s/%s" %
1264 (arch,
1265 self.d.getVar('FEED_DEPLOYDIR_BASE_URI'),
1266 arch))
1267
1268 if self.d.getVar('OPKGLIBDIR') != '/var/lib':
1269 # There is no command line option for this anymore, we need to add
1270 # info_dir and status_file to config file, if OPKGLIBDIR doesn't have
1271 # the default value of "/var/lib" as defined in opkg:
1272 # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_LISTS_DIR VARDIR "/lib/opkg/lists"
1273 # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_INFO_DIR VARDIR "/lib/opkg/info"
1274 # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_STATUS_FILE VARDIR "/lib/opkg/status"
1275 cfg_file.write("option info_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR'), 'opkg', 'info'))
1276 cfg_file.write("option lists_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR'), 'opkg', 'lists'))
1277 cfg_file.write("option status_file %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR'), 'opkg', 'status'))
1278
1279
1280 def _create_config(self):
1281 with open(self.config_file, "w+") as config_file:
1282 priority = 1
1283 for arch in self.pkg_archs.split():
1284 config_file.write("arch %s %d\n" % (arch, priority))
1285 priority += 5
1286
1287 config_file.write("src oe file:%s\n" % self.deploy_dir)
1288
1289 for arch in self.pkg_archs.split():
1290 pkgs_dir = os.path.join(self.deploy_dir, arch)
1291 if os.path.isdir(pkgs_dir):
1292 config_file.write("src oe-%s file:%s\n" %
1293 (arch, pkgs_dir))
1294
1295 if self.d.getVar('OPKGLIBDIR') != '/var/lib':
1296 # There is no command line option for this anymore, we need to add
1297 # info_dir and status_file to config file, if OPKGLIBDIR doesn't have
1298 # the default value of "/var/lib" as defined in opkg:
1299 # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_LISTS_DIR VARDIR "/lib/opkg/lists"
1300 # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_INFO_DIR VARDIR "/lib/opkg/info"
1301 # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_STATUS_FILE VARDIR "/lib/opkg/status"
1302 config_file.write("option info_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR'), 'opkg', 'info'))
1303 config_file.write("option lists_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR'), 'opkg', 'lists'))
1304 config_file.write("option status_file %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR'), 'opkg', 'status'))
1305
1306 def insert_feeds_uris(self, feed_uris, feed_base_paths, feed_archs):
1307 if feed_uris == "":
1308 return
1309
1310 rootfs_config = os.path.join('%s/etc/opkg/base-feeds.conf'
1311 % self.target_rootfs)
1312
1313 os.makedirs('%s/etc/opkg' % self.target_rootfs, exist_ok=True)
1314
1315 feed_uris = self.construct_uris(feed_uris.split(), feed_base_paths.split())
1316 archs = self.pkg_archs.split() if feed_archs is None else feed_archs.split()
1317
1318 with open(rootfs_config, "w+") as config_file:
1319 uri_iterator = 0
1320 for uri in feed_uris:
1321 if archs:
1322 for arch in archs:
1323 if (feed_archs is None) and (not os.path.exists(oe.path.join(self.deploy_dir, arch))):
1324 continue
1325 bb.note('Adding opkg feed url-%s-%d (%s)' %
1326 (arch, uri_iterator, uri))
1327 config_file.write("src/gz uri-%s-%d %s/%s\n" %
1328 (arch, uri_iterator, uri, arch))
1329 else:
1330 bb.note('Adding opkg feed url-%d (%s)' %
1331 (uri_iterator, uri))
1332 config_file.write("src/gz uri-%d %s\n" %
1333 (uri_iterator, uri))
1334
1335 uri_iterator += 1
1336
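# With feed_uris = "http://example.com/ipk" and feed_archs = "all armv7a"
# (hypothetical), base-feeds.conf ends up containing:
#   src/gz uri-all-0 http://example.com/ipk/all
#   src/gz uri-armv7a-0 http://example.com/ipk/armv7a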
1337 def update(self):
1338 self.deploy_dir_lock()
1339
1340 cmd = "%s %s update" % (self.opkg_cmd, self.opkg_args)
1341
1342 try:
1343 subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
1344 except subprocess.CalledProcessError as e:
1345 self.deploy_dir_unlock()
1346 bb.fatal("Unable to update the package index files. Command '%s' "
1347 "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8")))
1348
1349 self.deploy_dir_unlock()
1350
1351 def install(self, pkgs, attempt_only=False):
1352 if not pkgs:
1353 return
1354
1355 cmd = "%s %s" % (self.opkg_cmd, self.opkg_args)
1356 for exclude in (self.d.getVar("PACKAGE_EXCLUDE") or "").split():
1357 cmd += " --add-exclude %s" % exclude
1358 for bad_recommendation in (self.d.getVar("BAD_RECOMMENDATIONS") or "").split():
1359 cmd += " --add-ignore-recommends %s" % bad_recommendation
1360 cmd += " install "
1361 cmd += " ".join(pkgs)
1362
1363 os.environ['D'] = self.target_rootfs
1364 os.environ['OFFLINE_ROOT'] = self.target_rootfs
1365 os.environ['IPKG_OFFLINE_ROOT'] = self.target_rootfs
1366 os.environ['OPKG_OFFLINE_ROOT'] = self.target_rootfs
1367 os.environ['INTERCEPT_DIR'] = self.intercepts_dir
1368 os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE')
1369
1370 try:
1371 bb.note("Installing the following packages: %s" % ' '.join(pkgs))
1372 bb.note(cmd)
1373 output = subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT).decode("utf-8")
1374 bb.note(output)
1375 failed_pkgs = []
1376 for line in output.split('\n'):
1377 if line.endswith("configuration required on target."):
1378 bb.warn(line)
1379 failed_pkgs.append(line.split(".")[0])
1380 if failed_pkgs:
1381 failed_postinsts_abort(failed_pkgs, self.d.expand("${T}/log.do_${BB_CURRENTTASK}"))
1382 except subprocess.CalledProcessError as e:
1383 (bb.fatal, bb.warn)[attempt_only]("Unable to install packages. "
1384 "Command '%s' returned %d:\n%s" %
1385 (cmd, e.returncode, e.output.decode("utf-8")))
1386
1387 def remove(self, pkgs, with_dependencies=True):
1388 if not pkgs:
1389 return
1390
1391 if with_dependencies:
1392 cmd = "%s %s --force-remove --force-removal-of-dependent-packages remove %s" % \
1393 (self.opkg_cmd, self.opkg_args, ' '.join(pkgs))
1394 else:
1395 cmd = "%s %s --force-depends remove %s" % \
1396 (self.opkg_cmd, self.opkg_args, ' '.join(pkgs))
1397
1398 try:
1399 bb.note(cmd)
1400 output = subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT).decode("utf-8")
1401 bb.note(output)
1402 except subprocess.CalledProcessError as e:
1403 bb.fatal("Unable to remove packages. Command '%s' "
1404 "returned %d:\n%s" % (e.cmd, e.returncode, e.output.decode("utf-8")))
1405
1406 def write_index(self):
1407 self.deploy_dir_lock()
1408
1409 result = self.indexer.write_index()
1410
1411 self.deploy_dir_unlock()
1412
1413 if result is not None:
1414 bb.fatal(result)
1415
1416 def remove_packaging_data(self):
1417 bb.utils.remove(self.opkg_dir, True)
1418 # Recreate the directory; it's needed by the PM lock
1419 bb.utils.mkdirhier(self.opkg_dir)
1420
1421 def remove_lists(self):
1422 if not self.from_feeds:
1423 bb.utils.remove(os.path.join(self.opkg_dir, "lists"), True)
1424
1425 def list_installed(self):
1426 return OpkgPkgsList(self.d, self.target_rootfs, self.config_file).list_pkgs()
1427
1428 def dummy_install(self, pkgs):
1429 """
1430 This function performs a dummy (--noaction) install of pkgs and returns the output log.
1431 """
1432 if len(pkgs) == 0:
1433 return
1434
1435 # Create a temp dir as the opkg root for the dummy installation
1436 temp_rootfs = self.d.expand('${T}/opkg')
1437 opkg_lib_dir = self.d.getVar('OPKGLIBDIR')
1438 if opkg_lib_dir[0] == "/":
1439 opkg_lib_dir = opkg_lib_dir[1:]
1440 temp_opkg_dir = os.path.join(temp_rootfs, opkg_lib_dir, 'opkg')
1441 bb.utils.mkdirhier(temp_opkg_dir)
1442
1443 opkg_args = "-f %s -o %s " % (self.config_file, temp_rootfs)
1444 opkg_args += self.d.getVar("OPKG_ARGS")
1445
1446 cmd = "%s %s update" % (self.opkg_cmd, opkg_args)
1447 try:
1448 subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
1449 except subprocess.CalledProcessError as e:
1450 bb.fatal("Unable to update. Command '%s' "
1451 "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8")))
1452
1453 # Dummy installation
1454 cmd = "%s %s --noaction install %s " % (self.opkg_cmd,
1455 opkg_args,
1456 ' '.join(pkgs))
1457 try:
1458 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
1459 except subprocess.CalledProcessError as e:
1460 bb.fatal("Unable to dummy install packages. Command '%s' "
1461 "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8")))
1462
1463 bb.utils.remove(temp_rootfs, True)
1464
1465 return output
1466
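dummy_install() strips the leading slash from OPKGLIBDIR before joining it under the temporary rootfs because os.path.join() discards every preceding component as soon as it meets an absolute path. A runnable sketch with hypothetical paths:

import os

temp_rootfs = "/tmp/opkg-rootfs"
opkg_lib_dir = "/var/lib"    # a typical OPKGLIBDIR value (assumed here)

# joining the absolute path would silently escape the temp rootfs
assert os.path.join(temp_rootfs, opkg_lib_dir, "opkg") == "/var/lib/opkg"

# with the leading slash removed, the path stays inside the temp rootfs
assert os.path.join(temp_rootfs, opkg_lib_dir[1:], "opkg") == "/tmp/opkg-rootfs/var/lib/opkg"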
1467 def backup_packaging_data(self):
1468 # Save the opkg lib dir for incremental ipk image generation
1469 if os.path.exists(self.saved_opkg_dir):
1470 bb.utils.remove(self.saved_opkg_dir, True)
1471 shutil.copytree(self.opkg_dir,
1472 self.saved_opkg_dir,
1473 symlinks=True)
1474
1475 def recover_packaging_data(self):
1476 # Move the opkglib back
1477 if os.path.exists(self.saved_opkg_dir):
1478 if os.path.exists(self.opkg_dir):
1479 bb.utils.remove(self.opkg_dir, True)
1480
1481 bb.note('Recover packaging data')
1482 shutil.copytree(self.saved_opkg_dir,
1483 self.opkg_dir,
1484 symlinks=True)
1485
1486 def package_info(self, pkg):
1487 """
1488 Returns a dictionary with the package info.
1489 """
1490 cmd = "%s %s info %s" % (self.opkg_cmd, self.opkg_args, pkg)
1491 pkg_info = super(OpkgPM, self).package_info(pkg, cmd)
1492
1493 pkg_arch = pkg_info[pkg]["arch"]
1494 pkg_filename = pkg_info[pkg]["filename"]
1495 pkg_info[pkg]["filepath"] = \
1496 os.path.join(self.deploy_dir, pkg_arch, pkg_filename)
1497
1498 return pkg_info
1499
1500 def extract(self, pkg):
1501 """
1502 Returns the path to a tmpdir where the contents of a package reside.
1503
1504 Deleting the tmpdir is the responsibility of the caller.
1505 """
1506 pkg_info = self.package_info(pkg)
1507 if not pkg_info:
1508 bb.fatal("Unable to get information for package '%s' while "
1509 "trying to extract the package." % pkg)
1510
1511 tmp_dir = super(OpkgPM, self).extract(pkg, pkg_info)
1512 bb.utils.remove(os.path.join(tmp_dir, "data.tar.xz"))
1513
1514 return tmp_dir
1515
1516class DpkgPM(OpkgDpkgPM):
1517 def __init__(self, d, target_rootfs, archs, base_archs, apt_conf_dir=None, deb_repo_workdir="oe-rootfs-repo", filterbydependencies=True):
1518 super(DpkgPM, self).__init__(d, target_rootfs)
1519 self.deploy_dir = oe.path.join(self.d.getVar('WORKDIR'), deb_repo_workdir)
1520
1521 create_packages_dir(self.d, self.deploy_dir, d.getVar("DEPLOY_DIR_DEB"), "package_write_deb", filterbydependencies)
1522
1523 if apt_conf_dir is None:
1524 self.apt_conf_dir = self.d.expand("${APTCONF_TARGET}/apt")
1525 else:
1526 self.apt_conf_dir = apt_conf_dir
1527 self.apt_conf_file = os.path.join(self.apt_conf_dir, "apt.conf")
1528 self.apt_get_cmd = bb.utils.which(os.getenv('PATH'), "apt-get")
1529 self.apt_cache_cmd = bb.utils.which(os.getenv('PATH'), "apt-cache")
1530
1531 self.apt_args = d.getVar("APT_ARGS")
1532
1533 self.all_arch_list = archs.split()
1534 all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS') or "").split()
1535 self.all_arch_list.extend(arch for arch in all_mlb_pkg_arch_list if arch not in self.all_arch_list)
1536
1537 self._create_configs(archs, base_archs)
1538
1539 self.indexer = DpkgIndexer(self.d, self.deploy_dir)
1540
1541 def mark_packages(self, status_tag, packages=None):
1542 """
1543 Change a package's status in the /var/lib/dpkg/status file.
1544 If 'packages' is None, then 'status_tag' is applied to all
1545 packages.
1546 """
1547 status_file = self.target_rootfs + "/var/lib/dpkg/status"
1548
1549 with open(status_file, "r") as sf:
1550 with open(status_file + ".tmp", "w+") as tmp_sf:
1551 if packages is None:
1552 tmp_sf.write(re.sub(r"Package: (.*?)\n((?:[^\n]+\n)*?)Status: (.*)(?:unpacked|installed)",
1553 r"Package: \1\n\2Status: \3%s" % status_tag,
1554 sf.read()))
1555 else:
1556 if not isinstance(packages, list):
1557 raise TypeError("'packages' should be a list object")
1558
1559 status = sf.read()
1560 for pkg in packages:
1561 status = re.sub(r"Package: %s\n((?:[^\n]+\n)*?)Status: (.*)(?:unpacked|installed)" % pkg,
1562 r"Package: %s\n\1Status: \2%s" % (pkg, status_tag),
1563 status)
1564
1565 tmp_sf.write(status)
1566
1567 os.rename(status_file + ".tmp", status_file)
1568
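To make the substitution in mark_packages() concrete, here is a self-contained run of the same regex over a hypothetical two-entry status fragment, retagging every entry with status_tag = "installed":

import re

status = ("Package: busybox\n"
          "Architecture: armv7a\n"
          "Status: install ok unpacked\n"
          "\n"
          "Package: dropbear\n"
          "Architecture: armv7a\n"
          "Status: install ok installed\n")

status_tag = "installed"
print(re.sub(r"Package: (.*?)\n((?:[^\n]+\n)*?)Status: (.*)(?:unpacked|installed)",
             r"Package: \1\n\2Status: \3%s" % status_tag,
             status))
# both entries now end with "Status: install ok installed"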
1569 def run_pre_post_installs(self, package_name=None):
1570 """
1571 Run the pre/post installs for package "package_name". If package_name is
1572 None, then run all pre/post install scriptlets.
1573 """
1574 info_dir = self.target_rootfs + "/var/lib/dpkg/info"
1575 ControlScript = collections.namedtuple("ControlScript", ["suffix", "name", "argument"])
1576 control_scripts = [
1577 ControlScript(".preinst", "Preinstall", "install"),
1578 ControlScript(".postinst", "Postinstall", "configure")]
1579 status_file = self.target_rootfs + "/var/lib/dpkg/status"
1580 installed_pkgs = []
1581
1582 with open(status_file, "r") as status:
1583 for line in status.read().split('\n'):
1584 m = re.match(r"^Package: (.*)", line)
1585 if m is not None:
1586 installed_pkgs.append(m.group(1))
1587
1588 if package_name is not None and package_name not in installed_pkgs:
1589 return
1590
1591 os.environ['D'] = self.target_rootfs
1592 os.environ['OFFLINE_ROOT'] = self.target_rootfs
1593 os.environ['IPKG_OFFLINE_ROOT'] = self.target_rootfs
1594 os.environ['OPKG_OFFLINE_ROOT'] = self.target_rootfs
1595 os.environ['INTERCEPT_DIR'] = self.intercepts_dir
1596 os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE')
1597
1598 for pkg_name in installed_pkgs:
1599 for control_script in control_scripts:
1600 p_full = os.path.join(info_dir, pkg_name + control_script.suffix)
1601 if os.path.exists(p_full):
1602 try:
1603 bb.note("Executing %s for package: %s ..." %
1604 (control_script.name.lower(), pkg_name))
1605 output = subprocess.check_output([p_full, control_script.argument],
1606 stderr=subprocess.STDOUT).decode("utf-8")
1607 bb.note(output)
1608 except subprocess.CalledProcessError as e:
1609 bb.warn("%s for package %s failed with %d:\n%s" %
1610 (control_script.name, pkg_name, e.returncode,
1611 e.output.decode("utf-8")))
1612 failed_postinsts_abort([pkg_name], self.d.expand("${T}/log.do_${BB_CURRENTTASK}"))
1613
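The scriptlet loop above boils down to a lookup of <info_dir>/<pkg><suffix> for each ControlScript entry. A sketch of that lookup as a standalone helper (pending_scripts is hypothetical):

import collections
import os

ControlScript = collections.namedtuple("ControlScript", ["suffix", "name", "argument"])
CONTROL_SCRIPTS = [ControlScript(".preinst", "Preinstall", "install"),
                   ControlScript(".postinst", "Postinstall", "configure")]

def pending_scripts(info_dir, pkgs):
    # yield (script_path, argument) for every maintainer script on disk
    for pkg in pkgs:
        for cs in CONTROL_SCRIPTS:
            path = os.path.join(info_dir, pkg + cs.suffix)
            if os.path.exists(path):
                yield path, cs.argument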
1614 def update(self):
1615 os.environ['APT_CONFIG'] = self.apt_conf_file
1616
1617 self.deploy_dir_lock()
1618
1619 cmd = "%s update" % self.apt_get_cmd
1620
1621 try:
1622 subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
1623 except subprocess.CalledProcessError as e:
1624 bb.fatal("Unable to update the package index files. Command '%s' "
1625 "returned %d:\n%s" % (e.cmd, e.returncode, e.output.decode("utf-8")))
1626
1627 self.deploy_dir_unlock()
1628
1629 def install(self, pkgs, attempt_only=False):
1630 if attempt_only and not pkgs:
1631 return
1632
1633 os.environ['APT_CONFIG'] = self.apt_conf_file
1634
1635 cmd = "%s %s install --force-yes --allow-unauthenticated --no-remove %s" % \
1636 (self.apt_get_cmd, self.apt_args, ' '.join(pkgs))
1637
1638 try:
1639 bb.note("Installing the following packages: %s" % ' '.join(pkgs))
1640 subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
1641 except subprocess.CalledProcessError as e:
1642 (bb.fatal, bb.warn)[attempt_only]("Unable to install packages. "
1643 "Command '%s' returned %d:\n%s" %
1644 (cmd, e.returncode, e.output.decode("utf-8")))
1645
1646 # rename *.dpkg-new files/dirs
1647 for root, dirs, files in os.walk(self.target_rootfs):
1648 for dir in dirs:
1649 new_dir = re.sub(r"\.dpkg-new", "", dir)
1650 if dir != new_dir:
1651 os.rename(os.path.join(root, dir),
1652 os.path.join(root, new_dir))
1653
1654 for file in files:
1655 new_file = re.sub(r"\.dpkg-new", "", file)
1656 if file != new_file:
1657 os.rename(os.path.join(root, file),
1658 os.path.join(root, new_file))
1659
1660
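The final walk renames anything dpkg left behind with a .dpkg-new suffix during the offline install. A self-contained rerun of the same rename pass on a hypothetical throwaway rootfs:

import os
import re
import tempfile

rootfs = tempfile.mkdtemp()
open(os.path.join(rootfs, "passwd.dpkg-new"), "w").close()

for root, dirs, files in os.walk(rootfs):
    for f in files:
        new_f = re.sub(r"\.dpkg-new", "", f)
        if f != new_f:
            os.rename(os.path.join(root, f), os.path.join(root, new_f))

print(os.listdir(rootfs))    # ['passwd']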
1661 def remove(self, pkgs, with_dependencies=True):
1662 if not pkgs:
1663 return
1664
1665 if with_dependencies:
1666 os.environ['APT_CONFIG'] = self.apt_conf_file
1667 cmd = "%s purge %s" % (self.apt_get_cmd, ' '.join(pkgs))
1668 else:
1669 cmd = "%s --admindir=%s/var/lib/dpkg --instdir=%s" \
1670 " -P --force-depends %s" % \
1671 (bb.utils.which(os.getenv('PATH'), "dpkg"),
1672 self.target_rootfs, self.target_rootfs, ' '.join(pkgs))
1673
1674 try:
1675 subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
1676 except subprocess.CalledProcessError as e:
1677 bb.fatal("Unable to remove packages. Command '%s' "
1678 "returned %d:\n%s" % (e.cmd, e.returncode, e.output.decode("utf-8")))
1679
1680 def write_index(self):
1681 self.deploy_dir_lock()
1682
1683 result = self.indexer.write_index()
1684
1685 self.deploy_dir_unlock()
1686
1687 if result is not None:
1688 bb.fatal(result)
1689
1690 def insert_feeds_uris(self, feed_uris, feed_base_paths, feed_archs):
1691 if feed_uris == "":
1692 return
1693
1694 sources_conf = os.path.join(self.target_rootfs, "etc/apt/sources.list")
1696 arch_list = []
1697
1698 if feed_archs is None:
1699 for arch in self.all_arch_list:
1700 if not os.path.exists(os.path.join(self.deploy_dir, arch)):
1701 continue
1702 arch_list.append(arch)
1703 else:
1704 arch_list = feed_archs.split()
1705
1706 feed_uris = self.construct_uris(feed_uris.split(), feed_base_paths.split())
1707
1708 with open(sources_conf, "w+") as sources_file:
1709 for uri in feed_uris:
1710 if arch_list:
1711 for arch in arch_list:
1712 bb.note('Adding dpkg channel at (%s)' % uri)
1713 sources_file.write("deb %s/%s ./\n" %
1714 (uri, arch))
1715 else:
1716 bb.note('Adding dpkg channel at (%s)' % uri)
1717 sources_file.write("deb %s ./\n" % uri)
1718
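insert_feeds_uris() writes one flat-repository "deb <uri>/<arch> ./" line per feed/arch pair (or a single "deb <uri> ./" line when no arch list applies). A sketch with hypothetical feed values showing the exact lines that end up in sources.list:

feed_uris = ["http://feed.example.com/deb"]    # hypothetical feed
arch_list = ["armv7a", "all"]

lines = []
for uri in feed_uris:
    if arch_list:
        lines.extend("deb %s/%s ./" % (uri, arch) for arch in arch_list)
    else:
        lines.append("deb %s ./" % uri)

print("\n".join(lines))
# deb http://feed.example.com/deb/armv7a ./
# deb http://feed.example.com/deb/all ./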
1719 def _create_configs(self, archs, base_archs):
1720 base_archs = re.sub(r"_", r"-", base_archs)
1721
1722 if os.path.exists(self.apt_conf_dir):
1723 bb.utils.remove(self.apt_conf_dir, True)
1724
1725 bb.utils.mkdirhier(self.apt_conf_dir)
1726 bb.utils.mkdirhier(self.apt_conf_dir + "/lists/partial/")
1727 bb.utils.mkdirhier(self.apt_conf_dir + "/apt.conf.d/")
1728 bb.utils.mkdirhier(self.apt_conf_dir + "/preferences.d/")
1729
1730 arch_list = []
1731 for arch in self.all_arch_list:
1732 if not os.path.exists(os.path.join(self.deploy_dir, arch)):
1733 continue
1734 arch_list.append(arch)
1735
1736 with open(os.path.join(self.apt_conf_dir, "preferences"), "w+") as prefs_file:
1737 priority = 801
1738 for arch in arch_list:
1739 prefs_file.write(
1740 "Package: *\n"
1741 "Pin: release l=%s\n"
1742 "Pin-Priority: %d\n\n" % (arch, priority))
1743
1744 priority += 5
1745
1746 pkg_exclude = self.d.getVar('PACKAGE_EXCLUDE') or ""
1747 for pkg in pkg_exclude.split():
1748 prefs_file.write(
1749 "Package: %s\n"
1750 "Pin: release *\n"
1751 "Pin-Priority: -1\n\n" % pkg)
1752
1753 arch_list.reverse()
1754
1755 with open(os.path.join(self.apt_conf_dir, "sources.list"), "w+") as sources_file:
1756 for arch in arch_list:
1757 sources_file.write("deb file:%s/ ./\n" %
1758 os.path.join(self.deploy_dir, arch))
1759
1760 base_arch_list = base_archs.split()
1761 multilib_variants = self.d.getVar("MULTILIB_VARIANTS")
1762 for variant in multilib_variants.split():
1763 localdata = bb.data.createCopy(self.d)
1764 variant_tune = localdata.getVar("DEFAULTTUNE_virtclass-multilib-" + variant, False)
1766 localdata.setVar("DEFAULTTUNE", variant_tune)
1767 variant_arch = localdata.getVar("DPKG_ARCH")
1768 if variant_arch not in base_arch_list:
1769 base_arch_list.append(variant_arch)
1770
1771 with open(self.apt_conf_file, "w+") as apt_conf:
1772 with open(self.d.expand("${STAGING_ETCDIR_NATIVE}/apt/apt.conf.sample")) as apt_conf_sample:
1773 for line in apt_conf_sample.read().split("\n"):
1774 match_arch = re.match(r" Architecture \".*\";$", line)
1775 architectures = ""
1776 if match_arch:
1777 for base_arch in base_arch_list:
1778 architectures += "\"%s\";" % base_arch
1779 apt_conf.write(" Architectures {%s};\n" % architectures)
1780 apt_conf.write(" Architecture \"%s\";\n" % base_archs)
1781 else:
1782 line = re.sub(r"#ROOTFS#", self.target_rootfs, line)
1783 line = re.sub(r"#APTCONF#", self.apt_conf_dir, line)
1784 apt_conf.write(line + "\n")
1785
1786 target_dpkg_dir = "%s/var/lib/dpkg" % self.target_rootfs
1787 bb.utils.mkdirhier(os.path.join(target_dpkg_dir, "info"))
1788
1789 bb.utils.mkdirhier(os.path.join(target_dpkg_dir, "updates"))
1790
1791 if not os.path.exists(os.path.join(target_dpkg_dir, "status")):
1792 open(os.path.join(target_dpkg_dir, "status"), "w+").close()
1793 if not os.path.exists(os.path.join(target_dpkg_dir, "available")):
1794 open(os.path.join(target_dpkg_dir, "available"), "w+").close()
1795
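In the preferences file written by _create_configs(), each architecture label gets a successively higher Pin-Priority (801, 806, ...), so when the same package exists in several per-arch feeds the entry later in the list is preferred. A sketch with a hypothetical arch list of the stanzas it emits:

arch_list = ["all", "armv7a", "cortexa9hf-neon"]    # hypothetical

priority = 801
for arch in arch_list:
    print("Package: *\n"
          "Pin: release l=%s\n"
          "Pin-Priority: %d\n" % (arch, priority))
    priority += 5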
1796 def remove_packaging_data(self):
1797 bb.utils.remove(self.target_rootfs + self.d.getVar('OPKGLIBDIR'), True)
1798 bb.utils.remove(self.target_rootfs + "/var/lib/dpkg/", True)
1799
1800 def fix_broken_dependencies(self):
1801 os.environ['APT_CONFIG'] = self.apt_conf_file
1802
1803 cmd = "%s %s --allow-unauthenticated -f install" % (self.apt_get_cmd, self.apt_args)
1804
1805 try:
1806 subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
1807 except subprocess.CalledProcessError as e:
1808 bb.fatal("Cannot fix broken dependencies. Command '%s' "
1809 "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8")))
1810
1811 def list_installed(self):
1812 return DpkgPkgsList(self.d, self.target_rootfs).list_pkgs()
1813
1814 def package_info(self, pkg):
1815 """
1816 Returns a dictionary with the package info.
1817 """
1818 cmd = "%s show %s" % (self.apt_cache_cmd, pkg)
1819 pkg_info = super(DpkgPM, self).package_info(pkg, cmd)
1820
1821 pkg_arch = pkg_info[pkg]["pkgarch"]
1822 pkg_filename = pkg_info[pkg]["filename"]
1823 pkg_info[pkg]["filepath"] = \
1824 os.path.join(self.deploy_dir, pkg_arch, pkg_filename)
1825
1826 return pkg_info
1827
1828 def extract(self, pkg):
1829 """
1830 Returns the path to a tmpdir where the contents of a package reside.
1831
1832 Deleting the tmpdir is the responsibility of the caller.
1833 """
1834 pkg_info = self.package_info(pkg)
1835 if not pkg_info:
1836 bb.fatal("Unable to get information for package '%s' while "
1837 "trying to extract the package." % pkg)
1838
1839 tmp_dir = super(DpkgPM, self).extract(pkg, pkg_info)
1840 bb.utils.remove(os.path.join(tmp_dir, "data.tar.xz"))
1841
1842 return tmp_dir
1843
1844def generate_index_files(d):
1845 classes = d.getVar('PACKAGE_CLASSES').replace("package_", "").split()
1846
1847 indexer_map = {
1848 "rpm": (RpmSubdirIndexer, d.getVar('DEPLOY_DIR_RPM')),
1849 "ipk": (OpkgIndexer, d.getVar('DEPLOY_DIR_IPK')),
1850 "deb": (DpkgIndexer, d.getVar('DEPLOY_DIR_DEB'))
1851 }
1852
1853 result = None
1854
1855 for pkg_class in classes:
1856 if pkg_class not in indexer_map:
1857 continue
1858
1859 if os.path.exists(indexer_map[pkg_class][1]):
1860 result = indexer_map[pkg_class][0](d, indexer_map[pkg_class][1]).write_index()
1861
1862 if result is not None:
1863 bb.fatal(result)
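generate_index_files() derives the backend names by stripping the package_ prefix from PACKAGE_CLASSES and treats a non-None write_index() return value as an error message. A standalone sketch of the same convention, with a hypothetical EchoIndexer standing in for the real indexer classes:

class EchoIndexer:
    # hypothetical stand-in for RpmSubdirIndexer/OpkgIndexer/DpkgIndexer
    def __init__(self, deploy_dir):
        self.deploy_dir = deploy_dir
    def write_index(self):
        print("indexing %s" % self.deploy_dir)
        return None    # None on success, an error string on failure

package_classes = "package_ipk package_deb"    # hypothetical PACKAGE_CLASSES
indexer_map = {"ipk": (EchoIndexer, "/tmp/deploy/ipk"),
               "deb": (EchoIndexer, "/tmp/deploy/deb")}

for pkg_class in package_classes.replace("package_", "").split():
    if pkg_class in indexer_map:
        indexer_cls, deploy_dir = indexer_map[pkg_class]
        result = indexer_cls(deploy_dir).write_index()
        if result is not None:
            raise SystemExit(result)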