author      Tudor Florea <tudor.florea@enea.com>    2014-10-16 03:05:19 +0200
committer   Tudor Florea <tudor.florea@enea.com>    2014-10-16 03:05:19 +0200
commit      c527fd1f14c27855a37f2e8ac5346ce8d940ced2 (patch)
tree        bb002c1fdf011c41dbd2f0927bed23ecb5f83c97 /meta/lib/oe/package_manager.py
download    poky-daisy-140929.tar.gz
initial commit for Enea Linux 4.0-140929 (tag: daisy-140929)
Migrated from the internal git server on the daisy-enea-point-release branch
Signed-off-by: Tudor Florea <tudor.florea@enea.com>
Diffstat (limited to 'meta/lib/oe/package_manager.py')
-rw-r--r--    meta/lib/oe/package_manager.py    1721
1 files changed, 1721 insertions, 0 deletions
diff --git a/meta/lib/oe/package_manager.py b/meta/lib/oe/package_manager.py
new file mode 100644
index 0000000000..a8360fe983
--- /dev/null
+++ b/meta/lib/oe/package_manager.py
@@ -0,0 +1,1721 @@
1from abc import ABCMeta, abstractmethod
2import os
3import glob
4import subprocess
5import shutil
6import multiprocessing
7import re
8import bb
9
10
11# this can be used by all PM backends to create the index files in parallel
12def create_index(arg):
13 index_cmd = arg
14
15 try:
16 bb.note("Executing '%s' ..." % index_cmd)
17 subprocess.check_output(index_cmd, stderr=subprocess.STDOUT, shell=True)
18 except subprocess.CalledProcessError as e:
19 return("Index creation command '%s' failed with return code %d:\n%s" %
20 (e.cmd, e.returncode, e.output))
21
22 return None
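# A minimal sketch of how the Indexer subclasses below use create_index():
# build a list of shell commands, fan them out over a pool, and treat any
# non-None result as an error message, e.g.
#
#   pool = bb.utils.multiprocessingpool(multiprocessing.cpu_count())
#   results = list(pool.imap(create_index, index_cmds))
#   pool.close()
#   pool.join()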
23
24
25class Indexer(object):
26 __metaclass__ = ABCMeta
27
28 def __init__(self, d, deploy_dir):
29 self.d = d
30 self.deploy_dir = deploy_dir
31
32 @abstractmethod
33 def write_index(self):
34 pass
35
36
37class RpmIndexer(Indexer):
38 def get_ml_prefix_and_os_list(self, arch_var=None, os_var=None):
39 package_archs = {
40 'default': [],
41 }
42
43 target_os = {
44 'default': "",
45 }
46
47 if arch_var is not None and os_var is not None:
48 package_archs['default'] = self.d.getVar(arch_var, True).split()
49 package_archs['default'].reverse()
50 target_os['default'] = self.d.getVar(os_var, True).strip()
51 else:
52 package_archs['default'] = self.d.getVar("PACKAGE_ARCHS", True).split()
53 # arch order is reversed. This ensures the -best- match is
54 # listed first!
55 package_archs['default'].reverse()
56 target_os['default'] = self.d.getVar("TARGET_OS", True).strip()
57 multilibs = self.d.getVar('MULTILIBS', True) or ""
58 for ext in multilibs.split():
59 eext = ext.split(':')
60 if len(eext) > 1 and eext[0] == 'multilib':
61 localdata = bb.data.createCopy(self.d)
62 default_tune_key = "DEFAULTTUNE_virtclass-multilib-" + eext[1]
63 default_tune = localdata.getVar(default_tune_key, False)
64 if default_tune:
65 localdata.setVar("DEFAULTTUNE", default_tune)
66 bb.data.update_data(localdata)
67 package_archs[eext[1]] = localdata.getVar('PACKAGE_ARCHS',
68 True).split()
69 package_archs[eext[1]].reverse()
70 target_os[eext[1]] = localdata.getVar("TARGET_OS",
71 True).strip()
72
73 ml_prefix_list = dict()
74 for mlib in package_archs:
75 if mlib == 'default':
76 ml_prefix_list[mlib] = package_archs[mlib]
77 else:
78 ml_prefix_list[mlib] = list()
79 for arch in package_archs[mlib]:
80 if arch in ['all', 'noarch', 'any']:
81 ml_prefix_list[mlib].append(arch)
82 else:
83 ml_prefix_list[mlib].append(mlib + "_" + arch)
84
85 return (ml_prefix_list, target_os)
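# Illustrative shape of the returned values, assuming a hypothetical
# configuration with MULTILIBS = "multilib:lib32" (arch and OS names are
# examples only, not taken from a real build):
#
#   ml_prefix_list = {
#       'default': ['core2-64', 'x86_64', 'any', 'noarch', 'all'],
#       'lib32':   ['lib32_core2-32', 'lib32_x86', 'any', 'noarch', 'all'],
#   }
#   target_os = {'default': 'linux', 'lib32': 'linux'}
#
# Only the non-default multilibs get the "<mlib>_" prefix, and the
# arch-independent archs ('all', 'noarch', 'any') are never prefixed.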
86
87 def write_index(self):
88 sdk_pkg_archs = (self.d.getVar('SDK_PACKAGE_ARCHS', True) or "").replace('-', '_').split()
89 all_mlb_pkg_archs = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS', True) or "").replace('-', '_').split()
90
91 mlb_prefix_list = self.get_ml_prefix_and_os_list()[0]
92
93 archs = set()
94 for item in mlb_prefix_list:
95 archs = archs.union(set(i.replace('-', '_') for i in mlb_prefix_list[item]))
96
97 if len(archs) == 0:
98 archs = archs.union(set(all_mlb_pkg_archs))
99
100 archs = archs.union(set(sdk_pkg_archs))
101
102 rpm_createrepo = bb.utils.which(os.getenv('PATH'), "createrepo")
103 index_cmds = []
104 rpm_dirs_found = False
105 for arch in archs:
106 arch_dir = os.path.join(self.deploy_dir, arch)
107 if not os.path.isdir(arch_dir):
108 continue
109
110 index_cmds.append("%s --update -q %s" % (rpm_createrepo, arch_dir))
111
112 rpm_dirs_found = True
113
114 if not rpm_dirs_found:
115 bb.note("There are no packages in %s" % self.deploy_dir)
116 return
117
118 nproc = multiprocessing.cpu_count()
119 pool = bb.utils.multiprocessingpool(nproc)
120 results = list(pool.imap(create_index, index_cmds))
121 pool.close()
122 pool.join()
123
124 for result in results:
125 if result is not None:
126 return(result)
127
128
129class OpkgIndexer(Indexer):
130 def write_index(self):
131 arch_vars = ["ALL_MULTILIB_PACKAGE_ARCHS",
132 "SDK_PACKAGE_ARCHS",
133 "MULTILIB_ARCHS"]
134
135 opkg_index_cmd = bb.utils.which(os.getenv('PATH'), "opkg-make-index")
136
137 if not os.path.exists(os.path.join(self.deploy_dir, "Packages")):
138 open(os.path.join(self.deploy_dir, "Packages"), "w").close()
139
140 index_cmds = []
141 for arch_var in arch_vars:
142 archs = self.d.getVar(arch_var, True)
143 if archs is None:
144 continue
145
146 for arch in archs.split():
147 pkgs_dir = os.path.join(self.deploy_dir, arch)
148 pkgs_file = os.path.join(pkgs_dir, "Packages")
149
150 if not os.path.isdir(pkgs_dir):
151 continue
152
153 if not os.path.exists(pkgs_file):
154 open(pkgs_file, "w").close()
155
156 index_cmds.append('%s -r %s -p %s -m %s' %
157 (opkg_index_cmd, pkgs_file, pkgs_file, pkgs_dir))
158
159 if len(index_cmds) == 0:
160 bb.note("There are no packages in %s!" % self.deploy_dir)
161 return
162
163 nproc = multiprocessing.cpu_count()
164 pool = bb.utils.multiprocessingpool(nproc)
165 results = list(pool.imap(create_index, index_cmds))
166 pool.close()
167 pool.join()
168
169 for result in results:
170 if result is not None:
171 return(result)
172
173
174class DpkgIndexer(Indexer):
175 def write_index(self):
176 pkg_archs = self.d.getVar('PACKAGE_ARCHS', True)
177 if pkg_archs is not None:
178 arch_list = pkg_archs.split()
179 sdk_pkg_archs = self.d.getVar('SDK_PACKAGE_ARCHS', True)
180 if sdk_pkg_archs is not None:
181 for a in sdk_pkg_archs.split():
182 if a not in pkg_archs:
183 arch_list.append(a)
184
185 apt_ftparchive = bb.utils.which(os.getenv('PATH'), "apt-ftparchive")
186 gzip = bb.utils.which(os.getenv('PATH'), "gzip")
187
188 index_cmds = []
189 deb_dirs_found = False
190 for arch in arch_list:
191 arch_dir = os.path.join(self.deploy_dir, arch)
192 if not os.path.isdir(arch_dir):
193 continue
194
195 cmd = "cd %s; PSEUDO_UNLOAD=1 %s packages . > Packages;" % (arch_dir, apt_ftparchive)
196
197 cmd += "%s -fc Packages > Packages.gz;" % gzip
198
199 with open(os.path.join(arch_dir, "Release"), "w+") as release:
200 release.write("Label: %s\n" % arch)
201
202 cmd += "PSEUDO_UNLOAD=1 %s release . >> Release" % apt_ftparchive
203
204 index_cmds.append(cmd)
205
206 deb_dirs_found = True
207
208 if not deb_dirs_found:
209 bb.note("There are no packages in %s" % self.deploy_dir)
210 return
211
212 nproc = multiprocessing.cpu_count()
213 pool = bb.utils.multiprocessingpool(nproc)
214 results = list(pool.imap(create_index, index_cmds))
215 pool.close()
216 pool.join()
217
218 for result in results:
219 if result is not None:
220 return(result)
221
222
223class PkgsList(object):
224 __metaclass__ = ABCMeta
225
226 def __init__(self, d, rootfs_dir):
227 self.d = d
228 self.rootfs_dir = rootfs_dir
229
230 @abstractmethod
231 def list(self, format=None):
232 pass
233
234
235class RpmPkgsList(PkgsList):
236 def __init__(self, d, rootfs_dir, arch_var=None, os_var=None):
237 super(RpmPkgsList, self).__init__(d, rootfs_dir)
238
239 self.rpm_cmd = bb.utils.which(os.getenv('PATH'), "rpm")
240 self.image_rpmlib = os.path.join(self.rootfs_dir, 'var/lib/rpm')
241
242 self.ml_prefix_list, self.ml_os_list = \
243 RpmIndexer(d, rootfs_dir).get_ml_prefix_and_os_list(arch_var, os_var)
244
245 '''
246 Translate the RPM/Smart format names to the OE multilib format names
247 '''
248 def _pkg_translate_smart_to_oe(self, pkg, arch):
249 new_pkg = pkg
250 fixed_arch = arch.replace('_', '-')
251 found = 0
252 for mlib in self.ml_prefix_list:
253 for cmp_arch in self.ml_prefix_list[mlib]:
254 fixed_cmp_arch = cmp_arch.replace('_', '-')
255 if fixed_arch == fixed_cmp_arch:
256 if mlib == 'default':
257 new_pkg = pkg
258 new_arch = cmp_arch
259 else:
260 new_pkg = mlib + '-' + pkg
261 # We need to strip off the ${mlib}_ prefix on the arch
262 new_arch = cmp_arch.replace(mlib + '_', '')
263
264 # Workaround for bug 3565. Simply look to see if we
265 # know of a package with that name, if not try again!
266 filename = os.path.join(self.d.getVar('PKGDATA_DIR', True),
267 'runtime-reverse',
268 new_pkg)
269 if os.path.exists(filename):
270 found = 1
271 break
272
273 if found == 1 and fixed_arch == fixed_cmp_arch:
274 break
275 #bb.note('%s, %s -> %s, %s' % (pkg, arch, new_pkg, new_arch))
276 return new_pkg, new_arch
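# Illustrative translation performed above (hypothetical values, assuming a
# 'lib32' multilib and a matching runtime-reverse entry in PKGDATA_DIR):
#
#   _pkg_translate_smart_to_oe('bash', 'lib32_x86')  ->  ('lib32-bash', 'x86')
#   _pkg_translate_smart_to_oe('bash', 'core2_64')   ->  ('bash', 'core2_64')
#
# i.e. for non-default multilibs the prefix moves from the arch onto the
# package name, while default-arch packages pass through unchanged.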
277
278 def _list_pkg_deps(self):
279 cmd = [bb.utils.which(os.getenv('PATH'), "rpmresolve"),
280 "-t", self.image_rpmlib]
281
282 try:
283 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT).strip()
284 except subprocess.CalledProcessError as e:
285 bb.fatal("Cannot get the package dependencies. Command '%s' "
286 "returned %d:\n%s" % (' '.join(cmd), e.returncode, e.output))
287
288 return output
289
290 def list(self, format=None):
291 if format == "deps":
292 return self._list_pkg_deps()
293
294 cmd = self.rpm_cmd + ' --root ' + self.rootfs_dir
295 cmd += ' -D "_dbpath /var/lib/rpm" -qa'
296 cmd += " --qf '[%{NAME} %{ARCH} %{VERSION} %{PACKAGEORIGIN}\n]'"
297
298 try:
299 # bb.note(cmd)
300 tmp_output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).strip()
301
302 except subprocess.CalledProcessError as e:
303 bb.fatal("Cannot get the installed packages list. Command '%s' "
304 "returned %d:\n%s" % (cmd, e.returncode, e.output))
305
306 output = list()
307 for line in tmp_output.split('\n'):
308 if len(line.strip()) == 0:
309 continue
310 pkg = line.split()[0]
311 arch = line.split()[1]
312 ver = line.split()[2]
313 pkgorigin = line.split()[3]
314 new_pkg, new_arch = self._pkg_translate_smart_to_oe(pkg, arch)
315
316 if format == "arch":
317 output.append('%s %s' % (new_pkg, new_arch))
318 elif format == "file":
319 output.append('%s %s %s' % (new_pkg, pkgorigin, new_arch))
320 elif format == "ver":
321 output.append('%s %s %s' % (new_pkg, new_arch, ver))
322 else:
323 output.append('%s' % (new_pkg))
324
325 output.sort()
326
327 return '\n'.join(output)
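# Rough sketch of the output per 'format' value (names, versions and origin
# files below are made up):
#
#   list()        ->  "bash\nbusybox\n..."          one package name per line
#   list("arch")  ->  "bash core2_64\n..."          name + translated arch
#   list("ver")   ->  "bash core2_64 4.2\n..."      name + arch + version
#   list("file")  ->  "bash bash-4.2-r3.core2_64.rpm core2_64\n..."
#   list("deps")  ->  raw rpmresolve -t output for the image rpmlib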
328
329
330class OpkgPkgsList(PkgsList):
331 def __init__(self, d, rootfs_dir, config_file):
332 super(OpkgPkgsList, self).__init__(d, rootfs_dir)
333
334 self.opkg_cmd = bb.utils.which(os.getenv('PATH'), "opkg-cl")
335 self.opkg_args = "-f %s -o %s " % (config_file, rootfs_dir)
336 self.opkg_args += self.d.getVar("OPKG_ARGS", True)
337
338 def list(self, format=None):
339 opkg_query_cmd = bb.utils.which(os.getenv('PATH'), "opkg-query-helper.py")
340
341 if format == "arch":
342 cmd = "%s %s status | %s -a" % \
343 (self.opkg_cmd, self.opkg_args, opkg_query_cmd)
344 elif format == "file":
345 cmd = "%s %s status | %s -f" % \
346 (self.opkg_cmd, self.opkg_args, opkg_query_cmd)
347 elif format == "ver":
348 cmd = "%s %s status | %s -v" % \
349 (self.opkg_cmd, self.opkg_args, opkg_query_cmd)
350 elif format == "deps":
351 cmd = "%s %s status | %s" % \
352 (self.opkg_cmd, self.opkg_args, opkg_query_cmd)
353 else:
354 cmd = "%s %s list_installed | cut -d' ' -f1" % \
355 (self.opkg_cmd, self.opkg_args)
356
357 try:
358 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).strip()
359 except subprocess.CalledProcessError as e:
360 bb.fatal("Cannot get the installed packages list. Command '%s' "
361 "returned %d:\n%s" % (cmd, e.returncode, e.output))
362
363 if output and format == "file":
364 tmp_output = ""
365 for line in output.split('\n'):
366 pkg, pkg_file, pkg_arch = line.split()
367 full_path = os.path.join(self.rootfs_dir, pkg_arch, pkg_file)
368 if os.path.exists(full_path):
369 tmp_output += "%s %s %s\n" % (pkg, full_path, pkg_arch)
370 else:
371 tmp_output += "%s %s %s\n" % (pkg, pkg_file, pkg_arch)
372
373 output = tmp_output
374
375 return output
376
377
378class DpkgPkgsList(PkgsList):
379 def list(self, format=None):
380 cmd = [bb.utils.which(os.getenv('PATH'), "dpkg-query"),
381 "--admindir=%s/var/lib/dpkg" % self.rootfs_dir,
382 "-W"]
383
384 if format == "arch":
385 cmd.append("-f=${Package} ${PackageArch}\n")
386 elif format == "file":
387 cmd.append("-f=${Package} ${Package}_${Version}_${Architecture}.deb ${PackageArch}\n")
388 elif format == "ver":
389 cmd.append("-f=${Package} ${PackageArch} ${Version}\n")
390 elif format == "deps":
391 cmd.append("-f=Package: ${Package}\nDepends: ${Depends}\nRecommends: ${Recommends}\n\n")
392 else:
393 cmd.append("-f=${Package}\n")
394
395 try:
396 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT).strip()
397 except subprocess.CalledProcessError as e:
398 bb.fatal("Cannot get the installed packages list. Command '%s' "
399 "returned %d:\n%s" % (' '.join(cmd), e.returncode, e.output))
400
401 if format == "file":
402 tmp_output = ""
403 for line in tuple(output.split('\n')):
404 pkg, pkg_file, pkg_arch = line.split()
405 full_path = os.path.join(self.rootfs_dir, pkg_arch, pkg_file)
406 if os.path.exists(full_path):
407 tmp_output += "%s %s %s\n" % (pkg, full_path, pkg_arch)
408 else:
409 tmp_output += "%s %s %s\n" % (pkg, pkg_file, pkg_arch)
410
411 output = tmp_output
412 elif format == "deps":
413 opkg_query_cmd = bb.utils.which(os.getenv('PATH'), "opkg-query-helper.py")
414
415 try:
416 output = subprocess.check_output("echo -e '%s' | %s" %
417 (output, opkg_query_cmd),
418 stderr=subprocess.STDOUT,
419 shell=True)
420 except subprocess.CalledProcessError as e:
421 bb.fatal("Cannot compute packages dependencies. Command '%s' "
422 "returned %d:\n%s" % (e.cmd, e.returncode, e.output))
423
424 return output
425
426
427class PackageManager(object):
428 """
429 This is an abstract class. Do not instantiate this directly.
430 """
431 __metaclass__ = ABCMeta
432
433 def __init__(self, d):
434 self.d = d
435 self.deploy_dir = None
436 self.deploy_lock = None
437 self.feed_uris = self.d.getVar('PACKAGE_FEED_URIS', True) or ""
438
439 """
440 Update the package manager package database.
441 """
442 @abstractmethod
443 def update(self):
444 pass
445
446 """
447 Install a list of packages. 'pkgs' is a list object. If 'attempt_only' is
448 True, installation failures are ignored.
449 """
450 @abstractmethod
451 def install(self, pkgs, attempt_only=False):
452 pass
453
454 """
455 Remove a list of packages. 'pkgs' is a list object. If 'with_dependencies'
456 is False, any dependencies are left in place.
457 """
458 @abstractmethod
459 def remove(self, pkgs, with_dependencies=True):
460 pass
461
462 """
463 This function creates the index files
464 """
465 @abstractmethod
466 def write_index(self):
467 pass
468
469 @abstractmethod
470 def remove_packaging_data(self):
471 pass
472
473 @abstractmethod
474 def list_installed(self, format=None):
475 pass
476
477 @abstractmethod
478 def insert_feeds_uris(self):
479 pass
480
481 """
482 Install complementary packages based upon the list of currently installed
483 packages, e.g. locales, *-dev, *-dbg, etc. This will only attempt to install
484 these packages; if they don't exist, no error will occur. Note: every
485 backend needs to call this function explicitly after the normal package
486 installation.
487 """
488 def install_complementary(self, globs=None):
489 # we need to write the list of installed packages to a file because the
490 # oe-pkgdata-util reads it from a file
491 installed_pkgs_file = os.path.join(self.d.getVar('WORKDIR', True),
492 "installed_pkgs.txt")
493 with open(installed_pkgs_file, "w+") as installed_pkgs:
494 installed_pkgs.write(self.list_installed("arch"))
495
496 if globs is None:
497 globs = self.d.getVar('IMAGE_INSTALL_COMPLEMENTARY', True) or ""
498 split_linguas = set()
499
500 for translation in self.d.getVar('IMAGE_LINGUAS', True).split():
501 split_linguas.add(translation)
502 split_linguas.add(translation.split('-')[0])
503
504 split_linguas = sorted(split_linguas)
505
506 for lang in split_linguas:
507 globs += " *-locale-%s" % lang
508
509 if not globs:
510 return
511
512 cmd = [bb.utils.which(os.getenv('PATH'), "oe-pkgdata-util"),
513 "glob", self.d.getVar('PKGDATA_DIR', True), installed_pkgs_file,
514 globs]
515 try:
516 bb.note("Installing complementary packages ...")
517 complementary_pkgs = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
518 except subprocess.CalledProcessError as e:
519 bb.fatal("Could not compute complementary packages list. Command "
520 "'%s' returned %d:\n%s" %
521 (' '.join(cmd), e.returncode, e.output))
522
523 self.install(complementary_pkgs.split(), attempt_only=True)
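# Rough illustration of the flow above for a hypothetical image with
# IMAGE_INSTALL_COMPLEMENTARY = "*-dev *-dbg" and IMAGE_LINGUAS = "en-us"
# (values are examples only):
#
#   globs -> "*-dev *-dbg *-locale-en *-locale-en-us"
#   oe-pkgdata-util glob <PKGDATA_DIR> installed_pkgs.txt "<globs>"
#       -> list of matching complementary packages
#   self.install(<that list>, attempt_only=True)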
524
525 def deploy_dir_lock(self):
526 if self.deploy_dir is None:
527 raise RuntimeError("deploy_dir is not set!")
528
529 lock_file_name = os.path.join(self.deploy_dir, "deploy.lock")
530
531 self.deploy_lock = bb.utils.lockfile(lock_file_name)
532
533 def deploy_dir_unlock(self):
534 if self.deploy_lock is None:
535 return
536
537 bb.utils.unlockfile(self.deploy_lock)
538
539 self.deploy_lock = None
540
541
542class RpmPM(PackageManager):
543 def __init__(self,
544 d,
545 target_rootfs,
546 target_vendor,
547 task_name='target',
548 providename=None,
549 arch_var=None,
550 os_var=None):
551 super(RpmPM, self).__init__(d)
552 self.target_rootfs = target_rootfs
553 self.target_vendor = target_vendor
554 self.task_name = task_name
555 self.providename = providename
556 self.fullpkglist = list()
557 self.deploy_dir = self.d.getVar('DEPLOY_DIR_RPM', True)
558 self.etcrpm_dir = os.path.join(self.target_rootfs, "etc/rpm")
559 self.install_dir = os.path.join(self.target_rootfs, "install")
560 self.rpm_cmd = bb.utils.which(os.getenv('PATH'), "rpm")
561 self.smart_cmd = bb.utils.which(os.getenv('PATH'), "smart")
562 self.smart_opt = "--data-dir=" + os.path.join(target_rootfs,
563 'var/lib/smart')
564 self.scriptlet_wrapper = self.d.expand('${WORKDIR}/scriptlet_wrapper')
565 self.solution_manifest = self.d.expand('${T}/saved/%s_solution' %
566 self.task_name)
567 self.saved_rpmlib = self.d.expand('${T}/saved/%s' % self.task_name)
568 self.image_rpmlib = os.path.join(self.target_rootfs, 'var/lib/rpm')
569
570 if not os.path.exists(self.d.expand('${T}/saved')):
571 bb.utils.mkdirhier(self.d.expand('${T}/saved'))
572
573 self.indexer = RpmIndexer(self.d, self.deploy_dir)
574 self.pkgs_list = RpmPkgsList(self.d, self.target_rootfs, arch_var, os_var)
575
576 self.ml_prefix_list, self.ml_os_list = self.indexer.get_ml_prefix_and_os_list(arch_var, os_var)
577
578 def insert_feeds_uris(self):
579 if self.feed_uris == "":
580 return
581
582 # List must be ordered from most preferred to least preferred
583 default_platform_extra = set()
584 platform_extra = set()
585 bbextendvariant = self.d.getVar('BBEXTENDVARIANT', True) or ""
586 for mlib in self.ml_os_list:
587 for arch in self.ml_prefix_list[mlib]:
588 plt = arch.replace('-', '_') + '-.*-' + self.ml_os_list[mlib]
589 if mlib == bbextendvariant:
590 default_platform_extra.add(plt)
591 else:
592 platform_extra.add(plt)
593
594 platform_extra = platform_extra.union(default_platform_extra)
595
596 arch_list = []
597 for canonical_arch in platform_extra:
598 arch = canonical_arch.split('-')[0]
599 if not os.path.exists(os.path.join(self.deploy_dir, arch)):
600 continue
601 arch_list.append(arch)
602
603 uri_iterator = 0
604 channel_priority = 10 + 5 * len(self.feed_uris.split()) * len(arch_list)
605
606 for uri in self.feed_uris.split():
607 for arch in arch_list:
608 bb.note('Note: adding Smart channel url%d-%s (%s)' %
609 (uri_iterator, arch, channel_priority))
610 self._invoke_smart('channel --add url%d-%s type=rpm-md baseurl=%s/rpm/%s -y'
611 % (uri_iterator, arch, uri, arch))
612 self._invoke_smart('channel --set url%d-%s priority=%d' %
613 (uri_iterator, arch, channel_priority))
614 channel_priority -= 5
615 uri_iterator += 1
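# For a hypothetical PACKAGE_FEED_URIS = "http://example.com/feed" and a
# deployed arch "core2_64", the loop above issues roughly:
#
#   smart channel --add url0-core2_64 type=rpm-md \
#       baseurl=http://example.com/feed/rpm/core2_64 -y
#   smart channel --set url0-core2_64 priority=<channel_priority>
#
# with the priority dropping by 5 on each iteration so that earlier (more
# preferred) arch/URI combinations win.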
616
617 '''
618 Create configs for rpm and smart; multilib is supported.
619 '''
620 def create_configs(self):
621 target_arch = self.d.getVar('TARGET_ARCH', True)
622 platform = '%s%s-%s' % (target_arch.replace('-', '_'),
623 self.target_vendor,
624 self.ml_os_list['default'])
625
626 # List must be ordered from most preferred to least preferred
627 default_platform_extra = list()
628 platform_extra = list()
629 bbextendvariant = self.d.getVar('BBEXTENDVARIANT', True) or ""
630 for mlib in self.ml_os_list:
631 for arch in self.ml_prefix_list[mlib]:
632 plt = arch.replace('-', '_') + '-.*-' + self.ml_os_list[mlib]
633 if mlib == bbextendvariant:
634 if plt not in default_platform_extra:
635 default_platform_extra.append(plt)
636 else:
637 if plt not in platform_extra:
638 platform_extra.append(plt)
639 platform_extra = default_platform_extra + platform_extra
640
641 self._create_configs(platform, platform_extra)
642
643 def _invoke_smart(self, args):
644 cmd = "%s %s %s" % (self.smart_cmd, self.smart_opt, args)
645 # bb.note(cmd)
646 try:
647 complementary_pkgs = subprocess.check_output(cmd,
648 stderr=subprocess.STDOUT,
649 shell=True)
650 # bb.note(complementary_pkgs)
651 return complementary_pkgs
652 except subprocess.CalledProcessError as e:
653 bb.fatal("Could not invoke smart. Command "
654 "'%s' returned %d:\n%s" % (cmd, e.returncode, e.output))
655
656 def _search_pkg_name_in_feeds(self, pkg, feed_archs):
657 for arch in feed_archs:
658 arch = arch.replace('-', '_')
659 for p in self.fullpkglist:
660 regex_match = r"^%s-[^-]*-[^-]*@%s$" % \
661 (re.escape(pkg), re.escape(arch))
662 if re.match(regex_match, p) is not None:
663 # First found is best match
664 # bb.note('%s -> %s' % (pkg, pkg + '@' + arch))
665 return pkg + '@' + arch
666
667 return ""
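# Example of the match above (feed entries are hypothetical): with
# "bash-4.2-r3@core2_64" in self.fullpkglist, a lookup of pkg="bash"
# against feed_archs=["core2-64"] builds the regex
# ^bash-[^-]*-[^-]*@core2_64$ and returns "bash@core2_64"; if no feed
# entry matches, the empty string is returned.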
668
669 '''
670 Translate the OE multilib format names to the RPM/Smart format names.
671 It searches the RPM/Smart format names in the probable multilib feeds first,
672 and then searches the default base feed.
673 '''
674 def _pkg_translate_oe_to_smart(self, pkgs, attempt_only=False):
675 new_pkgs = list()
676
677 for pkg in pkgs:
678 new_pkg = pkg
679 # Search new_pkg in probable multilibs first
680 for mlib in self.ml_prefix_list:
681 # Skip the default archs
682 if mlib == 'default':
683 continue
684
685 subst = pkg.replace(mlib + '-', '')
686 # if the pkg is in this multilib feed
687 if subst != pkg:
688 feed_archs = self.ml_prefix_list[mlib]
689 new_pkg = self._search_pkg_name_in_feeds(subst, feed_archs)
690 if not new_pkg:
691 # Failed to translate, package not found!
692 err_msg = '%s not found in the %s feeds (%s).\n' % \
693 (pkg, mlib, " ".join(feed_archs))
694 if not attempt_only:
695 err_msg += " ".join(self.fullpkglist)
696 bb.fatal(err_msg)
697 bb.warn(err_msg)
698 else:
699 new_pkgs.append(new_pkg)
700
701 break
702
703 # Apparently not a multilib package...
704 if pkg == new_pkg:
705 # Search new_pkg in default archs
706 default_archs = self.ml_prefix_list['default']
707 new_pkg = self._search_pkg_name_in_feeds(pkg, default_archs)
708 if not new_pkg:
709 err_msg = '%s not found in the base feeds (%s).\n' % \
710 (pkg, ' '.join(default_archs))
711 if not attempt_only:
712 err_msg += " ".join(self.fullpkglist)
713 bb.fatal(err_msg)
714 bb.warn(err_msg)
715 else:
716 new_pkgs.append(new_pkg)
717
718 return new_pkgs
719
720 def _create_configs(self, platform, platform_extra):
721 # Setup base system configuration
722 bb.note("configuring RPM platform settings")
723
724 # Configure internal RPM environment when using Smart
725 os.environ['RPM_ETCRPM'] = self.etcrpm_dir
726 bb.utils.mkdirhier(self.etcrpm_dir)
727
728 # Setup temporary directory -- install...
729 if os.path.exists(self.install_dir):
730 bb.utils.remove(self.install_dir, True)
731 bb.utils.mkdirhier(os.path.join(self.install_dir, 'tmp'))
732
733 channel_priority = 5
734 platform_dir = os.path.join(self.etcrpm_dir, "platform")
735 with open(platform_dir, "w+") as platform_fd:
736 platform_fd.write(platform + '\n')
737 for pt in platform_extra:
738 channel_priority += 5
739 platform_fd.write(re.sub("-linux.*$", "-linux.*\n", pt))
740
741 # Tell RPM that the "/" directory exists and is available
742 bb.note("configuring RPM system provides")
743 sysinfo_dir = os.path.join(self.etcrpm_dir, "sysinfo")
744 bb.utils.mkdirhier(sysinfo_dir)
745 with open(os.path.join(sysinfo_dir, "Dirnames"), "w+") as dirnames:
746 dirnames.write("/\n")
747
748 if self.providename:
749 providename_dir = os.path.join(sysinfo_dir, "Providename")
750 if not os.path.exists(providename_dir):
751 providename_content = '\n'.join(self.providename)
752 providename_content += '\n'
753 open(providename_dir, "w+").write(providename_content)
754
755 # Configure RPM... we enforce these settings!
756 bb.note("configuring RPM DB settings")
757 # After changing the __db.* cache size, the log file will not be
758 # generated automatically, which raises some warnings, so touch an
759 # empty log file for rpm to write into.
760 rpmlib_log = os.path.join(self.image_rpmlib, 'log', 'log.0000000001')
761 if not os.path.exists(rpmlib_log):
762 bb.utils.mkdirhier(os.path.join(self.image_rpmlib, 'log'))
763 open(rpmlib_log, 'w+').close()
764
765 DB_CONFIG_CONTENT = "# ================ Environment\n" \
766 "set_data_dir .\n" \
767 "set_create_dir .\n" \
768 "set_lg_dir ./log\n" \
769 "set_tmp_dir ./tmp\n" \
770 "set_flags db_log_autoremove on\n" \
771 "\n" \
772 "# -- thread_count must be >= 8\n" \
773 "set_thread_count 64\n" \
774 "\n" \
775 "# ================ Logging\n" \
776 "\n" \
777 "# ================ Memory Pool\n" \
778 "set_cachesize 0 1048576 0\n" \
779 "set_mp_mmapsize 268435456\n" \
780 "\n" \
781 "# ================ Locking\n" \
782 "set_lk_max_locks 16384\n" \
783 "set_lk_max_lockers 16384\n" \
784 "set_lk_max_objects 16384\n" \
785 "mutex_set_max 163840\n" \
786 "\n" \
787 "# ================ Replication\n"
788
789 db_config_dir = os.path.join(self.image_rpmlib, 'DB_CONFIG')
790 if not os.path.exists(db_config_dir):
791 open(db_config_dir, 'w+').write(DB_CONFIG_CONTENT)
792
793 # Create database so that smart doesn't complain (lazy init)
794 cmd = "%s --root %s --dbpath /var/lib/rpm -qa > /dev/null" % (
795 self.rpm_cmd,
796 self.target_rootfs)
797 try:
798 subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
799 except subprocess.CalledProcessError as e:
800 bb.fatal("Create rpm database failed. Command '%s' "
801 "returned %d:\n%s" % (cmd, e.returncode, e.output))
802
803 # Configure smart
804 bb.note("configuring Smart settings")
805 bb.utils.remove(os.path.join(self.target_rootfs, 'var/lib/smart'),
806 True)
807 self._invoke_smart('config --set rpm-root=%s' % self.target_rootfs)
808 self._invoke_smart('config --set rpm-dbpath=/var/lib/rpm')
809 self._invoke_smart('config --set rpm-extra-macros._var=%s' %
810 self.d.getVar('localstatedir', True))
811 cmd = 'config --set rpm-extra-macros._tmppath=/install/tmp'
812 self._invoke_smart(cmd)
813
814 # Write common configuration for host and target usage
815 self._invoke_smart('config --set rpm-nolinktos=1')
816 self._invoke_smart('config --set rpm-noparentdirs=1')
817 for i in self.d.getVar('BAD_RECOMMENDATIONS', True).split():
818 self._invoke_smart('flag --set ignore-recommends %s' % i)
819
820 # Do the following configurations here, to avoid them being
821 # saved for field upgrade
822 if self.d.getVar('NO_RECOMMENDATIONS', True).strip() == "1":
823 self._invoke_smart('config --set ignore-all-recommends=1')
824 pkg_exclude = self.d.getVar('PACKAGE_EXCLUDE', True) or ""
825 for i in pkg_exclude.split():
826 self._invoke_smart('flag --set exclude-packages %s' % i)
827
828 # Optional debugging
829 # self._invoke_smart('config --set rpm-log-level=debug')
830 # cmd = 'config --set rpm-log-file=/tmp/smart-debug-logfile'
831 # self._invoke_smart(cmd)
832 ch_already_added = []
833 for canonical_arch in platform_extra:
834 arch = canonical_arch.split('-')[0]
835 arch_channel = os.path.join(self.deploy_dir, arch)
836 if os.path.exists(arch_channel) and not arch in ch_already_added:
837 bb.note('Note: adding Smart channel %s (%s)' %
838 (arch, channel_priority))
839 self._invoke_smart('channel --add %s type=rpm-md baseurl=%s -y'
840 % (arch, arch_channel))
841 self._invoke_smart('channel --set %s priority=%d' %
842 (arch, channel_priority))
843 channel_priority -= 5
844
845 ch_already_added.append(arch)
846
847 bb.note('adding Smart RPM DB channel')
848 self._invoke_smart('channel --add rpmsys type=rpm-sys -y')
849
850 # Construct install scriptlet wrapper.
851 # Scripts need to be ordered when executed; this ensures numeric order.
852 # If we ever run into needing more than 899 scripts, we'll have to
853 # change num to start with 1000.
854 #
855 SCRIPTLET_FORMAT = "#!/bin/bash\n" \
856 "\n" \
857 "export PATH=%s\n" \
858 "export D=%s\n" \
859 'export OFFLINE_ROOT="$D"\n' \
860 'export IPKG_OFFLINE_ROOT="$D"\n' \
861 'export OPKG_OFFLINE_ROOT="$D"\n' \
862 "export INTERCEPT_DIR=%s\n" \
863 "export NATIVE_ROOT=%s\n" \
864 "\n" \
865 "$2 $1/$3 $4\n" \
866 "if [ $? -ne 0 ]; then\n" \
867 " if [ $4 -eq 1 ]; then\n" \
868 " mkdir -p $1/etc/rpm-postinsts\n" \
869 " num=100\n" \
870 " while [ -e $1/etc/rpm-postinsts/${num}-* ]; do num=$((num + 1)); done\n" \
871 " name=`head -1 $1/$3 | cut -d\' \' -f 2`\n" \
872 ' echo "#!$2" > $1/etc/rpm-postinsts/${num}-${name}\n' \
873 ' echo "# Arg: $4" >> $1/etc/rpm-postinsts/${num}-${name}\n' \
874 " cat $1/$3 >> $1/etc/rpm-postinsts/${num}-${name}\n" \
875 " chmod +x $1/etc/rpm-postinsts/${num}-${name}\n" \
876 " else\n" \
877 ' echo "Error: pre/post remove scriptlet failed"\n' \
878 " fi\n" \
879 "fi\n"
880
881 intercept_dir = self.d.expand('${WORKDIR}/intercept_scripts')
882 native_root = self.d.getVar('STAGING_DIR_NATIVE', True)
883 scriptlet_content = SCRIPTLET_FORMAT % (os.environ['PATH'],
884 self.target_rootfs,
885 intercept_dir,
886 native_root)
887 open(self.scriptlet_wrapper, 'w+').write(scriptlet_content)
888
889 bb.note("Note: configuring RPM cross-install scriptlet_wrapper")
890 os.chmod(self.scriptlet_wrapper, 0755)
891 cmd = 'config --set rpm-extra-macros._cross_scriptlet_wrapper=%s' % \
892 self.scriptlet_wrapper
893 self._invoke_smart(cmd)
894
895 # Debug to show smart config info
896 # bb.note(self._invoke_smart('config --show'))
897
898 def update(self):
899 self._invoke_smart('update rpmsys')
900
901 '''
902 Install pkgs with smart; the pkg names are in OE format.
903 '''
904 def install(self, pkgs, attempt_only=False):
905
906 bb.note("Installing the following packages: %s" % ' '.join(pkgs))
907 if attempt_only and len(pkgs) == 0:
908 return
909 pkgs = self._pkg_translate_oe_to_smart(pkgs, attempt_only)
910
911 if not attempt_only:
912 bb.note('to be installed: %s' % ' '.join(pkgs))
913 cmd = "%s %s install -y %s" % \
914 (self.smart_cmd, self.smart_opt, ' '.join(pkgs))
915 bb.note(cmd)
916 else:
917 bb.note('installing attempt only packages...')
918 bb.note('Attempting %s' % ' '.join(pkgs))
919 cmd = "%s %s install --attempt -y %s" % \
920 (self.smart_cmd, self.smart_opt, ' '.join(pkgs))
921 try:
922 output = subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
923 bb.note(output)
924 except subprocess.CalledProcessError as e:
925 bb.fatal("Unable to install packages. Command '%s' "
926 "returned %d:\n%s" % (cmd, e.returncode, e.output))
927
928 '''
929 Remove pkgs with smart; the pkg names are in smart/rpm format.
930 '''
931 def remove(self, pkgs, with_dependencies=True):
932 bb.note('to be removed: ' + ' '.join(pkgs))
933
934 if not with_dependencies:
935 cmd = "%s -e --nodeps " % self.rpm_cmd
936 cmd += "--root=%s " % self.target_rootfs
937 cmd += "--dbpath=/var/lib/rpm "
938 cmd += "--define='_cross_scriptlet_wrapper %s' " % \
939 self.scriptlet_wrapper
940 cmd += "--define='_tmppath /install/tmp' %s" % ' '.join(pkgs)
941 else:
942 # for pkg in pkgs:
943 # bb.note('Debug: What required: %s' % pkg)
944 # bb.note(self._invoke_smart('query %s --show-requiredby' % pkg))
945
946 cmd = "%s %s remove -y %s" % (self.smart_cmd,
947 self.smart_opt,
948 ' '.join(pkgs))
949
950 try:
951 bb.note(cmd)
952 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
953 bb.note(output)
954 except subprocess.CalledProcessError as e:
955 bb.note("Unable to remove packages. Command '%s' "
956 "returned %d:\n%s" % (cmd, e.returncode, e.output))
957
958 def upgrade(self):
959 bb.note('smart upgrade')
960 self._invoke_smart('upgrade')
961
962 def write_index(self):
963 result = self.indexer.write_index()
964
965 if result is not None:
966 bb.fatal(result)
967
968 def remove_packaging_data(self):
969 bb.utils.remove(self.image_rpmlib, True)
970 bb.utils.remove(os.path.join(self.target_rootfs, 'var/lib/smart'),
971 True)
972 bb.utils.remove(os.path.join(self.target_rootfs, 'var/lib/opkg'), True)
973
974 # remove temp directory
975 bb.utils.remove(self.d.expand('${IMAGE_ROOTFS}/install'), True)
976
977 def backup_packaging_data(self):
978 # Save the rpmlib for incremental rpm image generation
979 if os.path.exists(self.saved_rpmlib):
980 bb.utils.remove(self.saved_rpmlib, True)
981 shutil.copytree(self.image_rpmlib,
982 self.saved_rpmlib,
983 symlinks=True)
984
985 def recovery_packaging_data(self):
986 # Move the rpmlib back
987 if os.path.exists(self.saved_rpmlib):
988 if os.path.exists(self.image_rpmlib):
989 bb.utils.remove(self.image_rpmlib, True)
990
991 bb.note('Recovering packaging data')
992 shutil.copytree(self.saved_rpmlib,
993 self.image_rpmlib,
994 symlinks=True)
995
996 def list_installed(self, format=None):
997 return self.pkgs_list.list(format)
998
999 '''
1000 If incremental install, we need to determine what we've got,
1001 what we need to add, and what to remove...
1002 The dump_install_solution will dump and save the new install
1003 solution.
1004 '''
1005 def dump_install_solution(self, pkgs):
1006 bb.note('creating new install solution for incremental install')
1007 if len(pkgs) == 0:
1008 return
1009
1010 pkgs = self._pkg_translate_oe_to_smart(pkgs, False)
1011 install_pkgs = list()
1012
1013 cmd = "%s %s install -y --dump %s 2>%s" % \
1014 (self.smart_cmd,
1015 self.smart_opt,
1016 ' '.join(pkgs),
1017 self.solution_manifest)
1018 try:
1019 # Disable rpmsys channel for the fake install
1020 self._invoke_smart('channel --disable rpmsys')
1021
1022 subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
1023 with open(self.solution_manifest, 'r') as manifest:
1024 for pkg in manifest.read().split('\n'):
1025 if '@' in pkg:
1026 install_pkgs.append(pkg)
1027 except subprocess.CalledProcessError as e:
1028 bb.note("Unable to dump install packages. Command '%s' "
1029 "returned %d:\n%s" % (cmd, e.returncode, e.output))
1030 # Re-enable the rpmsys channel
1031 self._invoke_smart('channel --enable rpmsys')
1032 return install_pkgs
1033
1034 '''
1035 If incremental install, we need to determine what we've got,
1036 what we need to add, and what to remove...
1037 The load_old_install_solution will load the previous install
1038 solution
1039 '''
1040 def load_old_install_solution(self):
1041 bb.note('load old install solution for incremental install')
1042 installed_pkgs = list()
1043 if not os.path.exists(self.solution_manifest):
1044 bb.note('old install solution does not exist')
1045 return installed_pkgs
1046
1047 with open(self.solution_manifest, 'r') as manifest:
1048 for pkg in manifest.read().split('\n'):
1049 if '@' in pkg:
1050 installed_pkgs.append(pkg.strip())
1051
1052 return installed_pkgs
1053
1054 '''
1055 Dump all available packages in the feeds; it should be invoked after the
1056 newest rpm index has been created.
1057 '''
1058 def dump_all_available_pkgs(self):
1059 available_manifest = self.d.expand('${T}/saved/available_pkgs.txt')
1060 available_pkgs = list()
1061 cmd = "%s %s query --output %s" % \
1062 (self.smart_cmd, self.smart_opt, available_manifest)
1063 try:
1064 subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
1065 with open(available_manifest, 'r') as manifest:
1066 for pkg in manifest.read().split('\n'):
1067 if '@' in pkg:
1068 available_pkgs.append(pkg.strip())
1069 except subprocess.CalledProcessError as e:
1070 bb.note("Unable to list all available packages. Command '%s' "
1071 "returned %d:\n%s" % (cmd, e.returncode, e.output))
1072
1073 self.fullpkglist = available_pkgs
1074
1075 return
1076
1077 def save_rpmpostinst(self, pkg):
1078 mlibs = (self.d.getVar('MULTILIB_GLOBAL_VARIANTS') or "").split()
1079
1080 new_pkg = pkg
1081 # Remove any multilib prefix from the package name
1082 for mlib in mlibs:
1083 if mlib in pkg:
1084 new_pkg = pkg.replace(mlib + '-', '')
1085 break
1086
1087 bb.note(' * postponing %s' % new_pkg)
1088 saved_dir = self.target_rootfs + self.d.expand('${sysconfdir}/rpm-postinsts/') + new_pkg
1089
1090 cmd = self.rpm_cmd + ' -q --scripts --root ' + self.target_rootfs
1091 cmd += ' --dbpath=/var/lib/rpm ' + new_pkg
1092 cmd += ' | sed -n -e "/^postinstall scriptlet (using .*):$/,/^.* scriptlet (using .*):$/ {/.*/p}"'
1093 cmd += ' | sed -e "/postinstall scriptlet (using \(.*\)):$/d"'
1094 cmd += ' -e "/^.* scriptlet (using .*):$/d" > %s' % saved_dir
1095
1096 try:
1097 bb.note(cmd)
1098 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).strip()
1099 bb.note(output)
1100 os.chmod(saved_dir, 0755)
1101 except subprocess.CalledProcessError as e:
1102 bb.fatal("Invoke save_rpmpostinst failed. Command '%s' "
1103 "returned %d:\n%s" % (cmd, e.returncode, e.output))
1104
1105 '''Write common configuration for target usage'''
1106 def rpm_setup_smart_target_config(self):
1107 bb.utils.remove(os.path.join(self.target_rootfs, 'var/lib/smart'),
1108 True)
1109
1110 self._invoke_smart('config --set rpm-nolinktos=1')
1111 self._invoke_smart('config --set rpm-noparentdirs=1')
1112 for i in self.d.getVar('BAD_RECOMMENDATIONS', True).split():
1113 self._invoke_smart('flag --set ignore-recommends %s' % i)
1114 self._invoke_smart('channel --add rpmsys type=rpm-sys -y')
1115
1116 '''
1117 The rpm db lock files were produced by invoking rpm to query the db on
1118 the build system, and they prevent rpm on the target from working, so we
1119 need to unlock the rpm db by removing the lock files.
1120 '''
1121 def unlock_rpm_db(self):
1122 # Remove rpm db lock files
1123 rpm_db_locks = glob.glob('%s/var/lib/rpm/__db.*' % self.target_rootfs)
1124 for f in rpm_db_locks:
1125 bb.utils.remove(f, True)
1126
1127
1128class OpkgPM(PackageManager):
1129 def __init__(self, d, target_rootfs, config_file, archs, task_name='target'):
1130 super(OpkgPM, self).__init__(d)
1131
1132 self.target_rootfs = target_rootfs
1133 self.config_file = config_file
1134 self.pkg_archs = archs
1135 self.task_name = task_name
1136
1137 self.deploy_dir = self.d.getVar("DEPLOY_DIR_IPK", True)
1138 self.deploy_lock_file = os.path.join(self.deploy_dir, "deploy.lock")
1139 self.opkg_cmd = bb.utils.which(os.getenv('PATH'), "opkg-cl")
1140 self.opkg_args = "-f %s -o %s " % (self.config_file, target_rootfs)
1141 self.opkg_args += self.d.getVar("OPKG_ARGS", True)
1142
1143 opkg_lib_dir = self.d.getVar('OPKGLIBDIR', True)
1144 if opkg_lib_dir[0] == "/":
1145 opkg_lib_dir = opkg_lib_dir[1:]
1146
1147 self.opkg_dir = os.path.join(target_rootfs, opkg_lib_dir, "opkg")
1148
1149 bb.utils.mkdirhier(self.opkg_dir)
1150
1151 self.saved_opkg_dir = self.d.expand('${T}/saved/%s' % self.task_name)
1152 if not os.path.exists(self.d.expand('${T}/saved')):
1153 bb.utils.mkdirhier(self.d.expand('${T}/saved'))
1154
1155 if (self.d.getVar('BUILD_IMAGES_FROM_FEEDS', True) or "") != "1":
1156 self._create_config()
1157 else:
1158 self._create_custom_config()
1159
1160 self.indexer = OpkgIndexer(self.d, self.deploy_dir)
1161
1162 """
1163 This function will change a package's status in /var/lib/opkg/status file.
1164 If 'packages' is None then the status_tag will be applied to all
1165 packages.
1166 """
1167 def mark_packages(self, status_tag, packages=None):
1168 status_file = os.path.join(self.opkg_dir, "status")
1169
1170 with open(status_file, "r") as sf:
1171 with open(status_file + ".tmp", "w+") as tmp_sf:
1172 if packages is None:
1173 tmp_sf.write(re.sub(r"Package: (.*?)\n((?:[^\n]+\n)*?)Status: (.*)(?:unpacked|installed)",
1174 r"Package: \1\n\2Status: \3%s" % status_tag,
1175 sf.read()))
1176 else:
1177 if type(packages).__name__ != "list":
1178 raise TypeError("'packages' should be a list object")
1179
1180 status = sf.read()
1181 for pkg in packages:
1182 status = re.sub(r"Package: %s\n((?:[^\n]+\n)*?)Status: (.*)(?:unpacked|installed)" % pkg,
1183 r"Package: %s\n\1Status: \2%s" % (pkg, status_tag),
1184 status)
1185
1186 tmp_sf.write(status)
1187
1188 os.rename(status_file + ".tmp", status_file)
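# Sketch of the rewrite performed above (the status entry is hypothetical):
# the trailing "unpacked"/"installed" word of a Status line is replaced by
# status_tag, so mark_packages("unpacked", ["busybox"]) turns
#
#   Status: install ok installed   into   Status: install ok unpacked
#
# With packages=None every entry in the status file is rewritten.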
1189
1190 def _create_custom_config(self):
1191 bb.note("Building from feeds activated!")
1192
1193 with open(self.config_file, "w+") as config_file:
1194 priority = 1
1195 for arch in self.pkg_archs.split():
1196 config_file.write("arch %s %d\n" % (arch, priority))
1197 priority += 5
1198
1199 for line in (self.d.getVar('IPK_FEED_URIS', True) or "").split():
1200 feed_match = re.match("^[ \t]*(.*)##([^ \t]*)[ \t]*$", line)
1201
1202 if feed_match is not None:
1203 feed_name = feed_match.group(1)
1204 feed_uri = feed_match.group(2)
1205
1206 bb.note("Add %s feed with URL %s" % (feed_name, feed_uri))
1207
1208 config_file.write("src/gz %s %s\n" % (feed_name, feed_uri))
1209
1210 """
1211 Allow using the package deploy directory contents as a quick devel-testing
1212 feed. This creates individual feed configs for each arch subdir of those
1213 specified as compatible for the current machine.
1214 NOTE: Development-helper feature, NOT a full-fledged feed.
1215 """
1216 if (self.d.getVar('FEED_DEPLOYDIR_BASE_URI', True) or "") != "":
1217 for arch in self.pkg_archs.split():
1218 cfg_file_name = os.path.join(self.target_rootfs,
1219 self.d.getVar("sysconfdir", True),
1220 "opkg",
1221 "local-%s-feed.conf" % arch)
1222
1223 with open(cfg_file_name, "w+") as cfg_file:
1224 cfg_file.write("src/gz local-%s %s/%s\n" %
1225 (arch,
1226 self.d.getVar('FEED_DEPLOYDIR_BASE_URI', True),
1227 arch))
1228
1229 def _create_config(self):
1230 with open(self.config_file, "w+") as config_file:
1231 priority = 1
1232 for arch in self.pkg_archs.split():
1233 config_file.write("arch %s %d\n" % (arch, priority))
1234 priority += 5
1235
1236 config_file.write("src oe file:%s\n" % self.deploy_dir)
1237
1238 for arch in self.pkg_archs.split():
1239 pkgs_dir = os.path.join(self.deploy_dir, arch)
1240 if os.path.isdir(pkgs_dir):
1241 config_file.write("src oe-%s file:%s\n" %
1242 (arch, pkgs_dir))
1243
1244 def insert_feeds_uris(self):
1245 if self.feed_uris == "":
1246 return
1247
1248 rootfs_config = os.path.join('%s/etc/opkg/base-feeds.conf'
1249 % self.target_rootfs)
1250
1251 with open(rootfs_config, "w+") as config_file:
1252 uri_iterator = 0
1253 for uri in self.feed_uris.split():
1254 config_file.write("src/gz url-%d %s/ipk\n" %
1255 (uri_iterator, uri))
1256
1257 for arch in self.pkg_archs.split():
1258 if not os.path.exists(os.path.join(self.deploy_dir, arch)):
1259 continue
1260 bb.note('Note: adding opkg channel uri-%s-%d (%s)' %
1261 (arch, uri_iterator, uri))
1262
1263 config_file.write("src/gz uri-%s-%d %s/ipk/%s\n" %
1264 (arch, uri_iterator, uri, arch))
1265 uri_iterator += 1
1266
1267 def update(self):
1268 self.deploy_dir_lock()
1269
1270 cmd = "%s %s update" % (self.opkg_cmd, self.opkg_args)
1271
1272 try:
1273 subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
1274 except subprocess.CalledProcessError as e:
1275 self.deploy_dir_unlock()
1276 bb.fatal("Unable to update the package index files. Command '%s' "
1277 "returned %d:\n%s" % (cmd, e.returncode, e.output))
1278
1279 self.deploy_dir_unlock()
1280
1281 def install(self, pkgs, attempt_only=False):
1282 if attempt_only and len(pkgs) == 0:
1283 return
1284
1285 cmd = "%s %s install %s" % (self.opkg_cmd, self.opkg_args, ' '.join(pkgs))
1286
1287 os.environ['D'] = self.target_rootfs
1288 os.environ['OFFLINE_ROOT'] = self.target_rootfs
1289 os.environ['IPKG_OFFLINE_ROOT'] = self.target_rootfs
1290 os.environ['OPKG_OFFLINE_ROOT'] = self.target_rootfs
1291 os.environ['INTERCEPT_DIR'] = os.path.join(self.d.getVar('WORKDIR', True),
1292 "intercept_scripts")
1293 os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE', True)
1294
1295 try:
1296 bb.note("Installing the following packages: %s" % ' '.join(pkgs))
1297 bb.note(cmd)
1298 output = subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
1299 bb.note(output)
1300 except subprocess.CalledProcessError as e:
1301 (bb.fatal, bb.note)[attempt_only]("Unable to install packages. "
1302 "Command '%s' returned %d:\n%s" %
1303 (cmd, e.returncode, e.output))
1304
1305 def remove(self, pkgs, with_dependencies=True):
1306 if with_dependencies:
1307 cmd = "%s %s --force-depends --force-remove --force-removal-of-dependent-packages remove %s" % \
1308 (self.opkg_cmd, self.opkg_args, ' '.join(pkgs))
1309 else:
1310 cmd = "%s %s --force-depends remove %s" % \
1311 (self.opkg_cmd, self.opkg_args, ' '.join(pkgs))
1312
1313 try:
1314 bb.note(cmd)
1315 output = subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
1316 bb.note(output)
1317 except subprocess.CalledProcessError as e:
1318 bb.fatal("Unable to remove packages. Command '%s' "
1319 "returned %d:\n%s" % (e.cmd, e.returncode, e.output))
1320
1321 def write_index(self):
1322 self.deploy_dir_lock()
1323
1324 result = self.indexer.write_index()
1325
1326 self.deploy_dir_unlock()
1327
1328 if result is not None:
1329 bb.fatal(result)
1330
1331 def remove_packaging_data(self):
1332 bb.utils.remove(self.opkg_dir, True)
1333 # recreate the directory; it's needed by the PM lock
1334 bb.utils.mkdirhier(self.opkg_dir)
1335
1336 def list_installed(self, format=None):
1337 return OpkgPkgsList(self.d, self.target_rootfs, self.config_file).list(format)
1338
1339 def handle_bad_recommendations(self):
1340 bad_recommendations = self.d.getVar("BAD_RECOMMENDATIONS", True) or ""
1341 if bad_recommendations.strip() == "":
1342 return
1343
1344 status_file = os.path.join(self.opkg_dir, "status")
1345
1346 # If the status file exists, it means the bad recommendations have already
1347 # been handled
1348 if os.path.exists(status_file):
1349 return
1350
1351 cmd = "%s %s info " % (self.opkg_cmd, self.opkg_args)
1352
1353 with open(status_file, "w+") as status:
1354 for pkg in bad_recommendations.split():
1355 pkg_info = cmd + pkg
1356
1357 try:
1358 output = subprocess.check_output(pkg_info.split(), stderr=subprocess.STDOUT).strip()
1359 except subprocess.CalledProcessError as e:
1360 bb.fatal("Cannot get package info. Command '%s' "
1361 "returned %d:\n%s" % (pkg_info, e.returncode, e.output))
1362
1363 if output == "":
1364 bb.note("Ignored bad recommendation: '%s' is "
1365 "not a package" % pkg)
1366 continue
1367
1368 for line in output.split('\n'):
1369 if line.startswith("Status:"):
1370 status.write("Status: deinstall hold not-installed\n")
1371 else:
1372 status.write(line + "\n")
1373
1374 '''
1375 The following function dummy-installs pkgs and returns the output log.
1376 '''
1377 def dummy_install(self, pkgs):
1378 if len(pkgs) == 0:
1379 return
1380
1381 # Create a temp dir as the opkg root for the dummy installation
1382 temp_rootfs = self.d.expand('${T}/opkg')
1383 temp_opkg_dir = os.path.join(temp_rootfs, 'var/lib/opkg')
1384 bb.utils.mkdirhier(temp_opkg_dir)
1385
1386 opkg_args = "-f %s -o %s " % (self.config_file, temp_rootfs)
1387 opkg_args += self.d.getVar("OPKG_ARGS", True)
1388
1389 cmd = "%s %s update" % (self.opkg_cmd, opkg_args)
1390 try:
1391 subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
1392 except subprocess.CalledProcessError as e:
1393 bb.fatal("Unable to update. Command '%s' "
1394 "returned %d:\n%s" % (cmd, e.returncode, e.output))
1395
1396 # Dummy installation
1397 cmd = "%s %s --noaction install %s " % (self.opkg_cmd,
1398 opkg_args,
1399 ' '.join(pkgs))
1400 try:
1401 output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
1402 except subprocess.CalledProcessError as e:
1403 bb.fatal("Unable to dummy install packages. Command '%s' "
1404 "returned %d:\n%s" % (cmd, e.returncode, e.output))
1405
1406 bb.utils.remove(temp_rootfs, True)
1407
1408 return output
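# dummy_install() is effectively a probe: it resolves 'pkgs' against a
# throwaway opkg root under ${T}/opkg and hands back opkg's --noaction
# output, so callers can inspect what would be installed without touching
# the real target rootfs.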
1409
1410 def backup_packaging_data(self):
1411 # Save the opkg lib for incremental ipk image generation
1412 if os.path.exists(self.saved_opkg_dir):
1413 bb.utils.remove(self.saved_opkg_dir, True)
1414 shutil.copytree(self.opkg_dir,
1415 self.saved_opkg_dir,
1416 symlinks=True)
1417
1418 def recover_packaging_data(self):
1419 # Move the opkglib back
1420 if os.path.exists(self.saved_opkg_dir):
1421 if os.path.exists(self.opkg_dir):
1422 bb.utils.remove(self.opkg_dir, True)
1423
1424 bb.note('Recover packaging data')
1425 shutil.copytree(self.saved_opkg_dir,
1426 self.opkg_dir,
1427 symlinks=True)
1428
1429
1430class DpkgPM(PackageManager):
1431 def __init__(self, d, target_rootfs, archs, base_archs, apt_conf_dir=None):
1432 super(DpkgPM, self).__init__(d)
1433 self.target_rootfs = target_rootfs
1434 self.deploy_dir = self.d.getVar('DEPLOY_DIR_DEB', True)
1435 if apt_conf_dir is None:
1436 self.apt_conf_dir = self.d.expand("${APTCONF_TARGET}/apt")
1437 else:
1438 self.apt_conf_dir = apt_conf_dir
1439 self.apt_conf_file = os.path.join(self.apt_conf_dir, "apt.conf")
1440 self.apt_get_cmd = bb.utils.which(os.getenv('PATH'), "apt-get")
1441
1442 self.apt_args = d.getVar("APT_ARGS", True)
1443
1444 self._create_configs(archs, base_archs)
1445
1446 self.indexer = DpkgIndexer(self.d, self.deploy_dir)
1447
1448 """
1449 This function will change a package's status in /var/lib/dpkg/status file.
1450 If 'packages' is None then the status_tag will be applied to all
1451 packages.
1452 """
1453 def mark_packages(self, status_tag, packages=None):
1454 status_file = self.target_rootfs + "/var/lib/dpkg/status"
1455
1456 with open(status_file, "r") as sf:
1457 with open(status_file + ".tmp", "w+") as tmp_sf:
1458 if packages is None:
1459 tmp_sf.write(re.sub(r"Package: (.*?)\n((?:[^\n]+\n)*?)Status: (.*)(?:unpacked|installed)",
1460 r"Package: \1\n\2Status: \3%s" % status_tag,
1461 sf.read()))
1462 else:
1463 if type(packages).__name__ != "list":
1464 raise TypeError("'packages' should be a list object")
1465
1466 status = sf.read()
1467 for pkg in packages:
1468 status = re.sub(r"Package: %s\n((?:[^\n]+\n)*?)Status: (.*)(?:unpacked|installed)" % pkg,
1469 r"Package: %s\n\1Status: \2%s" % (pkg, status_tag),
1470 status)
1471
1472 tmp_sf.write(status)
1473
1474 os.rename(status_file + ".tmp", status_file)
1475
1476 """
1477 Run the pre/post installs for package "package_name". If package_name is
1478 None, then run all pre/post install scriptlets.
1479 """
1480 def run_pre_post_installs(self, package_name=None):
1481 info_dir = self.target_rootfs + "/var/lib/dpkg/info"
1482 suffixes = [(".preinst", "Preinstall"), (".postinst", "Postinstall")]
1483 status_file = self.target_rootfs + "/var/lib/dpkg/status"
1484 installed_pkgs = []
1485
1486 with open(status_file, "r") as status:
1487 for line in status.read().split('\n'):
1488 m = re.match("^Package: (.*)", line)
1489 if m is not None:
1490 installed_pkgs.append(m.group(1))
1491
1492 if package_name is not None and not package_name in installed_pkgs:
1493 return
1494
1495 os.environ['D'] = self.target_rootfs
1496 os.environ['OFFLINE_ROOT'] = self.target_rootfs
1497 os.environ['IPKG_OFFLINE_ROOT'] = self.target_rootfs
1498 os.environ['OPKG_OFFLINE_ROOT'] = self.target_rootfs
1499 os.environ['INTERCEPT_DIR'] = os.path.join(self.d.getVar('WORKDIR', True),
1500 "intercept_scripts")
1501 os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE', True)
1502
1503 failed_pkgs = []
1504 for pkg_name in installed_pkgs:
1505 for suffix in suffixes:
1506 p_full = os.path.join(info_dir, pkg_name + suffix[0])
1507 if os.path.exists(p_full):
1508 try:
1509 bb.note("Executing %s for package: %s ..." %
1510 (suffix[1].lower(), pkg_name))
1511 subprocess.check_output(p_full, stderr=subprocess.STDOUT)
1512 except subprocess.CalledProcessError as e:
1513 bb.note("%s for package %s failed with %d:\n%s" %
1514 (suffix[1], pkg_name, e.returncode, e.output))
1515 failed_pkgs.append(pkg_name)
1516 break
1517
1518 if len(failed_pkgs):
1519 self.mark_packages("unpacked", failed_pkgs)
1520
1521 def update(self):
1522 os.environ['APT_CONFIG'] = self.apt_conf_file
1523
1524 self.deploy_dir_lock()
1525
1526 cmd = "%s update" % self.apt_get_cmd
1527
1528 try:
1529 subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
1530 except subprocess.CalledProcessError as e:
1531 bb.fatal("Unable to update the package index files. Command '%s' "
1532 "returned %d:\n%s" % (e.cmd, e.returncode, e.output))
1533
1534 self.deploy_dir_unlock()
1535
1536 def install(self, pkgs, attempt_only=False):
1537 if attempt_only and len(pkgs) == 0:
1538 return
1539
1540 os.environ['APT_CONFIG'] = self.apt_conf_file
1541
1542 cmd = "%s %s install --force-yes --allow-unauthenticated %s" % \
1543 (self.apt_get_cmd, self.apt_args, ' '.join(pkgs))
1544
1545 try:
1546 bb.note("Installing the following packages: %s" % ' '.join(pkgs))
1547 subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
1548 except subprocess.CalledProcessError as e:
1549 (bb.fatal, bb.note)[attempt_only]("Unable to install packages. "
1550 "Command '%s' returned %d:\n%s" %
1551 (cmd, e.returncode, e.output))
1552
1553 # rename *.dpkg-new files/dirs
1554 for root, dirs, files in os.walk(self.target_rootfs):
1555 for dir in dirs:
1556 new_dir = re.sub("\.dpkg-new", "", dir)
1557 if dir != new_dir:
1558 os.rename(os.path.join(root, dir),
1559 os.path.join(root, new_dir))
1560
1561 for file in files:
1562 new_file = re.sub("\.dpkg-new", "", file)
1563 if file != new_file:
1564 os.rename(os.path.join(root, file),
1565 os.path.join(root, new_file))
1566
1567
1568 def remove(self, pkgs, with_dependencies=True):
1569 if with_dependencies:
1570 os.environ['APT_CONFIG'] = self.apt_conf_file
1571 cmd = "%s remove %s" % (self.apt_get_cmd, ' '.join(pkgs))
1572 else:
1573 cmd = "%s --admindir=%s/var/lib/dpkg --instdir=%s" \
1574 " -r --force-depends %s" % \
1575 (bb.utils.which(os.getenv('PATH'), "dpkg"),
1576 self.target_rootfs, self.target_rootfs, ' '.join(pkgs))
1577
1578 try:
1579 subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
1580 except subprocess.CalledProcessError as e:
1581 bb.fatal("Unable to remove packages. Command '%s' "
1582 "returned %d:\n%s" % (e.cmd, e.returncode, e.output))
1583
1584 def write_index(self):
1585 self.deploy_dir_lock()
1586
1587 result = self.indexer.write_index()
1588
1589 self.deploy_dir_unlock()
1590
1591 if result is not None:
1592 bb.fatal(result)
1593
1594 def insert_feeds_uris(self):
1595 if self.feed_uris == "":
1596 return
1597
1598 sources_conf = os.path.join("%s/etc/apt/sources.list"
1599 % self.target_rootfs)
1600 arch_list = []
1601 archs = self.d.getVar('PACKAGE_ARCHS', True)
1602 for arch in archs.split():
1603 if not os.path.exists(os.path.join(self.deploy_dir, arch)):
1604 continue
1605 arch_list.append(arch)
1606
1607 with open(sources_conf, "w+") as sources_file:
1608 for uri in self.feed_uris.split():
1609 for arch in arch_list:
1610 bb.note('Note: adding dpkg channel at (%s)' % uri)
1611 sources_file.write("deb %s/deb/%s ./\n" %
1612 (uri, arch))
1613
1614 def _create_configs(self, archs, base_archs):
1615 base_archs = re.sub("_", "-", base_archs)
1616
1617 if os.path.exists(self.apt_conf_dir):
1618 bb.utils.remove(self.apt_conf_dir, True)
1619
1620 bb.utils.mkdirhier(self.apt_conf_dir)
1621 bb.utils.mkdirhier(self.apt_conf_dir + "/lists/partial/")
1622 bb.utils.mkdirhier(self.apt_conf_dir + "/apt.conf.d/")
1623
1624 arch_list = []
1625 for arch in archs.split():
1626 if not os.path.exists(os.path.join(self.deploy_dir, arch)):
1627 continue
1628 arch_list.append(arch)
1629
1630 with open(os.path.join(self.apt_conf_dir, "preferences"), "w+") as prefs_file:
1631 priority = 801
1632 for arch in arch_list:
1633 prefs_file.write(
1634 "Package: *\n"
1635 "Pin: release l=%s\n"
1636 "Pin-Priority: %d\n\n" % (arch, priority))
1637
1638 priority += 5
1639
1640 for pkg in self.d.getVar('PACKAGE_EXCLUDE', True).split():
1641 prefs_file.write(
1642 "Package: %s\n"
1643 "Pin: release *\n"
1644 "Pin-Priority: -1\n\n" % pkg)
1645
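# Each deployed arch gets a pin stanza roughly like the following (the arch
# name is hypothetical; priorities start at 801 and grow by 5 per arch):
#
#   Package: *
#   Pin: release l=core2-64
#   Pin-Priority: 801
#
# while every PACKAGE_EXCLUDE entry is pinned to -1 so apt never selects it.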
1646 arch_list.reverse()
1647
1648 with open(os.path.join(self.apt_conf_dir, "sources.list"), "w+") as sources_file:
1649 for arch in arch_list:
1650 sources_file.write("deb file:%s/ ./\n" %
1651 os.path.join(self.deploy_dir, arch))
1652
1653 with open(self.apt_conf_file, "w+") as apt_conf:
1654 with open(self.d.expand("${STAGING_ETCDIR_NATIVE}/apt/apt.conf.sample")) as apt_conf_sample:
1655 for line in apt_conf_sample.read().split("\n"):
1656 line = re.sub("Architecture \".*\";",
1657 "Architecture \"%s\";" % base_archs, line)
1658 line = re.sub("#ROOTFS#", self.target_rootfs, line)
1659 line = re.sub("#APTCONF#", self.apt_conf_dir, line)
1660
1661 apt_conf.write(line + "\n")
1662
1663 target_dpkg_dir = "%s/var/lib/dpkg" % self.target_rootfs
1664 bb.utils.mkdirhier(os.path.join(target_dpkg_dir, "info"))
1665
1666 bb.utils.mkdirhier(os.path.join(target_dpkg_dir, "updates"))
1667
1668 if not os.path.exists(os.path.join(target_dpkg_dir, "status")):
1669 open(os.path.join(target_dpkg_dir, "status"), "w+").close()
1670 if not os.path.exists(os.path.join(target_dpkg_dir, "available")):
1671 open(os.path.join(target_dpkg_dir, "available"), "w+").close()
1672
1673 def remove_packaging_data(self):
1674 bb.utils.remove(os.path.join(self.target_rootfs,
1675 self.d.getVar('opkglibdir', True)), True)
1676 bb.utils.remove(self.target_rootfs + "/var/lib/dpkg/", True)
1677
1678 def fix_broken_dependencies(self):
1679 os.environ['APT_CONFIG'] = self.apt_conf_file
1680
1681 cmd = "%s %s -f install" % (self.apt_get_cmd, self.apt_args)
1682
1683 try:
1684 subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT)
1685 except subprocess.CalledProcessError as e:
1686 bb.fatal("Cannot fix broken dependencies. Command '%s' "
1687 "returned %d:\n%s" % (cmd, e.returncode, e.output))
1688
1689 def list_installed(self, format=None):
1690 return DpkgPkgsList(self.d, self.target_rootfs).list()
1691
1692
1693def generate_index_files(d):
1694 classes = d.getVar('PACKAGE_CLASSES', True).replace("package_", "").split()
1695
1696 indexer_map = {
1697 "rpm": (RpmIndexer, d.getVar('DEPLOY_DIR_RPM', True)),
1698 "ipk": (OpkgIndexer, d.getVar('DEPLOY_DIR_IPK', True)),
1699 "deb": (DpkgIndexer, d.getVar('DEPLOY_DIR_DEB', True))
1700 }
1701
1702 result = None
1703
1704 for pkg_class in classes:
1705 if not pkg_class in indexer_map:
1706 continue
1707
1708 if os.path.exists(indexer_map[pkg_class][1]):
1709 result = indexer_map[pkg_class][0](d, indexer_map[pkg_class][1]).write_index()
1710
1711 if result is not None:
1712 bb.fatal(result)
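# Minimal usage sketch, assuming a bitbake datastore 'd' is available (for
# example inside a python task):
#
#   import oe.package_manager
#   oe.package_manager.generate_index_files(d)
#
# This indexes the deploy directory of every backend listed in
# PACKAGE_CLASSES that actually exists on disk.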
1713
1714if __name__ == "__main__":
1715 """
1716 We should be able to run this as a standalone script, from outside the
1717 bitbake environment.
1718 """
1719 """
1720 TBD
1721 """