Diffstat (limited to 'meta/classes/package.bbclass')
-rw-r--r--    meta/classes/package.bbclass    729
1 file changed, 729 insertions, 0 deletions
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass
new file mode 100644
index 0000000000..9edcc1e5ed
--- /dev/null
+++ b/meta/classes/package.bbclass
@@ -0,0 +1,729 @@
def legitimize_package_name(s):
    return s.lower().replace('_', '-').replace('@', '+').replace(',', '+').replace('/', '-')

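# Illustrative only (not part of the original change): the expected behaviour of
# legitimize_package_name() on a couple of assumed inputs, following directly
# from the replacements above.
#
#   legitimize_package_name('GConf_2.0')   -> 'gconf-2.0'
#   legitimize_package_name('mod@1,alpha') -> 'mod+1+alpha'
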
STAGING_PKGMAPS_DIR ?= "${STAGING_DIR}/pkgmaps"

def add_package_mapping (pkg, new_name, d):
    import bb, os

    def encode(str):
        import codecs
        c = codecs.getencoder("string_escape")
        return c(str)[0]

    pmap_dir = bb.data.getVar('STAGING_PKGMAPS_DIR', d, 1)

    bb.mkdirhier(pmap_dir)

    data_file = os.path.join(pmap_dir, pkg)

    f = open(data_file, 'w')
    f.write("%s\n" % encode(new_name))
    f.close()

def get_package_mapping (pkg, d):
    import bb, os

    def decode(str):
        import codecs
        c = codecs.getdecoder("string_escape")
        return c(str)[0]

    data_file = bb.data.expand("${STAGING_PKGMAPS_DIR}/%s" % pkg, d)

    if os.access(data_file, os.R_OK):
        f = file(data_file, 'r')
        lines = f.readlines()
        f.close()
        for l in lines:
            return decode(l).strip()
    return pkg
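
# Illustrative sketch (assumes a parsed datastore 'd' with STAGING_PKGMAPS_DIR
# set, and a hypothetical package 'libfoo1'): the intended round trip of the
# two helpers above.
#
#   add_package_mapping('libfoo1', 'libfoo', d)
#   get_package_mapping('libfoo1', d)     # -> 'libfoo'
#   get_package_mapping('libfoo-dev', d)  # -> 'libfoo-dev' (no mapping recorded)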

def runtime_mapping_rename (varname, d):
    import bb, os

    #bb.note("%s before: %s" % (varname, bb.data.getVar(varname, d, 1)))

    new_depends = []
    for depend in explode_deps(bb.data.getVar(varname, d, 1) or ""):
        # Have to be careful with any version component of the depend
        split_depend = depend.split(' (')
        new_depend = get_package_mapping(split_depend[0].strip(), d)
        if len(split_depend) > 1:
            new_depends.append("%s (%s" % (new_depend, split_depend[1]))
        else:
            new_depends.append(new_depend)

    bb.data.setVar(varname, " ".join(new_depends) or None, d)

    #bb.note("%s after: %s" % (varname, bb.data.getVar(varname, d, 1)))

python package_mapping_rename_hook () {
    runtime_mapping_rename("RDEPENDS", d)
    runtime_mapping_rename("RRECOMMENDS", d)
    runtime_mapping_rename("RSUGGESTS", d)
    runtime_mapping_rename("RPROVIDES", d)
    runtime_mapping_rename("RREPLACES", d)
    runtime_mapping_rename("RCONFLICTS", d)
}


def do_split_packages(d, root, file_regex, output_pattern, description, postinst=None, recursive=False, hook=None, extra_depends=None, aux_files_pattern=None, postrm=None, allow_dirs=False, prepend=False, match_path=False, aux_files_pattern_verbatim=None):
    import os, os.path, bb

    dvar = bb.data.getVar('D', d, 1)
    if not dvar:
        bb.error("D not defined")
        return

    packages = bb.data.getVar('PACKAGES', d, 1).split()
    if not packages:
        # nothing to do
        return

    if postinst:
        postinst = '#!/bin/sh\n' + postinst + '\n'
    if postrm:
        postrm = '#!/bin/sh\n' + postrm + '\n'
    if not recursive:
        objs = os.listdir(dvar + root)
    else:
        objs = []
        for walkroot, dirs, files in os.walk(dvar + root):
            for file in files:
                relpath = os.path.join(walkroot, file).replace(dvar + root + '/', '', 1)
                if relpath:
                    objs.append(relpath)

    if extra_depends == None:
        extra_depends = bb.data.getVar('PKG_' + packages[0], d, 1) or packages[0]

    for o in objs:
        import re, stat
        if match_path:
            m = re.match(file_regex, o)
        else:
            m = re.match(file_regex, os.path.basename(o))

        if not m:
            continue
        f = os.path.join(dvar + root, o)
        mode = os.lstat(f).st_mode
        if not (stat.S_ISREG(mode) or (allow_dirs and stat.S_ISDIR(mode))):
            continue
        on = legitimize_package_name(m.group(1))
        pkg = output_pattern % on
        if not pkg in packages:
            if prepend:
                packages = [pkg] + packages
            else:
                packages.append(pkg)
            the_files = [os.path.join(root, o)]
            if aux_files_pattern:
                if type(aux_files_pattern) is list:
                    for fp in aux_files_pattern:
                        the_files.append(fp % on)
                else:
                    the_files.append(aux_files_pattern % on)
            if aux_files_pattern_verbatim:
                if type(aux_files_pattern_verbatim) is list:
                    for fp in aux_files_pattern_verbatim:
                        the_files.append(fp % m.group(1))
                else:
                    the_files.append(aux_files_pattern_verbatim % m.group(1))
            bb.data.setVar('FILES_' + pkg, " ".join(the_files), d)
            if extra_depends != '':
                the_depends = bb.data.getVar('RDEPENDS_' + pkg, d, 1)
                if the_depends:
                    the_depends = '%s %s' % (the_depends, extra_depends)
                else:
                    the_depends = extra_depends
                bb.data.setVar('RDEPENDS_' + pkg, the_depends, d)
            bb.data.setVar('DESCRIPTION_' + pkg, description % on, d)
            if postinst:
                bb.data.setVar('pkg_postinst_' + pkg, postinst, d)
            if postrm:
                bb.data.setVar('pkg_postrm_' + pkg, postrm, d)
        else:
            oldfiles = bb.data.getVar('FILES_' + pkg, d, 1)
            if not oldfiles:
                bb.fatal("Package '%s' exists but has no files" % pkg)
            bb.data.setVar('FILES_' + pkg, oldfiles + " " + os.path.join(root, o), d)
        if callable(hook):
            hook(f, pkg, file_regex, output_pattern, m.group(1))

    bb.data.setVar('PACKAGES', ' '.join(packages), d)
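
# Illustrative recipe usage (commented out; the plugin directory, regex and
# package names below are assumptions, not part of this class).
# do_split_packages() is intended to be called from a recipe's
# populate_packages_prepend, along these lines:
#
#   python populate_packages_prepend () {
#       plugindir = bb.data.expand('${libdir}/myapp/plugins', d)
#       do_split_packages(d, plugindir, '^lib(.*)\.so$',
#                         output_pattern='myapp-plugin-%s',
#                         description='MyApp plugin for %s',
#                         prepend=True)
#   }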

# Function to strip a single file, called from RUNSTRIP below
# A working 'file' (one which works on the target architecture)
# is necessary for this stuff to work.
#PACKAGE_DEPENDS ?= "file-native"
#DEPENDS_prepend =+ "${PACKAGE_DEPENDS} "
#FIXME: this should be "" when any errors are gone!
IGNORE_STRIP_ERRORS ?= "1"

runstrip() {
    local ro st
    st=0
    if { file "$1" || {
            oewarn "file $1: failed (forced strip)" >&2
            echo 'not stripped'
        }
    } | grep -q 'not stripped'
    then
        oenote "${STRIP} $1"
        ro=
        test -w "$1" || {
            ro=1
            chmod +w "$1"
        }
        '${STRIP}' "$1"
        st=$?
        test -n "$ro" && chmod -w "$1"
        if test $st -ne 0
        then
            oewarn "runstrip: ${STRIP} $1: strip failed" >&2
            if [ x${IGNORE_STRIP_ERRORS} == x1 ]
            then
                #FIXME: remove this, it's for error detection
                if file "$1" 2>/dev/null >&2
                then
                    (oefatal "${STRIP} $1: command failed" >/dev/tty)
                else
                    (oefatal "file $1: command failed" >/dev/tty)
                fi
                st=0
            fi
        fi
    else
        oenote "runstrip: skip $1"
    fi
    return $st
}
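
# Illustrative only: populate_packages (below) collects one "runstrip <file>"
# call per executable it moves and runs the collected lines as a generated
# RUNSTRIP shell function, roughly of this shape (the file path is an assumed
# example):
#
#   local st
#   st=0
#   runstrip ${WORKDIR}/install/${PN}/usr/bin/foo || st=1
#   return $st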

python populate_packages () {
    import glob, stat, errno, re

    workdir = bb.data.getVar('WORKDIR', d, 1)
    if not workdir:
        bb.error("WORKDIR not defined, unable to package")
        return

    import os # path manipulations
    outdir = bb.data.getVar('DEPLOY_DIR', d, 1)
    if not outdir:
        bb.error("DEPLOY_DIR not defined, unable to package")
        return
    bb.mkdirhier(outdir)

    dvar = bb.data.getVar('D', d, 1)
    if not dvar:
        bb.error("D not defined, unable to package")
        return
    bb.mkdirhier(dvar)

    packages = bb.data.getVar('PACKAGES', d, 1)
    if not packages:
        bb.debug(1, "PACKAGES not defined, nothing to package")
        return

    pn = bb.data.getVar('PN', d, 1)
    if not pn:
        bb.error("PN not defined")
        return

    os.chdir(dvar)

    def isexec(path):
        try:
            s = os.stat(path)
        except (os.error, AttributeError):
            return 0
        return (s[stat.ST_MODE] & stat.S_IEXEC)

    # Sanity check PACKAGES for duplicates - should be moved to
    # sanity.bbclass once we have the infrastructure
    pkgs = []
    for pkg in packages.split():
        if pkg in pkgs:
            bb.error("%s is listed in PACKAGES multiple times. Undefined behaviour will result." % pkg)
        pkgs.append(pkg)

    for pkg in packages.split():
        localdata = bb.data.createCopy(d)
        root = os.path.join(workdir, "install", pkg)

        os.system('rm -rf %s' % root)

        bb.data.setVar('ROOT', '', localdata)
        bb.data.setVar('ROOT_%s' % pkg, root, localdata)
        pkgname = bb.data.getVar('PKG_%s' % pkg, localdata, 1)
        if not pkgname:
            pkgname = pkg
        bb.data.setVar('PKG', pkgname, localdata)

        overrides = bb.data.getVar('OVERRIDES', localdata, 1)
        if not overrides:
            raise bb.build.FuncFailed('OVERRIDES not defined')
        bb.data.setVar('OVERRIDES', overrides+':'+pkg, localdata)

        bb.data.update_data(localdata)

        root = bb.data.getVar('ROOT', localdata, 1)
        bb.mkdirhier(root)
        filesvar = bb.data.getVar('FILES', localdata, 1) or ""
        files = filesvar.split()
        stripfunc = ""
        for file in files:
            if os.path.isabs(file):
                file = '.' + file
            if not os.path.islink(file):
                if os.path.isdir(file):
                    newfiles = [ os.path.join(file,x) for x in os.listdir(file) ]
                    if newfiles:
                        files += newfiles
                        continue
                globbed = glob.glob(file)
                if globbed:
                    if [ file ] != globbed:
                        files += globbed
                        continue
            if (not os.path.islink(file)) and (not os.path.exists(file)):
                continue
            fpath = os.path.join(root,file)
            dpath = os.path.dirname(fpath)
            bb.mkdirhier(dpath)
            if (bb.data.getVar('INHIBIT_PACKAGE_STRIP', d, 1) != '1') and not os.path.islink(file) and isexec(file):
                stripfunc += "\trunstrip %s || st=1\n" % fpath
            ret = bb.movefile(file,fpath)
            if ret is None or ret == 0:
                raise bb.build.FuncFailed("File population failed")
        if not stripfunc == "":
            from bb import build
            # strip
            bb.data.setVar('RUNSTRIP', '\tlocal st\n\tst=0\n%s\treturn $st' % stripfunc, localdata)
            bb.data.setVarFlag('RUNSTRIP', 'func', 1, localdata)
            bb.build.exec_func('RUNSTRIP', localdata)
        del localdata
    os.chdir(workdir)

    unshipped = []
    for root, dirs, files in os.walk(dvar):
        for f in files:
            path = os.path.join(root[len(dvar):], f)
            unshipped.append(path)

    if unshipped != []:
        bb.note("the following files were installed but not shipped in any package:")
        for f in unshipped:
            bb.note("  " + f)

    bb.build.exec_func("package_name_hook", d)

    for pkg in packages.split():
        pkgname = bb.data.getVar('PKG_%s' % pkg, d, 1)
        if pkgname is None:
            bb.data.setVar('PKG_%s' % pkg, pkg, d)
        else:
            add_package_mapping(pkg, pkgname, d)

    dangling_links = {}
    pkg_files = {}
    for pkg in packages.split():
        dangling_links[pkg] = []
        pkg_files[pkg] = []
        inst_root = os.path.join(workdir, "install", pkg)
        for root, dirs, files in os.walk(inst_root):
            for f in files:
                path = os.path.join(root, f)
                rpath = path[len(inst_root):]
                pkg_files[pkg].append(rpath)
                try:
                    s = os.stat(path)
                except OSError, (err, strerror):
                    if err != errno.ENOENT:
                        raise
                    target = os.readlink(path)
                    if target[0] != '/':
                        target = os.path.join(root[len(inst_root):], target)
                    dangling_links[pkg].append(os.path.normpath(target))

    for pkg in packages.split():
        rdepends = explode_deps(bb.data.getVar('RDEPENDS_' + pkg, d, 1) or bb.data.getVar('RDEPENDS', d, 1) or "")
        for l in dangling_links[pkg]:
            found = False
            bb.debug(1, "%s contains dangling link %s" % (pkg, l))
            for p in packages.split():
                for f in pkg_files[p]:
                    if f == l:
                        found = True
                        bb.debug(1, "target found in %s" % p)
                        if p == pkg:
                            break
                        dp = bb.data.getVar('PKG_' + p, d, 1) or p
                        if not dp in rdepends:
                            rdepends.append(dp)
                        break
            if found == False:
                bb.note("%s contains dangling symlink to %s" % (pkg, l))
        bb.data.setVar('RDEPENDS_' + pkg, " " + " ".join(rdepends), d)

    def write_if_exists(f, pkg, var):
        def encode(str):
            import codecs
            c = codecs.getencoder("string_escape")
            return c(str)[0]

        val = bb.data.getVar('%s_%s' % (var, pkg), d, 1)
        if val:
            f.write('%s_%s: %s\n' % (var, pkg, encode(val)))

    data_file = os.path.join(workdir, "install", pn + ".package")
    f = open(data_file, 'w')
    f.write("PACKAGES: %s\n" % packages)
    for pkg in packages.split():
        write_if_exists(f, pkg, 'DESCRIPTION')
        write_if_exists(f, pkg, 'RDEPENDS')
        write_if_exists(f, pkg, 'RPROVIDES')
        write_if_exists(f, pkg, 'PKG')
        write_if_exists(f, pkg, 'ALLOW_EMPTY')
        write_if_exists(f, pkg, 'FILES')
        write_if_exists(f, pkg, 'pkg_postinst')
        write_if_exists(f, pkg, 'pkg_postrm')
        write_if_exists(f, pkg, 'pkg_preinst')
        write_if_exists(f, pkg, 'pkg_prerm')
    f.close()
    bb.build.exec_func("read_subpackage_metadata", d)
}
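
# Illustrative layout of the generated ${WORKDIR}/install/<PN>.package file
# (package names and values below are assumed, not taken from a real build);
# each line is "VAR_pkg: string-escaped value", as written by write_if_exists():
#
#   PACKAGES: libfoo1 libfoo-dev libfoo-doc
#   DESCRIPTION_libfoo1: Foo runtime library
#   FILES_libfoo1: ${libdir}/libfoo.so.*
#   PKG_libfoo1: libfoo1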

ldconfig_postinst_fragment() {
    if [ x"$D" = "x" ]; then
        ldconfig
    fi
}

python package_do_shlibs() {
    import os, re, os.path

    exclude_shlibs = bb.data.getVar('EXCLUDE_FROM_SHLIBS', d, 0)
    if exclude_shlibs:
        bb.note("not generating shlibs")
        return

    lib_re = re.compile("^lib.*\.so")
    libdir_re = re.compile(".*/lib$")

    packages = bb.data.getVar('PACKAGES', d, 1)
    if not packages:
        bb.debug(1, "no packages to build; not calculating shlibs")
        return

    workdir = bb.data.getVar('WORKDIR', d, 1)
    if not workdir:
        bb.error("WORKDIR not defined")
        return

    staging = bb.data.getVar('STAGING_DIR', d, 1)
    if not staging:
        bb.error("STAGING_DIR not defined")
        return

    ver = bb.data.getVar('PV', d, 1)
    if not ver:
        bb.error("PV not defined")
        return

    target_sys = bb.data.getVar('TARGET_SYS', d, 1)
    if not target_sys:
        bb.error("TARGET_SYS not defined")
        return

    shlibs_dir = os.path.join(staging, target_sys, "shlibs")
    old_shlibs_dir = os.path.join(staging, "shlibs")
    bb.mkdirhier(shlibs_dir)

    needed = {}
    for pkg in packages.split():
        needs_ldconfig = False
        bb.debug(2, "calculating shlib provides for %s" % pkg)

        pkgname = bb.data.getVar('PKG_%s' % pkg, d, 1)
        if not pkgname:
            pkgname = pkg

        needed[pkg] = []
        sonames = list()
        top = os.path.join(workdir, "install", pkg)
        for root, dirs, files in os.walk(top):
            for file in files:
                soname = None
                path = os.path.join(root, file)
                if os.access(path, os.X_OK) or lib_re.match(file):
                    cmd = (bb.data.getVar('BUILD_PREFIX', d, 1) or "") + "objdump -p " + path + " 2>/dev/null"
                    fd = os.popen(cmd)
                    lines = fd.readlines()
                    fd.close()
                    for l in lines:
                        m = re.match("\s+NEEDED\s+([^\s]*)", l)
                        if m:
                            needed[pkg].append(m.group(1))
                        m = re.match("\s+SONAME\s+([^\s]*)", l)
                        if m and not m.group(1) in sonames:
                            sonames.append(m.group(1))
                        if m and libdir_re.match(root):
                            needs_ldconfig = True
        shlibs_file = os.path.join(shlibs_dir, pkgname + ".list")
        if os.path.exists(shlibs_file):
            os.remove(shlibs_file)
        shver_file = os.path.join(shlibs_dir, pkgname + ".ver")
        if os.path.exists(shver_file):
            os.remove(shver_file)
        if len(sonames):
            fd = open(shlibs_file, 'w')
            for s in sonames:
                fd.write(s + '\n')
            fd.close()
            fd = open(shver_file, 'w')
            fd.write(ver + '\n')
            fd.close()
        if needs_ldconfig:
            bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg)
            postinst = bb.data.getVar('pkg_postinst_%s' % pkg, d, 1) or bb.data.getVar('pkg_postinst', d, 1)
            if not postinst:
                postinst = '#!/bin/sh\n'
            postinst += bb.data.getVar('ldconfig_postinst_fragment', d, 1)
            bb.data.setVar('pkg_postinst_%s' % pkg, postinst, d)

    shlib_provider = {}
    list_re = re.compile('^(.*)\.list$')
    for dir in [old_shlibs_dir, shlibs_dir]:
        if not os.path.exists(dir):
            continue
        for file in os.listdir(dir):
            m = list_re.match(file)
            if m:
                dep_pkg = m.group(1)
                fd = open(os.path.join(dir, file))
                lines = fd.readlines()
                fd.close()
                ver_file = os.path.join(dir, dep_pkg + '.ver')
                lib_ver = None
                if os.path.exists(ver_file):
                    fd = open(ver_file)
                    lib_ver = fd.readline().rstrip()
                    fd.close()
                for l in lines:
                    shlib_provider[l.rstrip()] = (dep_pkg, lib_ver)


    for pkg in packages.split():
        bb.debug(2, "calculating shlib requirements for %s" % pkg)

        p_pkg = bb.data.getVar("PKG_%s" % pkg, d, 1) or pkg

        deps = list()
        for n in needed[pkg]:
            if n in shlib_provider.keys():
                (dep_pkg, ver_needed) = shlib_provider[n]

                if dep_pkg == p_pkg:
                    continue

                if ver_needed:
                    dep = "%s (>= %s)" % (dep_pkg, ver_needed)
                else:
                    dep = dep_pkg
                if not dep in deps:
                    deps.append(dep)
            else:
                bb.note("Couldn't find shared library provider for %s" % n)


        deps_file = os.path.join(workdir, "install", pkg + ".shlibdeps")
        if os.path.exists(deps_file):
            os.remove(deps_file)
        if len(deps):
            fd = open(deps_file, 'w')
            for dep in deps:
                fd.write(dep + '\n')
            fd.close()
}
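
# Illustrative result (all names and versions assumed) for a package "libfoo1"
# shipping libfoo.so.1 at PV 1.2.3 and linking against a library provided by
# "libz1":
#
#   ${STAGING_DIR}/${TARGET_SYS}/shlibs/libfoo1.list  contains  "libfoo.so.1"
#   ${STAGING_DIR}/${TARGET_SYS}/shlibs/libfoo1.ver   contains  "1.2.3"
#   ${WORKDIR}/install/libfoo1.shlibdeps              contains  "libz1 (>= <libz1's recorded version>)"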

python package_do_pkgconfig () {
    import re, os

    packages = bb.data.getVar('PACKAGES', d, 1)
    if not packages:
        bb.debug(1, "no packages to build; not calculating pkgconfig dependencies")
        return

    workdir = bb.data.getVar('WORKDIR', d, 1)
    if not workdir:
        bb.error("WORKDIR not defined")
        return

    staging = bb.data.getVar('STAGING_DIR', d, 1)
    if not staging:
        bb.error("STAGING_DIR not defined")
        return

    target_sys = bb.data.getVar('TARGET_SYS', d, 1)
    if not target_sys:
        bb.error("TARGET_SYS not defined")
        return

    shlibs_dir = os.path.join(staging, target_sys, "shlibs")
    old_shlibs_dir = os.path.join(staging, "shlibs")
    bb.mkdirhier(shlibs_dir)

    pc_re = re.compile('(.*)\.pc$')
    var_re = re.compile('(.*)=(.*)')
    field_re = re.compile('(.*): (.*)')

    pkgconfig_provided = {}
    pkgconfig_needed = {}
    for pkg in packages.split():
        pkgconfig_provided[pkg] = []
        pkgconfig_needed[pkg] = []
        top = os.path.join(workdir, "install", pkg)
        for root, dirs, files in os.walk(top):
            for file in files:
                m = pc_re.match(file)
                if m:
                    pd = bb.data.init()
                    name = m.group(1)
                    pkgconfig_provided[pkg].append(name)
                    path = os.path.join(root, file)
                    if not os.access(path, os.R_OK):
                        continue
                    f = open(path, 'r')
                    lines = f.readlines()
                    f.close()
                    for l in lines:
                        m = var_re.match(l)
                        if m:
                            name = m.group(1)
                            val = m.group(2)
                            bb.data.setVar(name, bb.data.expand(val, pd), pd)
                            continue
                        m = field_re.match(l)
                        if m:
                            hdr = m.group(1)
                            exp = bb.data.expand(m.group(2), pd)
                            if hdr == 'Requires':
                                pkgconfig_needed[pkg] += exp.replace(',', ' ').split()

    for pkg in packages.split():
        ppkg = bb.data.getVar("PKG_" + pkg, d, 1) or pkg
        pkgs_file = os.path.join(shlibs_dir, ppkg + ".pclist")
        if os.path.exists(pkgs_file):
            os.remove(pkgs_file)
        if pkgconfig_provided[pkg] != []:
            f = open(pkgs_file, 'w')
            for p in pkgconfig_provided[pkg]:
                f.write('%s\n' % p)
            f.close()

    for dir in [old_shlibs_dir, shlibs_dir]:
        if not os.path.exists(dir):
            continue
        for file in os.listdir(dir):
            m = re.match('^(.*)\.pclist$', file)
            if m:
                pkg = m.group(1)
                fd = open(os.path.join(dir, file))
                lines = fd.readlines()
                fd.close()
                pkgconfig_provided[pkg] = []
                for l in lines:
                    pkgconfig_provided[pkg].append(l.rstrip())

    for pkg in packages.split():
        deps = []
        for n in pkgconfig_needed[pkg]:
            found = False
            for k in pkgconfig_provided.keys():
                if n in pkgconfig_provided[k]:
                    if k != pkg and not (k in deps):
                        deps.append(k)
                    found = True
            if found == False:
                bb.note("couldn't find pkgconfig module '%s' in any package" % n)
        deps_file = os.path.join(workdir, "install", pkg + ".pcdeps")
        if os.path.exists(deps_file):
            os.remove(deps_file)
        if len(deps):
            fd = open(deps_file, 'w')
            for dep in deps:
                fd.write(dep + '\n')
            fd.close()
}
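
# Illustrative result (names assumed) for a "libfoo-dev" package shipping
# foo.pc with a "Requires: glib-2.0" line:
#
#   ${STAGING_DIR}/${TARGET_SYS}/shlibs/libfoo-dev.pclist  contains  "foo"
#   ${WORKDIR}/install/libfoo-dev.pcdeps                   lists the package
#       whose .pclist provides "glib-2.0" (a note is printed if none is found)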

python package_do_split_locales() {
    import os

    if (bb.data.getVar('PACKAGE_NO_LOCALE', d, 1) == '1'):
        bb.debug(1, "package requested not splitting locales")
        return

    packages = (bb.data.getVar('PACKAGES', d, 1) or "").split()
    if not packages:
        bb.debug(1, "no packages to build; not splitting locales")
        return

    datadir = bb.data.getVar('datadir', d, 1)
    if not datadir:
        bb.note("datadir not defined")
        return

    dvar = bb.data.getVar('D', d, 1)
    if not dvar:
        bb.error("D not defined")
        return

    pn = bb.data.getVar('PN', d, 1)
    if not pn:
        bb.error("PN not defined")
        return

    if pn + '-locale' in packages:
        packages.remove(pn + '-locale')

    localedir = os.path.join(dvar + datadir, 'locale')

    if not os.path.isdir(localedir):
        bb.debug(1, "No locale files in this package")
        return

    locales = os.listdir(localedir)

    mainpkg = packages[0]

    for l in locales:
        ln = legitimize_package_name(l)
        pkg = pn + '-locale-' + ln
        packages.append(pkg)
        bb.data.setVar('FILES_' + pkg, os.path.join(datadir, 'locale', l), d)
        bb.data.setVar('RDEPENDS_' + pkg, '${PKG_%s} virtual-locale-%s' % (mainpkg, ln), d)
        bb.data.setVar('RPROVIDES_' + pkg, '%s-locale %s-translation' % (pn, ln), d)
        bb.data.setVar('DESCRIPTION_' + pkg, '%s translation for %s' % (l, pn), d)

    bb.data.setVar('PACKAGES', ' '.join(packages), d)

    rdep = (bb.data.getVar('RDEPENDS_%s' % mainpkg, d, 1) or bb.data.getVar('RDEPENDS', d, 1) or "").split()
    rdep.append('%s-locale*' % pn)
    bb.data.setVar('RDEPENDS_%s' % mainpkg, ' '.join(rdep), d)
}
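
# Illustrative outcome (recipe name assumed) for PN = "foo" whose ${D}
# contains ${datadir}/locale/de and ${datadir}/locale/en_GB:
#
#   PACKAGES                  gains "foo-locale-de foo-locale-en-gb"
#   FILES_foo-locale-de       = "${datadir}/locale/de"
#   RPROVIDES_foo-locale-de   = "foo-locale de-translation"
#   RDEPENDS_foo (mainpkg)    gains "foo-locale*"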

PACKAGEFUNCS = "do_install package_do_split_locales \
		populate_packages package_do_shlibs \
		package_do_pkgconfig read_shlibdeps"
python package_do_package () {
    for f in (bb.data.getVar('PACKAGEFUNCS', d, 1) or '').split():
        bb.build.exec_func(f, d)
}
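
# Illustrative only: packaging backend classes are expected to append their
# own steps to PACKAGEFUNCS so that package_do_package runs them, along the
# lines of (function name assumed):
#
#   PACKAGEFUNCS += "do_package_ipk"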

do_package[dirs] = "${D}"
populate_packages[dirs] = "${D}"
EXPORT_FUNCTIONS do_package do_shlibs do_split_locales mapping_rename_hook
addtask package before do_build after do_populate_staging
