Diffstat (limited to 'meta/classes/archiver.bbclass')
-rw-r--r--	meta/classes/archiver.bbclass	631
1 file changed, 0 insertions(+), 631 deletions(-)
diff --git a/meta/classes/archiver.bbclass b/meta/classes/archiver.bbclass
deleted file mode 100644
index a95c899a0f..0000000000
--- a/meta/classes/archiver.bbclass
+++ /dev/null
@@ -1,631 +0,0 @@
#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: MIT
#

#
# This bbclass is used for creating archives of:
#  1) original (or unpacked) source: ARCHIVER_MODE[src] = "original"
#  2) patched source: ARCHIVER_MODE[src] = "patched" (default)
#  3) configured source: ARCHIVER_MODE[src] = "configured"
#  4) source mirror: ARCHIVER_MODE[src] = "mirror"
#  5) The patches between do_unpack and do_patch:
#     ARCHIVER_MODE[diff] = "1"
#     And you can set the paths that you'd like to exclude from the diff:
#     ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
#  6) The environment data, similar to 'bitbake -e recipe':
#     ARCHIVER_MODE[dumpdata] = "1"
#  7) The recipe (.bb and .inc): ARCHIVER_MODE[recipe] = "1"
#  8) Whether to output the .src.rpm package:
#     ARCHIVER_MODE[srpm] = "1"
#  9) Filtering by license: recipes whose license matches
#     COPYLEFT_LICENSE_INCLUDE will be included, and those matching
#     COPYLEFT_LICENSE_EXCLUDE will be excluded.
#     COPYLEFT_LICENSE_INCLUDE = 'GPL* LGPL*'
#     COPYLEFT_LICENSE_EXCLUDE = 'CLOSED Proprietary'
# 10) The recipe types that will be archived:
#     COPYLEFT_RECIPE_TYPES = 'target'
# 11) The source mirror mode:
#     ARCHIVER_MODE[mirror] = "split" (default): Sources are split into
#     per-recipe directories in a similar way to other archiver modes.
#     Post-processing may be required to produce a single mirror directory.
#     This does however allow inspection of duplicate sources and more
#     intelligent handling.
#     ARCHIVER_MODE[mirror] = "combined": All sources are placed into a single
#     directory suitable for direct use as a mirror. Duplicate sources are
#     ignored.
# 12) Source mirror exclusions:
#     ARCHIVER_MIRROR_EXCLUDE is a list of prefixes to exclude from the mirror.
#     This may be used for sources which you are already publishing yourself
#     (e.g. if the URI starts with 'https://mysite.com/' and your mirror is
#     going to be published to the same site). It may also be used to exclude
#     local files (with the prefix 'file://') if these will be provided as part
#     of an archive of the layers themselves.
#
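# For example, one might enable and configure the archiver from a build's
# local.conf with settings along these lines (values shown are illustrative):
#
#   INHERIT += "archiver"
#   ARCHIVER_MODE[src] = "original"
#   ARCHIVER_MODE[diff] = "1"
#   ARCHIVER_MIRROR_EXCLUDE = "https://mysite.com/"
#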

# Create archives for all the recipe types
COPYLEFT_RECIPE_TYPES ?= 'target native nativesdk cross crosssdk cross-canadian'
inherit copyleft_filter

ARCHIVER_MODE[srpm] ?= "0"
ARCHIVER_MODE[src] ?= "patched"
ARCHIVER_MODE[diff] ?= "0"
ARCHIVER_MODE[diff-exclude] ?= ".pc autom4te.cache patches"
ARCHIVER_MODE[dumpdata] ?= "0"
ARCHIVER_MODE[recipe] ?= "0"
ARCHIVER_MODE[mirror] ?= "split"
ARCHIVER_MODE[compression] ?= "xz"

DEPLOY_DIR_SRC ?= "${DEPLOY_DIR}/sources"
ARCHIVER_TOPDIR ?= "${WORKDIR}/archiver-sources"
ARCHIVER_ARCH = "${TARGET_SYS}"
ARCHIVER_OUTDIR = "${ARCHIVER_TOPDIR}/${ARCHIVER_ARCH}/${PF}/"
ARCHIVER_RPMTOPDIR ?= "${WORKDIR}/deploy-sources-rpm"
ARCHIVER_RPMOUTDIR = "${ARCHIVER_RPMTOPDIR}/${ARCHIVER_ARCH}/${PF}/"
ARCHIVER_WORKDIR = "${WORKDIR}/archiver-work/"

# When producing a combined mirror directory, allow duplicates for the case
# where multiple recipes use the same SRC_URI.
ARCHIVER_COMBINED_MIRRORDIR = "${ARCHIVER_TOPDIR}/mirror"
SSTATE_ALLOW_OVERLAP_FILES += "${DEPLOY_DIR_SRC}/mirror"

do_dumpdata[dirs] = "${ARCHIVER_OUTDIR}"
do_ar_recipe[dirs] = "${ARCHIVER_OUTDIR}"
do_ar_original[dirs] = "${ARCHIVER_OUTDIR} ${ARCHIVER_WORKDIR}"

# This is a convenience for the shell script to use

def include_package(d, pn):

    included, reason = copyleft_should_include(d)
    if not included:
        bb.debug(1, 'archiver: %s is excluded: %s' % (pn, reason))
        return False

    else:
        bb.debug(1, 'archiver: %s is included: %s' % (pn, reason))

    # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
    # so avoid archiving source here.
    if pn.startswith('glibc-locale'):
        return False

    # We just archive gcc-source for all the gcc related recipes
    if d.getVar('BPN') in ['gcc', 'libgcc'] \
            and not pn.startswith('gcc-source'):
        bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn)
        return False

    return True

python () {
    pn = d.getVar('PN')
    assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
    if pn in assume_provided:
        for p in d.getVar("PROVIDES").split():
            if p != pn:
                pn = p
                break

    if not include_package(d, pn):
        return

    # TARGET_SYS in ARCHIVER_ARCH will break the stamp for gcc-source in multiconfig
    if pn.startswith('gcc-source'):
        d.setVar('ARCHIVER_ARCH', "allarch")

    def hasTask(task):
        return bool(d.getVarFlag(task, "task", False)) and not bool(d.getVarFlag(task, "noexec", False))

    ar_src = d.getVarFlag('ARCHIVER_MODE', 'src')
    ar_dumpdata = d.getVarFlag('ARCHIVER_MODE', 'dumpdata')
    ar_recipe = d.getVarFlag('ARCHIVER_MODE', 'recipe')

    if ar_src == "original":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_original' % pn)
        # 'patched' and 'configured' invoke do_unpack_and_patch because
        # do_ar_patched resp. do_ar_configured depend on it, but for 'original'
        # we have to add it explicitly.
        if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
            d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_unpack_and_patch' % pn)
    elif ar_src == "patched":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_patched' % pn)
    elif ar_src == "configured":
        # We can't use "addtask do_ar_configured after do_configure" since it
        # will cause the deptask of do_populate_sysroot to run no matter what
        # archives we need, so we add the depends here.

        # There is a corner case with "gcc-source-${PV}" recipes: they don't have
        # the "do_configure" task, so we need to use "do_preconfigure" instead.
        if hasTask("do_preconfigure"):
            d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_preconfigure' % pn)
        elif hasTask("do_configure"):
            d.appendVarFlag('do_ar_configured', 'depends', ' %s:do_configure' % pn)
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_configured' % pn)
    elif ar_src == "mirror":
        d.appendVarFlag('do_deploy_archives', 'depends', '%s:do_ar_mirror' % pn)

    elif ar_src:
        bb.fatal("Invalid ARCHIVER_MODE[src]: %s" % ar_src)

    if ar_dumpdata == "1":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_dumpdata' % pn)

    if ar_recipe == "1":
        d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_recipe' % pn)

    # Output the SRPM package
    if d.getVarFlag('ARCHIVER_MODE', 'srpm') == "1" and d.getVar('PACKAGES'):
        if "package_rpm" not in d.getVar('PACKAGE_CLASSES'):
            bb.fatal("ARCHIVER_MODE[srpm] needs package_rpm in PACKAGE_CLASSES")

        # Some recipes do not have any packaging tasks
        if hasTask("do_package_write_rpm"):
            d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_package_write_rpm' % pn)
            d.appendVarFlag('do_package_write_rpm', 'dirs', ' ${ARCHIVER_RPMTOPDIR}')
            d.appendVarFlag('do_package_write_rpm', 'sstate-inputdirs', ' ${ARCHIVER_RPMTOPDIR}')
            d.appendVarFlag('do_package_write_rpm', 'sstate-outputdirs', ' ${DEPLOY_DIR_SRC}')
            d.appendVar('PSEUDO_INCLUDE_PATHS', ',${ARCHIVER_TOPDIR}')
            if ar_dumpdata == "1":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_dumpdata' % pn)
            if ar_recipe == "1":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_recipe' % pn)
            if ar_src == "original":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_original' % pn)
            elif ar_src == "patched":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_patched' % pn)
            elif ar_src == "configured":
                d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_ar_configured' % pn)
}

# Take all the sources for a recipe and put them in WORKDIR/archiver-work/.
# Files in SRC_URI are copied directly, anything that's a directory
# (e.g. git repositories) is "unpacked" and then put into a tarball.
python do_ar_original() {

    import shutil, tempfile

    if d.getVarFlag('ARCHIVER_MODE', 'src') != "original":
        return

    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    bb.note('Archiving the original source...')
    urls = d.getVar("SRC_URI").split()
    # destsuffix (git fetcher) and subdir (everything else) are allowed to be
    # absolute paths (for example, destsuffix=${S}/foobar).
    # That messes with unpacking inside our tmpdir below, because the fetchers
    # will then unpack in that directory and completely ignore the tmpdir.
    # That breaks parallel tasks relying on ${S}, like do_compile.
    #
    # To solve this, we remove these parameters from all URLs.
    # We do this even for relative paths because it makes the content of the
    # archives more useful (no extra paths that are only used during
    # compilation).
    for i, url in enumerate(urls):
        decoded = bb.fetch2.decodeurl(url)
        for param in ('destsuffix', 'subdir'):
            if param in decoded[5]:
                del decoded[5][param]
        encoded = bb.fetch2.encodeurl(decoded)
        urls[i] = encoded

    # Clear SRC_URI before calling bb.fetch2.Fetch(), since the URLs are now
    # carried in the "urls" variable; otherwise there might be errors like:
    # The SRCREV_FORMAT variable must be set when multiple SCMs are used
    ld = bb.data.createCopy(d)
    ld.setVar('SRC_URI', '')
    fetch = bb.fetch2.Fetch(urls, ld)
    tarball_suffix = {}
    for url in fetch.urls:
        local = fetch.localpath(url).rstrip("/")
        if os.path.isfile(local):
            shutil.copy(local, ar_outdir)
        elif os.path.isdir(local):
            tmpdir = tempfile.mkdtemp(dir=d.getVar('ARCHIVER_WORKDIR'))
            fetch.unpack(tmpdir, (url,))
            # To handle recipes with more than one source, we add the "name"
            # URL parameter as suffix. We treat it as an error when
            # there's more than one URL without a name, or a name gets reused.
            # This is an additional safety net, in practice the name has
            # to be set when using the git fetcher, otherwise SRCREV cannot
            # be set separately for each URL.
            params = bb.fetch2.decodeurl(url)[5]
            type = bb.fetch2.decodeurl(url)[0]
            location = bb.fetch2.decodeurl(url)[2]
            name = params.get('name', '')
            if type.lower() == 'file':
                name_tmp = location.rstrip("*").rstrip("/")
                name = os.path.basename(name_tmp)
            else:
                if name in tarball_suffix:
                    if not name:
                        bb.fatal("Cannot determine archive names for original source because 'name' URL parameter is unset in more than one URL. Add it to at least one of these: %s %s" % (tarball_suffix[name], url))
                    else:
                        bb.fatal("Cannot determine archive names for original source because 'name=' URL parameter '%s' is used twice. Make it unique in: %s %s" % (name, tarball_suffix[name], url))
            tarball_suffix[name] = url
            create_tarball(d, tmpdir + '/.', name, ar_outdir)

    # Emit patch series files for 'original'
    bb.note('Writing patch series files...')
    for patch in src_patches(d):
        _, _, local, _, _, parm = bb.fetch.decodeurl(patch)
        patchdir = parm.get('patchdir')
        if patchdir:
            series = os.path.join(ar_outdir, 'series.subdir.%s' % patchdir.replace('/', '_'))
        else:
            series = os.path.join(ar_outdir, 'series')

        with open(series, 'a') as s:
            s.write('%s -p%s\n' % (os.path.basename(local), parm['striplevel']))
}

python do_ar_patched() {

    if d.getVarFlag('ARCHIVER_MODE', 'src') != 'patched':
        return

    # Get the ARCHIVER_OUTDIR before we reset the WORKDIR
    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    if not is_work_shared(d):
        ar_workdir = d.getVar('ARCHIVER_WORKDIR')
        d.setVar('WORKDIR', ar_workdir)
    bb.note('Archiving the patched source...')
    create_tarball(d, d.getVar('S'), 'patched', ar_outdir)
}

python do_ar_configured() {
    import shutil

    # Forcibly expand the sysroot paths as we're about to change WORKDIR
    d.setVar('STAGING_DIR_HOST', d.getVar('STAGING_DIR_HOST'))
    d.setVar('STAGING_DIR_TARGET', d.getVar('STAGING_DIR_TARGET'))
    d.setVar('RECIPE_SYSROOT', d.getVar('RECIPE_SYSROOT'))
    d.setVar('RECIPE_SYSROOT_NATIVE', d.getVar('RECIPE_SYSROOT_NATIVE'))

    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    if d.getVarFlag('ARCHIVER_MODE', 'src') == 'configured':
        bb.note('Archiving the configured source...')
        pn = d.getVar('PN')
| 290 | # "gcc-source-${PV}" recipes don't have "do_configure" | ||
| 291 | # task, so we need to run "do_preconfigure" instead | ||
        if pn.startswith("gcc-source-"):
            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))
            bb.build.exec_func('do_preconfigure', d)

        # libtool-native's do_configure removes
        # ${STAGING_DATADIR}/aclocal/libtool.m4, so we can't re-run
        # do_configure; we archive the already configured ${S} instead.
        # The kernel class functions require the source to be on work-shared,
        # so we don't unpack, patch or configure again, we just archive the
        # already configured ${S}.
        elif not (pn == 'libtool-native' or is_work_shared(d)):
            def runTask(task):
                prefuncs = d.getVarFlag(task, 'prefuncs') or ''
                for func in prefuncs.split():
                    if func != "sysroot_cleansstate":
                        bb.build.exec_func(func, d)
                bb.build.exec_func(task, d)
                postfuncs = d.getVarFlag(task, 'postfuncs') or ''
                for func in postfuncs.split():
                    if func != 'do_qa_configure':
                        bb.build.exec_func(func, d)

            # Change the WORKDIR to make do_configure run in another dir.
            d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR'))

            preceeds = bb.build.preceedtask('do_configure', False, d)
            for task in preceeds:
                if task != 'do_patch' and task != 'do_prepare_recipe_sysroot':
                    runTask(task)
            runTask('do_configure')

        srcdir = d.getVar('S')
        builddir = d.getVar('B')
        if srcdir != builddir:
            if os.path.exists(builddir):
                oe.path.copytree(builddir, os.path.join(srcdir, \
                    'build.%s.ar_configured' % d.getVar('PF')))
        create_tarball(d, srcdir, 'configured', ar_outdir)
}

python do_ar_mirror() {
    import subprocess

    src_uri = (d.getVar('SRC_URI') or '').split()
    if len(src_uri) == 0:
        return

    dl_dir = d.getVar('DL_DIR')
    mirror_exclusions = (d.getVar('ARCHIVER_MIRROR_EXCLUDE') or '').split()
    mirror_mode = d.getVarFlag('ARCHIVER_MODE', 'mirror')
    have_mirror_tarballs = oe.types.boolean(d.getVar('BB_GENERATE_MIRROR_TARBALLS'))

    if mirror_mode == 'combined':
        destdir = d.getVar('ARCHIVER_COMBINED_MIRRORDIR')
    elif mirror_mode == 'split':
        destdir = d.getVar('ARCHIVER_OUTDIR')
    else:
        bb.fatal('Invalid ARCHIVER_MODE[mirror]: %s' % (mirror_mode))

    if not have_mirror_tarballs:
        bb.fatal('Using `ARCHIVER_MODE[src] = "mirror"` depends on setting `BB_GENERATE_MIRROR_TARBALLS = "1"`')

    def is_excluded(url):
        for prefix in mirror_exclusions:
            if url.startswith(prefix):
                return True
        return False

    bb.note('Archiving the source as a mirror...')

    bb.utils.mkdirhier(destdir)

    fetcher = bb.fetch2.Fetch(src_uri, d)

    for ud in fetcher.expanded_urldata():
        if is_excluded(ud.url):
            bb.note('Skipping excluded url: %s' % (ud.url))
            continue

        bb.note('Archiving url: %s' % (ud.url))
        ud.setup_localpath(d)
        localpath = None

        # Check for mirror tarballs first. We will archive the first mirror
        # tarball that we find as it's assumed that we just need one.
        for mirror_fname in ud.mirrortarballs:
            mirror_path = os.path.join(dl_dir, mirror_fname)
            if os.path.exists(mirror_path):
                bb.note('Found mirror tarball: %s' % (mirror_path))
                localpath = mirror_path
                break

        if len(ud.mirrortarballs) and not localpath:
            bb.warn('Mirror tarballs are listed for a source but none are present. ' \
                    'Falling back to original download.\n' \
                    'SRC_URI = %s' % (ud.url))

        # Check original download
        if not localpath:
            bb.note('Using original download: %s' % (ud.localpath))
            localpath = ud.localpath

        if not localpath or not os.path.exists(localpath):
            bb.fatal('Original download is missing for a source.\n' \
                     'SRC_URI = %s' % (ud.url))

        # We now have an appropriate localpath
        bb.note('Copying source mirror')
        cmd = 'cp -fpPRH %s %s' % (localpath, destdir)
        subprocess.check_call(cmd, shell=True)
}

def create_tarball(d, srcdir, suffix, ar_outdir):
    """
    create the tarball from srcdir
    """
    import subprocess

    # Make sure we are only creating a single tarball for gcc sources
    if (d.getVar('SRC_URI') == ""):
        return

    # For the kernel archive, srcdir may just be a link to the
    # work-shared location. Use os.path.realpath to make sure
    # that we archive the actual directory and not just the link.
    srcdir = os.path.realpath(srcdir)

    compression_method = d.getVarFlag('ARCHIVER_MODE', 'compression')
    if compression_method == "xz":
        compression_cmd = "xz %s" % d.getVar('XZ_DEFAULTS')
    # To keep compatibility with ARCHIVER_MODE[compression]
    elif compression_method == "gz":
        compression_cmd = "gzip"
    elif compression_method == "bz2":
        compression_cmd = "bzip2"
    else:
        bb.fatal("Unsupported compression_method: %s" % compression_method)

    bb.utils.mkdirhier(ar_outdir)
    if suffix:
        filename = '%s-%s.tar.%s' % (d.getVar('PF'), suffix, compression_method)
    else:
        filename = '%s.tar.%s' % (d.getVar('PF'), compression_method)
    tarname = os.path.join(ar_outdir, filename)

    bb.note('Creating %s' % tarname)
    dirname = os.path.dirname(srcdir)
    basename = os.path.basename(srcdir)
    exclude = "--exclude=temp --exclude=patches --exclude='.pc'"
    tar_cmd = "tar %s -cf - %s | %s > %s" % (exclude, basename, compression_cmd, tarname)
    subprocess.check_call(tar_cmd, cwd=dirname, shell=True)

# creating .diff.gz between source.orig and source
def create_diff_gz(d, src_orig, src, ar_outdir):

    import subprocess

    if not os.path.isdir(src) or not os.path.isdir(src_orig):
        return

    # diff's --exclude option cannot exclude files by path, so we copy
    # the patched source and remove the files that we'd like to
    # exclude.
    src_patched = src + '.patched'
    oe.path.copyhardlinktree(src, src_patched)
    for i in d.getVarFlag('ARCHIVER_MODE', 'diff-exclude').split():
        bb.utils.remove(os.path.join(src_orig, i), recurse=True)
        bb.utils.remove(os.path.join(src_patched, i), recurse=True)

    dirname = os.path.dirname(src)
    basename = os.path.basename(src)
    bb.utils.mkdirhier(ar_outdir)
    cwd = os.getcwd()
    try:
        os.chdir(dirname)
        out_file = os.path.join(ar_outdir, '%s-diff.gz' % d.getVar('PF'))
        diff_cmd = 'diff -Naur %s.orig %s.patched | gzip -c > %s' % (basename, basename, out_file)
        subprocess.check_call(diff_cmd, shell=True)
        bb.utils.remove(src_patched, recurse=True)
    finally:
        os.chdir(cwd)

def is_work_shared(d):
    sharedworkdir = os.path.join(d.getVar('TMPDIR'), 'work-shared')
    sourcedir = os.path.realpath(d.getVar('S'))
    return sourcedir.startswith(sharedworkdir)

# Run do_unpack and do_patch
python do_unpack_and_patch() {
    if d.getVarFlag('ARCHIVER_MODE', 'src') not in \
            [ 'patched', 'configured'] and \
            d.getVarFlag('ARCHIVER_MODE', 'diff') != '1':
        return
    ar_outdir = d.getVar('ARCHIVER_OUTDIR')
    ar_workdir = d.getVar('ARCHIVER_WORKDIR')
    ar_sysroot_native = d.getVar('STAGING_DIR_NATIVE')
    pn = d.getVar('PN')

    # The kernel class functions require the source to be on work-shared, so we don't change WORKDIR
    if not is_work_shared(d):
        # Change the WORKDIR to make do_unpack and do_patch run in another dir.
        d.setVar('WORKDIR', ar_workdir)
        # Restore the original path to the recipe's native sysroot (it's relative to WORKDIR).
        d.setVar('STAGING_DIR_NATIVE', ar_sysroot_native)

        # Changing 'WORKDIR' also changes 'B'; create the 'B' directory since
        # some of the following tasks may require it to exist (for example,
        # some recipes' do_patch requires 'B').
        bb.utils.mkdirhier(d.getVar('B'))

        bb.build.exec_func('do_unpack', d)

    # Save the original source for creating the patches
    if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
        src = d.getVar('S').rstrip('/')
        src_orig = '%s.orig' % src
        oe.path.copytree(src, src_orig)

        if bb.data.inherits_class('dos2unix', d):
            bb.build.exec_func('do_convert_crlf_to_lf', d)

    # Make sure gcc and kernel sources are patched only once
    if not (d.getVar('SRC_URI') == "" or is_work_shared(d)):
        bb.build.exec_func('do_patch', d)

    # Create the patches
    if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1':
        bb.note('Creating diff gz...')
        create_diff_gz(d, src_orig, src, ar_outdir)
        bb.utils.remove(src_orig, recurse=True)
}

# BBINCLUDED is special (excluded from basehash signature
# calculation). Using it in a task signature can cause "basehash
# changed" errors.
#
# Depending on BBINCLUDED also causes do_ar_recipe to run again
# for unrelated changes, like adding or removing buildhistory.bbclass.
#
# For these reasons we ignore the dependency completely. The versioning
# of the output file ensures that we create it each time the recipe
# gets rebuilt, at least as long as a PR server is used. We also rely
# on that mechanism to catch changes in the file content, because the
# file content is not part of the task signature either.
do_ar_recipe[vardepsexclude] += "BBINCLUDED"
python do_ar_recipe () {
    """
    archive the recipe, including .bb and .inc.
    """
    import re
    import shutil

    require_re = re.compile( r"require\s+(.+)" )
    include_re = re.compile( r"include\s+(.+)" )
    bbfile = d.getVar('FILE')
    outdir = os.path.join(d.getVar('WORKDIR'), \
            '%s-recipe' % d.getVar('PF'))
    bb.utils.mkdirhier(outdir)
    shutil.copy(bbfile, outdir)

    pn = d.getVar('PN')
    bbappend_files = d.getVar('BBINCLUDED').split()
    # If the recipe name is aa, we need to match files like aa.bbappend and aa_1.1.bbappend
    # Files like aa1.bbappend or aa1_1.1.bbappend must be excluded.
    bbappend_re = re.compile( r".*/%s_[^/]*\.bbappend$" % re.escape(pn))
    bbappend_re1 = re.compile( r".*/%s\.bbappend$" % re.escape(pn))
    for file in bbappend_files:
        if bbappend_re.match(file) or bbappend_re1.match(file):
            shutil.copy(file, outdir)

    dirname = os.path.dirname(bbfile)
    bbpath = '%s:%s' % (dirname, d.getVar('BBPATH'))
    with open(bbfile, 'r') as f:
        for line in f.readlines():
            incfile = None
            if require_re.match(line):
                incfile = require_re.match(line).group(1)
            elif include_re.match(line):
                incfile = include_re.match(line).group(1)
            if incfile:
                incfile = d.expand(incfile)
            if incfile:
                incfile = bb.utils.which(bbpath, incfile)
            if incfile:
                shutil.copy(incfile, outdir)

    create_tarball(d, outdir, 'recipe', d.getVar('ARCHIVER_OUTDIR'))
    bb.utils.remove(outdir, recurse=True)
}

python do_dumpdata () {
    """
    dump environment data to ${PF}-showdata.dump
    """

    dumpfile = os.path.join(d.getVar('ARCHIVER_OUTDIR'), \
        '%s-showdata.dump' % d.getVar('PF'))
    bb.note('Dumping metadata into %s' % dumpfile)
    with open(dumpfile, "w") as f:
        # emit variables and shell functions
        bb.data.emit_env(f, d, True)
        # emit the metadata which isn't valid shell
        for e in d.keys():
            if d.getVarFlag(e, "python", False):
                f.write("\npython %s () {\n%s}\n" % (e, d.getVar(e, False)))
}

SSTATETASKS += "do_deploy_archives"
do_deploy_archives () {
    bbnote "Deploying source archive files from ${ARCHIVER_TOPDIR} to ${DEPLOY_DIR_SRC}."
}
python do_deploy_archives_setscene () {
    sstate_setscene(d)
}
do_deploy_archives[dirs] = "${ARCHIVER_TOPDIR}"
do_deploy_archives[sstate-inputdirs] = "${ARCHIVER_TOPDIR}"
do_deploy_archives[sstate-outputdirs] = "${DEPLOY_DIR_SRC}"
addtask do_deploy_archives_setscene

addtask do_ar_original after do_unpack
addtask do_unpack_and_patch after do_patch do_preconfigure
addtask do_ar_patched after do_unpack_and_patch
addtask do_ar_configured after do_unpack_and_patch
addtask do_ar_mirror after do_fetch
addtask do_dumpdata
addtask do_ar_recipe
addtask do_deploy_archives
do_build[recrdeptask] += "do_deploy_archives"
do_rootfs[recrdeptask] += "do_deploy_archives"
do_populate_sdk[recrdeptask] += "do_deploy_archives"

python () {
    # Add tasks in the correct order, specifically for linux-yocto, to avoid a race condition.
    # sstatesig.py:sstate_rundepfilter has special support that excludes this dependency
    # so that do_kernel_configme does not need to run again when do_unpack_and_patch
    # gets added or removed (by adding or removing archiver.bbclass).
    if bb.data.inherits_class('kernel-yocto', d):
        bb.build.addtask('do_kernel_configme', 'do_configure', 'do_unpack_and_patch', d)
}
