diff options
| author | Richard Purdie <richard.purdie@linuxfoundation.org> | 2025-11-07 13:31:53 +0000 |
|---|---|---|
| committer | Richard Purdie <richard.purdie@linuxfoundation.org> | 2025-11-07 13:31:53 +0000 |
| commit | 8c22ff0d8b70d9b12f0487ef696a7e915b9e3173 (patch) | |
| tree | efdc32587159d0050a69009bdf2330a531727d95 /scripts/lib/recipetool/create.py | |
| parent | d412d2747595c1cc4a5e3ca975e3adc31b2f7891 (diff) | |
| download | poky-8c22ff0d8b70d9b12f0487ef696a7e915b9e3173.tar.gz | |
The poky repository master branch is no longer being updated.
You can either:
a) switch to individual clones of bitbake, openembedded-core, meta-yocto and yocto-docs
b) use the new bitbake-setup
You can find information about either approach in our documentation:
https://docs.yoctoproject.org/
Note that "poky" the distro setting is still available in meta-yocto as
before and we continue to use and maintain that.
Long live Poky!
Some further information on the background of this change can be found
in: https://lists.openembedded.org/g/openembedded-architecture/message/2179
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Diffstat (limited to 'scripts/lib/recipetool/create.py')
| -rw-r--r-- | scripts/lib/recipetool/create.py | 1212 |
1 file changed, 0 insertions, 1212 deletions
diff --git a/scripts/lib/recipetool/create.py b/scripts/lib/recipetool/create.py deleted file mode 100644 index ef0ba974a9..0000000000 --- a/scripts/lib/recipetool/create.py +++ /dev/null | |||
| @@ -1,1212 +0,0 @@ | |||
| 1 | # Recipe creation tool - create command plugin | ||
| 2 | # | ||
| 3 | # Copyright (C) 2014-2017 Intel Corporation | ||
| 4 | # | ||
| 5 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 6 | # | ||
| 7 | |||
| 8 | import sys | ||
| 9 | import os | ||
| 10 | import argparse | ||
| 11 | import glob | ||
| 12 | import fnmatch | ||
| 13 | import re | ||
| 14 | import json | ||
| 15 | import logging | ||
| 16 | import scriptutils | ||
| 17 | from urllib.parse import urlparse, urldefrag, urlsplit | ||
| 18 | import hashlib | ||
| 19 | import bb.fetch2 | ||
| 20 | logger = logging.getLogger('recipetool') | ||
| 21 | from oe.license import tidy_licenses | ||
| 22 | from oe.license_finder import find_licenses | ||
| 23 | |||
# Module-level shared state, populated at startup:
#   tinfoil - bitbake tinfoil instance, set by tinfoil_init()
#   plugins - list of loaded recipetool plugins, set by plugin_init()
tinfoil = None
plugins = None
| 26 | |||
def log_error_cond(message, debugonly):
    """Log *message* as an error, demoted to debug level when debugonly is set."""
    emit = logger.debug if debugonly else logger.error
    emit(message)
| 32 | |||
def log_info_cond(message, debugonly):
    """Log *message* at info level, demoted to debug level when debugonly is set."""
    emit = logger.debug if debugonly else logger.info
    emit(message)
| 38 | |||
def plugin_init(pluginlist):
    """Record a reference to the loaded plugin list for later handler discovery."""
    global plugins
    plugins = pluginlist
| 43 | |||
def tinfoil_init(instance):
    """Store the tinfoil instance for use by the rest of this module."""
    global tinfoil
    tinfoil = instance
| 47 | |||
class RecipeHandler(object):
    """Base class for recipe creation handlers.

    Carries lazily-populated class-level caches that map shared libraries,
    development files and native binaries back to the recipes that provide
    them, built from pkgdata and sstate manifests of previously built
    recipes. Handlers fill in recipe values via process().
    """
    # Caches shared by all handler instances; populated on first use by the
    # load_*() static methods below
    recipelibmap = {}
    recipeheadermap = {}
    recipecmakefilemap = {}
    recipebinmap = {}

    def __init__(self):
        # Overridden (via setattr in create_recipe) when running under devtool
        self._devtool = False

    @staticmethod
    def load_libmap(d):
        '''Load library->recipe mapping'''
        import oe.package

        if RecipeHandler.recipelibmap:
            return
        # First build up library->package mapping
        d2 = bb.data.createCopy(d)
        d2.setVar("WORKDIR_PKGDATA", "${PKGDATA_DIR}")
        shlib_providers = oe.package.read_shlib_providers(d2)
        libdir = d.getVar('libdir')
        base_libdir = d.getVar('base_libdir')
        libpaths = list(set([base_libdir, libdir]))
        libname_re = re.compile(r'^lib(.+)\.so.*$')
        pkglibmap = {}
        for lib, item in shlib_providers.items():
            for path, pkg in item.items():
                if path in libpaths:
                    res = libname_re.match(lib)
                    if res:
                        libname = res.group(1)
                        if not libname in pkglibmap:
                            pkglibmap[libname] = pkg[0]
                    else:
                        logger.debug('unable to extract library name from %s' % lib)

        # Now turn it into a library->recipe mapping
        pkgdata_dir = d.getVar('PKGDATA_DIR')
        for libname, pkg in pkglibmap.items():
            try:
                with open(os.path.join(pkgdata_dir, 'runtime', pkg)) as f:
                    for line in f:
                        if line.startswith('PN:'):
                            RecipeHandler.recipelibmap[libname] = line.split(':', 1)[-1].strip()
                            break
            except IOError as ioe:
                if ioe.errno == 2:
                    # Missing pkgdata file is non-fatal; just warn and move on
                    logger.warning('unable to find a pkgdata file for package %s' % pkg)
                else:
                    raise

        # Some overrides - these should be mapped to the virtual
        RecipeHandler.recipelibmap['GL'] = 'virtual/libgl'
        RecipeHandler.recipelibmap['EGL'] = 'virtual/egl'
        RecipeHandler.recipelibmap['GLESv2'] = 'virtual/libgles2'

    @staticmethod
    def load_devel_filemap(d):
        '''Build up development file->recipe mapping'''
        if RecipeHandler.recipeheadermap:
            return
        pkgdata_dir = d.getVar('PKGDATA_DIR')
        includedir = d.getVar('includedir')
        cmakedir = os.path.join(d.getVar('libdir'), 'cmake')
        # NOTE(review): pkg is a full path here (glob result), so the
        # os.path.join below returns it unchanged and the FILES_INFO prefix
        # check uses the full path -- confirm this matches pkgdata format
        for pkg in glob.glob(os.path.join(pkgdata_dir, 'runtime', '*-dev')):
            with open(os.path.join(pkgdata_dir, 'runtime', pkg)) as f:
                pn = None
                headers = []
                cmakefiles = []
                for line in f:
                    if line.startswith('PN:'):
                        pn = line.split(':', 1)[-1].strip()
                    elif line.startswith('FILES_INFO:%s:' % pkg):
                        val = line.split(': ', 1)[1].strip()
                        dictval = json.loads(val)
                        for fullpth in sorted(dictval):
                            if fullpth.startswith(includedir) and fullpth.endswith('.h'):
                                headers.append(os.path.relpath(fullpth, includedir))
                            elif fullpth.startswith(cmakedir) and fullpth.endswith('.cmake'):
                                cmakefiles.append(os.path.relpath(fullpth, cmakedir))
                if pn and headers:
                    for header in headers:
                        RecipeHandler.recipeheadermap[header] = pn
                if pn and cmakefiles:
                    for fn in cmakefiles:
                        RecipeHandler.recipecmakefilemap[fn] = pn

    @staticmethod
    def load_binmap(d):
        '''Build up native binary->recipe mapping'''
        if RecipeHandler.recipebinmap:
            return
        sstate_manifests = d.getVar('SSTATE_MANIFESTS')
        staging_bindir_native = d.getVar('STAGING_BINDIR_NATIVE')
        build_arch = d.getVar('BUILD_ARCH')
        fileprefix = 'manifest-%s-' % build_arch
        # Scan populate_sysroot manifests of native recipes for files they
        # install into the native binary directory
        for fn in glob.glob(os.path.join(sstate_manifests, '%s*-native.populate_sysroot' % fileprefix)):
            with open(fn, 'r') as f:
                pn = os.path.basename(fn).rsplit('.', 1)[0][len(fileprefix):]
                for line in f:
                    if line.startswith(staging_bindir_native):
                        prog = os.path.basename(line.rstrip())
                        RecipeHandler.recipebinmap[prog] = pn

    @staticmethod
    def checkfiles(path, speclist, recursive=False, excludedirs=None):
        """Return files under *path* matching any glob pattern in *speclist*.

        With recursive=True the whole tree is walked (skipping directory
        names listed in *excludedirs*); otherwise only *path* itself is
        globbed.
        """
        results = []
        if recursive:
            for root, dirs, files in os.walk(path, topdown=True):
                if excludedirs:
                    # Prune excluded directories in place so os.walk skips them
                    dirs[:] = [d for d in dirs if d not in excludedirs]
                for fn in files:
                    for spec in speclist:
                        if fnmatch.fnmatch(fn, spec):
                            results.append(os.path.join(root, fn))
        else:
            for spec in speclist:
                results.extend(glob.glob(os.path.join(path, spec)))
        return results

    @staticmethod
    def handle_depends(libdeps, pcdeps, deps, outlines, values, d):
        """Map library and pkg-config dependencies to recipes, set DEPENDS.

        libdeps: library names, or (libname, headerfile) tuples
        pcdeps: pkg-config module names, or tuples of alternatives
        deps: already-known recipe dependencies (extended in place)
        outlines: recipe output lines (notes appended for unmapped deps)
        values: recipe variable dict ('DEPENDS' is set here)
        d: datastore used to look up the mappings
        """
        if pcdeps:
            recipemap = read_pkgconfig_provides(d)
        if libdeps:
            RecipeHandler.load_libmap(d)

        ignorelibs = ['socket']
        ignoredeps = ['gcc-runtime', 'glibc', 'uclibc', 'musl', 'tar-native', 'binutils-native', 'coreutils-native']

        unmappedpc = []
        pcdeps = list(set(pcdeps))
        for pcdep in pcdeps:
            if isinstance(pcdep, str):
                recipe = recipemap.get(pcdep, None)
                if recipe:
                    deps.append(recipe)
                else:
                    if not pcdep.startswith('$'):
                        unmappedpc.append(pcdep)
            else:
                # A tuple of alternatives - any one of them satisfies the
                # dependency
                for item in pcdep:
                    # Fix: look up each alternative rather than the tuple
                    # itself (the tuple can never be a recipemap key, so the
                    # previous code always reported these as unmapped)
                    recipe = recipemap.get(item, None)
                    if recipe:
                        deps.append(recipe)
                        break
                else:
                    unmappedpc.append('(%s)' % ' or '.join(pcdep))

        unmappedlibs = []
        for libdep in libdeps:
            if isinstance(libdep, tuple):
                lib, header = libdep
            else:
                lib = libdep
                header = None

            if lib in ignorelibs:
                logger.debug('Ignoring library dependency %s' % lib)
                continue

            recipe = RecipeHandler.recipelibmap.get(lib, None)
            if recipe:
                deps.append(recipe)
            elif recipe is None:
                # No library mapping; fall back to mapping via a header file
                # if one was supplied
                if header:
                    RecipeHandler.load_devel_filemap(d)
                    recipe = RecipeHandler.recipeheadermap.get(header, None)
                    if recipe:
                        deps.append(recipe)
                    elif recipe is None:
                        unmappedlibs.append(lib)
                else:
                    unmappedlibs.append(lib)

        deps = set(deps).difference(set(ignoredeps))

        if unmappedpc:
            outlines.append('# NOTE: unable to map the following pkg-config dependencies: %s' % ' '.join(unmappedpc))
            outlines.append('# (this is based on recipes that have previously been built and packaged)')

        if unmappedlibs:
            outlines.append('# NOTE: the following library dependencies are unknown, ignoring: %s' % ' '.join(list(set(unmappedlibs))))
            outlines.append('# (this is based on recipes that have previously been built and packaged)')

        if deps:
            values['DEPENDS'] = ' '.join(deps)

    @staticmethod
    def genfunction(outlines, funcname, content, python=False, forcespace=False):
        """Append a shell or python task function definition to *outlines*.

        Shell functions use tab indentation unless forcespace is set; python
        functions always use spaces. A no-op ':' is inserted into shell
        functions whose body contains only comments/blank lines, since an
        empty shell function body would be a syntax error.
        """
        if python:
            prefix = 'python '
        else:
            prefix = ''
        outlines.append('%s%s () {' % (prefix, funcname))
        if python or forcespace:
            indent = '    '
        else:
            indent = '\t'
        addnoop = not python
        for line in content:
            outlines.append('%s%s' % (indent, line))
            if addnoop:
                strippedline = line.lstrip()
                if strippedline and not strippedline.startswith('#'):
                    addnoop = False
        if addnoop:
            # Without this there'll be a syntax error
            outlines.append('%s:' % indent)
        outlines.append('}')
        outlines.append('')

    def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
        """Attempt to handle the source tree; return True if handled.

        Default implementation handles nothing and returns False.
        """
        return False
| 262 | |||
| 263 | |||
def validate_pv(pv):
    """Return True if *pv* looks like a usable version string.

    A valid version is non-empty, starts with a digit, and does not contain
    a literal '_version' placeholder.
    """
    return bool(pv) and '_version' not in pv.lower() and pv[0] in '0123456789'
| 268 | |||
def determine_from_filename(srcfile):
    """Determine name and version from a filename"""
    if is_package(srcfile):
        # Force getting the value from the package metadata
        return None, None

    # Strip the archive extension to get the bare name-version stem
    if '.tar.' in srcfile:
        stem = srcfile.split('.tar.')[0]
    else:
        stem = os.path.splitext(srcfile)[0]
    stem = stem.lower().replace('_', '-')
    for suffix in ('.src', '.orig'):
        if stem.endswith(suffix):
            stem = stem[:-len(suffix)]
    parts = stem.split('-')
    logger.debug('determine_from_filename: split name %s into: %s' % (srcfile, parts))

    version_start_re = re.compile(r'^v?[0-9]')

    pv = None
    pn = None
    if len(parts) == 1:
        # No separator; try to peel a trailing version off the name
        res = re.match(r'^([^0-9]+)([0-9.]+.*)$', stem)
        if res:
            if len(res.group(1)) > 1 and len(res.group(2)) > 1:
                pn = res.group(1).rstrip('.')
                pv = res.group(2)
        else:
            pn = stem
    else:
        if parts[-1] in ('source', 'src'):
            parts.pop()
        if len(parts) > 2 and re.match(r'^(alpha|beta|stable|release|rc[0-9]|pre[0-9]|p[0-9]|[0-9]{8})', parts[-1]) and version_start_re.match(parts[-2]):
            # Version plus a qualifier such as "-rc1" or a datestamp
            pv = '-'.join(parts[-2:])
            if pv.endswith('-release'):
                pv = pv[:-8]
            parts = parts[:-2]
        elif version_start_re.match(parts[-1]):
            pv = parts.pop()
        pn = '-'.join(parts)
    if pv and pv.startswith('v'):
        pv = pv[1:]
    logger.debug('determine_from_filename: name = "%s" version = "%s"' % (pn, pv))
    return (pn, pv)
| 315 | |||
def determine_from_url(srcuri):
    """Determine name and version from a URL"""
    pn = None
    pv = None
    parseres = urlparse(srcuri.lower().split(';', 1)[0])
    if parseres.path:
        if 'github.com' in parseres.netloc:
            # Prefer "<ver>-final" archive URLs, then plain archive URLs
            for pattern in (r'.*/(.*?)/archive/(.*)-final\.(tar|zip)',
                            r'.*/(.*?)/archive/v?(.*)\.(tar|zip)'):
                res = re.search(pattern, parseres.path)
                if res:
                    pn = res.group(1).strip().replace('_', '-')
                    pv = res.group(2).strip().replace('_', '.')
                    break
        elif 'bitbucket.org' in parseres.netloc:
            res = re.search(r'.*/(.*?)/get/[a-zA-Z_-]*([0-9][0-9a-zA-Z_.]*)\.(tar|zip)', parseres.path)
            if res:
                pn = res.group(1).strip().replace('_', '-')
                pv = res.group(2).strip().replace('_', '.')

    if not pn and not pv:
        if parseres.scheme not in ('git', 'gitsm', 'svn', 'hg'):
            # Plain file download: fall back to parsing the filename
            pn, pv = determine_from_filename(os.path.basename(parseres.path.rstrip('/')))
        elif parseres.scheme in ('git', 'gitsm'):
            # Use the repository name, minus any .git suffix
            pn = os.path.basename(parseres.path.rstrip('/')).lower().replace('_', '-')
            if pn.endswith('.git'):
                pn = pn[:-4]

    logger.debug('Determined from source URL: name = "%s", version = "%s"' % (pn, pv))
    return (pn, pv)
| 349 | |||
def supports_srcrev(uri):
    """Return True if the fetcher backend for *uri* supports source revisions.

    Instantiates a fetcher against a copy of the global config data and asks
    each URL handler whether it supports SRCREV. On fetcher errors, falls
    back to a simple scheme check for git URIs.
    """
    localdata = bb.data.createCopy(tinfoil.config_data)
    # This is a bit sad, but if you don't have this set there can be some
    # odd interactions with the urldata cache which lead to errors
    localdata.setVar('SRCREV', '${AUTOREV}')
    try:
        fetcher = bb.fetch2.Fetch([uri], localdata)
        urldata = fetcher.ud
        for u in urldata:
            if urldata[u].method.supports_srcrev():
                return True
    except bb.fetch2.FetchError as e:
        logger.debug('FetchError in supports_srcrev: %s' % str(e))
        # Fall back to basic check
        if uri.startswith(('git://', 'gitsm://')):
            return True
    return False
| 367 | |||
def reformat_git_uri(uri):
    '''Convert any http[s]://....git URI into git://...;protocol=http[s]'''
    base = uri.split(';', 1)[0]
    if not (base.endswith('.git') or '/git/' in base or
            re.match('https?://git(hub|lab).com/[^/]+/[^/]+/?$', base)):
        # Doesn't look like a git repository URL; leave it untouched
        return uri
    # Prepend a scheme if one is missing so decodeurl can parse the URI
    if '://' not in uri:
        uri = 'git://' + uri
    scheme, host, path, user, pswd, parms = bb.fetch2.decodeurl(uri)
    # Some URIs are written "host:path" rather than "host/path", which makes
    # decodeurl return the wrong host and path; fix that up here, but leave
    # a numeric port attached to the host alone
    hostparts = host.split(':')
    if len(hostparts) > 1 and not re.match('^[0-9]+$', hostparts[1]):
        host = hostparts[0]
        path = '/' + hostparts[1] + path
    # Choose a protocol parameter:
    #  - a user in the URI implies ssh, unless a protocol is already given
    #  - otherwise carry the original scheme over as the protocol
    if user:
        parms.setdefault('protocol', 'ssh')
    elif scheme in ('http', 'https', 'ssh'):
        parms.setdefault('protocol', scheme)
    # Always emit a git:// URI
    return bb.fetch2.encodeurl(('git', host, path, user, pswd, parms))
| 399 | |||
def is_package(url):
    '''Check if a URL points to a package'''
    # Only inspect the URL itself, not any ;key=value fetcher parameters
    return url.split(';', 1)[0].endswith(('.deb', '.ipk', '.rpm', '.srpm'))
| 406 | |||
| 407 | def create_recipe(args): | ||
| 408 | import bb.process | ||
| 409 | import tempfile | ||
| 410 | import shutil | ||
| 411 | import oe.recipeutils | ||
| 412 | |||
| 413 | pkgarch = "" | ||
| 414 | if args.machine: | ||
| 415 | pkgarch = "${MACHINE_ARCH}" | ||
| 416 | |||
| 417 | extravalues = {} | ||
| 418 | checksums = {} | ||
| 419 | tempsrc = '' | ||
| 420 | source = args.source | ||
| 421 | srcsubdir = '' | ||
| 422 | srcrev = '${AUTOREV}' | ||
| 423 | srcbranch = '' | ||
| 424 | scheme = '' | ||
| 425 | storeTagName = '' | ||
| 426 | pv_srcpv = False | ||
| 427 | |||
| 428 | handled = [] | ||
| 429 | classes = [] | ||
| 430 | |||
| 431 | # Find all plugins that want to register handlers | ||
| 432 | logger.debug('Loading recipe handlers') | ||
| 433 | raw_handlers = [] | ||
| 434 | for plugin in plugins: | ||
| 435 | if hasattr(plugin, 'register_recipe_handlers'): | ||
| 436 | plugin.register_recipe_handlers(raw_handlers) | ||
| 437 | # Sort handlers by priority | ||
| 438 | handlers = [] | ||
| 439 | for i, handler in enumerate(raw_handlers): | ||
| 440 | if isinstance(handler, tuple): | ||
| 441 | handlers.append((handler[0], handler[1], i)) | ||
| 442 | else: | ||
| 443 | handlers.append((handler, 0, i)) | ||
| 444 | handlers.sort(key=lambda item: (item[1], -item[2]), reverse=True) | ||
| 445 | for handler, priority, _ in handlers: | ||
| 446 | logger.debug('Handler: %s (priority %d)' % (handler.__class__.__name__, priority)) | ||
| 447 | setattr(handler, '_devtool', args.devtool) | ||
| 448 | handlers = [item[0] for item in handlers] | ||
| 449 | |||
| 450 | fetchuri = None | ||
| 451 | for handler in handlers: | ||
| 452 | if hasattr(handler, 'process_url'): | ||
| 453 | ret = handler.process_url(args, classes, handled, extravalues) | ||
| 454 | if 'url' in handled and ret: | ||
| 455 | fetchuri = ret | ||
| 456 | break | ||
| 457 | |||
| 458 | if os.path.isfile(source): | ||
| 459 | source = 'file://%s' % os.path.abspath(source) | ||
| 460 | |||
| 461 | if scriptutils.is_src_url(source): | ||
| 462 | # Warn about github archive URLs | ||
| 463 | if re.match(r'https?://github.com/[^/]+/[^/]+/archive/.+(\.tar\..*|\.zip)$', source): | ||
| 464 | logger.warning('github archive files are not guaranteed to be stable and may be re-generated over time. If the latter occurs, the checksums will likely change and the recipe will fail at do_fetch. It is recommended that you point to an actual commit or tag in the repository instead (using the repository URL in conjunction with the -S/--srcrev option).') | ||
| 465 | # Fetch a URL | ||
| 466 | if not fetchuri: | ||
| 467 | fetchuri = reformat_git_uri(urldefrag(source)[0]) | ||
| 468 | if args.binary: | ||
| 469 | # Assume the archive contains the directory structure verbatim | ||
| 470 | # so we need to extract to a subdirectory | ||
| 471 | fetchuri += ';subdir=${BPN}' | ||
| 472 | srcuri = fetchuri | ||
| 473 | rev_re = re.compile(';rev=([^;]+)') | ||
| 474 | res = rev_re.search(srcuri) | ||
| 475 | if res: | ||
| 476 | if args.srcrev: | ||
| 477 | logger.error('rev= parameter and -S/--srcrev option cannot both be specified - use one or the other') | ||
| 478 | sys.exit(1) | ||
| 479 | if args.autorev: | ||
| 480 | logger.error('rev= parameter and -a/--autorev option cannot both be specified - use one or the other') | ||
| 481 | sys.exit(1) | ||
| 482 | srcrev = res.group(1) | ||
| 483 | srcuri = rev_re.sub('', srcuri) | ||
| 484 | elif args.srcrev: | ||
| 485 | srcrev = args.srcrev | ||
| 486 | |||
| 487 | # Check whether users provides any branch info in fetchuri. | ||
| 488 | # If true, we will skip all branch checking process to honor all user's input. | ||
| 489 | scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(fetchuri) | ||
| 490 | srcbranch = params.get('branch') | ||
| 491 | if args.srcbranch: | ||
| 492 | if srcbranch: | ||
| 493 | logger.error('branch= parameter and -B/--srcbranch option cannot both be specified - use one or the other') | ||
| 494 | sys.exit(1) | ||
| 495 | srcbranch = args.srcbranch | ||
| 496 | params['branch'] = srcbranch | ||
| 497 | nobranch = params.get('nobranch') | ||
| 498 | if nobranch and srcbranch: | ||
| 499 | logger.error('nobranch= cannot be used if you specify a branch') | ||
| 500 | sys.exit(1) | ||
| 501 | tag = params.get('tag') | ||
| 502 | if not srcbranch and not nobranch and srcrev != '${AUTOREV}': | ||
| 503 | # Append nobranch=1 in the following conditions: | ||
| 504 | # 1. User did not set 'branch=' in srcuri, and | ||
| 505 | # 2. User did not set 'nobranch=1' in srcuri, and | ||
| 506 | # 3. Source revision is not '${AUTOREV}' | ||
| 507 | params['nobranch'] = '1' | ||
| 508 | if tag: | ||
| 509 | # Keep a copy of tag and append nobranch=1 then remove tag from URL. | ||
| 510 | # Bitbake fetcher unable to fetch when {AUTOREV} and tag is set at the same time. | ||
| 511 | storeTagName = params['tag'] | ||
| 512 | params['nobranch'] = '1' | ||
| 513 | del params['tag'] | ||
| 514 | # Assume 'master' branch if not set | ||
| 515 | if scheme in ['git', 'gitsm'] and 'branch' not in params and 'nobranch' not in params: | ||
| 516 | params['branch'] = 'master' | ||
| 517 | fetchuri = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params)) | ||
| 518 | |||
| 519 | tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR') | ||
| 520 | bb.utils.mkdirhier(tmpparent) | ||
| 521 | tempsrc = tempfile.mkdtemp(prefix='recipetool-', dir=tmpparent) | ||
| 522 | srctree = os.path.join(tempsrc, 'source') | ||
| 523 | |||
| 524 | try: | ||
| 525 | checksums, ftmpdir = scriptutils.fetch_url(tinfoil, fetchuri, srcrev, srctree, logger, preserve_tmp=args.keep_temp) | ||
| 526 | except scriptutils.FetchUrlFailure as e: | ||
| 527 | logger.error(str(e)) | ||
| 528 | sys.exit(1) | ||
| 529 | |||
| 530 | if ftmpdir and args.keep_temp: | ||
| 531 | logger.info('Fetch temp directory is %s' % ftmpdir) | ||
| 532 | |||
| 533 | dirlist = os.listdir(srctree) | ||
| 534 | logger.debug('Directory listing (excluding filtered out):\n %s' % '\n '.join(dirlist)) | ||
| 535 | if len(dirlist) == 1: | ||
| 536 | singleitem = os.path.join(srctree, dirlist[0]) | ||
| 537 | if os.path.isdir(singleitem): | ||
| 538 | # We unpacked a single directory, so we should use that | ||
| 539 | srcsubdir = dirlist[0] | ||
| 540 | srctree = os.path.join(srctree, srcsubdir) | ||
| 541 | else: | ||
| 542 | check_single_file(dirlist[0], fetchuri) | ||
| 543 | elif len(dirlist) == 0: | ||
| 544 | if '/' in fetchuri: | ||
| 545 | fn = os.path.join(tinfoil.config_data.getVar('DL_DIR'), fetchuri.split('/')[-1]) | ||
| 546 | if os.path.isfile(fn): | ||
| 547 | check_single_file(fn, fetchuri) | ||
| 548 | # If we've got to here then there's no source so we might as well give up | ||
| 549 | logger.error('URL %s resulted in an empty source tree' % fetchuri) | ||
| 550 | sys.exit(1) | ||
| 551 | |||
| 552 | # We need this checking mechanism to improve the recipe created by recipetool and devtool | ||
| 553 | # is able to parse and build by bitbake. | ||
| 554 | # If there is no input for branch name, then check for branch name with SRCREV provided. | ||
| 555 | if not srcbranch and not nobranch and srcrev and (srcrev != '${AUTOREV}') and scheme in ['git', 'gitsm']: | ||
| 556 | try: | ||
| 557 | cmd = 'git branch -r --contains' | ||
| 558 | check_branch, check_branch_err = bb.process.run('%s %s' % (cmd, srcrev), cwd=srctree) | ||
| 559 | except bb.process.ExecutionError as err: | ||
| 560 | logger.error(str(err)) | ||
| 561 | sys.exit(1) | ||
| 562 | get_branch = [x.strip() for x in check_branch.splitlines()] | ||
| 563 | # Remove HEAD reference point and drop remote prefix | ||
| 564 | get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')] | ||
| 565 | if 'master' in get_branch: | ||
| 566 | # Even with the case where get_branch has multiple objects, if 'master' is one | ||
| 567 | # of them, we should default take from 'master' | ||
| 568 | srcbranch = 'master' | ||
| 569 | elif len(get_branch) == 1: | ||
| 570 | # If 'master' isn't in get_branch and get_branch contains only ONE object, then store result into 'srcbranch' | ||
| 571 | srcbranch = get_branch[0] | ||
| 572 | else: | ||
| 573 | # If get_branch contains more than one objects, then display error and exit. | ||
| 574 | mbrch = '\n ' + '\n '.join(get_branch) | ||
| 575 | logger.error('Revision %s was found on multiple branches: %s\nPlease provide the correct branch with -B/--srcbranch' % (srcrev, mbrch)) | ||
| 576 | sys.exit(1) | ||
| 577 | |||
| 578 | # Since we might have a value in srcbranch, we need to | ||
| 579 | # recontruct the srcuri to include 'branch' in params. | ||
| 580 | scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(srcuri) | ||
| 581 | if scheme in ['git', 'gitsm']: | ||
| 582 | params['branch'] = srcbranch or 'master' | ||
| 583 | |||
| 584 | if storeTagName and scheme in ['git', 'gitsm']: | ||
| 585 | # Check srcrev using tag and check validity of the tag | ||
| 586 | cmd = ('git rev-parse --verify %s' % (storeTagName)) | ||
| 587 | try: | ||
| 588 | check_tag, check_tag_err = bb.process.run('%s' % cmd, cwd=srctree) | ||
| 589 | srcrev = check_tag.split()[0] | ||
| 590 | except bb.process.ExecutionError as err: | ||
| 591 | logger.error(str(err)) | ||
| 592 | logger.error("Possibly wrong tag name is provided") | ||
| 593 | sys.exit(1) | ||
| 594 | # Drop tag from srcuri as it will have conflicts with SRCREV during recipe parse. | ||
| 595 | del params['tag'] | ||
| 596 | srcuri = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params)) | ||
| 597 | |||
| 598 | if os.path.exists(os.path.join(srctree, '.gitmodules')) and srcuri.startswith('git://'): | ||
| 599 | srcuri = 'gitsm://' + srcuri[6:] | ||
| 600 | logger.info('Fetching submodules...') | ||
| 601 | bb.process.run('git submodule update --init --recursive', cwd=srctree) | ||
| 602 | |||
| 603 | if is_package(fetchuri): | ||
| 604 | localdata = bb.data.createCopy(tinfoil.config_data) | ||
| 605 | pkgfile = bb.fetch2.localpath(fetchuri, localdata) | ||
| 606 | if pkgfile: | ||
| 607 | tmpfdir = tempfile.mkdtemp(prefix='recipetool-') | ||
| 608 | try: | ||
| 609 | if pkgfile.endswith(('.deb', '.ipk')): | ||
| 610 | stdout, _ = bb.process.run('ar x %s' % pkgfile, cwd=tmpfdir) | ||
| 611 | stdout, _ = bb.process.run('tar xf control.tar.gz', cwd=tmpfdir) | ||
| 612 | values = convert_debian(tmpfdir) | ||
| 613 | extravalues.update(values) | ||
| 614 | elif pkgfile.endswith(('.rpm', '.srpm')): | ||
| 615 | stdout, _ = bb.process.run('rpm -qp --xml %s > pkginfo.xml' % pkgfile, cwd=tmpfdir) | ||
| 616 | values = convert_rpm_xml(os.path.join(tmpfdir, 'pkginfo.xml')) | ||
| 617 | extravalues.update(values) | ||
| 618 | finally: | ||
| 619 | shutil.rmtree(tmpfdir) | ||
| 620 | else: | ||
| 621 | # Assume we're pointing to an existing source tree | ||
| 622 | if args.extract_to: | ||
| 623 | logger.error('--extract-to cannot be specified if source is a directory') | ||
| 624 | sys.exit(1) | ||
| 625 | if not os.path.isdir(source): | ||
| 626 | logger.error('Invalid source directory %s' % source) | ||
| 627 | sys.exit(1) | ||
| 628 | srctree = source | ||
| 629 | srcuri = '' | ||
| 630 | if os.path.exists(os.path.join(srctree, '.git')): | ||
| 631 | # Try to get upstream repo location from origin remote | ||
| 632 | try: | ||
| 633 | stdout, _ = bb.process.run('git remote -v', cwd=srctree, shell=True) | ||
| 634 | except bb.process.ExecutionError as e: | ||
| 635 | stdout = None | ||
| 636 | if stdout: | ||
| 637 | for line in stdout.splitlines(): | ||
| 638 | splitline = line.split() | ||
| 639 | if len(splitline) > 1: | ||
| 640 | if splitline[0] == 'origin' and scriptutils.is_src_url(splitline[1]): | ||
| 641 | srcuri = reformat_git_uri(splitline[1]) + ';branch=master' | ||
| 642 | break | ||
| 643 | |||
| 644 | if args.src_subdir: | ||
| 645 | srcsubdir = os.path.join(srcsubdir, args.src_subdir) | ||
| 646 | srctree_use = os.path.abspath(os.path.join(srctree, args.src_subdir)) | ||
| 647 | else: | ||
| 648 | srctree_use = os.path.abspath(srctree) | ||
| 649 | |||
| 650 | if args.outfile and os.path.isdir(args.outfile): | ||
| 651 | outfile = None | ||
| 652 | outdir = args.outfile | ||
| 653 | else: | ||
| 654 | outfile = args.outfile | ||
| 655 | outdir = None | ||
| 656 | if outfile and outfile != '-': | ||
| 657 | if os.path.exists(outfile): | ||
| 658 | logger.error('Output file %s already exists' % outfile) | ||
| 659 | sys.exit(1) | ||
| 660 | |||
| 661 | lines_before = [] | ||
| 662 | lines_after = [] | ||
| 663 | |||
| 664 | lines_before.append('# Recipe created by %s' % os.path.basename(sys.argv[0])) | ||
| 665 | lines_before.append('# This is the basis of a recipe and may need further editing in order to be fully functional.') | ||
| 666 | lines_before.append('# (Feel free to remove these comments when editing.)') | ||
| 667 | # We need a blank line here so that patch_recipe_lines can rewind before the LICENSE comments | ||
| 668 | lines_before.append('') | ||
| 669 | |||
| 670 | # We'll come back and replace this later in handle_license_vars() | ||
| 671 | lines_before.append('##LICENSE_PLACEHOLDER##') | ||
| 672 | |||
| 673 | |||
| 674 | # FIXME This is kind of a hack, we probably ought to be using bitbake to do this | ||
| 675 | pn = None | ||
| 676 | pv = None | ||
| 677 | if outfile: | ||
| 678 | recipefn = os.path.splitext(os.path.basename(outfile))[0] | ||
| 679 | fnsplit = recipefn.split('_') | ||
| 680 | if len(fnsplit) > 1: | ||
| 681 | pn = fnsplit[0] | ||
| 682 | pv = fnsplit[1] | ||
| 683 | else: | ||
| 684 | pn = recipefn | ||
| 685 | |||
| 686 | if args.version: | ||
| 687 | pv = args.version | ||
| 688 | |||
| 689 | if args.name: | ||
| 690 | pn = args.name | ||
| 691 | if args.name.endswith('-native'): | ||
| 692 | if args.also_native: | ||
| 693 | logger.error('--also-native cannot be specified for a recipe named *-native (*-native denotes a recipe that is already only for native) - either remove the -native suffix from the name or drop --also-native') | ||
| 694 | sys.exit(1) | ||
| 695 | classes.append('native') | ||
| 696 | elif args.name.startswith('nativesdk-'): | ||
| 697 | if args.also_native: | ||
| 698 | logger.error('--also-native cannot be specified for a recipe named nativesdk-* (nativesdk-* denotes a recipe that is already only for nativesdk)') | ||
| 699 | sys.exit(1) | ||
| 700 | classes.append('nativesdk') | ||
| 701 | |||
| 702 | if pv and pv not in 'git svn hg'.split(): | ||
| 703 | realpv = pv | ||
| 704 | else: | ||
| 705 | realpv = None | ||
| 706 | |||
| 707 | if not srcuri: | ||
| 708 | lines_before.append('# No information for SRC_URI yet (only an external source tree was specified)') | ||
| 709 | lines_before.append('SRC_URI = "%s"' % srcuri) | ||
| 710 | shown_checksums = ["%ssum" % s for s in bb.fetch2.SHOWN_CHECKSUM_LIST] | ||
| 711 | for key, value in sorted(checksums.items()): | ||
| 712 | if key in shown_checksums: | ||
| 713 | lines_before.append('SRC_URI[%s] = "%s"' % (key, value)) | ||
| 714 | if srcuri and supports_srcrev(srcuri): | ||
| 715 | lines_before.append('') | ||
| 716 | lines_before.append('# Modify these as desired') | ||
| 717 | # Note: we have code to replace realpv further down if it gets set to some other value | ||
| 718 | scheme, _, _, _, _, _ = bb.fetch2.decodeurl(srcuri) | ||
| 719 | if scheme in ['git', 'gitsm']: | ||
| 720 | srcpvprefix = 'git' | ||
| 721 | elif scheme == 'svn': | ||
| 722 | srcpvprefix = 'svnr' | ||
| 723 | else: | ||
| 724 | srcpvprefix = scheme | ||
| 725 | lines_before.append('PV = "%s+%s"' % (realpv or '1.0', srcpvprefix)) | ||
| 726 | pv_srcpv = True | ||
| 727 | if not args.autorev and srcrev == '${AUTOREV}': | ||
| 728 | if os.path.exists(os.path.join(srctree, '.git')): | ||
| 729 | (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree) | ||
| 730 | srcrev = stdout.rstrip() | ||
| 731 | lines_before.append('SRCREV = "%s"' % srcrev) | ||
| 732 | if args.provides: | ||
| 733 | lines_before.append('PROVIDES = "%s"' % args.provides) | ||
| 734 | lines_before.append('') | ||
| 735 | |||
| 736 | if srcsubdir and not args.binary: | ||
| 737 | # (for binary packages we explicitly specify subdir= when fetching to | ||
| 738 | # match the default value of S, so we don't need to set it in that case) | ||
| 739 | lines_before.append('S = "${UNPACKDIR}/%s"' % srcsubdir) | ||
| 740 | lines_before.append('') | ||
| 741 | |||
| 742 | if pkgarch: | ||
| 743 | lines_after.append('PACKAGE_ARCH = "%s"' % pkgarch) | ||
| 744 | lines_after.append('') | ||
| 745 | |||
| 746 | if args.binary: | ||
| 747 | lines_after.append('INSANE_SKIP:${PN} += "already-stripped"') | ||
| 748 | lines_after.append('') | ||
| 749 | |||
| 750 | if args.npm_dev: | ||
| 751 | extravalues['NPM_INSTALL_DEV'] = 1 | ||
| 752 | |||
| 753 | # Apply the handlers | ||
| 754 | if args.binary: | ||
| 755 | classes.append('bin_package') | ||
| 756 | handled.append('buildsystem') | ||
| 757 | |||
| 758 | for handler in handlers: | ||
| 759 | handler.process(srctree_use, classes, lines_before, lines_after, handled, extravalues) | ||
| 760 | |||
| 761 | # native and nativesdk classes are special and must be inherited last | ||
| 762 | # If present, put them at the end of the classes list | ||
| 763 | classes.sort(key=lambda c: c in ("native", "nativesdk")) | ||
| 764 | |||
| 765 | extrafiles = extravalues.pop('extrafiles', {}) | ||
| 766 | extra_pn = extravalues.pop('PN', None) | ||
| 767 | extra_pv = extravalues.pop('PV', None) | ||
| 768 | run_tasks = extravalues.pop('run_tasks', "").split() | ||
| 769 | |||
| 770 | if extra_pv and not realpv: | ||
| 771 | realpv = extra_pv | ||
| 772 | if not validate_pv(realpv): | ||
| 773 | realpv = None | ||
| 774 | else: | ||
| 775 | realpv = realpv.lower().split()[0] | ||
| 776 | if '_' in realpv: | ||
| 777 | realpv = realpv.replace('_', '-') | ||
| 778 | if extra_pn and not pn: | ||
| 779 | pn = extra_pn | ||
| 780 | if pn.startswith('GNU '): | ||
| 781 | pn = pn[4:] | ||
| 782 | if ' ' in pn: | ||
| 783 | # Probably a descriptive identifier rather than a proper name | ||
| 784 | pn = None | ||
| 785 | else: | ||
| 786 | pn = pn.lower() | ||
| 787 | if '_' in pn: | ||
| 788 | pn = pn.replace('_', '-') | ||
| 789 | |||
| 790 | if srcuri and not realpv or not pn: | ||
| 791 | name_pn, name_pv = determine_from_url(srcuri) | ||
| 792 | if name_pn and not pn: | ||
| 793 | pn = name_pn | ||
| 794 | if name_pv and not realpv: | ||
| 795 | realpv = name_pv | ||
| 796 | |||
| 797 | licvalues = handle_license_vars(srctree_use, lines_before, handled, extravalues, tinfoil.config_data) | ||
| 798 | |||
| 799 | if not outfile: | ||
| 800 | if not pn: | ||
| 801 | log_error_cond('Unable to determine short program name from source tree - please specify name with -N/--name or output file name with -o/--outfile', args.devtool) | ||
| 802 | # devtool looks for this specific exit code, so don't change it | ||
| 803 | sys.exit(15) | ||
| 804 | else: | ||
| 805 | if srcuri and srcuri.startswith(('gitsm://', 'git://', 'hg://', 'svn://')): | ||
| 806 | suffix = srcuri.split(':', 1)[0] | ||
| 807 | if suffix == 'gitsm': | ||
| 808 | suffix = 'git' | ||
| 809 | outfile = '%s_%s.bb' % (pn, suffix) | ||
| 810 | elif realpv: | ||
| 811 | outfile = '%s_%s.bb' % (pn, realpv) | ||
| 812 | else: | ||
| 813 | outfile = '%s.bb' % pn | ||
| 814 | if outdir: | ||
| 815 | outfile = os.path.join(outdir, outfile) | ||
| 816 | # We need to check this again | ||
| 817 | if os.path.exists(outfile): | ||
| 818 | logger.error('Output file %s already exists' % outfile) | ||
| 819 | sys.exit(1) | ||
| 820 | |||
| 821 | # Move any extra files the plugins created to a directory next to the recipe | ||
| 822 | if extrafiles: | ||
| 823 | if outfile == '-': | ||
| 824 | extraoutdir = pn | ||
| 825 | else: | ||
| 826 | extraoutdir = os.path.join(os.path.dirname(outfile), pn) | ||
| 827 | bb.utils.mkdirhier(extraoutdir) | ||
| 828 | for destfn, extrafile in extrafiles.items(): | ||
| 829 | fn = destfn.format(pn=pn, pv=realpv) | ||
| 830 | shutil.move(extrafile, os.path.join(extraoutdir, fn)) | ||
| 831 | |||
| 832 | lines = lines_before | ||
| 833 | lines_before = [] | ||
| 834 | skipblank = True | ||
| 835 | for line in lines: | ||
| 836 | if skipblank: | ||
| 837 | skipblank = False | ||
| 838 | if not line: | ||
| 839 | continue | ||
| 840 | if line.startswith('S = '): | ||
| 841 | if realpv and pv not in 'git svn hg'.split(): | ||
| 842 | line = line.replace(realpv, '${PV}') | ||
| 843 | if pn: | ||
| 844 | line = line.replace(pn, '${BPN}') | ||
| 845 | if line == 'S = "${UNPACKDIR}/${BPN}-${PV}"' or 'tmp-recipetool-' in line: | ||
| 846 | skipblank = True | ||
| 847 | continue | ||
| 848 | elif line.startswith('SRC_URI = '): | ||
| 849 | if realpv and not pv_srcpv: | ||
| 850 | line = line.replace(realpv, '${PV}') | ||
| 851 | elif line.startswith('PV = '): | ||
| 852 | if realpv: | ||
| 853 | # Replace the first part of the PV value | ||
| 854 | line = re.sub(r'"[^+]*\+', '"%s+' % realpv, line) | ||
| 855 | lines_before.append(line) | ||
| 856 | |||
| 857 | if args.also_native: | ||
| 858 | lines = lines_after | ||
| 859 | lines_after = [] | ||
| 860 | bbclassextend = None | ||
| 861 | for line in lines: | ||
| 862 | if line.startswith('BBCLASSEXTEND ='): | ||
| 863 | splitval = line.split('"') | ||
| 864 | if len(splitval) > 1: | ||
| 865 | bbclassextend = splitval[1].split() | ||
| 866 | if not 'native' in bbclassextend: | ||
| 867 | bbclassextend.insert(0, 'native') | ||
| 868 | line = 'BBCLASSEXTEND = "%s"' % ' '.join(bbclassextend) | ||
| 869 | lines_after.append(line) | ||
| 870 | if not bbclassextend: | ||
| 871 | lines_after.append('BBCLASSEXTEND = "native"') | ||
| 872 | |||
| 873 | postinst = ("postinst", extravalues.pop('postinst', None)) | ||
| 874 | postrm = ("postrm", extravalues.pop('postrm', None)) | ||
| 875 | preinst = ("preinst", extravalues.pop('preinst', None)) | ||
| 876 | prerm = ("prerm", extravalues.pop('prerm', None)) | ||
| 877 | funcs = [postinst, postrm, preinst, prerm] | ||
| 878 | for func in funcs: | ||
| 879 | if func[1]: | ||
| 880 | RecipeHandler.genfunction(lines_after, 'pkg_%s_${PN}' % func[0], func[1]) | ||
| 881 | |||
| 882 | outlines = [] | ||
| 883 | outlines.extend(lines_before) | ||
| 884 | if classes: | ||
| 885 | if outlines[-1] and not outlines[-1].startswith('#'): | ||
| 886 | outlines.append('') | ||
| 887 | outlines.append('inherit %s' % ' '.join(classes)) | ||
| 888 | outlines.append('') | ||
| 889 | outlines.extend(lines_after) | ||
| 890 | |||
| 891 | outlines = [ line.rstrip('\n') +"\n" for line in outlines] | ||
| 892 | |||
| 893 | if extravalues: | ||
| 894 | _, outlines = oe.recipeutils.patch_recipe_lines(outlines, extravalues, trailing_newline=True) | ||
| 895 | |||
| 896 | if args.extract_to: | ||
| 897 | scriptutils.git_convert_standalone_clone(srctree) | ||
| 898 | if os.path.isdir(args.extract_to): | ||
| 899 | # If the directory exists we'll move the temp dir into it instead of | ||
| 900 | # its contents - of course, we could try to always move its contents | ||
| 901 | # but that is a pain if there are symlinks; the simplest solution is | ||
| 902 | # to just remove it first | ||
| 903 | os.rmdir(args.extract_to) | ||
| 904 | shutil.move(srctree, args.extract_to) | ||
| 905 | if tempsrc == srctree: | ||
| 906 | tempsrc = None | ||
| 907 | log_info_cond('Source extracted to %s' % args.extract_to, args.devtool) | ||
| 908 | |||
| 909 | if outfile == '-': | ||
| 910 | sys.stdout.write(''.join(outlines) + '\n') | ||
| 911 | else: | ||
| 912 | with open(outfile, 'w') as f: | ||
| 913 | lastline = None | ||
| 914 | for line in outlines: | ||
| 915 | if not lastline and not line: | ||
| 916 | # Skip extra blank lines | ||
| 917 | continue | ||
| 918 | f.write('%s' % line) | ||
| 919 | lastline = line | ||
| 920 | log_info_cond('Recipe %s has been created; further editing may be required to make it fully functional' % outfile, args.devtool) | ||
| 921 | tinfoil.modified_files() | ||
| 922 | |||
| 923 | for task in run_tasks: | ||
| 924 | logger.info("Running task %s" % task) | ||
| 925 | tinfoil.build_file_sync(outfile, task) | ||
| 926 | |||
| 927 | if tempsrc: | ||
| 928 | if args.keep_temp: | ||
| 929 | logger.info('Preserving temporary directory %s' % tempsrc) | ||
| 930 | else: | ||
| 931 | shutil.rmtree(tempsrc) | ||
| 932 | |||
| 933 | return 0 | ||
| 934 | |||
def check_single_file(fn, fetchuri):
    """Determine if a single downloaded file is something we can't handle.

    Reads the first 100 characters of *fn*; if they look like an HTML
    document, the fetch URL almost certainly pointed at a web page rather
    than an archive, so report the problem and abort.
    """
    with open(fn, 'r', errors='surrogateescape') as filehandle:
        head = filehandle.read(100)
    if '<html' in head.lower():
        logger.error('Fetching "%s" returned a single HTML page - check the URL is correct and functional' % fetchuri)
        sys.exit(1)
| 941 | |||
def split_value(value):
    """Return *value* as a list: whitespace-split it when it is a string,
    otherwise pass it through unchanged (assumed already list-like)."""
    return value.split() if isinstance(value, str) else value
| 947 | |||
def fixup_license(value):
    """Bracket a license expression containing an OR ('|') so it stays
    unambiguous when later joined to other licenses with '&'."""
    if '|' not in value:
        return value
    return '(%s)' % value
| 953 | |||
def handle_license_vars(srctree, lines_before, handled, extravalues, d):
    """Detect licensing for srctree and insert LICENSE / LIC_FILES_CHKSUM lines.

    Scans the source tree for license files, merges in any LICENSE /
    LIC_FILES_CHKSUM hints supplied via extravalues, and splices the
    resulting variable assignments (plus explanatory comments) into
    lines_before in place of the ##LICENSE_PLACEHOLDER## marker.
    Records a ('license', licvalues) entry in handled and returns
    licvalues (a list of (license, path, md5sum) tuples as produced by
    find_licenses - see split_pkg_licenses).
    """
    lichandled = [x for x in handled if x[0] == 'license']
    if lichandled:
        # Someone else has already handled the license vars, just return their value
        return lichandled[0][1]

    licvalues = find_licenses(srctree, d)
    licenses = []
    lic_files_chksum = []
    lic_unknown = []
    lines = []
    if licvalues:
        for licvalue in licvalues:
            license = licvalue[0]
            # Deduplicate: only add license names we have not seen yet
            lics = tidy_licenses(fixup_license(license))
            lics = [lic for lic in lics if lic not in licenses]
            if len(lics):
                licenses.extend(lics)
            # Every detected license file gets a checksum entry, even when the
            # license name itself was a duplicate
            lic_files_chksum.append('file://%s;md5=%s' % (licvalue[1], licvalue[2]))
            if license == 'Unknown':
                lic_unknown.append(licvalue[1])
        if lic_unknown:
            lines.append('#')
            lines.append('# The following license files were not able to be identified and are')
            lines.append('# represented as "Unknown" below, you will need to check them yourself:')
            for licfile in lic_unknown:
                lines.append('# %s' % licfile)

    # Merge in license metadata supplied by plugins (e.g. from package metadata)
    extra_license = tidy_licenses(extravalues.pop('LICENSE', ''))
    if extra_license:
        if licenses == ['Unknown']:
            # Metadata beats a completely unidentified scan result
            licenses = extra_license
        else:
            for item in extra_license:
                if item not in licenses:
                    licenses.append(item)
    extra_lic_files_chksum = split_value(extravalues.pop('LIC_FILES_CHKSUM', []))
    for item in extra_lic_files_chksum:
        if item not in lic_files_chksum:
            lic_files_chksum.append(item)

    if lic_files_chksum:
        # We are going to set the vars, so prepend the standard disclaimer
        lines.insert(0, '# WARNING: the following LICENSE and LIC_FILES_CHKSUM values are best guesses - it is')
        lines.insert(1, '# your responsibility to verify that the values are complete and correct.')
    else:
        # Without LIC_FILES_CHKSUM we set LICENSE = "CLOSED" to allow the
        # user to get started easily
        lines.append('# Unable to find any files that looked like license statements. Check the accompanying')
        lines.append('# documentation and source headers and set LICENSE and LIC_FILES_CHKSUM accordingly.')
        lines.append('#')
        lines.append('# NOTE: LICENSE is being set to "CLOSED" to allow you to at least start building - if')
        lines.append('# this is not accurate with respect to the licensing of the software being built (it')
        lines.append('# will not be in most cases) you must specify the correct value before using this')
        lines.append('# recipe for anything other than initial testing/development!')
        licenses = ['CLOSED']

    if extra_license and sorted(licenses) != sorted(extra_license):
        lines.append('# NOTE: Original package / source metadata indicates license is: %s' % ' & '.join(extra_license))

    if len(licenses) > 1:
        lines.append('#')
        lines.append('# NOTE: multiple licenses have been detected; they have been separated with &')
        lines.append('# in the LICENSE value for now since it is a reasonable assumption that all')
        lines.append('# of the licenses apply. If instead there is a choice between the multiple')
        lines.append('# licenses then you should change the value to separate the licenses with |')
        lines.append('# instead of &. If there is any doubt, check the accompanying documentation')
        lines.append('# to determine which situation is applicable.')

    lines.append('LICENSE = "%s"' % ' & '.join(sorted(licenses, key=str.casefold)))
    # Continuation lines are indented to line up under the opening quote
    lines.append('LIC_FILES_CHKSUM = "%s"' % ' \\\n                    '.join(lic_files_chksum))
    lines.append('')

    # Replace the placeholder so we get the values in the right place in the recipe file
    try:
        pos = lines_before.index('##LICENSE_PLACEHOLDER##')
    except ValueError:
        pos = -1
    if pos == -1:
        lines_before.extend(lines)
    else:
        lines_before[pos:pos+1] = lines

    handled.append(('license', licvalues))
    return licvalues
| 1039 | |||
def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn='${PN}'):
    """
    Given a list of (license, path, md5sum) as returned by match_licenses(),
    a dict of package name to path mappings, write out a set of
    package-specific LICENSE values.
    """
    # Bucket each detected license under the package whose path prefix
    # contains the license file; anything that matches no package path
    # accumulates on the main package (pn).
    pkglicenses = {pn: []}
    for license, licpath, _ in licvalues:
        license = fixup_license(license)
        target = pn
        for pkgname, pkgpath in packages.items():
            if licpath.startswith(pkgpath + '/'):
                target = pkgname
                break
        pkglicenses.setdefault(target, []).append(license)

    outlicenses = {}
    for pkgname in packages:
        # Assume AND operator between license files
        license = ' & '.join(set(pkglicenses.get(pkgname, ['Unknown']))) or 'Unknown'
        if license == 'Unknown' and fallback_licenses and pkgname in fallback_licenses:
            license = fallback_licenses[pkgname]
        licenses = tidy_licenses(license)
        license = ' & '.join(licenses)
        outlines.append('LICENSE:%s = "%s"' % (pkgname, license))
        outlicenses[pkgname] = licenses
    return outlicenses
| 1070 | |||
def generate_common_licenses_chksums(common_licenses, d):
    """Build LIC_FILES_CHKSUM entries for licenses shipped in
    ${COMMON_LICENSE_DIR}, one 'file://...;md5=...' entry per license
    named in *common_licenses*."""
    entries = []
    for license in tidy_licenses(common_licenses):
        licfile = '${COMMON_LICENSE_DIR}/' + license
        # Expand the path via the datastore so we can checksum the real file
        md5value = bb.utils.md5_file(d.expand(licfile))
        entries.append('file://%s;md5=%s' % (licfile, md5value))
    return entries
| 1078 | |||
def read_pkgconfig_provides(d):
    """Map pkg-config (.pc) file names to the recipes (PN) providing them.

    Reads the shlibs2/*.pclist files from PKGDATA_DIR to find which package
    ships each .pc file, then resolves each package to its recipe name via
    the corresponding runtime pkgdata file.
    """
    pkgdatadir = d.getVar('PKGDATA_DIR')
    # Pass 1: .pc basename -> providing package name
    pkgmap = {}
    for fn in glob.glob(os.path.join(pkgdatadir, 'shlibs2', '*.pclist')):
        pkg = os.path.splitext(os.path.basename(fn))[0]
        with open(fn, 'r') as f:
            for line in f:
                pkgmap[os.path.basename(line.rstrip())] = pkg
    # Pass 2: package name -> recipe name (PN) from the runtime pkgdata
    recipemap = {}
    for pc, pkg in pkgmap.items():
        pkgdatafile = os.path.join(pkgdatadir, 'runtime', pkg)
        if os.path.exists(pkgdatafile):
            with open(pkgdatafile, 'r') as f:
                for line in f:
                    if line.startswith('PN: '):
                        recipemap[pc] = line.split(':', 1)[1].strip()
    return recipemap
| 1095 | |||
def convert_debian(debpath):
    """Extract recipe variable values from a debian/ packaging directory.

    Parses the control file for standard fields (mapped to PN, PV, SECTION,
    LICENSE, HOMEPAGE, SUMMARY/DESCRIPTION) and converts any maintainer
    scripts (postinst/postrm/preinst/prerm) into lists of shell lines, keyed
    by script name, for later emission as recipe functions.
    """
    value_map = {'Package': 'PN',
                 'Version': 'PV',
                 'Section': 'SECTION',
                 'License': 'LICENSE',
                 'Homepage': 'HOMEPAGE'}

    # FIXME extend this mapping - perhaps use distro_alias.inc?
    depmap = {'libz-dev': 'zlib'}

    values = {}
    depends = []
    controlfile = os.path.join(debpath, 'control')
    with open(controlfile, 'r', errors='surrogateescape') as f:
        indesc = False
        for line in f:
            if indesc:
                if not line.startswith(' '):
                    # Description block over; fall through and parse this
                    # line as a regular field
                    indesc = False
                elif line.startswith(' This package contains'):
                    # Boilerplate trailer - stop collecting the description
                    indesc = False
                elif 'DESCRIPTION' in values:
                    values['DESCRIPTION'] += ' ' + line.strip()
                else:
                    values['DESCRIPTION'] = line.strip()
            if indesc:
                continue
            key, sep, value = line.partition(':')
            if not sep:
                continue
            value = value.strip()
            if key == 'Build-Depends':
                for dep in value.split(','):
                    dep = dep.split()[0]
                    mapped = depmap.get(dep, '')
                    if mapped:
                        depends.append(mapped)
            elif key == 'Description':
                # First line is the summary; subsequent indented lines form
                # the long description
                values['SUMMARY'] = value
                indesc = True
            else:
                varname = value_map.get(key, None)
                if varname:
                    values[varname] = value

    # Convert maintainer scripts into line lists, skipping shebangs and
    # blank lines
    for scriptname in ('postinst', 'postrm', 'preinst', 'prerm'):
        scriptfile = os.path.join(debpath, scriptname)
        if not os.path.isfile(scriptfile):
            continue
        logger.info("Converting %s file to recipe function..." %
                    scriptname.upper())
        content = []
        with open(scriptfile) as f:
            for line in f:
                if "#!/" in line:
                    continue
                line = line.rstrip("\n")
                if line.strip():
                    content.append(line)
        if content:
            values[scriptname] = content

    # depends is collected but deliberately not emitted yet:
    #if depends:
    #    values['DEPENDS'] = ' '.join(depends)

    return values
| 1165 | |||
def convert_rpm_xml(xmlfile):
    '''Converts the output from rpm -qp --xml to a set of variable values'''
    import xml.etree.ElementTree as ElementTree
    # RPM tag name -> recipe variable name
    rpmtag_map = {'Name': 'PN',
                  'Version': 'PV',
                  'Summary': 'SUMMARY',
                  'Description': 'DESCRIPTION',
                  'License': 'LICENSE',
                  'Url': 'HOMEPAGE'}

    values = {}
    root = ElementTree.parse(xmlfile).getroot()
    for node in root:
        if node.tag != 'rpmTag':
            continue
        name = node.attrib.get('name', None)
        if not name:
            continue
        varname = rpmtag_map.get(name, None)
        if varname:
            # The tag value is the text of the first child element
            values[varname] = node[0].text
    return values
| 1187 | |||
| 1188 | |||
def register_commands(subparsers):
    """Register the 'create' subcommand and its command line options."""
    parser = subparsers.add_parser('create',
                                   help='Create a new recipe',
                                   description='Creates a new recipe from a source tree')
    # Positional argument and basic output/naming options
    parser.add_argument('source', help='Path or URL to source')
    parser.add_argument('-o', '--outfile', help='Specify filename for recipe to create')
    parser.add_argument('-p', '--provides', help='Specify an alias for the item provided by the recipe')
    parser.add_argument('-m', '--machine', help='Make recipe machine-specific as opposed to architecture-specific', action='store_true')
    parser.add_argument('-x', '--extract-to', metavar='EXTRACTPATH', help='Assuming source is a URL, fetch it and extract it to the directory specified as %(metavar)s')
    parser.add_argument('-N', '--name', help='Name to use within recipe (PN)')
    parser.add_argument('-V', '--version', help='Version to use within recipe (PV)')
    parser.add_argument('-b', '--binary', help='Treat the source tree as something that should be installed verbatim (no compilation, same directory structure)', action='store_true')
    parser.add_argument('--also-native', help='Also add native variant (i.e. support building recipe for the build host as well as the target machine)', action='store_true')
    parser.add_argument('--src-subdir', help='Specify subdirectory within source tree to use', metavar='SUBDIR')
    # -a and -S are mutually exclusive: a floating revision or a fixed one
    revgroup = parser.add_mutually_exclusive_group()
    revgroup.add_argument('-a', '--autorev', help='When fetching from a git repository, set SRCREV in the recipe to a floating revision instead of fixed', action="store_true")
    revgroup.add_argument('-S', '--srcrev', help='Source revision to fetch if fetching from an SCM such as git (default latest)')
    parser.add_argument('-B', '--srcbranch', help='Branch in source repository if fetching from an SCM such as git (default master)')
    parser.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
    parser.add_argument('--npm-dev', action="store_true", help='For npm, also fetch devDependencies')
    parser.add_argument('--no-pypi', action="store_true", help='Do not inherit pypi class')
    # --devtool is internal-only (set when invoked via devtool), so hide it
    parser.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS)
    parser.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).')
    parser.set_defaults(func=create_recipe)
