diff options
Diffstat (limited to 'scripts/lib/scriptutils.py')
| -rw-r--r-- | scripts/lib/scriptutils.py | 274 |
1 files changed, 0 insertions, 274 deletions
diff --git a/scripts/lib/scriptutils.py b/scripts/lib/scriptutils.py deleted file mode 100644 index 32e749dbb1..0000000000 --- a/scripts/lib/scriptutils.py +++ /dev/null | |||
| @@ -1,274 +0,0 @@ | |||
| 1 | # Script utility functions | ||
| 2 | # | ||
| 3 | # Copyright (C) 2014 Intel Corporation | ||
| 4 | # | ||
| 5 | # SPDX-License-Identifier: GPL-2.0-only | ||
| 6 | # | ||
| 7 | |||
| 8 | import glob | ||
| 9 | import logging | ||
| 10 | import os | ||
| 11 | import random | ||
| 12 | import shlex | ||
| 13 | import shutil | ||
| 14 | import string | ||
| 15 | import subprocess | ||
| 16 | import sys | ||
| 17 | import tempfile | ||
| 18 | import threading | ||
| 19 | import importlib | ||
| 20 | import importlib.machinery | ||
| 21 | import importlib.util | ||
| 22 | |||
class KeepAliveStreamHandler(logging.StreamHandler):
    """StreamHandler that emits a periodic "Keepalive message" record when
    nothing else has been logged for a while.

    Useful to stop wrappers that kill quiet processes (e.g. CI timeouts)
    from firing during long silent operations.
    """

    def __init__(self, keepalive=True, **kwargs):
        super().__init__(**kwargs)
        # keepalive=True selects the default interval; a number is an
        # explicit timeout (passed straight to Condition.wait).
        interval = 5000 if keepalive is True else keepalive
        self._timeout = threading.Condition()
        self._stop = False

        def watchdog():
            # Wait on the condition; whenever the wait expires without
            # being notified (i.e. nothing was logged), emit a keepalive.
            while not self._stop:
                with self._timeout:
                    if not self._timeout.wait(interval):
                        record = logging.LogRecord("keepalive", logging.INFO,
                                                   None, None,
                                                   "Keepalive message",
                                                   None, None)
                        self.emit(record)

        self._thread = threading.Thread(target=watchdog, daemon=True)
        self._thread.start()

    def close(self):
        # Tell the watchdog thread to exit, wake it, and wait for it.
        self._stop = True
        with self._timeout:
            self._timeout.notify()
        self._thread.join()
        super().close()

    def emit(self, record):
        super().emit(record)
        # Any real output resets the keepalive timer.
        with self._timeout:
            self._timeout.notify()
def logger_create(name, stream=None, keepalive=None):
    """Create and return a logger named *name* at INFO level with a
    simple "LEVEL: message" formatter attached.

    If *keepalive* is given, a KeepAliveStreamHandler is used so that
    keepalive messages are emitted during long quiet periods; otherwise
    a plain StreamHandler writing to *stream* is attached.
    """
    log = logging.getLogger(name)
    if keepalive is None:
        handler = logging.StreamHandler(stream=stream)
    else:
        handler = KeepAliveStreamHandler(stream=stream, keepalive=keepalive)
    handler.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))
    log.addHandler(handler)
    log.setLevel(logging.INFO)
    return log
| 68 | |||
def logger_setup_color(logger, color='auto'):
    """Enable colour output on any BBLogFormatter-backed stream handler
    attached to *logger*.

    *color* may be 'always', 'auto' (colour only when the handler's
    stream is a tty) or anything else to leave colour off.
    """
    from bb.msg import BBLogFormatter

    for handler in logger.handlers:
        is_stream = isinstance(handler, logging.StreamHandler)
        if is_stream and isinstance(handler.formatter, BBLogFormatter):
            want_color = (color == 'always' or
                          (color == 'auto' and handler.stream.isatty()))
            if want_color:
                handler.formatter.enable_color()
| 77 | |||
| 78 | |||
def load_plugins(logger, plugins, pluginpath):
    """Load every Python module found in *pluginpath* (except __init__)
    that is not already represented in *plugins*, appending each loaded
    module to *plugins*.

    If a loaded module defines plugin_init(), it is called with the
    plugins list before the module is appended.
    """

    def _basename_no_ext(path):
        # Plugin identity = filename without directory or extension
        return os.path.splitext(os.path.basename(path))[0]

    def _import_from_path(modname):
        logger.debug('Loading plugin %s' % modname)
        spec = importlib.machinery.PathFinder.find_spec(modname,
                                                        path=[pluginpath])
        if spec:
            module = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(module)
            return module

    already_loaded = [_basename_no_ext(p.__name__) for p in plugins]
    logger.debug('Loading plugins from %s...' % pluginpath)
    for candidate in glob.glob(os.path.join(pluginpath, '*.py')):
        modname = _basename_no_ext(candidate)
        if modname == '__init__' or modname in already_loaded:
            continue
        module = _import_from_path(modname)
        if hasattr(module, 'plugin_init'):
            module.plugin_init(plugins)
        plugins.append(module)
| 100 | |||
| 101 | |||
def git_convert_standalone_clone(repodir):
    """If specified directory is a git repository, ensure it's a standalone clone"""
    import bb.process
    gitdir = os.path.join(repodir, '.git')
    if os.path.exists(gitdir):
        alternatesfile = os.path.join(gitdir, 'objects', 'info', 'alternates')
        if os.path.exists(alternatesfile):
            # A clone made with -s shares its object store with the original
            # repository; repack everything locally so nothing is shared,
            # then drop the alternates file.
            bb.process.run('git repack -a', cwd=repodir)
            os.remove(alternatesfile)
| 112 | |||
| 113 | def _get_temp_recipe_dir(d): | ||
| 114 | # This is a little bit hacky but we need to find a place where we can put | ||
| 115 | # the recipe so that bitbake can find it. We're going to delete it at the | ||
| 116 | # end so it doesn't really matter where we put it. | ||
| 117 | bbfiles = d.getVar('BBFILES').split() | ||
| 118 | fetchrecipedir = None | ||
| 119 | for pth in bbfiles: | ||
| 120 | if pth.endswith('.bb'): | ||
| 121 | pthdir = os.path.dirname(pth) | ||
| 122 | if os.access(os.path.dirname(os.path.dirname(pthdir)), os.W_OK): | ||
| 123 | fetchrecipedir = pthdir.replace('*', 'recipetool') | ||
| 124 | if pthdir.endswith('workspace/recipes/*'): | ||
| 125 | # Prefer the workspace | ||
| 126 | break | ||
| 127 | return fetchrecipedir | ||
| 128 | |||
class FetchUrlFailure(Exception):
    """Raised when fetch_url() is unable to fetch the requested URL."""

    def __init__(self, url):
        # Remember the URL so callers / __str__ can report it
        self.url = url

    def __str__(self):
        return "Failed to fetch URL %s" % self.url
| 134 | |||
def fetch_url(tinfoil, srcuri, srcrev, destdir, logger, preserve_tmp=False, mirrors=False):
    """
    Fetch the specified URL using normal do_fetch and do_unpack tasks, i.e.
    any dependencies that need to be satisfied in order to support the fetch
    operation will be taken care of

    Works by writing a throwaway recipe with SRC_URI set to srcuri into a
    writeable BBFILES directory, building its do_unpack task through
    tinfoil, then cleaning the recipe up again.

    Arguments:
        tinfoil: connected bb.tinfoil.Tinfoil instance (assumed - TODO
            confirm; only config_data, parse_recipes, build_targets and
            parse_recipe are used here)
        srcuri: SRC_URI value to fetch
        srcrev: SRCREV value to use
        destdir: directory the source will be unpacked into (written as
            UNPACKDIR in the temporary recipe)
        logger: logger for progress/error messages
        preserve_tmp: if True, keep the temporary work directory and
            return its path instead of deleting it
        mirrors: if False (default), disable PREMIRRORS/MIRRORS in the
            temporary recipe

    Returns:
        (checksums, tmpdir) - checksums is a dict collected from any
        bb.fetch2.MissingChecksumEvent; tmpdir is the preserved temp
        directory path if preserve_tmp was set, otherwise None.

    Raises:
        FetchUrlFailure: if building the do_unpack task fails.
        SystemExit: if no writeable temporary recipe location is found.
    """

    import bb

    checksums = {}
    fetchrecipepn = None

    # We need to put our temp directory under ${BASE_WORKDIR} otherwise
    # we may have problems with the recipe-specific sysroot population
    tmpparent = tinfoil.config_data.getVar('BASE_WORKDIR')
    bb.utils.mkdirhier(tmpparent)
    tmpdir = tempfile.mkdtemp(prefix='recipetool-', dir=tmpparent)
    try:
        tmpworkdir = os.path.join(tmpdir, 'work')
        logger.debug('fetch_url: temp dir is %s' % tmpdir)

        fetchrecipedir = _get_temp_recipe_dir(tinfoil.config_data)
        if not fetchrecipedir:
            logger.error('Searched BBFILES but unable to find a writeable place to put temporary recipe')
            sys.exit(1)
        fetchrecipe = None
        bb.utils.mkdirhier(fetchrecipedir)
        try:
            # Generate a dummy recipe so we can follow more or less normal paths
            # for do_fetch and do_unpack
            # I'd use tempfile functions here but underscores can be produced by that and those
            # aren't allowed in recipe file names except to separate the version
            rndstring = ''.join(random.choice(string.ascii_lowercase + string.digits) for _ in range(8))
            fetchrecipe = os.path.join(fetchrecipedir, 'tmp-recipetool-%s.bb' % rndstring)
            fetchrecipepn = os.path.splitext(os.path.basename(fetchrecipe))[0]
            logger.debug('Generating initial recipe %s for fetching' % fetchrecipe)
            with open(fetchrecipe, 'w') as f:
                # We don't want to have to specify LIC_FILES_CHKSUM
                f.write('LICENSE = "CLOSED"\n')
                # We don't need the cross-compiler
                f.write('INHIBIT_DEFAULT_DEPS = "1"\n')
                # We don't have the checksums yet so we can't require them
                f.write('BB_STRICT_CHECKSUM = "ignore"\n')
                f.write('SRC_URI = "%s"\n' % srcuri)
                f.write('SRCREV = "%s"\n' % srcrev)
                f.write('PV = "0.0+"\n')
                f.write('WORKDIR = "%s"\n' % tmpworkdir)
                f.write('UNPACKDIR = "%s"\n' % destdir)

                # Set S out of the way so it doesn't get created under the workdir
                s_dir = os.path.join(tmpdir, 'emptysrc')
                bb.utils.mkdirhier(s_dir)
                f.write('S = "%s"\n' % s_dir)

                if not mirrors:
                    # We do not need PREMIRRORS since we are almost certainly
                    # fetching new source rather than something that has already
                    # been fetched. Hence, we disable them by default.
                    # However, we provide an option for users to enable it.
                    f.write('PREMIRRORS = ""\n')
                    f.write('MIRRORS = ""\n')

            logger.info('Fetching %s...' % srcuri)

            # FIXME this is too noisy at the moment

            # Parse recipes so our new recipe gets picked up
            tinfoil.parse_recipes()

            # Collect checksums reported by the fetcher (we wrote
            # BB_STRICT_CHECKSUM = "ignore" above, so missing checksums
            # surface as events rather than failures)
            def eventhandler(event):
                if isinstance(event, bb.fetch2.MissingChecksumEvent):
                    checksums.update(event.checksums)
                    return True
                return False

            # Run the fetch + unpack tasks
            res = tinfoil.build_targets(fetchrecipepn,
                                        'do_unpack',
                                        handle_events=True,
                                        extra_events=['bb.fetch2.MissingChecksumEvent'],
                                        event_callback=eventhandler)
            if not res:
                raise FetchUrlFailure(srcuri)

            # Remove unneeded directories
            rd = tinfoil.parse_recipe(fetchrecipepn)
            if rd:
                pathvars = ['T', 'RECIPE_SYSROOT', 'RECIPE_SYSROOT_NATIVE']
                for pathvar in pathvars:
                    path = rd.getVar(pathvar)
                    if os.path.exists(path):
                        shutil.rmtree(path)
        finally:
            # Always remove the temporary recipe; the directory itself is
            # only removed if we created it empty (ENOTEMPTY is tolerated
            # because it may be a shared location from BBFILES)
            if fetchrecipe:
                try:
                    os.remove(fetchrecipe)
                except FileNotFoundError:
                    pass
            try:
                os.rmdir(fetchrecipedir)
            except OSError as e:
                import errno
                if e.errno != errno.ENOTEMPTY:
                    raise

    finally:
        if not preserve_tmp:
            shutil.rmtree(tmpdir)
            tmpdir = None

    return checksums, tmpdir
| 246 | |||
| 247 | |||
def run_editor(fn, logger=None):
    """Open the user's preferred editor on one or more files.

    The editor command is taken from $VISUAL, falling back to $EDITOR,
    falling back to 'vi'.

    Arguments:
        fn: a single filename (str) or an iterable of filenames
        logger: optional logger used to report a failing editor

    Returns:
        The editor's exit status (0 on success), or 1 if the editor
        exited with a non-zero status.
    """
    if isinstance(fn, str):
        files = [fn]
    else:
        files = list(fn)

    editor = os.getenv('VISUAL', os.getenv('EDITOR', 'vi'))
    try:
        return subprocess.check_call(shlex.split(editor) + files)
    except subprocess.CalledProcessError as exc:
        # Bug fix: logger defaults to None; the original called
        # logger.error() unconditionally and raised AttributeError here
        if logger:
            logger.error("Execution of '%s' failed: %s" % (editor, exc))
        return 1
| 261 | |||
def is_src_url(param):
    """
    Check if a parameter is a URL and return True if so
    NOTE: be careful about changing this as it will influence how devtool/recipetool command line handling works
    """
    if not param:
        return False
    if '://' in param:
        return True
    # git-style ssh shorthand: git@host:... or user@host/path ending .git
    looks_like_git_ssh = (param.startswith('git@') or
                          ('@' in param and param.endswith('.git')))
    return looks_like_git_ssh
| 274 | |||
