summaryrefslogtreecommitdiffstats
path: root/bitbake/lib/bb/utils.py
diff options
context:
space:
mode:
Diffstat (limited to 'bitbake/lib/bb/utils.py')
-rw-r--r--bitbake/lib/bb/utils.py916
1 files changed, 916 insertions, 0 deletions
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py
new file mode 100644
index 0000000000..2562db8e47
--- /dev/null
+++ b/bitbake/lib/bb/utils.py
@@ -0,0 +1,916 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake Utility Functions
5"""
6
7# Copyright (C) 2004 Michael Lauer
8#
9# This program is free software; you can redistribute it and/or modify
10# it under the terms of the GNU General Public License version 2 as
11# published by the Free Software Foundation.
12#
13# This program is distributed in the hope that it will be useful,
14# but WITHOUT ANY WARRANTY; without even the implied warranty of
15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16# GNU General Public License for more details.
17#
18# You should have received a copy of the GNU General Public License along
19# with this program; if not, write to the Free Software Foundation, Inc.,
20# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21
22import re, fcntl, os, string, stat, shutil, time
23import sys
24import errno
25import logging
26import bb
27import bb.msg
28import multiprocessing
29import fcntl
30import subprocess
31import glob
32import traceback
33import errno
34from commands import getstatusoutput
35from contextlib import contextmanager
36
37logger = logging.getLogger("BitBake.Util")
38
def clean_context():
    """Return a fresh, minimal globals dict for executing metadata python code."""
    return {
        "os": os,
        "bb": bb,
        "time": time,
    }
45
def get_context():
    """Return the dict used as the globals namespace by better_exec/better_eval."""
    return _context


def set_context(ctx):
    """Replace the shared execution context with 'ctx'.

    The previous implementation assigned to a function-local variable,
    which silently had no effect; the module-level _context must be
    rebound with 'global' for get_context() to see the change.
    """
    global _context
    _context = ctx
52
# Context used in better_exec, eval
# (a plain dict serving as the globals namespace; seeded with a minimal
# clean set of modules at import time, replaceable via set_context())
_context = clean_context()
55
def explode_version(s):
    """Break a version string into a list of typed, comparable components.

    Each element is a (weight, value) tuple: (0, int) for a digit run,
    (1, str) for an ASCII letter run, (-1, '~') for a tilde (sorts before
    everything else) and (2, char) for any other separator character.
    """
    r = []
    # Raw strings for the regexes; '\d' in a plain string is a deprecated escape.
    alpha_regexp = re.compile(r'^([a-zA-Z]+)(.*)$')
    numeric_regexp = re.compile(r'^(\d+)(.*)$')
    while (s != ''):
        if s[0] in string.digits:
            m = numeric_regexp.match(s)
            r.append((0, int(m.group(1))))
            s = m.group(2)
            continue
        # string.ascii_letters instead of the Python-2-only (and
        # locale-dependent) string.letters.
        if s[0] in string.ascii_letters:
            m = alpha_regexp.match(s)
            r.append((1, m.group(1)))
            s = m.group(2)
            continue
        if s[0] == '~':
            r.append((-1, s[0]))
        else:
            r.append((2, s[0]))
        s = s[1:]
    return r
77
def split_version(s):
    """Split a version string into its constituent parts (PE, PV, PR)"""
    s = s.strip(" <>=")
    epoch = 0
    if ':' in s:
        epoch = int(s.split(":")[0])
        s = s.split(":")[1]
    revision = ""
    if '-' in s:
        revision = s.rsplit("-", 1)[1]
        s = s.rsplit("-", 1)[0]
    return (epoch, s, revision)
91
def vercmp_part(a, b):
    """Compare two version-string fragments component-wise.

    Uses explode_version() ordering; returns -1, 0 or 1. A version that
    is a prefix of a longer one sorts earlier (e.g. "1" < "1.2").
    """
    va = explode_version(a)
    vb = explode_version(b)
    while True:
        if va == []:
            (oa, ca) = (0, None)
        else:
            (oa, ca) = va.pop(0)
        if vb == []:
            (ob, cb) = (0, None)
        else:
            (ob, cb) = vb.pop(0)
        if (oa, ca) == (0, None) and (ob, cb) == (0, None):
            return 0
        if oa < ob:
            return -1
        elif oa > ob:
            return 1
        # An exhausted side sorts first. Comparing None against an
        # int/str worked on Python 2 (None < everything) but raises
        # TypeError on Python 3, so handle it explicitly before the
        # ordered comparisons.
        elif ca is None:
            return -1
        elif cb is None:
            return 1
        elif ca < cb:
            return -1
        elif ca > cb:
            return 1
114
def vercmp(ta, tb):
    """Compare two (epoch, version, revision) tuples; returns <0, 0 or >0."""
    (ea, va, ra) = ta
    (eb, vb, rb) = tb

    # Epoch dominates, then version, then revision.
    result = int(ea or 0) - int(eb or 0)
    if result == 0:
        result = vercmp_part(va, vb)
    if result == 0:
        result = vercmp_part(ra, rb)
    return result
125
def vercmp_string(a, b):
    """Split two version strings and compare them; returns <0, 0 or >0."""
    return vercmp(split_version(a), split_version(b))
130
def explode_deps(s):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    and return a list of dependencies.
    Version information is ignored.
    """
    deps = []
    in_version = False
    for token in s.split():
        # A '(' opens a version constraint; everything up to the
        # closing ')' is skipped.
        if token.startswith('('):
            in_version = True
        if not in_version:
            deps.append(token)
        if in_version and token.endswith(')'):
            in_version = False
    return deps
154
def explode_dep_versions2(s):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    and return a dictionary of dependencies and versions.

    Each dependency name maps to a list of "<cmp> <version>" strings;
    the list is empty when no version constraint was given.
    """
    r = {}
    l = s.replace(",", "").split()
    lastdep = None
    lastcmp = ""
    lastver = ""
    incmp = False
    inversion = False
    for i in l:
        if i[0] == '(':
            incmp = True
            i = i[1:].strip()
            if not i:
                continue

        if incmp:
            incmp = False
            inversion = True
            # This list is based on behavior and supported comparisons from deb, opkg and rpm.
            #
            # Even though =<, <<, ==, !=, =>, and >> may not be supported,
            # we list each possibly valid item.
            # The build system is responsible for validation of what it supports.
            if i.startswith(('<=', '=<', '<<', '==', '!=', '>=', '=>', '>>')):
                lastcmp = i[0:2]
                i = i[2:]
            elif i.startswith(('<', '>', '=')):
                lastcmp = i[0:1]
                i = i[1:]
            else:
                # This is an unsupported case!
                lastcmp = (i or "")
                i = ""
            # Strings are immutable: the old bare "i.strip()" discarded
            # its result, so the stripped value must be assigned back.
            i = i.strip()
            if not i:
                continue

        if inversion:
            if i.endswith(')'):
                i = i[:-1] or ""
                inversion = False
                if lastver and i:
                    lastver += " "
            if i:
                lastver += i
                if lastdep not in r:
                    r[lastdep] = []
                r[lastdep].append(lastcmp + " " + lastver)
            continue

        #if not inversion:
        lastdep = i
        lastver = ""
        lastcmp = ""
        if not (i in r and r[i]):
            r[lastdep] = []

    return r
218
def explode_dep_versions(s):
    """
    Like explode_dep_versions2() but with at most one version constraint
    per dependency: the value is a single string or None. Warns and keeps
    only the first constraint when several are present.
    """
    r = explode_dep_versions2(s)
    for name in r:
        versions = r[name]
        if not versions:
            r[name] = None
            continue
        if len(versions) > 1:
            bb.warn("explode_dep_versions(): Item %s appeared in dependency string '%s' multiple times with different values. explode_dep_versions cannot cope with this." % (name, s))
        r[name] = versions[0]
    return r
229
def join_deps(deps, commasep=True):
    """
    Take the result from explode_dep_versions and generate a dependency string
    (comma separated by default, space separated otherwise).
    """
    pieces = []
    for name in deps:
        constraint = deps[name]
        if not constraint:
            pieces.append(name)
        elif isinstance(constraint, list):
            pieces.extend("%s (%s)" % (name, v) for v in constraint)
        else:
            pieces.append("%s (%s)" % (name, constraint))
    separator = ", " if commasep else " "
    return separator.join(pieces)
248
249def _print_trace(body, line):
250 """
251 Print the Environment of a Text Body
252 """
253 error = []
254 # print the environment of the method
255 min_line = max(1, line-4)
256 max_line = min(line + 4, len(body))
257 for i in range(min_line, max_line + 1):
258 if line == i:
259 error.append(' *** %.4d:%s' % (i, body[i-1].rstrip()))
260 else:
261 error.append(' %.4d:%s' % (i, body[i-1].rstrip()))
262 return error
263
def better_compile(text, file, realfile, mode = "exec"):
    """
    A better compile method. This method
    will print the offending lines.

    'file' is the filename baked into the code object; 'realfile' is the
    name used in the error report. Raises bb.BBHandledException after
    logging on failure.
    """
    try:
        return compile(text, file, mode)
    except Exception as e:
        error = []
        # split the text into lines again
        body = text.split('\n')
        error.append("Error in compiling python function in %s:\n" % realfile)
        # Not every compile-time exception carries a line number
        # (e.g. ValueError for source containing null bytes), so probe
        # for it safely instead of assuming a SyntaxError.
        lineno = getattr(e, "lineno", None)
        if lineno:
            error.append("The code lines resulting in this error were:")
            error.extend(_print_trace(body, lineno))
        else:
            error.append("The function causing this error was:")
            for line in body:
                error.append(line)
        error.append("%s: %s" % (e.__class__.__name__, str(e)))

        logger.error("\n".join(error))

        e = bb.BBHandledException(e)
        raise e
289
def _print_exception(t, value, tb, realfile, text, context):
    """Log a detailed report for an exception raised inside better_exec.

    't'/'value'/'tb' are the sys.exc_info() triple, 'text' is the source
    that was executed and 'context' the exec locals (may contain a
    datastore under "d"). Always logs via logger.error, returns nothing.
    """
    error = []
    try:
        exception = traceback.format_exception_only(t, value)
        error.append('Error executing a python function in %s:\n' % realfile)

        # Strip 'us' from the stack (better_exec call)
        tb = tb.tb_next

        textarray = text.split('\n')

        linefailed = tb.tb_lineno

        tbextract = traceback.extract_tb(tb)
        tbformat = traceback.format_list(tbextract)
        error.append("The stack trace of python calls that resulted in this exception/failure was:")
        error.append("File: '%s', lineno: %s, function: %s" % (tbextract[0][0], tbextract[0][1], tbextract[0][2]))
        error.extend(_print_trace(textarray, linefailed))

        # See if this is a function we constructed and has calls back into other functions in
        # "text". If so, try and improve the context of the error by diving down the trace
        level = 0
        nexttb = tb.tb_next
        while nexttb is not None and (level+1) < len(tbextract):
            error.append("File: '%s', lineno: %s, function: %s" % (tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2]))
            if tbextract[level][0] == tbextract[level+1][0] and tbextract[level+1][2] == tbextract[level][0]:
                # The code was possibly in the string we compiled ourselves
                error.extend(_print_trace(textarray, tbextract[level+1][1]))
            elif tbextract[level+1][0].startswith("/"):
                # The code looks like it might be in a file, try and load it
                try:
                    with open(tbextract[level+1][0], "r") as f:
                        text = f.readlines()
                        error.extend(_print_trace(text, tbextract[level+1][1]))
                except:
                    error.append(tbformat[level+1])
            elif "d" in context and tbextract[level+1][2]:
                # Try and find the code in the datastore based on the functionname
                d = context["d"]
                functionname = tbextract[level+1][2]
                text = d.getVar(functionname, True)
                if text:
                    error.extend(_print_trace(text.split('\n'), tbextract[level+1][1]))
                else:
                    error.append(tbformat[level+1])
            else:
                error.append(tbformat[level+1])
            # NOTE(review): this re-reads tb.tb_next every iteration instead
            # of advancing via nexttb.tb_next, so 'nexttb' never changes;
            # the loop still terminates via the 'level' bound on tbextract.
            # Looks like a latent bug - confirm intent before changing.
            nexttb = tb.tb_next
            level = level + 1

        error.append("Exception: %s" % ''.join(exception))
    finally:
        # Log whatever was accumulated even if report generation itself failed.
        logger.error("\n".join(error))
343
def better_exec(code, context, text = None, realfile = "<code>"):
    """
    Similar to better_compile, better_exec will
    print the lines that are responsible for the
    error.

    'code' may be source text or an already-compiled code object (detected
    via co_filename). Executes with the shared context as globals and
    'context' as locals. Raises bb.BBHandledException after logging any
    unexpected error.
    """
    import bb.parse
    if not text:
        text = code
    # Compile source text on the fly; a real code object is used as-is.
    if not hasattr(code, "co_filename"):
        code = better_compile(code, realfile, realfile)
    try:
        exec(code, get_context(), context)
    except (bb.BBHandledException, bb.parse.SkipRecipe, bb.build.FuncFailed, bb.data_smart.ExpansionError):
        # Error already shown so passthrough, no need for traceback
        raise
    except Exception as e:
        (t, value, tb) = sys.exc_info()
        # Report generation must never mask the original failure.
        try:
            _print_exception(t, value, tb, realfile, text, context)
        except Exception as e:
            logger.error("Exception handler error: %s" % str(e))

        e = bb.BBHandledException(e)
        raise e
369
def simple_exec(code, context):
    """Execute 'code' against the shared context globals with no error decoration."""
    exec(code, get_context(), context)
372
def better_eval(source, locals):
    """Evaluate 'source' against the shared context globals plus 'locals'."""
    return eval(source, get_context(), locals)
375
@contextmanager
def fileslocked(files):
    """Context manager for locking and unlocking file locks.

    Acquires a lock for every path in 'files' (in order) before entering
    the with-block and releases them all afterwards.
    """
    locks = []
    if files:
        for lockfile in files:
            locks.append(bb.utils.lockfile(lockfile))

    try:
        yield
    finally:
        # Release the locks even when the with-body raises; previously an
        # exception in the body leaked every acquired lock.
        for lock in locks:
            bb.utils.unlockfile(lock)
388
def lockfile(name, shared=False, retry=True):
    """
    Use the file fn as a lock file, return when the lock has been acquired.
    Returns a variable to pass to unlockfile().

    With shared=True a shared (read) lock is taken instead of exclusive.
    With retry=False the flock is non-blocking and None is returned when
    the lock could not be acquired immediately.
    """
    dirname = os.path.dirname(name)
    mkdirhier(dirname)

    # An unwritable directory means the lock file can never be created;
    # treat it as fatal rather than spinning forever below.
    if not os.access(dirname, os.W_OK):
        logger.error("Unable to acquire lock '%s', directory is not writable",
                     name)
        sys.exit(1)

    op = fcntl.LOCK_EX
    if shared:
        op = fcntl.LOCK_SH
    if not retry:
        op = op | fcntl.LOCK_NB

    while True:
        # If we leave the lockfiles lying around there is no problem
        # but we should clean up after ourselves. This gives potential
        # for races though. To work around this, when we acquire the lock
        # we check the file we locked was still the lock file on disk.
        # by comparing inode numbers. If they don't match or the lockfile
        # no longer exists, we start again.

        # This implementation is unfair since the last person to request the
        # lock is the most likely to win it.

        try:
            lf = open(name, 'a+')
            fileno = lf.fileno()
            fcntl.flock(fileno, op)
            statinfo = os.fstat(fileno)
            # Verify we still hold the file that exists on disk under
            # 'name' (another process may have unlinked/recreated it
            # between our open() and flock()).
            if os.path.exists(lf.name):
                statinfo2 = os.stat(lf.name)
                if statinfo.st_ino == statinfo2.st_ino:
                    return lf
            lf.close()
        except Exception:
            # open() or flock() failed (e.g. non-blocking lock busy);
            # close any handle we got and fall through to retry/give up.
            try:
                lf.close()
            except Exception:
                pass
            pass
        if not retry:
            return None
437
def unlockfile(lf):
    """
    Unlock a file locked using lockfile()
    """
    fd = lf.fileno()
    try:
        # Promote a shared lock to exclusive before deleting the lock
        # file; if that fails another holder exists, so leave the file.
        fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        os.unlink(lf.name)
    except (IOError, OSError):
        pass
    fcntl.flock(fd, fcntl.LOCK_UN)
    lf.close()
451
def md5_file(filename):
    """
    Return the hex string representation of the MD5 checksum of filename.
    """
    # hashlib is the modern interface; fall back to the pre-2.5 'md5'
    # module when it is unavailable.
    try:
        import hashlib
        digest = hashlib.md5()
    except ImportError:
        import md5
        digest = md5.new()

    with open(filename, "rb") as handle:
        for chunk in handle:
            digest.update(chunk)
    return digest.hexdigest()
467
def sha256_file(filename):
    """
    Return the hex string representation of the 256-bit SHA checksum of
    filename. On Python 2.4 this will return None, so callers will need to
    handle that by either skipping SHA checks, or running a standalone sha256sum
    binary.
    """
    try:
        import hashlib
    except ImportError:
        return None

    digest = hashlib.sha256()
    with open(filename, "rb") as handle:
        for chunk in handle:
            digest.update(chunk)
    return digest.hexdigest()
485
def preserved_envvars_exported():
    """Variables which are taken from the environment and placed in and exported
    from the metadata"""
    return [
        'BB_TASKHASH', 'HOME', 'LOGNAME', 'PATH',
        'PWD', 'SHELL', 'TERM', 'USER',
    ]
499
def preserved_envvars():
    """Variables which are taken from the environment and placed in the metadata"""
    bitbake_own = [
        'BBPATH',
        'BB_PRESERVE_ENV',
        'BB_ENV_WHITELIST',
        'BB_ENV_EXTRAWHITE',
    ]
    return bitbake_own + preserved_envvars_exported()
509
def filter_environment(good_vars):
    """
    Create a pristine environment for bitbake. This will remove variables that
    are not known and may influence the build in a negative way.

    Returns a dict mapping each removed variable name to its old value.
    """

    removed_vars = {}
    # Iterate a snapshot of the keys: deleting from os.environ while
    # iterating its live key view raises RuntimeError on Python 3.
    for key in list(os.environ.keys()):
        if key in good_vars:
            continue

        removed_vars[key] = os.environ[key]
        os.unsetenv(key)
        del os.environ[key]

    if len(removed_vars):
        logger.debug(1, "Removed the following variables from the environment: %s", ", ".join(removed_vars.keys()))

    return removed_vars
529
def approved_variables():
    """
    Determine and return the list of whitelisted variables which are approved
    to remain in the environment.
    """
    # BB_PRESERVE_ENV short-circuits all filtering: keep everything.
    if 'BB_PRESERVE_ENV' in os.environ:
        return os.environ.keys()
    if 'BB_ENV_WHITELIST' in os.environ:
        approved = os.environ['BB_ENV_WHITELIST'].split()
        approved.extend(['BB_ENV_WHITELIST'])
    else:
        approved = preserved_envvars()
        if 'BB_ENV_EXTRAWHITE' in os.environ:
            approved.extend(os.environ['BB_ENV_EXTRAWHITE'].split())
            if 'BB_ENV_EXTRAWHITE' not in approved:
                approved.extend(['BB_ENV_EXTRAWHITE'])
    return approved
548
def clean_environment():
    """
    Clean up any spurious environment variables. This will remove any
    variables the user hasn't chosen to preserve.
    """
    # With BB_PRESERVE_ENV set nothing is filtered and nothing is removed.
    if 'BB_PRESERVE_ENV' in os.environ:
        return {}
    return filter_environment(approved_variables())
559
def empty_environment():
    """
    Remove all variables from the environment.
    """
    # Snapshot the keys first: mutating os.environ while iterating its
    # live key view raises RuntimeError on Python 3.
    for s in list(os.environ.keys()):
        os.unsetenv(s)
        del os.environ[s]
567
def build_environment(d):
    """
    Build an environment from all exported variables.
    """
    import bb.data
    for key in bb.data.keys(d):
        # Only variables flagged for export make it into os.environ.
        if d.getVarFlag(key, "export"):
            os.environ[key] = d.getVar(key, True) or ""
577
578def _check_unsafe_delete_path(path):
579 """
580 Basic safeguard against recursively deleting something we shouldn't. If it returns True,
581 the caller should raise an exception with an appropriate message.
582 NOTE: This is NOT meant to be a security mechanism - just a guard against silly mistakes
583 with potentially disastrous results.
584 """
585 extra = ''
586 # HOME might not be /home/something, so in case we can get it, check against it
587 homedir = os.environ.get('HOME', '')
588 if homedir:
589 extra = '|%s' % homedir
590 if re.match('(/|//|/home|/home/[^/]*%s)$' % extra, os.path.abspath(path)):
591 return True
592 return False
593
def remove(path, recurse=False):
    """Equivalent to rm -f or rm -rf.

    'path' may be a glob pattern; every match is deleted. With
    recurse=True directories are removed too. Missing files are ignored
    (rm -f semantics).
    """
    if not path:
        return
    if recurse:
        if _check_unsafe_delete_path(path):
            raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % path)
        # shutil.rmtree(name) would be ideal but its too slow
        # Run the check and the deletion once for the whole pattern rather
        # than once per glob match as before - the repeated subprocess
        # calls were redundant.
        subprocess.call(['rm', '-rf'] + glob.glob(path))
        return
    for name in glob.glob(path):
        try:
            os.unlink(name)
        except OSError as exc:
            # Already-gone files are not an error.
            if exc.errno != errno.ENOENT:
                raise
611
def prunedir(topdir):
    """Delete everything reachable from the directory named in 'topdir'.
    CAUTION: This is dangerous!
    """
    if _check_unsafe_delete_path(topdir):
        raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir)
    # Walk bottom-up so children are gone before their parent is removed.
    for root, dirs, files in os.walk(topdir, topdown = False):
        for entry in files:
            os.remove(os.path.join(root, entry))
        for entry in dirs:
            full = os.path.join(root, entry)
            # Symlinked directories must be unlinked, not rmdir'd.
            if os.path.islink(full):
                os.remove(full)
            else:
                os.rmdir(full)
    os.rmdir(topdir)
626
627#
628# Could also use return re.compile("(%s)" % "|".join(map(re.escape, suffixes))).sub(lambda mo: "", var)
629# but thats possibly insane and suffixes is probably going to be small
630#
def prune_suffix(var, suffixes, d):
    """Return 'var' with the first matching suffix from 'suffixes' removed.

    Only the trailing occurrence is stripped: the previous
    str.replace() implementation removed EVERY occurrence of the
    suffix substring, mangling values like "native-foo-native".
    """
    for suffix in suffixes:
        # Guard against an empty suffix: ''.endswith is always True and
        # var[:-0] would wrongly return "".
        if suffix and var.endswith(suffix):
            return var[:-len(suffix)]
    return var
638
def mkdirhier(directory):
    """Create a directory like 'mkdir -p', but does not complain if
    directory already exists like os.makedirs
    """

    try:
        os.makedirs(directory)
    except OSError as e:
        # An already-existing path is fine; anything else is re-raised.
        if e.errno != errno.EEXIST:
            raise e
649
def movefile(src, dest, newmtime = None, sstat = None):
    """Moves a file from src to dest, preserving all permissions and
    attributes; mtime will be preserved even when moving across
    filesystems. Returns true on success and false on failure. Move is
    atomic.

    'sstat' may be a pre-computed os.lstat() of src; 'newmtime' overrides
    the mtime applied to dest. Returns the resulting mtime (or a stat
    result for symlinks) on success, None on failure.
    """

    #print "movefile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        print("movefile: Stating source file failed...", e)
        return None

    destexists = 1
    try:
        dstat = os.lstat(dest)
    except:
        # dest doesn't exist; stat its directory so the device
        # comparison below still works.
        dstat = os.lstat(os.path.dirname(dest))
        destexists = 0

    if destexists:
        # An existing symlink at dest is removed up front so it cannot
        # redirect the move.
        if stat.S_ISLNK(dstat[stat.ST_MODE]):
            try:
                os.unlink(dest)
                destexists = 0
            except Exception as e:
                pass

    # Symlink sources are recreated at dest rather than moved.
    if stat.S_ISLNK(sstat[stat.ST_MODE]):
        try:
            target = os.readlink(src)
            if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
                os.unlink(dest)
            os.symlink(target, dest)
            #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
            os.unlink(src)
            return os.lstat(dest)
        except Exception as e:
            print("movefile: failed to properly create symlink:", dest, "->", target, e)
            return None

    renamefailed = 1
    # Same device: a plain rename is atomic and preserves everything.
    if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]:
        try:
            os.rename(src, dest)
            renamefailed = 0
        except Exception as e:
            # NOTE(review): e[0] indexing only works on Python 2
            # exceptions; Python 3 would need e.errno - confirm before
            # porting.
            if e[0] != errno.EXDEV:
                # Some random error.
                print("movefile: Failed to move", src, "to", dest, e)
                return None
            # Invalid cross-device-link 'bind' mounted or actually Cross-Device

    if renamefailed:
        didcopy = 0
        if stat.S_ISREG(sstat[stat.ST_MODE]):
            try: # For safety copy then move it over.
                shutil.copyfile(src, dest + "#new")
                os.rename(dest + "#new", dest)
                didcopy = 1
            except Exception as e:
                print('movefile: copy', src, '->', dest, 'failed.', e)
                return None
        else:
            #we don't yet handle special, so we need to fall back to /bin/mv
            a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'")
            if a[0] != 0:
                print("movefile: Failed to move special file:" + src + "' to '" + dest + "'", a)
                return None # failure
        try:
            if didcopy:
                os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
                os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
            os.unlink(src)
        except Exception as e:
            print("movefile: Failed to chown/chmod/unlink", dest, e)
            return None

    if newmtime:
        os.utime(dest, (newmtime, newmtime))
    else:
        # Preserve the source timestamps on the destination.
        os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
        newmtime = sstat[stat.ST_MTIME]
    return newmtime
736
def copyfile(src, dest, newmtime = None, sstat = None):
    """
    Copies a file from src to dest, preserving all permissions and
    attributes; mtime will be preserved even when moving across
    filesystems. Returns true on success and false on failure.

    'sstat' may be a pre-computed os.lstat() of src; 'newmtime' overrides
    the mtime applied to dest. Returns the resulting mtime (or a stat
    result for symlinks) on success, False on failure.
    """
    #print "copyfile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        logger.warn("copyfile: stat of %s failed (%s)" % (src, e))
        return False

    destexists = 1
    try:
        dstat = os.lstat(dest)
    except:
        # dest doesn't exist; fall back to its directory's stat.
        dstat = os.lstat(os.path.dirname(dest))
        destexists = 0

    if destexists:
        # Remove an existing symlink at dest so it cannot redirect the copy.
        if stat.S_ISLNK(dstat[stat.ST_MODE]):
            try:
                os.unlink(dest)
                destexists = 0
            except Exception as e:
                pass

    # Symlink sources are recreated at dest, not copied through.
    if stat.S_ISLNK(sstat[stat.ST_MODE]):
        try:
            target = os.readlink(src)
            if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
                os.unlink(dest)
            os.symlink(target, dest)
            #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
            return os.lstat(dest)
        except Exception as e:
            logger.warn("copyfile: failed to create symlink %s to %s (%s)" % (dest, target, e))
            return False

    if stat.S_ISREG(sstat[stat.ST_MODE]):
        try:
            srcchown = False
            if not os.access(src, os.R_OK):
                # Make sure we can read it
                srcchown = True
                os.chmod(src, sstat[stat.ST_MODE] | stat.S_IRUSR)

            # For safety copy then move it over.
            shutil.copyfile(src, dest + "#new")
            os.rename(dest + "#new", dest)
        except Exception as e:
            logger.warn("copyfile: copy %s to %s failed (%s)" % (src, dest, e))
            return False
        finally:
            # Restore the source's original mode/timestamps if we had
            # to chmod it readable above.
            if srcchown:
                os.chmod(src, sstat[stat.ST_MODE])
                os.utime(src, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))

    else:
        #we don't yet handle special, so we need to fall back to /bin/mv
        a = getstatusoutput("/bin/cp -f " + "'" + src + "' '" + dest + "'")
        if a[0] != 0:
            logger.warn("copyfile: failed to copy special file %s to %s (%s)" % (src, dest, a))
            return False # failure
    try:
        os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
        os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
    except Exception as e:
        logger.warn("copyfile: failed to chown/chmod %s (%s)" % (dest, e))
        return False

    if newmtime:
        os.utime(dest, (newmtime, newmtime))
    else:
        # Preserve the source timestamps on the destination.
        os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
        newmtime = sstat[stat.ST_MTIME]
    return newmtime
816
def which(path, item, direction = 0, history = False):
    """
    Locate 'item' in the colon-separated 'path'.

    direction 0 searches front-to-back, anything else back-to-front.
    With history=True also return the list of every candidate tried.
    Returns "" (plus the history) when nothing is found.
    """
    attempts = []
    entries = (path or "").split(':')
    if direction != 0:
        entries.reverse()

    for entry in entries:
        candidate = os.path.join(entry, item)
        attempts.append(candidate)
        if os.path.exists(candidate):
            if not os.path.isabs(candidate):
                candidate = os.path.abspath(candidate)
            return (candidate, attempts) if history else candidate

    return ("", attempts) if history else ""
840
def to_boolean(string, default=None):
    """Convert a yes/no style string to bool; empty or None gives 'default'."""
    if not string:
        return default

    lowered = string.lower()
    if lowered in ("y", "yes", "1", "true"):
        return True
    if lowered in ("n", "no", "0", "false"):
        return False
    raise ValueError("Invalid value for to_boolean: %s" % string)
852
def contains(variable, checkvalues, truevalue, falsevalue, d):
    """Return 'truevalue' if every item of 'checkvalues' occurs in the
    whitespace-separated value of datastore variable 'variable', else
    'falsevalue'.

    'checkvalues' may be a string (split on whitespace) or an iterable
    of strings.
    """
    val = d.getVar(variable, True)
    if not val:
        return falsevalue
    val = set(val.split())
    # 'str' rather than the Python-2-only 'basestring' so this also works
    # under Python 3.
    if isinstance(checkvalues, str):
        checkvalues = set(checkvalues.split())
    else:
        checkvalues = set(checkvalues)
    if checkvalues.issubset(val):
        return truevalue
    return falsevalue
865
def contains_any(variable, checkvalues, truevalue, falsevalue, d):
    """Return 'truevalue' if at least one item of 'checkvalues' occurs in
    the whitespace-separated value of datastore variable 'variable', else
    'falsevalue'.

    'checkvalues' may be a string (split on whitespace) or an iterable
    of strings.
    """
    val = d.getVar(variable, True)
    if not val:
        return falsevalue
    val = set(val.split())
    # 'str' rather than the Python-2-only 'basestring' so this also works
    # under Python 3.
    if isinstance(checkvalues, str):
        checkvalues = set(checkvalues.split())
    else:
        checkvalues = set(checkvalues)
    if checkvalues & val:
        return truevalue
    return falsevalue
878
def cpu_count():
    """Number of CPUs usable for parallel work (multiprocessing's count)."""
    return multiprocessing.cpu_count()
881
def nonblockingfd(fd):
    """Switch file descriptor 'fd' to non-blocking I/O mode."""
    flags = fcntl.fcntl(fd, fcntl.F_GETFL)
    fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
884
def process_profilelog(fn):
    """Write a human-readable report of cProfile output 'fn' to 'fn'.processed.

    The report contains time-sorted stats, their callers, and
    cumulative-sorted stats.
    """
    import pstats
    # 'with' guarantees the report file is closed even if pstats raises;
    # the old explicit open()/close() leaked the handle on error.
    with open(fn + '.processed', 'w') as pout:
        p = pstats.Stats(fn, stream=pout)
        p.sort_stats('time')
        p.print_stats()
        p.print_callers()
        p.sort_stats('cumulative')
        p.print_stats()
        pout.flush()
898
899#
900# Was present to work around multiprocessing pool bugs in python < 2.7.3
901#
def multiprocessingpool(*args, **kwargs):
    """Create a multiprocessing.Pool whose iterators tolerate signals.

    Works around a multiprocessing bug (python < 2.7.3) where signals to
    the processes would be delayed until the work completed; putting an
    effectively infinite timeout on IMapIterator.next lets SIGINT/SIGTERM
    get processed.
    """

    import multiprocessing.pool
    #import multiprocessing.util
    #multiprocessing.util.log_to_stderr(10)
    def wrapper(func):
        def wrap(self, timeout=None):
            return func(self, timeout=timeout if timeout is not None else 1e100)
        return wrap
    # Patch only once: previously every call stacked another wrapper
    # around IMapIterator.next.
    if not getattr(multiprocessing.pool.IMapIterator, "_bb_signal_patch", False):
        multiprocessing.pool.IMapIterator.next = wrapper(multiprocessing.pool.IMapIterator.next)
        multiprocessing.pool.IMapIterator._bb_signal_patch = True

    return multiprocessing.Pool(*args, **kwargs)
916