summaryrefslogtreecommitdiffstats
path: root/bitbake/lib/bb/utils.py
diff options
context:
space:
mode:
Diffstat (limited to 'bitbake/lib/bb/utils.py')
-rw-r--r--bitbake/lib/bb/utils.py878
1 files changed, 878 insertions, 0 deletions
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py
new file mode 100644
index 0000000..0be45e1
--- /dev/null
+++ b/bitbake/lib/bb/utils.py
@@ -0,0 +1,878 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake Utility Functions
5"""
6
7# Copyright (C) 2004 Michael Lauer
8#
9# This program is free software; you can redistribute it and/or modify
10# it under the terms of the GNU General Public License version 2 as
11# published by the Free Software Foundation.
12#
13# This program is distributed in the hope that it will be useful,
14# but WITHOUT ANY WARRANTY; without even the implied warranty of
15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16# GNU General Public License for more details.
17#
18# You should have received a copy of the GNU General Public License along
19# with this program; if not, write to the Free Software Foundation, Inc.,
20# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21
22import re, fcntl, os, string, stat, shutil, time
23import sys
24import errno
25import logging
26import bb
27import bb.msg
28import multiprocessing
29import fcntl
30import subprocess
31import glob
32import traceback
33import errno
34from commands import getstatusoutput
35from contextlib import contextmanager
36
# Module-level logger shared by every helper in this file.
logger = logging.getLogger("BitBake.Util")
38
def clean_context():
    """Return a fresh, minimal globals dictionary for sandboxed execution.

    Only the modules BitBake-authored python fragments conventionally
    expect (os, bb, time) are exposed.
    """
    return dict(os=os, bb=bb, time=time)
45
def get_context():
    """Return the shared globals used by better_exec()/better_eval()."""
    return _context
48
49
def set_context(ctx):
    """Replace the shared globals used by better_exec()/better_eval().

    The ``global`` declaration is essential: without it the assignment
    only created a function-local variable, so the module-level
    ``_context`` was never actually updated and this function was a no-op.
    """
    global _context
    _context = ctx
52
# Context used in better_exec, eval; built once at import time and
# replaceable at runtime via set_context().
_context = clean_context()
55
def explode_version(s):
    """Break a version string into a list of comparable components.

    Each component is a (weight, value) tuple: (0, int) for numeric runs,
    (1, str) for alphabetic runs, (-1, '~') for a tilde (sorts before
    everything else) and (2, char) for any other single character.
    """
    r = []
    # Raw strings avoid invalid-escape warnings for \d on Python 3.
    alpha_regexp = re.compile(r'^([a-zA-Z]+)(.*)$')
    numeric_regexp = re.compile(r'^(\d+)(.*)$')
    while (s != ''):
        if s[0] in string.digits:
            m = numeric_regexp.match(s)
            r.append((0, int(m.group(1))))
            s = m.group(2)
            continue
        # string.ascii_letters instead of string.letters: the latter is
        # locale-dependent and removed in Python 3, while the regexp below
        # only ever matches ASCII letters anyway.
        if s[0] in string.ascii_letters:
            m = alpha_regexp.match(s)
            r.append((1, m.group(1)))
            s = m.group(2)
            continue
        if s[0] == '~':
            r.append((-1, s[0]))
        else:
            r.append((2, s[0]))
        s = s[1:]
    return r
77
def split_version(s):
    """Split a version string into its constituent parts (PE, PV, PR)."""
    s = s.strip(" <>=")
    e = 0
    if ':' in s:
        fields = s.split(":")
        # Note: only the segment immediately after the first ':' is kept,
        # mirroring the historic behaviour of this function.
        e = int(fields[0])
        s = fields[1]
    r = ""
    if '-' in s:
        s, r = s.rsplit("-", 1)
    return (e, s, r)
91
def vercmp_part(a, b):
    """Compare two version fragments using explode_version() ordering.

    Returns -1, 0 or 1 as a is older than, equal to, or newer than b.
    """
    va = explode_version(a)
    vb = explode_version(b)
    while True:
        if va == []:
            (oa, ca) = (0, None)
        else:
            (oa, ca) = va.pop(0)
        if vb == []:
            (ob, cb) = (0, None)
        else:
            (ob, cb) = vb.pop(0)
        if (oa, ca) == (0, None) and (ob, cb) == (0, None):
            return 0
        if oa < ob:
            return -1
        elif oa > ob:
            return 1
        # oa == ob from here on, so ca and cb have the same type unless one
        # side ran out of components (None). Python 3 refuses to order None
        # against other types, so spell out the historic Python 2 behaviour
        # explicitly: the exhausted (shorter) version sorts first.
        elif ca is None:
            return -1
        elif cb is None:
            return 1
        elif ca < cb:
            return -1
        elif ca > cb:
            return 1
114
def vercmp(ta, tb):
    """Compare two split version tuples (epoch, version, revision)."""
    ea, va, ra = ta
    eb, vb, rb = tb
    # Epoch dominates; fall through to version, then to revision.
    diff = int(ea or 0) - int(eb or 0)
    if diff != 0:
        return diff
    diff = vercmp_part(va, vb)
    if diff != 0:
        return diff
    return vercmp_part(ra, rb)
125
def vercmp_string(a, b):
    """Compare two version strings, returning -1, 0 or 1."""
    return vercmp(split_version(a), split_version(b))
130
def explode_deps(s):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    and return a list of dependencies.
    Version information is ignored.
    """
    deps = []
    in_version = False
    for token in s.split():
        if token.startswith('('):
            in_version = True
        if not in_version:
            deps.append(token)
        # Version constraints are dropped entirely; leave constraint mode
        # once the closing parenthesis is seen.
        if in_version and token.endswith(')'):
            in_version = False
    return deps
154
def explode_dep_versions2(s):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    and return a dictionary of dependencies and versions.
    """
    r = {}
    # Commas are pure separators; tokens are then whitespace-split.
    l = s.replace(",", "").split()
    lastdep = None  # dependency name the current constraint applies to
    lastcmp = ""  # comparison operator of the constraint being parsed
    lastver = ""  # version text accumulated so far
    incmp = False  # next token should carry the comparison operator
    inversion = False  # currently inside a "(...)" version constraint
    for i in l:
        if i[0] == '(':
            incmp = True
            i = i[1:].strip()
            if not i:
                continue

        if incmp:
            incmp = False
            inversion = True
            # This list is based on behavior and supported comparisons from deb, opkg and rpm.
            #
            # Even though =<, <<, ==, !=, =>, and >> may not be supported,
            # we list each possibly valid item.
            # The build system is responsible for validation of what it supports.
            if i.startswith(('<=', '=<', '<<', '==', '!=', '>=', '=>', '>>')):
                lastcmp = i[0:2]
                i = i[2:]
            elif i.startswith(('<', '>', '=')):
                lastcmp = i[0:1]
                i = i[1:]
            else:
                # This is an unsupported case!
                lastcmp = (i or "")
                i = ""
            # NOTE(review): this strip() discards its result and is a no-op;
            # harmless here because split() tokens carry no surrounding
            # whitespace, but 'i = i.strip()' was probably intended.
            i.strip()
            if not i:
                continue

        if inversion:
            if i.endswith(')'):
                i = i[:-1] or ""
                inversion = False
                if lastver and i:
                    lastver += " "
            if i:
                lastver += i
                if lastdep not in r:
                    r[lastdep] = []
                # NOTE(review): for multi-word versions this appends once per
                # token, leaving partial entries (e.g. ">= 1.0" and then
                # ">= 1.0 beta") in the list — confirm which entry callers use.
                r[lastdep].append(lastcmp + " " + lastver)
            continue

        #if not inversion:
        lastdep = i
        lastver = ""
        lastcmp = ""
        # Only reset an existing entry if it has no recorded constraints yet.
        if not (i in r and r[i]):
            r[lastdep] = []

    return r
218
def explode_dep_versions(s):
    """
    Like explode_dep_versions2(), but flattened to at most one constraint
    per dependency: unconstrained entries map to None and multiple
    constraints collapse to the first one (with a warning).
    """
    collapsed = explode_dep_versions2(s)
    for name in collapsed:
        versions = collapsed[name]
        if not versions:
            collapsed[name] = None
            continue
        if len(versions) > 1:
            bb.warn("explode_dep_versions(): Item %s appeared in dependency string '%s' multiple times with different values. explode_dep_versions cannot cope with this." % (name, s))
        collapsed[name] = versions[0]
    return collapsed
229
def join_deps(deps, commasep=True):
    """
    Take the result from explode_dep_versions and generate a dependency string
    """
    pieces = []
    for name in deps:
        constraint = deps[name]
        if not constraint:
            pieces.append(name)
        elif isinstance(constraint, list):
            # explode_dep_versions2() style value: one entry per constraint.
            pieces.extend("%s (%s)" % (name, v) for v in constraint)
        else:
            pieces.append("%s (%s)" % (name, constraint))
    separator = ", " if commasep else " "
    return separator.join(pieces)
248
249def _print_trace(body, line):
250 """
251 Print the Environment of a Text Body
252 """
253 error = []
254 # print the environment of the method
255 min_line = max(1, line-4)
256 max_line = min(line + 4, len(body))
257 for i in range(min_line, max_line + 1):
258 if line == i:
259 error.append(' *** %.4d:%s' % (i, body[i-1].rstrip()))
260 else:
261 error.append(' %.4d:%s' % (i, body[i-1].rstrip()))
262 return error
263
def better_compile(text, file, realfile, mode = "exec"):
    """
    A better compile method. This method
    will print the offending lines.

    Returns the compiled code object; on failure logs a report and raises
    bb.BBHandledException wrapping the original error.
    """
    try:
        return compile(text, file, mode)
    except Exception as e:
        error = []
        # split the text into lines again
        body = text.split('\n')
        error.append("Error in compiling python function in %s:\n" % realfile)
        # Not every exception raised by compile() carries a line number
        # (e.g. TypeError/ValueError do not), so probe for it rather than
        # assume a SyntaxError.
        if getattr(e, "lineno", None):
            error.append("The code lines resulting in this error were:")
            error.extend(_print_trace(body, e.lineno))
        else:
            error.append("The function causing this error was:")
            error.extend(body)
        error.append("%s: %s" % (e.__class__.__name__, str(e)))

        logger.error("\n".join(error))

        e = bb.BBHandledException(e)
        raise e
289
def _print_exception(t, value, tb, realfile, text, context):
    """Format and log a traceback for a failure inside python code that
    BitBake compiled and executed itself (see better_exec)."""
    error = []
    try:
        exception = traceback.format_exception_only(t, value)
        error.append('Error executing a python function in %s:\n' % realfile)

        # Strip 'us' from the stack (better_exec call)
        tb = tb.tb_next

        textarray = text.split('\n')

        linefailed = tb.tb_lineno

        tbextract = traceback.extract_tb(tb)
        tbformat = traceback.format_list(tbextract)
        error.append("The stack trace of python calls that resulted in this exception/failure was:")
        error.append("File: '%s', lineno: %s, function: %s" % (tbextract[0][0], tbextract[0][1], tbextract[0][2]))
        error.extend(_print_trace(textarray, linefailed))

        # See if this is a function we constructed and has calls back into other functions in
        # "text". If so, try and improve the context of the error by diving down the trace
        level = 0
        nexttb = tb.tb_next
        while nexttb is not None and (level+1) < len(tbextract):
            error.append("File: '%s', lineno: %s, function: %s" % (tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2]))
            # NOTE(review): the second clause compares a function name against
            # the previous frame's *file* name; presumably
            # 'tbextract[level+1][2] == tbextract[level][2]' was intended.
            if tbextract[level][0] == tbextract[level+1][0] and tbextract[level+1][2] == tbextract[level][0]:
                # The code was possibly in the string we compiled ourselves
                error.extend(_print_trace(textarray, tbextract[level+1][1]))
            elif tbextract[level+1][0].startswith("/"):
                # The code looks like it might be in a file, try and load it
                try:
                    with open(tbextract[level+1][0], "r") as f:
                        text = f.readlines()
                    error.extend(_print_trace(text, tbextract[level+1][1]))
                except:
                    error.append(tbformat[level+1])
            elif "d" in context and tbextract[level+1][2]:
                # Try and find the code in the datastore based on the functionname
                d = context["d"]
                functionname = tbextract[level+1][2]
                text = d.getVar(functionname, True)
                if text:
                    error.extend(_print_trace(text.split('\n'), tbextract[level+1][1]))
                else:
                    error.append(tbformat[level+1])
            else:
                error.append(tbformat[level+1])
            # NOTE(review): 'nexttb' is re-read from tb.tb_next every pass, so
            # it never advances; the loop terminates only through the 'level'
            # bound. 'nexttb = nexttb.tb_next' was probably intended.
            nexttb = tb.tb_next
            level = level + 1

        error.append("Exception: %s" % ''.join(exception))
    finally:
        # Log whatever was accumulated, even if formatting itself failed.
        logger.error("\n".join(error))
343
def better_exec(code, context, text = None, realfile = "<code>"):
    """
    Similiar to better_compile, better_exec will
    print the lines that are responsible for the
    error.
    """
    import bb.parse
    if not text:
        text = code
    if not hasattr(code, "co_filename"):
        # Plain source string: compile it first (with nice error reporting).
        code = better_compile(code, realfile, realfile)
    try:
        exec(code, get_context(), context)
    except bb.BBHandledException:
        # Error already shown so passthrough
        raise
    except Exception as e:
        (t, value, tb) = sys.exc_info()

        # Deliberate control-flow exceptions are propagated untouched.
        if t in [bb.parse.SkipPackage, bb.build.FuncFailed]:
            raise
        try:
            _print_exception(t, value, tb, realfile, text, context)
        except Exception as e:
            # NOTE(review): this inner 'as e' shadows the outer exception and
            # Python 3 deletes the name when this handler exits, so the
            # wrapping below would raise NameError if the printer itself
            # failed — confirm before relying on this path.
            logger.error("Exception handler error: %s" % str(e))

        e = bb.BBHandledException(e)
        raise e
372
def simple_exec(code, context):
    """Execute 'code' with the shared globals and the given locals, with no
    error decoration (contrast with better_exec)."""
    globals_ = get_context()
    exec(code, globals_, context)
375
def better_eval(source, locals):
    """Evaluate 'source' with the shared globals and the given locals."""
    globals_ = get_context()
    return eval(source, globals_, locals)
378
@contextmanager
def fileslocked(files):
    """Context manager for locking and unlocking file locks.

    Acquires a lock for every name in 'files' (in order) before yielding
    and guarantees release afterwards.
    """
    locks = []
    if files:
        for lockfile in files:
            locks.append(bb.utils.lockfile(lockfile))

    try:
        yield
    finally:
        # Release even if the managed block raised; without the try/finally
        # an exception would leave the lock files held for the life of the
        # process.
        for lock in locks:
            bb.utils.unlockfile(lock)
391
def lockfile(name, shared=False, retry=True):
    """
    Use the file fn as a lock file, return when the lock has been acquired.
    Returns a variable to pass to unlockfile().

    shared: take a shared (reader) lock instead of an exclusive one.
    retry: block until the lock is obtained; if False, return None when the
    lock cannot be taken immediately.
    """
    dirname = os.path.dirname(name)
    mkdirhier(dirname)

    # Fail hard and early: a lock file we cannot create would otherwise
    # spin forever in the loop below.
    if not os.access(dirname, os.W_OK):
        logger.error("Unable to acquire lock '%s', directory is not writable",
                     name)
        sys.exit(1)

    op = fcntl.LOCK_EX
    if shared:
        op = fcntl.LOCK_SH
    if not retry:
        # Non-blocking: flock() raises instead of waiting when contended.
        op = op | fcntl.LOCK_NB

    while True:
        # If we leave the lockfiles lying around there is no problem
        # but we should clean up after ourselves. This gives potential
        # for races though. To work around this, when we acquire the lock
        # we check the file we locked was still the lock file on disk.
        # by comparing inode numbers. If they don't match or the lockfile
        # no longer exists, we start again.

        # This implementation is unfair since the last person to request the
        # lock is the most likely to win it.

        try:
            lf = open(name, 'a+')
            fileno = lf.fileno()
            fcntl.flock(fileno, op)
            statinfo = os.fstat(fileno)
            if os.path.exists(lf.name):
                statinfo2 = os.stat(lf.name)
                if statinfo.st_ino == statinfo2.st_ino:
                    return lf
            # Lock file was replaced/removed underneath us: retry.
            lf.close()
        except Exception:
            # 'lf' may be unbound if open() itself failed; the resulting
            # NameError is swallowed by the inner except as well.
            try:
                lf.close()
            except Exception:
                pass
            pass
        if not retry:
            return None
440
def unlockfile(lf):
    """
    Unlock a file locked using lockfile()
    """
    # If we held a shared lock we must upgrade to an exclusive one before it
    # is safe to delete the lock file. This is best effort only, so failures
    # (e.g. another shared holder still present) are ignored and the file is
    # simply left behind.
    try:
        fcntl.flock(lf.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
    except (IOError, OSError):
        pass
    else:
        try:
            os.unlink(lf.name)
        except (IOError, OSError):
            pass
    fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
    lf.close()
454
def md5_file(filename):
    """
    Return the hex string representation of the MD5 checksum of filename.
    """
    # hashlib has been available since Python 2.5; the old 'md5' module
    # fallback is long obsolete and the module no longer exists on Python 3.
    import hashlib
    m = hashlib.md5()

    # Read fixed-size chunks: iterating by line can pull arbitrarily large
    # amounts into memory for binary files that contain few newlines.
    with open(filename, "rb") as f:
        for chunk in iter(lambda: f.read(8192), b''):
            m.update(chunk)
    return m.hexdigest()
470
def sha256_file(filename):
    """
    Return the hex string representation of the 256-bit SHA checksum of
    filename. On Python 2.4 this will return None, so callers will need to
    handle that by either skipping SHA checks, or running a standalone sha256sum
    binary.
    """
    try:
        import hashlib
    except ImportError:
        # Ancient interpreter without hashlib: signal "unsupported".
        return None

    checksum = hashlib.sha256()
    with open(filename, "rb") as f:
        for chunk in f:
            checksum.update(chunk)
    return checksum.hexdigest()
488
def preserved_envvars_exported():
    """Variables which are taken from the environment and placed in and exported
    from the metadata"""
    return [
        'BB_TASKHASH', 'HOME', 'LOGNAME', 'PATH',
        'PWD', 'SHELL', 'TERM', 'USER',
    ]
502
def preserved_envvars():
    """Variables which are taken from the environment and placed in the metadata"""
    bitbake_own = [
        'BBPATH',
        'BB_PRESERVE_ENV',
        'BB_ENV_WHITELIST',
        'BB_ENV_EXTRAWHITE',
    ]
    return bitbake_own + preserved_envvars_exported()
512
def filter_environment(good_vars):
    """
    Create a pristine environment for bitbake. This will remove variables that
    are not known and may influence the build in a negative way.

    Returns a dict of the removed variables, keyed by name.
    """

    removed_vars = {}
    # Iterate over a snapshot of the keys: deleting from os.environ while
    # iterating its live key view raises RuntimeError on Python 3.
    for key in list(os.environ.keys()):
        if key in good_vars:
            continue

        removed_vars[key] = os.environ[key]
        os.unsetenv(key)
        del os.environ[key]

    if len(removed_vars):
        # logger.debug(1, ...) follows BitBake's custom debug-level signature.
        logger.debug(1, "Removed the following variables from the environment: %s", ", ".join(removed_vars.keys()))

    return removed_vars
532
def approved_variables():
    """
    Determine and return the list of whitelisted variables which are approved
    to remain in the envrionment.
    """
    # Explicit request to keep everything.
    if 'BB_PRESERVE_ENV' in os.environ:
        return os.environ.keys()
    if 'BB_ENV_WHITELIST' in os.environ:
        approved = os.environ['BB_ENV_WHITELIST'].split()
        approved.extend(['BB_ENV_WHITELIST'])
    else:
        approved = preserved_envvars()
        if 'BB_ENV_EXTRAWHITE' in os.environ:
            approved.extend(os.environ['BB_ENV_EXTRAWHITE'].split())
            if 'BB_ENV_EXTRAWHITE' not in approved:
                approved.extend(['BB_ENV_EXTRAWHITE'])
    return approved
551
def clean_environment():
    """
    Clean up any spurious environment variables. This will remove any
    variables the user hasn't chosen to preserve.

    Returns the dict of removed variables ({} when the environment is
    preserved wholesale via BB_PRESERVE_ENV).
    """
    if 'BB_PRESERVE_ENV' in os.environ:
        return {}
    return filter_environment(approved_variables())
562
def empty_environment():
    """
    Remove all variables from the environment.
    """
    # Iterate over a snapshot: mutating os.environ while iterating its live
    # key view raises RuntimeError on Python 3.
    for s in list(os.environ.keys()):
        os.unsetenv(s)
        del os.environ[s]
570
def build_environment(d):
    """
    Build an environment from all exported variables.
    """
    import bb.data
    for var in bb.data.keys(d):
        # Only variables flagged for export make it into os.environ.
        if d.getVarFlag(var, "export"):
            os.environ[var] = d.getVar(var, True) or ""
580
def remove(path, recurse=False):
    """Equivalent to rm -f or rm -rf"""
    if not path:
        return
    matches = glob.glob(path)
    if recurse:
        # shutil.rmtree(name) would be ideal but its too slow
        subprocess.call(['rm', '-rf'] + matches)
        return
    for name in matches:
        try:
            os.unlink(name)
        except OSError as exc:
            # A file vanishing underneath us is fine; anything else is not.
            if exc.errno != errno.ENOENT:
                raise
595
def prunedir(topdir):
    """Delete everything reachable from the directory named in 'topdir'.

    CAUTION: This is dangerous!
    """
    for root, dirs, files in os.walk(topdir, topdown = False):
        for name in files:
            os.remove(os.path.join(root, name))
        for name in dirs:
            entry = os.path.join(root, name)
            # Symlinked directories must be unlinked, not rmdir'ed.
            if os.path.islink(entry):
                os.remove(entry)
            else:
                os.rmdir(entry)
    os.rmdir(topdir)
608
609#
610# Could also use return re.compile("(%s)" % "|".join(map(re.escape, suffixes))).sub(lambda mo: "", var)
611# but thats possibly insane and suffixes is probably going to be small
612#
def prune_suffix(var, suffixes, d):
    """Return 'var' with the first matching suffix removed from its end.

    The datastore 'd' is unused but kept for call compatibility.
    """
    for suffix in suffixes:
        if suffix and var.endswith(suffix):
            # Slice only the trailing occurrence off. The previous
            # str.replace() also removed any earlier occurrences of the
            # suffix inside the string (e.g. "foo-native-native" lost both).
            return var[:-len(suffix)]
    return var
620
def mkdirhier(directory):
    """Create a directory like 'mkdir -p', but does not complain if
    directory already exists like os.makedirs
    """

    try:
        os.makedirs(directory)
    except OSError as e:
        if e.errno != errno.EEXIST:
            # Bare 'raise' preserves the original traceback; 'raise e'
            # truncated it to this frame.
            raise
631
def movefile(src, dest, newmtime = None, sstat = None):
    """Moves a file from src to dest, preserving all permissions and
    attributes; mtime will be preserved even when moving across
    filesystems. Returns true on success and false on failure. Move is
    atomic.

    newmtime: mtime to stamp onto dest (defaults to src's mtime).
    sstat: pre-computed os.lstat(src) result, to save a stat call.
    Returns the resulting mtime (or lstat result for symlinks) on success,
    None on failure.
    """

    #print "movefile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        print("movefile: Stating source file failed...", e)
        return None

    destexists = 1
    try:
        dstat = os.lstat(dest)
    except:
        # When dest does not exist, dstat is the *parent directory's* stat;
        # it is only used for the device comparison below.
        dstat = os.lstat(os.path.dirname(dest))
        destexists = 0

    if destexists:
        if stat.S_ISLNK(dstat[stat.ST_MODE]):
            # Replace an existing symlink at dest; best effort.
            try:
                os.unlink(dest)
                destexists = 0
            except Exception as e:
                pass

    if stat.S_ISLNK(sstat[stat.ST_MODE]):
        # Source is a symlink: recreate the link at dest rather than copy.
        try:
            target = os.readlink(src)
            if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
                os.unlink(dest)
            os.symlink(target, dest)
            #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
            os.unlink(src)
            return os.lstat(dest)
        except Exception as e:
            print("movefile: failed to properly create symlink:", dest, "->", target, e)
            return None

    renamefailed = 1
    # Same device: a plain rename is atomic and cheap.
    if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]:
        try:
            os.rename(src, dest)
            renamefailed = 0
        except Exception as e:
            # NOTE(review): indexing an exception (e[0]) only works on
            # Python 2; Python 3 needs e.errno here.
            if e[0] != errno.EXDEV:
                # Some random error.
                print("movefile: Failed to move", src, "to", dest, e)
                return None
            # Invalid cross-device-link 'bind' mounted or actually Cross-Device

    if renamefailed:
        didcopy = 0
        if stat.S_ISREG(sstat[stat.ST_MODE]):
            try: # For safety copy then move it over.
                shutil.copyfile(src, dest + "#new")
                os.rename(dest + "#new", dest)
                didcopy = 1
            except Exception as e:
                print('movefile: copy', src, '->', dest, 'failed.', e)
                return None
        else:
            #we don't yet handle special, so we need to fall back to /bin/mv
            # NOTE(review): hand-rolled single-quoting breaks for paths that
            # themselves contain a quote; subprocess with an argv list would
            # be safer.
            a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'")
            if a[0] != 0:
                print("movefile: Failed to move special file:" + src + "' to '" + dest + "'", a)
                return None # failure
        try:
            if didcopy:
                os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
                os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
            os.unlink(src)
        except Exception as e:
            print("movefile: Failed to chown/chmod/unlink", dest, e)
            return None

    if newmtime:
        os.utime(dest, (newmtime, newmtime))
    else:
        # Carry the source timestamps over to the destination.
        os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
        newmtime = sstat[stat.ST_MTIME]
    return newmtime
718
def copyfile(src, dest, newmtime = None, sstat = None):
    """
    Copies a file from src to dest, preserving all permissions and
    attributes; mtime will be preserved even when moving across
    filesystems. Returns true on success and false on failure.

    newmtime: mtime to stamp onto dest (defaults to src's mtime).
    sstat: pre-computed os.lstat(src) result, to save a stat call.
    Returns the resulting mtime (or lstat result for symlinks) on success,
    False on failure.
    """
    #print "copyfile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        logger.warn("copyfile: stat of %s failed (%s)" % (src, e))
        return False

    destexists = 1
    try:
        dstat = os.lstat(dest)
    except:
        # When dest does not exist, dstat falls back to the parent
        # directory's stat.
        dstat = os.lstat(os.path.dirname(dest))
        destexists = 0

    if destexists:
        if stat.S_ISLNK(dstat[stat.ST_MODE]):
            # Replace an existing symlink at dest; best effort.
            try:
                os.unlink(dest)
                destexists = 0
            except Exception as e:
                pass

    if stat.S_ISLNK(sstat[stat.ST_MODE]):
        # Source is a symlink: recreate the link at dest rather than copy.
        try:
            target = os.readlink(src)
            if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
                os.unlink(dest)
            os.symlink(target, dest)
            #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
            return os.lstat(dest)
        except Exception as e:
            logger.warn("copyfile: failed to create symlink %s to %s (%s)" % (dest, target, e))
            return False

    if stat.S_ISREG(sstat[stat.ST_MODE]):
        try:
            srcchown = False
            if not os.access(src, os.R_OK):
                # Make sure we can read it
                srcchown = True
                os.chmod(src, sstat[stat.ST_MODE] | stat.S_IRUSR)

            # For safety copy then move it over.
            shutil.copyfile(src, dest + "#new")
            os.rename(dest + "#new", dest)
        except Exception as e:
            logger.warn("copyfile: copy %s to %s failed (%s)" % (src, dest, e))
            return False
        finally:
            # Restore the source's original mode/times if we had to chmod it
            # readable above; runs on both the success and failure paths.
            if srcchown:
                os.chmod(src, sstat[stat.ST_MODE])
                os.utime(src, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))

    else:
        #we don't yet handle special, so we need to fall back to /bin/mv
        # NOTE(review): hand-rolled single-quoting breaks for paths containing
        # a quote; subprocess with an argv list would be safer.
        a = getstatusoutput("/bin/cp -f " + "'" + src + "' '" + dest + "'")
        if a[0] != 0:
            logger.warn("copyfile: failed to copy special file %s to %s (%s)" % (src, dest, a))
            return False # failure
    try:
        os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
        os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
    except Exception as e:
        logger.warn("copyfile: failed to chown/chmod %s (%s)" % (dest, e))
        return False

    if newmtime:
        os.utime(dest, (newmtime, newmtime))
    else:
        # Carry the source timestamps over to the destination.
        os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
        newmtime = sstat[stat.ST_MTIME]
    return newmtime
798
def which(path, item, direction = 0, history = False):
    """
    Locate a file in a PATH

    direction: 0 searches entries first-to-last, anything else reverses.
    history: when True, also return the list of candidate paths probed.
    """
    hist = []
    entries = (path or "").split(':')
    if direction != 0:
        entries.reverse()

    for directory in entries:
        candidate = os.path.join(directory, item)
        hist.append(candidate)
        if os.path.exists(candidate):
            if not os.path.isabs(candidate):
                candidate = os.path.abspath(candidate)
            return (candidate, hist) if history else candidate

    # Not found anywhere on the path.
    return ("", hist) if history else ""
822
def to_boolean(string, default=None):
    """Interpret a yes/no style configuration string as a bool.

    Empty/None input yields 'default'; unrecognised values raise ValueError.
    """
    if not string:
        return default

    value = string.lower()
    if value in ("y", "yes", "1", "true"):
        return True
    if value in ("n", "no", "0", "false"):
        return False
    raise ValueError("Invalid value for to_boolean: %s" % string)
834
def contains(variable, checkvalues, truevalue, falsevalue, d):
    """Return 'truevalue' if every item of 'checkvalues' appears in the
    space-separated value of 'variable' in datastore 'd', else 'falsevalue'.

    checkvalues may be a whitespace-separated string or any iterable.
    """
    val = d.getVar(variable, True)
    if not val:
        return falsevalue
    val = set(val.split())
    # 'basestring' does not exist on Python 3, where it made every call to
    # this function fail with NameError; plain str covers the string case.
    if isinstance(checkvalues, str):
        checkvalues = set(checkvalues.split())
    else:
        checkvalues = set(checkvalues)
    if checkvalues.issubset(val):
        return truevalue
    return falsevalue
847
def cpu_count():
    """Return the number of CPUs reported by the host."""
    count = multiprocessing.cpu_count()
    return count
850
def nonblockingfd(fd):
    """Switch the given file descriptor into non-blocking mode."""
    flags = fcntl.fcntl(fd, fcntl.F_GETFL)
    fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
853
def process_profilelog(fn):
    """Post-process a cProfile dump 'fn' into a readable report.

    Writes time- and cumulative-sorted statistics plus caller information
    to '<fn>.processed'.
    """
    # Redirect stdout to capture profile information: pstats only knows how
    # to print to stdout.
    pout = open(fn + '.processed', 'w')
    so = sys.stdout.fileno()
    orig_so = os.dup(so)
    os.dup2(pout.fileno(), so)
    try:
        import pstats
        p = pstats.Stats(fn)
        p.sort_stats('time')
        p.print_stats()
        p.print_callers()
        p.sort_stats('cumulative')
        p.print_stats()
    finally:
        # Always restore stdout, even if the stats file is corrupt, and
        # close the duplicated descriptor so it does not leak.
        os.dup2(orig_so, so)
        os.close(orig_so)
        pout.flush()
        pout.close()
872
873#
874# Was present to work around multiprocessing pool bugs in python < 2.7.3
875#
def multiprocessingpool(*args, **kwargs):
    """Thin wrapper around multiprocessing.Pool, kept for call sites that
    relied on the old workaround for pool bugs in python < 2.7.3."""
    return multiprocessing.Pool(*args, **kwargs)