Diffstat (limited to 'bitbake/lib/bb/build.py')
 -rw-r--r--  bitbake/lib/bb/build.py | 291 ++++++++++++++++++++++-----------------
 1 file changed, 151 insertions(+), 140 deletions(-)
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
index f4f897e41a..40839a81b5 100644
--- a/bitbake/lib/bb/build.py
+++ b/bitbake/lib/bb/build.py
@@ -20,10 +20,12 @@ import itertools
 import time
 import re
 import stat
+import datetime
 import bb
 import bb.msg
 import bb.process
 import bb.progress
+from io import StringIO
 from bb import data, event, utils
 
 bblogger = logging.getLogger('BitBake')
@@ -176,7 +178,9 @@ class StdoutNoopContextManager:
 
     @property
     def name(self):
-        return sys.stdout.name
+        if "name" in dir(sys.stdout):
+            return sys.stdout.name
+        return "<mem>"
 
 
 def exec_func(func, d, dirs = None):
@@ -193,6 +197,8 @@ def exec_func(func, d, dirs = None):
         for cdir in d.expand(cleandirs).split():
             bb.utils.remove(cdir, True)
             bb.utils.mkdirhier(cdir)
+            if cdir == oldcwd:
+                os.chdir(cdir)
 
     if flags and dirs is None:
         dirs = flags.get('dirs')
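
For illustration (not part of the diff): the added os.chdir() covers the case where one of the cleandirs is also the process's current working directory. Once that directory has been removed and recreated, the process is still attached to the old, deleted inode, so os.getcwd() fails until it chdirs back in. A minimal standalone sketch of that behaviour, assuming Linux semantics:

# Standalone sketch (assumes Linux): deleting and recreating the current
# working directory leaves the process pointing at the removed inode.
import os
import shutil
import tempfile

workdir = tempfile.mkdtemp()
os.chdir(workdir)

shutil.rmtree(workdir)    # analogous to bb.utils.remove(cdir, True)
os.makedirs(workdir)      # analogous to bb.utils.mkdirhier(cdir)

try:
    os.getcwd()
except FileNotFoundError:
    print("cwd is stale after remove + recreate")

os.chdir(workdir)         # what the new 'if cdir == oldcwd: os.chdir(cdir)' restores
print(os.getcwd())

os.chdir("/")
shutil.rmtree(workdir)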
@@ -295,9 +301,25 @@ def exec_func_python(func, d, runfile, cwd=None):
         lineno = int(d.getVarFlag(func, "lineno", False))
         bb.methodpool.insert_method(func, text, fn, lineno - 1)
 
-        comp = utils.better_compile(code, func, "exec_python_func() autogenerated")
-        utils.better_exec(comp, {"d": d}, code, "exec_python_func() autogenerated")
+        if verboseStdoutLogging:
+            sys.stdout.flush()
+            sys.stderr.flush()
+            currout = sys.stdout
+            currerr = sys.stderr
+            sys.stderr = sys.stdout = execio = StringIO()
+        comp = utils.better_compile(code, func, "exec_func_python() autogenerated")
+        utils.better_exec(comp, {"d": d}, code, "exec_func_python() autogenerated")
     finally:
+        if verboseStdoutLogging:
+            execio.flush()
+            logger.plain("%s" % execio.getvalue())
+            sys.stdout = currout
+            sys.stderr = currerr
+            execio.close()
+        # We want any stdout/stderr to be printed before any other log messages to make debugging
+        # more accurate. In some cases we seem to lose stdout/stderr entirely in logging tests without this.
+        sys.stdout.flush()
+        sys.stderr.flush()
         bb.debug(2, "Python function %s finished" % func)
 
     if cwd and olddir:
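
For illustration (not part of the diff): the block above temporarily redirects sys.stdout/sys.stderr into a StringIO and restores them in the finally clause, so the captured text can be replayed through the logger in one piece. A minimal standalone sketch of the same capture-and-restore pattern:

import sys
from io import StringIO

def run_captured(func):
    # Swap the real streams for an in-memory buffer, run the function,
    # then restore the streams before the captured text is used.
    sys.stdout.flush()
    sys.stderr.flush()
    currout, currerr = sys.stdout, sys.stderr
    sys.stderr = sys.stdout = execio = StringIO()
    try:
        func()
    finally:
        execio.flush()
        captured = execio.getvalue()
        sys.stdout, sys.stderr = currout, currerr
        execio.close()
    return captured

print(run_captured(lambda: print("hello from a python task")))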
@@ -375,7 +397,7 @@ def create_progress_handler(func, progress, logfile, d):
         # Use specified regex
         return bb.progress.OutOfProgressHandler(d, regex=progress.split(':', 1)[1], outfile=logfile)
     elif progress.startswith("custom:"):
-        # Use a custom progress handler that was injected via OE_EXTRA_IMPORTS or __builtins__
+        # Use a custom progress handler that was injected via other means
         import functools
         from types import ModuleType
 
@@ -436,7 +458,11 @@ exit $ret
     if fakerootcmd:
         cmd = [fakerootcmd, runfile]
 
-    if verboseStdoutLogging:
+    # We only want to output to logger via LogTee if stdout is sys.__stdout__ (which will either
+    # be real stdout or subprocess PIPE or similar). In other cases we are being run "recursively",
+    # ie. inside another function, in which case stdout is already being captured so we don't
+    # want to Tee here as output would be printed twice, and out of order.
+    if verboseStdoutLogging and sys.stdout == sys.__stdout__:
         logfile = LogTee(logger, StdoutNoopContextManager())
     else:
         logfile = StdoutNoopContextManager()
@@ -565,10 +591,8 @@ exit $ret
 def _task_data(fn, task, d):
     localdata = bb.data.createCopy(d)
     localdata.setVar('BB_FILENAME', fn)
-    localdata.setVar('BB_CURRENTTASK', task[3:])
     localdata.setVar('OVERRIDES', 'task-%s:%s' %
                      (task[3:].replace('_', '-'), d.getVar('OVERRIDES', False)))
-    localdata.finalize()
     bb.data.expandKeys(localdata)
     return localdata
 
@@ -579,7 +603,7 @@ def _exec_task(fn, task, d, quieterr):
     running it with its own local metadata, and with some useful variables set.
     """
     if not d.getVarFlag(task, 'task', False):
-        event.fire(TaskInvalid(task, d), d)
+        event.fire(TaskInvalid(task, fn, d), d)
         logger.error("No such task: %s" % task)
         return 1
 
@@ -615,7 +639,8 @@ def _exec_task(fn, task, d, quieterr):
     logorder = os.path.join(tempdir, 'log.task_order')
     try:
         with open(logorder, 'a') as logorderfile:
-            logorderfile.write('{0} ({1}): {2}\n'.format(task, os.getpid(), logbase))
+            timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S.%f")
+            logorderfile.write('{0} {1} ({2}): {3}\n'.format(timestamp, task, os.getpid(), logbase))
     except OSError:
         logger.exception("Opening log file '%s'", logorder)
         pass
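
For illustration (not part of the diff): with the added timestamp the log.task_order entries become sortable by start time. A small sketch reproducing the line format, with made-up task and PID values:

import datetime
import os

# Made-up task name; reproduces the new log.task_order line layout,
# e.g. "20240101-101530.123456 do_compile (4242): log.do_compile.4242"
timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S.%f")
task = "do_compile"
logbase = "log.%s.%s" % (task, os.getpid())
print('{0} {1} ({2}): {3}'.format(timestamp, task, os.getpid(), logbase))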
@@ -682,47 +707,55 @@ def _exec_task(fn, task, d, quieterr):
     try:
         try:
             event.fire(TaskStarted(task, fn, logfn, flags, localdata), localdata)
-        except (bb.BBHandledException, SystemExit):
-            return 1
 
-        try:
             for func in (prefuncs or '').split():
                 exec_func(func, localdata)
             exec_func(task, localdata)
             for func in (postfuncs or '').split():
                 exec_func(func, localdata)
-        except bb.BBHandledException:
-            event.fire(TaskFailed(task, fn, logfn, localdata, True), localdata)
-            return 1
-        except Exception as exc:
-            if quieterr:
-                event.fire(TaskFailedSilent(task, fn, logfn, localdata), localdata)
-            else:
-                errprinted = errchk.triggered
+        finally:
+            # Need to flush and close the logs before sending events where the
+            # UI may try to look at the logs.
+            sys.stdout.flush()
+            sys.stderr.flush()
+
+            bblogger.removeHandler(handler)
+
+            # Restore the backup fds
+            os.dup2(osi[0], osi[1])
+            os.dup2(oso[0], oso[1])
+            os.dup2(ose[0], ose[1])
+
+            # Close the backup fds
+            os.close(osi[0])
+            os.close(oso[0])
+            os.close(ose[0])
+
+            logfile.close()
+            if os.path.exists(logfn) and os.path.getsize(logfn) == 0:
+                logger.debug2("Zero size logfn %s, removing", logfn)
+                bb.utils.remove(logfn)
+                bb.utils.remove(loglink)
+    except (Exception, SystemExit) as exc:
+        handled = False
+        if isinstance(exc, bb.BBHandledException):
+            handled = True
+
+        if quieterr:
+            if not handled:
+                logger.warning(str(exc))
+            event.fire(TaskFailedSilent(task, fn, logfn, localdata), localdata)
+        else:
+            errprinted = errchk.triggered
+            # If the output is already on stdout, we've printed the information in the
+            # logs once already so don't duplicate
+            if verboseStdoutLogging or handled:
+                errprinted = True
+            if not handled:
                 logger.error(str(exc))
             event.fire(TaskFailed(task, fn, logfn, localdata, errprinted), localdata)
         return 1
-    finally:
-        sys.stdout.flush()
-        sys.stderr.flush()
-
-        bblogger.removeHandler(handler)
-
-        # Restore the backup fds
-        os.dup2(osi[0], osi[1])
-        os.dup2(oso[0], oso[1])
-        os.dup2(ose[0], ose[1])
-
-        # Close the backup fds
-        os.close(osi[0])
-        os.close(oso[0])
-        os.close(ose[0])
 
-    logfile.close()
-    if os.path.exists(logfn) and os.path.getsize(logfn) == 0:
-        logger.debug2("Zero size logfn %s, removing", logfn)
-        bb.utils.remove(logfn)
-        bb.utils.remove(loglink)
     event.fire(TaskSucceeded(task, fn, logfn, localdata), localdata)
 
     if not localdata.getVarFlag(task, 'nostamp', False) and not localdata.getVarFlag(task, 'selfstamp', False):
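
For illustration (not part of the diff): the consolidated handler catches (Exception, SystemExit) together because SystemExit derives from BaseException rather than Exception, so a bare "except Exception" would not see it; the isinstance() check then keeps already-reported failures (bb.BBHandledException) quiet. A standalone sketch of that dispatch, using a stand-in exception class:

class HandledError(Exception):
    """Stand-in for bb.BBHandledException: the error was already reported."""

def run_task(func, quieterr=False):
    try:
        func()
    except (Exception, SystemExit) as exc:
        # SystemExit is a BaseException subclass, so it must be named explicitly here.
        handled = isinstance(exc, HandledError)
        if not handled and not quieterr:
            print("ERROR: %s" % exc)
        return 1
    return 0

def fails_with_exit():
    raise SystemExit(1)

def fails_already_reported():
    raise HandledError("already logged elsewhere")

print(run_task(fails_with_exit))        # reported here, returns 1
print(run_task(fails_already_reported)) # stays quiet, still returns 1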
@@ -760,132 +793,92 @@ def exec_task(fn, task, d, profile = False):
             event.fire(failedevent, d)
         return 1
 
-def stamp_internal(taskname, d, file_name, baseonly=False, noextra=False):
+def _get_cleanmask(taskname, mcfn):
     """
-    Internal stamp helper function
-    Makes sure the stamp directory exists
+    Internal stamp helper function to generate stamp cleaning mask
     Returns the stamp path+filename
 
     In the bitbake core, d can be a CacheData and file_name will be set.
     When called in task context, d will be a data store, file_name will not be set
     """
-    taskflagname = taskname
-    if taskname.endswith("_setscene") and taskname != "do_setscene":
-        taskflagname = taskname.replace("_setscene", "")
-
-    if file_name:
-        stamp = d.stamp[file_name]
-        extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
-    else:
-        stamp = d.getVar('STAMP')
-        file_name = d.getVar('BB_FILENAME')
-        extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info') or ""
+    cleanmask = bb.parse.siggen.stampcleanmask_mcfn(taskname, mcfn)
+    taskflagname = taskname.replace("_setscene", "")
+    if cleanmask:
+        return [cleanmask, cleanmask.replace(taskflagname, taskflagname + "_setscene")]
+    return []
+
+def clean_stamp_mcfn(task, mcfn):
+    cleanmask = _get_cleanmask(task, mcfn)
+    for mask in cleanmask:
+        for name in glob.glob(mask):
+            # Preserve sigdata files in the stamps directory
+            if "sigdata" in name or "sigbasedata" in name:
+                continue
+            # Preserve taint files in the stamps directory
+            if name.endswith('.taint'):
+                continue
+            os.unlink(name)
 
-    if baseonly:
-        return stamp
-    if noextra:
-        extrainfo = ""
+def clean_stamp(task, d):
+    mcfn = d.getVar('BB_FILENAME')
+    clean_stamp_mcfn(task, mcfn)
 
-    if not stamp:
-        return
+def make_stamp_mcfn(task, mcfn):
 
-    stamp = bb.parse.siggen.stampfile(stamp, file_name, taskname, extrainfo)
+    basestamp = bb.parse.siggen.stampfile_mcfn(task, mcfn)
 
-    stampdir = os.path.dirname(stamp)
+    stampdir = os.path.dirname(basestamp)
     if cached_mtime_noerror(stampdir) == 0:
         bb.utils.mkdirhier(stampdir)
 
-    return stamp
+    clean_stamp_mcfn(task, mcfn)
 
-def stamp_cleanmask_internal(taskname, d, file_name):
-    """
-    Internal stamp helper function to generate stamp cleaning mask
-    Returns the stamp path+filename
+    # Remove the file and recreate to force timestamp
+    # change on broken NFS filesystems
+    if basestamp:
+        bb.utils.remove(basestamp)
+        open(basestamp, "w").close()
 
-    In the bitbake core, d can be a CacheData and file_name will be set.
-    When called in task context, d will be a data store, file_name will not be set
+def make_stamp(task, d):
     """
-    taskflagname = taskname
-    if taskname.endswith("_setscene") and taskname != "do_setscene":
-        taskflagname = taskname.replace("_setscene", "")
-
-    if file_name:
-        stamp = d.stampclean[file_name]
-        extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
-    else:
-        stamp = d.getVar('STAMPCLEAN')
-        file_name = d.getVar('BB_FILENAME')
-        extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info') or ""
+    Creates/updates a stamp for a given task
+    """
+    mcfn = d.getVar('BB_FILENAME')
 
-    if not stamp:
-        return []
+    make_stamp_mcfn(task, mcfn)
 
-    cleanmask = bb.parse.siggen.stampcleanmask(stamp, file_name, taskname, extrainfo)
+    # If we're in task context, write out a signature file for each task
+    # as it completes
+    if not task.endswith("_setscene"):
+        stampbase = bb.parse.siggen.stampfile_base(mcfn)
+        bb.parse.siggen.dump_sigtask(mcfn, task, stampbase, True)
 
-    return [cleanmask, cleanmask.replace(taskflagname, taskflagname + "_setscene")]
 
-def make_stamp(task, d, file_name = None):
-    """
-    Creates/updates a stamp for a given task
-    (d can be a data dict or dataCache)
-    """
-    cleanmask = stamp_cleanmask_internal(task, d, file_name)
+def find_stale_stamps(task, mcfn):
+    current = bb.parse.siggen.stampfile_mcfn(task, mcfn)
+    current2 = bb.parse.siggen.stampfile_mcfn(task + "_setscene", mcfn)
+    cleanmask = _get_cleanmask(task, mcfn)
+    found = []
     for mask in cleanmask:
         for name in glob.glob(mask):
-            # Preserve sigdata files in the stamps directory
             if "sigdata" in name or "sigbasedata" in name:
                 continue
-            # Preserve taint files in the stamps directory
             if name.endswith('.taint'):
                 continue
-            os.unlink(name)
-
-    stamp = stamp_internal(task, d, file_name)
-    # Remove the file and recreate to force timestamp
-    # change on broken NFS filesystems
-    if stamp:
-        bb.utils.remove(stamp)
-        open(stamp, "w").close()
-
-    # If we're in task context, write out a signature file for each task
-    # as it completes
-    if not task.endswith("_setscene") and task != "do_setscene" and not file_name:
-        stampbase = stamp_internal(task, d, None, True)
-        file_name = d.getVar('BB_FILENAME')
-        bb.parse.siggen.dump_sigtask(file_name, task, stampbase, True)
-
-def del_stamp(task, d, file_name = None):
-    """
-    Removes a stamp for a given task
-    (d can be a data dict or dataCache)
-    """
-    stamp = stamp_internal(task, d, file_name)
-    bb.utils.remove(stamp)
+            if name == current or name == current2:
+                continue
+            logger.debug2("Stampfile %s does not match %s or %s" % (name, current, current2))
+            found.append(name)
+    return found
 
-def write_taint(task, d, file_name = None):
+def write_taint(task, d):
     """
     Creates a "taint" file which will force the specified task and its
     dependents to be re-run the next time by influencing the value of its
     taskhash.
-    (d can be a data dict or dataCache)
     """
-    import uuid
-    if file_name:
-        taintfn = d.stamp[file_name] + '.' + task + '.taint'
-    else:
-        taintfn = d.getVar('STAMP') + '.' + task + '.taint'
-    bb.utils.mkdirhier(os.path.dirname(taintfn))
-    # The specific content of the taint file is not really important,
-    # we just need it to be random, so a random UUID is used
-    with open(taintfn, 'w') as taintf:
-        taintf.write(str(uuid.uuid4()))
-
-def stampfile(taskname, d, file_name = None, noextra=False):
-    """
-    Return the stamp for a given task
-    (d can be a data dict or dataCache)
-    """
-    return stamp_internal(taskname, d, file_name, noextra=noextra)
+    mcfn = d.getVar('BB_FILENAME')
+    bb.parse.siggen.invalidate_task(task, mcfn)
 
 def add_tasks(tasklist, d):
     task_deps = d.getVar('_task_deps', False)
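
For illustration (not part of the diff): clean_stamp_mcfn() and find_stale_stamps() share the same glob-and-filter loop over the cleanmask, skipping signature and taint files. A standalone sketch of that filter; the mask strings and keep set here are hypothetical, in the real code they come from bb.parse.siggen:

import glob
import os

def remove_stamps(cleanmask, keep=()):
    # Same filter as clean_stamp_mcfn()/find_stale_stamps() above: expand each
    # mask, but never touch sigdata/sigbasedata or .taint files, and optionally
    # keep the stamps that are still current.
    removed = []
    for mask in cleanmask:
        for name in glob.glob(mask):
            if "sigdata" in name or "sigbasedata" in name:
                continue
            if name.endswith('.taint'):
                continue
            if name in keep:
                continue
            os.unlink(name)
            removed.append(name)
    return removed

# Hypothetical usage; real masks come from bb.parse.siggen.stampcleanmask_mcfn():
# remove_stamps(["/tmp/stamps/myrecipe-1.0-r0.do_compile.*"])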
@@ -910,6 +903,11 @@ def add_tasks(tasklist, d):
             task_deps[name] = {}
         if name in flags:
             deptask = d.expand(flags[name])
+            if name in ['noexec', 'fakeroot', 'nostamp']:
+                if deptask != '1':
+                    bb.warn("In a future version of BitBake, setting the '{}' flag to something other than '1' "
+                            "will result in the flag not being set. See YP bug #13808.".format(name))
+
             task_deps[name][task] = deptask
     getTask('mcdepends')
     getTask('depends')
@@ -934,9 +932,13 @@ def add_tasks(tasklist, d):
     # don't assume holding a reference
     d.setVar('_task_deps', task_deps)
 
+def ensure_task_prefix(name):
+    if name[:3] != "do_":
+        name = "do_" + name
+    return name
+
 def addtask(task, before, after, d):
-    if task[:3] != "do_":
-        task = "do_" + task
+    task = ensure_task_prefix(task)
 
     d.setVarFlag(task, "task", 1)
     bbtasks = d.getVar('__BBTASKS', False) or []
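
For illustration (not part of the diff): ensure_task_prefix() just normalises task names so addtask/deltask and their dependency entries can assume the do_ prefix. Copied here with a couple of checks to show the behaviour:

def ensure_task_prefix(name):
    if name[:3] != "do_":
        name = "do_" + name
    return name

# Both spellings end up as the canonical task name:
assert ensure_task_prefix("compile") == "do_compile"
assert ensure_task_prefix("do_compile") == "do_compile"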
@@ -948,19 +950,20 @@ def addtask(task, before, after, d):
     if after is not None:
         # set up deps for function
         for entry in after.split():
+            entry = ensure_task_prefix(entry)
             if entry not in existing:
                 existing.append(entry)
     d.setVarFlag(task, "deps", existing)
     if before is not None:
         # set up things that depend on this func
         for entry in before.split():
+            entry = ensure_task_prefix(entry)
             existing = d.getVarFlag(entry, "deps", False) or []
             if task not in existing:
                 d.setVarFlag(entry, "deps", [task] + existing)
 
 def deltask(task, d):
-    if task[:3] != "do_":
-        task = "do_" + task
+    task = ensure_task_prefix(task)
 
     bbtasks = d.getVar('__BBTASKS', False) or []
     if task in bbtasks:
@@ -1008,6 +1011,8 @@ def tasksbetween(task_start, task_end, d):
     def follow_chain(task, endtask, chain=None):
         if not chain:
             chain = []
+        if task in chain:
+            bb.fatal("Circular task dependencies as %s depends on itself via the chain %s" % (task, " -> ".join(chain)))
         chain.append(task)
         for othertask in tasks:
             if othertask == task:
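
For illustration (not part of the diff): the added guard turns what would otherwise be unbounded recursion into an explicit failure when a task reappears in the chain being walked. A standalone sketch of the same cycle check over a plain dependency dict:

def check_chain(deps, task, chain=None):
    # deps maps a task to the tasks it depends on; mirrors the guard added above.
    chain = chain or []
    if task in chain:
        raise RuntimeError("Circular task dependencies as %s depends on itself via the chain %s"
                           % (task, " -> ".join(chain)))
    chain.append(task)
    for dep in deps.get(task, ()):
        check_chain(deps, dep, chain)
    chain.pop()

check_chain({"do_b": ["do_a"], "do_a": []}, "do_b")          # fine
# check_chain({"do_a": ["do_b"], "do_b": ["do_a"]}, "do_a")  # would raise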
@@ -1023,3 +1028,9 @@ def tasksbetween(task_start, task_end, d):
         chain.pop()
     follow_chain(task_start, task_end)
     return outtasks
+
+def listtasks(d):
+    """
+    Return the list of tasks in the current recipe.
+    """
+    return tuple(d.getVar('__BBTASKS', False) or ())