Diffstat (limited to 'bitbake/lib/bb')
-rw-r--r--  bitbake/lib/bb/COW.py | 12
-rw-r--r--  bitbake/lib/bb/__init__.py | 141
-rwxr-xr-x  bitbake/lib/bb/acl.py | 213
-rw-r--r--  bitbake/lib/bb/asyncrpc/__init__.py | 16
-rw-r--r--  bitbake/lib/bb/asyncrpc/client.py | 271
-rw-r--r--  bitbake/lib/bb/asyncrpc/connection.py | 146
-rw-r--r--  bitbake/lib/bb/asyncrpc/exceptions.py | 21
-rw-r--r--  bitbake/lib/bb/asyncrpc/serv.py | 410
-rw-r--r--  bitbake/lib/bb/build.py | 291
-rw-r--r--  bitbake/lib/bb/cache.py | 373
-rw-r--r--  bitbake/lib/bb/checksum.py | 47
-rw-r--r--  bitbake/lib/bb/codeparser.py | 195
-rw-r--r--  bitbake/lib/bb/command.py | 147
-rw-r--r--  bitbake/lib/bb/compress/_pipecompress.py | 196
-rw-r--r--  bitbake/lib/bb/compress/lz4.py | 19
-rw-r--r--  bitbake/lib/bb/compress/zstd.py | 30
-rw-r--r--  bitbake/lib/bb/cooker.py | 904
-rw-r--r--  bitbake/lib/bb/cookerdata.py | 211
-rw-r--r--  bitbake/lib/bb/daemonize.py | 44
-rw-r--r--  bitbake/lib/bb/data.py | 147
-rw-r--r--  bitbake/lib/bb/data_smart.py | 371
-rw-r--r--  bitbake/lib/bb/event.py | 204
-rw-r--r--  bitbake/lib/bb/exceptions.py | 94
-rw-r--r--  bitbake/lib/bb/fetch2/README | 57
-rw-r--r--  bitbake/lib/bb/fetch2/__init__.py | 561
-rw-r--r--  bitbake/lib/bb/fetch2/az.py | 98
-rw-r--r--  bitbake/lib/bb/fetch2/clearcase.py | 6
-rw-r--r--  bitbake/lib/bb/fetch2/crate.py | 150
-rw-r--r--  bitbake/lib/bb/fetch2/gcp.py | 102
-rw-r--r--  bitbake/lib/bb/fetch2/git.py | 669
-rw-r--r--  bitbake/lib/bb/fetch2/gitsm.py | 159
-rw-r--r--  bitbake/lib/bb/fetch2/gomod.py | 273
-rw-r--r--  bitbake/lib/bb/fetch2/hg.py | 1
-rw-r--r--  bitbake/lib/bb/fetch2/local.py | 25
-rw-r--r--  bitbake/lib/bb/fetch2/npm.py | 83
-rw-r--r--  bitbake/lib/bb/fetch2/npmsw.py | 112
-rw-r--r--  bitbake/lib/bb/fetch2/osc.py | 52
-rw-r--r--  bitbake/lib/bb/fetch2/perforce.py | 2
-rw-r--r--  bitbake/lib/bb/fetch2/s3.py | 43
-rw-r--r--  bitbake/lib/bb/fetch2/sftp.py | 4
-rw-r--r--  bitbake/lib/bb/fetch2/ssh.py | 50
-rw-r--r--  bitbake/lib/bb/fetch2/svn.py | 15
-rw-r--r--  bitbake/lib/bb/fetch2/wget.py | 263
-rwxr-xr-x  bitbake/lib/bb/main.py | 407
-rw-r--r--  bitbake/lib/bb/monitordisk.py | 24
-rw-r--r--  bitbake/lib/bb/msg.py | 38
-rw-r--r--  bitbake/lib/bb/parse/__init__.py | 61
-rw-r--r--  bitbake/lib/bb/parse/ast.py | 217
-rw-r--r--  bitbake/lib/bb/parse/parse_py/BBHandler.py | 125
-rw-r--r--  bitbake/lib/bb/parse/parse_py/ConfHandler.py | 58
-rw-r--r--  bitbake/lib/bb/persist_data.py | 301
-rw-r--r--  bitbake/lib/bb/process.py | 9
-rw-r--r--  bitbake/lib/bb/progress.py | 9
-rw-r--r--  bitbake/lib/bb/providers.py | 92
-rw-r--r--  bitbake/lib/bb/runqueue.py | 1146
-rw-r--r--  bitbake/lib/bb/server/process.py | 402
-rw-r--r--  bitbake/lib/bb/server/xmlrpcserver.py | 14
-rw-r--r--  bitbake/lib/bb/siggen.py | 731
-rw-r--r--  bitbake/lib/bb/taskdata.py | 14
-rw-r--r--  bitbake/lib/bb/tests/codeparser.py | 106
-rw-r--r--  bitbake/lib/bb/tests/color.py | 4
-rw-r--r--  bitbake/lib/bb/tests/compression.py | 100
-rw-r--r--  bitbake/lib/bb/tests/cooker.py | 2
-rw-r--r--  bitbake/lib/bb/tests/data.py | 181
-rw-r--r--  bitbake/lib/bb/tests/event.py | 62
-rw-r--r--  bitbake/lib/bb/tests/fetch-testdata/debian/pool/main/m/minicom/index.html | 59
-rw-r--r--  bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.10/index.html | 20
-rw-r--r--  bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.9/index.html | 40
-rw-r--r--  bitbake/lib/bb/tests/fetch-testdata/software/libxml2/index.html | 19
-rw-r--r--  bitbake/lib/bb/tests/fetch-testdata/software/miniupnp/download.php | 3528
-rw-r--r--  bitbake/lib/bb/tests/fetch.py | 1959
-rw-r--r--  bitbake/lib/bb/tests/parse.py | 303
-rw-r--r--  bitbake/lib/bb/tests/persist_data.py | 129
-rw-r--r--  bitbake/lib/bb/tests/runqueue-tests/classes/base.bbclass | 2
-rw-r--r--  bitbake/lib/bb/tests/runqueue-tests/conf/bitbake.conf | 2
-rw-r--r--  bitbake/lib/bb/tests/runqueue-tests/recipes/g1.bb | 2
-rw-r--r--  bitbake/lib/bb/tests/runqueue-tests/recipes/h1.bb | 0
-rw-r--r--  bitbake/lib/bb/tests/runqueue.py | 65
-rw-r--r--  bitbake/lib/bb/tests/siggen.py | 77
-rw-r--r--  bitbake/lib/bb/tests/utils.py | 39
-rw-r--r--  bitbake/lib/bb/tinfoil.py | 215
-rw-r--r--  bitbake/lib/bb/ui/buildinfohelper.py | 101
-rw-r--r--  bitbake/lib/bb/ui/eventreplay.py | 86
-rw-r--r--  bitbake/lib/bb/ui/knotty.py | 246
-rw-r--r--  bitbake/lib/bb/ui/ncurses.py | 3
-rw-r--r--  bitbake/lib/bb/ui/taskexp.py | 7
-rwxr-xr-x  bitbake/lib/bb/ui/taskexp_ncurses.py | 1511
-rw-r--r--  bitbake/lib/bb/ui/teamcity.py | 5
-rw-r--r--  bitbake/lib/bb/ui/toasterui.py | 2
-rw-r--r--  bitbake/lib/bb/ui/uievent.py | 32
-rw-r--r--  bitbake/lib/bb/ui/uihelper.py | 8
-rw-r--r--  bitbake/lib/bb/utils.py | 962
-rwxr-xr-x  bitbake/lib/bb/xattr.py | 126

93 files changed, 16365 insertions(+), 4514 deletions(-)
diff --git a/bitbake/lib/bb/COW.py b/bitbake/lib/bb/COW.py
index 23c22b65ef..4af03c54ad 100644
--- a/bitbake/lib/bb/COW.py
+++ b/bitbake/lib/bb/COW.py
@@ -3,6 +3,8 @@
 #
 # Copyright (C) 2006 Tim Ansell
 #
+# SPDX-License-Identifier: GPL-2.0-only
+#
 # Please Note:
 # Be careful when using mutable types (ie Dict and Lists) - operations involving these are SLOW.
 # Assign a file to __warn__ to get warnings about slow operations.
@@ -34,8 +36,9 @@ class COWDictMeta(COWMeta):
     __marker__ = tuple()

     def __str__(cls):
-        # FIXME: I have magic numbers!
-        return "<COWDict Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) - 3)
+        ignored_keys = set(["__count__", "__doc__", "__module__", "__firstlineno__", "__static_attributes__"])
+        keys = set(cls.__dict__.keys()) - ignored_keys
+        return "<COWDict Level: %i Current Keys: %i>" % (cls.__count__, len(keys))

     __repr__ = __str__

@@ -159,8 +162,9 @@ class COWDictMeta(COWMeta):

 class COWSetMeta(COWDictMeta):
     def __str__(cls):
-        # FIXME: I have magic numbers!
-        return "<COWSet Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) - 3)
+        ignored_keys = set(["__count__", "__doc__", "__module__", "__firstlineno__", "__static_attributes__"])
+        keys = set(cls.__dict__.keys()) - ignored_keys
+        return "<COWSet Level: %i Current Keys: %i>" % (cls.__count__, len(keys))

     __repr__ = __str__

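For context on this change: the old code subtracted a hard-coded 3 to skip the metadata attributes Python adds to every class ``__dict__``, which breaks on newer CPython (3.13 adds ``__firstlineno__`` and ``__static_attributes__``). A minimal standalone sketch of the counting approach, using only the standard library (the class and values here are illustrative, not from BitBake):

# Count the "real" keys of a class __dict__, skipping interpreter-added
# metadata. The attribute set varies across CPython versions, which is
# why an explicit ignore set is more robust than a hard-coded offset.
class Demo:
    x = 1
    y = 2

ignored = {"__doc__", "__module__", "__dict__", "__weakref__",
           "__qualname__", "__firstlineno__", "__static_attributes__"}
print(len(set(Demo.__dict__) - ignored))  # 2, on any recent CPython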
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
index 84a9051c13..bf4c54d829 100644
--- a/bitbake/lib/bb/__init__.py
+++ b/bitbake/lib/bb/__init__.py
@@ -9,12 +9,19 @@
 # SPDX-License-Identifier: GPL-2.0-only
 #

-__version__ = "1.49.2"
+__version__ = "2.15.1"

 import sys
-if sys.version_info < (3, 5, 0):
-    raise RuntimeError("Sorry, python 3.5.0 or later is required for this version of bitbake")
+if sys.version_info < (3, 9, 0):
+    raise RuntimeError("Sorry, python 3.9.0 or later is required for this version of bitbake")

+if sys.version_info < (3, 10, 0):
+    # With python 3.8 and 3.9, we see errors of "libgcc_s.so.1 must be installed for pthread_cancel to work"
+    # https://stackoverflow.com/questions/64797838/libgcc-s-so-1-must-be-installed-for-pthread-cancel-to-work
+    # https://bugs.ams1.psf.io/issue42888
+    # so ensure libgcc_s is loaded early on
+    import ctypes
+    libgcc_s = ctypes.CDLL('libgcc_s.so.1')

 class BBHandledException(Exception):
     """
@@ -29,6 +36,7 @@ class BBHandledException(Exception):

 import os
 import logging
+from collections import namedtuple


 class NullHandler(logging.Handler):
@@ -58,8 +66,12 @@ class BBLoggerMixin(object):
         if not bb.event.worker_pid:
             if self.name in bb.msg.loggerDefaultDomains and loglevel > (bb.msg.loggerDefaultDomains[self.name]):
                 return
-            if loglevel > bb.msg.loggerDefaultLogLevel:
+            if loglevel < bb.msg.loggerDefaultLogLevel:
                 return
+
+        if not isinstance(level, int) or not isinstance(msg, str):
+            mainlogger.warning("Invalid arguments in bbdebug: %s" % repr((level, msg,) + args))
+
         return self.log(loglevel, msg, *args, **kwargs)

     def plain(self, msg, *args, **kwargs):
@@ -71,6 +83,13 @@ class BBLoggerMixin(object):
     def verbnote(self, msg, *args, **kwargs):
         return self.log(logging.INFO + 2, msg, *args, **kwargs)

+    def warnonce(self, msg, *args, **kwargs):
+        return self.log(logging.WARNING - 1, msg, *args, **kwargs)
+
+    def erroronce(self, msg, *args, **kwargs):
+        return self.log(logging.ERROR - 1, msg, *args, **kwargs)
+
+
 Logger = logging.getLoggerClass()
 class BBLogger(Logger, BBLoggerMixin):
     def __init__(self, name, *args, **kwargs):
@@ -85,26 +104,6 @@ class BBLoggerAdapter(logging.LoggerAdapter, BBLoggerMixin):
         self.setup_bblogger(logger.name)
         super().__init__(logger, *args, **kwargs)

-    if sys.version_info < (3, 6):
-        # These properties were added in Python 3.6. Add them in older versions
-        # for compatibility
-        @property
-        def manager(self):
-            return self.logger.manager
-
-        @manager.setter
-        def manager(self, value):
-            self.logger.manager = value
-
-        @property
-        def name(self):
-            return self.logger.name
-
-    def __repr__(self):
-        logger = self.logger
-        level = logger.getLevelName(logger.getEffectiveLevel())
-        return '<%s %s (%s)>' % (self.__class__.__name__, logger.name, level)
-
 logging.LoggerAdapter = BBLoggerAdapter

 logger = logging.getLogger("BitBake")
@@ -130,9 +129,25 @@ sys.modules['bb.fetch'] = sys.modules['bb.fetch2']

 # Messaging convenience functions
 def plain(*args):
+    """
+    Prints a message at "plain" level (higher level than a ``bb.note()``).
+
+    Arguments:
+
+    - ``args``: one or more strings to print.
+    """
     mainlogger.plain(''.join(args))

 def debug(lvl, *args):
+    """
+    Prints a debug message.
+
+    Arguments:
+
+    - ``lvl``: debug level. Higher value increases the debug level
+      (determined by ``bitbake -D``).
+    - ``args``: one or more strings to print.
+    """
     if isinstance(lvl, str):
         mainlogger.warning("Passed invalid debug level '%s' to bb.debug", lvl)
         args = (lvl,) + args
@@ -140,27 +155,81 @@ def debug(lvl, *args):
         mainlogger.bbdebug(lvl, ''.join(args))

 def note(*args):
+    """
+    Prints a message at "note" level.
+
+    Arguments:
+
+    - ``args``: one or more strings to print.
+    """
     mainlogger.info(''.join(args))

-#
-# A higher prioity note which will show on the console but isn't a warning
-#
-# Something is happening the user should be aware of but they probably did
-# something to make it happen
-#
 def verbnote(*args):
+    """
+    A higher priority note which will show on the console but isn't a warning.
+
+    Use in contexts when something is happening the user should be aware of but
+    they probably did something to make it happen.
+
+    Arguments:
+
+    - ``args``: one or more strings to print.
+    """
     mainlogger.verbnote(''.join(args))

 #
 # Warnings - things the user likely needs to pay attention to and fix
 #
 def warn(*args):
+    """
+    Prints a warning message.
+
+    Arguments:
+
+    - ``args``: one or more strings to print.
+    """
     mainlogger.warning(''.join(args))

+def warnonce(*args):
+    """
+    Prints a warning message like ``bb.warn()``, but only prints the message
+    once.
+
+    Arguments:
+
+    - ``args``: one or more strings to print.
+    """
+    mainlogger.warnonce(''.join(args))
+
 def error(*args, **kwargs):
+    """
+    Prints an error message.
+
+    Arguments:
+
+    - ``args``: one or more strings to print.
+    """
     mainlogger.error(''.join(args), extra=kwargs)

+def erroronce(*args):
+    """
+    Prints an error message like ``bb.error()``, but only prints the message
+    once.
+
+    Arguments:
+
+    - ``args``: one or more strings to print.
+    """
+    mainlogger.erroronce(''.join(args))
+
 def fatal(*args, **kwargs):
+    """
+    Prints an error message and stops the BitBake execution.
+
+    Arguments:
+
+    - ``args``: one or more strings to print.
+    """
     mainlogger.critical(''.join(args), extra=kwargs)
     raise BBHandledException()

@@ -189,7 +258,6 @@ def deprecated(func, name=None, advice=""):
 # For compatibility
 def deprecate_import(current, modulename, fromlist, renames = None):
     """Import objects from one module into another, wrapping them with a DeprecationWarning"""
-    import sys

     module = __import__(modulename, fromlist = fromlist)
     for position, objname in enumerate(fromlist):
@@ -203,3 +271,14 @@ def deprecate_import(current, modulename, fromlist, renames = None):

     setattr(sys.modules[current], newname, newobj)

+TaskData = namedtuple("TaskData", [
+    "pn",
+    "taskname",
+    "fn",
+    "deps",
+    "provides",
+    "taskhash",
+    "unihash",
+    "hashfn",
+    "taskhash_deps",
+])
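To illustrate the additions above, a hedged usage sketch. It assumes BitBake's lib directory is on sys.path; the field values are purely illustrative, and the actual once-only suppression of warnonce()/erroronce() happens in the logging handlers, not shown here:

import bb

# warnonce()/erroronce() take the same arguments as warn()/error() but
# log at WARNING-1/ERROR-1 so repeat messages can be deduplicated.
bb.warnonce("fetcher failed once, retrying")
bb.warnonce("fetcher failed once, retrying")  # candidate for suppression

# TaskData is a plain namedtuple; fields can be accessed by name.
td = bb.TaskData(
    pn="example", taskname="do_compile", fn="example_1.0.bb",
    deps=[], provides=["example"], taskhash="abc123", unihash="abc123",
    hashfn="example:1.0", taskhash_deps=[],
)
print(td.pn, td.taskname)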
diff --git a/bitbake/lib/bb/acl.py b/bitbake/lib/bb/acl.py
new file mode 100755
index 0000000000..e9dbdb617f
--- /dev/null
+++ b/bitbake/lib/bb/acl.py
@@ -0,0 +1,213 @@
+#! /usr/bin/env python3
+#
+# Copyright 2023 by Garmin Ltd. or its subsidiaries
+#
+# SPDX-License-Identifier: MIT
+
+
+import sys
+import ctypes
+import os
+import errno
+import pwd
+import grp
+
+libacl = ctypes.CDLL("libacl.so.1", use_errno=True)
+
+
+ACL_TYPE_ACCESS = 0x8000
+ACL_TYPE_DEFAULT = 0x4000
+
+ACL_FIRST_ENTRY = 0
+ACL_NEXT_ENTRY = 1
+
+ACL_UNDEFINED_TAG = 0x00
+ACL_USER_OBJ = 0x01
+ACL_USER = 0x02
+ACL_GROUP_OBJ = 0x04
+ACL_GROUP = 0x08
+ACL_MASK = 0x10
+ACL_OTHER = 0x20
+
+ACL_READ = 0x04
+ACL_WRITE = 0x02
+ACL_EXECUTE = 0x01
+
+acl_t = ctypes.c_void_p
+acl_entry_t = ctypes.c_void_p
+acl_permset_t = ctypes.c_void_p
+acl_perm_t = ctypes.c_uint
+
+acl_tag_t = ctypes.c_int
+
+libacl.acl_free.argtypes = [acl_t]
+
+
+def acl_free(acl):
+    libacl.acl_free(acl)
+
+
+libacl.acl_get_file.restype = acl_t
+libacl.acl_get_file.argtypes = [ctypes.c_char_p, ctypes.c_uint]
+
+
+def acl_get_file(path, typ):
+    acl = libacl.acl_get_file(os.fsencode(path), typ)
+    if acl is None:
+        err = ctypes.get_errno()
+        raise OSError(err, os.strerror(err), str(path))
+
+    return acl
+
+
+libacl.acl_get_entry.argtypes = [acl_t, ctypes.c_int, ctypes.c_void_p]
+
+
+def acl_get_entry(acl, entry_id):
+    entry = acl_entry_t()
+    ret = libacl.acl_get_entry(acl, entry_id, ctypes.byref(entry))
+    if ret < 0:
+        err = ctypes.get_errno()
+        raise OSError(err, os.strerror(err))
+
+    if ret == 0:
+        return None
+
+    return entry
+
+
+libacl.acl_get_tag_type.argtypes = [acl_entry_t, ctypes.c_void_p]
+
+
+def acl_get_tag_type(entry_d):
+    tag = acl_tag_t()
+    ret = libacl.acl_get_tag_type(entry_d, ctypes.byref(tag))
+    if ret < 0:
+        err = ctypes.get_errno()
+        raise OSError(err, os.strerror(err))
+    return tag.value
+
+
+libacl.acl_get_qualifier.restype = ctypes.c_void_p
+libacl.acl_get_qualifier.argtypes = [acl_entry_t]
+
+
+def acl_get_qualifier(entry_d):
+    ret = libacl.acl_get_qualifier(entry_d)
+    if ret is None:
+        err = ctypes.get_errno()
+        raise OSError(err, os.strerror(err))
+    return ctypes.c_void_p(ret)
+
+
+libacl.acl_get_permset.argtypes = [acl_entry_t, ctypes.c_void_p]
+
+
+def acl_get_permset(entry_d):
+    permset = acl_permset_t()
+    ret = libacl.acl_get_permset(entry_d, ctypes.byref(permset))
+    if ret < 0:
+        err = ctypes.get_errno()
+        raise OSError(err, os.strerror(err))
+
+    return permset
+
+
+libacl.acl_get_perm.argtypes = [acl_permset_t, acl_perm_t]
+
+
+def acl_get_perm(permset_d, perm):
+    ret = libacl.acl_get_perm(permset_d, perm)
+    if ret < 0:
+        err = ctypes.get_errno()
+        raise OSError(err, os.strerror(err))
+    return bool(ret)
+
+
+class Entry(object):
+    def __init__(self, tag, qualifier, mode):
+        self.tag = tag
+        self.qualifier = qualifier
+        self.mode = mode
+
+    def __str__(self):
+        typ = ""
+        qual = ""
+        if self.tag == ACL_USER:
+            typ = "user"
+            qual = pwd.getpwuid(self.qualifier).pw_name
+        elif self.tag == ACL_GROUP:
+            typ = "group"
+            qual = grp.getgrgid(self.qualifier).gr_name
+        elif self.tag == ACL_USER_OBJ:
+            typ = "user"
+        elif self.tag == ACL_GROUP_OBJ:
+            typ = "group"
+        elif self.tag == ACL_MASK:
+            typ = "mask"
+        elif self.tag == ACL_OTHER:
+            typ = "other"
+
+        r = "r" if self.mode & ACL_READ else "-"
+        w = "w" if self.mode & ACL_WRITE else "-"
+        x = "x" if self.mode & ACL_EXECUTE else "-"
+
+        return f"{typ}:{qual}:{r}{w}{x}"
+
+
+class ACL(object):
+    def __init__(self, acl):
+        self.acl = acl
+
+    def __del__(self):
+        acl_free(self.acl)
+
+    def entries(self):
+        entry_id = ACL_FIRST_ENTRY
+        while True:
+            entry = acl_get_entry(self.acl, entry_id)
+            if entry is None:
+                break
+
+            permset = acl_get_permset(entry)
+
+            mode = 0
+            for m in (ACL_READ, ACL_WRITE, ACL_EXECUTE):
+                if acl_get_perm(permset, m):
+                    mode |= m
+
+            qualifier = None
+            tag = acl_get_tag_type(entry)
+
+            if tag == ACL_USER or tag == ACL_GROUP:
+                qual = acl_get_qualifier(entry)
+                qualifier = ctypes.cast(qual, ctypes.POINTER(ctypes.c_int))[0]
+
+            yield Entry(tag, qualifier, mode)
+
+            entry_id = ACL_NEXT_ENTRY
+
+    @classmethod
+    def from_path(cls, path, typ):
+        acl = acl_get_file(path, typ)
+        return cls(acl)
+
+
+def main():
+    import argparse
+    from pathlib import Path
+
+    parser = argparse.ArgumentParser()
+    parser.add_argument("path", help="File Path", type=Path)
+
+    args = parser.parse_args()
+
+    acl = ACL.from_path(args.path, ACL_TYPE_ACCESS)
+    for entry in acl.entries():
+        print(str(entry))
+
+    return 0
+
+
+if __name__ == "__main__":
+    sys.exit(main())
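The new acl.py is self-contained and doubles as a CLI (running the file directly with a path prints entries much like getfacl). As a library, a sketch of typical use, assuming a Linux host with libacl.so.1 installed and BitBake's lib directory on sys.path (the path is illustrative):

# Enumerate the access ACL of a file, similar to `getfacl`.
from bb import acl

file_acl = acl.ACL.from_path("/tmp", acl.ACL_TYPE_ACCESS)
for entry in file_acl.entries():
    print(entry)  # e.g. "user::rwx", "group::r-x", "other::rwx"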
diff --git a/bitbake/lib/bb/asyncrpc/__init__.py b/bitbake/lib/bb/asyncrpc/__init__.py
new file mode 100644
index 0000000000..a4371643d7
--- /dev/null
+++ b/bitbake/lib/bb/asyncrpc/__init__.py
@@ -0,0 +1,16 @@
+#
+# Copyright BitBake Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+
+from .client import AsyncClient, Client
+from .serv import AsyncServer, AsyncServerConnection
+from .connection import DEFAULT_MAX_CHUNK
+from .exceptions import (
+    ClientError,
+    ServerError,
+    ConnectionClosedError,
+    InvokeError,
+)
diff --git a/bitbake/lib/bb/asyncrpc/client.py b/bitbake/lib/bb/asyncrpc/client.py
new file mode 100644
index 0000000000..17b72033b9
--- /dev/null
+++ b/bitbake/lib/bb/asyncrpc/client.py
@@ -0,0 +1,271 @@
+#
+# Copyright BitBake Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import abc
+import asyncio
+import json
+import os
+import socket
+import sys
+import re
+import contextlib
+from threading import Thread
+from .connection import StreamConnection, WebsocketConnection, DEFAULT_MAX_CHUNK
+from .exceptions import ConnectionClosedError, InvokeError
+
+UNIX_PREFIX = "unix://"
+WS_PREFIX = "ws://"
+WSS_PREFIX = "wss://"
+
+ADDR_TYPE_UNIX = 0
+ADDR_TYPE_TCP = 1
+ADDR_TYPE_WS = 2
+
+WEBSOCKETS_MIN_VERSION = (9, 1)
+# Need websockets 10 with python 3.10+
+if sys.version_info >= (3, 10, 0):
+    WEBSOCKETS_MIN_VERSION = (10, 0)
+
+
+def parse_address(addr):
+    if addr.startswith(UNIX_PREFIX):
+        return (ADDR_TYPE_UNIX, (addr[len(UNIX_PREFIX) :],))
+    elif addr.startswith(WS_PREFIX) or addr.startswith(WSS_PREFIX):
+        return (ADDR_TYPE_WS, (addr,))
+    else:
+        m = re.match(r"\[(?P<host>[^\]]*)\]:(?P<port>\d+)$", addr)
+        if m is not None:
+            host = m.group("host")
+            port = m.group("port")
+        else:
+            host, port = addr.split(":")
+
+        return (ADDR_TYPE_TCP, (host, int(port)))
+
+
+class AsyncClient(object):
+    def __init__(
+        self,
+        proto_name,
+        proto_version,
+        logger,
+        timeout=30,
+        server_headers=False,
+        headers={},
+    ):
+        self.socket = None
+        self.max_chunk = DEFAULT_MAX_CHUNK
+        self.proto_name = proto_name
+        self.proto_version = proto_version
+        self.logger = logger
+        self.timeout = timeout
+        self.needs_server_headers = server_headers
+        self.server_headers = {}
+        self.headers = headers
+
+    async def connect_tcp(self, address, port):
+        async def connect_sock():
+            reader, writer = await asyncio.open_connection(address, port)
+            return StreamConnection(reader, writer, self.timeout, self.max_chunk)
+
+        self._connect_sock = connect_sock
+
+    async def connect_unix(self, path):
+        async def connect_sock():
+            # AF_UNIX has path length issues so chdir here to workaround
+            cwd = os.getcwd()
+            try:
+                os.chdir(os.path.dirname(path))
+                # The socket must be opened synchronously so that CWD doesn't get
+                # changed out from underneath us so we pass as a sock into asyncio
+                sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM, 0)
+                sock.connect(os.path.basename(path))
+            finally:
+                os.chdir(cwd)
+            reader, writer = await asyncio.open_unix_connection(sock=sock)
+            return StreamConnection(reader, writer, self.timeout, self.max_chunk)
+
+        self._connect_sock = connect_sock
+
+    async def connect_websocket(self, uri):
+        import websockets
+
+        try:
+            version = tuple(
+                int(v)
+                for v in websockets.__version__.split(".")[
+                    0 : len(WEBSOCKETS_MIN_VERSION)
+                ]
+            )
+        except ValueError:
+            raise ImportError(
+                f"Unable to parse websockets version '{websockets.__version__}'"
+            )
+
+        if version < WEBSOCKETS_MIN_VERSION:
+            min_ver_str = ".".join(str(v) for v in WEBSOCKETS_MIN_VERSION)
+            raise ImportError(
+                f"Websockets version {websockets.__version__} is less than minimum required version {min_ver_str}"
+            )
+
+        async def connect_sock():
+            try:
+                websocket = await websockets.connect(
+                    uri,
+                    ping_interval=None,
+                    open_timeout=self.timeout,
+                )
+            except asyncio.exceptions.TimeoutError:
+                raise ConnectionError("Timeout while connecting to websocket")
+            except (OSError, websockets.InvalidHandshake, websockets.InvalidURI) as exc:
+                raise ConnectionError(f"Could not connect to websocket: {exc}") from exc
+            return WebsocketConnection(websocket, self.timeout)
+
+        self._connect_sock = connect_sock
+
+    async def setup_connection(self):
+        # Send headers
+        await self.socket.send("%s %s" % (self.proto_name, self.proto_version))
+        await self.socket.send(
+            "needs-headers: %s" % ("true" if self.needs_server_headers else "false")
+        )
+        for k, v in self.headers.items():
+            await self.socket.send("%s: %s" % (k, v))
+
+        # End of headers
+        await self.socket.send("")
+
+        self.server_headers = {}
+        if self.needs_server_headers:
+            while True:
+                line = await self.socket.recv()
+                if not line:
+                    # End headers
+                    break
+                tag, value = line.split(":", 1)
+                self.server_headers[tag.lower()] = value.strip()
+
+    async def get_header(self, tag, default):
+        await self.connect()
+        return self.server_headers.get(tag, default)
+
+    async def connect(self):
+        if self.socket is None:
+            self.socket = await self._connect_sock()
+            await self.setup_connection()
+
+    async def disconnect(self):
+        if self.socket is not None:
+            await self.socket.close()
+            self.socket = None
+
+    async def close(self):
+        await self.disconnect()
+
+    async def _send_wrapper(self, proc):
+        count = 0
+        while True:
+            try:
+                await self.connect()
+                return await proc()
+            except (
+                OSError,
+                ConnectionError,
+                ConnectionClosedError,
+                json.JSONDecodeError,
+                UnicodeDecodeError,
+            ) as e:
+                self.logger.warning("Error talking to server: %s" % e)
+                if count >= 3:
+                    if not isinstance(e, ConnectionError):
+                        raise ConnectionError(str(e))
+                    raise e
+                await self.close()
+                count += 1
+
+    def check_invoke_error(self, msg):
+        if isinstance(msg, dict) and "invoke-error" in msg:
+            raise InvokeError(msg["invoke-error"]["message"])
+
+    async def invoke(self, msg):
+        async def proc():
+            await self.socket.send_message(msg)
+            return await self.socket.recv_message()
+
+        result = await self._send_wrapper(proc)
+        self.check_invoke_error(result)
+        return result
+
+    async def ping(self):
+        return await self.invoke({"ping": {}})
+
+    async def __aenter__(self):
+        return self
+
+    async def __aexit__(self, exc_type, exc_value, traceback):
+        await self.close()
+
+
+class Client(object):
+    def __init__(self):
+        self.client = self._get_async_client()
+        self.loop = asyncio.new_event_loop()
+
+        # Override any pre-existing loop.
+        # Without this, the PR server export selftest triggers a hang
+        # when running with Python 3.7. The drawback is that there is
+        # potential for issues if the PR and hash equiv (or some new)
+        # clients need to both be instantiated in the same process.
+        # This should be revisited if/when Python 3.9 becomes the
+        # minimum required version for BitBake, as it seems not
+        # required (but harmless) with it.
+        asyncio.set_event_loop(self.loop)
+
+        self._add_methods("connect_tcp", "ping")
+
+    @abc.abstractmethod
+    def _get_async_client(self):
+        pass
+
+    def _get_downcall_wrapper(self, downcall):
+        def wrapper(*args, **kwargs):
+            return self.loop.run_until_complete(downcall(*args, **kwargs))
+
+        return wrapper
+
+    def _add_methods(self, *methods):
+        for m in methods:
+            downcall = getattr(self.client, m)
+            setattr(self, m, self._get_downcall_wrapper(downcall))
+
+    def connect_unix(self, path):
+        self.loop.run_until_complete(self.client.connect_unix(path))
+        self.loop.run_until_complete(self.client.connect())
+
+    @property
+    def max_chunk(self):
+        return self.client.max_chunk
+
+    @max_chunk.setter
+    def max_chunk(self, value):
+        self.client.max_chunk = value
+
+    def disconnect(self):
+        self.loop.run_until_complete(self.client.close())
+
+    def close(self):
+        if self.loop:
+            self.loop.run_until_complete(self.client.close())
+            self.loop.run_until_complete(self.loop.shutdown_asyncgens())
+            self.loop.close()
+        self.loop = None
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        self.close()
+        return False
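The parse_address() helper above accepts three address forms, one per transport. A quick sketch of the mapping, derived directly from the code in this file (host names and ports are illustrative):

from bb.asyncrpc.client import (
    parse_address, ADDR_TYPE_UNIX, ADDR_TYPE_TCP, ADDR_TYPE_WS,
)

# unix:// prefix selects an AF_UNIX socket path
assert parse_address("unix:///run/hashserv.sock") == (ADDR_TYPE_UNIX, ("/run/hashserv.sock",))
# ws:// (or wss://) is passed through whole as a websocket URI
assert parse_address("ws://server.example:8686") == (ADDR_TYPE_WS, ("ws://server.example:8686",))
# bracketed IPv6 and plain host:port both become TCP (host, port) tuples
assert parse_address("[::1]:8686") == (ADDR_TYPE_TCP, ("::1", 8686))
assert parse_address("server.example:8686") == (ADDR_TYPE_TCP, ("server.example", 8686))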
diff --git a/bitbake/lib/bb/asyncrpc/connection.py b/bitbake/lib/bb/asyncrpc/connection.py
new file mode 100644
index 0000000000..7f0cf6ba96
--- /dev/null
+++ b/bitbake/lib/bb/asyncrpc/connection.py
@@ -0,0 +1,146 @@
+#
+# Copyright BitBake Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import asyncio
+import itertools
+import json
+from datetime import datetime
+from .exceptions import ClientError, ConnectionClosedError
+
+
+# The Python async server defaults to a 64K receive buffer, so we hardcode our
+# maximum chunk size. It would be better if the client and server reported to
+# each other what the maximum chunk sizes were, but that will slow down the
+# connection setup with a round trip delay so I'd rather not do that unless it
+# is necessary
+DEFAULT_MAX_CHUNK = 32 * 1024
+
+
+def chunkify(msg, max_chunk):
+    if len(msg) < max_chunk - 1:
+        yield "".join((msg, "\n"))
+    else:
+        yield "".join((json.dumps({"chunk-stream": None}), "\n"))
+
+        args = [iter(msg)] * (max_chunk - 1)
+        for m in map("".join, itertools.zip_longest(*args, fillvalue="")):
+            yield "".join(itertools.chain(m, "\n"))
+        yield "\n"
+
+
+def json_serialize(obj):
+    if isinstance(obj, datetime):
+        return obj.isoformat()
+    raise TypeError("Type %s not serializeable" % type(obj))
+
+
+class StreamConnection(object):
+    def __init__(self, reader, writer, timeout, max_chunk=DEFAULT_MAX_CHUNK):
+        self.reader = reader
+        self.writer = writer
+        self.timeout = timeout
+        self.max_chunk = max_chunk
+
+    @property
+    def address(self):
+        return self.writer.get_extra_info("peername")
+
+    async def send_message(self, msg):
+        for c in chunkify(json.dumps(msg, default=json_serialize), self.max_chunk):
+            self.writer.write(c.encode("utf-8"))
+        await self.writer.drain()
+
+    async def recv_message(self):
+        l = await self.recv()
+
+        m = json.loads(l)
+        if not m:
+            return m
+
+        if "chunk-stream" in m:
+            lines = []
+            while True:
+                l = await self.recv()
+                if not l:
+                    break
+                lines.append(l)
+
+            m = json.loads("".join(lines))
+
+        return m
+
+    async def send(self, msg):
+        self.writer.write(("%s\n" % msg).encode("utf-8"))
+        await self.writer.drain()
+
+    async def recv(self):
+        if self.timeout < 0:
+            line = await self.reader.readline()
+        else:
+            try:
+                line = await asyncio.wait_for(self.reader.readline(), self.timeout)
+            except asyncio.TimeoutError:
+                raise ConnectionError("Timed out waiting for data")
+
+        if not line:
+            raise ConnectionClosedError("Connection closed")
+
+        line = line.decode("utf-8")
+
+        if not line.endswith("\n"):
+            raise ConnectionError("Bad message %r" % (line))
+
+        return line.rstrip()
+
+    async def close(self):
+        self.reader = None
+        if self.writer is not None:
+            self.writer.close()
+            self.writer = None
+
+
+class WebsocketConnection(object):
+    def __init__(self, socket, timeout):
+        self.socket = socket
+        self.timeout = timeout
+
+    @property
+    def address(self):
+        return ":".join(str(s) for s in self.socket.remote_address)
+
+    async def send_message(self, msg):
+        await self.send(json.dumps(msg, default=json_serialize))
+
+    async def recv_message(self):
+        m = await self.recv()
+        return json.loads(m)
+
+    async def send(self, msg):
+        import websockets.exceptions
+
+        try:
+            await self.socket.send(msg)
+        except websockets.exceptions.ConnectionClosed:
+            raise ConnectionClosedError("Connection closed")
+
+    async def recv(self):
+        import websockets.exceptions
+
+        try:
+            if self.timeout < 0:
+                return await self.socket.recv()
+
+            try:
+                return await asyncio.wait_for(self.socket.recv(), self.timeout)
+            except asyncio.TimeoutError:
+                raise ConnectionError("Timed out waiting for data")
+        except websockets.exceptions.ConnectionClosed:
+            raise ConnectionClosedError("Connection closed")
+
+    async def close(self):
+        if self.socket is not None:
+            await self.socket.close()
+            self.socket = None
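The chunkify() framing above is worth seeing in action: a message that fits in one chunk is sent as a single newline-terminated line, while a larger one is announced with a {"chunk-stream": null} marker, split into newline-terminated slices, and terminated by a bare newline. A sketch, using only the function shown (the payload is illustrative):

from bb.asyncrpc.connection import chunkify

msg = '{"data": "' + "x" * 100 + '"}'
chunks = list(chunkify(msg, 32))
print(chunks[0])            # '{"chunk-stream": null}\n' -- stream marker
print(chunks[-1] == "\n")   # True: a bare newline ends the stream
body = "".join(c.rstrip("\n") for c in chunks[1:-1])
print(body == msg)          # True: the receiver reassembles by joining lines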
diff --git a/bitbake/lib/bb/asyncrpc/exceptions.py b/bitbake/lib/bb/asyncrpc/exceptions.py
new file mode 100644
index 0000000000..ae1043a38b
--- /dev/null
+++ b/bitbake/lib/bb/asyncrpc/exceptions.py
@@ -0,0 +1,21 @@
+#
+# Copyright BitBake Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+
+class ClientError(Exception):
+    pass
+
+
+class InvokeError(Exception):
+    pass
+
+
+class ServerError(Exception):
+    pass
+
+
+class ConnectionClosedError(Exception):
+    pass
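Of these, InvokeError is the one exception type that round-trips over the connection: serv.py serializes it as an "invoke-error" message instead of a normal response, and the client re-raises it locally in check_invoke_error(). A sketch of that convention as implemented in client.py above (the message payload is illustrative):

from bb.asyncrpc import InvokeError

msg = {"invoke-error": {"message": "no such stream"}}
# Client side: re-raise a server-side handler failure locally.
if isinstance(msg, dict) and "invoke-error" in msg:
    raise InvokeError(msg["invoke-error"]["message"])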
diff --git a/bitbake/lib/bb/asyncrpc/serv.py b/bitbake/lib/bb/asyncrpc/serv.py
new file mode 100644
index 0000000000..667217c5c1
--- /dev/null
+++ b/bitbake/lib/bb/asyncrpc/serv.py
@@ -0,0 +1,410 @@
+#
+# Copyright BitBake Contributors
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import abc
+import asyncio
+import json
+import os
+import signal
+import socket
+import sys
+import multiprocessing
+import logging
+from .connection import StreamConnection, WebsocketConnection
+from .exceptions import ClientError, ServerError, ConnectionClosedError, InvokeError
+
+
+class ClientLoggerAdapter(logging.LoggerAdapter):
+    def process(self, msg, kwargs):
+        return f"[Client {self.extra['address']}] {msg}", kwargs
+
+
+class AsyncServerConnection(object):
+    # If a handler returns this object (e.g. `return self.NO_RESPONSE`), no
+    # return message will automatically be sent back to the client
+    NO_RESPONSE = object()
+
+    def __init__(self, socket, proto_name, logger):
+        self.socket = socket
+        self.proto_name = proto_name
+        self.handlers = {
+            "ping": self.handle_ping,
+        }
+        self.logger = ClientLoggerAdapter(
+            logger,
+            {
+                "address": socket.address,
+            },
+        )
+        self.client_headers = {}
+
+    async def close(self):
+        await self.socket.close()
+
+    async def handle_headers(self, headers):
+        return {}
+
+    async def process_requests(self):
+        try:
+            self.logger.info("Client %r connected" % (self.socket.address,))
+
+            # Read protocol and version
+            client_protocol = await self.socket.recv()
+            if not client_protocol:
+                return
+
+            (client_proto_name, client_proto_version) = client_protocol.split()
+            if client_proto_name != self.proto_name:
+                self.logger.debug("Rejecting invalid protocol %s" % (self.proto_name))
+                return
+
+            self.proto_version = tuple(int(v) for v in client_proto_version.split("."))
+            if not self.validate_proto_version():
+                self.logger.debug(
+                    "Rejecting invalid protocol version %s" % (client_proto_version)
+                )
+                return
+
+            # Read headers
+            self.client_headers = {}
+            while True:
+                header = await self.socket.recv()
+                if not header:
+                    # Empty line. End of headers
+                    break
+                tag, value = header.split(":", 1)
+                self.client_headers[tag.lower()] = value.strip()
+
+            if self.client_headers.get("needs-headers", "false") == "true":
+                for k, v in (await self.handle_headers(self.client_headers)).items():
+                    await self.socket.send("%s: %s" % (k, v))
+                await self.socket.send("")
+
+            # Handle messages
+            while True:
+                d = await self.socket.recv_message()
+                if d is None:
+                    break
+                try:
+                    response = await self.dispatch_message(d)
+                except InvokeError as e:
+                    await self.socket.send_message(
+                        {"invoke-error": {"message": str(e)}}
+                    )
+                    break
+
+                if response is not self.NO_RESPONSE:
+                    await self.socket.send_message(response)
+
+        except ConnectionClosedError as e:
+            self.logger.info(str(e))
+        except (ClientError, ConnectionError) as e:
+            self.logger.error(str(e))
+        finally:
+            await self.close()
+
+    async def dispatch_message(self, msg):
+        for k in self.handlers.keys():
+            if k in msg:
+                self.logger.debug("Handling %s" % k)
+                return await self.handlers[k](msg[k])
+
+        raise ClientError("Unrecognized command %r" % msg)
+
+    async def handle_ping(self, request):
+        return {"alive": True}
+
+
+class StreamServer(object):
+    def __init__(self, handler, logger):
+        self.handler = handler
+        self.logger = logger
+        self.closed = False
+
+    async def handle_stream_client(self, reader, writer):
+        # writer.transport.set_write_buffer_limits(0)
+        socket = StreamConnection(reader, writer, -1)
+        if self.closed:
+            await socket.close()
+            return
+
+        await self.handler(socket)
+
+    async def stop(self):
+        self.closed = True
+
+
+class TCPStreamServer(StreamServer):
+    def __init__(self, host, port, handler, logger, *, reuseport=False):
+        super().__init__(handler, logger)
+        self.host = host
+        self.port = port
+        self.reuseport = reuseport
+
+    def start(self, loop):
+        self.server = loop.run_until_complete(
+            asyncio.start_server(
+                self.handle_stream_client,
+                self.host,
+                self.port,
+                reuse_port=self.reuseport,
+            )
+        )
+
+        for s in self.server.sockets:
+            self.logger.debug("Listening on %r" % (s.getsockname(),))
+            # Newer python does this automatically. Do it manually here for
+            # maximum compatibility
+            s.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1)
+            s.setsockopt(socket.SOL_TCP, socket.TCP_QUICKACK, 1)
+
+            # Enable keep alives. This prevents broken client connections
+            # from persisting on the server for long periods of time.
+            s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+            s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 30)
+            s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 15)
+            s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 4)
+
+        name = self.server.sockets[0].getsockname()
+        if self.server.sockets[0].family == socket.AF_INET6:
+            self.address = "[%s]:%d" % (name[0], name[1])
+        else:
+            self.address = "%s:%d" % (name[0], name[1])
+
+        return [self.server.wait_closed()]
+
+    async def stop(self):
+        await super().stop()
+        self.server.close()
+
+    def cleanup(self):
+        pass
+
+
+class UnixStreamServer(StreamServer):
+    def __init__(self, path, handler, logger):
+        super().__init__(handler, logger)
+        self.path = path
+
+    def start(self, loop):
+        cwd = os.getcwd()
+        try:
+            # Work around path length limits in AF_UNIX
+            os.chdir(os.path.dirname(self.path))
+            self.server = loop.run_until_complete(
+                asyncio.start_unix_server(
+                    self.handle_stream_client, os.path.basename(self.path)
+                )
+            )
+        finally:
+            os.chdir(cwd)
+
+        self.logger.debug("Listening on %r" % self.path)
+        self.address = "unix://%s" % os.path.abspath(self.path)
+        return [self.server.wait_closed()]
+
+    async def stop(self):
+        await super().stop()
+        self.server.close()
+
+    def cleanup(self):
+        os.unlink(self.path)
+
+
+class WebsocketsServer(object):
+    def __init__(self, host, port, handler, logger, *, reuseport=False):
+        self.host = host
+        self.port = port
+        self.handler = handler
+        self.logger = logger
+        self.reuseport = reuseport
+
+    def start(self, loop):
+        import websockets.server
+
+        self.server = loop.run_until_complete(
+            websockets.server.serve(
+                self.client_handler,
+                self.host,
+                self.port,
+                ping_interval=None,
+                reuse_port=self.reuseport,
+            )
+        )
+
+        for s in self.server.sockets:
+            self.logger.debug("Listening on %r" % (s.getsockname(),))
+
+            # Enable keep alives. This prevents broken client connections
+            # from persisting on the server for long periods of time.
+            s.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+            s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 30)
+            s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 15)
+            s.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 4)
+
+        name = self.server.sockets[0].getsockname()
+        if self.server.sockets[0].family == socket.AF_INET6:
+            self.address = "ws://[%s]:%d" % (name[0], name[1])
+        else:
+            self.address = "ws://%s:%d" % (name[0], name[1])
+
+        return [self.server.wait_closed()]
+
+    async def stop(self):
+        self.server.close()
+
+    def cleanup(self):
+        pass
+
+    async def client_handler(self, websocket):
+        socket = WebsocketConnection(websocket, -1)
+        await self.handler(socket)
+
+
+class AsyncServer(object):
+    def __init__(self, logger):
+        self.logger = logger
+        self.loop = None
+        self.run_tasks = []
+
+    def start_tcp_server(self, host, port, *, reuseport=False):
+        self.server = TCPStreamServer(
+            host,
+            port,
+            self._client_handler,
+            self.logger,
+            reuseport=reuseport,
+        )
+
+    def start_unix_server(self, path):
+        self.server = UnixStreamServer(path, self._client_handler, self.logger)
+
+    def start_websocket_server(self, host, port, reuseport=False):
+        self.server = WebsocketsServer(
+            host,
+            port,
+            self._client_handler,
+            self.logger,
+            reuseport=reuseport,
+        )
+
+    async def _client_handler(self, socket):
+        address = socket.address
+        try:
+            client = self.accept_client(socket)
+            await client.process_requests()
+        except Exception as e:
+            import traceback
+
+            self.logger.error(
+                "Error from client %s: %s" % (address, str(e)), exc_info=True
+            )
+            traceback.print_exc()
+        finally:
+            self.logger.debug("Client %s disconnected", address)
+            await socket.close()
+
+    @abc.abstractmethod
+    def accept_client(self, socket):
+        pass
+
+    async def stop(self):
+        self.logger.debug("Stopping server")
+        await self.server.stop()
+
+    def start(self):
+        tasks = self.server.start(self.loop)
+        self.address = self.server.address
+        return tasks
+
+    def signal_handler(self):
+        self.logger.debug("Got exit signal")
+        self.loop.create_task(self.stop())
+
+    def _serve_forever(self, tasks):
+        try:
+            self.loop.add_signal_handler(signal.SIGTERM, self.signal_handler)
+            self.loop.add_signal_handler(signal.SIGINT, self.signal_handler)
+            self.loop.add_signal_handler(signal.SIGQUIT, self.signal_handler)
+            signal.pthread_sigmask(signal.SIG_UNBLOCK, [signal.SIGTERM])
+
+            self.loop.run_until_complete(asyncio.gather(*tasks))
+
+            self.logger.debug("Server shutting down")
+        finally:
+            self.server.cleanup()
+
+    def serve_forever(self):
+        """
+        Serve requests in the current process
+        """
+        self._create_loop()
+        tasks = self.start()
+        self._serve_forever(tasks)
+        self.loop.close()
+
+    def _create_loop(self):
+        # Create loop and override any loop that may have existed in
+        # a parent process. It is possible that the usecases of
+        # serve_forever might be constrained enough to allow using
+        # get_event_loop here, but better safe than sorry for now.
+        self.loop = asyncio.new_event_loop()
+        asyncio.set_event_loop(self.loop)
+
+    def serve_as_process(self, *, prefunc=None, args=(), log_level=None):
+        """
+        Serve requests in a child process
+        """
+
+        def run(queue):
+            # Create loop and override any loop that may have existed
+            # in a parent process. Without doing this and instead
+            # using get_event_loop, at the very minimum the hashserv
+            # unit tests will hang when running the second test.
+            # This happens since get_event_loop in the spawned server
+            # process for the second testcase ends up with the loop
+            # from the hashserv client created in the unit test process
+            # when running the first testcase. The problem is somewhat
+            # more general, though, as any potential use of asyncio in
+            # Cooker could create a loop that needs to be replaced in
+            # this new process.
+            self._create_loop()
+            try:
+                self.address = None
+                tasks = self.start()
+            finally:
+                # Always put the server address to wake up the parent task
+                queue.put(self.address)
+                queue.close()
+
+            if prefunc is not None:
+                prefunc(self, *args)
+
+            if log_level is not None:
+                self.logger.setLevel(log_level)
+
+            self._serve_forever(tasks)
+
+            self.loop.run_until_complete(self.loop.shutdown_asyncgens())
+            self.loop.close()
+
+        queue = multiprocessing.Queue()
+
+        # Temporarily block SIGTERM. The server process will inherit this
+        # block which will ensure it doesn't receive the SIGTERM until the
+        # handler is ready for it
+        mask = signal.pthread_sigmask(signal.SIG_BLOCK, [signal.SIGTERM])
+        try:
+            self.process = multiprocessing.Process(target=run, args=(queue,))
+            self.process.start()
+
+            self.address = queue.get()
+            queue.close()
+            queue.join_thread()
+
+            return self.process
+        finally:
+            signal.pthread_sigmask(signal.SIG_SETMASK, mask)
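The real users of this framework are hashserv and the PR server, but the minimum amount of subclassing needed is small: a connection class that registers handlers and validates the protocol version, and a server class that implements accept_client(). A hedged sketch under those assumptions (the EchoConnection/EchoServer names and the "ECHO" protocol are hypothetical, invented for illustration):

import logging
from bb.asyncrpc import AsyncServer, AsyncServerConnection

class EchoConnection(AsyncServerConnection):
    def __init__(self, socket, logger):
        super().__init__(socket, "ECHO", logger)
        # Register a message handler alongside the built-in "ping"
        self.handlers["echo"] = self.handle_echo

    def validate_proto_version(self):
        # Called by process_requests() after parsing the client banner
        return self.proto_version == (1, 0)

    async def handle_echo(self, request):
        return {"echo": request.get("msg", "")}

class EchoServer(AsyncServer):
    def accept_client(self, socket):
        return EchoConnection(socket, self.logger)

server = EchoServer(logging.getLogger("echo"))
server.start_tcp_server("127.0.0.1", 0)  # port 0: let the OS pick
server.serve_forever()                   # blocks until SIGTERM/SIGINT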
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
index f4f897e41a..40839a81b5 100644
--- a/bitbake/lib/bb/build.py
+++ b/bitbake/lib/bb/build.py
@@ -20,10 +20,12 @@ import itertools
 import time
 import re
 import stat
+import datetime
 import bb
 import bb.msg
 import bb.process
 import bb.progress
+from io import StringIO
 from bb import data, event, utils

 bblogger = logging.getLogger('BitBake')
@@ -176,7 +178,9 @@ class StdoutNoopContextManager:

     @property
     def name(self):
-        return sys.stdout.name
+        if "name" in dir(sys.stdout):
+            return sys.stdout.name
+        return "<mem>"


 def exec_func(func, d, dirs = None):
@@ -193,6 +197,8 @@ def exec_func(func, d, dirs = None):
         for cdir in d.expand(cleandirs).split():
             bb.utils.remove(cdir, True)
             bb.utils.mkdirhier(cdir)
+            if cdir == oldcwd:
+                os.chdir(cdir)

     if flags and dirs is None:
         dirs = flags.get('dirs')
@@ -295,9 +301,25 @@ def exec_func_python(func, d, runfile, cwd=None):
         lineno = int(d.getVarFlag(func, "lineno", False))
         bb.methodpool.insert_method(func, text, fn, lineno - 1)

-        comp = utils.better_compile(code, func, "exec_python_func() autogenerated")
-        utils.better_exec(comp, {"d": d}, code, "exec_python_func() autogenerated")
+        if verboseStdoutLogging:
+            sys.stdout.flush()
+            sys.stderr.flush()
+            currout = sys.stdout
+            currerr = sys.stderr
+            sys.stderr = sys.stdout = execio = StringIO()
+        comp = utils.better_compile(code, func, "exec_func_python() autogenerated")
+        utils.better_exec(comp, {"d": d}, code, "exec_func_python() autogenerated")
     finally:
+        if verboseStdoutLogging:
+            execio.flush()
+            logger.plain("%s" % execio.getvalue())
+            sys.stdout = currout
+            sys.stderr = currerr
+            execio.close()
+        # We want any stdout/stderr to be printed before any other log messages to make debugging
+        # more accurate. In some cases we seem to lose stdout/stderr entirely in logging tests without this.
+        sys.stdout.flush()
+        sys.stderr.flush()
         bb.debug(2, "Python function %s finished" % func)

     if cwd and olddir:
@@ -375,7 +397,7 @@ def create_progress_handler(func, progress, logfile, d):
         # Use specified regex
         return bb.progress.OutOfProgressHandler(d, regex=progress.split(':', 1)[1], outfile=logfile)
     elif progress.startswith("custom:"):
-        # Use a custom progress handler that was injected via OE_EXTRA_IMPORTS or __builtins__
+        # Use a custom progress handler that was injected via other means
         import functools
         from types import ModuleType

@@ -436,7 +458,11 @@ exit $ret
     if fakerootcmd:
         cmd = [fakerootcmd, runfile]

-    if verboseStdoutLogging:
+    # We only want to output to logger via LogTee if stdout is sys.__stdout__ (which will either
+    # be real stdout or subprocess PIPE or similar). In other cases we are being run "recursively",
+    # ie. inside another function, in which case stdout is already being captured so we don't
+    # want to Tee here as output would be printed twice, and out of order.
+    if verboseStdoutLogging and sys.stdout == sys.__stdout__:
         logfile = LogTee(logger, StdoutNoopContextManager())
     else:
         logfile = StdoutNoopContextManager()
@@ -565,10 +591,8 @@ exit $ret
 def _task_data(fn, task, d):
     localdata = bb.data.createCopy(d)
     localdata.setVar('BB_FILENAME', fn)
-    localdata.setVar('BB_CURRENTTASK', task[3:])
     localdata.setVar('OVERRIDES', 'task-%s:%s' %
                      (task[3:].replace('_', '-'), d.getVar('OVERRIDES', False)))
-    localdata.finalize()
     bb.data.expandKeys(localdata)
     return localdata

@@ -579,7 +603,7 @@ def _exec_task(fn, task, d, quieterr):
     running it with its own local metadata, and with some useful variables set.
     """
     if not d.getVarFlag(task, 'task', False):
-        event.fire(TaskInvalid(task, d), d)
+        event.fire(TaskInvalid(task, fn, d), d)
         logger.error("No such task: %s" % task)
         return 1

@@ -615,7 +639,8 @@ def _exec_task(fn, task, d, quieterr):
     logorder = os.path.join(tempdir, 'log.task_order')
     try:
         with open(logorder, 'a') as logorderfile:
-            logorderfile.write('{0} ({1}): {2}\n'.format(task, os.getpid(), logbase))
+            timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S.%f")
+            logorderfile.write('{0} {1} ({2}): {3}\n'.format(timestamp, task, os.getpid(), logbase))
     except OSError:
         logger.exception("Opening log file '%s'", logorder)
         pass
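With the change above, each log.task_order entry gains a sortable timestamp prefix. A sketch of the resulting line format, built from the same strftime pattern (task name, pid, and log base are illustrative):

import datetime, os

timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S.%f")
line = '{0} {1} ({2}): {3}\n'.format(timestamp, "do_compile", os.getpid(), "log.do_compile.1234")
# e.g. "20250101-101500.123456 do_compile (4242): log.do_compile.1234"
print(line, end="")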
@@ -682,47 +707,55 @@ def _exec_task(fn, task, d, quieterr):
     try:
         try:
             event.fire(TaskStarted(task, fn, logfn, flags, localdata), localdata)
-        except (bb.BBHandledException, SystemExit):
-            return 1

-        try:
             for func in (prefuncs or '').split():
                 exec_func(func, localdata)
             exec_func(task, localdata)
             for func in (postfuncs or '').split():
                 exec_func(func, localdata)
-        except bb.BBHandledException:
-            event.fire(TaskFailed(task, fn, logfn, localdata, True), localdata)
-            return 1
-        except Exception as exc:
-            if quieterr:
-                event.fire(TaskFailedSilent(task, fn, logfn, localdata), localdata)
-            else:
-                errprinted = errchk.triggered
+        finally:
+            # Need to flush and close the logs before sending events where the
+            # UI may try to look at the logs.
+            sys.stdout.flush()
+            sys.stderr.flush()
+
+            bblogger.removeHandler(handler)
+
+            # Restore the backup fds
+            os.dup2(osi[0], osi[1])
+            os.dup2(oso[0], oso[1])
+            os.dup2(ose[0], ose[1])
+
+            # Close the backup fds
+            os.close(osi[0])
+            os.close(oso[0])
+            os.close(ose[0])
+
+            logfile.close()
+            if os.path.exists(logfn) and os.path.getsize(logfn) == 0:
+                logger.debug2("Zero size logfn %s, removing", logfn)
+                bb.utils.remove(logfn)
+                bb.utils.remove(loglink)
+    except (Exception, SystemExit) as exc:
+        handled = False
+        if isinstance(exc, bb.BBHandledException):
+            handled = True
+
+        if quieterr:
+            if not handled:
+                logger.warning(str(exc))
+            event.fire(TaskFailedSilent(task, fn, logfn, localdata), localdata)
+        else:
+            errprinted = errchk.triggered
+            # If the output is already on stdout, we've printed the information in the
+            # logs once already so don't duplicate
+            if verboseStdoutLogging or handled:
+                errprinted = True
+            if not handled:
                 logger.error(str(exc))
             event.fire(TaskFailed(task, fn, logfn, localdata, errprinted), localdata)
         return 1
-    finally:
-        sys.stdout.flush()
-        sys.stderr.flush()
-
-        bblogger.removeHandler(handler)
-
-        # Restore the backup fds
-        os.dup2(osi[0], osi[1])
-        os.dup2(oso[0], oso[1])
-        os.dup2(ose[0], ose[1])
-
-        # Close the backup fds
-        os.close(osi[0])
-        os.close(oso[0])
-        os.close(ose[0])

-        logfile.close()
-        if os.path.exists(logfn) and os.path.getsize(logfn) == 0:
-            logger.debug2("Zero size logfn %s, removing", logfn)
-            bb.utils.remove(logfn)
-            bb.utils.remove(loglink)
     event.fire(TaskSucceeded(task, fn, logfn, localdata), localdata)

     if not localdata.getVarFlag(task, 'nostamp', False) and not localdata.getVarFlag(task, 'selfstamp', False):
@@ -760,132 +793,92 @@ def exec_task(fn, task, d, profile = False):
         event.fire(failedevent, d)
         return 1

-def stamp_internal(taskname, d, file_name, baseonly=False, noextra=False):
+def _get_cleanmask(taskname, mcfn):
     """
-    Internal stamp helper function
-    Makes sure the stamp directory exists
+    Internal stamp helper function to generate stamp cleaning mask
     Returns the stamp path+filename

     In the bitbake core, d can be a CacheData and file_name will be set.
     When called in task context, d will be a data store, file_name will not be set
     """
-    taskflagname = taskname
-    if taskname.endswith("_setscene") and taskname != "do_setscene":
-        taskflagname = taskname.replace("_setscene", "")
-
-    if file_name:
-        stamp = d.stamp[file_name]
-        extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
-    else:
-        stamp = d.getVar('STAMP')
-        file_name = d.getVar('BB_FILENAME')
-        extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info') or ""
+    cleanmask = bb.parse.siggen.stampcleanmask_mcfn(taskname, mcfn)
+    taskflagname = taskname.replace("_setscene", "")
+    if cleanmask:
+        return [cleanmask, cleanmask.replace(taskflagname, taskflagname + "_setscene")]
+    return []
+
+def clean_stamp_mcfn(task, mcfn):
+    cleanmask = _get_cleanmask(task, mcfn)
+    for mask in cleanmask:
+        for name in glob.glob(mask):
+            # Preserve sigdata files in the stamps directory
+            if "sigdata" in name or "sigbasedata" in name:
+                continue
+            # Preserve taint files in the stamps directory
+            if name.endswith('.taint'):
+                continue
+            os.unlink(name)

-    if baseonly:
-        return stamp
-    if noextra:
-        extrainfo = ""
+def clean_stamp(task, d):
+    mcfn = d.getVar('BB_FILENAME')
+    clean_stamp_mcfn(task, mcfn)

-    if not stamp:
-        return
+def make_stamp_mcfn(task, mcfn):

-    stamp = bb.parse.siggen.stampfile(stamp, file_name, taskname, extrainfo)
+    basestamp = bb.parse.siggen.stampfile_mcfn(task, mcfn)

-    stampdir = os.path.dirname(stamp)
+    stampdir = os.path.dirname(basestamp)
     if cached_mtime_noerror(stampdir) == 0:
         bb.utils.mkdirhier(stampdir)

-    return stamp
+    clean_stamp_mcfn(task, mcfn)

-def stamp_cleanmask_internal(taskname, d, file_name):
-    """
-    Internal stamp helper function to generate stamp cleaning mask
-    Returns the stamp path+filename
+    # Remove the file and recreate to force timestamp
+    # change on broken NFS filesystems
+    if basestamp:
+        bb.utils.remove(basestamp)
+        open(basestamp, "w").close()

-    In the bitbake core, d can be a CacheData and file_name will be set.
-    When called in task context, d will be a data store, file_name will not be set
-    """
-    taskflagname = taskname
-    if taskname.endswith("_setscene") and taskname != "do_setscene":
-        taskflagname = taskname.replace("_setscene", "")
-
-    if file_name:
-        stamp = d.stampclean[file_name]
-        extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
-    else:
-        stamp = d.getVar('STAMPCLEAN')
-        file_name = d.getVar('BB_FILENAME')
-        extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info') or ""
+def make_stamp(task, d):
+    """
+    Creates/updates a stamp for a given task
+    """
+    mcfn = d.getVar('BB_FILENAME')

-    if not stamp:
-        return []
+    make_stamp_mcfn(task, mcfn)

-    cleanmask = bb.parse.siggen.stampcleanmask(stamp, file_name, taskname, extrainfo)
+    # If we're in task context, write out a signature file for each task
+    # as it completes
+    if not task.endswith("_setscene"):
+        stampbase = bb.parse.siggen.stampfile_base(mcfn)
+        bb.parse.siggen.dump_sigtask(mcfn, task, stampbase, True)

-    return [cleanmask, cleanmask.replace(taskflagname, taskflagname + "_setscene")]

-def make_stamp(task, d, file_name = None):
-    """
-    Creates/updates a stamp for a given task
-    (d can be a data dict or dataCache)
-    """
-    cleanmask = stamp_cleanmask_internal(task, d, file_name)
+def find_stale_stamps(task, mcfn):
+    current = bb.parse.siggen.stampfile_mcfn(task, mcfn)
+    current2 = bb.parse.siggen.stampfile_mcfn(task + "_setscene", mcfn)
+    cleanmask = _get_cleanmask(task, mcfn)
+    found = []
     for mask in cleanmask:
         for name in glob.glob(mask):
-            # Preserve sigdata files in the stamps directory
             if "sigdata" in name or "sigbasedata" in name:
                 continue
-            # Preserve taint files in the stamps directory
             if name.endswith('.taint'):
                 continue
-            os.unlink(name)
-
-    stamp = stamp_internal(task, d, file_name)
-    # Remove the file and recreate to force timestamp
-    # change on broken NFS filesystems
-    if stamp:
-        bb.utils.remove(stamp)
-        open(stamp, "w").close()
-
-    # If we're in task context, write out a signature file for each task
-    # as it completes
-    if not task.endswith("_setscene") and task != "do_setscene" and not file_name:
-        stampbase = stamp_internal(task, d, None, True)
-        file_name = d.getVar('BB_FILENAME')
-        bb.parse.siggen.dump_sigtask(file_name, task, stampbase, True)
-
-def del_stamp(task, d, file_name = None):
-    """
-    Removes a stamp for a given task
-    (d can be a data dict or dataCache)
-    """
-    stamp = stamp_internal(task, d, file_name)
-    bb.utils.remove(stamp)
+            if name == current or name == current2:
+                continue
+            logger.debug2("Stampfile %s does not match %s or %s" % (name, current, current2))
+            found.append(name)
+    return found

-def write_taint(task, d, file_name = None):
+def write_taint(task, d):
     """
     Creates a "taint" file which will force the specified task and its
     dependents to be re-run the next time by influencing the value of its
     taskhash.
-    (d can be a data dict or dataCache)
     """
-    import uuid
-    if file_name:
-        taintfn = d.stamp[file_name] + '.' + task + '.taint'
-    else:
-        taintfn = d.getVar('STAMP') + '.' + task + '.taint'
-    bb.utils.mkdirhier(os.path.dirname(taintfn))
-    # The specific content of the taint file is not really important,
+    mcfn = d.getVar('BB_FILENAME')
+    bb.parse.siggen.invalidate_task(task, mcfn)
879 # we just need it to be random, so a random UUID is used
880 with open(taintfn, 'w') as taintf:
881 taintf.write(str(uuid.uuid4()))
882
883def stampfile(taskname, d, file_name = None, noextra=False):
884 """
885 Return the stamp for a given task
886 (d can be a data dict or dataCache)
887 """
888 return stamp_internal(taskname, d, file_name, noextra=noextra)
889 882
890def add_tasks(tasklist, d): 883def add_tasks(tasklist, d):
891 task_deps = d.getVar('_task_deps', False) 884 task_deps = d.getVar('_task_deps', False)
@@ -910,6 +903,11 @@ def add_tasks(tasklist, d):
910 task_deps[name] = {} 903 task_deps[name] = {}
911 if name in flags: 904 if name in flags:
912 deptask = d.expand(flags[name]) 905 deptask = d.expand(flags[name])
906 if name in ['noexec', 'fakeroot', 'nostamp']:
907 if deptask != '1':
908 bb.warn("In a future version of BitBake, setting the '{}' flag to something other than '1' "
909 "will result in the flag not being set. See YP bug #13808.".format(name))
910
913 task_deps[name][task] = deptask 911 task_deps[name][task] = deptask
914 getTask('mcdepends') 912 getTask('mcdepends')
915 getTask('depends') 913 getTask('depends')
@@ -934,9 +932,13 @@ def add_tasks(tasklist, d):
934 # don't assume holding a reference 932 # don't assume holding a reference
935 d.setVar('_task_deps', task_deps) 933 d.setVar('_task_deps', task_deps)
936 934
935def ensure_task_prefix(name):
936 if name[:3] != "do_":
937 name = "do_" + name
938 return name
939
937def addtask(task, before, after, d): 940def addtask(task, before, after, d):
938 if task[:3] != "do_": 941 task = ensure_task_prefix(task)
939 task = "do_" + task
940 942
941 d.setVarFlag(task, "task", 1) 943 d.setVarFlag(task, "task", 1)
942 bbtasks = d.getVar('__BBTASKS', False) or [] 944 bbtasks = d.getVar('__BBTASKS', False) or []
@@ -948,19 +950,20 @@ def addtask(task, before, after, d):
948 if after is not None: 950 if after is not None:
949 # set up deps for function 951 # set up deps for function
950 for entry in after.split(): 952 for entry in after.split():
953 entry = ensure_task_prefix(entry)
951 if entry not in existing: 954 if entry not in existing:
952 existing.append(entry) 955 existing.append(entry)
953 d.setVarFlag(task, "deps", existing) 956 d.setVarFlag(task, "deps", existing)
954 if before is not None: 957 if before is not None:
955 # set up things that depend on this func 958 # set up things that depend on this func
956 for entry in before.split(): 959 for entry in before.split():
960 entry = ensure_task_prefix(entry)
957 existing = d.getVarFlag(entry, "deps", False) or [] 961 existing = d.getVarFlag(entry, "deps", False) or []
958 if task not in existing: 962 if task not in existing:
959 d.setVarFlag(entry, "deps", [task] + existing) 963 d.setVarFlag(entry, "deps", [task] + existing)
960 964
961def deltask(task, d): 965def deltask(task, d):
962 if task[:3] != "do_": 966 task = ensure_task_prefix(task)
963 task = "do_" + task
964 967
965 bbtasks = d.getVar('__BBTASKS', False) or [] 968 bbtasks = d.getVar('__BBTASKS', False) or []
966 if task in bbtasks: 969 if task in bbtasks:
@@ -1008,6 +1011,8 @@ def tasksbetween(task_start, task_end, d):
1008 def follow_chain(task, endtask, chain=None): 1011 def follow_chain(task, endtask, chain=None):
1009 if not chain: 1012 if not chain:
1010 chain = [] 1013 chain = []
1014 if task in chain:
1015 bb.fatal("Circular task dependencies as %s depends on itself via the chain %s" % (task, " -> ".join(chain)))
1011 chain.append(task) 1016 chain.append(task)
1012 for othertask in tasks: 1017 for othertask in tasks:
1013 if othertask == task: 1018 if othertask == task:
@@ -1023,3 +1028,9 @@ def tasksbetween(task_start, task_end, d):
1023 chain.pop() 1028 chain.pop()
1024 follow_chain(task_start, task_end) 1029 follow_chain(task_start, task_end)
1025 return outtasks 1030 return outtasks
1031
1032def listtasks(d):
1033 """
1034 Return the list of tasks in the current recipe.
1035 """
1036 return tuple(d.getVar('__BBTASKS', False) or ())
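
The stamp rework above funnels all cleaning through _get_cleanmask() and clean_stamp_mcfn(). A minimal standalone sketch of that pattern follows, assuming hypothetical mask strings and stamp directory layout (BitBake's real masks come from bb.parse.siggen.stampcleanmask_mcfn()):

import glob
import os

def clean_stamps(cleanmasks):
    # Remove stamp files matching the glob masks while preserving
    # signature dumps and taint markers, as clean_stamp_mcfn() does.
    removed = []
    for mask in cleanmasks:
        for name in glob.glob(mask):
            if "sigdata" in name or "sigbasedata" in name:
                continue  # keep hash/signature data files
            if name.endswith(".taint"):
                continue  # keep taint markers so forced reruns survive
            os.unlink(name)
            removed.append(name)
    return removed

# Hypothetical masks in the shape stampcleanmask_mcfn() might return,
# covering both the task and its _setscene variant:
masks = ["tmp/stamps/foo-1.0.do_compile.*",
         "tmp/stamps/foo-1.0.do_compile_setscene.*"]
clean_stamps(masks)
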
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index aea2b8bc11..2361c5684d 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -19,14 +19,16 @@
19import os 19import os
20import logging 20import logging
21import pickle 21import pickle
22from collections import defaultdict, Mapping 22from collections import defaultdict
23from collections.abc import Mapping
23import bb.utils 24import bb.utils
24from bb import PrefixLoggerAdapter 25from bb import PrefixLoggerAdapter
25import re 26import re
27import shutil
26 28
27logger = logging.getLogger("BitBake.Cache") 29logger = logging.getLogger("BitBake.Cache")
28 30
29__cache_version__ = "154" 31__cache_version__ = "156"
30 32
31def getCacheFile(path, filename, mc, data_hash): 33def getCacheFile(path, filename, mc, data_hash):
32 mcspec = '' 34 mcspec = ''
@@ -53,12 +55,12 @@ class RecipeInfoCommon(object):
53 55
54 @classmethod 56 @classmethod
55 def pkgvar(cls, var, packages, metadata): 57 def pkgvar(cls, var, packages, metadata):
56 return dict((pkg, cls.depvar("%s_%s" % (var, pkg), metadata)) 58 return dict((pkg, cls.depvar("%s:%s" % (var, pkg), metadata))
57 for pkg in packages) 59 for pkg in packages)
58 60
59 @classmethod 61 @classmethod
60 def taskvar(cls, var, tasks, metadata): 62 def taskvar(cls, var, tasks, metadata):
61 return dict((task, cls.getvar("%s_task-%s" % (var, task), metadata)) 63 return dict((task, cls.getvar("%s:task-%s" % (var, task), metadata))
62 for task in tasks) 64 for task in tasks)
63 65
64 @classmethod 66 @classmethod
@@ -103,7 +105,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
103 105
104 self.tasks = metadata.getVar('__BBTASKS', False) 106 self.tasks = metadata.getVar('__BBTASKS', False)
105 107
106 self.basetaskhashes = self.taskvar('BB_BASEHASH', self.tasks, metadata) 108 self.basetaskhashes = metadata.getVar('__siggen_basehashes', False) or {}
107 self.hashfilename = self.getvar('BB_HASHFILENAME', metadata) 109 self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)
108 110
109 self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}} 111 self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}
@@ -126,6 +128,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
126 self.inherits = self.getvar('__inherit_cache', metadata, expand=False) 128 self.inherits = self.getvar('__inherit_cache', metadata, expand=False)
127 self.fakerootenv = self.getvar('FAKEROOTENV', metadata) 129 self.fakerootenv = self.getvar('FAKEROOTENV', metadata)
128 self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata) 130 self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata)
131 self.fakerootlogs = self.getvar('FAKEROOTLOGS', metadata)
129 self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata) 132 self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata)
130 self.extradepsfunc = self.getvar('calculate_extra_depends', metadata) 133 self.extradepsfunc = self.getvar('calculate_extra_depends', metadata)
131 134
@@ -163,6 +166,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
163 cachedata.fakerootenv = {} 166 cachedata.fakerootenv = {}
164 cachedata.fakerootnoenv = {} 167 cachedata.fakerootnoenv = {}
165 cachedata.fakerootdirs = {} 168 cachedata.fakerootdirs = {}
169 cachedata.fakerootlogs = {}
166 cachedata.extradepsfunc = {} 170 cachedata.extradepsfunc = {}
167 171
168 def add_cacheData(self, cachedata, fn): 172 def add_cacheData(self, cachedata, fn):
@@ -212,7 +216,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
212 216
213 # Collect files we may need for possible world-dep 217 # Collect files we may need for possible world-dep
214 # calculations 218 # calculations
215 if not self.not_world: 219 if not bb.utils.to_boolean(self.not_world):
216 cachedata.possible_world.append(fn) 220 cachedata.possible_world.append(fn)
217 #else: 221 #else:
218 # logger.debug2("EXCLUDE FROM WORLD: %s", fn) 222 # logger.debug2("EXCLUDE FROM WORLD: %s", fn)
@@ -231,17 +235,116 @@ class CoreRecipeInfo(RecipeInfoCommon):
231 cachedata.fakerootenv[fn] = self.fakerootenv 235 cachedata.fakerootenv[fn] = self.fakerootenv
232 cachedata.fakerootnoenv[fn] = self.fakerootnoenv 236 cachedata.fakerootnoenv[fn] = self.fakerootnoenv
233 cachedata.fakerootdirs[fn] = self.fakerootdirs 237 cachedata.fakerootdirs[fn] = self.fakerootdirs
238 cachedata.fakerootlogs[fn] = self.fakerootlogs
234 cachedata.extradepsfunc[fn] = self.extradepsfunc 239 cachedata.extradepsfunc[fn] = self.extradepsfunc
235 240
241
242class SiggenRecipeInfo(RecipeInfoCommon):
243 __slots__ = ()
244
245 classname = "SiggenRecipeInfo"
246 cachefile = "bb_cache_" + classname +".dat"
247 # we don't want to show this information in graph files so don't set cachefields
248 #cachefields = []
249
250 def __init__(self, filename, metadata):
251 self.siggen_gendeps = metadata.getVar("__siggen_gendeps", False)
252 self.siggen_varvals = metadata.getVar("__siggen_varvals", False)
253 self.siggen_taskdeps = metadata.getVar("__siggen_taskdeps", False)
254
255 @classmethod
256 def init_cacheData(cls, cachedata):
257 cachedata.siggen_taskdeps = {}
258 cachedata.siggen_gendeps = {}
259 cachedata.siggen_varvals = {}
260
261 def add_cacheData(self, cachedata, fn):
262 cachedata.siggen_gendeps[fn] = self.siggen_gendeps
263 cachedata.siggen_varvals[fn] = self.siggen_varvals
264 cachedata.siggen_taskdeps[fn] = self.siggen_taskdeps
265
266 # The siggen variable data is large and impacts:
267 # - bitbake's overall memory usage
268 # - the amount of data sent over IPC between parsing processes and the server
269 # - the size of the cache files on disk
270 # - the size of "sigdata" hash information files on disk
271 # The data consists of strings (some large) or frozenset lists of variables
272 # As such, we a) deduplicate the data here and b) pass references to the object on second
273 # access (e.g. over IPC or saving into pickle).
274
275 store = {}
276 save_map = {}
277 save_count = 1
278 restore_map = {}
279 restore_count = {}
280
281 @classmethod
282 def reset(cls):
283 # Needs to be called before starting new streamed data in a given process
284 # (e.g. writing out the cache again)
285 cls.save_map = {}
286 cls.save_count = 1
287 cls.restore_map = {}
288
289 @classmethod
290 def _save(cls, deps):
291 ret = []
292 if not deps:
293 return deps
294 for dep in deps:
295 fs = deps[dep]
296 if fs is None:
297 ret.append((dep, None, None))
298 elif fs in cls.save_map:
299 ret.append((dep, None, cls.save_map[fs]))
300 else:
301 cls.save_map[fs] = cls.save_count
302 ret.append((dep, fs, cls.save_count))
303 cls.save_count = cls.save_count + 1
304 return ret
305
306 @classmethod
307 def _restore(cls, deps, pid):
308 ret = {}
309 if not deps:
310 return deps
311 if pid not in cls.restore_map:
312 cls.restore_map[pid] = {}
313 map = cls.restore_map[pid]
314 for dep, fs, mapnum in deps:
315 if fs is None and mapnum is None:
316 ret[dep] = None
317 elif fs is None:
318 ret[dep] = map[mapnum]
319 else:
320 try:
321 fs = cls.store[fs]
322 except KeyError:
323 cls.store[fs] = fs
324 map[mapnum] = fs
325 ret[dep] = fs
326 return ret
327
328 def __getstate__(self):
329 ret = {}
330 for key in ["siggen_gendeps", "siggen_taskdeps", "siggen_varvals"]:
331 ret[key] = self._save(self.__dict__[key])
332 ret['pid'] = os.getpid()
333 return ret
334
335 def __setstate__(self, state):
336 pid = state['pid']
337 for key in ["siggen_gendeps", "siggen_taskdeps", "siggen_varvals"]:
338 setattr(self, key, self._restore(state[key], pid))
339
340
236def virtualfn2realfn(virtualfn): 341def virtualfn2realfn(virtualfn):
237 """ 342 """
238 Convert a virtual file name to a real one + the associated subclass keyword 343 Convert a virtual file name to a real one + the associated subclass keyword
239 """ 344 """
240 mc = "" 345 mc = ""
241 if virtualfn.startswith('mc:') and virtualfn.count(':') >= 2: 346 if virtualfn.startswith('mc:') and virtualfn.count(':') >= 2:
242 elems = virtualfn.split(':') 347 (_, mc, virtualfn) = virtualfn.split(':', 2)
243 mc = elems[1]
244 virtualfn = ":".join(elems[2:])
245 348
246 fn = virtualfn 349 fn = virtualfn
247 cls = "" 350 cls = ""
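
The _save()/_restore() pair in SiggenRecipeInfo above deduplicates large frozenset values by sending each distinct value once and an integer reference thereafter. A self-contained sketch of the same encoding, with module-level maps standing in for the class attributes (the real class additionally interns values via cls.store):

save_map = {}      # value -> integer id (sender side)
save_count = 1
restore_map = {}   # pid -> {integer id -> value} (receiver side)

def save(deps):
    # Encode {dep: frozenset} as [(dep, payload, id)]: the payload is
    # included the first time a value is seen, later only the id.
    global save_count
    ret = []
    for dep, fs in deps.items():
        if fs is None:
            ret.append((dep, None, None))
        elif fs in save_map:
            ret.append((dep, None, save_map[fs]))
        else:
            save_map[fs] = save_count
            ret.append((dep, fs, save_count))
            save_count += 1
    return ret

def restore(encoded, pid):
    # Rebuild {dep: frozenset}, resolving ids against values this pid
    # has already seen so duplicates share one object in memory.
    mapping = restore_map.setdefault(pid, {})
    ret = {}
    for dep, fs, num in encoded:
        if fs is None and num is None:
            ret[dep] = None
        elif fs is None:
            ret[dep] = mapping[num]
        else:
            mapping[num] = fs
            ret[dep] = fs
    return ret

a = restore(save({"do_compile": frozenset({"CC", "CFLAGS"})}), 1)
b = restore(save({"do_install": frozenset({"CC", "CFLAGS"})}), 1)
assert a["do_compile"] is b["do_install"]  # one shared object, not two
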
@@ -264,7 +367,7 @@ def realfn2virtual(realfn, cls, mc):
264 367
265def variant2virtual(realfn, variant): 368def variant2virtual(realfn, variant):
266 """ 369 """
267 Convert a real filename + the associated subclass keyword to a virtual filename 370 Convert a real filename + a variant to a virtual filename
268 """ 371 """
269 if variant == "": 372 if variant == "":
270 return realfn 373 return realfn
@@ -275,104 +378,26 @@ def variant2virtual(realfn, variant):
275 return "mc:" + elems[1] + ":" + realfn 378 return "mc:" + elems[1] + ":" + realfn
276 return "virtual:" + variant + ":" + realfn 379 return "virtual:" + variant + ":" + realfn
277 380
278def parse_recipe(bb_data, bbfile, appends, mc=''): 381#
279 """ 382# Cooker calls cacheValid on its recipe list, then either calls loadCached
280 Parse a recipe 383# from its main thread or parses from separate processes to generate an up to
281 """ 384# date cache
282 385#
283 chdir_back = False 386class Cache(object):
284
285 bb_data.setVar("__BBMULTICONFIG", mc)
286
287 # expand tmpdir to include this topdir
288 bb_data.setVar('TMPDIR', bb_data.getVar('TMPDIR') or "")
289 bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
290 oldpath = os.path.abspath(os.getcwd())
291 bb.parse.cached_mtime_noerror(bbfile_loc)
292
293 # The ConfHandler first looks if there is a TOPDIR and if not
294 # then it would call getcwd().
295 # Previously, we chdir()ed to bbfile_loc, called the handler
296 # and finally chdir()ed back, a couple of thousand times. We now
297 # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
298 if not bb_data.getVar('TOPDIR', False):
299 chdir_back = True
300 bb_data.setVar('TOPDIR', bbfile_loc)
301 try:
302 if appends:
303 bb_data.setVar('__BBAPPEND', " ".join(appends))
304 bb_data = bb.parse.handle(bbfile, bb_data)
305 if chdir_back:
306 os.chdir(oldpath)
307 return bb_data
308 except:
309 if chdir_back:
310 os.chdir(oldpath)
311 raise
312
313
314
315class NoCache(object):
316
317 def __init__(self, databuilder):
318 self.databuilder = databuilder
319 self.data = databuilder.data
320
321 def loadDataFull(self, virtualfn, appends):
322 """
323 Return a complete set of data for fn.
324 To do this, we need to parse the file.
325 """
326 logger.debug("Parsing %s (full)" % virtualfn)
327 (fn, virtual, mc) = virtualfn2realfn(virtualfn)
328 bb_data = self.load_bbfile(virtualfn, appends, virtonly=True)
329 return bb_data[virtual]
330
331 def load_bbfile(self, bbfile, appends, virtonly = False, mc=None):
332 """
333 Load and parse one .bb build file
334 Return the data and whether parsing resulted in the file being skipped
335 """
336
337 if virtonly:
338 (bbfile, virtual, mc) = virtualfn2realfn(bbfile)
339 bb_data = self.databuilder.mcdata[mc].createCopy()
340 bb_data.setVar("__ONLYFINALISE", virtual or "default")
341 datastores = parse_recipe(bb_data, bbfile, appends, mc)
342 return datastores
343
344 if mc is not None:
345 bb_data = self.databuilder.mcdata[mc].createCopy()
346 return parse_recipe(bb_data, bbfile, appends, mc)
347
348 bb_data = self.data.createCopy()
349 datastores = parse_recipe(bb_data, bbfile, appends)
350
351 for mc in self.databuilder.mcdata:
352 if not mc:
353 continue
354 bb_data = self.databuilder.mcdata[mc].createCopy()
355 newstores = parse_recipe(bb_data, bbfile, appends, mc)
356 for ns in newstores:
357 datastores["mc:%s:%s" % (mc, ns)] = newstores[ns]
358
359 return datastores
360
361class Cache(NoCache):
362 """ 387 """
363 BitBake Cache implementation 388 BitBake Cache implementation
364 """ 389 """
365 def __init__(self, databuilder, mc, data_hash, caches_array): 390 def __init__(self, databuilder, mc, data_hash, caches_array):
366 super().__init__(databuilder) 391 self.databuilder = databuilder
367 data = databuilder.data 392 self.data = databuilder.data
368 393
369 # Pass caches_array information into Cache Constructor 394 # Pass caches_array information into Cache Constructor
370 # It will be used later for deciding whether we 395 # It will be used later for deciding whether we
371 # need extra cache file dump/load support 396 # need extra cache file dump/load support
372 self.mc = mc 397 self.mc = mc
373 self.logger = PrefixLoggerAdapter("Cache: %s: " % (mc if mc else "default"), logger) 398 self.logger = PrefixLoggerAdapter("Cache: %s: " % (mc if mc else ''), logger)
374 self.caches_array = caches_array 399 self.caches_array = caches_array
375 self.cachedir = data.getVar("CACHE") 400 self.cachedir = self.data.getVar("CACHE")
376 self.clean = set() 401 self.clean = set()
377 self.checked = set() 402 self.checked = set()
378 self.depends_cache = {} 403 self.depends_cache = {}
@@ -382,20 +407,12 @@ class Cache(NoCache):
382 self.filelist_regex = re.compile(r'(?:(?<=:True)|(?<=:False))\s+') 407 self.filelist_regex = re.compile(r'(?:(?<=:True)|(?<=:False))\s+')
383 408
384 if self.cachedir in [None, '']: 409 if self.cachedir in [None, '']:
385 self.has_cache = False 410 bb.fatal("Please ensure CACHE is set to the cache directory for BitBake to use")
386 self.logger.info("Not using a cache. "
387 "Set CACHE = <directory> to enable.")
388 return
389
390 self.has_cache = True
391 411
392 def getCacheFile(self, cachefile): 412 def getCacheFile(self, cachefile):
393 return getCacheFile(self.cachedir, cachefile, self.mc, self.data_hash) 413 return getCacheFile(self.cachedir, cachefile, self.mc, self.data_hash)
394 414
395 def prepare_cache(self, progress): 415 def prepare_cache(self, progress):
396 if not self.has_cache:
397 return 0
398
399 loaded = 0 416 loaded = 0
400 417
401 self.cachefile = self.getCacheFile("bb_cache.dat") 418 self.cachefile = self.getCacheFile("bb_cache.dat")
@@ -424,7 +441,7 @@ class Cache(NoCache):
424 else: 441 else:
425 symlink = os.path.join(self.cachedir, "bb_cache.dat") 442 symlink = os.path.join(self.cachedir, "bb_cache.dat")
426 443
427 if os.path.exists(symlink): 444 if os.path.exists(symlink) or os.path.islink(symlink):
428 bb.utils.remove(symlink) 445 bb.utils.remove(symlink)
429 try: 446 try:
430 os.symlink(os.path.basename(self.cachefile), symlink) 447 os.symlink(os.path.basename(self.cachefile), symlink)
@@ -434,9 +451,6 @@ class Cache(NoCache):
434 return loaded 451 return loaded
435 452
436 def cachesize(self): 453 def cachesize(self):
437 if not self.has_cache:
438 return 0
439
440 cachesize = 0 454 cachesize = 0
441 for cache_class in self.caches_array: 455 for cache_class in self.caches_array:
442 cachefile = self.getCacheFile(cache_class.cachefile) 456 cachefile = self.getCacheFile(cache_class.cachefile)
@@ -498,11 +512,11 @@ class Cache(NoCache):
498 512
499 return len(self.depends_cache) 513 return len(self.depends_cache)
500 514
501 def parse(self, filename, appends): 515 def parse(self, filename, appends, layername):
502 """Parse the specified filename, returning the recipe information""" 516 """Parse the specified filename, returning the recipe information"""
503 self.logger.debug("Parsing %s", filename) 517 self.logger.debug("Parsing %s", filename)
504 infos = [] 518 infos = []
505 datastores = self.load_bbfile(filename, appends, mc=self.mc) 519 datastores = self.databuilder.parseRecipeVariants(filename, appends, mc=self.mc, layername=layername)
506 depends = [] 520 depends = []
507 variants = [] 521 variants = []
508 # Process the "real" fn last so we can store variants list 522 # Process the "real" fn last so we can store variants list
@@ -524,43 +538,19 @@ class Cache(NoCache):
524 538
525 return infos 539 return infos
526 540
527 def load(self, filename, appends): 541 def loadCached(self, filename, appends):
528 """Obtain the recipe information for the specified filename, 542 """Obtain the recipe information for the specified filename,
529 using cached values if available, otherwise parsing. 543 using cached values.
530 544 """
531 Note that if it does parse to obtain the info, it will not
532 automatically add the information to the cache or to your
533 CacheData. Use the add or add_info method to do so after
534 running this, or use loadData instead."""
535 cached = self.cacheValid(filename, appends)
536 if cached:
537 infos = []
538 # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
539 info_array = self.depends_cache[filename]
540 for variant in info_array[0].variants:
541 virtualfn = variant2virtual(filename, variant)
542 infos.append((virtualfn, self.depends_cache[virtualfn]))
543 else:
544 return self.parse(filename, appends, configdata, self.caches_array)
545
546 return cached, infos
547
548 def loadData(self, fn, appends, cacheData):
549 """Load the recipe info for the specified filename,
550 parsing and adding to the cache if necessary, and adding
551 the recipe information to the supplied CacheData instance."""
552 skipped, virtuals = 0, 0
553 545
554 cached, infos = self.load(fn, appends) 546 infos = []
555 for virtualfn, info_array in infos: 547 # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
556 if info_array[0].skipped: 548 info_array = self.depends_cache[filename]
557 self.logger.debug("Skipping %s: %s", virtualfn, info_array[0].skipreason) 549 for variant in info_array[0].variants:
558 skipped += 1 550 virtualfn = variant2virtual(filename, variant)
559 else: 551 infos.append((virtualfn, self.depends_cache[virtualfn]))
560 self.add_info(virtualfn, info_array, cacheData, not cached)
561 virtuals += 1
562 552
563 return cached, skipped, virtuals 553 return infos
564 554
565 def cacheValid(self, fn, appends): 555 def cacheValid(self, fn, appends):
566 """ 556 """
@@ -569,10 +559,6 @@ class Cache(NoCache):
569 """ 559 """
570 if fn not in self.checked: 560 if fn not in self.checked:
571 self.cacheValidUpdate(fn, appends) 561 self.cacheValidUpdate(fn, appends)
572
573 # Is cache enabled?
574 if not self.has_cache:
575 return False
576 if fn in self.clean: 562 if fn in self.clean:
577 return True 563 return True
578 return False 564 return False
@@ -582,10 +568,6 @@ class Cache(NoCache):
582 Is the cache valid for fn? 568 Is the cache valid for fn?
583 Make thorough (slower) checks including timestamps. 569 Make thorough (slower) checks including timestamps.
584 """ 570 """
585 # Is cache enabled?
586 if not self.has_cache:
587 return False
588
589 self.checked.add(fn) 571 self.checked.add(fn)
590 572
591 # File isn't in depends_cache 573 # File isn't in depends_cache
@@ -636,7 +618,7 @@ class Cache(NoCache):
636 for f in flist: 618 for f in flist:
637 if not f: 619 if not f:
638 continue 620 continue
639 f, exist = f.split(":") 621 f, exist = f.rsplit(":", 1)
640 if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)): 622 if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
641 self.logger.debug2("%s's file checksum list file %s changed", 623 self.logger.debug2("%s's file checksum list file %s changed",
642 fn, f) 624 fn, f)
@@ -692,10 +674,6 @@ class Cache(NoCache):
692 Save the cache 674 Save the cache
693 Called from the parser when complete (or exiting) 675 Called from the parser when complete (or exiting)
694 """ 676 """
695
696 if not self.has_cache:
697 return
698
699 if self.cacheclean: 677 if self.cacheclean:
700 self.logger.debug2("Cache is clean, not saving.") 678 self.logger.debug2("Cache is clean, not saving.")
701 return 679 return
@@ -716,6 +694,7 @@ class Cache(NoCache):
716 p.dump(info) 694 p.dump(info)
717 695
718 del self.depends_cache 696 del self.depends_cache
697 SiggenRecipeInfo.reset()
719 698
720 @staticmethod 699 @staticmethod
721 def mtime(cachefile): 700 def mtime(cachefile):
@@ -738,26 +717,11 @@ class Cache(NoCache):
738 if watcher: 717 if watcher:
739 watcher(info_array[0].file_depends) 718 watcher(info_array[0].file_depends)
740 719
741 if not self.has_cache:
742 return
743
744 if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache: 720 if (info_array[0].skipped or 'SRCREVINACTION' not in info_array[0].pv) and not info_array[0].nocache:
745 if parsed: 721 if parsed:
746 self.cacheclean = False 722 self.cacheclean = False
747 self.depends_cache[filename] = info_array 723 self.depends_cache[filename] = info_array
748 724
749 def add(self, file_name, data, cacheData, parsed=None):
750 """
751 Save data we need into the cache
752 """
753
754 realfn = virtualfn2realfn(file_name)[0]
755
756 info_array = []
757 for cache_class in self.caches_array:
758 info_array.append(cache_class(realfn, data))
759 self.add_info(file_name, info_array, cacheData, parsed)
760
761class MulticonfigCache(Mapping): 725class MulticonfigCache(Mapping):
762 def __init__(self, databuilder, data_hash, caches_array): 726 def __init__(self, databuilder, data_hash, caches_array):
763 def progress(p): 727 def progress(p):
@@ -794,6 +758,7 @@ class MulticonfigCache(Mapping):
794 loaded = 0 758 loaded = 0
795 759
796 for c in self.__caches.values(): 760 for c in self.__caches.values():
761 SiggenRecipeInfo.reset()
797 loaded += c.prepare_cache(progress) 762 loaded += c.prepare_cache(progress)
798 previous_progress = current_progress 763 previous_progress = current_progress
799 764
@@ -814,25 +779,6 @@ class MulticonfigCache(Mapping):
814 for k in self.__caches: 779 for k in self.__caches:
815 yield k 780 yield k
816 781
817def init(cooker):
818 """
819 The Objective: Cache the minimum amount of data possible yet get to the
820 stage of building packages (i.e. tryBuild) without reparsing any .bb files.
821
822 To do this, we intercept getVar calls and only cache the variables we see
823 being accessed. We rely on the cache getVar calls being made for all
824 variables bitbake might need to use to reach this stage. For each cached
825 file we need to track:
826
827 * Its mtime
828 * The mtimes of all its dependencies
829 * Whether it caused a parse.SkipRecipe exception
830
831 Files causing parsing errors are evicted from the cache.
832
833 """
834 return Cache(cooker.configuration.data, cooker.configuration.data_hash)
835
836 782
837class CacheData(object): 783class CacheData(object):
838 """ 784 """
@@ -871,11 +817,10 @@ class MultiProcessCache(object):
871 self.cachedata = self.create_cachedata() 817 self.cachedata = self.create_cachedata()
872 self.cachedata_extras = self.create_cachedata() 818 self.cachedata_extras = self.create_cachedata()
873 819
874 def init_cache(self, d, cache_file_name=None): 820 def init_cache(self, cachedir, cache_file_name=None):
875 cachedir = (d.getVar("PERSISTENT_DIR") or 821 if not cachedir:
876 d.getVar("CACHE"))
877 if cachedir in [None, '']:
878 return 822 return
823
879 bb.utils.mkdirhier(cachedir) 824 bb.utils.mkdirhier(cachedir)
880 self.cachefile = os.path.join(cachedir, 825 self.cachefile = os.path.join(cachedir,
881 cache_file_name or self.__class__.cache_file_name) 826 cache_file_name or self.__class__.cache_file_name)
@@ -902,10 +847,24 @@ class MultiProcessCache(object):
902 data = [{}] 847 data = [{}]
903 return data 848 return data
904 849
850 def clear_cache(self):
851 if not self.cachefile:
852 bb.fatal("Can't clear invalid cachefile")
853
854 self.cachedata = self.create_cachedata()
855 self.cachedata_extras = self.create_cachedata()
856 with bb.utils.fileslocked([self.cachefile + ".lock"]):
857 bb.utils.remove(self.cachefile)
858 bb.utils.remove(self.cachefile + "-*")
859
905 def save_extras(self): 860 def save_extras(self):
906 if not self.cachefile: 861 if not self.cachefile:
907 return 862 return
908 863
864 have_data = any(self.cachedata_extras)
865 if not have_data:
866 return
867
909 glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True) 868 glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)
910 869
911 i = os.getpid() 870 i = os.getpid()
@@ -940,6 +899,8 @@ class MultiProcessCache(object):
940 899
941 data = self.cachedata 900 data = self.cachedata
942 901
902 have_data = False
903
943 for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]: 904 for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
944 f = os.path.join(os.path.dirname(self.cachefile), f) 905 f = os.path.join(os.path.dirname(self.cachefile), f)
945 try: 906 try:
@@ -954,12 +915,14 @@ class MultiProcessCache(object):
954 os.unlink(f) 915 os.unlink(f)
955 continue 916 continue
956 917
918 have_data = True
957 self.merge_data(extradata, data) 919 self.merge_data(extradata, data)
958 os.unlink(f) 920 os.unlink(f)
959 921
960 with open(self.cachefile, "wb") as f: 922 if have_data:
961 p = pickle.Pickler(f, -1) 923 with open(self.cachefile, "wb") as f:
962 p.dump([data, self.__class__.CACHE_VERSION]) 924 p = pickle.Pickler(f, -1)
925 p.dump([data, self.__class__.CACHE_VERSION])
963 926
964 bb.utils.unlockfile(glf) 927 bb.utils.unlockfile(glf)
965 928
@@ -1015,3 +978,11 @@ class SimpleCache(object):
1015 p.dump([data, self.cacheversion]) 978 p.dump([data, self.cacheversion])
1016 979
1017 bb.utils.unlockfile(glf) 980 bb.utils.unlockfile(glf)
981
982 def copyfile(self, target):
983 if not self.cachefile:
984 return
985
986 glf = bb.utils.lockfile(self.cachefile + ".lock")
987 shutil.copy(self.cachefile, target)
988 bb.utils.unlockfile(glf)
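
For reference, MultiProcessCache's protocol is: each worker dumps its new entries to a pid-suffixed file under a shared lock, and the server merges those files under an exclusive lock, now skipping the rewrite when nothing was merged. A rough sketch of that protocol using plain fcntl locks in place of bb.utils.lockfile(); the file names and data shape are illustrative:

import fcntl
import os
import pickle

def save_extras(cachefile, extras):
    # Worker side: dump only this process's new entries to a
    # pid-suffixed file, under a shared lock.
    if not extras:
        return  # mirrors the new have_data early-return above
    with open(cachefile + ".lock", "w") as lock:
        fcntl.flock(lock, fcntl.LOCK_SH)
        with open("%s-%d" % (cachefile, os.getpid()), "wb") as f:
            pickle.dump(extras, f, -1)

def merge_and_save(cachefile):
    # Server side: take the exclusive lock, fold every per-pid file
    # into the base data, and rewrite only if something was merged.
    data = {}
    if os.path.exists(cachefile):
        with open(cachefile, "rb") as f:
            data = pickle.load(f)
    dirname = os.path.dirname(cachefile) or "."
    base = os.path.basename(cachefile)
    have_data = False
    with open(cachefile + ".lock", "w") as lock:
        fcntl.flock(lock, fcntl.LOCK_EX)
        for entry in os.listdir(dirname):
            if not entry.startswith(base + "-"):
                continue
            path = os.path.join(dirname, entry)
            with open(path, "rb") as f:
                data.update(pickle.load(f))
            os.unlink(path)
            have_data = True
        if have_data:
            with open(cachefile, "wb") as f:
                pickle.dump(data, f, -1)
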
diff --git a/bitbake/lib/bb/checksum.py b/bitbake/lib/bb/checksum.py
index 1d50a26426..3fb39a303e 100644
--- a/bitbake/lib/bb/checksum.py
+++ b/bitbake/lib/bb/checksum.py
@@ -11,10 +11,13 @@ import os
11import stat 11import stat
12import bb.utils 12import bb.utils
13import logging 13import logging
14import re
14from bb.cache import MultiProcessCache 15from bb.cache import MultiProcessCache
15 16
16logger = logging.getLogger("BitBake.Cache") 17logger = logging.getLogger("BitBake.Cache")
17 18
19filelist_regex = re.compile(r'(?:(?<=:True)|(?<=:False))\s+')
20
18# mtime cache (non-persistent) 21# mtime cache (non-persistent)
19# based upon the assumption that files do not change during bitbake run 22# based upon the assumption that files do not change during bitbake run
20class FileMtimeCache(object): 23class FileMtimeCache(object):
@@ -50,6 +53,7 @@ class FileChecksumCache(MultiProcessCache):
50 MultiProcessCache.__init__(self) 53 MultiProcessCache.__init__(self)
51 54
52 def get_checksum(self, f): 55 def get_checksum(self, f):
56 f = os.path.normpath(f)
53 entry = self.cachedata[0].get(f) 57 entry = self.cachedata[0].get(f)
54 cmtime = self.mtime_cache.cached_mtime(f) 58 cmtime = self.mtime_cache.cached_mtime(f)
55 if entry: 59 if entry:
@@ -84,22 +88,36 @@ class FileChecksumCache(MultiProcessCache):
84 return None 88 return None
85 return checksum 89 return checksum
86 90
91 #
92 # Changing the format of file-checksums is problematic as both OE and Bitbake have
93 # knowledge of them. We need to encode a new piece of data, the portion of the path
94 # we care about from a checksum perspective, so that files which move between subdirectories
95 # are still tracked by the task hashes. To do this, we do something horrible and put a "/./" into
96 # the path. The filesystem handles it but it gives us a marker to know which subsection
97 # of the path to cache.
98 #
87 def checksum_dir(pth): 99 def checksum_dir(pth):
88 # Handle directories recursively 100 # Handle directories recursively
89 if pth == "/": 101 if pth == "/":
90 bb.fatal("Refusing to checksum /") 102 bb.fatal("Refusing to checksum /")
103 pth = pth.rstrip("/")
91 dirchecksums = [] 104 dirchecksums = []
92 for root, dirs, files in os.walk(pth, topdown=True): 105 for root, dirs, files in os.walk(pth, topdown=True):
93 [dirs.remove(d) for d in list(dirs) if d in localdirsexclude] 106 [dirs.remove(d) for d in list(dirs) if d in localdirsexclude]
94 for name in files: 107 for name in files:
95 fullpth = os.path.join(root, name) 108 fullpth = os.path.join(root, name).replace(pth, os.path.join(pth, "."))
96 checksum = checksum_file(fullpth) 109 checksum = checksum_file(fullpth)
97 if checksum: 110 if checksum:
98 dirchecksums.append((fullpth, checksum)) 111 dirchecksums.append((fullpth, checksum))
99 return dirchecksums 112 return dirchecksums
100 113
101 checksums = [] 114 checksums = []
102 for pth in filelist.split(): 115 for pth in filelist_regex.split(filelist):
116 if not pth:
117 continue
118 pth = pth.strip()
119 if not pth:
120 continue
103 exist = pth.split(":")[1] 121 exist = pth.split(":")[1]
104 if exist == "False": 122 if exist == "False":
105 continue 123 continue
@@ -124,3 +142,28 @@ class FileChecksumCache(MultiProcessCache):
124 142
125 checksums.sort(key=operator.itemgetter(1)) 143 checksums.sort(key=operator.itemgetter(1))
126 return checksums 144 return checksums
145
146class RevisionsCache(MultiProcessCache):
147 cache_file_name = "local_srcrevisions.dat"
148 CACHE_VERSION = 1
149
150 def __init__(self):
151 MultiProcessCache.__init__(self)
152
153 def get_revs(self):
154 return self.cachedata[0]
155
156 def get_rev(self, k):
157 if k in self.cachedata_extras[0]:
158 return self.cachedata_extras[0][k]
159 if k in self.cachedata[0]:
160 return self.cachedata[0][k]
161 return None
162
163 def set_rev(self, k, v):
164 self.cachedata[0][k] = v
165 self.cachedata_extras[0][k] = v
166
167 def merge_data(self, source, dest):
168 for h in source[0]:
169 dest[0][h] = source[0][h]
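
The "/./" marker described in the comment above checksum_dir() splits a path into a base that may relocate and a tracked tail that feeds the task hash. A small sketch of how a consumer could use such a marker; the paths are made up:

def tracked_portion(path):
    # Everything after the "/./" marker is the hash-relevant part of
    # the path; paths without a marker are tracked whole.
    if "/./" in path:
        return path.split("/./", 1)[1]
    return path

# checksum_dir("/srcdir/files") records names like
# "/srcdir/files/./scripts/run.sh"; only the tail matters for the
# hash, so relocating /srcdir does not perturb task signatures while
# moving run.sh between subdirectories still does.
assert tracked_portion("/srcdir/files/./scripts/run.sh") == "scripts/run.sh"
assert tracked_portion("/srcdir/plain.patch") == "/srcdir/plain.patch"
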
diff --git a/bitbake/lib/bb/codeparser.py b/bitbake/lib/bb/codeparser.py
index 25a7ac69d3..4f70cf7fe7 100644
--- a/bitbake/lib/bb/codeparser.py
+++ b/bitbake/lib/bb/codeparser.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright BitBake Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -25,6 +27,7 @@ import ast
25import sys 27import sys
26import codegen 28import codegen
27import logging 29import logging
30import inspect
28import bb.pysh as pysh 31import bb.pysh as pysh
29import bb.utils, bb.data 32import bb.utils, bb.data
30import hashlib 33import hashlib
@@ -56,10 +59,56 @@ def check_indent(codestr):
56 59
57 return codestr 60 return codestr
58 61
59# A custom getstate/setstate using tuples is actually worth 15% cachesize by 62modulecode_deps = {}
60# avoiding duplication of the attribute names!
61 63
64def add_module_functions(fn, functions, namespace):
65 import os
66 fstat = os.stat(fn)
67 fixedhash = fn + ":" + str(fstat.st_size) + ":" + str(fstat.st_mtime)
68 for f in functions:
69 name = "%s.%s" % (namespace, f)
70 parser = PythonParser(name, logger)
71 try:
72 parser.parse_python(None, filename=fn, lineno=1, fixedhash=fixedhash+f, func=functions[f])
73 #bb.warn("Cached %s" % f)
74 except KeyError:
75 try:
76 targetfn = inspect.getsourcefile(functions[f])
77 except TypeError:
78 # Builtin
79 continue
80 if fn != targetfn:
81 # Skip references to other modules outside this file
82 #bb.warn("Skipping %s" % name)
83 continue
84 try:
85 lines, lineno = inspect.getsourcelines(functions[f])
86 except TypeError:
87 # Builtin
88 continue
89 src = "".join(lines)
90 parser.parse_python(src, filename=fn, lineno=lineno, fixedhash=fixedhash+f, func=functions[f])
91 #bb.warn("Not cached %s" % f)
92 execs = parser.execs.copy()
93 # Expand internal module exec references
94 for e in parser.execs:
95 if e in functions:
96 execs.remove(e)
97 execs.add(namespace + "." + e)
98 visitorcode = None
99 if hasattr(functions[f], 'visitorcode'):
100 visitorcode = getattr(functions[f], "visitorcode")
101 modulecode_deps[name] = [parser.references.copy(), execs, parser.var_execs.copy(), parser.contains.copy(), parser.extra, visitorcode]
102 #bb.warn("%s: %s\nRefs:%s Execs: %s %s %s" % (name, fn, parser.references, parser.execs, parser.var_execs, parser.contains))
103
104def update_module_dependencies(d):
105 for mod in modulecode_deps:
106 excludes = set((d.getVarFlag(mod, "vardepsexclude") or "").split())
107 if excludes:
108 modulecode_deps[mod] = [modulecode_deps[mod][0] - excludes, modulecode_deps[mod][1] - excludes, modulecode_deps[mod][2] - excludes, modulecode_deps[mod][3], modulecode_deps[mod][4], modulecode_deps[mod][5]]
62 109
110# A custom getstate/setstate using tuples is actually worth 15% cachesize by
111# avoiding duplication of the attribute names!
63class SetCache(object): 112class SetCache(object):
64 def __init__(self): 113 def __init__(self):
65 self.setcache = {} 114 self.setcache = {}
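
add_module_functions() above keys the parser cache on path, size and mtime plus the function name instead of on the function text, which is expensive to obtain. A stripped-down sketch of that lookup-before-extract pattern, where analyse() and get_source() are hypothetical stand-ins for PythonParser and inspect.getsourcelines():

import os

_parse_cache = {}  # fixedhash -> cached parse result

def fixedhash_for(fn, funcname):
    # Cheap cache key derived from file identity, not function text.
    st = os.stat(fn)
    return "%s:%d:%s:%s" % (fn, st.st_size, st.st_mtime, funcname)

def parse_cached(fn, funcname, get_source, analyse):
    # Only pay for source extraction (the expensive step) on a miss.
    key = fixedhash_for(fn, funcname)
    try:
        return _parse_cache[key]
    except KeyError:
        result = analyse(get_source())
        _parse_cache[key] = result
        return result
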
@@ -79,21 +128,22 @@ class SetCache(object):
79codecache = SetCache() 128codecache = SetCache()
80 129
81class pythonCacheLine(object): 130class pythonCacheLine(object):
82 def __init__(self, refs, execs, contains): 131 def __init__(self, refs, execs, contains, extra):
83 self.refs = codecache.internSet(refs) 132 self.refs = codecache.internSet(refs)
84 self.execs = codecache.internSet(execs) 133 self.execs = codecache.internSet(execs)
85 self.contains = {} 134 self.contains = {}
86 for c in contains: 135 for c in contains:
87 self.contains[c] = codecache.internSet(contains[c]) 136 self.contains[c] = codecache.internSet(contains[c])
137 self.extra = extra
88 138
89 def __getstate__(self): 139 def __getstate__(self):
90 return (self.refs, self.execs, self.contains) 140 return (self.refs, self.execs, self.contains, self.extra)
91 141
92 def __setstate__(self, state): 142 def __setstate__(self, state):
93 (refs, execs, contains) = state 143 (refs, execs, contains, extra) = state
94 self.__init__(refs, execs, contains) 144 self.__init__(refs, execs, contains, extra)
95 def __hash__(self): 145 def __hash__(self):
96 l = (hash(self.refs), hash(self.execs)) 146 l = (hash(self.refs), hash(self.execs), hash(self.extra))
97 for c in sorted(self.contains.keys()): 147 for c in sorted(self.contains.keys()):
98 l = l + (c, hash(self.contains[c])) 148 l = l + (c, hash(self.contains[c]))
99 return hash(l) 149 return hash(l)
@@ -122,7 +172,7 @@ class CodeParserCache(MultiProcessCache):
122 # so that an existing cache gets invalidated. Additionally you'll need 172 # so that an existing cache gets invalidated. Additionally you'll need
123 # to increment __cache_version__ in cache.py in order to ensure that old 173 # to increment __cache_version__ in cache.py in order to ensure that old
124 # recipe caches don't trigger "Taskhash mismatch" errors. 174 # recipe caches don't trigger "Taskhash mismatch" errors.
125 CACHE_VERSION = 11 175 CACHE_VERSION = 14
126 176
127 def __init__(self): 177 def __init__(self):
128 MultiProcessCache.__init__(self) 178 MultiProcessCache.__init__(self)
@@ -136,8 +186,8 @@ class CodeParserCache(MultiProcessCache):
136 self.pythoncachelines = {} 186 self.pythoncachelines = {}
137 self.shellcachelines = {} 187 self.shellcachelines = {}
138 188
139 def newPythonCacheLine(self, refs, execs, contains): 189 def newPythonCacheLine(self, refs, execs, contains, extra):
140 cacheline = pythonCacheLine(refs, execs, contains) 190 cacheline = pythonCacheLine(refs, execs, contains, extra)
141 h = hash(cacheline) 191 h = hash(cacheline)
142 if h in self.pythoncachelines: 192 if h in self.pythoncachelines:
143 return self.pythoncachelines[h] 193 return self.pythoncachelines[h]
@@ -152,12 +202,12 @@ class CodeParserCache(MultiProcessCache):
152 self.shellcachelines[h] = cacheline 202 self.shellcachelines[h] = cacheline
153 return cacheline 203 return cacheline
154 204
155 def init_cache(self, d): 205 def init_cache(self, cachedir):
156 # Check if we already have the caches 206 # Check if we already have the caches
157 if self.pythoncache: 207 if self.pythoncache:
158 return 208 return
159 209
160 MultiProcessCache.init_cache(self, d) 210 MultiProcessCache.init_cache(self, cachedir)
161 211
162 # cachedata gets re-assigned in the parent 212 # cachedata gets re-assigned in the parent
163 self.pythoncache = self.cachedata[0] 213 self.pythoncache = self.cachedata[0]
@@ -169,8 +219,8 @@ class CodeParserCache(MultiProcessCache):
169 219
170codeparsercache = CodeParserCache() 220codeparsercache = CodeParserCache()
171 221
172def parser_cache_init(d): 222def parser_cache_init(cachedir):
173 codeparsercache.init_cache(d) 223 codeparsercache.init_cache(cachedir)
174 224
175def parser_cache_save(): 225def parser_cache_save():
176 codeparsercache.save_extras() 226 codeparsercache.save_extras()
@@ -195,6 +245,10 @@ class BufferedLogger(Logger):
195 self.target.handle(record) 245 self.target.handle(record)
196 self.buffer = [] 246 self.buffer = []
197 247
248class DummyLogger():
249 def flush(self):
250 return
251
198class PythonParser(): 252class PythonParser():
199 getvars = (".getVar", ".appendVar", ".prependVar", "oe.utils.conditional") 253 getvars = (".getVar", ".appendVar", ".prependVar", "oe.utils.conditional")
200 getvarflags = (".getVarFlag", ".appendVarFlag", ".prependVarFlag") 254 getvarflags = (".getVarFlag", ".appendVarFlag", ".prependVarFlag")
@@ -212,26 +266,34 @@ class PythonParser():
212 funcstr = codegen.to_source(func) 266 funcstr = codegen.to_source(func)
213 argstr = codegen.to_source(arg) 267 argstr = codegen.to_source(arg)
214 except TypeError: 268 except TypeError:
215 self.log.debug(2, 'Failed to convert function and argument to source form') 269 self.log.debug2('Failed to convert function and argument to source form')
216 else: 270 else:
217 self.log.debug(1, self.unhandled_message % (funcstr, argstr)) 271 self.log.debug(self.unhandled_message % (funcstr, argstr))
218 272
219 def visit_Call(self, node): 273 def visit_Call(self, node):
220 name = self.called_node_name(node.func) 274 name = self.called_node_name(node.func)
221 if name and (name.endswith(self.getvars) or name.endswith(self.getvarflags) or name in self.containsfuncs or name in self.containsanyfuncs): 275 if name and name in modulecode_deps and modulecode_deps[name][5]:
222 if isinstance(node.args[0], ast.Str): 276 visitorcode = modulecode_deps[name][5]
223 varname = node.args[0].s 277 contains, execs, warn = visitorcode(name, node.args)
224 if name in self.containsfuncs and isinstance(node.args[1], ast.Str): 278 for i in contains:
279 self.contains[i] = contains[i]
280 self.execs |= execs
281 if warn:
282 self.warn(node.func, warn)
283 elif name and (name.endswith(self.getvars) or name.endswith(self.getvarflags) or name in self.containsfuncs or name in self.containsanyfuncs):
284 if isinstance(node.args[0], ast.Constant) and isinstance(node.args[0].value, str):
285 varname = node.args[0].value
286 if name in self.containsfuncs and isinstance(node.args[1], ast.Constant):
225 if varname not in self.contains: 287 if varname not in self.contains:
226 self.contains[varname] = set() 288 self.contains[varname] = set()
227 self.contains[varname].add(node.args[1].s) 289 self.contains[varname].add(node.args[1].value)
228 elif name in self.containsanyfuncs and isinstance(node.args[1], ast.Str): 290 elif name in self.containsanyfuncs and isinstance(node.args[1], ast.Constant):
229 if varname not in self.contains: 291 if varname not in self.contains:
230 self.contains[varname] = set() 292 self.contains[varname] = set()
231 self.contains[varname].update(node.args[1].s.split()) 293 self.contains[varname].update(node.args[1].value.split())
232 elif name.endswith(self.getvarflags): 294 elif name.endswith(self.getvarflags):
233 if isinstance(node.args[1], ast.Str): 295 if isinstance(node.args[1], ast.Constant):
234 self.references.add('%s[%s]' % (varname, node.args[1].s)) 296 self.references.add('%s[%s]' % (varname, node.args[1].value))
235 else: 297 else:
236 self.warn(node.func, node.args[1]) 298 self.warn(node.func, node.args[1])
237 else: 299 else:
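
The new visitorcode branch in visit_Call() above lets a module function supply its own dependency analysis: the hook is called with the function name and the raw ast argument list and returns a (contains, execs, warn) triple for the parser to merge. A hypothetical hook for a bb.utils.contains-style helper, sketched under the assumption that warn, when set, is an ast node fed to the parser's normal warning path:

import ast

def contains_visitor(name, args):
    # Record VAR -> {checked values} when both arguments are string
    # literals; otherwise hand back the offending node as a warning.
    contains = {}
    execs = set()
    warn = None
    if (len(args) >= 2
            and isinstance(args[0], ast.Constant) and isinstance(args[0].value, str)
            and isinstance(args[1], ast.Constant) and isinstance(args[1].value, str)):
        contains[args[0].value] = set(args[1].value.split())
    elif args:
        warn = args[0]
    return contains, execs, warn

def my_contains(d, variable, checkvalues, truevalue, falsevalue):
    ...  # runtime behaviour is irrelevant to dependency parsing

# The parser discovers hooks via hasattr(func, "visitorcode"):
my_contains.visitorcode = contains_visitor
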
@@ -239,8 +301,8 @@ class PythonParser():
239 else: 301 else:
240 self.warn(node.func, node.args[0]) 302 self.warn(node.func, node.args[0])
241 elif name and name.endswith(".expand"): 303 elif name and name.endswith(".expand"):
242 if isinstance(node.args[0], ast.Str): 304 if isinstance(node.args[0], ast.Constant):
243 value = node.args[0].s 305 value = node.args[0].value
244 d = bb.data.init() 306 d = bb.data.init()
245 parser = d.expandWithRefs(value, self.name) 307 parser = d.expandWithRefs(value, self.name)
246 self.references |= parser.references 308 self.references |= parser.references
@@ -250,8 +312,8 @@ class PythonParser():
250 self.contains[varname] = set() 312 self.contains[varname] = set()
251 self.contains[varname] |= parser.contains[varname] 313 self.contains[varname] |= parser.contains[varname]
252 elif name in self.execfuncs: 314 elif name in self.execfuncs:
253 if isinstance(node.args[0], ast.Str): 315 if isinstance(node.args[0], ast.Constant):
254 self.var_execs.add(node.args[0].s) 316 self.var_execs.add(node.args[0].value)
255 else: 317 else:
256 self.warn(node.func, node.args[0]) 318 self.warn(node.func, node.args[0])
257 elif name and isinstance(node.func, (ast.Name, ast.Attribute)): 319 elif name and isinstance(node.func, (ast.Name, ast.Attribute)):
@@ -276,16 +338,24 @@ class PythonParser():
276 self.contains = {} 338 self.contains = {}
277 self.execs = set() 339 self.execs = set()
278 self.references = set() 340 self.references = set()
279 self.log = BufferedLogger('BitBake.Data.PythonParser', logging.DEBUG, log) 341 self._log = log
342 # Defer init as it is expensive
343 self.log = DummyLogger()
280 344
281 self.unhandled_message = "in call of %s, argument '%s' is not a string literal" 345 self.unhandled_message = "in call of %s, argument '%s' is not a string literal"
282 self.unhandled_message = "while parsing %s, %s" % (name, self.unhandled_message) 346 self.unhandled_message = "while parsing %s, %s" % (name, self.unhandled_message)
283 347
284 def parse_python(self, node, lineno=0, filename="<string>"): 348 # For the python module code it is expensive to have the function text so it
285 if not node or not node.strip(): 349 # uses a different fixedhash to cache against. We can take the hit on obtaining the
350 # text if it isn't in the cache.
351 def parse_python(self, node, lineno=0, filename="<string>", fixedhash=None, func=None):
352 if not fixedhash and (not node or not node.strip()):
286 return 353 return
287 354
288 h = bbhash(str(node)) 355 if fixedhash:
356 h = fixedhash
357 else:
358 h = bbhash(str(node))
289 359
290 if h in codeparsercache.pythoncache: 360 if h in codeparsercache.pythoncache:
291 self.references = set(codeparsercache.pythoncache[h].refs) 361 self.references = set(codeparsercache.pythoncache[h].refs)
@@ -293,6 +363,7 @@ class PythonParser():
293 self.contains = {} 363 self.contains = {}
294 for i in codeparsercache.pythoncache[h].contains: 364 for i in codeparsercache.pythoncache[h].contains:
295 self.contains[i] = set(codeparsercache.pythoncache[h].contains[i]) 365 self.contains[i] = set(codeparsercache.pythoncache[h].contains[i])
366 self.extra = codeparsercache.pythoncache[h].extra
296 return 367 return
297 368
298 if h in codeparsercache.pythoncacheextras: 369 if h in codeparsercache.pythoncacheextras:
@@ -301,8 +372,15 @@ class PythonParser():
301 self.contains = {} 372 self.contains = {}
302 for i in codeparsercache.pythoncacheextras[h].contains: 373 for i in codeparsercache.pythoncacheextras[h].contains:
303 self.contains[i] = set(codeparsercache.pythoncacheextras[h].contains[i]) 374 self.contains[i] = set(codeparsercache.pythoncacheextras[h].contains[i])
375 self.extra = codeparsercache.pythoncacheextras[h].extra
304 return 376 return
305 377
378 if fixedhash and not node:
379 raise KeyError
380
381 # Need to parse so take the hit on the real log buffer
382 self.log = BufferedLogger('BitBake.Data.PythonParser', logging.DEBUG, self._log)
383
306 # We can't add to the linenumbers for compile, we can pad to the correct number of blank lines though 384 # We can't add to the linenumbers for compile, we can pad to the correct number of blank lines though
307 node = "\n" * int(lineno) + node 385 node = "\n" * int(lineno) + node
308 code = compile(check_indent(str(node)), filename, "exec", 386 code = compile(check_indent(str(node)), filename, "exec",
@@ -312,16 +390,27 @@ class PythonParser():
312 if n.__class__.__name__ == "Call": 390 if n.__class__.__name__ == "Call":
313 self.visit_Call(n) 391 self.visit_Call(n)
314 392
393 if func is not None:
394 self.references |= getattr(func, "bb_vardeps", set())
395 self.references -= getattr(func, "bb_vardepsexclude", set())
396
315 self.execs.update(self.var_execs) 397 self.execs.update(self.var_execs)
398 self.extra = None
399 if fixedhash:
400 self.extra = bbhash(str(node))
316 401
317 codeparsercache.pythoncacheextras[h] = codeparsercache.newPythonCacheLine(self.references, self.execs, self.contains) 402 codeparsercache.pythoncacheextras[h] = codeparsercache.newPythonCacheLine(self.references, self.execs, self.contains, self.extra)
318 403
319class ShellParser(): 404class ShellParser():
320 def __init__(self, name, log): 405 def __init__(self, name, log):
321 self.funcdefs = set() 406 self.funcdefs = set()
322 self.allexecs = set() 407 self.allexecs = set()
323 self.execs = set() 408 self.execs = set()
324 self.log = BufferedLogger('BitBake.Data.%s' % name, logging.DEBUG, log) 409 self._name = name
410 self._log = log
411 # Defer init as it is expensive
412 self.log = DummyLogger()
413
325 self.unhandled_template = "unable to handle non-literal command '%s'" 414 self.unhandled_template = "unable to handle non-literal command '%s'"
326 self.unhandled_template = "while parsing %s, %s" % (name, self.unhandled_template) 415 self.unhandled_template = "while parsing %s, %s" % (name, self.unhandled_template)
327 416
@@ -340,6 +429,9 @@ class ShellParser():
340 self.execs = set(codeparsercache.shellcacheextras[h].execs) 429 self.execs = set(codeparsercache.shellcacheextras[h].execs)
341 return self.execs 430 return self.execs
342 431
432 # Need to parse so take the hit on the real log buffer
433 self.log = BufferedLogger('BitBake.Data.%s' % self._name, logging.DEBUG, self._log)
434
343 self._parse_shell(value) 435 self._parse_shell(value)
344 self.execs = set(cmd for cmd in self.allexecs if cmd not in self.funcdefs) 436 self.execs = set(cmd for cmd in self.allexecs if cmd not in self.funcdefs)
345 437
@@ -426,19 +518,34 @@ class ShellParser():
426 """ 518 """
427 519
428 words = list(words) 520 words = list(words)
429 for word in list(words): 521 for word in words:
430 wtree = pyshlex.make_wordtree(word[1]) 522 wtree = pyshlex.make_wordtree(word[1])
431 for part in wtree: 523 for part in wtree:
432 if not isinstance(part, list): 524 if not isinstance(part, list):
433 continue 525 continue
434 526
435 if part[0] in ('`', '$('): 527 candidates = [part]
436 command = pyshlex.wordtree_as_string(part[1:-1]) 528
437 self._parse_shell(command) 529 # If command is of type:
438 530 #
439 if word[0] in ("cmd_name", "cmd_word"): 531 # var="... $(cmd [...]) ..."
440 if word in words: 532 #
441 words.remove(word) 533 # Then iterate on what's between the quotes, and if we find a
534 # list, make that what we check for below.
535 if len(part) >= 3 and part[0] == '"':
536 for p in part[1:-1]:
537 if isinstance(p, list):
538 candidates.append(p)
539
540 for candidate in candidates:
541 if len(candidate) >= 2:
542 if candidate[0] in ('`', '$('):
543 command = pyshlex.wordtree_as_string(candidate[1:-1])
544 self._parse_shell(command)
545
546 if word[0] in ("cmd_name", "cmd_word"):
547 if word in words:
548 words.remove(word)
442 549
443 usetoken = False 550 usetoken = False
444 for word in words: 551 for word in words:
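
The quoted-word handling added above catches command substitutions nested inside double-quoted assignments such as var="... $(cmd) ...". A deliberately naive regex sketch of just that case; the real parser walks a pyshlex word tree rather than matching text:

import re

# Matches $(...) or `...` without nesting; enough to illustrate the
# case the change above handles, not a general shell parser.
CMDSUB = re.compile(r"\$\(([^()]*)\)|`([^`]*)`")

def nested_commands(word):
    # Yield command substitutions found inside a (possibly quoted) word.
    for m in CMDSUB.finditer(word):
        yield m.group(1) or m.group(2)

print(list(nested_commands('var="prefix $(cmd --opt) suffix"')))
# ['cmd --opt']
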
@@ -450,7 +557,7 @@ class ShellParser():
450 557
451 cmd = word[1] 558 cmd = word[1]
452 if cmd.startswith("$"): 559 if cmd.startswith("$"):
453 self.log.debug(1, self.unhandled_template % cmd) 560 self.log.debug(self.unhandled_template % cmd)
454 elif cmd == "eval": 561 elif cmd == "eval":
455 command = " ".join(word for _, word in words[1:]) 562 command = " ".join(word for _, word in words[1:])
456 self._parse_shell(command) 563 self._parse_shell(command)
diff --git a/bitbake/lib/bb/command.py b/bitbake/lib/bb/command.py
index dd77cdd6e2..59a979ee90 100644
--- a/bitbake/lib/bb/command.py
+++ b/bitbake/lib/bb/command.py
@@ -20,9 +20,11 @@ Commands are queued in a CommandQueue
20 20
21from collections import OrderedDict, defaultdict 21from collections import OrderedDict, defaultdict
22 22
23import io
23import bb.event 24import bb.event
24import bb.cooker 25import bb.cooker
25import bb.remotedata 26import bb.remotedata
27import bb.parse
26 28
27class DataStoreConnectionHandle(object): 29class DataStoreConnectionHandle(object):
28 def __init__(self, dsindex=0): 30 def __init__(self, dsindex=0):
@@ -50,23 +52,32 @@ class Command:
50 """ 52 """
51 A queue of asynchronous commands for bitbake 53 A queue of asynchronous commands for bitbake
52 """ 54 """
53 def __init__(self, cooker): 55 def __init__(self, cooker, process_server):
54 self.cooker = cooker 56 self.cooker = cooker
55 self.cmds_sync = CommandsSync() 57 self.cmds_sync = CommandsSync()
56 self.cmds_async = CommandsAsync() 58 self.cmds_async = CommandsAsync()
57 self.remotedatastores = None 59 self.remotedatastores = None
58 60
59 # FIXME Add lock for this 61 self.process_server = process_server
62 # Access with locking using process_server.{get/set/clear}_async_cmd()
60 self.currentAsyncCommand = None 63 self.currentAsyncCommand = None
61 64
62 def runCommand(self, commandline, ro_only = False): 65 def runCommand(self, commandline, process_server, ro_only=False):
63 command = commandline.pop(0) 66 command = commandline.pop(0)
64 67
65 # Ensure cooker is ready for commands 68 # Ensure cooker is ready for commands
66 if command != "updateConfig" and command != "setFeatures": 69 if command not in ["updateConfig", "setFeatures", "ping"]:
67 self.cooker.init_configdata() 70 try:
68 if not self.remotedatastores: 71 self.cooker.init_configdata()
69 self.remotedatastores = bb.remotedata.RemoteDatastores(self.cooker) 72 if not self.remotedatastores:
73 self.remotedatastores = bb.remotedata.RemoteDatastores(self.cooker)
74 except (Exception, SystemExit) as exc:
75 import traceback
76 if isinstance(exc, bb.BBHandledException):
77 # We need to start returning real exceptions here. Until we do, we can't
78 # tell if an exception is an instance of bb.BBHandledException
79 return None, "bb.BBHandledException()\n" + traceback.format_exc()
80 return None, traceback.format_exc()
70 81
71 if hasattr(CommandsSync, command): 82 if hasattr(CommandsSync, command):
72 # Can run synchronous commands straight away 83 # Can run synchronous commands straight away
@@ -75,7 +86,6 @@ class Command:
75 if not hasattr(command_method, 'readonly') or not getattr(command_method, 'readonly'): 86 if not hasattr(command_method, 'readonly') or not getattr(command_method, 'readonly'):
 76 return None, "Not able to execute non-readonly commands in readonly mode" 87 return None, "Not able to execute non-readonly commands in readonly mode"
77 try: 88 try:
78 self.cooker.process_inotify_updates()
79 if getattr(command_method, 'needconfig', True): 89 if getattr(command_method, 'needconfig', True):
80 self.cooker.updateCacheSync() 90 self.cooker.updateCacheSync()
81 result = command_method(self, commandline) 91 result = command_method(self, commandline)
@@ -90,61 +100,57 @@ class Command:
90 return None, traceback.format_exc() 100 return None, traceback.format_exc()
91 else: 101 else:
92 return result, None 102 return result, None
93 if self.currentAsyncCommand is not None:
94 return None, "Busy (%s in progress)" % self.currentAsyncCommand[0]
95 if command not in CommandsAsync.__dict__: 103 if command not in CommandsAsync.__dict__:
96 return None, "No such command" 104 return None, "No such command"
97 self.currentAsyncCommand = (command, commandline) 105 if not process_server.set_async_cmd((command, commandline)):
98 self.cooker.idleCallBackRegister(self.cooker.runCommands, self.cooker) 106 return None, "Busy (%s in progress)" % self.process_server.get_async_cmd()[0]
107 self.cooker.idleCallBackRegister(self.runAsyncCommand, process_server)
99 return True, None 108 return True, None
100 109
101 def runAsyncCommand(self): 110 def runAsyncCommand(self, _, process_server, halt):
102 try: 111 try:
103 self.cooker.process_inotify_updates() 112 if self.cooker.state in (bb.cooker.State.ERROR, bb.cooker.State.SHUTDOWN, bb.cooker.State.FORCE_SHUTDOWN):
104 if self.cooker.state in (bb.cooker.state.error, bb.cooker.state.shutdown, bb.cooker.state.forceshutdown):
105 # updateCache will trigger a shutdown of the parser 113 # updateCache will trigger a shutdown of the parser
106 # and then raise BBHandledException triggering an exit 114 # and then raise BBHandledException triggering an exit
107 self.cooker.updateCache() 115 self.cooker.updateCache()
108 return False 116 return bb.server.process.idleFinish("Cooker in error state")
109 if self.currentAsyncCommand is not None: 117 cmd = process_server.get_async_cmd()
110 (command, options) = self.currentAsyncCommand 118 if cmd is not None:
119 (command, options) = cmd
111 commandmethod = getattr(CommandsAsync, command) 120 commandmethod = getattr(CommandsAsync, command)
112 needcache = getattr( commandmethod, "needcache" ) 121 needcache = getattr( commandmethod, "needcache" )
113 if needcache and self.cooker.state != bb.cooker.state.running: 122 if needcache and self.cooker.state != bb.cooker.State.RUNNING:
114 self.cooker.updateCache() 123 self.cooker.updateCache()
115 return True 124 return True
116 else: 125 else:
117 commandmethod(self.cmds_async, self, options) 126 commandmethod(self.cmds_async, self, options)
118 return False 127 return False
119 else: 128 else:
120 return False 129 return bb.server.process.idleFinish("Nothing to do, no async command?")
121 except KeyboardInterrupt as exc: 130 except KeyboardInterrupt as exc:
122 self.finishAsyncCommand("Interrupted") 131 return bb.server.process.idleFinish("Interrupted")
123 return False
124 except SystemExit as exc: 132 except SystemExit as exc:
125 arg = exc.args[0] 133 arg = exc.args[0]
126 if isinstance(arg, str): 134 if isinstance(arg, str):
127 self.finishAsyncCommand(arg) 135 return bb.server.process.idleFinish(arg)
128 else: 136 else:
129 self.finishAsyncCommand("Exited with %s" % arg) 137 return bb.server.process.idleFinish("Exited with %s" % arg)
130 return False
131 except Exception as exc: 138 except Exception as exc:
132 import traceback 139 import traceback
133 if isinstance(exc, bb.BBHandledException): 140 if isinstance(exc, bb.BBHandledException):
134 self.finishAsyncCommand("") 141 return bb.server.process.idleFinish("")
135 else: 142 else:
136 self.finishAsyncCommand(traceback.format_exc()) 143 return bb.server.process.idleFinish(traceback.format_exc())
137 return False
138 144
139 def finishAsyncCommand(self, msg=None, code=None): 145 def finishAsyncCommand(self, msg=None, code=None):
146 self.cooker.finishcommand()
147 self.process_server.clear_async_cmd()
140 if msg or msg == "": 148 if msg or msg == "":
141 bb.event.fire(CommandFailed(msg), self.cooker.data) 149 bb.event.fire(CommandFailed(msg), self.cooker.data)
142 elif code: 150 elif code:
143 bb.event.fire(CommandExit(code), self.cooker.data) 151 bb.event.fire(CommandExit(code), self.cooker.data)
144 else: 152 else:
145 bb.event.fire(CommandCompleted(), self.cooker.data) 153 bb.event.fire(CommandCompleted(), self.cooker.data)
146 self.currentAsyncCommand = None
147 self.cooker.finishcommand()
148 154
149 def reset(self): 155 def reset(self):
150 if self.remotedatastores: 156 if self.remotedatastores:
@@ -157,6 +163,14 @@ class CommandsSync:
157 These must not influence any running synchronous command. 163 These must not influence any running synchronous command.
158 """ 164 """
159 165
166 def ping(self, command, params):
167 """
168 Allow a UI to check the server is still alive
169 """
170 return "Still alive!"
171 ping.needconfig = False
172 ping.readonly = True
173
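A hedged usage sketch for the new ping command ('server' stands for any connected BitBake server proxy, as tinfoil or knotty hold). Because it is needconfig=False and readonly=True, it answers even before updateConfig has run and in read-only server mode:

    # Illustrative only; 'server' is a connected BitBake server proxy.
    result, error = server.runCommand(["ping"])
    assert error is None and result == "Still alive!"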
160 def stateShutdown(self, command, params): 174 def stateShutdown(self, command, params):
161 """ 175 """
162 Trigger cooker 'shutdown' mode 176 Trigger cooker 'shutdown' mode
@@ -294,6 +308,11 @@ class CommandsSync:
294 return ret 308 return ret
295 getLayerPriorities.readonly = True 309 getLayerPriorities.readonly = True
296 310
311 def revalidateCaches(self, command, params):
312 """Called by UI clients when metadata may have changed"""
313 command.cooker.revalidateCaches()
314 revalidateCaches.needconfig = False
315
297 def getRecipes(self, command, params): 316 def getRecipes(self, command, params):
298 try: 317 try:
299 mc = params[0] 318 mc = params[0]
@@ -402,15 +421,30 @@ class CommandsSync:
402 return command.cooker.recipecaches[mc].pkg_dp 421 return command.cooker.recipecaches[mc].pkg_dp
403 getDefaultPreference.readonly = True 422 getDefaultPreference.readonly = True
404 423
424
405 def getSkippedRecipes(self, command, params): 425 def getSkippedRecipes(self, command, params):
426 """
427 Get the map of skipped recipes for the specified multiconfig/mc name (`params[0]`).
428
429 Invoked by `bb.tinfoil.Tinfoil.get_skipped_recipes`
430
431 :param command: Internally used parameter.
432 :param params: Parameter array. params[0] is the multiconfig/mc name; if not given, the default mc '' is assumed.
433 :return: Dict whose keys are virtualfns and values are `bb.cooker.SkippedPackage`
434 """
435 try:
436 mc = params[0]
437 except IndexError:
438 mc = ''
439
406 # Return list sorted by reverse priority order 440 # Return list sorted by reverse priority order
407 import bb.cache 441 import bb.cache
408 def sortkey(x): 442 def sortkey(x):
409 vfn, _ = x 443 vfn, _ = x
410 realfn, _, mc = bb.cache.virtualfn2realfn(vfn) 444 realfn, _, item_mc = bb.cache.virtualfn2realfn(vfn)
411 return (-command.cooker.collections[mc].calc_bbfile_priority(realfn)[0], vfn) 445 return -command.cooker.collections[item_mc].calc_bbfile_priority(realfn)[0], vfn
412 446
413 skipdict = OrderedDict(sorted(command.cooker.skiplist.items(), key=sortkey)) 447 skipdict = OrderedDict(sorted(command.cooker.skiplist_by_mc[mc].items(), key=sortkey))
414 return list(skipdict.items()) 448 return list(skipdict.items())
415 getSkippedRecipes.readonly = True 449 getSkippedRecipes.readonly = True
416 450
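For the client side named in the docstring, roughly (a hedged sketch; the dict-like return shape and the skipreason attribute follow bb.tinfoil and bb.cooker.SkippedPackage as of this change):

    import bb.tinfoil

    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare()
        for virtualfn, skipped in tinfoil.get_skipped_recipes().items():
            print(virtualfn, skipped.skipreason)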
@@ -500,6 +534,17 @@ class CommandsSync:
500 d = command.remotedatastores[dsindex].varhistory 534 d = command.remotedatastores[dsindex].varhistory
501 return getattr(d, method)(*args, **kwargs) 535 return getattr(d, method)(*args, **kwargs)
502 536
537 def dataStoreConnectorVarHistCmdEmit(self, command, params):
538 dsindex = params[0]
539 var = params[1]
540 oval = params[2]
541 val = params[3]
542 d = command.remotedatastores[params[4]]
543
544 o = io.StringIO()
545 command.remotedatastores[dsindex].varhistory.emit(var, oval, val, o, d)
546 return o.getvalue()
547
503 def dataStoreConnectorIncHistCmd(self, command, params): 548 def dataStoreConnectorIncHistCmd(self, command, params):
504 dsindex = params[0] 549 dsindex = params[0]
505 method = params[1] 550 method = params[1]
@@ -521,8 +566,8 @@ class CommandsSync:
521 and return a datastore object representing the environment 566 and return a datastore object representing the environment
522 for the recipe. 567 for the recipe.
523 """ 568 """
524 fn = params[0] 569 virtualfn = params[0]
525 mc = bb.runqueue.mc_from_tid(fn) 570 (fn, cls, mc) = bb.cache.virtualfn2realfn(virtualfn)
526 appends = params[1] 571 appends = params[1]
527 appendlist = params[2] 572 appendlist = params[2]
528 if len(params) > 3: 573 if len(params) > 3:
@@ -537,6 +582,7 @@ class CommandsSync:
537 appendfiles = command.cooker.collections[mc].get_file_appends(fn) 582 appendfiles = command.cooker.collections[mc].get_file_appends(fn)
538 else: 583 else:
539 appendfiles = [] 584 appendfiles = []
585 layername = command.cooker.collections[mc].calc_bbfile_priority(fn)[2]
540 # We are calling bb.cache locally here rather than on the server, 586 # We are calling bb.cache locally here rather than on the server,
541 # but that's OK because it doesn't actually need anything from 587 # but that's OK because it doesn't actually need anything from
542 # the server barring the global datastore (which we have a remote 588 # the server barring the global datastore (which we have a remote
@@ -544,15 +590,21 @@ class CommandsSync:
544 if config_data: 590 if config_data:
545 # We have to use a different function here if we're passing in a datastore 591 # We have to use a different function here if we're passing in a datastore
546 # NOTE: we took a copy above, so we don't do it here again 592 # NOTE: we took a copy above, so we don't do it here again
547 envdata = bb.cache.parse_recipe(config_data, fn, appendfiles, mc)[''] 593 envdata = command.cooker.databuilder._parse_recipe(config_data, fn, appendfiles, mc, layername)[cls]
548 else: 594 else:
549 # Use the standard path 595 # Use the standard path
550 parser = bb.cache.NoCache(command.cooker.databuilder) 596 envdata = command.cooker.databuilder.parseRecipe(virtualfn, appendfiles, layername)
551 envdata = parser.loadDataFull(fn, appendfiles)
552 idx = command.remotedatastores.store(envdata) 597 idx = command.remotedatastores.store(envdata)
553 return DataStoreConnectionHandle(idx) 598 return DataStoreConnectionHandle(idx)
554 parseRecipeFile.readonly = True 599 parseRecipeFile.readonly = True
555 600
601 def finalizeData(self, command, params):
602 newdata = command.cooker.data.createCopy()
603 bb.data.expandKeys(newdata)
604 bb.parse.ast.runAnonFuncs(newdata)
605 idx = command.remotedatastores.store(newdata)
606 return DataStoreConnectionHandle(idx)
607
556class CommandsAsync: 608class CommandsAsync:
557 """ 609 """
558 A class of asynchronous commands 610 A class of asynchronous commands
@@ -647,6 +699,16 @@ class CommandsAsync:
647 command.finishAsyncCommand() 699 command.finishAsyncCommand()
648 findFilesMatchingInDir.needcache = False 700 findFilesMatchingInDir.needcache = False
649 701
702 def testCookerCommandEvent(self, command, params):
703 """
704 Dummy command used by OEQA selftest to test tinfoil without IO
705 """
706 pattern = params[0]
707
708 command.cooker.testCookerCommandEvent(pattern)
709 command.finishAsyncCommand()
710 testCookerCommandEvent.needcache = False
711
650 def findConfigFilePath(self, command, params): 712 def findConfigFilePath(self, command, params):
651 """ 713 """
652 Find the path of the requested configuration file 714 Find the path of the requested configuration file
@@ -711,7 +773,7 @@ class CommandsAsync:
711 """ 773 """
712 event = params[0] 774 event = params[0]
713 bb.event.fire(eval(event), command.cooker.data) 775 bb.event.fire(eval(event), command.cooker.data)
714 command.currentAsyncCommand = None 776 process_server.clear_async_cmd()
715 triggerEvent.needcache = False 777 triggerEvent.needcache = False
716 778
717 def resetCooker(self, command, params): 779 def resetCooker(self, command, params):
@@ -738,7 +800,14 @@ class CommandsAsync:
738 (mc, pn) = bb.runqueue.split_mc(params[0]) 800 (mc, pn) = bb.runqueue.split_mc(params[0])
739 taskname = params[1] 801 taskname = params[1]
740 sigs = params[2] 802 sigs = params[2]
803 bb.siggen.check_siggen_version(bb.siggen)
741 res = bb.siggen.find_siginfo(pn, taskname, sigs, command.cooker.databuilder.mcdata[mc]) 804 res = bb.siggen.find_siginfo(pn, taskname, sigs, command.cooker.databuilder.mcdata[mc])
742 bb.event.fire(bb.event.FindSigInfoResult(res), command.cooker.databuilder.mcdata[mc]) 805 bb.event.fire(bb.event.FindSigInfoResult(res), command.cooker.databuilder.mcdata[mc])
743 command.finishAsyncCommand() 806 command.finishAsyncCommand()
744 findSigInfo.needcache = False 807 findSigInfo.needcache = False
808
809 def getTaskSignatures(self, command, params):
810 res = command.cooker.getTaskSignatures(params[0], params[1])
811 bb.event.fire(bb.event.GetTaskSignatureResult(res), command.cooker.data)
812 command.finishAsyncCommand()
813 getTaskSignatures.needcache = True
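getTaskSignatures follows the standard CommandsAsync shape: do the work on the cooker, report via an event, then release the async slot. A hedged template (someQuery and SomeResult are hypothetical stand-ins, not BitBake APIs):

    # As it would live on CommandsAsync
    def exampleCommand(self, command, params):
        res = command.cooker.someQuery(params[0])                      # hypothetical
        bb.event.fire(bb.event.SomeResult(res), command.cooker.data)   # hypothetical
        # Release the async slot so the next queued command can run
        command.finishAsyncCommand()
    exampleCommand.needcache = True  # parse the recipe cache before running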
diff --git a/bitbake/lib/bb/compress/_pipecompress.py b/bitbake/lib/bb/compress/_pipecompress.py
new file mode 100644
index 0000000000..4a403d62cf
--- /dev/null
+++ b/bitbake/lib/bb/compress/_pipecompress.py
@@ -0,0 +1,196 @@
1#
2# Copyright BitBake Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6# Helper library to implement streaming compression and decompression using an
7# external process
8#
9# This library should not be used directly by end users; a wrapper library for the
10# specific compression tool should be created
11
12import builtins
13import io
14import os
15import subprocess
16
17
18def open_wrap(
19 cls, filename, mode="rb", *, encoding=None, errors=None, newline=None, **kwargs
20):
21 """
22 Open a compressed file in binary or text mode.
23
24 Users should not call this directly. A specific compression library can use
25 this helper to provide its own "open" command
26
27 The filename argument can be an actual filename (a str or bytes object), or
28 an existing file object to read from or write to.
29
30 The mode argument can be "r", "rb", "w", "wb", "x", "xb", "a" or "ab" for
31 binary mode, or "rt", "wt", "xt" or "at" for text mode. The default mode is
32 "rb".
33
34 For binary mode, this function is equivalent to the cls constructor:
35 cls(filename, mode). In this case, the encoding, errors and newline
36 arguments must not be provided.
37
38 For text mode, a cls object is created, and wrapped in an
39 io.TextIOWrapper instance with the specified encoding, error handling
40 behavior, and line ending(s).
41 """
42 if "t" in mode:
43 if "b" in mode:
44 raise ValueError("Invalid mode: %r" % (mode,))
45 else:
46 if encoding is not None:
47 raise ValueError("Argument 'encoding' not supported in binary mode")
48 if errors is not None:
49 raise ValueError("Argument 'errors' not supported in binary mode")
50 if newline is not None:
51 raise ValueError("Argument 'newline' not supported in binary mode")
52
53 file_mode = mode.replace("t", "")
54 if isinstance(filename, (str, bytes, os.PathLike, int)):
55 binary_file = cls(filename, file_mode, **kwargs)
56 elif hasattr(filename, "read") or hasattr(filename, "write"):
57 binary_file = cls(None, file_mode, fileobj=filename, **kwargs)
58 else:
59 raise TypeError("filename must be a str or bytes object, or a file")
60
61 if "t" in mode:
62 return io.TextIOWrapper(
63 binary_file, encoding, errors, newline, write_through=True
64 )
65 else:
66 return binary_file
67
68
69class CompressionError(OSError):
70 pass
71
72
73class PipeFile(io.RawIOBase):
74 """
75 Class that implements generically piping to/from a compression program
76
77 Derived classes should add the functions get_compress() and get_decompress()
78 that return the required commands. Input will be piped into stdin and the
79 (de)compressed output should be written to stdout, e.g.:
80
81 class FooFile(PipeFile):
82 def get_decompress(self):
83 return ["fooc", "--decompress", "--stdout"]
84
85 def get_compress(self):
86 return ["fooc", "--compress", "--stdout"]
87
88 """
89
90 READ = 0
91 WRITE = 1
92
93 def __init__(self, filename=None, mode="rb", *, stderr=None, fileobj=None):
94 if "t" in mode or "U" in mode:
95 raise ValueError("Invalid mode: {!r}".format(mode))
96
97 if not "b" in mode:
98 mode += "b"
99
100 if mode.startswith("r"):
101 self.mode = self.READ
102 elif mode.startswith("w"):
103 self.mode = self.WRITE
104 else:
105 raise ValueError("Invalid mode %r" % mode)
106
107 if fileobj is not None:
108 self.fileobj = fileobj
109 else:
110 self.fileobj = builtins.open(filename, mode or "rb")
111
112 if self.mode == self.READ:
113 self.p = subprocess.Popen(
114 self.get_decompress(),
115 stdin=self.fileobj,
116 stdout=subprocess.PIPE,
117 stderr=stderr,
118 close_fds=True,
119 )
120 self.pipe = self.p.stdout
121 else:
122 self.p = subprocess.Popen(
123 self.get_compress(),
124 stdin=subprocess.PIPE,
125 stdout=self.fileobj,
126 stderr=stderr,
127 close_fds=True,
128 )
129 self.pipe = self.p.stdin
130
131 self.__closed = False
132
133 def _check_process(self):
134 if self.p is None:
135 return
136
137 returncode = self.p.wait()
138 if returncode:
139 raise CompressionError("Process died with %d" % returncode)
140 self.p = None
141
142 def close(self):
143 if self.closed:
144 return
145
146 self.pipe.close()
147 if self.p is not None:
148 self._check_process()
149 self.fileobj.close()
150
151 self.__closed = True
152
153 @property
154 def closed(self):
155 return self.__closed
156
157 def fileno(self):
158 return self.pipe.fileno()
159
160 def flush(self):
161 self.pipe.flush()
162
163 def isatty(self):
164 return self.pipe.isatty()
165
166 def readable(self):
167 return self.mode == self.READ
168
169 def writable(self):
170 return self.mode == self.WRITE
171
172 def readinto(self, b):
173 if self.mode != self.READ:
174 import errno
175
176 raise OSError(
177 errno.EBADF, "read() on write-only %s object" % self.__class__.__name__
178 )
179 size = self.pipe.readinto(b)
180 if size == 0:
181 self._check_process()
182 return size
183
184 def write(self, data):
185 if self.mode != self.WRITE:
186 import errno
187
188 raise OSError(
189 errno.EBADF, "write() on read-only %s object" % self.__class__.__name__
190 )
191 data = self.pipe.write(data)
192
193 if not data:
194 self._check_process()
195
196 return data
diff --git a/bitbake/lib/bb/compress/lz4.py b/bitbake/lib/bb/compress/lz4.py
new file mode 100644
index 0000000000..2a64681c86
--- /dev/null
+++ b/bitbake/lib/bb/compress/lz4.py
@@ -0,0 +1,19 @@
1#
2# Copyright BitBake Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import bb.compress._pipecompress
8
9
10def open(*args, **kwargs):
11 return bb.compress._pipecompress.open_wrap(LZ4File, *args, **kwargs)
12
13
14class LZ4File(bb.compress._pipecompress.PipeFile):
15 def get_compress(self):
16 return ["lz4", "-z", "-c"]
17
18 def get_decompress(self):
19 return ["lz4", "-d", "-c"]
diff --git a/bitbake/lib/bb/compress/zstd.py b/bitbake/lib/bb/compress/zstd.py
new file mode 100644
index 0000000000..cdbbe9d60f
--- /dev/null
+++ b/bitbake/lib/bb/compress/zstd.py
@@ -0,0 +1,30 @@
1#
2# Copyright BitBake Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7import bb.compress._pipecompress
8import shutil
9
10
11def open(*args, **kwargs):
12 return bb.compress._pipecompress.open_wrap(ZstdFile, *args, **kwargs)
13
14
15class ZstdFile(bb.compress._pipecompress.PipeFile):
16 def __init__(self, *args, num_threads=1, compresslevel=3, **kwargs):
17 self.num_threads = num_threads
18 self.compresslevel = compresslevel
19 super().__init__(*args, **kwargs)
20
21 def _get_zstd(self):
22 if self.num_threads == 1 or not shutil.which("pzstd"):
23 return ["zstd"]
24 return ["pzstd", "-p", "%d" % self.num_threads]
25
26 def get_compress(self):
27 return self._get_zstd() + ["-c", "-%d" % self.compresslevel]
28
29 def get_decompress(self):
30 return self._get_zstd() + ["-d", "-c"]
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index f4ab797edf..1810bcc604 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -8,22 +8,20 @@
8# 8#
9# SPDX-License-Identifier: GPL-2.0-only 9# SPDX-License-Identifier: GPL-2.0-only
10# 10#
11 11import enum
12import sys, os, glob, os.path, re, time 12import sys, os, glob, os.path, re, time
13import itertools 13import itertools
14import logging 14import logging
15import multiprocessing 15import multiprocessing
16import sre_constants
17import threading 16import threading
18from io import StringIO, UnsupportedOperation 17from io import StringIO, UnsupportedOperation
19from contextlib import closing 18from contextlib import closing
20from collections import defaultdict, namedtuple 19from collections import defaultdict, namedtuple
21import bb, bb.exceptions, bb.command 20import bb, bb.command
22from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build 21from bb import utils, data, parse, event, cache, providers, taskdata, runqueue, build
23import queue 22import queue
24import signal 23import signal
25import prserv.serv 24import prserv.serv
26import pyinotify
27import json 25import json
28import pickle 26import pickle
29import codecs 27import codecs
@@ -50,16 +48,15 @@ class CollectionError(bb.BBHandledException):
50 Exception raised when layer configuration is incorrect 48 Exception raised when layer configuration is incorrect
51 """ 49 """
52 50
53class state:
54 initial, parsing, running, shutdown, forceshutdown, stopped, error = list(range(7))
55 51
56 @classmethod 52class State(enum.Enum):
57 def get_name(cls, code): 53 INITIAL = 0,
58 for name in dir(cls): 54 PARSING = 1,
59 value = getattr(cls, name) 55 RUNNING = 2,
60 if type(value) == type(cls.initial) and value == code: 56 SHUTDOWN = 3,
61 return name 57 FORCE_SHUTDOWN = 4,
62 raise ValueError("Invalid status code: %s" % code) 58 STOPPED = 5,
59 ERROR = 6
63 60
64 61
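One quirk in the enum above: the trailing commas make every member value except ERROR a 1-tuple (State.INITIAL.value == (0,)). That is harmless because the cooker only ever compares members, never raw values, e.g.:

    if self.state in (State.SHUTDOWN, State.FORCE_SHUTDOWN):
        self.updateCache()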
65class SkippedPackage: 62class SkippedPackage:
@@ -81,7 +78,7 @@ class SkippedPackage:
81 78
82 79
83class CookerFeatures(object): 80class CookerFeatures(object):
84 _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS] = list(range(3)) 81 _feature_list = [HOB_EXTRA_CACHES, BASEDATASTORE_TRACKING, SEND_SANITYEVENTS, RECIPE_SIGGEN_INFO] = list(range(4))
85 82
86 def __init__(self): 83 def __init__(self):
87 self._features=set() 84 self._features=set()
@@ -104,12 +101,15 @@ class CookerFeatures(object):
104 101
105class EventWriter: 102class EventWriter:
106 def __init__(self, cooker, eventfile): 103 def __init__(self, cooker, eventfile):
107 self.file_inited = None
108 self.cooker = cooker 104 self.cooker = cooker
109 self.eventfile = eventfile 105 self.eventfile = eventfile
110 self.event_queue = [] 106 self.event_queue = []
111 107
112 def write_event(self, event): 108 def write_variables(self):
109 with open(self.eventfile, "a") as f:
110 f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))
111
112 def send(self, event):
113 with open(self.eventfile, "a") as f: 113 with open(self.eventfile, "a") as f:
114 try: 114 try:
115 str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8') 115 str_event = codecs.encode(pickle.dumps(event), 'base64').decode('utf-8')
@@ -119,28 +119,6 @@ class EventWriter:
119 import traceback 119 import traceback
120 print(err, traceback.format_exc()) 120 print(err, traceback.format_exc())
121 121
122 def send(self, event):
123 if self.file_inited:
124 # we have the file, just write the event
125 self.write_event(event)
126 else:
127 # init on bb.event.BuildStarted
128 name = "%s.%s" % (event.__module__, event.__class__.__name__)
129 if name in ("bb.event.BuildStarted", "bb.cooker.CookerExit"):
130 with open(self.eventfile, "w") as f:
131 f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))
132
133 self.file_inited = True
134
135 # write pending events
136 for evt in self.event_queue:
137 self.write_event(evt)
138
139 # also write the current event
140 self.write_event(event)
141 else:
142 # queue all events until the file is inited
143 self.event_queue.append(event)
144 122
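Net effect of the EventWriter change: the log no longer initialises lazily on BuildStarted; send() appends immediately, and the variable dump becomes an explicit, separate step. A hedged sketch of the new calling convention (cooker, event, and the path are illustrative):

    writer = EventWriter(cooker, "/tmp/eventlog.jsonl")
    writer.write_variables()   # one JSON line with all variables, written eagerly
    writer.send(event)         # each event appended as a base64-encoded pickle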
145#============================================================================# 123#============================================================================#
146# BBCooker 124# BBCooker
@@ -150,43 +128,34 @@ class BBCooker:
150 Manages one bitbake build run 128 Manages one bitbake build run
151 """ 129 """
152 130
153 def __init__(self, featureSet=None, idleCallBackRegister=None): 131 def __init__(self, featureSet=None, server=None):
154 self.recipecaches = None 132 self.recipecaches = None
133 self.baseconfig_valid = False
134 self.parsecache_valid = False
155 self.eventlog = None 135 self.eventlog = None
156 self.skiplist = {} 136 # The skiplists, one per multiconfig
137 self.skiplist_by_mc = defaultdict(dict)
157 self.featureset = CookerFeatures() 138 self.featureset = CookerFeatures()
158 if featureSet: 139 if featureSet:
159 for f in featureSet: 140 for f in featureSet:
160 self.featureset.setFeature(f) 141 self.featureset.setFeature(f)
161 142
143 self.orig_syspath = sys.path.copy()
144 self.orig_sysmodules = [*sys.modules]
145
162 self.configuration = bb.cookerdata.CookerConfiguration() 146 self.configuration = bb.cookerdata.CookerConfiguration()
163 147
164 self.idleCallBackRegister = idleCallBackRegister 148 self.process_server = server
149 self.idleCallBackRegister = None
150 self.waitIdle = None
151 if server:
152 self.idleCallBackRegister = server.register_idle_function
153 self.waitIdle = server.wait_for_idle
165 154
166 bb.debug(1, "BBCooker starting %s" % time.time()) 155 bb.debug(1, "BBCooker starting %s" % time.time())
167 sys.stdout.flush() 156
168 157 self.configwatched = {}
169 self.configwatcher = pyinotify.WatchManager() 158 self.parsewatched = {}
170 bb.debug(1, "BBCooker pyinotify1 %s" % time.time())
171 sys.stdout.flush()
172
173 self.configwatcher.bbseen = set()
174 self.configwatcher.bbwatchedfiles = set()
175 self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
176 bb.debug(1, "BBCooker pyinotify2 %s" % time.time())
177 sys.stdout.flush()
178 self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
179 pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
180 pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
181 self.watcher = pyinotify.WatchManager()
182 bb.debug(1, "BBCooker pyinotify3 %s" % time.time())
183 sys.stdout.flush()
184 self.watcher.bbseen = set()
185 self.watcher.bbwatchedfiles = set()
186 self.notifier = pyinotify.Notifier(self.watcher, self.notifications)
187
188 bb.debug(1, "BBCooker pyinotify complete %s" % time.time())
189 sys.stdout.flush()
190 159
191 # If being called by something like tinfoil, we need to clean cached data 160 # If being called by something like tinfoil, we need to clean cached data
192 # which may now be invalid 161 # which may now be invalid
@@ -197,14 +166,6 @@ class BBCooker:
197 self.hashserv = None 166 self.hashserv = None
198 self.hashservaddr = None 167 self.hashservaddr = None
199 168
200 self.inotify_modified_files = []
201
202 def _process_inotify_updates(server, cooker, abort):
203 cooker.process_inotify_updates()
204 return 1.0
205
206 self.idleCallBackRegister(_process_inotify_updates, self)
207
208 # TOSTOP must not be set or our children will hang when they output 169 # TOSTOP must not be set or our children will hang when they output
209 try: 170 try:
210 fd = sys.stdout.fileno() 171 fd = sys.stdout.fileno()
@@ -218,8 +179,8 @@ class BBCooker:
218 except UnsupportedOperation: 179 except UnsupportedOperation:
219 pass 180 pass
220 181
221 self.command = bb.command.Command(self) 182 self.command = bb.command.Command(self, self.process_server)
222 self.state = state.initial 183 self.state = State.INITIAL
223 184
224 self.parser = None 185 self.parser = None
225 186
@@ -228,108 +189,68 @@ class BBCooker:
228 signal.signal(signal.SIGHUP, self.sigterm_exception) 189 signal.signal(signal.SIGHUP, self.sigterm_exception)
229 190
230 bb.debug(1, "BBCooker startup complete %s" % time.time()) 191 bb.debug(1, "BBCooker startup complete %s" % time.time())
231 sys.stdout.flush()
232 192
233 def init_configdata(self): 193 def init_configdata(self):
234 if not hasattr(self, "data"): 194 if not hasattr(self, "data"):
235 self.initConfigurationData() 195 self.initConfigurationData()
236 bb.debug(1, "BBCooker parsed base configuration %s" % time.time()) 196 bb.debug(1, "BBCooker parsed base configuration %s" % time.time())
237 sys.stdout.flush()
238 self.handlePRServ() 197 self.handlePRServ()
239 198
240 def process_inotify_updates(self): 199 def _baseconfig_set(self, value):
241 for n in [self.confignotifier, self.notifier]: 200 if value and not self.baseconfig_valid:
242 if n.check_events(timeout=0): 201 bb.server.process.serverlog("Base config valid")
243 # read notified events and enqeue them 202 elif not value and self.baseconfig_valid:
244 n.read_events() 203 bb.server.process.serverlog("Base config invalidated")
245 n.process_events() 204 self.baseconfig_valid = value
246 205
247 def config_notifications(self, event): 206 def _parsecache_set(self, value):
248 if event.maskname == "IN_Q_OVERFLOW": 207 if value and not self.parsecache_valid:
249 bb.warn("inotify event queue overflowed, invalidating caches.") 208 bb.server.process.serverlog("Parse cache valid")
250 self.parsecache_valid = False 209 elif not value and self.parsecache_valid:
251 self.baseconfig_valid = False 210 bb.server.process.serverlog("Parse cache invalidated")
252 bb.parse.clear_cache() 211 self.parsecache_valid = value
253 return 212
254 if not event.pathname in self.configwatcher.bbwatchedfiles: 213 def add_filewatch(self, deps, configwatcher=False):
255 return 214 if configwatcher:
256 if not event.pathname in self.inotify_modified_files: 215 watcher = self.configwatched
257 self.inotify_modified_files.append(event.pathname) 216 else:
258 self.baseconfig_valid = False 217 watcher = self.parsewatched
259
260 def notifications(self, event):
261 if event.maskname == "IN_Q_OVERFLOW":
262 bb.warn("inotify event queue overflowed, invalidating caches.")
263 self.parsecache_valid = False
264 bb.parse.clear_cache()
265 return
266 if event.pathname.endswith("bitbake-cookerdaemon.log") \
267 or event.pathname.endswith("bitbake.lock"):
268 return
269 if not event.pathname in self.inotify_modified_files:
270 self.inotify_modified_files.append(event.pathname)
271 self.parsecache_valid = False
272 218
273 def add_filewatch(self, deps, watcher=None, dirs=False):
274 if not watcher:
275 watcher = self.watcher
276 for i in deps: 219 for i in deps:
277 watcher.bbwatchedfiles.add(i[0]) 220 f = i[0]
278 if dirs: 221 mtime = i[1]
279 f = i[0] 222 watcher[f] = mtime
280 else:
281 f = os.path.dirname(i[0])
282 if f in watcher.bbseen:
283 continue
284 watcher.bbseen.add(f)
285 watchtarget = None
286 while True:
287 # We try and add watches for files that don't exist but if they did, would influence
288 # the parser. The parent directory of these files may not exist, in which case we need
289 # to watch any parent that does exist for changes.
290 try:
291 watcher.add_watch(f, self.watchmask, quiet=False)
292 if watchtarget:
293 watcher.bbwatchedfiles.add(watchtarget)
294 break
295 except pyinotify.WatchManagerError as e:
296 if 'ENOENT' in str(e):
297 watchtarget = f
298 f = os.path.dirname(f)
299 if f in watcher.bbseen:
300 break
301 watcher.bbseen.add(f)
302 continue
303 if 'ENOSPC' in str(e):
304 providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
305 providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
306 providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
307 providerlog.error("Root privilege is required to modify max_user_watches.")
308 raise
309 223
310 def sigterm_exception(self, signum, stackframe): 224 def sigterm_exception(self, signum, stackframe):
311 if signum == signal.SIGTERM: 225 if signum == signal.SIGTERM:
312 bb.warn("Cooker received SIGTERM, shutting down...") 226 bb.warn("Cooker received SIGTERM, shutting down...")
313 elif signum == signal.SIGHUP: 227 elif signum == signal.SIGHUP:
314 bb.warn("Cooker received SIGHUP, shutting down...") 228 bb.warn("Cooker received SIGHUP, shutting down...")
315 self.state = state.forceshutdown 229 self.state = State.FORCE_SHUTDOWN
230 bb.event._should_exit.set()
316 231
317 def setFeatures(self, features): 232 def setFeatures(self, features):
318 # we only accept a new feature set if we're in state initial, so we can reset without problems 233 # we only accept a new feature set if we're in state initial, so we can reset without problems
319 if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]: 234 if not self.state in [State.INITIAL, State.SHUTDOWN, State.FORCE_SHUTDOWN, State.STOPPED, State.ERROR]:
320 raise Exception("Illegal state for feature set change") 235 raise Exception("Illegal state for feature set change")
321 original_featureset = list(self.featureset) 236 original_featureset = list(self.featureset)
322 for feature in features: 237 for feature in features:
323 self.featureset.setFeature(feature) 238 self.featureset.setFeature(feature)
324 bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset))) 239 bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
325 if (original_featureset != list(self.featureset)) and self.state != state.error and hasattr(self, "data"): 240 if (original_featureset != list(self.featureset)) and self.state != State.ERROR and hasattr(self, "data"):
326 self.reset() 241 self.reset()
327 242
328 def initConfigurationData(self): 243 def initConfigurationData(self):
329 244 self.state = State.INITIAL
330 self.state = state.initial
331 self.caches_array = [] 245 self.caches_array = []
332 246
247 sys.path = self.orig_syspath.copy()
248 for mod in [*sys.modules]:
249 if mod not in self.orig_sysmodules:
250 del sys.modules[mod]
251
252 self.configwatched = {}
253
333 # Need to preserve BB_CONSOLELOG over resets 254 # Need to preserve BB_CONSOLELOG over resets
334 consolelog = None 255 consolelog = None
335 if hasattr(self, "data"): 256 if hasattr(self, "data"):
@@ -338,12 +259,12 @@ class BBCooker:
338 if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset: 259 if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
339 self.enableDataTracking() 260 self.enableDataTracking()
340 261
341 all_extra_cache_names = [] 262 caches_name_array = ['bb.cache:CoreRecipeInfo']
342 # We hardcode all known cache types in a single place, here. 263 # We hardcode all known cache types in a single place, here.
343 if CookerFeatures.HOB_EXTRA_CACHES in self.featureset: 264 if CookerFeatures.HOB_EXTRA_CACHES in self.featureset:
344 all_extra_cache_names.append("bb.cache_extra:HobRecipeInfo") 265 caches_name_array.append("bb.cache_extra:HobRecipeInfo")
345 266 if CookerFeatures.RECIPE_SIGGEN_INFO in self.featureset:
346 caches_name_array = ['bb.cache:CoreRecipeInfo'] + all_extra_cache_names 267 caches_name_array.append("bb.cache:SiggenRecipeInfo")
347 268
348 # At least CoreRecipeInfo will be loaded, so caches_array will never be empty! 269 # At least CoreRecipeInfo will be loaded, so caches_array will never be empty!
349 # This is the entry point, no further check needed! 270 # This is the entry point, no further check needed!
@@ -359,9 +280,12 @@ class BBCooker:
359 self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False) 280 self.databuilder = bb.cookerdata.CookerDataBuilder(self.configuration, False)
360 self.databuilder.parseBaseConfiguration() 281 self.databuilder.parseBaseConfiguration()
361 self.data = self.databuilder.data 282 self.data = self.databuilder.data
362 self.data_hash = self.databuilder.data_hash
363 self.extraconfigdata = {} 283 self.extraconfigdata = {}
364 284
285 eventlog = self.data.getVar("BB_DEFAULT_EVENTLOG")
286 if not self.configuration.writeeventlog and eventlog:
287 self.setupEventLog(eventlog)
288
365 if consolelog: 289 if consolelog:
366 self.data.setVar("BB_CONSOLELOG", consolelog) 290 self.data.setVar("BB_CONSOLELOG", consolelog)
367 291
@@ -371,31 +295,48 @@ class BBCooker:
371 self.disableDataTracking() 295 self.disableDataTracking()
372 296
373 for mc in self.databuilder.mcdata.values(): 297 for mc in self.databuilder.mcdata.values():
374 mc.renameVar("__depends", "__base_depends") 298 self.add_filewatch(mc.getVar("__base_depends", False), configwatcher=True)
375 self.add_filewatch(mc.getVar("__base_depends", False), self.configwatcher)
376 299
377 self.baseconfig_valid = True 300 self._baseconfig_set(True)
378 self.parsecache_valid = False 301 self._parsecache_set(False)
379 302
380 def handlePRServ(self): 303 def handlePRServ(self):
381 # Setup a PR Server based on the new configuration 304 # Setup a PR Server based on the new configuration
382 try: 305 try:
383 self.prhost = prserv.serv.auto_start(self.data) 306 self.prhost = prserv.serv.auto_start(self.data)
384 except prserv.serv.PRServiceConfigError as e: 307 except prserv.serv.PRServiceConfigError as e:
385 bb.fatal("Unable to start PR Server, exitting") 308 bb.fatal("Unable to start PR Server, exiting, check the bitbake-cookerdaemon.log")
386 309
387 if self.data.getVar("BB_HASHSERVE") == "auto": 310 if self.data.getVar("BB_HASHSERVE") == "auto":
388 # Create a new hash server bound to a unix domain socket 311 # Create a new hash server bound to a unix domain socket
389 if not self.hashserv: 312 if not self.hashserv:
390 dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db" 313 dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
314 upstream = self.data.getVar("BB_HASHSERVE_UPSTREAM") or None
315 if upstream:
316 try:
317 with hashserv.create_client(upstream) as client:
318 client.ping()
319 except ImportError as e:
 320 bb.fatal("""Unable to use hash equivalence server at '%s' due to missing or incorrect python module:
 321%s
 322Please install the needed module on the build host, or use an environment containing it (e.g. a pip venv or OpenEmbedded's buildtools tarball).
323You can also remove the BB_HASHSERVE_UPSTREAM setting, but this may result in significantly longer build times as bitbake will be unable to reuse prebuilt sstate artefacts."""
324 % (upstream, repr(e)))
325 except ConnectionError as e:
326 bb.warn("Unable to connect to hash equivalence server at '%s', please correct or remove BB_HASHSERVE_UPSTREAM:\n%s"
327 % (upstream, repr(e)))
328 upstream = None
329
391 self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR") 330 self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
392 self.hashserv = hashserv.create_server(self.hashservaddr, dbfile, sync=False) 331 self.hashserv = hashserv.create_server(
393 self.hashserv.process = multiprocessing.Process(target=self.hashserv.serve_forever) 332 self.hashservaddr,
394 self.hashserv.process.start() 333 dbfile,
395 self.data.setVar("BB_HASHSERVE", self.hashservaddr) 334 sync=False,
396 self.databuilder.origdata.setVar("BB_HASHSERVE", self.hashservaddr) 335 upstream=upstream,
397 self.databuilder.data.setVar("BB_HASHSERVE", self.hashservaddr) 336 )
337 self.hashserv.serve_as_process(log_level=logging.WARNING)
398 for mc in self.databuilder.mcdata: 338 for mc in self.databuilder.mcdata:
339 self.databuilder.mcorigdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)
399 self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservaddr) 340 self.databuilder.mcdata[mc].setVar("BB_HASHSERVE", self.hashservaddr)
400 341
401 bb.parse.init_parser(self.data) 342 bb.parse.init_parser(self.data)
@@ -410,6 +351,34 @@ class BBCooker:
410 if hasattr(self, "data"): 351 if hasattr(self, "data"):
411 self.data.disableTracking() 352 self.data.disableTracking()
412 353
354 def revalidateCaches(self):
355 bb.parse.clear_cache()
356
357 clean = True
358 for f in self.configwatched:
359 if not bb.parse.check_mtime(f, self.configwatched[f]):
360 bb.server.process.serverlog("Found %s changed, invalid cache" % f)
361 self._baseconfig_set(False)
362 self._parsecache_set(False)
363 clean = False
364 break
365
366 if clean:
367 for f in self.parsewatched:
368 if not bb.parse.check_mtime(f, self.parsewatched[f]):
369 bb.server.process.serverlog("Found %s changed, invalid cache" % f)
370 self._parsecache_set(False)
371 clean = False
372 break
373
374 if not clean:
375 bb.parse.BBHandler.cached_statements = {}
376
377 # If writes were made to any of the data stores, we need to recalculate the data
378 # store cache
379 if hasattr(self, "databuilder"):
380 self.databuilder.calc_datastore_hashes()
381
413 def parseConfiguration(self): 382 def parseConfiguration(self):
414 self.updateCacheSync() 383 self.updateCacheSync()
415 384
@@ -428,8 +397,24 @@ class BBCooker:
428 self.recipecaches[mc] = bb.cache.CacheData(self.caches_array) 397 self.recipecaches[mc] = bb.cache.CacheData(self.caches_array)
429 398
430 self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS")) 399 self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS"))
431 400 self.collections = {}
432 self.parsecache_valid = False 401 for mc in self.multiconfigs:
402 self.collections[mc] = CookerCollectFiles(self.bbfile_config_priorities, mc)
403
404 self._parsecache_set(False)
405
406 def setupEventLog(self, eventlog):
407 if self.eventlog and self.eventlog[0] != eventlog:
408 bb.event.unregister_UIHhandler(self.eventlog[1])
409 self.eventlog = None
410 if not self.eventlog or self.eventlog[0] != eventlog:
411 # we log all events to a file if so directed
412 # register the log file writer as UI Handler
413 if not os.path.exists(os.path.dirname(eventlog)):
414 bb.utils.mkdirhier(os.path.dirname(eventlog))
415 writer = EventWriter(self, eventlog)
416 EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
417 self.eventlog = (eventlog, bb.event.register_UIHhandler(EventLogWriteHandler(writer)), writer)
433 418
434 def updateConfigOpts(self, options, environment, cmdline): 419 def updateConfigOpts(self, options, environment, cmdline):
435 self.ui_cmdline = cmdline 420 self.ui_cmdline = cmdline
@@ -450,14 +435,7 @@ class BBCooker:
450 setattr(self.configuration, o, options[o]) 435 setattr(self.configuration, o, options[o])
451 436
452 if self.configuration.writeeventlog: 437 if self.configuration.writeeventlog:
453 if self.eventlog and self.eventlog[0] != self.configuration.writeeventlog: 438 self.setupEventLog(self.configuration.writeeventlog)
454 bb.event.unregister_UIHhandler(self.eventlog[1])
455 if not self.eventlog or self.eventlog[0] != self.configuration.writeeventlog:
456 # we log all events to a file if so directed
457 # register the log file writer as UI Handler
458 writer = EventWriter(self, self.configuration.writeeventlog)
459 EventLogWriteHandler = namedtuple('EventLogWriteHandler', ['event'])
460 self.eventlog = (self.configuration.writeeventlog, bb.event.register_UIHhandler(EventLogWriteHandler(writer)))
461 439
462 bb.msg.loggerDefaultLogLevel = self.configuration.default_loglevel 440 bb.msg.loggerDefaultLogLevel = self.configuration.default_loglevel
463 bb.msg.loggerDefaultDomains = self.configuration.debug_domains 441 bb.msg.loggerDefaultDomains = self.configuration.debug_domains
@@ -487,37 +465,37 @@ class BBCooker:
487 # Now update all the variables not in the datastore to match 465 # Now update all the variables not in the datastore to match
488 self.configuration.env = environment 466 self.configuration.env = environment
489 467
468 self.revalidateCaches()
490 if not clean: 469 if not clean:
491 logger.debug("Base environment change, triggering reparse") 470 logger.debug("Base environment change, triggering reparse")
492 self.reset() 471 self.reset()
493 472
494 def runCommands(self, server, data, abort):
495 """
496 Run any queued asynchronous command
497 This is done by the idle handler so it runs in true context rather than
498 tied to any UI.
499 """
500
501 return self.command.runAsyncCommand()
502
503 def showVersions(self): 473 def showVersions(self):
504 474
505 (latest_versions, preferred_versions) = self.findProviders() 475 (latest_versions, preferred_versions, required) = self.findProviders()
506 476
507 logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version") 477 logger.plain("%-35s %25s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version", "Required Version")
508 logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================") 478 logger.plain("%-35s %25s %25s %25s\n", "===========", "==============", "=================", "================")
509 479
510 for p in sorted(self.recipecaches[''].pkg_pn): 480 for p in sorted(self.recipecaches[''].pkg_pn):
511 pref = preferred_versions[p] 481 preferred = preferred_versions[p]
512 latest = latest_versions[p] 482 latest = latest_versions[p]
483 requiredstr = ""
484 preferredstr = ""
485 if required[p]:
486 if preferred[0] is not None:
487 requiredstr = preferred[0][0] + ":" + preferred[0][1] + '-' + preferred[0][2]
488 else:
489 bb.fatal("REQUIRED_VERSION of package %s not available" % p)
490 else:
491 preferredstr = preferred[0][0] + ":" + preferred[0][1] + '-' + preferred[0][2]
513 492
514 prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
515 lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2] 493 lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]
516 494
517 if pref == latest: 495 if preferred == latest:
518 prefstr = "" 496 preferredstr = ""
519 497
520 logger.plain("%-35s %25s %25s", p, lateststr, prefstr) 498 logger.plain("%-35s %25s %25s %25s", p, lateststr, preferredstr, requiredstr)
521 499
522 def showEnvironment(self, buildfile=None, pkgs_to_build=None): 500 def showEnvironment(self, buildfile=None, pkgs_to_build=None):
523 """ 501 """
@@ -533,6 +511,8 @@ class BBCooker:
533 if not orig_tracking: 511 if not orig_tracking:
534 self.enableDataTracking() 512 self.enableDataTracking()
535 self.reset() 513 self.reset()
514 # reset() resets to the UI requested value so we have to redo this
515 self.enableDataTracking()
536 516
537 def mc_base(p): 517 def mc_base(p):
538 if p.startswith('mc:'): 518 if p.startswith('mc:'):
@@ -556,21 +536,21 @@ class BBCooker:
556 if pkgs_to_build[0] in set(ignore.split()): 536 if pkgs_to_build[0] in set(ignore.split()):
557 bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0]) 537 bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])
558 538
559 taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.abort, allowincomplete=True) 539 taskdata, runlist = self.buildTaskData(pkgs_to_build, None, self.configuration.halt, allowincomplete=True)
560 540
561 mc = runlist[0][0] 541 mc = runlist[0][0]
562 fn = runlist[0][3] 542 fn = runlist[0][3]
563 543
564 if fn: 544 if fn:
565 try: 545 try:
566 bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array) 546 layername = self.collections[mc].calc_bbfile_priority(fn)[2]
567 envdata = bb_caches[mc].loadDataFull(fn, self.collections[mc].get_file_appends(fn)) 547 envdata = self.databuilder.parseRecipe(fn, self.collections[mc].get_file_appends(fn), layername)
568 except Exception as e: 548 except Exception as e:
569 parselog.exception("Unable to read %s", fn) 549 parselog.exception("Unable to read %s", fn)
570 raise 550 raise
571 else: 551 else:
572 if not mc in self.databuilder.mcdata: 552 if not mc in self.databuilder.mcdata:
573 bb.fatal('Not multiconfig named "%s" found' % mc) 553 bb.fatal('No multiconfig named "%s" found' % mc)
574 envdata = self.databuilder.mcdata[mc] 554 envdata = self.databuilder.mcdata[mc]
575 data.expandKeys(envdata) 555 data.expandKeys(envdata)
576 parse.ast.runAnonFuncs(envdata) 556 parse.ast.runAnonFuncs(envdata)
@@ -585,7 +565,7 @@ class BBCooker:
585 data.emit_env(env, envdata, True) 565 data.emit_env(env, envdata, True)
586 logger.plain(env.getvalue()) 566 logger.plain(env.getvalue())
587 567
588 # emit the metadata which isnt valid shell 568 # emit the metadata which isn't valid shell
589 for e in sorted(envdata.keys()): 569 for e in sorted(envdata.keys()):
590 if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False): 570 if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False):
591 logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False)) 571 logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False))
@@ -594,7 +574,7 @@ class BBCooker:
594 self.disableDataTracking() 574 self.disableDataTracking()
595 self.reset() 575 self.reset()
596 576
597 def buildTaskData(self, pkgs_to_build, task, abort, allowincomplete=False): 577 def buildTaskData(self, pkgs_to_build, task, halt, allowincomplete=False):
598 """ 578 """
599 Prepare a runqueue and taskdata object for iteration over pkgs_to_build 579 Prepare a runqueue and taskdata object for iteration over pkgs_to_build
600 """ 580 """
@@ -641,8 +621,8 @@ class BBCooker:
641 localdata = {} 621 localdata = {}
642 622
643 for mc in self.multiconfigs: 623 for mc in self.multiconfigs:
644 taskdata[mc] = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete) 624 taskdata[mc] = bb.taskdata.TaskData(halt, skiplist=self.skiplist_by_mc[mc], allowincomplete=allowincomplete)
645 localdata[mc] = data.createCopy(self.databuilder.mcdata[mc]) 625 localdata[mc] = bb.data.createCopy(self.databuilder.mcdata[mc])
646 bb.data.expandKeys(localdata[mc]) 626 bb.data.expandKeys(localdata[mc])
647 627
648 current = 0 628 current = 0
@@ -690,19 +670,18 @@ class BBCooker:
690 taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc]) 670 taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
691 mcdeps |= set(taskdata[mc].get_mcdepends()) 671 mcdeps |= set(taskdata[mc].get_mcdepends())
692 new = False 672 new = False
693 for mc in self.multiconfigs: 673 for k in mcdeps:
694 for k in mcdeps: 674 if k in seen:
695 if k in seen: 675 continue
696 continue 676 l = k.split(':')
697 l = k.split(':') 677 depmc = l[2]
698 depmc = l[2] 678 if depmc not in self.multiconfigs:
 699 if depmc not in self.multiconfigs: 679 bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (k,depmc))
 700 bb.fatal("Multiconfig dependency %s depends on nonexistent multiconfig configuration named %s" % (k,depmc)) 680 else:
701 else: 681 logger.debug("Adding providers for multiconfig dependency %s" % l[3])
702 logger.debug("Adding providers for multiconfig dependency %s" % l[3]) 682 taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3])
703 taskdata[depmc].add_provider(localdata[depmc], self.recipecaches[depmc], l[3]) 683 seen.add(k)
704 seen.add(k) 684 new = True
705 new = True
706 685
707 for mc in self.multiconfigs: 686 for mc in self.multiconfigs:
708 taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc]) 687 taskdata[mc].add_unresolved(localdata[mc], self.recipecaches[mc])
@@ -710,14 +689,14 @@ class BBCooker:
710 bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data) 689 bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
711 return taskdata, runlist 690 return taskdata, runlist
712 691
713 def prepareTreeData(self, pkgs_to_build, task): 692 def prepareTreeData(self, pkgs_to_build, task, halt=False):
714 """ 693 """
715 Prepare a runqueue and taskdata object for iteration over pkgs_to_build 694 Prepare a runqueue and taskdata object for iteration over pkgs_to_build
716 """ 695 """
717 696
718 # We set abort to False here to prevent unbuildable targets raising 697 # We set halt to False here to prevent unbuildable targets raising
719 # an exception when we're just generating data 698 # an exception when we're just generating data
720 taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True) 699 taskdata, runlist = self.buildTaskData(pkgs_to_build, task, halt, allowincomplete=True)
721 700
722 return runlist, taskdata 701 return runlist, taskdata
723 702
@@ -731,7 +710,7 @@ class BBCooker:
731 if not task.startswith("do_"): 710 if not task.startswith("do_"):
732 task = "do_%s" % task 711 task = "do_%s" % task
733 712
734 runlist, taskdata = self.prepareTreeData(pkgs_to_build, task) 713 runlist, taskdata = self.prepareTreeData(pkgs_to_build, task, halt=True)
735 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist) 714 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
736 rq.rqdata.prepare() 715 rq.rqdata.prepare()
737 return self.buildDependTree(rq, taskdata) 716 return self.buildDependTree(rq, taskdata)
@@ -792,7 +771,9 @@ class BBCooker:
792 for dep in rq.rqdata.runtaskentries[tid].depends: 771 for dep in rq.rqdata.runtaskentries[tid].depends:
793 (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep) 772 (depmc, depfn, _, deptaskfn) = bb.runqueue.split_tid_mcfn(dep)
794 deppn = self.recipecaches[depmc].pkg_fn[deptaskfn] 773 deppn = self.recipecaches[depmc].pkg_fn[deptaskfn]
795 depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, bb.runqueue.taskname_from_tid(dep))) 774 if depmc:
775 depmc = "mc:" + depmc + ":"
776 depend_tree["tdepends"][dotname].append("%s%s.%s" % (depmc, deppn, bb.runqueue.taskname_from_tid(dep)))
796 if taskfn not in seen_fns: 777 if taskfn not in seen_fns:
797 seen_fns.append(taskfn) 778 seen_fns.append(taskfn)
798 packages = [] 779 packages = []
@@ -924,10 +905,11 @@ class BBCooker:
924 905
925 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task) 906 depgraph = self.generateTaskDepTreeData(pkgs_to_build, task)
926 907
927 with open('pn-buildlist', 'w') as f: 908 pns = depgraph["pn"].keys()
928 for pn in depgraph["pn"]: 909 if pns:
929 f.write(pn + "\n") 910 with open('pn-buildlist', 'w') as f:
930 logger.info("PN build list saved to 'pn-buildlist'") 911 f.write("%s\n" % "\n".join(sorted(pns)))
912 logger.info("PN build list saved to 'pn-buildlist'")
931 913
932 # Remove old format output files to ensure no confusion with stale data 914 # Remove old format output files to ensure no confusion with stale data
933 try: 915 try:
@@ -961,7 +943,7 @@ class BBCooker:
961 for mc in self.multiconfigs: 943 for mc in self.multiconfigs:
962 # First get list of recipes, including skipped 944 # First get list of recipes, including skipped
963 recipefns = list(self.recipecaches[mc].pkg_fn.keys()) 945 recipefns = list(self.recipecaches[mc].pkg_fn.keys())
964 recipefns.extend(self.skiplist.keys()) 946 recipefns.extend(self.skiplist_by_mc[mc].keys())
965 947
966 # Work out list of bbappends that have been applied 948 # Work out list of bbappends that have been applied
967 applied_appends = [] 949 applied_appends = []
@@ -980,13 +962,7 @@ class BBCooker:
980 '\n '.join(appends_without_recipes[mc]))) 962 '\n '.join(appends_without_recipes[mc])))
981 963
982 if msgs: 964 if msgs:
983 msg = "\n".join(msgs) 965 bb.fatal("\n".join(msgs))
984 warn_only = self.databuilder.mcdata[mc].getVar("BB_DANGLINGAPPENDS_WARNONLY", \
985 False) or "no"
986 if warn_only.lower() in ("1", "yes", "true"):
987 bb.warn(msg)
988 else:
989 bb.fatal(msg)
990 966
991 def handlePrefProviders(self): 967 def handlePrefProviders(self):
992 968
@@ -1056,6 +1032,11 @@ class BBCooker:
1056 if matches: 1032 if matches:
1057 bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data) 1033 bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
1058 1034
1035 def testCookerCommandEvent(self, filepattern):
1036 # Dummy command used by OEQA selftest to test tinfoil without IO
1037 matches = ["A", "B"]
1038 bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data)
1039
1059 def findProviders(self, mc=''): 1040 def findProviders(self, mc=''):
1060 return bb.providers.findProviders(self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn) 1041 return bb.providers.findProviders(self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
1061 1042
@@ -1063,10 +1044,16 @@ class BBCooker:
1063 if pn in self.recipecaches[mc].providers: 1044 if pn in self.recipecaches[mc].providers:
1064 filenames = self.recipecaches[mc].providers[pn] 1045 filenames = self.recipecaches[mc].providers[pn]
1065 eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.databuilder.mcdata[mc], self.recipecaches[mc]) 1046 eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.databuilder.mcdata[mc], self.recipecaches[mc])
1066 filename = eligible[0] 1047 if eligible is not None:
1048 filename = eligible[0]
1049 else:
1050 filename = None
1067 return None, None, None, filename 1051 return None, None, None, filename
1068 elif pn in self.recipecaches[mc].pkg_pn: 1052 elif pn in self.recipecaches[mc].pkg_pn:
1069 return bb.providers.findBestProvider(pn, self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn) 1053 (latest, latest_f, preferred_ver, preferred_file, required) = bb.providers.findBestProvider(pn, self.databuilder.mcdata[mc], self.recipecaches[mc], self.recipecaches[mc].pkg_pn)
1054 if required and preferred_file is None:
1055 return None, None, None, None
1056 return (latest, latest_f, preferred_ver, preferred_file)
1070 else: 1057 else:
1071 return None, None, None, None 1058 return None, None, None, None
1072 1059
@@ -1211,15 +1198,15 @@ class BBCooker:
1211 except bb.utils.VersionStringException as vse: 1198 except bb.utils.VersionStringException as vse:
1212 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse))) 1199 bb.fatal('Error parsing LAYERRECOMMENDS_%s: %s' % (c, str(vse)))
1213 if not res: 1200 if not res:
1214 parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver) 1201 parselog.debug3("Layer '%s' recommends version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec, layerver)
1215 continue 1202 continue
1216 else: 1203 else:
1217 parselog.debug(3,"Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec) 1204 parselog.debug3("Layer '%s' recommends version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, rec)
1218 continue 1205 continue
1219 parselog.debug(3,"Layer '%s' recommends layer '%s', so we are adding it", c, rec) 1206 parselog.debug3("Layer '%s' recommends layer '%s', so we are adding it", c, rec)
1220 collection_depends[c].append(rec) 1207 collection_depends[c].append(rec)
1221 else: 1208 else:
1222 parselog.debug(3,"Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec) 1209 parselog.debug3("Layer '%s' recommends layer '%s', but this layer is not enabled in your configuration", c, rec)
1223 1210
1224 # Recursively work out collection priorities based on dependencies 1211 # Recursively work out collection priorities based on dependencies
1225 def calc_layer_priority(collection): 1212 def calc_layer_priority(collection):
@@ -1231,7 +1218,7 @@ class BBCooker:
1231 if depprio > max_depprio: 1218 if depprio > max_depprio:
1232 max_depprio = depprio 1219 max_depprio = depprio
1233 max_depprio += 1 1220 max_depprio += 1
1234 parselog.debug(1, "Calculated priority of layer %s as %d", collection, max_depprio) 1221 parselog.debug("Calculated priority of layer %s as %d", collection, max_depprio)
1235 collection_priorities[collection] = max_depprio 1222 collection_priorities[collection] = max_depprio
1236 1223
1237 # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities 1224 # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities
@@ -1243,7 +1230,7 @@ class BBCooker:
1243 errors = True 1230 errors = True
1244 continue 1231 continue
1245 elif regex == "": 1232 elif regex == "":
1246 parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c) 1233 parselog.debug("BBFILE_PATTERN_%s is empty" % c)
1247 cre = re.compile('^NULL$') 1234 cre = re.compile('^NULL$')
1248 errors = False 1235 errors = False
1249 else: 1236 else:
@@ -1290,8 +1277,8 @@ class BBCooker:
1290 if bf.startswith("/") or bf.startswith("../"): 1277 if bf.startswith("/") or bf.startswith("../"):
1291 bf = os.path.abspath(bf) 1278 bf = os.path.abspath(bf)
1292 1279
1293 self.collections = {mc: CookerCollectFiles(self.bbfile_config_priorities, mc)} 1280 collections = {mc: CookerCollectFiles(self.bbfile_config_priorities, mc)}
1294 filelist, masked, searchdirs = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc]) 1281 filelist, masked, searchdirs = collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
1295 try: 1282 try:
1296 os.stat(bf) 1283 os.stat(bf)
1297 bf = os.path.abspath(bf) 1284 bf = os.path.abspath(bf)
@@ -1355,9 +1342,10 @@ class BBCooker:
1355 self.buildSetVars() 1342 self.buildSetVars()
1356 self.reset_mtime_caches() 1343 self.reset_mtime_caches()
1357 1344
1358 bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.data_hash, self.caches_array) 1345 bb_caches = bb.cache.MulticonfigCache(self.databuilder, self.databuilder.data_hash, self.caches_array)
1359 1346
1360 infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn)) 1347 layername = self.collections[mc].calc_bbfile_priority(fn)[2]
1348 infos = bb_caches[mc].parse(fn, self.collections[mc].get_file_appends(fn), layername)
1361 infos = dict(infos) 1349 infos = dict(infos)
1362 1350
1363 fn = bb.cache.realfn2virtual(fn, cls, mc) 1351 fn = bb.cache.realfn2virtual(fn, cls, mc)
@@ -1383,14 +1371,16 @@ class BBCooker:
1383 self.recipecaches[mc].rundeps[fn] = defaultdict(list) 1371 self.recipecaches[mc].rundeps[fn] = defaultdict(list)
1384 self.recipecaches[mc].runrecs[fn] = defaultdict(list) 1372 self.recipecaches[mc].runrecs[fn] = defaultdict(list)
1385 1373
1374 bb.parse.siggen.setup_datacache(self.recipecaches)
1375
1386 # Invalidate task for target if force mode active 1376 # Invalidate task for target if force mode active
1387 if self.configuration.force: 1377 if self.configuration.force:
1388 logger.verbose("Invalidate task %s, %s", task, fn) 1378 logger.verbose("Invalidate task %s, %s", task, fn)
1389 bb.parse.siggen.invalidate_task(task, self.recipecaches[mc], fn) 1379 bb.parse.siggen.invalidate_task(task, fn)
1390 1380
1391 # Setup taskdata structure 1381 # Setup taskdata structure
1392 taskdata = {} 1382 taskdata = {}
1393 taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort) 1383 taskdata[mc] = bb.taskdata.TaskData(self.configuration.halt)
1394 taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item) 1384 taskdata[mc].add_provider(self.databuilder.mcdata[mc], self.recipecaches[mc], item)
1395 1385
1396 if quietlog: 1386 if quietlog:
@@ -1400,21 +1390,24 @@ class BBCooker:
1400 buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME") 1390 buildname = self.databuilder.mcdata[mc].getVar("BUILDNAME")
1401 if fireevents: 1391 if fireevents:
1402 bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc]) 1392 bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.databuilder.mcdata[mc])
1393 if self.eventlog:
1394 self.eventlog[2].write_variables()
1395 bb.event.enable_heartbeat()
1403 1396
1404 # Execute the runqueue 1397 # Execute the runqueue
1405 runlist = [[mc, item, task, fn]] 1398 runlist = [[mc, item, task, fn]]
1406 1399
1407 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist) 1400 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
1408 1401
1409 def buildFileIdle(server, rq, abort): 1402 def buildFileIdle(server, rq, halt):
1410 1403
1411 msg = None 1404 msg = None
1412 interrupted = 0 1405 interrupted = 0
1413 if abort or self.state == state.forceshutdown: 1406 if halt or self.state == State.FORCE_SHUTDOWN:
1414 rq.finish_runqueue(True) 1407 rq.finish_runqueue(True)
1415 msg = "Forced shutdown" 1408 msg = "Forced shutdown"
1416 interrupted = 2 1409 interrupted = 2
1417 elif self.state == state.shutdown: 1410 elif self.state == State.SHUTDOWN:
1418 rq.finish_runqueue(False) 1411 rq.finish_runqueue(False)
1419 msg = "Stopped build" 1412 msg = "Stopped build"
1420 interrupted = 1 1413 interrupted = 1
@@ -1425,41 +1418,71 @@ class BBCooker:
1425 failures += len(exc.args) 1418 failures += len(exc.args)
1426 retval = False 1419 retval = False
1427 except SystemExit as exc: 1420 except SystemExit as exc:
1428 self.command.finishAsyncCommand(str(exc))
1429 if quietlog: 1421 if quietlog:
1430 bb.runqueue.logger.setLevel(rqloglevel) 1422 bb.runqueue.logger.setLevel(rqloglevel)
1431 return False 1423 return bb.server.process.idleFinish(str(exc))
1432 1424
1433 if not retval: 1425 if not retval:
1434 if fireevents: 1426 if fireevents:
1435 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc]) 1427 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.databuilder.mcdata[mc])
1436 self.command.finishAsyncCommand(msg) 1428 bb.event.disable_heartbeat()
1437 # We trashed self.recipecaches above 1429 # We trashed self.recipecaches above
1438 self.parsecache_valid = False 1430 self._parsecache_set(False)
1439 self.configuration.limited_deps = False 1431 self.configuration.limited_deps = False
1440 bb.parse.siggen.reset(self.data) 1432 bb.parse.siggen.reset(self.data)
1441 if quietlog: 1433 if quietlog:
1442 bb.runqueue.logger.setLevel(rqloglevel) 1434 bb.runqueue.logger.setLevel(rqloglevel)
1443 return False 1435 return bb.server.process.idleFinish(msg)
1444 if retval is True: 1436 if retval is True:
1445 return True 1437 return True
1446 return retval 1438 return retval
1447 1439
1448 self.idleCallBackRegister(buildFileIdle, rq) 1440 self.idleCallBackRegister(buildFileIdle, rq)
1449 1441
1442 def getTaskSignatures(self, target, tasks):
1443 sig = []
1444 getAllTaskSignatures = False
1445
1446 if not tasks:
1447 tasks = ["do_build"]
1448 getAllTaskSignatures = True
1449
1450 for task in tasks:
1451 taskdata, runlist = self.buildTaskData(target, task, self.configuration.halt)
1452 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
1453 rq.rqdata.prepare()
1454
1455 for l in runlist:
1456 mc, pn, taskname, fn = l
1457
1458 taskdep = rq.rqdata.dataCaches[mc].task_deps[fn]
1459 for t in taskdep['tasks']:
1460 if t in taskdep['nostamp'] or "setscene" in t:
1461 continue
1462 tid = bb.runqueue.build_tid(mc, fn, t)
1463
1464 if t in task or getAllTaskSignatures:
1465 try:
1466 sig.append([pn, t, rq.rqdata.get_task_unihash(tid)])
1467 except KeyError:
1468 sig.append(self.getTaskSignatures(target, [t])[0])
1469
1470 return sig
1471
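getTaskSignatures is a new entry point: given a target and an optional task list it prepares a runqueue and collects (pn, taskname, unihash) triples, falling back to every task reachable from do_build when no tasks are named. A hypothetical call from code holding a cooker instance (the target and task names are invented):

# Hypothetical usage; "quilt-native" and "do_fetch" are illustrative only.
sigs = cooker.getTaskSignatures(["quilt-native"], ["do_fetch"])
for pn, taskname, unihash in sigs:
    print("%s:%s -> %s" % (pn, taskname, unihash))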
1450 def buildTargets(self, targets, task): 1472 def buildTargets(self, targets, task):
1451 """ 1473 """
1452 Attempt to build the targets specified 1474 Attempt to build the targets specified
1453 """ 1475 """
1454 1476
1455 def buildTargetsIdle(server, rq, abort): 1477 def buildTargetsIdle(server, rq, halt):
1456 msg = None 1478 msg = None
1457 interrupted = 0 1479 interrupted = 0
1458 if abort or self.state == state.forceshutdown: 1480 if halt or self.state == State.FORCE_SHUTDOWN:
1481 bb.event._should_exit.set()
1459 rq.finish_runqueue(True) 1482 rq.finish_runqueue(True)
1460 msg = "Forced shutdown" 1483 msg = "Forced shutdown"
1461 interrupted = 2 1484 interrupted = 2
1462 elif self.state == state.shutdown: 1485 elif self.state == State.SHUTDOWN:
1463 rq.finish_runqueue(False) 1486 rq.finish_runqueue(False)
1464 msg = "Stopped build" 1487 msg = "Stopped build"
1465 interrupted = 1 1488 interrupted = 1
@@ -1470,16 +1493,16 @@ class BBCooker:
1470 failures += len(exc.args) 1493 failures += len(exc.args)
1471 retval = False 1494 retval = False
1472 except SystemExit as exc: 1495 except SystemExit as exc:
1473 self.command.finishAsyncCommand(str(exc)) 1496 return bb.server.process.idleFinish(str(exc))
1474 return False
1475 1497
1476 if not retval: 1498 if not retval:
1477 try: 1499 try:
1478 for mc in self.multiconfigs: 1500 for mc in self.multiconfigs:
1479 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc]) 1501 bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, targets, failures, interrupted), self.databuilder.mcdata[mc])
1480 finally: 1502 finally:
1481 self.command.finishAsyncCommand(msg) 1503 bb.event.disable_heartbeat()
1482 return False 1504 return bb.server.process.idleFinish(msg)
1505
1483 if retval is True: 1506 if retval is True:
1484 return True 1507 return True
1485 return retval 1508 return retval
@@ -1498,7 +1521,7 @@ class BBCooker:
1498 1521
1499 bb.event.fire(bb.event.BuildInit(packages), self.data) 1522 bb.event.fire(bb.event.BuildInit(packages), self.data)
1500 1523
1501 taskdata, runlist = self.buildTaskData(targets, task, self.configuration.abort) 1524 taskdata, runlist = self.buildTaskData(targets, task, self.configuration.halt)
1502 1525
1503 buildname = self.data.getVar("BUILDNAME", False) 1526 buildname = self.data.getVar("BUILDNAME", False)
1504 1527
@@ -1511,6 +1534,9 @@ class BBCooker:
1511 1534
1512 for mc in self.multiconfigs: 1535 for mc in self.multiconfigs:
1513 bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc]) 1536 bb.event.fire(bb.event.BuildStarted(buildname, ntargets), self.databuilder.mcdata[mc])
1537 if self.eventlog:
1538 self.eventlog[2].write_variables()
1539 bb.event.enable_heartbeat()
1514 1540
1515 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist) 1541 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
1516 if 'universe' in targets: 1542 if 'universe' in targets:
@@ -1520,7 +1546,13 @@ class BBCooker:
1520 1546
1521 1547
1522 def getAllKeysWithFlags(self, flaglist): 1548 def getAllKeysWithFlags(self, flaglist):
1549 def dummy_autorev(d):
1550 return
1551
1523 dump = {} 1552 dump = {}
1553 # Horrible but for now we need to avoid any side effects of autorev being called
1554 saved = bb.fetch2.get_autorev
1555 bb.fetch2.get_autorev = dummy_autorev
1524 for k in self.data.keys(): 1556 for k in self.data.keys():
1525 try: 1557 try:
1526 expand = True 1558 expand = True
@@ -1540,20 +1572,14 @@ class BBCooker:
1540 dump[k][d] = None 1572 dump[k][d] = None
1541 except Exception as e: 1573 except Exception as e:
1542 print(e) 1574 print(e)
1575 bb.fetch2.get_autorev = saved
1543 return dump 1576 return dump
1544 1577
1545 1578
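The autorev workaround is a plain save/replace/restore monkeypatch of a module attribute. If the dump loop could raise, the same pattern is usually written with try/finally so the real function is always restored; a generic sketch with a stand-in module (not bitbake's fetch2):

import types

mod = types.SimpleNamespace(get_autorev=lambda d: "real-srcrev")  # stand-in

def dummy_autorev(d):
    return None  # swallow the side effect

saved = mod.get_autorev
mod.get_autorev = dummy_autorev
try:
    assert mod.get_autorev(None) is None  # patched behaviour in effect
finally:
    mod.get_autorev = saved  # restored even if the body raises
assert mod.get_autorev(None) == "real-srcrev"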
1546 def updateCacheSync(self): 1579 def updateCacheSync(self):
1547 if self.state == state.running: 1580 if self.state == State.RUNNING:
1548 return 1581 return
1549 1582
1550 # reload files for which we got notifications
1551 for p in self.inotify_modified_files:
1552 bb.parse.update_cache(p)
1553 if p in bb.parse.BBHandler.cached_statements:
1554 del bb.parse.BBHandler.cached_statements[p]
1555 self.inotify_modified_files = []
1556
1557 if not self.baseconfig_valid: 1583 if not self.baseconfig_valid:
1558 logger.debug("Reloading base configuration data") 1584 logger.debug("Reloading base configuration data")
1559 self.initConfigurationData() 1585 self.initConfigurationData()
@@ -1561,19 +1587,22 @@ class BBCooker:
1561 1587
1562 # This is called for all async commands when self.state != running 1588 # This is called for all async commands when self.state != running
1563 def updateCache(self): 1589 def updateCache(self):
1564 if self.state == state.running: 1590 if self.state == State.RUNNING:
1565 return 1591 return
1566 1592
1567 if self.state in (state.shutdown, state.forceshutdown, state.error): 1593 if self.state in (State.SHUTDOWN, State.FORCE_SHUTDOWN, State.ERROR):
1568 if hasattr(self.parser, 'shutdown'): 1594 if hasattr(self.parser, 'shutdown'):
1569 self.parser.shutdown(clean=False, force = True) 1595 self.parser.shutdown(clean=False)
1570 self.parser.final_cleanup() 1596 self.parser.final_cleanup()
1571 raise bb.BBHandledException() 1597 raise bb.BBHandledException()
1572 1598
1573 if self.state != state.parsing: 1599 if self.state != State.PARSING:
1574 self.updateCacheSync() 1600 self.updateCacheSync()
1575 1601
1576 if self.state != state.parsing and not self.parsecache_valid: 1602 if self.state != State.PARSING and not self.parsecache_valid:
1603 bb.server.process.serverlog("Parsing started")
1604 self.parsewatched = {}
1605
1577 bb.parse.siggen.reset(self.data) 1606 bb.parse.siggen.reset(self.data)
1578 self.parseConfiguration () 1607 self.parseConfiguration ()
1579 if CookerFeatures.SEND_SANITYEVENTS in self.featureset: 1608 if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
@@ -1587,37 +1616,35 @@ class BBCooker:
1587 for dep in self.configuration.extra_assume_provided: 1616 for dep in self.configuration.extra_assume_provided:
1588 self.recipecaches[mc].ignored_dependencies.add(dep) 1617 self.recipecaches[mc].ignored_dependencies.add(dep)
1589 1618
1590 self.collections = {}
1591
1592 mcfilelist = {} 1619 mcfilelist = {}
1593 total_masked = 0 1620 total_masked = 0
1594 searchdirs = set() 1621 searchdirs = set()
1595 for mc in self.multiconfigs: 1622 for mc in self.multiconfigs:
1596 self.collections[mc] = CookerCollectFiles(self.bbfile_config_priorities, mc)
1597 (filelist, masked, search) = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc]) 1623 (filelist, masked, search) = self.collections[mc].collect_bbfiles(self.databuilder.mcdata[mc], self.databuilder.mcdata[mc])
1598 1624
1599 mcfilelist[mc] = filelist 1625 mcfilelist[mc] = filelist
1600 total_masked += masked 1626 total_masked += masked
1601 searchdirs |= set(search) 1627 searchdirs |= set(search)
1602 1628
1603 # Add inotify watches for directories searched for bb/bbappend files 1629 # Add mtimes for directories searched for bb/bbappend files
1604 for dirent in searchdirs: 1630 for dirent in searchdirs:
1605 self.add_filewatch([[dirent]], dirs=True) 1631 self.add_filewatch([(dirent, bb.parse.cached_mtime_noerror(dirent))])
1606 1632
1607 self.parser = CookerParser(self, mcfilelist, total_masked) 1633 self.parser = CookerParser(self, mcfilelist, total_masked)
1608 self.parsecache_valid = True 1634 self._parsecache_set(True)
1609 1635
1610 self.state = state.parsing 1636 self.state = State.PARSING
1611 1637
1612 if not self.parser.parse_next(): 1638 if not self.parser.parse_next():
1613 collectlog.debug(1, "parsing complete") 1639 bb.server.process.serverlog("Parsing completed")
1640 collectlog.debug("parsing complete")
1614 if self.parser.error: 1641 if self.parser.error:
1615 raise bb.BBHandledException() 1642 raise bb.BBHandledException()
1616 self.show_appends_with_no_recipes() 1643 self.show_appends_with_no_recipes()
1617 self.handlePrefProviders() 1644 self.handlePrefProviders()
1618 for mc in self.multiconfigs: 1645 for mc in self.multiconfigs:
1619 self.recipecaches[mc].bbfile_priority = self.collections[mc].collection_priorities(self.recipecaches[mc].pkg_fn, self.parser.mcfilelist[mc], self.data) 1646 self.recipecaches[mc].bbfile_priority = self.collections[mc].collection_priorities(self.recipecaches[mc].pkg_fn, self.parser.mcfilelist[mc], self.data)
1620 self.state = state.running 1647 self.state = State.RUNNING
1621 1648
1622 # Send an event listing all stamps reachable after parsing 1649 # Send an event listing all stamps reachable after parsing
1623 # which the metadata may use to clean up stale data 1650 # which the metadata may use to clean up stale data
@@ -1633,7 +1660,7 @@ class BBCooker:
1633 # Return a copy, don't modify the original 1660 # Return a copy, don't modify the original
1634 pkgs_to_build = pkgs_to_build[:] 1661 pkgs_to_build = pkgs_to_build[:]
1635 1662
1636 if len(pkgs_to_build) == 0: 1663 if not pkgs_to_build:
1637 raise NothingToBuild 1664 raise NothingToBuild
1638 1665
1639 ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split() 1666 ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split()
@@ -1655,7 +1682,7 @@ class BBCooker:
1655 1682
1656 if 'universe' in pkgs_to_build: 1683 if 'universe' in pkgs_to_build:
1657 parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.") 1684 parselog.verbnote("The \"universe\" target is only intended for testing and may produce errors.")
1658 parselog.debug(1, "collating packages for \"universe\"") 1685 parselog.debug("collating packages for \"universe\"")
1659 pkgs_to_build.remove('universe') 1686 pkgs_to_build.remove('universe')
1660 for mc in self.multiconfigs: 1687 for mc in self.multiconfigs:
1661 for t in self.recipecaches[mc].universe_target: 1688 for t in self.recipecaches[mc].universe_target:
@@ -1680,26 +1707,36 @@ class BBCooker:
1680 def post_serve(self): 1707 def post_serve(self):
1681 self.shutdown(force=True) 1708 self.shutdown(force=True)
1682 prserv.serv.auto_shutdown() 1709 prserv.serv.auto_shutdown()
1710 if hasattr(bb.parse, "siggen"):
1711 bb.parse.siggen.exit()
1683 if self.hashserv: 1712 if self.hashserv:
1684 self.hashserv.process.terminate() 1713 self.hashserv.process.terminate()
1685 self.hashserv.process.join() 1714 self.hashserv.process.join()
1686 if hasattr(self, "data"): 1715 if hasattr(self, "data"):
1687 bb.event.fire(CookerExit(), self.data) 1716 bb.event.fire(CookerExit(), self.data)
1688 1717
1689 def shutdown(self, force = False): 1718 def shutdown(self, force=False):
1690 if force: 1719 if force:
1691 self.state = state.forceshutdown 1720 self.state = State.FORCE_SHUTDOWN
1721 bb.event._should_exit.set()
1692 else: 1722 else:
1693 self.state = state.shutdown 1723 self.state = State.SHUTDOWN
1694 1724
1695 if self.parser: 1725 if self.parser:
1696 self.parser.shutdown(clean=not force, force=force) 1726 self.parser.shutdown(clean=False)
1697 self.parser.final_cleanup() 1727 self.parser.final_cleanup()
1698 1728
1699 def finishcommand(self): 1729 def finishcommand(self):
1700 self.state = state.initial 1730 if hasattr(self.parser, 'shutdown'):
1731 self.parser.shutdown(clean=False)
1732 self.parser.final_cleanup()
1733 self.state = State.INITIAL
1734 bb.event._should_exit.clear()
1701 1735
1702 def reset(self): 1736 def reset(self):
1737 if hasattr(bb.parse, "siggen"):
1738 bb.parse.siggen.exit()
1739 self.finishcommand()
1703 self.initConfigurationData() 1740 self.initConfigurationData()
1704 self.handlePRServ() 1741 self.handlePRServ()
1705 1742
@@ -1711,9 +1748,9 @@ class BBCooker:
1711 if hasattr(self, "data"): 1748 if hasattr(self, "data"):
1712 self.databuilder.reset() 1749 self.databuilder.reset()
1713 self.data = self.databuilder.data 1750 self.data = self.databuilder.data
1714 self.parsecache_valid = False 1751 # In theory tinfoil could have modified the base data before parsing,
1715 self.baseconfig_valid = False 1752 # ideally we would need to track whether anything modified the datastore
1716 1753 self._parsecache_set(False)
1717 1754
1718class CookerExit(bb.event.Event): 1755class CookerExit(bb.event.Event):
1719 """ 1756 """
@@ -1728,16 +1765,16 @@ class CookerCollectFiles(object):
1728 def __init__(self, priorities, mc=''): 1765 def __init__(self, priorities, mc=''):
1729 self.mc = mc 1766 self.mc = mc
1730 self.bbappends = [] 1767 self.bbappends = []
1731 # Priorities is a list of tupples, with the second element as the pattern. 1768 # Priorities is a list of tuples, with the second element as the pattern.
1732 # We need to sort the list with the longest pattern first, and so on to 1769 # We need to sort the list with the longest pattern first, and so on to
1733 # the shortest. This allows nested layers to be properly evaluated. 1770 # the shortest. This allows nested layers to be properly evaluated.
1734 self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True) 1771 self.bbfile_config_priorities = sorted(priorities, key=lambda tup: tup[1], reverse=True)
1735 1772
1736 def calc_bbfile_priority(self, filename): 1773 def calc_bbfile_priority(self, filename):
1737 for _, _, regex, pri in self.bbfile_config_priorities: 1774 for layername, _, regex, pri in self.bbfile_config_priorities:
1738 if regex.match(filename): 1775 if regex.match(filename):
1739 return pri, regex 1776 return pri, regex, layername
1740 return 0, None 1777 return 0, None, None
1741 1778
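calc_bbfile_priority now also returns the matching layer name, and it still relies on bbfile_config_priorities being sorted longest-pattern-first so a nested layer's more specific regex wins. A toy illustration of that ordering rule (patterns invented):

import re

priorities = [
    ("meta", "^/srv/layers/meta/", re.compile("^/srv/layers/meta/"), 5),
    ("meta-nested", "^/srv/layers/meta/meta-nested/",
     re.compile("^/srv/layers/meta/meta-nested/"), 7),
]
# Longest pattern string first, mirroring CookerCollectFiles.__init__
priorities.sort(key=lambda tup: tup[1], reverse=True)

def calc(filename):
    for layername, _, regex, pri in priorities:
        if regex.match(filename):
            return pri, regex, layername
    return 0, None, None

assert calc("/srv/layers/meta/meta-nested/recipes/foo.bb")[2] == "meta-nested"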
1742 def get_bbfiles(self): 1779 def get_bbfiles(self):
1743 """Get list of default .bb files by reading out the current directory""" 1780 """Get list of default .bb files by reading out the current directory"""
@@ -1756,7 +1793,7 @@ class CookerCollectFiles(object):
1756 for ignored in ('SCCS', 'CVS', '.svn'): 1793 for ignored in ('SCCS', 'CVS', '.svn'):
1757 if ignored in dirs: 1794 if ignored in dirs:
1758 dirs.remove(ignored) 1795 dirs.remove(ignored)
1759 found += [os.path.join(dir, f) for f in files if (f.endswith(['.bb', '.bbappend']))] 1796 found += [os.path.join(dir, f) for f in files if (f.endswith(('.bb', '.bbappend')))]
1760 1797
1761 return found 1798 return found
1762 1799
@@ -1764,7 +1801,7 @@ class CookerCollectFiles(object):
1764 """Collect all available .bb build files""" 1801 """Collect all available .bb build files"""
1765 masked = 0 1802 masked = 0
1766 1803
1767 collectlog.debug(1, "collecting .bb files") 1804 collectlog.debug("collecting .bb files")
1768 1805
1769 files = (config.getVar( "BBFILES") or "").split() 1806 files = (config.getVar( "BBFILES") or "").split()
1770 1807
@@ -1772,16 +1809,16 @@ class CookerCollectFiles(object):
1772 files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem)[0] ) 1809 files.sort( key=lambda fileitem: self.calc_bbfile_priority(fileitem)[0] )
1773 config.setVar("BBFILES_PRIORITIZED", " ".join(files)) 1810 config.setVar("BBFILES_PRIORITIZED", " ".join(files))
1774 1811
1775 if not len(files): 1812 if not files:
1776 files = self.get_bbfiles() 1813 files = self.get_bbfiles()
1777 1814
1778 if not len(files): 1815 if not files:
1779 collectlog.error("no recipe files to build, check your BBPATH and BBFILES?") 1816 collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
1780 bb.event.fire(CookerExit(), eventdata) 1817 bb.event.fire(CookerExit(), eventdata)
1781 1818
1782 # We need to track where we look so that we can add inotify watches. There 1819 # We need to track where we look so that we can know when the cache is invalid. There
1783 # is no nice way to do this; it is horrid. We intercept the os.listdir() 1820 # is no nice way to do this; it is horrid. We intercept the os.listdir() and os.scandir()
1784 # (or os.scandir() for python 3.6+) calls while we run glob(). 1821 # calls while we run glob().
1785 origlistdir = os.listdir 1822 origlistdir = os.listdir
1786 if hasattr(os, 'scandir'): 1823 if hasattr(os, 'scandir'):
1787 origscandir = os.scandir 1824 origscandir = os.scandir
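The interception trick swaps out the directory-listing primitives for the duration of the glob so every directory the glob visits can be recorded (here, to know which mtimes to track for cache invalidation). A self-contained sketch of the same idea; on current Python the glob module goes through os.scandir, so that is the hook shown:

import glob
import os

searched = set()
origscandir = os.scandir

def wrapped_scandir(path="."):
    searched.add(os.fspath(path))  # remember everywhere glob() looked
    return origscandir(path)

os.scandir = wrapped_scandir
try:
    matches = glob.glob("/tmp/*.conf")
finally:
    os.scandir = origscandir  # always undo the monkeypatch

print(sorted(searched))  # e.g. ['/tmp']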
@@ -1835,7 +1872,7 @@ class CookerCollectFiles(object):
1835 try: 1872 try:
1836 re.compile(mask) 1873 re.compile(mask)
1837 bbmasks.append(mask) 1874 bbmasks.append(mask)
1838 except sre_constants.error: 1875 except re.error:
1839 collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask) 1876 collectlog.critical("BBMASK contains an invalid regular expression, ignoring: %s" % mask)
1840 1877
1841 # Then validate the combined regular expressions. This should never 1878 # Then validate the combined regular expressions. This should never
@@ -1843,7 +1880,7 @@ class CookerCollectFiles(object):
1843 bbmask = "|".join(bbmasks) 1880 bbmask = "|".join(bbmasks)
1844 try: 1881 try:
1845 bbmask_compiled = re.compile(bbmask) 1882 bbmask_compiled = re.compile(bbmask)
1846 except sre_constants.error: 1883 except re.error:
1847 collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask) 1884 collectlog.critical("BBMASK is not a valid regular expression, ignoring: %s" % bbmask)
1848 bbmask = None 1885 bbmask = None
1849 1886
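BBMASK handling compiles each mask on its own first, dropping invalid expressions with a warning, and only then joins the survivors into a single alternation, which should therefore always compile. Note the hunks above also move from the private sre_constants.error name to its public alias re.error. Condensed:

import re

masks = ["/skip-this/", "([unbalanced", "\\.bbappend$"]

valid = []
for mask in masks:
    try:
        re.compile(mask)  # validate one expression at a time
        valid.append(mask)
    except re.error:
        print("ignoring invalid mask: %s" % mask)

bbmask_compiled = re.compile("|".join(valid)) if valid else None
assert bbmask_compiled.search("/srv/skip-this/foo.bb")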
@@ -1851,7 +1888,7 @@ class CookerCollectFiles(object):
1851 bbappend = [] 1888 bbappend = []
1852 for f in newfiles: 1889 for f in newfiles:
1853 if bbmask and bbmask_compiled.search(f): 1890 if bbmask and bbmask_compiled.search(f):
1854 collectlog.debug(1, "skipping masked file %s", f) 1891 collectlog.debug("skipping masked file %s", f)
1855 masked += 1 1892 masked += 1
1856 continue 1893 continue
1857 if f.endswith('.bb'): 1894 if f.endswith('.bb'):
@@ -1859,7 +1896,7 @@ class CookerCollectFiles(object):
1859 elif f.endswith('.bbappend'): 1896 elif f.endswith('.bbappend'):
1860 bbappend.append(f) 1897 bbappend.append(f)
1861 else: 1898 else:
1862 collectlog.debug(1, "skipping %s: unknown file extension", f) 1899 collectlog.debug("skipping %s: unknown file extension", f)
1863 1900
1864 # Build a list of .bbappend files for each .bb file 1901 # Build a list of .bbappend files for each .bb file
1865 for f in bbappend: 1902 for f in bbappend:
@@ -1910,7 +1947,7 @@ class CookerCollectFiles(object):
1910 # Calculate priorities for each file 1947 # Calculate priorities for each file
1911 for p in pkgfns: 1948 for p in pkgfns:
1912 realfn, cls, mc = bb.cache.virtualfn2realfn(p) 1949 realfn, cls, mc = bb.cache.virtualfn2realfn(p)
1913 priorities[p], regex = self.calc_bbfile_priority(realfn) 1950 priorities[p], regex, _ = self.calc_bbfile_priority(realfn)
1914 if regex in unmatched_regex: 1951 if regex in unmatched_regex:
1915 matched_regex.add(regex) 1952 matched_regex.add(regex)
1916 unmatched_regex.remove(regex) 1953 unmatched_regex.remove(regex)
@@ -1961,15 +1998,30 @@ class ParsingFailure(Exception):
1961 Exception.__init__(self, realexception, recipe) 1998 Exception.__init__(self, realexception, recipe)
1962 1999
1963class Parser(multiprocessing.Process): 2000class Parser(multiprocessing.Process):
1964 def __init__(self, jobs, results, quit, init, profile): 2001 def __init__(self, jobs, results, quit, profile):
1965 self.jobs = jobs 2002 self.jobs = jobs
1966 self.results = results 2003 self.results = results
1967 self.quit = quit 2004 self.quit = quit
1968 self.init = init
1969 multiprocessing.Process.__init__(self) 2005 multiprocessing.Process.__init__(self)
1970 self.context = bb.utils.get_context().copy() 2006 self.context = bb.utils.get_context().copy()
1971 self.handlers = bb.event.get_class_handlers().copy() 2007 self.handlers = bb.event.get_class_handlers().copy()
1972 self.profile = profile 2008 self.profile = profile
2009 self.queue_signals = False
2010 self.signal_received = []
2011 self.signal_threadlock = threading.Lock()
2012
2013 def catch_sig(self, signum, frame):
2014 if self.queue_signals:
2015 self.signal_received.append(signum)
2016 else:
2017 self.handle_sig(signum, frame)
2018
2019 def handle_sig(self, signum, frame):
2020 if signum == signal.SIGTERM:
2021 signal.signal(signal.SIGTERM, signal.SIG_DFL)
2022 os.kill(os.getpid(), signal.SIGTERM)
2023 elif signum == signal.SIGINT:
2024 signal.default_int_handler(signum, frame)
1973 2025
1974 def run(self): 2026 def run(self):
1975 2027
@@ -1989,38 +2041,50 @@ class Parser(multiprocessing.Process):
1989 prof.dump_stats(logfile) 2041 prof.dump_stats(logfile)
1990 2042
1991 def realrun(self): 2043 def realrun(self):
1992 if self.init: 2044 # Signal handling here is hard. We must not terminate any process or thread holding the write
1993 self.init() 2045 # lock for the event stream as it will not be released, ever, and things will hang.
2046 # Python handles signals in the main thread/process but they can be raised from any thread and
2047 # we want to defer processing of any SIGTERM/SIGINT signal until we're outside the critical section
2048 # and don't hold the lock (see server/process.py). We therefore always catch the signals (so any
2049 # new thread should also do so) and we defer handling but we handle with the local thread lock
2050 # held (a threading lock, not a multiprocessing one) so that no other thread in the process
2051 # can be in the critical section.
2052 signal.signal(signal.SIGTERM, self.catch_sig)
2053 signal.signal(signal.SIGHUP, signal.SIG_DFL)
2054 signal.signal(signal.SIGINT, self.catch_sig)
2055 bb.utils.set_process_name(multiprocessing.current_process().name)
2056 multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
2057 multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
1994 2058
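The comment block above spells out the deferral pattern: every thread installs catch_sig, signals arriving while queue_signals is set are queued, and they are replayed under a thread lock once the critical section is over. A reduced standalone version (the names mirror the Parser attributes):

import os
import signal
import threading

queued = []
queue_signals = False
siglock = threading.Lock()

def handle_sig(signum, frame):
    if signum == signal.SIGTERM:
        signal.signal(signal.SIGTERM, signal.SIG_DFL)
        os.kill(os.getpid(), signal.SIGTERM)

def catch_sig(signum, frame):
    if queue_signals:
        queued.append(signum)  # defer: we may hold the event-stream lock
    else:
        handle_sig(signum, frame)

signal.signal(signal.SIGTERM, catch_sig)

# Critical section: queue signals, do the work, then replay under the lock
queue_signals = True
# ... work that must not be interrupted mid-write ...
with siglock:
    queue_signals = False
    while queued:
        handle_sig(queued.pop(0), None)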
1995 pending = [] 2059 pending = []
1996 while True: 2060 havejobs = True
1997 try: 2061 try:
1998 self.quit.get_nowait() 2062 while havejobs or pending:
1999 except queue.Empty: 2063 if self.quit.is_set():
2000 pass 2064 break
2001 else:
2002 self.results.close()
2003 self.results.join_thread()
2004 break
2005 2065
2006 if pending: 2066 job = None
2007 result = pending.pop()
2008 else:
2009 try: 2067 try:
2010 job = self.jobs.pop() 2068 job = self.jobs.pop()
2011 except IndexError: 2069 except IndexError:
2012 self.results.close() 2070 havejobs = False
2013 self.results.join_thread() 2071 if job:
2014 break 2072 result = self.parse(*job)
2015 result = self.parse(*job) 2073 # Clear the siggen cache after parsing to control memory usage, it's huge
2016 # Clear the siggen cache after parsing to control memory usage, it's huge 2074 bb.parse.siggen.postparsing_clean_cache()
2017 bb.parse.siggen.postparsing_clean_cache() 2075 pending.append(result)
2018 try: 2076
2019 self.results.put(result, timeout=0.25) 2077 if pending:
2020 except queue.Full: 2078 try:
2021 pending.append(result) 2079 result = pending.pop()
2080 self.results.put(result, timeout=0.05)
2081 except queue.Full:
2082 pending.append(result)
2083 finally:
2084 self.results.close()
2085 self.results.join_thread()
2022 2086
2023 def parse(self, mc, cache, filename, appends): 2087 def parse(self, mc, cache, filename, appends, layername):
2024 try: 2088 try:
2025 origfilter = bb.event.LogHandler.filter 2089 origfilter = bb.event.LogHandler.filter
2026 # Record the filename we're parsing into any events generated 2090 # Record the filename we're parsing into any events generated
@@ -2034,17 +2098,16 @@ class Parser(multiprocessing.Process):
2034 bb.event.set_class_handlers(self.handlers.copy()) 2098 bb.event.set_class_handlers(self.handlers.copy())
2035 bb.event.LogHandler.filter = parse_filter 2099 bb.event.LogHandler.filter = parse_filter
2036 2100
2037 return True, mc, cache.parse(filename, appends) 2101 return True, mc, cache.parse(filename, appends, layername)
2038 except Exception as exc: 2102 except Exception as exc:
2039 tb = sys.exc_info()[2] 2103 tb = sys.exc_info()[2]
2040 exc.recipe = filename 2104 exc.recipe = filename
2041 exc.traceback = list(bb.exceptions.extract_traceback(tb, context=3)) 2105 return True, None, exc
2042 return True, exc
2043 # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown 2106 # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown
2044 # and for example a worker thread doesn't just exit on its own in response to 2107 # and for example a worker thread doesn't just exit on its own in response to
2045 # a SystemExit event for example. 2108 # a SystemExit event for example.
2046 except BaseException as exc: 2109 except BaseException as exc:
2047 return True, ParsingFailure(exc, filename) 2110 return True, None, ParsingFailure(exc, filename)
2048 finally: 2111 finally:
2049 bb.event.LogHandler.filter = origfilter 2112 bb.event.LogHandler.filter = origfilter
2050 2113
@@ -2053,7 +2116,7 @@ class CookerParser(object):
2053 self.mcfilelist = mcfilelist 2116 self.mcfilelist = mcfilelist
2054 self.cooker = cooker 2117 self.cooker = cooker
2055 self.cfgdata = cooker.data 2118 self.cfgdata = cooker.data
2056 self.cfghash = cooker.data_hash 2119 self.cfghash = cooker.databuilder.data_hash
2057 self.cfgbuilder = cooker.databuilder 2120 self.cfgbuilder = cooker.databuilder
2058 2121
2059 # Accounting statistics 2122 # Accounting statistics
@@ -2074,10 +2137,11 @@ class CookerParser(object):
2074 for mc in self.cooker.multiconfigs: 2137 for mc in self.cooker.multiconfigs:
2075 for filename in self.mcfilelist[mc]: 2138 for filename in self.mcfilelist[mc]:
2076 appends = self.cooker.collections[mc].get_file_appends(filename) 2139 appends = self.cooker.collections[mc].get_file_appends(filename)
2140 layername = self.cooker.collections[mc].calc_bbfile_priority(filename)[2]
2077 if not self.bb_caches[mc].cacheValid(filename, appends): 2141 if not self.bb_caches[mc].cacheValid(filename, appends):
2078 self.willparse.add((mc, self.bb_caches[mc], filename, appends)) 2142 self.willparse.add((mc, self.bb_caches[mc], filename, appends, layername))
2079 else: 2143 else:
2080 self.fromcache.add((mc, self.bb_caches[mc], filename, appends)) 2144 self.fromcache.add((mc, self.bb_caches[mc], filename, appends, layername))
2081 2145
2082 self.total = len(self.fromcache) + len(self.willparse) 2146 self.total = len(self.fromcache) + len(self.willparse)
2083 self.toparse = len(self.willparse) 2147 self.toparse = len(self.willparse)
@@ -2086,6 +2150,7 @@ class CookerParser(object):
2086 self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or 2150 self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or
2087 multiprocessing.cpu_count()), self.toparse) 2151 multiprocessing.cpu_count()), self.toparse)
2088 2152
2153 bb.cache.SiggenRecipeInfo.reset()
2089 self.start() 2154 self.start()
2090 self.haveshutdown = False 2155 self.haveshutdown = False
2091 self.syncthread = None 2156 self.syncthread = None
@@ -2095,15 +2160,8 @@ class CookerParser(object):
2095 self.processes = [] 2160 self.processes = []
2096 if self.toparse: 2161 if self.toparse:
2097 bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata) 2162 bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
2098 def init(): 2163
2099 signal.signal(signal.SIGTERM, signal.SIG_DFL) 2164 self.parser_quit = multiprocessing.Event()
2100 signal.signal(signal.SIGHUP, signal.SIG_DFL)
2101 signal.signal(signal.SIGINT, signal.SIG_IGN)
2102 bb.utils.set_process_name(multiprocessing.current_process().name)
2103 multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, exitpriority=1)
2104 multiprocessing.util.Finalize(None, bb.fetch.fetcher_parse_save, exitpriority=1)
2105
2106 self.parser_quit = multiprocessing.Queue(maxsize=self.num_processes)
2107 self.result_queue = multiprocessing.Queue() 2165 self.result_queue = multiprocessing.Queue()
2108 2166
2109 def chunkify(lst,n): 2167 def chunkify(lst,n):
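chunkify's body falls outside the hunk; from its call site it must split the willparse jobs into num_processes roughly even lists, one per Parser process. A plausible definition with that behaviour (an assumption, not necessarily the verbatim code):

def chunkify(lst, n):
    # Deal items round-robin into n roughly equal chunks
    return [lst[i::n] for i in range(n)]

assert chunkify([1, 2, 3, 4, 5], 2) == [[1, 3, 5], [2, 4]]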
@@ -2111,14 +2169,14 @@ class CookerParser(object):
2111 self.jobs = chunkify(list(self.willparse), self.num_processes) 2169 self.jobs = chunkify(list(self.willparse), self.num_processes)
2112 2170
2113 for i in range(0, self.num_processes): 2171 for i in range(0, self.num_processes):
2114 parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, init, self.cooker.configuration.profile) 2172 parser = Parser(self.jobs[i], self.result_queue, self.parser_quit, self.cooker.configuration.profile)
2115 parser.start() 2173 parser.start()
2116 self.process_names.append(parser.name) 2174 self.process_names.append(parser.name)
2117 self.processes.append(parser) 2175 self.processes.append(parser)
2118 2176
2119 self.results = itertools.chain(self.results, self.parse_generator()) 2177 self.results = itertools.chain(self.results, self.parse_generator())
2120 2178
2121 def shutdown(self, clean=True, force=False): 2179 def shutdown(self, clean=True, eventmsg="Parsing halted due to errors"):
2122 if not self.toparse: 2180 if not self.toparse:
2123 return 2181 return
2124 if self.haveshutdown: 2182 if self.haveshutdown:
@@ -2132,9 +2190,9 @@ class CookerParser(object):
2132 self.total) 2190 self.total)
2133 2191
2134 bb.event.fire(event, self.cfgdata) 2192 bb.event.fire(event, self.cfgdata)
2135 2193 else:
2136 for process in self.processes: 2194 bb.event.fire(bb.event.ParseError(eventmsg), self.cfgdata)
2137 self.parser_quit.put(None) 2195 bb.error("Parsing halted due to errors, see error messages above")
2138 2196
2139 # Cleanup the queue before call process.join(), otherwise there might be 2197 # Cleanup the queue before call process.join(), otherwise there might be
2140 # deadlocks. 2198 # deadlocks.
@@ -2144,106 +2202,152 @@ class CookerParser(object):
2144 except queue.Empty: 2202 except queue.Empty:
2145 break 2203 break
2146 2204
2147 for process in self.processes:
2148 if force:
2149 process.join(.1)
2150 process.terminate()
2151 else:
2152 process.join()
2153
2154 self.parser_quit.close()
2155 # Allow data left in the cancel queue to be discarded
2156 self.parser_quit.cancel_join_thread()
2157
2158 def sync_caches(): 2205 def sync_caches():
2159 for c in self.bb_caches.values(): 2206 for c in self.bb_caches.values():
2207 bb.cache.SiggenRecipeInfo.reset()
2160 c.sync() 2208 c.sync()
2161 2209
2162 sync = threading.Thread(target=sync_caches, name="SyncThread") 2210 self.syncthread = threading.Thread(target=sync_caches, name="SyncThread")
2163 self.syncthread = sync 2211 self.syncthread.start()
2164 sync.start() 2212
2213 self.parser_quit.set()
2214
2215 for process in self.processes:
2216 process.join(0.5)
2217
2218 for process in self.processes:
2219 if process.exitcode is None:
2220 os.kill(process.pid, signal.SIGINT)
2221
2222 for process in self.processes:
2223 process.join(0.5)
2224
2225 for process in self.processes:
2226 if process.exitcode is None:
2227 process.terminate()
2228
2229 for process in self.processes:
2230 process.join()
2231 # clean up zombies
2232 process.close()
2233
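The replacement shutdown sequence escalates in stages: signal the quit event, give each worker a short join, send SIGINT to stragglers, join again, terminate anything still alive, then join and close to reap the zombies. The same escalation in isolation (fork start method assumed, hence the main guard):

import multiprocessing
import os
import signal
import time

def worker(quit_event):
    while not quit_event.is_set():
        time.sleep(0.1)

if __name__ == "__main__":
    quit_event = multiprocessing.Event()
    procs = [multiprocessing.Process(target=worker, args=(quit_event,))
             for _ in range(2)]
    for p in procs:
        p.start()

    quit_event.set()                       # 1. ask nicely
    for p in procs:
        p.join(0.5)
    for p in procs:
        if p.exitcode is None:
            os.kill(p.pid, signal.SIGINT)  # 2. interrupt
    for p in procs:
        p.join(0.5)
    for p in procs:
        if p.exitcode is None:
            p.terminate()                  # 3. force
    for p in procs:
        p.join()
        p.close()                          # 4. reap the zombie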
2234 bb.codeparser.parser_cache_save()
2165 bb.codeparser.parser_cache_savemerge() 2235 bb.codeparser.parser_cache_savemerge()
2236 bb.cache.SiggenRecipeInfo.reset()
2166 bb.fetch.fetcher_parse_done() 2237 bb.fetch.fetcher_parse_done()
2167 if self.cooker.configuration.profile: 2238 if self.cooker.configuration.profile:
2168 profiles = [] 2239 profiles = []
2169 for i in self.process_names: 2240 for i in self.process_names:
2170 logfile = "profile-parse-%s.log" % i 2241 logfile = "profile-parse-%s.log" % i
2171 if os.path.exists(logfile): 2242 if os.path.exists(logfile) and os.path.getsize(logfile):
2172 profiles.append(logfile) 2243 profiles.append(logfile)
2173 2244
2174 pout = "profile-parse.log.processed" 2245 if profiles:
2175 bb.utils.process_profilelog(profiles, pout = pout) 2246 pout = "profile-parse.log.processed"
2176 print("Processed parsing statistics saved to %s" % (pout)) 2247 bb.utils.process_profilelog(profiles, pout = pout)
2248 print("Processed parsing statistics saved to %s" % (pout))
2177 2249
2178 def final_cleanup(self): 2250 def final_cleanup(self):
2179 if self.syncthread: 2251 if self.syncthread:
2180 self.syncthread.join() 2252 self.syncthread.join()
2181 2253
2182 def load_cached(self): 2254 def load_cached(self):
2183 for mc, cache, filename, appends in self.fromcache: 2255 for mc, cache, filename, appends, layername in self.fromcache:
2184 cached, infos = cache.load(filename, appends) 2256 infos = cache.loadCached(filename, appends)
2185 yield not cached, mc, infos 2257 yield False, mc, infos
2186 2258
2187 def parse_generator(self): 2259 def parse_generator(self):
2188 while True: 2260 empty = False
2261 while self.processes or not empty:
2262 for process in self.processes.copy():
2263 if not process.is_alive():
2264 process.join()
2265 self.processes.remove(process)
2266
2189 if self.parsed >= self.toparse: 2267 if self.parsed >= self.toparse:
2190 break 2268 break
2191 2269
2192 try: 2270 try:
2193 result = self.result_queue.get(timeout=0.25) 2271 result = self.result_queue.get(timeout=0.25)
2194 except queue.Empty: 2272 except queue.Empty:
2195 pass 2273 empty = True
2274 yield None, None, None
2196 else: 2275 else:
2197 value = result[1] 2276 empty = False
2198 if isinstance(value, BaseException): 2277 yield result
2199 raise value 2278
2200 else: 2279 if not (self.parsed >= self.toparse):
2201 yield result 2280 raise bb.parse.ParseError("Not all recipes parsed, parser thread killed/died? Exiting.", None)
2281
2202 2282
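parse_generator now also reaps workers that have died, and on a result-queue timeout it yields a (None, None, None) sentinel instead of blocking, so parse_next can hand control back to the server main loop. The sentinel-on-timeout idiom in miniature:

import queue

def results_from(q, expected):
    done = 0
    while done < expected:
        try:
            item = q.get(timeout=0.25)
        except queue.Empty:
            yield None          # sentinel: let the caller's loop breathe
        else:
            done += 1
            yield item

q = queue.Queue()
q.put("recipe-a.bb")
for r in results_from(q, 1):
    if r is None:
        continue                # timeout, go around the main loop again
    print("parsed", r)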
2203 def parse_next(self): 2283 def parse_next(self):
2204 result = [] 2284 result = []
2205 parsed = None 2285 parsed = None
2206 try: 2286 try:
2207 parsed, mc, result = next(self.results) 2287 parsed, mc, result = next(self.results)
2288 if isinstance(result, BaseException):
2289 # Turn exceptions back into exceptions
2290 raise result
2291 if parsed is None:
2292 # Timeout, loop back through the main loop
2293 return True
2294
2208 except StopIteration: 2295 except StopIteration:
2209 self.shutdown() 2296 self.shutdown()
2210 return False 2297 return False
2211 except bb.BBHandledException as exc: 2298 except bb.BBHandledException as exc:
2212 self.error += 1 2299 self.error += 1
2213 logger.error('Failed to parse recipe: %s' % exc.recipe) 2300 logger.debug('Failed to parse recipe: %s' % exc.recipe)
2214 self.shutdown(clean=False, force=True) 2301 self.shutdown(clean=False)
2215 return False 2302 return False
2216 except ParsingFailure as exc: 2303 except ParsingFailure as exc:
2217 self.error += 1 2304 self.error += 1
2218 logger.error('Unable to parse %s: %s' % 2305
2219 (exc.recipe, bb.exceptions.to_string(exc.realexception))) 2306 exc_desc = str(exc)
2220 self.shutdown(clean=False, force=True) 2307 if isinstance(exc, SystemExit) and not isinstance(exc.code, str):
2308 exc_desc = 'Exited with "%d"' % exc.code
2309
2310 logger.error('Unable to parse %s: %s' % (exc.recipe, exc_desc))
2311 self.shutdown(clean=False)
2221 return False 2312 return False
2222 except bb.parse.ParseError as exc: 2313 except bb.parse.ParseError as exc:
2223 self.error += 1 2314 self.error += 1
2224 logger.error(str(exc)) 2315 logger.error(str(exc))
2225 self.shutdown(clean=False, force=True) 2316 self.shutdown(clean=False, eventmsg=str(exc))
2226 return False 2317 return False
2227 except bb.data_smart.ExpansionError as exc: 2318 except bb.data_smart.ExpansionError as exc:
2319 def skip_frames(f, fn_prefix):
2320 while f and f.tb_frame.f_code.co_filename.startswith(fn_prefix):
2321 f = f.tb_next
2322 return f
2323
2228 self.error += 1 2324 self.error += 1
2229 bbdir = os.path.dirname(__file__) + os.sep 2325 bbdir = os.path.dirname(__file__) + os.sep
2230 etype, value, _ = sys.exc_info() 2326 etype, value, tb = sys.exc_info()
2231 tb = list(itertools.dropwhile(lambda e: e.filename.startswith(bbdir), exc.traceback)) 2327
2328 # Remove any frames where the code comes from bitbake. This
2329 # prevents deep (and pretty useless) backtraces for expansion errors
2330 tb = skip_frames(tb, bbdir)
2331 cur = tb
2332 while cur:
2333 cur.tb_next = skip_frames(cur.tb_next, bbdir)
2334 cur = cur.tb_next
2335
2232 logger.error('ExpansionError during parsing %s', value.recipe, 2336 logger.error('ExpansionError during parsing %s', value.recipe,
2233 exc_info=(etype, value, tb)) 2337 exc_info=(etype, value, tb))
2234 self.shutdown(clean=False, force=True) 2338 self.shutdown(clean=False)
2235 return False 2339 return False
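skip_frames drops leading traceback frames whose code lives under a given path prefix, and the loop above applies the same trim at every tb_next link, so bitbake-internal frames disappear from expansion-error reports. A self-contained demonstration (the helper mirrors the one in the hunk):

import sys

def skip_frames(f, fn_prefix):
    while f and f.tb_frame.f_code.co_filename.startswith(fn_prefix):
        f = f.tb_next
    return f

def boom():
    raise ValueError("expansion failed")

try:
    boom()
except ValueError:
    _, _, tb = sys.exc_info()
    prefix = tb.tb_frame.f_code.co_filename
    # Every frame lives in this file, so trimming with its path removes all
    assert skip_frames(tb, prefix) is None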
2236 except Exception as exc: 2340 except Exception as exc:
2237 self.error += 1 2341 self.error += 1
2238 etype, value, tb = sys.exc_info() 2342 _, value, _ = sys.exc_info()
2239 if hasattr(value, "recipe"): 2343 if hasattr(value, "recipe"):
2240 logger.error('Unable to parse %s' % value.recipe, 2344 logger.error('Unable to parse %s' % value.recipe,
2241 exc_info=(etype, value, exc.traceback)) 2345 exc_info=sys.exc_info())
2242 else: 2346 else:
2243 # Most likely, an exception occurred during raising an exception 2347 # Most likely, an exception occurred during raising an exception
2244 import traceback 2348 import traceback
2245 logger.error('Exception during parse: %s' % traceback.format_exc()) 2349 logger.error('Exception during parse: %s' % traceback.format_exc())
2246 self.shutdown(clean=False, force=True) 2350 self.shutdown(clean=False)
2247 return False 2351 return False
2248 2352
2249 self.current += 1 2353 self.current += 1
@@ -2259,17 +2363,19 @@ class CookerParser(object):
2259 for virtualfn, info_array in result: 2363 for virtualfn, info_array in result:
2260 if info_array[0].skipped: 2364 if info_array[0].skipped:
2261 self.skipped += 1 2365 self.skipped += 1
2262 self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0]) 2366 self.cooker.skiplist_by_mc[mc][virtualfn] = SkippedPackage(info_array[0])
2263 self.bb_caches[mc].add_info(virtualfn, info_array, self.cooker.recipecaches[mc], 2367 self.bb_caches[mc].add_info(virtualfn, info_array, self.cooker.recipecaches[mc],
2264 parsed=parsed, watcher = self.cooker.add_filewatch) 2368 parsed=parsed, watcher = self.cooker.add_filewatch)
2265 return True 2369 return True
2266 2370
2267 def reparse(self, filename): 2371 def reparse(self, filename):
2372 bb.cache.SiggenRecipeInfo.reset()
2268 to_reparse = set() 2373 to_reparse = set()
2269 for mc in self.cooker.multiconfigs: 2374 for mc in self.cooker.multiconfigs:
2270 to_reparse.add((mc, filename, self.cooker.collections[mc].get_file_appends(filename))) 2375 layername = self.cooker.collections[mc].calc_bbfile_priority(filename)[2]
2376 to_reparse.add((mc, filename, self.cooker.collections[mc].get_file_appends(filename), layername))
2271 2377
2272 for mc, filename, appends in to_reparse: 2378 for mc, filename, appends, layername in to_reparse:
2273 infos = self.bb_caches[mc].parse(filename, appends) 2379 infos = self.bb_caches[mc].parse(filename, appends, layername)
2274 for vfn, info_array in infos: 2380 for vfn, info_array in infos:
2275 self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array) 2381 self.cooker.recipecaches[mc].add_from_recipeinfo(vfn, info_array)
diff --git a/bitbake/lib/bb/cookerdata.py b/bitbake/lib/bb/cookerdata.py
index 1c1e008c6b..65c153a5bb 100644
--- a/bitbake/lib/bb/cookerdata.py
+++ b/bitbake/lib/bb/cookerdata.py
@@ -1,3 +1,4 @@
1
1# 2#
2# Copyright (C) 2003, 2004 Chris Larson 3# Copyright (C) 2003, 2004 Chris Larson
3# Copyright (C) 2003, 2004 Phil Blundell 4# Copyright (C) 2003, 2004 Phil Blundell
@@ -57,7 +58,7 @@ class ConfigParameters(object):
57 58
58 def updateToServer(self, server, environment): 59 def updateToServer(self, server, environment):
59 options = {} 60 options = {}
60 for o in ["abort", "force", "invalidate_stamp", 61 for o in ["halt", "force", "invalidate_stamp",
61 "dry_run", "dump_signatures", 62 "dry_run", "dump_signatures",
62 "extra_assume_provided", "profile", 63 "extra_assume_provided", "profile",
63 "prefile", "postfile", "server_timeout", 64 "prefile", "postfile", "server_timeout",
@@ -86,7 +87,7 @@ class ConfigParameters(object):
86 action['msg'] = "Only one target can be used with the --environment option." 87 action['msg'] = "Only one target can be used with the --environment option."
87 elif self.options.buildfile and len(self.options.pkgs_to_build) > 0: 88 elif self.options.buildfile and len(self.options.pkgs_to_build) > 0:
88 action['msg'] = "No target should be used with the --environment and --buildfile options." 89 action['msg'] = "No target should be used with the --environment and --buildfile options."
89 elif len(self.options.pkgs_to_build) > 0: 90 elif self.options.pkgs_to_build:
90 action['action'] = ["showEnvironmentTarget", self.options.pkgs_to_build] 91 action['action'] = ["showEnvironmentTarget", self.options.pkgs_to_build]
91 else: 92 else:
92 action['action'] = ["showEnvironment", self.options.buildfile] 93 action['action'] = ["showEnvironment", self.options.buildfile]
@@ -124,7 +125,7 @@ class CookerConfiguration(object):
124 self.prefile = [] 125 self.prefile = []
125 self.postfile = [] 126 self.postfile = []
126 self.cmd = None 127 self.cmd = None
127 self.abort = True 128 self.halt = True
128 self.force = False 129 self.force = False
129 self.profile = False 130 self.profile = False
130 self.nosetscene = False 131 self.nosetscene = False
@@ -160,12 +161,7 @@ def catch_parse_error(func):
160 def wrapped(fn, *args): 161 def wrapped(fn, *args):
161 try: 162 try:
162 return func(fn, *args) 163 return func(fn, *args)
163 except IOError as exc: 164 except Exception as exc:
164 import traceback
165 parselog.critical(traceback.format_exc())
166 parselog.critical("Unable to parse %s: %s" % (fn, exc))
167 raise bb.BBHandledException()
168 except bb.data_smart.ExpansionError as exc:
169 import traceback 165 import traceback
170 166
171 bbdir = os.path.dirname(__file__) + os.sep 167 bbdir = os.path.dirname(__file__) + os.sep
@@ -177,14 +173,11 @@ def catch_parse_error(func):
177 break 173 break
178 parselog.critical("Unable to parse %s" % fn, exc_info=(exc_class, exc, tb)) 174 parselog.critical("Unable to parse %s" % fn, exc_info=(exc_class, exc, tb))
179 raise bb.BBHandledException() 175 raise bb.BBHandledException()
180 except bb.parse.ParseError as exc:
181 parselog.critical(str(exc))
182 raise bb.BBHandledException()
183 return wrapped 176 return wrapped
184 177
185@catch_parse_error 178@catch_parse_error
186def parse_config_file(fn, data, include=True): 179def parse_config_file(fn, data, include=True):
187 return bb.parse.handle(fn, data, include) 180 return bb.parse.handle(fn, data, include, baseconfig=True)
188 181
189@catch_parse_error 182@catch_parse_error
190def _inherit(bbclass, data): 183def _inherit(bbclass, data):
@@ -210,7 +203,7 @@ def findConfigFile(configfile, data):
210 203
211# 204#
212# We search for a conf/bblayers.conf under an entry in BBPATH or in cwd working 205# We search for a conf/bblayers.conf under an entry in BBPATH or in cwd working
213 # up to /. If that fails, we search for a conf/bitbake.conf in BBPATH. 206 # up to /. If that fails, bitbake falls back to the cwd.
214# 207#
215 208
216def findTopdir(): 209def findTopdir():
@@ -223,11 +216,8 @@ def findTopdir():
223 layerconf = findConfigFile("bblayers.conf", d) 216 layerconf = findConfigFile("bblayers.conf", d)
224 if layerconf: 217 if layerconf:
225 return os.path.dirname(os.path.dirname(layerconf)) 218 return os.path.dirname(os.path.dirname(layerconf))
226 if bbpath: 219
227 bitbakeconf = bb.utils.which(bbpath, "conf/bitbake.conf") 220 return os.path.abspath(os.getcwd())
228 if bitbakeconf:
229 return os.path.dirname(os.path.dirname(bitbakeconf))
230 return None
231 221
232class CookerDataBuilder(object): 222class CookerDataBuilder(object):
233 223
@@ -250,10 +240,14 @@ class CookerDataBuilder(object):
250 self.savedenv = bb.data.init() 240 self.savedenv = bb.data.init()
251 for k in cookercfg.env: 241 for k in cookercfg.env:
252 self.savedenv.setVar(k, cookercfg.env[k]) 242 self.savedenv.setVar(k, cookercfg.env[k])
243 if k in bb.data_smart.bitbake_renamed_vars:
244 bb.error('Shell environment variable %s has been renamed to %s' % (k, bb.data_smart.bitbake_renamed_vars[k]))
245 bb.fatal("Exiting to allow environment variables to be corrected")
253 246
254 filtered_keys = bb.utils.approved_variables() 247 filtered_keys = bb.utils.approved_variables()
255 bb.data.inheritFromOS(self.basedata, self.savedenv, filtered_keys) 248 bb.data.inheritFromOS(self.basedata, self.savedenv, filtered_keys)
256 self.basedata.setVar("BB_ORIGENV", self.savedenv) 249 self.basedata.setVar("BB_ORIGENV", self.savedenv)
250 self.basedata.setVar("__bbclasstype", "global")
257 251
258 if worker: 252 if worker:
259 self.basedata.setVar("BB_WORKERCONTEXT", "1") 253 self.basedata.setVar("BB_WORKERCONTEXT", "1")
@@ -261,15 +255,22 @@ class CookerDataBuilder(object):
261 self.data = self.basedata 255 self.data = self.basedata
262 self.mcdata = {} 256 self.mcdata = {}
263 257
264 def parseBaseConfiguration(self): 258 def calc_datastore_hashes(self):
265 data_hash = hashlib.sha256() 259 data_hash = hashlib.sha256()
260 data_hash.update(self.data.get_hash().encode('utf-8'))
261 multiconfig = (self.data.getVar("BBMULTICONFIG") or "").split()
262 for config in multiconfig:
263 data_hash.update(self.mcdata[config].get_hash().encode('utf-8'))
264 self.data_hash = data_hash.hexdigest()
265
266 def parseBaseConfiguration(self, worker=False):
267 mcdata = {}
266 try: 268 try:
267 self.data = self.parseConfigurationFiles(self.prefiles, self.postfiles) 269 self.data = self.parseConfigurationFiles(self.prefiles, self.postfiles)
268 270
269 if self.data.getVar("BB_WORKERCONTEXT", False) is None: 271 servercontext = self.data.getVar("BB_WORKERCONTEXT", False) is None and not worker
270 bb.fetch.fetcher_init(self.data) 272 bb.fetch.fetcher_init(self.data, servercontext)
271 bb.parse.init_parser(self.data) 273 bb.parse.init_parser(self.data)
272 bb.codeparser.parser_cache_init(self.data)
273 274
274 bb.event.fire(bb.event.ConfigParsed(), self.data) 275 bb.event.fire(bb.event.ConfigParsed(), self.data)
275 276
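
The hashing that used to be inlined in parseBaseConfiguration() is factored out into calc_datastore_hashes(), which folds the base datastore hash and every multiconfig datastore hash into a single sha256 digest. A standalone sketch, with the get_hash() strings passed in directly (the example values are made up):

    import hashlib

    def calc_datastore_hashes(base_hash, mc_hashes):
        # base_hash and mc_hashes stand in for DataSmart.get_hash()
        # results; mc_hashes is keyed by BBMULTICONFIG entry.
        data_hash = hashlib.sha256()
        data_hash.update(base_hash.encode('utf-8'))
        for config in mc_hashes:
            data_hash.update(mc_hashes[config].encode('utf-8'))
        return data_hash.hexdigest()

    # e.g. calc_datastore_hashes("aaaa...", {"mc-hw": "bbbb..."})
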
@@ -286,44 +287,66 @@ class CookerDataBuilder(object):
286 bb.event.fire(bb.event.ConfigParsed(), self.data) 287 bb.event.fire(bb.event.ConfigParsed(), self.data)
287 288
288 bb.parse.init_parser(self.data) 289 bb.parse.init_parser(self.data)
289 data_hash.update(self.data.get_hash().encode('utf-8')) 290 mcdata[''] = self.data
290 self.mcdata[''] = self.data
291 291
292 multiconfig = (self.data.getVar("BBMULTICONFIG") or "").split() 292 multiconfig = (self.data.getVar("BBMULTICONFIG") or "").split()
293 for config in multiconfig: 293 for config in multiconfig:
294 mcdata = self.parseConfigurationFiles(self.prefiles, self.postfiles, config) 294 if config[0].isdigit():
295 bb.event.fire(bb.event.ConfigParsed(), mcdata) 295 bb.fatal("Multiconfig name '%s' is invalid as multiconfigs cannot start with a digit" % config)
296 self.mcdata[config] = mcdata 296 parsed_mcdata = self.parseConfigurationFiles(self.prefiles, self.postfiles, config)
297 data_hash.update(mcdata.get_hash().encode('utf-8')) 297 bb.event.fire(bb.event.ConfigParsed(), parsed_mcdata)
298 mcdata[config] = parsed_mcdata
298 if multiconfig: 299 if multiconfig:
299 bb.event.fire(bb.event.MultiConfigParsed(self.mcdata), self.data) 300 bb.event.fire(bb.event.MultiConfigParsed(mcdata), self.data)
300 301
301 self.data_hash = data_hash.hexdigest()
302 except (SyntaxError, bb.BBHandledException):
303 raise bb.BBHandledException()
304 except bb.data_smart.ExpansionError as e: 302 except bb.data_smart.ExpansionError as e:
305 logger.error(str(e)) 303 logger.error(str(e))
306 raise bb.BBHandledException() 304 raise bb.BBHandledException()
307 except Exception: 305
308 logger.exception("Error parsing configuration files") 306 bb.codeparser.update_module_dependencies(self.data)
307
308 # Handle obsolete variable names
309 d = self.data
310 renamedvars = d.getVarFlags('BB_RENAMED_VARIABLES') or {}
311 renamedvars.update(bb.data_smart.bitbake_renamed_vars)
312 issues = False
313 for v in renamedvars:
314 if d.getVar(v) != None or d.hasOverrides(v):
315 issues = True
316 loginfo = {}
317 history = d.varhistory.get_variable_refs(v)
318 for h in history:
319 for line in history[h]:
320 loginfo = {'file' : h, 'line' : line}
321 bb.data.data_smart._print_rename_error(v, loginfo, renamedvars)
322 if not history:
323 bb.data.data_smart._print_rename_error(v, loginfo, renamedvars)
324 if issues:
309 raise bb.BBHandledException() 325 raise bb.BBHandledException()
310 326
327 for mc in mcdata:
328 mcdata[mc].renameVar("__depends", "__base_depends")
329 mcdata[mc].setVar("__bbclasstype", "recipe")
330
311 # Create a copy so we can reset at a later date when UIs disconnect 331 # Create a copy so we can reset at a later date when UIs disconnect
312 self.origdata = self.data 332 self.mcorigdata = mcdata
313 self.data = bb.data.createCopy(self.origdata) 333 for mc in mcdata:
314 self.mcdata[''] = self.data 334 self.mcdata[mc] = bb.data.createCopy(mcdata[mc])
335 self.data = self.mcdata['']
336 self.calc_datastore_hashes()
315 337
316 def reset(self): 338 def reset(self):
317 # We may not have run parseBaseConfiguration() yet 339 # We may not have run parseBaseConfiguration() yet
318 if not hasattr(self, 'origdata'): 340 if not hasattr(self, 'mcorigdata'):
319 return 341 return
320 self.data = bb.data.createCopy(self.origdata) 342 for mc in self.mcorigdata:
321 self.mcdata[''] = self.data 343 self.mcdata[mc] = bb.data.createCopy(self.mcorigdata[mc])
344 self.data = self.mcdata['']
322 345
323 def _findLayerConf(self, data): 346 def _findLayerConf(self, data):
324 return findConfigFile("bblayers.conf", data) 347 return findConfigFile("bblayers.conf", data)
325 348
326 def parseConfigurationFiles(self, prefiles, postfiles, mc = "default"): 349 def parseConfigurationFiles(self, prefiles, postfiles, mc = ""):
327 data = bb.data.createCopy(self.basedata) 350 data = bb.data.createCopy(self.basedata)
328 data.setVar("BB_CURRENT_MC", mc) 351 data.setVar("BB_CURRENT_MC", mc)
329 352
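
The new obsolete-variable pass walks the BB_RENAMED_VARIABLES flags (merged with the built-in bitbake_renamed_vars table) and reports every file:line where a renamed variable was touched, using the variable history. A simplified sketch, with print() standing in for _print_rename_error():

    def report_renamed_vars(d, renamedvars):
        # d is any DataSmart-like object with getVar(), hasOverrides()
        # and varhistory.get_variable_refs().
        issues = False
        for v in renamedvars:
            if d.getVar(v) is not None or d.hasOverrides(v):
                issues = True
                refs = d.varhistory.get_variable_refs(v)
                for fname in refs:
                    for line in refs[fname]:
                        print("Variable %s has been renamed to %s (file: %s line: %s)"
                              % (v, renamedvars[v], fname, line))
                if not refs:
                    print("Variable %s has been renamed to %s" % (v, renamedvars[v]))
        return issues  # caller raises BBHandledException if True
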
@@ -333,15 +356,23 @@ class CookerDataBuilder(object):
333 356
334 layerconf = self._findLayerConf(data) 357 layerconf = self._findLayerConf(data)
335 if layerconf: 358 if layerconf:
336 parselog.debug(2, "Found bblayers.conf (%s)", layerconf) 359 parselog.debug2("Found bblayers.conf (%s)", layerconf)
337 # By definition bblayers.conf is in conf/ of TOPDIR. 360 # By definition bblayers.conf is in conf/ of TOPDIR.
338 # We may have been called with cwd somewhere else so reset TOPDIR 361 # We may have been called with cwd somewhere else so reset TOPDIR
339 data.setVar("TOPDIR", os.path.dirname(os.path.dirname(layerconf))) 362 data.setVar("TOPDIR", os.path.dirname(os.path.dirname(layerconf)))
340 data = parse_config_file(layerconf, data) 363 data = parse_config_file(layerconf, data)
341 364
365 if not data.getVar("BB_CACHEDIR"):
366 data.setVar("BB_CACHEDIR", "${TOPDIR}/cache")
367
368 bb.codeparser.parser_cache_init(data.getVar("BB_CACHEDIR"))
369
342 layers = (data.getVar('BBLAYERS') or "").split() 370 layers = (data.getVar('BBLAYERS') or "").split()
343 broken_layers = [] 371 broken_layers = []
344 372
373 if not layers:
374 bb.fatal("The bblayers.conf file doesn't contain any BBLAYERS definition")
375
345 data = bb.data.createCopy(data) 376 data = bb.data.createCopy(data)
346 approved = bb.utils.approved_variables() 377 approved = bb.utils.approved_variables()
347 378
@@ -357,8 +388,10 @@ class CookerDataBuilder(object):
357 parselog.critical("Please check BBLAYERS in %s" % (layerconf)) 388 parselog.critical("Please check BBLAYERS in %s" % (layerconf))
358 raise bb.BBHandledException() 389 raise bb.BBHandledException()
359 390
391 layerseries = None
392 compat_entries = {}
360 for layer in layers: 393 for layer in layers:
361 parselog.debug(2, "Adding layer %s", layer) 394 parselog.debug2("Adding layer %s", layer)
362 if 'HOME' in approved and '~' in layer: 395 if 'HOME' in approved and '~' in layer:
363 layer = os.path.expanduser(layer) 396 layer = os.path.expanduser(layer)
364 if layer.endswith('/'): 397 if layer.endswith('/'):
@@ -369,8 +402,27 @@ class CookerDataBuilder(object):
369 data.expandVarref('LAYERDIR') 402 data.expandVarref('LAYERDIR')
370 data.expandVarref('LAYERDIR_RE') 403 data.expandVarref('LAYERDIR_RE')
371 404
405 # Sadly we can't have nice things.
406 # Some layers think they're going to be 'clever' and copy the values from
407 # another layer, e.g. using ${LAYERSERIES_COMPAT_core}. The whole point of
408 # this mechanism is to make it clear which releases a layer supports and
409 # show when a layer master branch is bitrotting and is unmaintained.
410 # We therefore avoid people doing this here.
411 collections = (data.getVar('BBFILE_COLLECTIONS') or "").split()
412 for c in collections:
413 compat_entry = data.getVar("LAYERSERIES_COMPAT_%s" % c)
414 if compat_entry:
415 compat_entries[c] = set(compat_entry.split())
416 data.delVar("LAYERSERIES_COMPAT_%s" % c)
417 if not layerseries:
418 layerseries = set((data.getVar("LAYERSERIES_CORENAMES") or "").split())
419 if layerseries:
420 data.delVar("LAYERSERIES_CORENAMES")
421
372 data.delVar('LAYERDIR_RE') 422 data.delVar('LAYERDIR_RE')
373 data.delVar('LAYERDIR') 423 data.delVar('LAYERDIR')
424 for c in compat_entries:
425 data.setVar("LAYERSERIES_COMPAT_%s" % c, " ".join(sorted(compat_entries[c])))
374 426
375 bbfiles_dynamic = (data.getVar('BBFILES_DYNAMIC') or "").split() 427 bbfiles_dynamic = (data.getVar('BBFILES_DYNAMIC') or "").split()
376 collections = (data.getVar('BBFILE_COLLECTIONS') or "").split() 428 collections = (data.getVar('BBFILE_COLLECTIONS') or "").split()
@@ -389,26 +441,38 @@ class CookerDataBuilder(object):
389 if invalid: 441 if invalid:
390 bb.fatal("BBFILES_DYNAMIC entries must be of the form {!}<collection name>:<filename pattern>, not:\n %s" % "\n ".join(invalid)) 442 bb.fatal("BBFILES_DYNAMIC entries must be of the form {!}<collection name>:<filename pattern>, not:\n %s" % "\n ".join(invalid))
391 443
392 layerseries = set((data.getVar("LAYERSERIES_CORENAMES") or "").split())
393 collections_tmp = collections[:] 444 collections_tmp = collections[:]
394 for c in collections: 445 for c in collections:
395 collections_tmp.remove(c) 446 collections_tmp.remove(c)
396 if c in collections_tmp: 447 if c in collections_tmp:
397 bb.fatal("Found duplicated BBFILE_COLLECTIONS '%s', check bblayers.conf or layer.conf to fix it." % c) 448 bb.fatal("Found duplicated BBFILE_COLLECTIONS '%s', check bblayers.conf or layer.conf to fix it." % c)
398 compat = set((data.getVar("LAYERSERIES_COMPAT_%s" % c) or "").split()) 449
450 compat = set()
451 if c in compat_entries:
452 compat = compat_entries[c]
453 if compat and not layerseries:
454 bb.fatal("No core layer found to work with layer '%s'. Missing entry in bblayers.conf?" % c)
399 if compat and not (compat & layerseries): 455 if compat and not (compat & layerseries):
400 bb.fatal("Layer %s is not compatible with the core layer which only supports these series: %s (layer is compatible with %s)" 456 bb.fatal("Layer %s is not compatible with the core layer which only supports these series: %s (layer is compatible with %s)"
401 % (c, " ".join(layerseries), " ".join(compat))) 457 % (c, " ".join(layerseries), " ".join(compat)))
402 elif not compat and not data.getVar("BB_WORKERCONTEXT"): 458 elif not compat and not data.getVar("BB_WORKERCONTEXT"):
403 bb.warn("Layer %s should set LAYERSERIES_COMPAT_%s in its conf/layer.conf file to list the core layer names it is compatible with." % (c, c)) 459 bb.warn("Layer %s should set LAYERSERIES_COMPAT_%s in its conf/layer.conf file to list the core layer names it is compatible with." % (c, c))
404 460
461 data.setVar("LAYERSERIES_CORENAMES", " ".join(sorted(layerseries)))
462
405 if not data.getVar("BBPATH"): 463 if not data.getVar("BBPATH"):
406 msg = "The BBPATH variable is not set" 464 msg = "The BBPATH variable is not set"
407 if not layerconf: 465 if not layerconf:
408 msg += (" and bitbake did not find a conf/bblayers.conf file in" 466 msg += (" and bitbake did not find a conf/bblayers.conf file in"
409 " the expected location.\nMaybe you accidentally" 467 " the expected location.\nMaybe you accidentally"
410 " invoked bitbake from the wrong directory?") 468 " invoked bitbake from the wrong directory?")
411 raise SystemExit(msg) 469 bb.fatal(msg)
470
471 if not data.getVar("TOPDIR"):
472 data.setVar("TOPDIR", os.path.abspath(os.getcwd()))
473 if not data.getVar("BB_CACHEDIR"):
474 data.setVar("BB_CACHEDIR", "${TOPDIR}/cache")
475 bb.codeparser.parser_cache_init(data.getVar("BB_CACHEDIR"))
412 476
413 data = parse_config_file(os.path.join("conf", "bitbake.conf"), data) 477 data = parse_config_file(os.path.join("conf", "bitbake.conf"), data)
414 478
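
The compatibility check itself now works on the collected compat_entries rather than reading LAYERSERIES_COMPAT_* back out of the datastore, and the acceptance rule is a plain set intersection. A sketch with SystemExit in place of bb.fatal() and hypothetical layer/series names:

    def check_layer_compat(collection, compat_entries, layerseries):
        # compat_entries: {collection: set of series}; layerseries:
        # set from LAYERSERIES_CORENAMES. Both plain Python stand-ins.
        compat = compat_entries.get(collection, set())
        if compat and not layerseries:
            raise SystemExit("No core layer found to work with layer '%s'" % collection)
        if compat and not (compat & layerseries):
            raise SystemExit("Layer %s is compatible with %s, core supports %s"
                             % (collection, " ".join(sorted(compat)),
                                " ".join(sorted(layerseries))))

    check_layer_compat("meta-foo", {"meta-foo": {"kirkstone"}}, {"kirkstone"})
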
@@ -421,7 +485,7 @@ class CookerDataBuilder(object):
421 for bbclass in bbclasses: 485 for bbclass in bbclasses:
422 data = _inherit(bbclass, data) 486 data = _inherit(bbclass, data)
423 487
424 # Nomally we only register event handlers at the end of parsing .bb files 488 # Normally we only register event handlers at the end of parsing .bb files
425 # We register any handlers we've found so far here... 489 # We register any handlers we've found so far here...
426 for var in data.getVar('__BBHANDLERS', False) or []: 490 for var in data.getVar('__BBHANDLERS', False) or []:
427 handlerfn = data.getVarFlag(var, "filename", False) 491 handlerfn = data.getVarFlag(var, "filename", False)
@@ -435,3 +499,54 @@ class CookerDataBuilder(object):
435 499
436 return data 500 return data
437 501
502 @staticmethod
503 def _parse_recipe(bb_data, bbfile, appends, mc, layername):
504 bb_data.setVar("__BBMULTICONFIG", mc)
505 bb_data.setVar("FILE_LAYERNAME", layername)
506
507 bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
508 bb.parse.cached_mtime_noerror(bbfile_loc)
509
510 if appends:
511 bb_data.setVar('__BBAPPEND', " ".join(appends))
512
513 return bb.parse.handle(bbfile, bb_data)
514
515 def parseRecipeVariants(self, bbfile, appends, virtonly=False, mc=None, layername=None):
516 """
517 Load and parse one .bb build file
518 Return the data and whether parsing resulted in the file being skipped
519 """
520
521 if virtonly:
522 (bbfile, virtual, mc) = bb.cache.virtualfn2realfn(bbfile)
523 bb_data = self.mcdata[mc].createCopy()
524 bb_data.setVar("__ONLYFINALISE", virtual or "default")
525 return self._parse_recipe(bb_data, bbfile, appends, mc, layername)
526
527 if mc is not None:
528 bb_data = self.mcdata[mc].createCopy()
529 return self._parse_recipe(bb_data, bbfile, appends, mc, layername)
530
531 bb_data = self.data.createCopy()
532 datastores = self._parse_recipe(bb_data, bbfile, appends, '', layername)
533
534 for mc in self.mcdata:
535 if not mc:
536 continue
537 bb_data = self.mcdata[mc].createCopy()
538 newstores = self._parse_recipe(bb_data, bbfile, appends, mc, layername)
539 for ns in newstores:
540 datastores["mc:%s:%s" % (mc, ns)] = newstores[ns]
541
542 return datastores
543
544 def parseRecipe(self, virtualfn, appends, layername):
545 """
546 Return a complete set of data for fn.
547 To do this, we need to parse the file.
548 """
549 logger.debug("Parsing %s (full)" % virtualfn)
550 (fn, virtual, mc) = bb.cache.virtualfn2realfn(virtualfn)
551 datastores = self.parseRecipeVariants(virtualfn, appends, virtonly=True, layername=layername)
552 return datastores[virtual]
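
parseRecipe() and parseRecipeVariants() lean on bb.cache.virtualfn2realfn() to split a virtual filename into its real path, class variant and multiconfig, and variants from extra multiconfigs are keyed as "mc:<mc>:<name>" in the returned dict. Roughly, reconstructed from the naming convention rather than copied from cache.py:

    def virtualfn2realfn(virtualfn):
        # 'mc:<mc>:virtual:<cls>:<path>' -> (<path>, <cls>, <mc>)
        mc = ""
        if virtualfn.startswith('mc:') and virtualfn.count(':') >= 2:
            elems = virtualfn.split(':')
            mc = elems[1]
            virtualfn = ":".join(elems[2:])
        fn, cls = virtualfn, ""
        if virtualfn.startswith('virtual:'):
            elems = virtualfn.split(':')
            cls = ":".join(elems[1:-1])
            fn = elems[-1]
        return (fn, cls, mc)

    assert virtualfn2realfn("mc:hw:virtual:native:/r/foo.bb") == ("/r/foo.bb", "native", "hw")
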
diff --git a/bitbake/lib/bb/daemonize.py b/bitbake/lib/bb/daemonize.py
index c187fcfc6c..7689404436 100644
--- a/bitbake/lib/bb/daemonize.py
+++ b/bitbake/lib/bb/daemonize.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright BitBake Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -74,26 +76,26 @@ def createDaemon(function, logfile):
74 with open('/dev/null', 'r') as si: 76 with open('/dev/null', 'r') as si:
75 os.dup2(si.fileno(), sys.stdin.fileno()) 77 os.dup2(si.fileno(), sys.stdin.fileno())
76 78
77 try: 79 with open(logfile, 'a+') as so:
78 so = open(logfile, 'a+') 80 try:
79 os.dup2(so.fileno(), sys.stdout.fileno()) 81 os.dup2(so.fileno(), sys.stdout.fileno())
80 os.dup2(so.fileno(), sys.stderr.fileno()) 82 os.dup2(so.fileno(), sys.stderr.fileno())
81 except io.UnsupportedOperation: 83 except io.UnsupportedOperation:
82 sys.stdout = open(logfile, 'a+') 84 sys.stdout = so
83 85
84 # Have stdout and stderr be the same so log output matches chronologically 86 # Have stdout and stderr be the same so log output matches chronologically
85 # and there aren't two seperate buffers 87 # and there aren't two separate buffers
86 sys.stderr = sys.stdout 88 sys.stderr = sys.stdout
87 89
88 try: 90 try:
89 function() 91 function()
90 except Exception as e: 92 except Exception as e:
91 traceback.print_exc() 93 traceback.print_exc()
92 finally: 94 finally:
93 bb.event.print_ui_queue() 95 bb.event.print_ui_queue()
94 # os._exit() doesn't flush open files like os.exit() does. Manually flush 96 # os._exit() doesn't flush open files like os.exit() does. Manually flush
95 # stdout and stderr so that any logging output will be seen, particularly 97 # stdout and stderr so that any logging output will be seen, particularly
96 # exception tracebacks. 98 # exception tracebacks.
97 sys.stdout.flush() 99 sys.stdout.flush()
98 sys.stderr.flush() 100 sys.stderr.flush()
99 os._exit(0) 101 os._exit(0)
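
The daemonize change keeps the log file open via a context manager and reuses one stream for both stdout and stderr. The redirection pattern in isolation (io.UnsupportedOperation covers streams without a real file descriptor, e.g. captured test streams):

    import io, os, sys

    def redirect_to_log(logfile):
        with open(logfile, 'a+') as so:
            try:
                # Point the real fds at the log so child processes
                # inherit the redirection too.
                os.dup2(so.fileno(), sys.stdout.fileno())
                os.dup2(so.fileno(), sys.stderr.fileno())
            except io.UnsupportedOperation:
                sys.stdout = so
            # One buffer for both keeps output chronological.
            sys.stderr = sys.stdout
            print("daemon log opened")  # lands in logfile
            sys.stdout.flush()
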
diff --git a/bitbake/lib/bb/data.py b/bitbake/lib/bb/data.py
index 97022853ca..f672a84451 100644
--- a/bitbake/lib/bb/data.py
+++ b/bitbake/lib/bb/data.py
@@ -4,14 +4,16 @@ BitBake 'Data' implementations
4Functions for interacting with the data structure used by the 4Functions for interacting with the data structure used by the
5BitBake build tools. 5BitBake build tools.
6 6
7The expandKeys and update_data are the most expensive 7expandKeys and datastore iteration are the most expensive
8operations. At night the cookie monster came by and 8operations. Updating overrides is now "on the fly" but still based
9on the idea of the cookie monster introduced by zecke:
10"At night the cookie monster came by and
9suggested 'give me cookies on setting the variables and 11suggested 'give me cookies on setting the variables and
10things will work out'. Taking this suggestion into account 12things will work out'. Taking this suggestion into account
11applying the skills from the not yet passed 'Entwurf und 13applying the skills from the not yet passed 'Entwurf und
12Analyse von Algorithmen' lecture and the cookie 14Analyse von Algorithmen' lecture and the cookie
13monster seems to be right. We will track setVar more carefully 15monster seems to be right. We will track setVar more carefully
14to have faster update_data and expandKeys operations. 16to have faster datastore operations."
15 17
16This is a trade-off between speed and memory again but 18This is a trade-off between speed and memory again but
17the speed is more critical here. 19the speed is more critical here.
@@ -26,11 +28,6 @@ the speed is more critical here.
26 28
27import sys, os, re 29import sys, os, re
28import hashlib 30import hashlib
29if sys.argv[0][-5:] == "pydoc":
30 path = os.path.dirname(os.path.dirname(sys.argv[1]))
31else:
32 path = os.path.dirname(os.path.dirname(sys.argv[0]))
33sys.path.insert(0, path)
34from itertools import groupby 31from itertools import groupby
35 32
36from bb import data_smart 33from bb import data_smart
@@ -70,10 +67,6 @@ def keys(d):
70 """Return a list of keys in d""" 67 """Return a list of keys in d"""
71 return d.keys() 68 return d.keys()
72 69
73
74__expand_var_regexp__ = re.compile(r"\${[^{}]+}")
75__expand_python_regexp__ = re.compile(r"\${@.+?}")
76
77def expand(s, d, varname = None): 70def expand(s, d, varname = None):
78 """Variable expansion using the data store""" 71 """Variable expansion using the data store"""
79 return d.expand(s, varname) 72 return d.expand(s, varname)
@@ -121,8 +114,8 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):
121 if d.getVarFlag(var, 'python', False) and func: 114 if d.getVarFlag(var, 'python', False) and func:
122 return False 115 return False
123 116
124 export = d.getVarFlag(var, "export", False) 117 export = bb.utils.to_boolean(d.getVarFlag(var, "export"))
125 unexport = d.getVarFlag(var, "unexport", False) 118 unexport = bb.utils.to_boolean(d.getVarFlag(var, "unexport"))
126 if not all and not export and not unexport and not func: 119 if not all and not export and not unexport and not func:
127 return False 120 return False
128 121
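
emit_var() and exported_keys() now interpret the export/unexport varflags through bb.utils.to_boolean() instead of plain truthiness, so export = "0" no longer counts as exported. to_boolean() behaves roughly like this sketch:

    def to_boolean(string, default=None):
        # Sketch of bb.utils.to_boolean(): empty/None -> default,
        # yes-ish strings -> True, no-ish strings -> False.
        if not string:
            return default
        normalized = str(string).lower()
        if normalized in ("y", "yes", "1", "true"):
            return True
        if normalized in ("n", "no", "0", "false"):
            return False
        raise ValueError("Invalid boolean value '%s'" % string)

    assert to_boolean("0") is False  # previously truthy as a non-empty string
    assert to_boolean("1") is True
    assert to_boolean(None) is None
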
@@ -195,8 +188,8 @@ def emit_env(o=sys.__stdout__, d = init(), all=False):
195 188
196def exported_keys(d): 189def exported_keys(d):
197 return (key for key in d.keys() if not key.startswith('__') and 190 return (key for key in d.keys() if not key.startswith('__') and
198 d.getVarFlag(key, 'export', False) and 191 bb.utils.to_boolean(d.getVarFlag(key, 'export')) and
199 not d.getVarFlag(key, 'unexport', False)) 192 not bb.utils.to_boolean(d.getVarFlag(key, 'unexport')))
200 193
201def exported_vars(d): 194def exported_vars(d):
202 k = list(exported_keys(d)) 195 k = list(exported_keys(d))
@@ -226,7 +219,7 @@ def emit_func(func, o=sys.__stdout__, d = init()):
226 deps = newdeps 219 deps = newdeps
227 seen |= deps 220 seen |= deps
228 newdeps = set() 221 newdeps = set()
229 for dep in deps: 222 for dep in sorted(deps):
230 if d.getVarFlag(dep, "func", False) and not d.getVarFlag(dep, "python", False): 223 if d.getVarFlag(dep, "func", False) and not d.getVarFlag(dep, "python", False):
231 emit_var(dep, o, d, False) and o.write('\n') 224 emit_var(dep, o, d, False) and o.write('\n')
232 newdeps |= bb.codeparser.ShellParser(dep, logger).parse_shell(d.getVar(dep)) 225 newdeps |= bb.codeparser.ShellParser(dep, logger).parse_shell(d.getVar(dep))
@@ -268,65 +261,72 @@ def emit_func_python(func, o=sys.__stdout__, d = init()):
268 newdeps |= set((d.getVarFlag(dep, "vardeps") or "").split()) 261 newdeps |= set((d.getVarFlag(dep, "vardeps") or "").split())
269 newdeps -= seen 262 newdeps -= seen
270 263
271def update_data(d): 264def build_dependencies(key, keys, mod_funcs, shelldeps, varflagsexcl, ignored_vars, d, codeparsedata):
272 """Performs final steps upon the datastore, including application of overrides""" 265 def handle_contains(value, contains, exclusions, d):
273 d.finalize(parent = True) 266 newvalue = []
267 if value:
268 newvalue.append(str(value))
269 for k in sorted(contains):
270 if k in exclusions or k in ignored_vars:
271 continue
272 l = (d.getVar(k) or "").split()
273 for item in sorted(contains[k]):
274 for word in item.split():
275 if not word in l:
276 newvalue.append("\n%s{%s} = Unset" % (k, item))
277 break
278 else:
279 newvalue.append("\n%s{%s} = Set" % (k, item))
280 return "".join(newvalue)
281
282 def handle_remove(value, deps, removes, d):
283 for r in sorted(removes):
284 r2 = d.expandWithRefs(r, None)
285 value += "\n_remove of %s" % r
286 deps |= r2.references
287 deps = deps | (keys & r2.execs)
288 value = handle_contains(value, r2.contains, exclusions, d)
289 return value
274 290
275def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
276 deps = set() 291 deps = set()
277 try: 292 try:
293 if key in mod_funcs:
294 exclusions = set()
295 moddep = bb.codeparser.modulecode_deps[key]
296 value = handle_contains(moddep[4], moddep[3], exclusions, d)
297 return frozenset((moddep[0] | keys & moddep[1]) - ignored_vars), value
298
278 if key[-1] == ']': 299 if key[-1] == ']':
279 vf = key[:-1].split('[') 300 vf = key[:-1].split('[')
301 if vf[1] == "vardepvalueexclude":
302 return deps, ""
280 value, parser = d.getVarFlag(vf[0], vf[1], False, retparser=True) 303 value, parser = d.getVarFlag(vf[0], vf[1], False, retparser=True)
281 deps |= parser.references 304 deps |= parser.references
282 deps = deps | (keys & parser.execs) 305 deps = deps | (keys & parser.execs)
283 return deps, value 306 deps -= ignored_vars
307 return frozenset(deps), value
284 varflags = d.getVarFlags(key, ["vardeps", "vardepvalue", "vardepsexclude", "exports", "postfuncs", "prefuncs", "lineno", "filename"]) or {} 308 varflags = d.getVarFlags(key, ["vardeps", "vardepvalue", "vardepsexclude", "exports", "postfuncs", "prefuncs", "lineno", "filename"]) or {}
285 vardeps = varflags.get("vardeps") 309 vardeps = varflags.get("vardeps")
286 310 exclusions = varflags.get("vardepsexclude", "").split()
287 def handle_contains(value, contains, d):
288 newvalue = ""
289 for k in sorted(contains):
290 l = (d.getVar(k) or "").split()
291 for item in sorted(contains[k]):
292 for word in item.split():
293 if not word in l:
294 newvalue += "\n%s{%s} = Unset" % (k, item)
295 break
296 else:
297 newvalue += "\n%s{%s} = Set" % (k, item)
298 if not newvalue:
299 return value
300 if not value:
301 return newvalue
302 return value + newvalue
303
304 def handle_remove(value, deps, removes, d):
305 for r in sorted(removes):
306 r2 = d.expandWithRefs(r, None)
307 value += "\n_remove of %s" % r
308 deps |= r2.references
309 deps = deps | (keys & r2.execs)
310 return value
311 311
312 if "vardepvalue" in varflags: 312 if "vardepvalue" in varflags:
313 value = varflags.get("vardepvalue") 313 value = varflags.get("vardepvalue")
314 elif varflags.get("func"): 314 elif varflags.get("func"):
315 if varflags.get("python"): 315 if varflags.get("python"):
316 value = d.getVarFlag(key, "_content", False) 316 value = codeparsedata.getVarFlag(key, "_content", False)
317 parser = bb.codeparser.PythonParser(key, logger) 317 parser = bb.codeparser.PythonParser(key, logger)
318 parser.parse_python(value, filename=varflags.get("filename"), lineno=varflags.get("lineno")) 318 parser.parse_python(value, filename=varflags.get("filename"), lineno=varflags.get("lineno"))
319 deps = deps | parser.references 319 deps = deps | parser.references
320 deps = deps | (keys & parser.execs) 320 deps = deps | (keys & parser.execs)
321 value = handle_contains(value, parser.contains, d) 321 value = handle_contains(value, parser.contains, exclusions, d)
322 else: 322 else:
323 value, parsedvar = d.getVarFlag(key, "_content", False, retparser=True) 323 value, parsedvar = codeparsedata.getVarFlag(key, "_content", False, retparser=True)
324 parser = bb.codeparser.ShellParser(key, logger) 324 parser = bb.codeparser.ShellParser(key, logger)
325 parser.parse_shell(parsedvar.value) 325 parser.parse_shell(parsedvar.value)
326 deps = deps | shelldeps 326 deps = deps | shelldeps
327 deps = deps | parsedvar.references 327 deps = deps | parsedvar.references
328 deps = deps | (keys & parser.execs) | (keys & parsedvar.execs) 328 deps = deps | (keys & parser.execs) | (keys & parsedvar.execs)
329 value = handle_contains(value, parsedvar.contains, d) 329 value = handle_contains(value, parsedvar.contains, exclusions, d)
330 if hasattr(parsedvar, "removes"): 330 if hasattr(parsedvar, "removes"):
331 value = handle_remove(value, deps, parsedvar.removes, d) 331 value = handle_remove(value, deps, parsedvar.removes, d)
332 if vardeps is None: 332 if vardeps is None:
@@ -341,7 +341,7 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
341 value, parser = d.getVarFlag(key, "_content", False, retparser=True) 341 value, parser = d.getVarFlag(key, "_content", False, retparser=True)
342 deps |= parser.references 342 deps |= parser.references
343 deps = deps | (keys & parser.execs) 343 deps = deps | (keys & parser.execs)
344 value = handle_contains(value, parser.contains, d) 344 value = handle_contains(value, parser.contains, exclusions, d)
345 if hasattr(parser, "removes"): 345 if hasattr(parser, "removes"):
346 value = handle_remove(value, deps, parser.removes, d) 346 value = handle_remove(value, deps, parser.removes, d)
347 347
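
The rewritten handle_contains() builds the signature text as a list of fragments (cheaper than repeated string concatenation) and skips variables in vardepsexclude or the new ignored_vars set. An equivalent standalone formulation, using all() in place of the diff's for/else:

    def handle_contains(value, contains, exclusions, ignored_vars, d):
        # d only needs getVar() returning a space-separated string.
        newvalue = []
        if value:
            newvalue.append(str(value))
        for k in sorted(contains):
            if k in exclusions or k in ignored_vars:
                continue
            words = (d.getVar(k) or "").split()
            for item in sorted(contains[k]):
                state = "Set" if all(w in words for w in item.split()) else "Unset"
                newvalue.append("\n%s{%s} = %s" % (k, item, state))
        return "".join(newvalue)
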
@@ -361,43 +361,50 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
361 deps |= set(varfdeps) 361 deps |= set(varfdeps)
362 362
363 deps |= set((vardeps or "").split()) 363 deps |= set((vardeps or "").split())
364 deps -= set(varflags.get("vardepsexclude", "").split()) 364 deps -= set(exclusions)
365 deps -= ignored_vars
365 except bb.parse.SkipRecipe: 366 except bb.parse.SkipRecipe:
366 raise 367 raise
367 except Exception as e: 368 except Exception as e:
368 bb.warn("Exception during build_dependencies for %s" % key) 369 bb.warn("Exception during build_dependencies for %s" % key)
369 raise 370 raise
370 return deps, value 371 return frozenset(deps), value
371 #bb.note("Variable %s references %s and calls %s" % (key, str(deps), str(execs))) 372 #bb.note("Variable %s references %s and calls %s" % (key, str(deps), str(execs)))
372 #d.setVarFlag(key, "vardeps", deps) 373 #d.setVarFlag(key, "vardeps", deps)
373 374
374def generate_dependencies(d, whitelist): 375def generate_dependencies(d, ignored_vars):
375 376
376 keys = set(key for key in d if not key.startswith("__")) 377 mod_funcs = set(bb.codeparser.modulecode_deps.keys())
377 shelldeps = set(key for key in d.getVar("__exportlist", False) if d.getVarFlag(key, "export", False) and not d.getVarFlag(key, "unexport", False)) 378 keys = set(key for key in d if not key.startswith("__")) | mod_funcs
379 shelldeps = set(key for key in d.getVar("__exportlist", False) if bb.utils.to_boolean(d.getVarFlag(key, "export")) and not bb.utils.to_boolean(d.getVarFlag(key, "unexport")))
378 varflagsexcl = d.getVar('BB_SIGNATURE_EXCLUDE_FLAGS') 380 varflagsexcl = d.getVar('BB_SIGNATURE_EXCLUDE_FLAGS')
379 381
382 codeparserd = d.createCopy()
383 for forced in (d.getVar('BB_HASH_CODEPARSER_VALS') or "").split():
384 key, value = forced.split("=", 1)
385 codeparserd.setVar(key, value)
386
380 deps = {} 387 deps = {}
381 values = {} 388 values = {}
382 389
383 tasklist = d.getVar('__BBTASKS', False) or [] 390 tasklist = d.getVar('__BBTASKS', False) or []
384 for task in tasklist: 391 for task in tasklist:
385 deps[task], values[task] = build_dependencies(task, keys, shelldeps, varflagsexcl, d) 392 deps[task], values[task] = build_dependencies(task, keys, mod_funcs, shelldeps, varflagsexcl, ignored_vars, d, codeparserd)
386 newdeps = deps[task] 393 newdeps = deps[task]
387 seen = set() 394 seen = set()
388 while newdeps: 395 while newdeps:
389 nextdeps = newdeps - whitelist 396 nextdeps = newdeps
390 seen |= nextdeps 397 seen |= nextdeps
391 newdeps = set() 398 newdeps = set()
392 for dep in nextdeps: 399 for dep in nextdeps:
393 if dep not in deps: 400 if dep not in deps:
394 deps[dep], values[dep] = build_dependencies(dep, keys, shelldeps, varflagsexcl, d) 401 deps[dep], values[dep] = build_dependencies(dep, keys, mod_funcs, shelldeps, varflagsexcl, ignored_vars, d, codeparserd)
395 newdeps |= deps[dep] 402 newdeps |= deps[dep]
396 newdeps -= seen 403 newdeps -= seen
397 #print "For %s: %s" % (task, str(deps[task])) 404 #print "For %s: %s" % (task, str(deps[task]))
398 return tasklist, deps, values 405 return tasklist, deps, values
399 406
400def generate_dependency_hash(tasklist, gendeps, lookupcache, whitelist, fn): 407def generate_dependency_hash(tasklist, gendeps, lookupcache, ignored_vars, fn):
401 taskdeps = {} 408 taskdeps = {}
402 basehash = {} 409 basehash = {}
403 410
@@ -406,9 +413,10 @@ def generate_dependency_hash(tasklist, gendeps, lookupcache, whitelist, fn):
406 413
407 if data is None: 414 if data is None:
408 bb.error("Task %s from %s seems to be empty?!" % (task, fn)) 415 bb.error("Task %s from %s seems to be empty?!" % (task, fn))
409 data = '' 416 data = []
417 else:
418 data = [data]
410 419
411 gendeps[task] -= whitelist
412 newdeps = gendeps[task] 420 newdeps = gendeps[task]
413 seen = set() 421 seen = set()
414 while newdeps: 422 while newdeps:
@@ -416,27 +424,24 @@ def generate_dependency_hash(tasklist, gendeps, lookupcache, whitelist, fn):
416 seen |= nextdeps 424 seen |= nextdeps
417 newdeps = set() 425 newdeps = set()
418 for dep in nextdeps: 426 for dep in nextdeps:
419 if dep in whitelist:
420 continue
421 gendeps[dep] -= whitelist
422 newdeps |= gendeps[dep] 427 newdeps |= gendeps[dep]
423 newdeps -= seen 428 newdeps -= seen
424 429
425 alldeps = sorted(seen) 430 alldeps = sorted(seen)
426 for dep in alldeps: 431 for dep in alldeps:
427 data = data + dep 432 data.append(dep)
428 var = lookupcache[dep] 433 var = lookupcache[dep]
429 if var is not None: 434 if var is not None:
430 data = data + str(var) 435 data.append(str(var))
431 k = fn + ":" + task 436 k = fn + ":" + task
432 basehash[k] = hashlib.sha256(data.encode("utf-8")).hexdigest() 437 basehash[k] = hashlib.sha256("".join(data).encode("utf-8")).hexdigest()
433 taskdeps[task] = alldeps 438 taskdeps[task] = frozenset(seen)
434 439
435 return taskdeps, basehash 440 return taskdeps, basehash
436 441
437def inherits_class(klass, d): 442def inherits_class(klass, d):
438 val = d.getVar('__inherit_cache', False) or [] 443 val = d.getVar('__inherit_cache', False) or []
439 needle = os.path.join('classes', '%s.bbclass' % klass) 444 needle = '/%s.bbclass' % klass
440 for v in val: 445 for v in val:
441 if v.endswith(needle): 446 if v.endswith(needle):
442 return True 447 return True
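
The hash construction in generate_dependency_hash() above now accumulates its input as a list and joins once, rather than growing a string in a loop; the per-task basehash is still a sha256 over the task's own value plus each sorted dependency name and value. In isolation (example names are made up):

    import hashlib

    def basehash_for(value, deps, lookupcache):
        # value: the task's signature text (may be None);
        # deps: iterable of dependency names; lookupcache: name -> value.
        data = [value] if value is not None else []
        for dep in sorted(deps):
            data.append(dep)
            var = lookupcache[dep]
            if var is not None:
                data.append(str(var))
        return hashlib.sha256("".join(data).encode("utf-8")).hexdigest()

    basehash_for("do_compile body", {"CC"}, {"CC": "gcc"})
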
diff --git a/bitbake/lib/bb/data_smart.py b/bitbake/lib/bb/data_smart.py
index 2328c334ac..8e7dd98384 100644
--- a/bitbake/lib/bb/data_smart.py
+++ b/bitbake/lib/bb/data_smart.py
@@ -16,8 +16,11 @@ BitBake build tools.
16# 16#
17# Based on functions from the base bb module, Copyright 2003 Holger Schurig 17# Based on functions from the base bb module, Copyright 2003 Holger Schurig
18 18
19import copy, re, sys, traceback 19import builtins
20from collections import MutableMapping 20import copy
21import re
22import sys
23from collections.abc import MutableMapping
21import logging 24import logging
22import hashlib 25import hashlib
23import bb, bb.codeparser 26import bb, bb.codeparser
@@ -26,13 +29,25 @@ from bb.COW import COWDictBase
26 29
27logger = logging.getLogger("BitBake.Data") 30logger = logging.getLogger("BitBake.Data")
28 31
29__setvar_keyword__ = ["_append", "_prepend", "_remove"] 32__setvar_keyword__ = [":append", ":prepend", ":remove"]
30__setvar_regexp__ = re.compile(r'(?P<base>.*?)(?P<keyword>_append|_prepend|_remove)(_(?P<add>[^A-Z]*))?$') 33__setvar_regexp__ = re.compile(r'(?P<base>.*?)(?P<keyword>:append|:prepend|:remove)(:(?P<add>[^A-Z]*))?$')
31__expand_var_regexp__ = re.compile(r"\${[a-zA-Z0-9\-_+./~]+?}") 34__expand_var_regexp__ = re.compile(r"\${[a-zA-Z0-9\-_+./~:]+}")
32__expand_python_regexp__ = re.compile(r"\${@.+?}") 35__expand_python_regexp__ = re.compile(r"\${@(?:{.*?}|.)+?}")
33__whitespace_split__ = re.compile(r'(\s)') 36__whitespace_split__ = re.compile(r'(\s)')
34__override_regexp__ = re.compile(r'[a-z0-9]+') 37__override_regexp__ = re.compile(r'[a-z0-9]+')
35 38
39bitbake_renamed_vars = {
40 "BB_ENV_WHITELIST": "BB_ENV_PASSTHROUGH",
41 "BB_ENV_EXTRAWHITE": "BB_ENV_PASSTHROUGH_ADDITIONS",
42 "BB_HASHBASE_WHITELIST": "BB_BASEHASH_IGNORE_VARS",
43 "BB_HASHCONFIG_WHITELIST": "BB_HASHCONFIG_IGNORE_VARS",
44 "BB_HASHTASK_WHITELIST": "BB_TASKHASH_IGNORE_TASKS",
45 "BB_SETSCENE_ENFORCE_WHITELIST": "BB_SETSCENE_ENFORCE_IGNORE_TASKS",
46 "MULTI_PROVIDER_WHITELIST": "BB_MULTI_PROVIDER_ALLOWED",
47 "BB_STAMP_WHITELIST": "is a deprecated variable and support has been removed",
48 "BB_STAMP_POLICY": "is a deprecated variable and support has been removed",
49}
50
36def infer_caller_details(loginfo, parent = False, varval = True): 51def infer_caller_details(loginfo, parent = False, varval = True):
37 """Save the caller the trouble of specifying everything.""" 52 """Save the caller the trouble of specifying everything."""
38 # Save effort. 53 # Save effort.
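
The override syntax switch from '_' to ':' is mostly carried by the two regular expressions above. A quick check of the new __setvar_regexp__ shape against the modern syntax:

    import re

    setvar_re = re.compile(r'(?P<base>.*?)(?P<keyword>:append|:prepend|:remove)(:(?P<add>[^A-Z]*))?$')

    m = setvar_re.match("SRC_URI:append:qemuarm")
    assert m.group("base") == "SRC_URI"
    assert m.group("keyword") == ":append"
    assert m.group("add") == "qemuarm"
    # Old-style SRC_URI_append no longer matches:
    assert setvar_re.match("SRC_URI_append") is None
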
@@ -80,68 +95,79 @@ def infer_caller_details(loginfo, parent = False, varval = True):
80 loginfo['func'] = func 95 loginfo['func'] = func
81 96
82class VariableParse: 97class VariableParse:
83 def __init__(self, varname, d, val = None): 98 def __init__(self, varname, d, unexpanded_value = None, val = None):
84 self.varname = varname 99 self.varname = varname
85 self.d = d 100 self.d = d
86 self.value = val 101 self.value = val
102 self.unexpanded_value = unexpanded_value
87 103
88 self.references = set() 104 self.references = set()
89 self.execs = set() 105 self.execs = set()
90 self.contains = {} 106 self.contains = {}
91 107
92 def var_sub(self, match): 108 def var_sub(self, match):
93 key = match.group()[2:-1] 109 key = match.group()[2:-1]
94 if self.varname and key: 110 if self.varname and key:
95 if self.varname == key: 111 if self.varname == key:
96 raise Exception("variable %s references itself!" % self.varname) 112 raise Exception("variable %s references itself!" % self.varname)
97 var = self.d.getVarFlag(key, "_content") 113 var = self.d.getVarFlag(key, "_content")
98 self.references.add(key) 114 self.references.add(key)
99 if var is not None: 115 if var is not None:
100 return var 116 return var
101 else: 117 else:
102 return match.group() 118 return match.group()
103 119
104 def python_sub(self, match): 120 def python_sub(self, match):
105 if isinstance(match, str): 121 if isinstance(match, str):
106 code = match 122 code = match
107 else: 123 else:
108 code = match.group()[3:-1] 124 code = match.group()[3:-1]
109 125
110 if self.varname: 126 # Do not run code that contains one or more unexpanded variables
111 varname = 'Var <%s>' % self.varname 127 # instead return the code with the characters we removed put back
112 else: 128 if __expand_var_regexp__.findall(code):
113 varname = '<expansion>' 129 return "${@" + code + "}"
114 codeobj = compile(code.strip(), varname, "eval")
115
116 parser = bb.codeparser.PythonParser(self.varname, logger)
117 parser.parse_python(code)
118 if self.varname:
119 vardeps = self.d.getVarFlag(self.varname, "vardeps")
120 if vardeps is None:
121 parser.log.flush()
122 else:
123 parser.log.flush()
124 self.references |= parser.references
125 self.execs |= parser.execs
126 130
127 for k in parser.contains: 131 if self.varname:
128 if k not in self.contains: 132 varname = 'Var <%s>' % self.varname
129 self.contains[k] = parser.contains[k].copy() 133 else:
130 else: 134 varname = '<expansion>'
131 self.contains[k].update(parser.contains[k]) 135 codeobj = compile(code.strip(), varname, "eval")
132 value = utils.better_eval(codeobj, DataContext(self.d), {'d' : self.d}) 136
133 return str(value) 137 parser = bb.codeparser.PythonParser(self.varname, logger)
138 parser.parse_python(code)
139 if self.varname:
140 vardeps = self.d.getVarFlag(self.varname, "vardeps")
141 if vardeps is None:
142 parser.log.flush()
143 else:
144 parser.log.flush()
145 self.references |= parser.references
146 self.execs |= parser.execs
134 147
148 for k in parser.contains:
149 if k not in self.contains:
150 self.contains[k] = parser.contains[k].copy()
151 else:
152 self.contains[k].update(parser.contains[k])
153 value = utils.better_eval(codeobj, DataContext(self.d), {'d' : self.d})
154 return str(value)
135 155
136class DataContext(dict): 156class DataContext(dict):
157 excluded = set([i for i in dir(builtins) if not i.startswith('_')] + ['oe'])
158
137 def __init__(self, metadata, **kwargs): 159 def __init__(self, metadata, **kwargs):
138 self.metadata = metadata 160 self.metadata = metadata
139 dict.__init__(self, **kwargs) 161 dict.__init__(self, **kwargs)
140 self['d'] = metadata 162 self['d'] = metadata
163 self.context = set(bb.utils.get_context())
141 164
142 def __missing__(self, key): 165 def __missing__(self, key):
166 if key in self.excluded or key in self.context:
167 raise KeyError(key)
168
143 value = self.metadata.getVar(key) 169 value = self.metadata.getVar(key)
144 if value is None or self.metadata.getVarFlag(key, 'func', False): 170 if value is None:
145 raise KeyError(key) 171 raise KeyError(key)
146 else: 172 else:
147 return value 173 return value
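
python_sub() now refuses to run a ${@...} expression while it still contains unexpanded ${VAR} references, returning the expression intact so it can be retried on a later expansion pass. The guard in isolation:

    import re

    expand_var_re = re.compile(r"\${[a-zA-Z0-9\-_+./~:]+}")  # the new __expand_var_regexp__

    def python_sub_guard(code):
        # Put back the '${@' and '}' stripped by the caller and defer.
        if expand_var_re.findall(code):
            return "${@" + code + "}"
        return None  # safe to compile and eval

    assert python_sub_guard("d.getVar('${PN}')") == "${@d.getVar('${PN}')}"
    assert python_sub_guard("d.getVar('PN')") is None
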
@@ -151,6 +177,7 @@ class ExpansionError(Exception):
151 self.expression = expression 177 self.expression = expression
152 self.variablename = varname 178 self.variablename = varname
153 self.exception = exception 179 self.exception = exception
180 self.varlist = [varname or expression or ""]
154 if varname: 181 if varname:
155 if expression: 182 if expression:
156 self.msg = "Failure expanding variable %s, expression was %s which triggered exception %s: %s" % (varname, expression, type(exception).__name__, exception) 183 self.msg = "Failure expanding variable %s, expression was %s which triggered exception %s: %s" % (varname, expression, type(exception).__name__, exception)
@@ -160,8 +187,14 @@ class ExpansionError(Exception):
160 self.msg = "Failure expanding expression %s which triggered exception %s: %s" % (expression, type(exception).__name__, exception) 187 self.msg = "Failure expanding expression %s which triggered exception %s: %s" % (expression, type(exception).__name__, exception)
161 Exception.__init__(self, self.msg) 188 Exception.__init__(self, self.msg)
162 self.args = (varname, expression, exception) 189 self.args = (varname, expression, exception)
190
191 def addVar(self, varname):
192 if varname:
193 self.varlist.append(varname)
194
163 def __str__(self): 195 def __str__(self):
164 return self.msg 196 chain = "\nThe variable dependency chain for the failure is: " + " -> ".join(self.varlist)
197 return self.msg + chain
165 198
166class IncludeHistory(object): 199class IncludeHistory(object):
167 def __init__(self, parent = None, filename = '[TOP LEVEL]'): 200 def __init__(self, parent = None, filename = '[TOP LEVEL]'):
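
With the new varlist, every frame that re-raises an ExpansionError appends its own variable name via addVar(), so the final message shows the full dependency chain. Reduced to its essentials (a hypothetical cut-down class, not the full ExpansionError):

    class ExpansionErrorSketch(Exception):
        def __init__(self, varname, msg):
            self.varlist = [varname or ""]
            self.msg = msg
            super().__init__(msg)

        def addVar(self, varname):
            if varname:
                self.varlist.append(varname)

        def __str__(self):
            return self.msg + "\nThe variable dependency chain for the failure is: " \
                   + " -> ".join(self.varlist)

    e = ExpansionErrorSketch("PV", "Failure expanding variable PV")
    e.addVar("PKGV")  # appended as the error propagates outwards
    print(str(e))
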
@@ -239,12 +272,9 @@ class VariableHistory(object):
239 return 272 return
240 if 'op' not in loginfo or not loginfo['op']: 273 if 'op' not in loginfo or not loginfo['op']:
241 loginfo['op'] = 'set' 274 loginfo['op'] = 'set'
242 if 'detail' in loginfo:
243 loginfo['detail'] = str(loginfo['detail'])
244 if 'variable' not in loginfo or 'file' not in loginfo: 275 if 'variable' not in loginfo or 'file' not in loginfo:
245 raise ValueError("record() missing variable or file.") 276 raise ValueError("record() missing variable or file.")
246 var = loginfo['variable'] 277 var = loginfo['variable']
247
248 if var not in self.variables: 278 if var not in self.variables:
249 self.variables[var] = [] 279 self.variables[var] = []
250 if not isinstance(self.variables[var], list): 280 if not isinstance(self.variables[var], list):
@@ -277,7 +307,7 @@ class VariableHistory(object):
277 for (r, override) in d.overridedata[var]: 307 for (r, override) in d.overridedata[var]:
278 for event in self.variable(r): 308 for event in self.variable(r):
279 loginfo = event.copy() 309 loginfo = event.copy()
280 if 'flag' in loginfo and not loginfo['flag'].startswith("_"): 310 if 'flag' in loginfo and not loginfo['flag'].startswith(("_", ":")):
281 continue 311 continue
282 loginfo['variable'] = var 312 loginfo['variable'] = var
283 loginfo['op'] = 'override[%s]:%s' % (override, loginfo['op']) 313 loginfo['op'] = 'override[%s]:%s' % (override, loginfo['op'])
@@ -303,7 +333,8 @@ class VariableHistory(object):
303 flag = '[%s] ' % (event['flag']) 333 flag = '[%s] ' % (event['flag'])
304 else: 334 else:
305 flag = '' 335 flag = ''
306 o.write("# %s %s:%s%s\n# %s\"%s\"\n" % (event['op'], event['file'], event['line'], display_func, flag, re.sub('\n', '\n# ', event['detail']))) 336 o.write("# %s %s:%s%s\n# %s\"%s\"\n" % \
337 (event['op'], event['file'], event['line'], display_func, flag, re.sub('\n', '\n# ', str(event['detail']))))
307 if len(history) > 1: 338 if len(history) > 1:
308 o.write("# pre-expansion value:\n") 339 o.write("# pre-expansion value:\n")
309 o.write('# "%s"\n' % (commentVal)) 340 o.write('# "%s"\n' % (commentVal))
@@ -329,6 +360,16 @@ class VariableHistory(object):
329 lines.append(line) 360 lines.append(line)
330 return lines 361 return lines
331 362
363 def get_variable_refs(self, var):
364 """Return a dict of file/line references"""
365 var_history = self.variable(var)
366 refs = {}
367 for event in var_history:
368 if event['file'] not in refs:
369 refs[event['file']] = []
370 refs[event['file']].append(event['line'])
371 return refs
372
332 def get_variable_items_files(self, var): 373 def get_variable_items_files(self, var):
333 """ 374 """
334 Use variable history to map items added to a list variable and 375 Use variable history to map items added to a list variable and
@@ -342,12 +383,12 @@ class VariableHistory(object):
342 for event in history: 383 for event in history:
343 if 'flag' in event: 384 if 'flag' in event:
344 continue 385 continue
345 if event['op'] == '_remove': 386 if event['op'] == ':remove':
346 continue 387 continue
347 if isset and event['op'] == 'set?': 388 if isset and event['op'] == 'set?':
348 continue 389 continue
349 isset = True 390 isset = True
350 items = d.expand(event['detail']).split() 391 items = d.expand(str(event['detail'])).split()
351 for item in items: 392 for item in items:
352 # This is a little crude but is belt-and-braces to avoid us 393 # This is a little crude but is belt-and-braces to avoid us
353 # having to handle every possible operation type specifically 394 # having to handle every possible operation type specifically
@@ -363,6 +404,23 @@ class VariableHistory(object):
363 else: 404 else:
364 self.variables[var] = [] 405 self.variables[var] = []
365 406
407def _print_rename_error(var, loginfo, renamedvars, fullvar=None):
408 info = ""
409 if "file" in loginfo:
410 info = " file: %s" % loginfo["file"]
411 if "line" in loginfo:
412 info += " line: %s" % loginfo["line"]
413 if fullvar and fullvar != var:
414 info += " referenced as: %s" % fullvar
415 if info:
416 info = " (%s)" % info.strip()
417 renameinfo = renamedvars[var]
418 if " " in renameinfo:
419 # A space signals a string to display instead of a rename
420 bb.erroronce('Variable %s %s%s' % (var, renameinfo, info))
421 else:
422 bb.erroronce('Variable %s has been renamed to %s%s' % (var, renameinfo, info))
423
366class DataSmart(MutableMapping): 424class DataSmart(MutableMapping):
367 def __init__(self): 425 def __init__(self):
368 self.dict = {} 426 self.dict = {}
@@ -370,6 +428,8 @@ class DataSmart(MutableMapping):
370 self.inchistory = IncludeHistory() 428 self.inchistory = IncludeHistory()
371 self.varhistory = VariableHistory(self) 429 self.varhistory = VariableHistory(self)
372 self._tracking = False 430 self._tracking = False
431 self._var_renames = {}
432 self._var_renames.update(bitbake_renamed_vars)
373 433
374 self.expand_cache = {} 434 self.expand_cache = {}
375 435
@@ -391,9 +451,9 @@ class DataSmart(MutableMapping):
391 def expandWithRefs(self, s, varname): 451 def expandWithRefs(self, s, varname):
392 452
393 if not isinstance(s, str): # sanity check 453 if not isinstance(s, str): # sanity check
394 return VariableParse(varname, self, s) 454 return VariableParse(varname, self, s, s)
395 455
396 varparse = VariableParse(varname, self) 456 varparse = VariableParse(varname, self, s)
397 457
398 while s.find('${') != -1: 458 while s.find('${') != -1:
399 olds = s 459 olds = s
@@ -403,14 +463,17 @@ class DataSmart(MutableMapping):
403 s = __expand_python_regexp__.sub(varparse.python_sub, s) 463 s = __expand_python_regexp__.sub(varparse.python_sub, s)
404 except SyntaxError as e: 464 except SyntaxError as e:
405 # Likely unmatched brackets, just don't expand the expression 465 # Likely unmatched brackets, just don't expand the expression
406 if e.msg != "EOL while scanning string literal": 466 if e.msg != "EOL while scanning string literal" and not e.msg.startswith("unterminated string literal"):
407 raise 467 raise
408 if s == olds: 468 if s == olds:
409 break 469 break
410 except ExpansionError: 470 except ExpansionError as e:
471 e.addVar(varname)
411 raise 472 raise
412 except bb.parse.SkipRecipe: 473 except bb.parse.SkipRecipe:
413 raise 474 raise
475 except bb.BBHandledException:
476 raise
414 except Exception as exc: 477 except Exception as exc:
415 tb = sys.exc_info()[2] 478 tb = sys.exc_info()[2]
416 raise ExpansionError(varname, s, exc).with_traceback(tb) from exc 479 raise ExpansionError(varname, s, exc).with_traceback(tb) from exc
@@ -422,24 +485,19 @@ class DataSmart(MutableMapping):
422 def expand(self, s, varname = None): 485 def expand(self, s, varname = None):
423 return self.expandWithRefs(s, varname).value 486 return self.expandWithRefs(s, varname).value
424 487
425 def finalize(self, parent = False):
426 return
427
428 def internal_finalize(self, parent = False):
429 """Performs final steps upon the datastore, including application of overrides"""
430 self.overrides = None
431
432 def need_overrides(self): 488 def need_overrides(self):
433 if self.overrides is not None: 489 if self.overrides is not None:
434 return 490 return
435 if self.inoverride: 491 if self.inoverride:
436 return 492 return
                                                                  493 override_stack = []
437 for count in range(5): 494 for count in range(5):
438 self.inoverride = True 495 self.inoverride = True
439 # Can end up here recursively so setup dummy values 496 # Can end up here recursively so setup dummy values
440 self.overrides = [] 497 self.overrides = []
441 self.overridesset = set() 498 self.overridesset = set()
442 self.overrides = (self.getVar("OVERRIDES") or "").split(":") or [] 499 self.overrides = (self.getVar("OVERRIDES") or "").split(":") or []
                                                                  500 override_stack.append(self.overrides)
443 self.overridesset = set(self.overrides) 501 self.overridesset = set(self.overrides)
444 self.inoverride = False 502 self.inoverride = False
445 self.expand_cache = {} 503 self.expand_cache = {}
@@ -449,7 +507,7 @@ class DataSmart(MutableMapping):
449 self.overrides = newoverrides 507 self.overrides = newoverrides
450 self.overridesset = set(self.overrides) 508 self.overridesset = set(self.overrides)
451 else: 509 else:
452 bb.fatal("Overrides could not be expanded into a stable state after 5 iterations, overrides must be being referenced by other overridden variables in some recursive fashion. Please provide your configuration to bitbake-devel so we can laugh, er, I mean try and understand how to make it work.") 510 bb.fatal("Overrides could not be expanded into a stable state after 5 iterations, overrides must be being referenced by other overridden variables in some recursive fashion. Please provide your configuration to bitbake-devel so we can laugh, er, I mean try and understand how to make it work. The list of failing override expansions: %s" % "\n".join(str(s) for s in override_stack))
453 511
454 def initVar(self, var): 512 def initVar(self, var):
455 self.expand_cache = {} 513 self.expand_cache = {}
@@ -460,27 +518,44 @@ class DataSmart(MutableMapping):
460 dest = self.dict 518 dest = self.dict
461 while dest: 519 while dest:
462 if var in dest: 520 if var in dest:
463 return dest[var], self.overridedata.get(var, None) 521 return dest[var]
464 522
465 if "_data" not in dest: 523 if "_data" not in dest:
466 break 524 break
467 dest = dest["_data"] 525 dest = dest["_data"]
468 return None, self.overridedata.get(var, None) 526 return None
469 527
470 def _makeShadowCopy(self, var): 528 def _makeShadowCopy(self, var):
471 if var in self.dict: 529 if var in self.dict:
472 return 530 return
473 531
474 local_var, _ = self._findVar(var) 532 local_var = self._findVar(var)
475 533
476 if local_var: 534 if local_var:
477 self.dict[var] = copy.copy(local_var) 535 self.dict[var] = copy.copy(local_var)
478 else: 536 else:
479 self.initVar(var) 537 self.initVar(var)
480 538
539 def hasOverrides(self, var):
540 return var in self.overridedata
481 541
482 def setVar(self, var, value, **loginfo): 542 def setVar(self, var, value, **loginfo):
483 #print("var=" + str(var) + " val=" + str(value)) 543 #print("var=" + str(var) + " val=" + str(value))
544
545 if not var.startswith("__anon_") and ("_append" in var or "_prepend" in var or "_remove" in var):
546 info = "%s" % var
547 if "file" in loginfo:
548 info += " file: %s" % loginfo["file"]
549 if "line" in loginfo:
550 info += " line: %s" % loginfo["line"]
551 bb.fatal("Variable %s contains an operation using the old override syntax. Please convert this layer/metadata before attempting to use with a newer bitbake." % info)
552
553 shortvar = var.split(":", 1)[0]
554 if shortvar in self._var_renames:
555 _print_rename_error(shortvar, loginfo, self._var_renames, fullvar=var)
556 # Mark that we have seen a renamed variable
557 self.setVar("_FAILPARSINGERRORHANDLED", True)
558
484 self.expand_cache = {} 559 self.expand_cache = {}
485 parsing=False 560 parsing=False
486 if 'parsing' in loginfo: 561 if 'parsing' in loginfo:
@@ -505,12 +580,10 @@ class DataSmart(MutableMapping):
505 else: 580 else:
506 loginfo['op'] = keyword 581 loginfo['op'] = keyword
507 self.varhistory.record(**loginfo) 582 self.varhistory.record(**loginfo)
508 # todo make sure keyword is not __doc__ or __module__
509 # pay the cookie monster 583 # pay the cookie monster
510 584
511 # more cookies for the cookie monster 585 # more cookies for the cookie monster
512 if '_' in var: 586 self._setvar_update_overrides(base, **loginfo)
513 self._setvar_update_overrides(base, **loginfo)
514 587
515 if base in self.overridevars: 588 if base in self.overridevars:
516 self._setvar_update_overridevars(var, value) 589 self._setvar_update_overridevars(var, value)
@@ -520,27 +593,27 @@ class DataSmart(MutableMapping):
520 self._makeShadowCopy(var) 593 self._makeShadowCopy(var)
521 594
522 if not parsing: 595 if not parsing:
523 if "_append" in self.dict[var]: 596 if ":append" in self.dict[var]:
524 del self.dict[var]["_append"] 597 del self.dict[var][":append"]
525 if "_prepend" in self.dict[var]: 598 if ":prepend" in self.dict[var]:
526 del self.dict[var]["_prepend"] 599 del self.dict[var][":prepend"]
527 if "_remove" in self.dict[var]: 600 if ":remove" in self.dict[var]:
528 del self.dict[var]["_remove"] 601 del self.dict[var][":remove"]
529 if var in self.overridedata: 602 if var in self.overridedata:
530 active = [] 603 active = []
531 self.need_overrides() 604 self.need_overrides()
532 for (r, o) in self.overridedata[var]: 605 for (r, o) in self.overridedata[var]:
533 if o in self.overridesset: 606 if o in self.overridesset:
534 active.append(r) 607 active.append(r)
535 elif "_" in o: 608 elif ":" in o:
536 if set(o.split("_")).issubset(self.overridesset): 609 if set(o.split(":")).issubset(self.overridesset):
537 active.append(r) 610 active.append(r)
538 for a in active: 611 for a in active:
539 self.delVar(a) 612 self.delVar(a)
540 del self.overridedata[var] 613 del self.overridedata[var]
541 614
542 # more cookies for the cookie monster 615 # more cookies for the cookie monster
543 if '_' in var: 616 if ':' in var:
544 self._setvar_update_overrides(var, **loginfo) 617 self._setvar_update_overrides(var, **loginfo)
545 618
546 # setting var 619 # setting var
@@ -562,12 +635,13 @@ class DataSmart(MutableMapping):
562 nextnew.update(vardata.references) 635 nextnew.update(vardata.references)
563 nextnew.update(vardata.contains.keys()) 636 nextnew.update(vardata.contains.keys())
564 new = nextnew 637 new = nextnew
565 self.internal_finalize(True) 638 self.overrides = None
639 self.expand_cache = {}
566 640
567 def _setvar_update_overrides(self, var, **loginfo): 641 def _setvar_update_overrides(self, var, **loginfo):
568 # aka pay the cookie monster 642 # aka pay the cookie monster
569 override = var[var.rfind('_')+1:] 643 override = var[var.rfind(':')+1:]
570 shortvar = var[:var.rfind('_')] 644 shortvar = var[:var.rfind(':')]
571 while override and __override_regexp__.match(override): 645 while override and __override_regexp__.match(override):
572 if shortvar not in self.overridedata: 646 if shortvar not in self.overridedata:
573 self.overridedata[shortvar] = [] 647 self.overridedata[shortvar] = []
@@ -576,9 +650,9 @@ class DataSmart(MutableMapping):
576 self.overridedata[shortvar] = list(self.overridedata[shortvar]) 650 self.overridedata[shortvar] = list(self.overridedata[shortvar])
577 self.overridedata[shortvar].append([var, override]) 651 self.overridedata[shortvar].append([var, override])
578 override = None 652 override = None
579 if "_" in shortvar: 653 if ":" in shortvar:
580 override = var[shortvar.rfind('_')+1:] 654 override = var[shortvar.rfind(':')+1:]
581 shortvar = var[:shortvar.rfind('_')] 655 shortvar = var[:shortvar.rfind(':')]
582 if len(shortvar) == 0: 656 if len(shortvar) == 0:
583 override = None 657 override = None
584 658
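
_setvar_update_overrides() peels override suffixes off the variable name right-to-left using the ':' separator, registering the full variable under each shorter base name. The walk as a standalone function (mirroring the diff, including its reliance on suffixes being lower-case/digit words):

    import re

    override_re = re.compile(r'[a-z0-9]+')

    def split_overrides(var):
        # Returns {base: [(full var, override suffix), ...]}.
        pairs = {}
        override = var[var.rfind(':') + 1:]
        shortvar = var[:var.rfind(':')]
        while override and override_re.match(override):
            pairs.setdefault(shortvar, []).append((var, override))
            override = None
            if ":" in shortvar:
                override = var[shortvar.rfind(':') + 1:]
                shortvar = var[:shortvar.rfind(':')]
                if len(shortvar) == 0:
                    override = None
        return pairs

    # split_overrides("FOO:x86:linux") ->
    # {"FOO:x86": [("FOO:x86:linux", "linux")],
    #  "FOO":     [("FOO:x86:linux", "x86:linux")]}
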
@@ -602,10 +676,11 @@ class DataSmart(MutableMapping):
602 self.varhistory.record(**loginfo) 676 self.varhistory.record(**loginfo)
603 self.setVar(newkey, val, ignore=True, parsing=True) 677 self.setVar(newkey, val, ignore=True, parsing=True)
604 678
605 for i in (__setvar_keyword__): 679 srcflags = self.getVarFlags(key, False, True) or {}
606 src = self.getVarFlag(key, i, False) 680 for i in srcflags:
607 if src is None: 681 if i not in (__setvar_keyword__):
608 continue 682 continue
683 src = srcflags[i]
609 684
610 dest = self.getVarFlag(newkey, i, False) or [] 685 dest = self.getVarFlag(newkey, i, False) or []
611 dest.extend(src) 686 dest.extend(src)
@@ -617,7 +692,7 @@ class DataSmart(MutableMapping):
617 self.overridedata[newkey].append([v.replace(key, newkey), o]) 692 self.overridedata[newkey].append([v.replace(key, newkey), o])
618 self.renameVar(v, v.replace(key, newkey)) 693 self.renameVar(v, v.replace(key, newkey))
619 694
620 if '_' in newkey and val is None: 695 if ':' in newkey and val is None:
621 self._setvar_update_overrides(newkey, **loginfo) 696 self._setvar_update_overrides(newkey, **loginfo)
622 697
623 loginfo['variable'] = key 698 loginfo['variable'] = key
@@ -629,12 +704,12 @@ class DataSmart(MutableMapping):
629 def appendVar(self, var, value, **loginfo): 704 def appendVar(self, var, value, **loginfo):
630 loginfo['op'] = 'append' 705 loginfo['op'] = 'append'
631 self.varhistory.record(**loginfo) 706 self.varhistory.record(**loginfo)
632 self.setVar(var + "_append", value, ignore=True, parsing=True) 707 self.setVar(var + ":append", value, ignore=True, parsing=True)
633 708
634 def prependVar(self, var, value, **loginfo): 709 def prependVar(self, var, value, **loginfo):
635 loginfo['op'] = 'prepend' 710 loginfo['op'] = 'prepend'
636 self.varhistory.record(**loginfo) 711 self.varhistory.record(**loginfo)
637 self.setVar(var + "_prepend", value, ignore=True, parsing=True) 712 self.setVar(var + ":prepend", value, ignore=True, parsing=True)
638 713
639 def delVar(self, var, **loginfo): 714 def delVar(self, var, **loginfo):
640 self.expand_cache = {} 715 self.expand_cache = {}
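The override separator used by the datastore moves from '_' to ':' here, and appendVar()/prependVar() now store ':append'/':prepend' keys. A minimal sketch of the resulting behaviour, assuming a standalone DataSmart instance behaves like the in-tree one (variable and override names are illustrative):

    from bb.data_smart import DataSmart

    d = DataSmart()
    d.setVar("OVERRIDES", "qemuarm")
    d.setVar("FOO", "base")
    d.appendVar("FOO", " extra")          # stored internally as FOO:append
    d.setVar("FOO:qemuarm", "arm-value")  # override-conditional assignment
    print(d.getVar("FOO"))                # overrides resolve first, then appends;
                                          # expected: "arm-value extra"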
@@ -645,10 +720,10 @@ class DataSmart(MutableMapping):
645 self.dict[var] = {} 720 self.dict[var] = {}
646 if var in self.overridedata: 721 if var in self.overridedata:
647 del self.overridedata[var] 722 del self.overridedata[var]
648 if '_' in var: 723 if ':' in var:
649 override = var[var.rfind('_')+1:] 724 override = var[var.rfind(':')+1:]
650 shortvar = var[:var.rfind('_')] 725 shortvar = var[:var.rfind(':')]
651 while override and override.islower(): 726 while override and __override_regexp__.match(override):
652 try: 727 try:
653 if shortvar in self.overridedata: 728 if shortvar in self.overridedata:
654 # Force CoW by recreating the list first 729 # Force CoW by recreating the list first
@@ -657,15 +732,23 @@ class DataSmart(MutableMapping):
657 except ValueError as e: 732 except ValueError as e:
658 pass 733 pass
659 override = None 734 override = None
660 if "_" in shortvar: 735 if ":" in shortvar:
661 override = var[shortvar.rfind('_')+1:] 736 override = var[shortvar.rfind(':')+1:]
662 shortvar = var[:shortvar.rfind('_')] 737 shortvar = var[:shortvar.rfind(':')]
663 if len(shortvar) == 0: 738 if len(shortvar) == 0:
664 override = None 739 override = None
665 740
666 def setVarFlag(self, var, flag, value, **loginfo): 741 def setVarFlag(self, var, flag, value, **loginfo):
667 self.expand_cache = {} 742 self.expand_cache = {}
668 743
744 if var == "BB_RENAMED_VARIABLES":
745 self._var_renames[flag] = value
746
747 if var in self._var_renames:
748 _print_rename_error(var, loginfo, self._var_renames)
749 # Mark that we have seen a renamed variable
750 self.setVar("_FAILPARSINGERRORHANDLED", True)
751
669 if 'op' not in loginfo: 752 if 'op' not in loginfo:
670 loginfo['op'] = "set" 753 loginfo['op'] = "set"
671 loginfo['flag'] = flag 754 loginfo['flag'] = flag
@@ -674,7 +757,7 @@ class DataSmart(MutableMapping):
674 self._makeShadowCopy(var) 757 self._makeShadowCopy(var)
675 self.dict[var][flag] = value 758 self.dict[var][flag] = value
676 759
677 if flag == "_defaultval" and '_' in var: 760 if flag == "_defaultval" and ':' in var:
678 self._setvar_update_overrides(var, **loginfo) 761 self._setvar_update_overrides(var, **loginfo)
679 if flag == "_defaultval" and var in self.overridevars: 762 if flag == "_defaultval" and var in self.overridevars:
680 self._setvar_update_overridevars(var, value) 763 self._setvar_update_overridevars(var, value)
@@ -695,22 +778,27 @@ class DataSmart(MutableMapping):
695 return None 778 return None
696 cachename = var + "[" + flag + "]" 779 cachename = var + "[" + flag + "]"
697 780
781 if not expand and retparser and cachename in self.expand_cache:
782 return self.expand_cache[cachename].unexpanded_value, self.expand_cache[cachename]
783
698 if expand and cachename in self.expand_cache: 784 if expand and cachename in self.expand_cache:
699 return self.expand_cache[cachename].value 785 return self.expand_cache[cachename].value
700 786
701 local_var, overridedata = self._findVar(var) 787 local_var = self._findVar(var)
702 value = None 788 value = None
703 removes = set() 789 removes = set()
704 if flag == "_content" and overridedata is not None and not parsing: 790 if flag == "_content" and not parsing:
791 overridedata = self.overridedata.get(var, None)
792 if flag == "_content" and not parsing and overridedata is not None:
705 match = False 793 match = False
706 active = {} 794 active = {}
707 self.need_overrides() 795 self.need_overrides()
708 for (r, o) in overridedata: 796 for (r, o) in overridedata:
709 # What about double overrides both with "_" in the name? 797 # FIXME What about double overrides both with "_" in the name?
710 if o in self.overridesset: 798 if o in self.overridesset:
711 active[o] = r 799 active[o] = r
712 elif "_" in o: 800 elif ":" in o:
713 if set(o.split("_")).issubset(self.overridesset): 801 if set(o.split(":")).issubset(self.overridesset):
714 active[o] = r 802 active[o] = r
715 803
716 mod = True 804 mod = True
@@ -718,10 +806,10 @@ class DataSmart(MutableMapping):
718 mod = False 806 mod = False
719 for o in self.overrides: 807 for o in self.overrides:
720 for a in active.copy(): 808 for a in active.copy():
721 if a.endswith("_" + o): 809 if a.endswith(":" + o):
722 t = active[a] 810 t = active[a]
723 del active[a] 811 del active[a]
724 active[a.replace("_" + o, "")] = t 812 active[a.replace(":" + o, "")] = t
725 mod = True 813 mod = True
726 elif a == o: 814 elif a == o:
727 match = active[a] 815 match = active[a]
@@ -738,33 +826,35 @@ class DataSmart(MutableMapping):
738 value = copy.copy(local_var[flag]) 826 value = copy.copy(local_var[flag])
739 elif flag == "_content" and "_defaultval" in local_var and not noweakdefault: 827 elif flag == "_content" and "_defaultval" in local_var and not noweakdefault:
740 value = copy.copy(local_var["_defaultval"]) 828 value = copy.copy(local_var["_defaultval"])
829 elif "_defaultval_flag_"+flag in local_var and not noweakdefault:
830 value = copy.copy(local_var["_defaultval_flag_"+flag])
741 831
742 832
743 if flag == "_content" and local_var is not None and "_append" in local_var and not parsing: 833 if flag == "_content" and local_var is not None and ":append" in local_var and not parsing:
744 if not value:
745 value = ""
746 self.need_overrides() 834 self.need_overrides()
747 for (r, o) in local_var["_append"]: 835 for (r, o) in local_var[":append"]:
748 match = True 836 match = True
749 if o: 837 if o:
750 for o2 in o.split("_"): 838 for o2 in o.split(":"):
751 if not o2 in self.overrides: 839 if not o2 in self.overrides:
752 match = False 840 match = False
753 if match: 841 if match:
842 if value is None:
843 value = ""
754 value = value + r 844 value = value + r
755 845
756 if flag == "_content" and local_var is not None and "_prepend" in local_var and not parsing: 846 if flag == "_content" and local_var is not None and ":prepend" in local_var and not parsing:
757 if not value:
758 value = ""
759 self.need_overrides() 847 self.need_overrides()
760 for (r, o) in local_var["_prepend"]: 848 for (r, o) in local_var[":prepend"]:
761 849
762 match = True 850 match = True
763 if o: 851 if o:
764 for o2 in o.split("_"): 852 for o2 in o.split(":"):
765 if not o2 in self.overrides: 853 if not o2 in self.overrides:
766 match = False 854 match = False
767 if match: 855 if match:
856 if value is None:
857 value = ""
768 value = r + value 858 value = r + value
769 859
770 parser = None 860 parser = None
@@ -773,12 +863,12 @@ class DataSmart(MutableMapping):
773 if expand: 863 if expand:
774 value = parser.value 864 value = parser.value
775 865
776 if value and flag == "_content" and local_var is not None and "_remove" in local_var and not parsing: 866 if value and flag == "_content" and local_var is not None and ":remove" in local_var and not parsing:
777 self.need_overrides() 867 self.need_overrides()
778 for (r, o) in local_var["_remove"]: 868 for (r, o) in local_var[":remove"]:
779 match = True 869 match = True
780 if o: 870 if o:
781 for o2 in o.split("_"): 871 for o2 in o.split(":"):
782 if not o2 in self.overrides: 872 if not o2 in self.overrides:
783 match = False 873 match = False
784 if match: 874 if match:
@@ -791,7 +881,7 @@ class DataSmart(MutableMapping):
791 expanded_removes[r] = self.expand(r).split() 881 expanded_removes[r] = self.expand(r).split()
792 882
793 parser.removes = set() 883 parser.removes = set()
794 val = "" 884 val = []
795 for v in __whitespace_split__.split(parser.value): 885 for v in __whitespace_split__.split(parser.value):
796 skip = False 886 skip = False
797 for r in removes: 887 for r in removes:
@@ -800,8 +890,8 @@ class DataSmart(MutableMapping):
800 skip = True 890 skip = True
801 if skip: 891 if skip:
802 continue 892 continue
803 val = val + v 893 val.append(v)
804 parser.value = val 894 parser.value = "".join(val)
805 if expand: 895 if expand:
806 value = parser.value 896 value = parser.value
807 897
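The ':remove' handling above now collects surviving tokens into a list and joins them once rather than repeatedly concatenating strings; the observable semantics should be unchanged. A hedged sketch (values illustrative):

    from bb.data_smart import DataSmart

    d = DataSmart()
    d.setVar("DEPENDS", "zlib openssl curl")
    d.setVar("DEPENDS:remove", "openssl")
    # whitespace-separated tokens matching the remove value are dropped;
    # the separators around the survivors are kept
    print(d.getVar("DEPENDS"))  # e.g. "zlib  curl"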
@@ -816,7 +906,7 @@ class DataSmart(MutableMapping):
816 def delVarFlag(self, var, flag, **loginfo): 906 def delVarFlag(self, var, flag, **loginfo):
817 self.expand_cache = {} 907 self.expand_cache = {}
818 908
819 local_var, _ = self._findVar(var) 909 local_var = self._findVar(var)
820 if not local_var: 910 if not local_var:
821 return 911 return
822 if not var in self.dict: 912 if not var in self.dict:
@@ -829,6 +919,8 @@ class DataSmart(MutableMapping):
829 self.varhistory.record(**loginfo) 919 self.varhistory.record(**loginfo)
830 920
831 del self.dict[var][flag] 921 del self.dict[var][flag]
922 if ("_defaultval_flag_" + flag) in self.dict[var]:
923 del self.dict[var]["_defaultval_flag_" + flag]
832 924
833 def appendVarFlag(self, var, flag, value, **loginfo): 925 def appendVarFlag(self, var, flag, value, **loginfo):
834 loginfo['op'] = 'append' 926 loginfo['op'] = 'append'
@@ -859,21 +951,26 @@ class DataSmart(MutableMapping):
859 self.dict[var][i] = flags[i] 951 self.dict[var][i] = flags[i]
860 952
861 def getVarFlags(self, var, expand = False, internalflags=False): 953 def getVarFlags(self, var, expand = False, internalflags=False):
862 local_var, _ = self._findVar(var) 954 local_var = self._findVar(var)
863 flags = {} 955 flags = {}
864 956
865 if local_var: 957 if local_var:
866 for i in local_var: 958 for i, val in local_var.items():
867 if i.startswith("_") and not internalflags: 959 if i.startswith("_defaultval_flag_") and not internalflags:
960 i = i[len("_defaultval_flag_"):]
961 if i not in local_var:
962 flags[i] = val
963 elif i.startswith(("_", ":")) and not internalflags:
868 continue 964 continue
869 flags[i] = local_var[i] 965 else:
966 flags[i] = val
967
870 if expand and i in expand: 968 if expand and i in expand:
871 flags[i] = self.expand(flags[i], var + "[" + i + "]") 969 flags[i] = self.expand(flags[i], var + "[" + i + "]")
872 if len(flags) == 0: 970 if len(flags) == 0:
873 return None 971 return None
874 return flags 972 return flags
875 973
876
877 def delVarFlags(self, var, **loginfo): 974 def delVarFlags(self, var, **loginfo):
878 self.expand_cache = {} 975 self.expand_cache = {}
879 if not var in self.dict: 976 if not var in self.dict:
@@ -905,6 +1002,7 @@ class DataSmart(MutableMapping):
905 data.inchistory = self.inchistory.copy() 1002 data.inchistory = self.inchistory.copy()
906 1003
907 data._tracking = self._tracking 1004 data._tracking = self._tracking
1005 data._var_renames = self._var_renames
908 1006
909 data.overrides = None 1007 data.overrides = None
910 data.overridevars = copy.copy(self.overridevars) 1008 data.overridevars = copy.copy(self.overridevars)
@@ -927,7 +1025,7 @@ class DataSmart(MutableMapping):
927 value = self.getVar(variable, False) 1025 value = self.getVar(variable, False)
928 for key in keys: 1026 for key in keys:
929 referrervalue = self.getVar(key, False) 1027 referrervalue = self.getVar(key, False)
930 if referrervalue and ref in referrervalue: 1028 if referrervalue and isinstance(referrervalue, str) and ref in referrervalue:
931 self.setVar(key, referrervalue.replace(ref, value)) 1029 self.setVar(key, referrervalue.replace(ref, value))
932 1030
933 def localkeys(self): 1031 def localkeys(self):
@@ -962,8 +1060,8 @@ class DataSmart(MutableMapping):
962 for (r, o) in self.overridedata[var]: 1060 for (r, o) in self.overridedata[var]:
963 if o in self.overridesset: 1061 if o in self.overridesset:
964 overrides.add(var) 1062 overrides.add(var)
965 elif "_" in o: 1063 elif ":" in o:
966 if set(o.split("_")).issubset(self.overridesset): 1064 if set(o.split(":")).issubset(self.overridesset):
967 overrides.add(var) 1065 overrides.add(var)
968 1066
969 for k in keylist(self.dict): 1067 for k in keylist(self.dict):
@@ -993,10 +1091,10 @@ class DataSmart(MutableMapping):
993 d = self.createCopy() 1091 d = self.createCopy()
994 bb.data.expandKeys(d) 1092 bb.data.expandKeys(d)
995 1093
996 config_whitelist = set((d.getVar("BB_HASHCONFIG_WHITELIST") or "").split()) 1094 config_ignore_vars = set((d.getVar("BB_HASHCONFIG_IGNORE_VARS") or "").split())
997 keys = set(key for key in iter(d) if not key.startswith("__")) 1095 keys = set(key for key in iter(d) if not key.startswith("__"))
998 for key in keys: 1096 for key in keys:
999 if key in config_whitelist: 1097 if key in config_ignore_vars:
1000 continue 1098 continue
1001 1099
1002 value = d.getVar(key, False) or "" 1100 value = d.getVar(key, False) or ""
@@ -1022,5 +1120,10 @@ class DataSmart(MutableMapping):
1022 value = d.getVar(i, False) or "" 1120 value = d.getVar(i, False) or ""
1023 data.update({i:value}) 1121 data.update({i:value})
1024 1122
1123 moddeps = bb.codeparser.modulecode_deps
1124 for dep in sorted(moddeps):
1125 # Ignore visitor code, sort sets
1126 data.update({'moddep[%s]' % dep : [sorted(moddeps[dep][0]), sorted(moddeps[dep][1]), sorted(moddeps[dep][2]), sorted(moddeps[dep][3]), moddeps[dep][4]]})
1127
1025 data_str = str([(k, data[k]) for k in sorted(data.keys())]) 1128 data_str = str([(k, data[k]) for k in sorted(data.keys())])
1026 return hashlib.sha256(data_str.encode("utf-8")).hexdigest() 1129 return hashlib.sha256(data_str.encode("utf-8")).hexdigest()
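The exclusion list consulted when hashing the base configuration is renamed from BB_HASHCONFIG_WHITELIST to BB_HASHCONFIG_IGNORE_VARS. A small sketch of reading it under the new name (the value is illustrative):

    from bb.data_smart import DataSmart

    d = DataSmart()
    d.setVar("BB_HASHCONFIG_IGNORE_VARS", "SSH_AGENT_PID BB_SERVER_TIMEOUT")
    ignored = set((d.getVar("BB_HASHCONFIG_IGNORE_VARS") or "").split())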
diff --git a/bitbake/lib/bb/event.py b/bitbake/lib/bb/event.py
index 23e1f3187b..b29f0a5568 100644
--- a/bitbake/lib/bb/event.py
+++ b/bitbake/lib/bb/event.py
@@ -19,7 +19,6 @@ import sys
19import threading 19import threading
20import traceback 20import traceback
21 21
22import bb.exceptions
23import bb.utils 22import bb.utils
24 23
25# This is the pid for which we should generate the event. This is set when 24# This is the pid for which we should generate the event. This is set when
@@ -40,7 +39,7 @@ class HeartbeatEvent(Event):
40 """Triggered at regular time intervals of 10 seconds. Other events can fire much more often 39 """Triggered at regular time intervals of 10 seconds. Other events can fire much more often
41 (runQueueTaskStarted when there are many short tasks) or not at all for long periods 40 (runQueueTaskStarted when there are many short tasks) or not at all for long periods
42 of time (again runQueueTaskStarted, when there is just one long-running task), so this 41 of time (again runQueueTaskStarted, when there is just one long-running task), so this
43 event is more suitable for doing some task-independent work occassionally.""" 42 event is more suitable for doing some task-independent work occasionally."""
44 def __init__(self, time): 43 def __init__(self, time):
45 Event.__init__(self) 44 Event.__init__(self)
46 self.time = time 45 self.time = time
@@ -68,29 +67,39 @@ _catchall_handlers = {}
68_eventfilter = None 67_eventfilter = None
69_uiready = False 68_uiready = False
70_thread_lock = threading.Lock() 69_thread_lock = threading.Lock()
71_thread_lock_enabled = False 70_heartbeat_enabled = False
72 71_should_exit = threading.Event()
73if hasattr(__builtins__, '__setitem__'):
74 builtins = __builtins__
75else:
76 builtins = __builtins__.__dict__
77 72
78def enable_threadlock(): 73def enable_threadlock():
79 global _thread_lock_enabled 74 # Always needed now
80 _thread_lock_enabled = True 75 return
81 76
82def disable_threadlock(): 77def disable_threadlock():
83 global _thread_lock_enabled 78 # Always needed now
84 _thread_lock_enabled = False 79 return
80
81def enable_heartbeat():
82 global _heartbeat_enabled
83 _heartbeat_enabled = True
84
85def disable_heartbeat():
86 global _heartbeat_enabled
87 _heartbeat_enabled = False
88
89#
90# In long running code, this function should be called periodically
91# to check if we should exit due to an interruption (e.g. Ctrl+C from the UI)
92#
93def check_for_interrupts(d):
94 global _should_exit
95 if _should_exit.is_set():
96 bb.warn("Exiting due to interrupt.")
97 raise bb.BBHandledException()
85 98
86def execute_handler(name, handler, event, d): 99def execute_handler(name, handler, event, d):
87 event.data = d 100 event.data = d
88 addedd = False
89 if 'd' not in builtins:
90 builtins['d'] = d
91 addedd = True
92 try: 101 try:
93 ret = handler(event) 102 ret = handler(event, d)
94 except (bb.parse.SkipRecipe, bb.BBHandledException): 103 except (bb.parse.SkipRecipe, bb.BBHandledException):
95 raise 104 raise
96 except Exception: 105 except Exception:
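check_for_interrupts() gives long-running metadata code a cooperative cancellation point. A hedged sketch of the intended call pattern ('handle' and the item loop are illustrative stand-ins):

    import bb
    import bb.event

    def process_items(d, items):
        for item in items:
            # raises bb.BBHandledException if the UI has requested an exit
            bb.event.check_for_interrupts(d)
            handle(item)  # illustrative work function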
@@ -104,8 +113,7 @@ def execute_handler(name, handler, event, d):
104 raise 113 raise
105 finally: 114 finally:
106 del event.data 115 del event.data
107 if addedd: 116
108 del builtins['d']
109 117
110def fire_class_handlers(event, d): 118def fire_class_handlers(event, d):
111 if isinstance(event, logging.LogRecord): 119 if isinstance(event, logging.LogRecord):
@@ -118,7 +126,7 @@ def fire_class_handlers(event, d):
118 if _eventfilter: 126 if _eventfilter:
119 if not _eventfilter(name, handler, event, d): 127 if not _eventfilter(name, handler, event, d):
120 continue 128 continue
121 if d and not name in (d.getVar("__BBHANDLERS_MC") or []): 129 if d is not None and not name in (d.getVar("__BBHANDLERS_MC") or set()):
122 continue 130 continue
123 execute_handler(name, handler, event, d) 131 execute_handler(name, handler, event, d)
124 132
@@ -132,8 +140,14 @@ def print_ui_queue():
132 if not _uiready: 140 if not _uiready:
133 from bb.msg import BBLogFormatter 141 from bb.msg import BBLogFormatter
134 # Flush any existing buffered content 142 # Flush any existing buffered content
135 sys.stdout.flush() 143 try:
136 sys.stderr.flush() 144 sys.stdout.flush()
145 except:
146 pass
147 try:
148 sys.stderr.flush()
149 except:
150 pass
137 stdout = logging.StreamHandler(sys.stdout) 151 stdout = logging.StreamHandler(sys.stdout)
138 stderr = logging.StreamHandler(sys.stderr) 152 stderr = logging.StreamHandler(sys.stderr)
139 formatter = BBLogFormatter("%(levelname)s: %(message)s") 153 formatter = BBLogFormatter("%(levelname)s: %(message)s")
@@ -174,36 +188,38 @@ def print_ui_queue():
174 188
175def fire_ui_handlers(event, d): 189def fire_ui_handlers(event, d):
176 global _thread_lock 190 global _thread_lock
177 global _thread_lock_enabled
178 191
179 if not _uiready: 192 if not _uiready:
180 # No UI handlers registered yet, queue up the messages 193 # No UI handlers registered yet, queue up the messages
181 ui_queue.append(event) 194 ui_queue.append(event)
182 return 195 return
183 196
184 if _thread_lock_enabled: 197 with bb.utils.lock_timeout_nocheck(_thread_lock) as lock:
185 _thread_lock.acquire() 198 if not lock:
186 199 # If we can't get the lock, we may be recursively called, queue and return
187 errors = [] 200 ui_queue.append(event)
188 for h in _ui_handlers: 201 return
189 #print "Sending event %s" % event
190 try:
191 if not _ui_logfilters[h].filter(event):
192 continue
193 # We use pickle here since it better handles object instances
194 # which xmlrpc's marshaller does not. Events *must* be serializable
195 # by pickle.
196 if hasattr(_ui_handlers[h].event, "sendpickle"):
197 _ui_handlers[h].event.sendpickle((pickle.dumps(event)))
198 else:
199 _ui_handlers[h].event.send(event)
200 except:
201 errors.append(h)
202 for h in errors:
203 del _ui_handlers[h]
204 202
205 if _thread_lock_enabled: 203 errors = []
206 _thread_lock.release() 204 for h in _ui_handlers:
205 #print "Sending event %s" % event
206 try:
207 if not _ui_logfilters[h].filter(event):
208 continue
209 # We use pickle here since it better handles object instances
210 # which xmlrpc's marshaller does not. Events *must* be serializable
211 # by pickle.
212 if hasattr(_ui_handlers[h].event, "sendpickle"):
213 _ui_handlers[h].event.sendpickle((pickle.dumps(event)))
214 else:
215 _ui_handlers[h].event.send(event)
216 except:
217 errors.append(h)
218 for h in errors:
219 del _ui_handlers[h]
220
221 while ui_queue:
222 fire_ui_handlers(ui_queue.pop(), d)
207 223
208def fire(event, d): 224def fire(event, d):
209 """Fire off an Event""" 225 """Fire off an Event"""
@@ -232,26 +248,31 @@ noop = lambda _: None
232def register(name, handler, mask=None, filename=None, lineno=None, data=None): 248def register(name, handler, mask=None, filename=None, lineno=None, data=None):
233 """Register an Event handler""" 249 """Register an Event handler"""
234 250
235 if data and data.getVar("BB_CURRENT_MC"): 251 if data is not None and data.getVar("BB_CURRENT_MC"):
236 mc = data.getVar("BB_CURRENT_MC") 252 mc = data.getVar("BB_CURRENT_MC")
237 name = '%s%s' % (mc.replace('-', '_'), name) 253 name = '%s%s' % (mc.replace('-', '_'), name)
238 254
239 # already registered 255 # already registered
240 if name in _handlers: 256 if name in _handlers:
257 if data is not None:
258 bbhands_mc = (data.getVar("__BBHANDLERS_MC") or set())
259 bbhands_mc.add(name)
260 data.setVar("__BBHANDLERS_MC", bbhands_mc)
241 return AlreadyRegistered 261 return AlreadyRegistered
242 262
243 if handler is not None: 263 if handler is not None:
244 # handle string containing python code 264 # handle string containing python code
245 if isinstance(handler, str): 265 if isinstance(handler, str):
246 tmp = "def %s(e):\n%s" % (name, handler) 266 tmp = "def %s(e, d):\n%s" % (name, handler)
267 # Inject empty lines to make code match lineno in filename
268 if lineno is not None:
269 tmp = "\n" * (lineno-1) + tmp
247 try: 270 try:
248 code = bb.methodpool.compile_cache(tmp) 271 code = bb.methodpool.compile_cache(tmp)
249 if not code: 272 if not code:
250 if filename is None: 273 if filename is None:
251 filename = "%s(e)" % name 274 filename = "%s(e, d)" % name
252 code = compile(tmp, filename, "exec", ast.PyCF_ONLY_AST) 275 code = compile(tmp, filename, "exec", ast.PyCF_ONLY_AST)
253 if lineno is not None:
254 ast.increment_lineno(code, lineno-1)
255 code = compile(code, filename, "exec") 276 code = compile(code, filename, "exec")
256 bb.methodpool.compile_cache_add(tmp, code) 277 bb.methodpool.compile_cache_add(tmp, code)
257 except SyntaxError: 278 except SyntaxError:
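Handlers now receive the datastore as an explicit second argument rather than through a 'd' builtin, for string and callable handlers alike. A sketch of registering a callable under the new signature (the handler name, mask and datastore 'd' are assumed/illustrative):

    import bb
    import bb.event

    def my_handler(e, d):  # note the explicit datastore parameter
        bb.note("saw %s for %s" % (e.__class__.__name__, d.getVar("FILE")))

    # 'd' is assumed to be an existing datastore
    bb.event.register("my_handler", my_handler,
                      mask=["bb.event.RecipeParsed"], data=d)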
@@ -274,16 +295,16 @@ def register(name, handler, mask=None, filename=None, lineno=None, data=None):
274 _event_handler_map[m] = {} 295 _event_handler_map[m] = {}
275 _event_handler_map[m][name] = True 296 _event_handler_map[m][name] = True
276 297
277 if data: 298 if data is not None:
278 bbhands_mc = (data.getVar("__BBHANDLERS_MC") or []) 299 bbhands_mc = (data.getVar("__BBHANDLERS_MC") or set())
279 bbhands_mc.append(name) 300 bbhands_mc.add(name)
280 data.setVar("__BBHANDLERS_MC", bbhands_mc) 301 data.setVar("__BBHANDLERS_MC", bbhands_mc)
281 302
282 return Registered 303 return Registered
283 304
284def remove(name, handler, data=None): 305def remove(name, handler, data=None):
285 """Remove an Event handler""" 306 """Remove an Event handler"""
286 if data: 307 if data is not None:
287 if data.getVar("BB_CURRENT_MC"): 308 if data.getVar("BB_CURRENT_MC"):
288 mc = data.getVar("BB_CURRENT_MC") 309 mc = data.getVar("BB_CURRENT_MC")
289 name = '%s%s' % (mc.replace('-', '_'), name) 310 name = '%s%s' % (mc.replace('-', '_'), name)
@@ -295,8 +316,8 @@ def remove(name, handler, data=None):
295 if name in _event_handler_map[event]: 316 if name in _event_handler_map[event]:
296 _event_handler_map[event].pop(name) 317 _event_handler_map[event].pop(name)
297 318
298 if data: 319 if data is not None:
299 bbhands_mc = (data.getVar("__BBHANDLERS_MC") or []) 320 bbhands_mc = (data.getVar("__BBHANDLERS_MC") or set())
300 if name in bbhands_mc: 321 if name in bbhands_mc:
301 bbhands_mc.remove(name) 322 bbhands_mc.remove(name)
302 data.setVar("__BBHANDLERS_MC", bbhands_mc) 323 data.setVar("__BBHANDLERS_MC", bbhands_mc)
@@ -313,21 +334,23 @@ def set_eventfilter(func):
313 _eventfilter = func 334 _eventfilter = func
314 335
315def register_UIHhandler(handler, mainui=False): 336def register_UIHhandler(handler, mainui=False):
316 bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1 337 with bb.utils.lock_timeout(_thread_lock):
317 _ui_handlers[_ui_handler_seq] = handler 338 bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1
318 level, debug_domains = bb.msg.constructLogOptions() 339 _ui_handlers[_ui_handler_seq] = handler
319 _ui_logfilters[_ui_handler_seq] = UIEventFilter(level, debug_domains) 340 level, debug_domains = bb.msg.constructLogOptions()
320 if mainui: 341 _ui_logfilters[_ui_handler_seq] = UIEventFilter(level, debug_domains)
321 global _uiready 342 if mainui:
322 _uiready = _ui_handler_seq 343 global _uiready
323 return _ui_handler_seq 344 _uiready = _ui_handler_seq
345 return _ui_handler_seq
324 346
325def unregister_UIHhandler(handlerNum, mainui=False): 347def unregister_UIHhandler(handlerNum, mainui=False):
326 if mainui: 348 if mainui:
327 global _uiready 349 global _uiready
328 _uiready = False 350 _uiready = False
329 if handlerNum in _ui_handlers: 351 with bb.utils.lock_timeout(_thread_lock):
330 del _ui_handlers[handlerNum] 352 if handlerNum in _ui_handlers:
353 del _ui_handlers[handlerNum]
331 return 354 return
332 355
333def get_uihandler(): 356def get_uihandler():
@@ -408,6 +431,16 @@ class RecipeEvent(Event):
408 self.fn = fn 431 self.fn = fn
409 Event.__init__(self) 432 Event.__init__(self)
410 433
434class RecipePreDeferredInherits(RecipeEvent):
435 """
436 Called before deferred inherits are processed so that code can, for example, snoop on class extensions
437 Limitations: It won't see inherits of inherited classes and the data is unexpanded
438 """
439 def __init__(self, fn, inherits):
440 self.fn = fn
441 self.inherits = inherits
442 Event.__init__(self)
443
411class RecipePreFinalise(RecipeEvent): 444class RecipePreFinalise(RecipeEvent):
412 """ Recipe Parsing Complete but not yet finalised""" 445 """ Recipe Parsing Complete but not yet finalised"""
413 446
@@ -482,7 +515,7 @@ class BuildCompleted(BuildBase, OperationCompleted):
482 BuildBase.__init__(self, n, p, failures) 515 BuildBase.__init__(self, n, p, failures)
483 516
484class DiskFull(Event): 517class DiskFull(Event):
485 """Disk full case build aborted""" 518 """Disk full case build halted"""
486 def __init__(self, dev, type, freespace, mountpoint): 519 def __init__(self, dev, type, freespace, mountpoint):
487 Event.__init__(self) 520 Event.__init__(self)
488 self._dev = dev 521 self._dev = dev
@@ -666,6 +699,17 @@ class ReachableStamps(Event):
666 Event.__init__(self) 699 Event.__init__(self)
667 self.stamps = stamps 700 self.stamps = stamps
668 701
702class StaleSetSceneTasks(Event):
703 """
704 An event listing setscene tasks which are 'stale' and will
705 be rerun. The metadata may use this event to clean up stale data.
706 tasks is a mapping of tasks and matching stale stamps.
707 """
708
709 def __init__(self, tasks):
710 Event.__init__(self)
711 self.tasks = tasks
712
669class FilesMatchingFound(Event): 713class FilesMatchingFound(Event):
670 """ 714 """
671 Event when a list of files matching the supplied pattern has 715 Event when a list of files matching the supplied pattern has
@@ -732,13 +776,7 @@ class LogHandler(logging.Handler):
732 776
733 def emit(self, record): 777 def emit(self, record):
734 if record.exc_info: 778 if record.exc_info:
735 etype, value, tb = record.exc_info 779 record.bb_exc_formatted = traceback.format_exception(*record.exc_info)
736 if hasattr(tb, 'tb_next'):
737 tb = list(bb.exceptions.extract_traceback(tb, context=3))
738 # Need to turn the value into something the logging system can pickle
739 record.bb_exc_info = (etype, value, tb)
740 record.bb_exc_formatted = bb.exceptions.format_exception(etype, value, tb, limit=5)
741 value = str(value)
742 record.exc_info = None 780 record.exc_info = None
743 fire(record, None) 781 fire(record, None)
744 782
@@ -749,7 +787,7 @@ class LogHandler(logging.Handler):
749class MetadataEvent(Event): 787class MetadataEvent(Event):
750 """ 788 """
751 Generic event targeted at OE-Core classes 789
752 to report information during asynchrous execution 790 to report information during asynchronous execution
753 """ 791 """
754 def __init__(self, eventtype, eventdata): 792 def __init__(self, eventtype, eventdata):
755 Event.__init__(self) 793 Event.__init__(self)
@@ -830,3 +868,19 @@ class FindSigInfoResult(Event):
830 def __init__(self, result): 868 def __init__(self, result):
831 Event.__init__(self) 869 Event.__init__(self)
832 self.result = result 870 self.result = result
871
872class GetTaskSignatureResult(Event):
873 """
874 Event to return results from GetTaskSignatures command
875 """
876 def __init__(self, sig):
877 Event.__init__(self)
878 self.sig = sig
879
880class ParseError(Event):
881 """
882 Event to indicate parse failed
883 """
884 def __init__(self, msg):
885 super().__init__()
886 self._msg = msg
diff --git a/bitbake/lib/bb/exceptions.py b/bitbake/lib/bb/exceptions.py
deleted file mode 100644
index ecbad59970..0000000000
--- a/bitbake/lib/bb/exceptions.py
+++ /dev/null
@@ -1,94 +0,0 @@
1#
2# SPDX-License-Identifier: GPL-2.0-only
3#
4
5import inspect
6import traceback
7import bb.namedtuple_with_abc
8from collections import namedtuple
9
10
11class TracebackEntry(namedtuple.abc):
12 """Pickleable representation of a traceback entry"""
13 _fields = 'filename lineno function args code_context index'
14 _header = ' File "{0.filename}", line {0.lineno}, in {0.function}{0.args}'
15
16 def format(self, formatter=None):
17 if not self.code_context:
18 return self._header.format(self) + '\n'
19
20 formatted = [self._header.format(self) + ':\n']
21
22 for lineindex, line in enumerate(self.code_context):
23 if formatter:
24 line = formatter(line)
25
26 if lineindex == self.index:
27 formatted.append(' >%s' % line)
28 else:
29 formatted.append(' %s' % line)
30 return formatted
31
32 def __str__(self):
33 return ''.join(self.format())
34
35def _get_frame_args(frame):
36 """Get the formatted arguments and class (if available) for a frame"""
37 arginfo = inspect.getargvalues(frame)
38
39 try:
40 if not arginfo.args:
41 return '', None
42 # There have been reports from the field of python 2.6 which doesn't
43 # return a namedtuple here but simply a tuple so fallback gracefully if
44 # args isn't present.
45 except AttributeError:
46 return '', None
47
48 firstarg = arginfo.args[0]
49 if firstarg == 'self':
50 self = arginfo.locals['self']
51 cls = self.__class__.__name__
52
53 arginfo.args.pop(0)
54 del arginfo.locals['self']
55 else:
56 cls = None
57
58 formatted = inspect.formatargvalues(*arginfo)
59 return formatted, cls
60
61def extract_traceback(tb, context=1):
62 frames = inspect.getinnerframes(tb, context)
63 for frame, filename, lineno, function, code_context, index in frames:
64 formatted_args, cls = _get_frame_args(frame)
65 if cls:
66 function = '%s.%s' % (cls, function)
67 yield TracebackEntry(filename, lineno, function, formatted_args,
68 code_context, index)
69
70def format_extracted(extracted, formatter=None, limit=None):
71 if limit:
72 extracted = extracted[-limit:]
73
74 formatted = []
75 for tracebackinfo in extracted:
76 formatted.extend(tracebackinfo.format(formatter))
77 return formatted
78
79
80def format_exception(etype, value, tb, context=1, limit=None, formatter=None):
81 formatted = ['Traceback (most recent call last):\n']
82
83 if hasattr(tb, 'tb_next'):
84 tb = extract_traceback(tb, context)
85
86 formatted.extend(format_extracted(tb, formatter, limit))
87 formatted.extend(traceback.format_exception_only(etype, value))
88 return formatted
89
90def to_string(exc):
91 if isinstance(exc, SystemExit):
92 if not isinstance(exc.code, str):
93 return 'Exited with "%d"' % exc.code
94 return str(exc)
diff --git a/bitbake/lib/bb/fetch2/README b/bitbake/lib/bb/fetch2/README
new file mode 100644
index 0000000000..67b787ef47
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/README
@@ -0,0 +1,57 @@
1There are expectations of users of the fetcher code. This file attempts to document
2some of the constraints that are present. Some are obvious, some are less so. It is
3documented in the context of how OE uses it but the API calls are generic.
4
5a) network access for sources is only expected to happen in the do_fetch task.
6 This is not enforced or tested but is required so that we can:
7
8 i) audit the sources used (i.e. for license/manifest reasons)
9 ii) support offline builds with a suitable cache
10 iii) allow work to continue even with downtime upstream
11 iv) allow for changes upstream in incompatible ways
12 v) allow rebuilding of the software in X years time
13
14b) network access is not expected in the do_unpack task.
15
16c) you can take DL_DIR and use it as a mirror for offline builds; see the sketch after this list.
17
18d) access to the network is only made when explicitly configured in recipes
19 (e.g. use of AUTOREV, or use of git tags which change revision).
20
21e) fetcher output is deterministic (i.e. if you fetch configuration XXX now it
22 will match exactly in the future in a clean build with a new DL_DIR).
23 One specific pain point is git tags. They can be replaced and change
24 so the git fetcher has to resolve them with the network. We use git revisions
25 where possible to avoid this and ensure determinism.
26
27f) network access is expected to work with the standard linux proxy variables
28 so that access behind firewalls works (the fetcher sets these in the
29 environment but only in the do_fetch tasks).
30
31g) access during parsing has to be minimal: a "git ls-remote" for an AUTOREV
32 git recipe might be OK, but you can't expect to check out a git tree.
33
34h) we need to provide revision information during parsing such that a version
35 for the recipe can be constructed.
36
37i) versions are expected to be able to increase in a way which sorts, allowing
38 package feeds to operate (a PR server is required for git revisions to sort).
39
40j) an API to query for possible version upgrades of a url is highly desirable to
41 allow our automated upgrade code to function (it is implied this always
42 has network access).
43
44k) Where fixes or changes to behaviour in the fetcher are made, we ask that
45 test cases are added (run with "bitbake-selftest bb.tests.fetch"). We do
46 have fairly extensive test coverage of the fetcher as it is the only way
47 to track all of its corner cases, though sadly it still doesn't give
48 complete coverage.
49
50l) If tools are used during parse time, they will have to be in ASSUME_PROVIDED
51 in OE's context, since we cannot build git-native and then parse a recipe
52 that uses git ls-remote.
53
54Not all fetchers support all features; autorev is optional and doesn't make
55sense for some. Upgrade detection also means different things in different
56contexts.
57
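A hedged illustration of point (c) above, written as Python setVar() calls for brevity (these settings would normally live in a .conf file; the paths and mirror patterns are examples only):

    from bb.data_smart import DataSmart

    d = DataSmart()
    d.setVar("DL_DIR", "/srv/mirror/downloads")  # pre-populated download dir
    d.setVar("BB_NO_NETWORK", "1")               # error out rather than fetch
    d.setVar("PREMIRRORS",
             "git://.*/.* file:///srv/mirror/downloads/ "
             "https?://.*/.* file:///srv/mirror/downloads/")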
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index 19169d780f..0ad987c596 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -23,17 +23,18 @@ import collections
23import subprocess 23import subprocess
24import pickle 24import pickle
25import errno 25import errno
26import bb.persist_data, bb.utils 26import bb.utils
27import bb.checksum 27import bb.checksum
28import bb.process 28import bb.process
29import bb.event 29import bb.event
30 30
31__version__ = "2" 31__version__ = "2"
32_checksum_cache = bb.checksum.FileChecksumCache() 32_checksum_cache = bb.checksum.FileChecksumCache()
33_revisions_cache = bb.checksum.RevisionsCache()
33 34
34logger = logging.getLogger("BitBake.Fetcher") 35logger = logging.getLogger("BitBake.Fetcher")
35 36
36CHECKSUM_LIST = [ "md5", "sha256", "sha1", "sha384", "sha512" ] 37CHECKSUM_LIST = [ "goh1", "md5", "sha256", "sha1", "sha384", "sha512" ]
37SHOWN_CHECKSUM_LIST = ["sha256"] 38SHOWN_CHECKSUM_LIST = ["sha256"]
38 39
39class BBFetchException(Exception): 40class BBFetchException(Exception):
@@ -113,7 +114,7 @@ class MissingParameterError(BBFetchException):
113 self.args = (missing, url) 114 self.args = (missing, url)
114 115
115class ParameterError(BBFetchException): 116class ParameterError(BBFetchException):
116 """Exception raised when a url cannot be proccessed due to invalid parameters.""" 117 """Exception raised when a url cannot be processed due to invalid parameters."""
117 def __init__(self, message, url): 118 def __init__(self, message, url):
118 msg = "URL: '%s' has invalid parameters. %s" % (url, message) 119 msg = "URL: '%s' has invalid parameters. %s" % (url, message)
119 self.url = url 120 self.url = url
@@ -182,7 +183,7 @@ class URI(object):
182 Some notes about relative URIs: while it's specified that 183 Some notes about relative URIs: while it's specified that
183 a URI beginning with <scheme>:// should either be directly 184 a URI beginning with <scheme>:// should either be directly
184 followed by a hostname or a /, the old URI handling of the 185 followed by a hostname or a /, the old URI handling of the
185 fetch2 library did not comform to this. Therefore, this URI 186 fetch2 library did not conform to this. Therefore, this URI
186 class has some kludges to make sure that URIs are parsed in 187 class has some kludges to make sure that URIs are parsed in
187 a way conforming to bitbake's current usage. This URI class 188
188 supports the following: 189 supports the following:
@@ -199,7 +200,7 @@ class URI(object):
199 file://hostname/absolute/path.diff (would be IETF compliant) 200 file://hostname/absolute/path.diff (would be IETF compliant)
200 201
201 Note that the last case only applies to a list of 202 Note that the last case only applies to a list of
202 "whitelisted" schemes (currently only file://), that requires 203 explicitly allowed schemes (currently only file://), that requires
203 its URIs to not have a network location. 204 its URIs to not have a network location.
204 """ 205 """
205 206
@@ -237,7 +238,7 @@ class URI(object):
237 # to RFC compliant URL format. E.g.: 238 # to RFC compliant URL format. E.g.:
238 # file://foo.diff -> file:foo.diff 239 # file://foo.diff -> file:foo.diff
239 if urlp.scheme in self._netloc_forbidden: 240 if urlp.scheme in self._netloc_forbidden:
240 uri = re.sub("(?<=:)//(?!/)", "", uri, 1) 241 uri = re.sub(r"(?<=:)//(?!/)", "", uri, count=1)
241 reparse = 1 242 reparse = 1
242 243
243 if reparse: 244 if reparse:
@@ -290,12 +291,12 @@ class URI(object):
290 291
291 def _param_str_split(self, string, elmdelim, kvdelim="="): 292 def _param_str_split(self, string, elmdelim, kvdelim="="):
292 ret = collections.OrderedDict() 293 ret = collections.OrderedDict()
293 for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim) if x]: 294 for k, v in [x.split(kvdelim, 1) if kvdelim in x else (x, None) for x in string.split(elmdelim) if x]:
294 ret[k] = v 295 ret[k] = v
295 return ret 296 return ret
296 297
297 def _param_str_join(self, dict_, elmdelim, kvdelim="="): 298 def _param_str_join(self, dict_, elmdelim, kvdelim="="):
298 return elmdelim.join([kvdelim.join([k, v]) for k, v in dict_.items()]) 299 return elmdelim.join([kvdelim.join([k, v]) if v else k for k, v in dict_.items()])
299 300
300 @property 301 @property
301 def hostport(self): 302 def hostport(self):
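_param_str_split()/_param_str_join() now tolerate valueless parameters, so flag-style parameters survive a parse/unparse round trip. A sketch (the URL is made up):

    from bb.fetch2 import URI

    u = URI("https://example.com/f.tar.gz;downloadfilename=f.tar.gz;someflag")
    assert u.params["downloadfilename"] == "f.tar.gz"
    assert u.params["someflag"] is None  # valueless parameter now accepted
    print(str(u))  # the valueless key round-trips without a trailing '='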
@@ -352,6 +353,14 @@ def decodeurl(url):
352 user, password, parameters). 353 user, password, parameters).
353 """ 354 """
354 355
356 uri = URI(url)
357 path = uri.path if uri.path else "/"
358 return uri.scheme, uri.hostport, path, uri.username, uri.password, uri.params
359
360def decodemirrorurl(url):
361 """Decodes a mirror URL into the tokens (scheme, network location, path,
362 user, password, parameters).
363 """
355 m = re.compile('(?P<type>[^:]*)://((?P<user>[^/;]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url) 364 m = re.compile('(?P<type>[^:]*)://((?P<user>[^/;]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
356 if not m: 365 if not m:
357 raise MalformedUrl(url) 366 raise MalformedUrl(url)
@@ -370,6 +379,9 @@ def decodeurl(url):
370 elif type.lower() == 'file': 379 elif type.lower() == 'file':
371 host = "" 380 host = ""
372 path = location 381 path = location
382 if user:
383 path = user + '@' + path
384 user = ""
373 else: 385 else:
374 host = location 386 host = location
375 path = "/" 387 path = "/"
@@ -388,7 +400,7 @@ def decodeurl(url):
388 if s: 400 if s:
389 if not '=' in s: 401 if not '=' in s:
390 raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s)) 402 raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s))
391 s1, s2 = s.split('=') 403 s1, s2 = s.split('=', 1)
392 p[s1] = s2 404 p[s1] = s2
393 405
394 return type, host, urllib.parse.unquote(path), user, pswd, p 406 return type, host, urllib.parse.unquote(path), user, pswd, p
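Parameters are now split on the first '=' only, so values may themselves contain '='. A sketch (the URL is made up):

    from bb.fetch2 import decodeurl

    scheme, host, path, user, pswd, params = decodeurl(
        "https://example.com/f.tar.gz;token=a=b")
    assert params["token"] == "a=b"  # only the first '=' delimits key/value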
@@ -402,34 +414,37 @@ def encodeurl(decoded):
402 414
403 if not type: 415 if not type:
404 raise MissingParameterError('type', "encoded from the data %s" % str(decoded)) 416 raise MissingParameterError('type', "encoded from the data %s" % str(decoded))
405 url = '%s://' % type 417 uri = URI()
418 uri.scheme = type
406 if user and type != "file": 419 if user and type != "file":
407 url += "%s" % user 420 uri.username = user
408 if pswd: 421 if pswd:
409 url += ":%s" % pswd 422 uri.password = pswd
410 url += "@"
411 if host and type != "file": 423 if host and type != "file":
412 url += "%s" % host 424 uri.hostname = host
413 if path: 425 if path:
414 # Standardise path to ensure comparisons work 426 # Standardise path to ensure comparisons work
415 while '//' in path: 427 while '//' in path:
416 path = path.replace("//", "/") 428 path = path.replace("//", "/")
417 url += "%s" % urllib.parse.quote(path) 429 uri.path = path
430 if type == "file":
431 # Use old not IETF compliant style
432 uri.relative = False
418 if p: 433 if p:
419 for parm in p: 434 uri.params = p
420 url += ";%s=%s" % (parm, p[parm])
421 435
422 return url 436 return str(uri)
423 437
424def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None): 438def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
425 if not ud.url or not uri_find or not uri_replace: 439 if not ud.url or not uri_find or not uri_replace:
426 logger.error("uri_replace: passed an undefined value, not replacing") 440 logger.error("uri_replace: passed an undefined value, not replacing")
427 return None 441 return None
428 uri_decoded = list(decodeurl(ud.url)) 442 uri_decoded = list(decodemirrorurl(ud.url))
429 uri_find_decoded = list(decodeurl(uri_find)) 443 uri_find_decoded = list(decodemirrorurl(uri_find))
430 uri_replace_decoded = list(decodeurl(uri_replace)) 444 uri_replace_decoded = list(decodemirrorurl(uri_replace))
431 logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded)) 445 logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
432 result_decoded = ['', '', '', '', '', {}] 446 result_decoded = ['', '', '', '', '', {}]
447 # 0 - type, 1 - host, 2 - path, 3 - user, 4- pswd, 5 - params
433 for loc, i in enumerate(uri_find_decoded): 448 for loc, i in enumerate(uri_find_decoded):
434 result_decoded[loc] = uri_decoded[loc] 449 result_decoded[loc] = uri_decoded[loc]
435 regexp = i 450 regexp = i
@@ -449,6 +464,9 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
449 for l in replacements: 464 for l in replacements:
450 uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l]) 465 uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l])
451 result_decoded[loc][k] = uri_replace_decoded[loc][k] 466 result_decoded[loc][k] = uri_replace_decoded[loc][k]
467 elif (loc == 3 or loc == 4) and uri_replace_decoded[loc]:
468 # User/password in the replacement is just a straight replacement
469 result_decoded[loc] = uri_replace_decoded[loc]
452 elif (re.match(regexp, uri_decoded[loc])): 470 elif (re.match(regexp, uri_decoded[loc])):
453 if not uri_replace_decoded[loc]: 471 if not uri_replace_decoded[loc]:
454 result_decoded[loc] = "" 472 result_decoded[loc] = ""
@@ -456,7 +474,7 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
456 for k in replacements: 474 for k in replacements:
457 uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k]) 475 uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k])
458 #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc])) 476 #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc]))
459 result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc], 1) 477 result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc], count=1)
460 if loc == 2: 478 if loc == 2:
461 # Handle path manipulations 479 # Handle path manipulations
462 basename = None 480 basename = None
@@ -465,10 +483,18 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
465 basename = os.path.basename(mirrortarball) 483 basename = os.path.basename(mirrortarball)
466 # Kill parameters, they make no sense for mirror tarballs 484 # Kill parameters, they make no sense for mirror tarballs
467 uri_decoded[5] = {} 485 uri_decoded[5] = {}
486 uri_find_decoded[5] = {}
468 elif ud.localpath and ud.method.supports_checksum(ud): 487 elif ud.localpath and ud.method.supports_checksum(ud):
469 basename = os.path.basename(ud.localpath) 488 basename = os.path.basename(ud.localpath)
470 if basename and not result_decoded[loc].endswith(basename): 489 if basename:
471 result_decoded[loc] = os.path.join(result_decoded[loc], basename) 490 uri_basename = os.path.basename(uri_decoded[loc])
491 # Prefix with a slash as a sentinel in case
492 # result_decoded[loc] does not contain one.
493 path = "/" + result_decoded[loc]
494 if uri_basename and basename != uri_basename and path.endswith("/" + uri_basename):
495 result_decoded[loc] = path[1:-len(uri_basename)] + basename
496 elif not path.endswith("/" + basename):
497 result_decoded[loc] = os.path.join(path[1:], basename)
472 else: 498 else:
473 return None 499 return None
474 result = encodeurl(result_decoded) 500 result = encodeurl(result_decoded)
@@ -481,18 +507,23 @@ methods = []
481urldata_cache = {} 507urldata_cache = {}
482saved_headrevs = {} 508saved_headrevs = {}
483 509
484def fetcher_init(d): 510def fetcher_init(d, servercontext=True):
485 """ 511 """
486 Called to initialize the fetchers once the configuration data is known. 512 Called to initialize the fetchers once the configuration data is known.
487 Calls before this must not hit the cache. 513 Calls before this must not hit the cache.
488 """ 514 """
489 515
490 revs = bb.persist_data.persist('BB_URI_HEADREVS', d) 516 _checksum_cache.init_cache(d.getVar("BB_CACHEDIR"))
517 _revisions_cache.init_cache(d.getVar("BB_CACHEDIR"))
518
519 if not servercontext:
520 return
521
491 try: 522 try:
492 # fetcher_init is called multiple times, so make sure we only save the 523 # fetcher_init is called multiple times, so make sure we only save the
493 # revs the first time it is called. 524 # revs the first time it is called.
494 if not bb.fetch2.saved_headrevs: 525 if not bb.fetch2.saved_headrevs:
495 bb.fetch2.saved_headrevs = dict(revs) 526 bb.fetch2.saved_headrevs = _revisions_cache.get_revs()
496 except: 527 except:
497 pass 528 pass
498 529
@@ -502,11 +533,10 @@ def fetcher_init(d):
502 logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy) 533 logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
503 elif srcrev_policy == "clear": 534 elif srcrev_policy == "clear":
504 logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy) 535 logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
505 revs.clear() 536 _revisions_cache.clear_cache()
506 else: 537 else:
507 raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy) 538 raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
508 539
509 _checksum_cache.init_cache(d)
510 540
511 for m in methods: 541 for m in methods:
512 if hasattr(m, "init"): 542 if hasattr(m, "init"):
@@ -514,9 +544,11 @@ def fetcher_init(d):
514 544
515def fetcher_parse_save(): 545def fetcher_parse_save():
516 _checksum_cache.save_extras() 546 _checksum_cache.save_extras()
547 _revisions_cache.save_extras()
517 548
518def fetcher_parse_done(): 549def fetcher_parse_done():
519 _checksum_cache.save_merge() 550 _checksum_cache.save_merge()
551 _revisions_cache.save_merge()
520 552
521def fetcher_compare_revisions(d): 553def fetcher_compare_revisions(d):
522 """ 554 """
@@ -524,7 +556,7 @@ def fetcher_compare_revisions(d):
524 when bitbake was started and return true if they have changed. 556 when bitbake was started and return true if they have changed.
525 """ 557 """
526 558
527 headrevs = dict(bb.persist_data.persist('BB_URI_HEADREVS', d)) 559 headrevs = _revisions_cache.get_revs()
528 return headrevs != bb.fetch2.saved_headrevs 560 return headrevs != bb.fetch2.saved_headrevs
529 561
530def mirror_from_string(data): 562def mirror_from_string(data):
@@ -534,7 +566,7 @@ def mirror_from_string(data):
534 bb.warn('Invalid mirror data %s, should have paired members.' % data) 566 bb.warn('Invalid mirror data %s, should have paired members.' % data)
535 return list(zip(*[iter(mirrors)]*2)) 567 return list(zip(*[iter(mirrors)]*2))
536 568
537def verify_checksum(ud, d, precomputed={}): 569def verify_checksum(ud, d, precomputed={}, localpath=None, fatal_nochecksum=True):
538 """ 570 """
539 verify the MD5 and SHA256 checksum for downloaded src 571 verify the MD5 and SHA256 checksum for downloaded src
540 572
@@ -548,20 +580,25 @@ def verify_checksum(ud, d, precomputed={}):
548 file against those in the recipe each time, rather than only after 580 file against those in the recipe each time, rather than only after
549 downloading. See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571. 581 downloading. See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571.
550 """ 582 """
551
552 if ud.ignore_checksums or not ud.method.supports_checksum(ud): 583 if ud.ignore_checksums or not ud.method.supports_checksum(ud):
553 return {} 584 return {}
554 585
586 if localpath is None:
587 localpath = ud.localpath
588
555 def compute_checksum_info(checksum_id): 589 def compute_checksum_info(checksum_id):
556 checksum_name = getattr(ud, "%s_name" % checksum_id) 590 checksum_name = getattr(ud, "%s_name" % checksum_id)
557 591
558 if checksum_id in precomputed: 592 if checksum_id in precomputed:
559 checksum_data = precomputed[checksum_id] 593 checksum_data = precomputed[checksum_id]
560 else: 594 else:
561 checksum_data = getattr(bb.utils, "%s_file" % checksum_id)(ud.localpath) 595 checksum_data = getattr(bb.utils, "%s_file" % checksum_id)(localpath)
562 596
563 checksum_expected = getattr(ud, "%s_expected" % checksum_id) 597 checksum_expected = getattr(ud, "%s_expected" % checksum_id)
564 598
599 if checksum_expected == '':
600 checksum_expected = None
601
565 return { 602 return {
566 "id": checksum_id, 603 "id": checksum_id,
567 "name": checksum_name, 604 "name": checksum_name,
@@ -581,17 +618,13 @@ def verify_checksum(ud, d, precomputed={}):
581 checksum_lines = ["SRC_URI[%s] = \"%s\"" % (ci["name"], ci["data"])] 618 checksum_lines = ["SRC_URI[%s] = \"%s\"" % (ci["name"], ci["data"])]
582 619
583 # If no checksum has been provided 620 # If no checksum has been provided
584 if ud.method.recommends_checksum(ud) and all(ci["expected"] is None for ci in checksum_infos): 621 if fatal_nochecksum and ud.method.recommends_checksum(ud) and all(ci["expected"] is None for ci in checksum_infos):
585 messages = [] 622 messages = []
586 strict = d.getVar("BB_STRICT_CHECKSUM") or "0" 623 strict = d.getVar("BB_STRICT_CHECKSUM") or "0"
587 624
588 # If strict checking enabled and neither sum defined, raise error 625 # If strict checking enabled and neither sum defined, raise error
589 if strict == "1": 626 if strict == "1":
590 messages.append("No checksum specified for '%s', please add at " \ 627 raise NoChecksumError("\n".join(checksum_lines))
591 "least one to the recipe:" % ud.localpath)
592 messages.extend(checksum_lines)
593 logger.error("\n".join(messages))
594 raise NoChecksumError("Missing SRC_URI checksum", ud.url)
595 628
596 bb.event.fire(MissingChecksumEvent(ud.url, **checksum_event), d) 629 bb.event.fire(MissingChecksumEvent(ud.url, **checksum_event), d)
597 630
@@ -612,8 +645,8 @@ def verify_checksum(ud, d, precomputed={}):
612 645
613 for ci in checksum_infos: 646 for ci in checksum_infos:
614 if ci["expected"] and ci["expected"] != ci["data"]: 647 if ci["expected"] and ci["expected"] != ci["data"]:
615 messages.append("File: '%s' has %s checksum %s when %s was " \ 648 messages.append("File: '%s' has %s checksum '%s' when '%s' was " \
616 "expected" % (ud.localpath, ci["id"], ci["data"], ci["expected"])) 649 "expected" % (localpath, ci["id"], ci["data"], ci["expected"]))
617 bad_checksum = ci["data"] 650 bad_checksum = ci["data"]
618 651
619 if bad_checksum: 652 if bad_checksum:
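verify_checksum() gains localpath and fatal_nochecksum arguments, letting callers check a file other than ud.localpath and downgrade the missing-checksum case from an error. A hedged sketch ('ud', 'd' and the path are assumed to exist):

    import bb.fetch2

    # ud: an existing FetchData; d: an existing datastore (both assumed)
    checksums = bb.fetch2.verify_checksum(
        ud, d,
        localpath="/tmp/mirror-candidate.tar.gz",  # illustrative path
        fatal_nochecksum=False)  # report, rather than raise, if no sums are set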
@@ -731,13 +764,16 @@ def subprocess_setup():
731 # SIGPIPE errors are known issues with gzip/bash 764 # SIGPIPE errors are known issues with gzip/bash
732 signal.signal(signal.SIGPIPE, signal.SIG_DFL) 765 signal.signal(signal.SIGPIPE, signal.SIG_DFL)
733 766
734def get_autorev(d): 767def mark_recipe_nocache(d):
735 # only not cache src rev in autorev case
736 if d.getVar('BB_SRCREV_POLICY') != "cache": 768 if d.getVar('BB_SRCREV_POLICY') != "cache":
737 d.setVar('BB_DONT_CACHE', '1') 769 d.setVar('BB_DONT_CACHE', '1')
770
771def get_autorev(d):
772 mark_recipe_nocache(d)
773 d.setVar("__BBAUTOREV_SEEN", True)
738 return "AUTOINC" 774 return "AUTOINC"
739 775
740def get_srcrev(d, method_name='sortable_revision'): 776def _get_srcrev(d, method_name='sortable_revision'):
741 """ 777 """
742 Return the revision string, usually for use in the version string (PV) of the current package 778 Return the revision string, usually for use in the version string (PV) of the current package
743 Most packages usually only have one SCM so we just pass on the call. 779 Most packages usually only have one SCM so we just pass on the call.
@@ -751,23 +787,34 @@ def get_srcrev(d, method_name='sortable_revision'):
751 that fetcher provides a method with the given name and the same signature as sortable_revision. 787 that fetcher provides a method with the given name and the same signature as sortable_revision.
752 """ 788 """
753 789
790 d.setVar("__BBSRCREV_SEEN", "1")
791 recursion = d.getVar("__BBINSRCREV")
792 if recursion:
793 raise FetchError("There are recursive references in fetcher variables, likely through SRC_URI")
794 d.setVar("__BBINSRCREV", True)
795
754 scms = [] 796 scms = []
797 revs = []
755 fetcher = Fetch(d.getVar('SRC_URI').split(), d) 798 fetcher = Fetch(d.getVar('SRC_URI').split(), d)
756 urldata = fetcher.ud 799 urldata = fetcher.ud
757 for u in urldata: 800 for u in urldata:
758 if urldata[u].method.supports_srcrev(): 801 if urldata[u].method.supports_srcrev():
759 scms.append(u) 802 scms.append(u)
760 803
761 if len(scms) == 0: 804 if not scms:
762 raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI") 805 d.delVar("__BBINSRCREV")
806 return "", revs
763 807
764 if len(scms) == 1 and len(urldata[scms[0]].names) == 1: 808
765 autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].names[0]) 809 if len(scms) == 1:
810 autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].name)
811 revs.append(rev)
766 if len(rev) > 10: 812 if len(rev) > 10:
767 rev = rev[:10] 813 rev = rev[:10]
814 d.delVar("__BBINSRCREV")
768 if autoinc: 815 if autoinc:
769 return "AUTOINC+" + rev 816 return "AUTOINC+" + rev, revs
770 return rev 817 return rev, revs
771 818
772 # 819 #
773 # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT 820
@@ -781,12 +828,12 @@ def get_srcrev(d, method_name='sortable_revision'):
     seenautoinc = False
     for scm in scms:
         ud = urldata[scm]
-        for name in ud.names:
-            autoinc, rev = getattr(ud.method, method_name)(ud, d, name)
-            seenautoinc = seenautoinc or autoinc
-            if len(rev) > 10:
-                rev = rev[:10]
-            name_to_rev[name] = rev
+        autoinc, rev = getattr(ud.method, method_name)(ud, d, ud.name)
+        revs.append(rev)
+        seenautoinc = seenautoinc or autoinc
+        if len(rev) > 10:
+            rev = rev[:10]
+        name_to_rev[ud.name] = rev
     # Replace names by revisions in the SRCREV_FORMAT string. The approach used
     # here can handle names being prefixes of other names and names appearing
     # as substrings in revisions (in which case the name should not be
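
The comment above pins down the substitution contract: names are swapped for revisions in a single pass over SRCREV_FORMAT, so a revision that happens to contain another name is never re-substituted, and names that are prefixes of other names stay safe. A minimal sketch of that idea with hypothetical names and revisions (the real substitution lives in the unchanged body of this function):

import re

def expand_srcrev_format(fmt, name_to_rev):
    # One pass over fmt: substituted revision text is never re-scanned,
    # and matching longer names first keeps a name that is a prefix of
    # another name from matching too early.
    pattern = "|".join(re.escape(n) for n in sorted(name_to_rev, key=len, reverse=True))
    return re.sub(pattern, lambda m: name_to_rev[m.group(0)], fmt)

# SRCREV_FORMAT = "meta_rtos" with two named SCMs (hypothetical revisions):
print(expand_srcrev_format("meta_rtos", {"meta": "0a1b2c3d4e", "rtos": "5f6a7b8c9d"}))
# -> 0a1b2c3d4e_5f6a7b8c9d
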
@@ -799,12 +846,71 @@ def get_srcrev(d, method_name='sortable_revision'):
     if seenautoinc:
         format = "AUTOINC+" + format
 
-    return format
+    d.delVar("__BBINSRCREV")
+    return format, revs
+
+def get_hashvalue(d, method_name='sortable_revision'):
+    pkgv, revs = _get_srcrev(d, method_name=method_name)
+    return " ".join(revs)
+
+def get_pkgv_string(d, method_name='sortable_revision'):
+    pkgv, revs = _get_srcrev(d, method_name=method_name)
+    return pkgv
+
+def get_srcrev(d, method_name='sortable_revision'):
+    pkgv, revs = _get_srcrev(d, method_name=method_name)
+    if not pkgv:
+        raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")
+    return pkgv
 
 def localpath(url, d):
     fetcher = bb.fetch2.Fetch([url], d)
     return fetcher.localpath(url)
 
+# Need to export PATH as binary could be in metadata paths
+# rather than host provided
+# Also include some other variables.
+FETCH_EXPORT_VARS = ['HOME', 'PATH',
+                     'HTTP_PROXY', 'http_proxy',
+                     'HTTPS_PROXY', 'https_proxy',
+                     'FTP_PROXY', 'ftp_proxy',
+                     'FTPS_PROXY', 'ftps_proxy',
+                     'NO_PROXY', 'no_proxy',
+                     'ALL_PROXY', 'all_proxy',
+                     'GIT_PROXY_COMMAND',
+                     'GIT_SSH',
+                     'GIT_SSH_COMMAND',
+                     'GIT_SSL_CAINFO',
+                     'GIT_SMART_HTTP',
+                     'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
+                     'SOCKS5_USER', 'SOCKS5_PASSWD',
+                     'DBUS_SESSION_BUS_ADDRESS',
+                     'P4CONFIG',
+                     'SSL_CERT_FILE',
+                     'NODE_EXTRA_CA_CERTS',
+                     'AWS_PROFILE',
+                     'AWS_ACCESS_KEY_ID',
+                     'AWS_SECRET_ACCESS_KEY',
+                     'AWS_ROLE_ARN',
+                     'AWS_WEB_IDENTITY_TOKEN_FILE',
+                     'AWS_DEFAULT_REGION',
+                     'AWS_SESSION_TOKEN',
+                     'GIT_CACHE_PATH',
+                     'REMOTE_CONTAINERS_IPC',
+                     'GITHUB_TOKEN',
+                     'SSL_CERT_DIR']
+
+def get_fetcher_environment(d):
+    newenv = {}
+    origenv = d.getVar("BB_ORIGENV")
+    for name in bb.fetch2.FETCH_EXPORT_VARS:
+        value = d.getVar(name)
+        if not value and origenv:
+            value = origenv.getVar(name)
+        if value:
+            newenv[name] = value
+    return newenv
+
 def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
     """
     Run cmd returning the command output
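
With the whitelist promoted to module level, external callers can reuse it through the new get_fetcher_environment() instead of re-deriving proxy and credential settings. A hedged usage sketch, assuming a populated datastore d; run_in_fetch_env is a hypothetical helper, not part of this patch:

import subprocess

import bb.fetch2

def run_in_fetch_env(d, cmd):
    # Build the whitelisted proxy/auth environment, falling back to
    # BB_ORIGENV for anything unset in the datastore, and run cmd in it.
    env = bb.fetch2.get_fetcher_environment(d)
    return subprocess.run(cmd, shell=True, env=env, check=True)

# run_in_fetch_env(d, "git ls-remote https://example.com/repo.git")
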
@@ -813,25 +919,7 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
     Optionally remove the files/directories listed in cleanup upon failure
     """
 
-    # Need to export PATH as binary could be in metadata paths
-    # rather than host provided
-    # Also include some other variables.
-    # FIXME: Should really include all export varaiables?
-    exportvars = ['HOME', 'PATH',
-                  'HTTP_PROXY', 'http_proxy',
-                  'HTTPS_PROXY', 'https_proxy',
-                  'FTP_PROXY', 'ftp_proxy',
-                  'FTPS_PROXY', 'ftps_proxy',
-                  'NO_PROXY', 'no_proxy',
-                  'ALL_PROXY', 'all_proxy',
-                  'GIT_PROXY_COMMAND',
-                  'GIT_SSH',
-                  'GIT_SSL_CAINFO',
-                  'GIT_SMART_HTTP',
-                  'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
-                  'SOCKS5_USER', 'SOCKS5_PASSWD',
-                  'DBUS_SESSION_BUS_ADDRESS',
-                  'P4CONFIG']
+    exportvars = FETCH_EXPORT_VARS
 
     if not cleanup:
         cleanup = []
@@ -868,14 +956,17 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
         (output, errors) = bb.process.run(cmd, log=log, shell=True, stderr=subprocess.PIPE, cwd=workdir)
         success = True
     except bb.process.NotFoundError as e:
-        error_message = "Fetch command %s" % (e.command)
+        error_message = "Fetch command %s not found" % (e.command)
     except bb.process.ExecutionError as e:
         if e.stdout:
             output = "output:\n%s\n%s" % (e.stdout, e.stderr)
         elif e.stderr:
             output = "output:\n%s" % e.stderr
         else:
-            output = "no output"
+            if log:
+                output = "see logfile for output"
+            else:
+                output = "no output"
         error_message = "Fetch command %s failed with exit code %s, %s" % (e.command, e.exitcode, output)
     except bb.process.CmdError as e:
         error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg)
@@ -937,6 +1028,7 @@ def build_mirroruris(origud, mirrors, ld):
 
                 try:
                     newud = FetchData(newuri, ld)
+                    newud.ignore_checksums = True
                     newud.setup_localpath(ld)
                 except bb.fetch2.BBFetchException as e:
                     logger.debug("Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
@@ -1000,6 +1092,10 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
         # If that tarball is a local file:// we need to provide a symlink to it
         dldir = ld.getVar("DL_DIR")
 
+        if bb.utils.to_boolean(ld.getVar("BB_FETCH_PREMIRRORONLY")):
+            ld = ld.createCopy()
+            ld.setVar("BB_NO_NETWORK", "1")
+
         if origud.mirrortarballs and os.path.basename(ud.localpath) in origud.mirrortarballs and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
             # Create donestamp in old format to avoid triggering a re-download
             if ud.donestamp:
@@ -1021,7 +1117,10 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
                 origud.method.build_mirror_data(origud, ld)
             return origud.localpath
         # Otherwise the result is a local file:// and we symlink to it
-        ensure_symlink(ud.localpath, origud.localpath)
+        # This may also be a link to a shallow archive
+        # When using shallow mode, add a symlink to the original fullshallow
+        # path to ensure a valid symlink even in the `PREMIRRORS` case
+        origud.method.update_mirror_links(ud, origud)
         update_stamp(origud, ld)
         return ud.localpath
 
@@ -1046,7 +1145,8 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
         logger.debug("Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
         logger.debug(str(e))
         try:
-            ud.method.clean(ud, ld)
+            if ud.method.cleanup_upon_failure():
+                ud.method.clean(ud, ld)
         except UnboundLocalError:
             pass
         return False
@@ -1054,23 +1154,6 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
         if ud.lockfile and ud.lockfile != origud.lockfile:
             bb.utils.unlockfile(lf)
 
-
-def ensure_symlink(target, link_name):
-    if not os.path.exists(link_name):
-        if os.path.islink(link_name):
-            # Broken symbolic link
-            os.unlink(link_name)
-
-        # In case this is executing without any file locks held (as is
-        # the case for file:// URLs), two tasks may end up here at the
-        # same time, in which case we do not want the second task to
-        # fail when the link has already been created by the first task.
-        try:
-            os.symlink(target, link_name)
-        except FileExistsError:
-            pass
-
-
 def try_mirrors(fetch, d, origud, mirrors, check = False):
     """
     Try to use a mirrored version of the sources.
@@ -1099,7 +1182,7 @@ def trusted_network(d, url):
     if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")):
         return True
 
-    pkgname = d.expand(d.getVar('PN', False))
+    pkgname = d.getVar('PN')
     trusted_hosts = None
     if pkgname:
         trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False)
@@ -1140,11 +1223,11 @@ def srcrev_internal_helper(ud, d, name):
     pn = d.getVar("PN")
     attempts = []
     if name != '' and pn:
-        attempts.append("SRCREV_%s_pn-%s" % (name, pn))
+        attempts.append("SRCREV_%s:pn-%s" % (name, pn))
     if name != '':
         attempts.append("SRCREV_%s" % name)
     if pn:
-        attempts.append("SRCREV_pn-%s" % pn)
+        attempts.append("SRCREV:pn-%s" % pn)
     attempts.append("SRCREV")
 
     for a in attempts:
@@ -1152,23 +1235,21 @@ def srcrev_internal_helper(ud, d, name):
         if srcrev and srcrev != "INVALID":
             break
 
-    if 'rev' in ud.parm and 'tag' in ud.parm:
-        raise FetchError("Please specify a ;rev= parameter or a ;tag= parameter in the url %s but not both." % (ud.url))
-
-    if 'rev' in ud.parm or 'tag' in ud.parm:
-        if 'rev' in ud.parm:
-            parmrev = ud.parm['rev']
-        else:
-            parmrev = ud.parm['tag']
+    if 'rev' in ud.parm:
+        parmrev = ud.parm['rev']
         if srcrev == "INVALID" or not srcrev:
             return parmrev
         if srcrev != parmrev:
             raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please specify one valid value" % (srcrev, parmrev))
         return parmrev
 
+    if 'tag' in ud.parm and (srcrev == "INVALID" or not srcrev):
+        return ud.parm['tag']
+
     if srcrev == "INVALID" or not srcrev:
         raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url)
     if srcrev == "AUTOINC":
+        d.setVar("__BBAUTOREV_ACTED_UPON", True)
         srcrev = ud.method.latest_revision(ud, d, name)
 
     return srcrev
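
With the old rev-versus-tag exclusivity dropped, the precedence is now: an explicit ;rev= wins (and must agree with any set SRCREV), while ;tag= is consulted only when SRCREV is unset or "INVALID". A standalone sketch of that ordering (pick_revision is hypothetical):

def pick_revision(parm, srcrev):
    # Mirrors the precedence above: an explicit ;rev= wins and must agree
    # with any SRCREV; ;tag= is only a fallback when SRCREV is unset.
    if 'rev' in parm:
        if not srcrev or srcrev == "INVALID":
            return parm['rev']
        if srcrev != parm['rev']:
            raise ValueError("Conflicting revisions: %s (SRCREV) vs %s (url)" % (srcrev, parm['rev']))
        return parm['rev']
    if 'tag' in parm and (not srcrev or srcrev == "INVALID"):
        return parm['tag']
    return srcrev

print(pick_revision({'tag': 'v1.2'}, "INVALID"))  # -> v1.2
print(pick_revision({'rev': 'abc'}, "abc"))       # -> abc
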
@@ -1180,23 +1261,21 @@ def get_checksum_file_list(d):
     SRC_URI as a space-separated string
     """
     fetch = Fetch([], d, cache = False, localonly = True)
-
-    dl_dir = d.getVar('DL_DIR')
     filelist = []
     for u in fetch.urls:
         ud = fetch.ud[u]
-
         if ud and isinstance(ud.method, local.Local):
-            paths = ud.method.localpaths(ud, d)
+            found = False
+            paths = ud.method.localfile_searchpaths(ud, d)
             for f in paths:
-                pth = ud.decodedurl
-                if f.startswith(dl_dir):
-                    # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
-                    if os.path.exists(f):
-                        bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN'), os.path.basename(f)))
-                    else:
-                        bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN'), os.path.basename(f)))
+                pth = ud.path
+                if os.path.exists(f):
+                    found = True
                 filelist.append(f + ":" + str(os.path.exists(f)))
+            if not found:
+                bb.fatal(("Unable to get checksum for %s SRC_URI entry %s: file could not be found"
+                        "\nThe following paths were searched:"
+                        "\n%s") % (d.getVar('PN'), os.path.basename(f), '\n'.join(paths)))
 
     return " ".join(filelist)
 
@@ -1234,28 +1313,28 @@ class FetchData(object):
         self.setup = False
 
         def configure_checksum(checksum_id):
+            checksum_plain_name = "%ssum" % checksum_id
             if "name" in self.parm:
                 checksum_name = "%s.%ssum" % (self.parm["name"], checksum_id)
             else:
-                checksum_name = "%ssum" % checksum_id
-
-            setattr(self, "%s_name" % checksum_id, checksum_name)
+                checksum_name = checksum_plain_name
 
             if checksum_name in self.parm:
                 checksum_expected = self.parm[checksum_name]
-            elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3"]:
+            elif checksum_plain_name in self.parm:
+                checksum_expected = self.parm[checksum_plain_name]
+                checksum_name = checksum_plain_name
+            elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3", "az", "crate", "gs", "gomod", "npm"]:
                 checksum_expected = None
             else:
                 checksum_expected = d.getVarFlag("SRC_URI", checksum_name)
 
+            setattr(self, "%s_name" % checksum_id, checksum_name)
             setattr(self, "%s_expected" % checksum_id, checksum_expected)
 
-        for checksum_id in CHECKSUM_LIST:
-            configure_checksum(checksum_id)
-
-        self.ignore_checksums = False
-
-        self.names = self.parm.get("name",'default').split(',')
+        self.name = self.parm.get("name",'default')
+        if "," in self.name:
+            raise ParameterError("The fetcher no longer supports multiple name parameters in a single url", self.url)
 
         self.method = None
         for m in methods:
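
The new checksum_plain_name fallback means a URL that carries name= can still pass a bare sha256sum= parameter, with the name-qualified form taking priority. A sketch of the lookup order on a parameter dict (expected_checksum is hypothetical):

def expected_checksum(parm, checksum_id="sha256"):
    plain = "%ssum" % checksum_id                       # "sha256sum"
    named = "%s.%s" % (parm["name"], plain) if "name" in parm else plain
    # The name-qualified parameter wins; the plain spelling is a fallback.
    if named in parm:
        return parm[named]
    return parm.get(plain)

print(expected_checksum({"name": "tarball", "sha256sum": "0123abcd"}))      # -> 0123abcd
print(expected_checksum({"name": "tarball", "tarball.sha256sum": "feed"}))  # -> feed
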
@@ -1276,6 +1355,11 @@ class FetchData(object):
         if hasattr(self.method, "urldata_init"):
             self.method.urldata_init(self, d)
 
+        for checksum_id in CHECKSUM_LIST:
+            configure_checksum(checksum_id)
+
+        self.ignore_checksums = False
+
         if "localpath" in self.parm:
             # if user sets localpath for file, use it instead.
             self.localpath = self.parm["localpath"]
@@ -1302,13 +1386,7 @@ class FetchData(object):
             self.lockfile = basepath + '.lock'
 
     def setup_revisions(self, d):
-        self.revisions = {}
-        for name in self.names:
-            self.revisions[name] = srcrev_internal_helper(self, d, name)
-
-        # add compatibility code for non name specified case
-        if len(self.names) == 1:
-            self.revision = self.revisions[self.names[0]]
+        self.revision = srcrev_internal_helper(self, d, self.name)
 
     def setup_localpath(self, d):
         if not self.localpath:
@@ -1355,6 +1433,9 @@ class FetchMethod(object):
         Is localpath something that can be represented by a checksum?
         """
 
+        # We cannot compute checksums for None
+        if urldata.localpath is None:
+            return False
         # We cannot compute checksums for directories
         if os.path.isdir(urldata.localpath):
             return False
@@ -1367,6 +1448,12 @@ class FetchMethod(object):
1367 """ 1448 """
1368 return False 1449 return False
1369 1450
1451 def cleanup_upon_failure(self):
1452 """
1453 When a fetch fails, should clean() be called?
1454 """
1455 return True
1456
1370 def verify_donestamp(self, ud, d): 1457 def verify_donestamp(self, ud, d):
1371 """ 1458 """
1372 Verify the donestamp file 1459 Verify the donestamp file
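
The new hook lets a fetch method keep partial state when a download fails; the git fetcher later in this patch overrides it to return False so interrupted clones are not deleted. A sketch of such an override (ResumableFetch is hypothetical):

from bb.fetch2 import FetchMethod

class ResumableFetch(FetchMethod):
    """Hypothetical fetcher whose partial downloads can be resumed,
    so a failed attempt should not be cleaned away."""
    def supports(self, ud, d):
        return ud.type in ['resumable']

    def cleanup_upon_failure(self):
        # Keep partial state; the next attempt picks up where we left off.
        return False
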
@@ -1427,37 +1514,40 @@ class FetchMethod(object):
                                   (file, urldata.parm.get('unpack')))
 
         base, ext = os.path.splitext(file)
-        if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz']:
+        if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz', '.zst']:
             efile = os.path.join(rootdir, os.path.basename(base))
         else:
             efile = file
         cmd = None
 
         if unpack:
+            tar_cmd = 'tar --extract --no-same-owner'
+            if 'striplevel' in urldata.parm:
+                tar_cmd += ' --strip-components=%s' % urldata.parm['striplevel']
             if file.endswith('.tar'):
-                cmd = 'tar x --no-same-owner -f %s' % file
+                cmd = '%s -f %s' % (tar_cmd, file)
             elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
-                cmd = 'tar xz --no-same-owner -f %s' % file
+                cmd = '%s -z -f %s' % (tar_cmd, file)
             elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
-                cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
+                cmd = 'bzip2 -dc %s | %s -f -' % (file, tar_cmd)
             elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
                 cmd = 'gzip -dc %s > %s' % (file, efile)
             elif file.endswith('.bz2'):
                 cmd = 'bzip2 -dc %s > %s' % (file, efile)
             elif file.endswith('.txz') or file.endswith('.tar.xz'):
-                cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
+                cmd = 'xz -dc %s | %s -f -' % (file, tar_cmd)
             elif file.endswith('.xz'):
                 cmd = 'xz -dc %s > %s' % (file, efile)
             elif file.endswith('.tar.lz'):
-                cmd = 'lzip -dc %s | tar x --no-same-owner -f -' % file
+                cmd = 'lzip -dc %s | %s -f -' % (file, tar_cmd)
             elif file.endswith('.lz'):
                 cmd = 'lzip -dc %s > %s' % (file, efile)
             elif file.endswith('.tar.7z'):
-                cmd = '7z x -so %s | tar x --no-same-owner -f -' % file
+                cmd = '7z x -so %s | %s -f -' % (file, tar_cmd)
             elif file.endswith('.7z'):
                 cmd = '7za x -y %s 1>/dev/null' % file
             elif file.endswith('.tzst') or file.endswith('.tar.zst'):
-                cmd = 'zstd --decompress --stdout %s | tar x --no-same-owner -f -' % file
+                cmd = 'zstd --decompress --stdout %s | %s -f -' % (file, tar_cmd)
             elif file.endswith('.zst'):
                 cmd = 'zstd --decompress --stdout %s > %s' % (file, efile)
             elif file.endswith('.zip') or file.endswith('.jar'):
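
Every tar invocation above is now derived from a single tar_cmd, which is how the new ;striplevel= URL parameter applies uniformly to all tar-based formats. The command construction in isolation (build_tar_cmd is hypothetical; the SRC_URI in the comment is an example):

def build_tar_cmd(parm):
    tar_cmd = 'tar --extract --no-same-owner'
    if 'striplevel' in parm:
        # e.g. SRC_URI = "https://example.com/pkg.tar.gz;striplevel=1"
        tar_cmd += ' --strip-components=%s' % parm['striplevel']
    return tar_cmd

print(build_tar_cmd({'striplevel': '1'}))
# -> tar --extract --no-same-owner --strip-components=1
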
@@ -1483,14 +1573,14 @@ class FetchMethod(object):
                 datafile = None
                 if output:
                     for line in output.decode().splitlines():
-                        if line.startswith('data.tar.'):
+                        if line.startswith('data.tar.') or line == 'data.tar':
                             datafile = line
                             break
                     else:
-                        raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar.* file", urldata.url)
+                        raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar* file", urldata.url)
                 else:
                     raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url)
-                cmd = 'ar x %s %s && tar --no-same-owner -xpf %s && rm %s' % (file, datafile, datafile, datafile)
+                cmd = 'ar x %s %s && %s -p -f %s && rm %s' % (file, datafile, tar_cmd, datafile, datafile)
 
         # If 'subdir' param exists, create a dir and use it as destination for unpack cmd
         if 'subdir' in urldata.parm:
@@ -1506,6 +1596,7 @@ class FetchMethod(object):
             unpackdir = rootdir
 
         if not unpack or not cmd:
+            urldata.unpack_tracer.unpack("file-copy", unpackdir)
             # If file == dest, then avoid any copies, as we already put the file into dest!
             dest = os.path.join(unpackdir, os.path.basename(file))
             if file != dest and not (os.path.exists(dest) and os.path.samefile(file, dest)):
@@ -1519,7 +1610,9 @@ class FetchMethod(object):
                 if urlpath.find("/") != -1:
                     destdir = urlpath.rsplit("/", 1)[0] + '/'
                     bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir))
-                cmd = 'cp -fpPRH "%s" "%s"' % (file, destdir)
+                cmd = 'cp --force --preserve=timestamps --no-dereference --recursive -H "%s" "%s"' % (file, destdir)
+        else:
+            urldata.unpack_tracer.unpack("archive-extract", unpackdir)
 
         if not cmd:
             return
@@ -1546,6 +1639,28 @@ class FetchMethod(object):
1546 """ 1639 """
1547 bb.utils.remove(urldata.localpath) 1640 bb.utils.remove(urldata.localpath)
1548 1641
1642 def ensure_symlink(self, target, link_name):
1643 if not os.path.exists(link_name):
1644 dirname = os.path.dirname(link_name)
1645 bb.utils.mkdirhier(dirname)
1646 if os.path.islink(link_name):
1647 # Broken symbolic link
1648 os.unlink(link_name)
1649
1650 # In case this is executing without any file locks held (as is
1651 # the case for file:// URLs), two tasks may end up here at the
1652 # same time, in which case we do not want the second task to
1653 # fail when the link has already been created by the first task.
1654 try:
1655 os.symlink(target, link_name)
1656 except FileExistsError:
1657 pass
1658
1659 def update_mirror_links(self, ud, origud):
1660 # For local file:// results, create a symlink to them
1661 # This may also be a link to a shallow archive
1662 self.ensure_symlink(ud.localpath, origud.localpath)
1663
1549 def try_premirror(self, urldata, d): 1664 def try_premirror(self, urldata, d):
1550 """ 1665 """
1551 Should premirrors be used? 1666 Should premirrors be used?
@@ -1573,13 +1688,13 @@ class FetchMethod(object):
         if not hasattr(self, "_latest_revision"):
             raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url)
 
-        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
         key = self.generate_revision_key(ud, d, name)
-        try:
-            return revs[key]
-        except KeyError:
-            revs[key] = rev = self._latest_revision(ud, d, name)
-            return rev
+
+        rev = _revisions_cache.get_rev(key)
+        if rev is None:
+            rev = self._latest_revision(ud, d, name)
+            _revisions_cache.set_rev(key, rev)
+        return rev
 
     def sortable_revision(self, ud, d, name):
         latest_rev = self._build_revision(ud, d, name)
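
latest_revision() now consults a module-level _revisions_cache rather than bb.persist_data; the cache implementation itself is not part of this hunk. A minimal in-memory stand-in for the two calls used here, assuming get_rev() returns None on a miss:

class RevisionsCache:
    """Minimal in-memory stand-in for the two calls used above."""
    def __init__(self):
        self._revs = {}

    def get_rev(self, key):
        # None signals a miss, matching the check in latest_revision()
        return self._revs.get(key)

    def set_rev(self, key, rev):
        self._revs[key] = rev

_revisions_cache = RevisionsCache()
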
@@ -1611,12 +1726,61 @@ class FetchMethod(object):
1611 """ 1726 """
1612 return [] 1727 return []
1613 1728
1729
1730class DummyUnpackTracer(object):
1731 """
1732 Abstract API definition for a class that traces unpacked source files back
1733 to their respective upstream SRC_URI entries, for software composition
1734 analysis, license compliance and detailed SBOM generation purposes.
1735 User may load their own unpack tracer class (instead of the dummy
1736 one) by setting the BB_UNPACK_TRACER_CLASS config parameter.
1737 """
1738 def start(self, unpackdir, urldata_dict, d):
1739 """
1740 Start tracing the core Fetch.unpack process, using an index to map
1741 unpacked files to each SRC_URI entry.
1742 This method is called by Fetch.unpack and it may receive nested calls by
1743 gitsm and npmsw fetchers, that expand SRC_URI entries by adding implicit
1744 URLs and by recursively calling Fetch.unpack from new (nested) Fetch
1745 instances.
1746 """
1747 return
1748 def start_url(self, url):
1749 """Start tracing url unpack process.
1750 This method is called by Fetch.unpack before the fetcher-specific unpack
1751 method starts, and it may receive nested calls by gitsm and npmsw
1752 fetchers.
1753 """
1754 return
1755 def unpack(self, unpack_type, destdir):
1756 """
1757 Set unpack_type and destdir for current url.
1758 This method is called by the fetcher-specific unpack method after url
1759 tracing started.
1760 """
1761 return
1762 def finish_url(self, url):
1763 """Finish tracing url unpack process and update the file index.
1764 This method is called by Fetch.unpack after the fetcher-specific unpack
1765 method finished its job, and it may receive nested calls by gitsm
1766 and npmsw fetchers.
1767 """
1768 return
1769 def complete(self):
1770 """
1771 Finish tracing the Fetch.unpack process, and check if all nested
1772 Fecth.unpack calls (if any) have been completed; if so, save collected
1773 metadata.
1774 """
1775 return
1776
1777
1614class Fetch(object): 1778class Fetch(object):
1615 def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None): 1779 def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None):
1616 if localonly and cache: 1780 if localonly and cache:
1617 raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time") 1781 raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")
1618 1782
1619 if len(urls) == 0: 1783 if not urls:
1620 urls = d.getVar("SRC_URI").split() 1784 urls = d.getVar("SRC_URI").split()
1621 self.urls = urls 1785 self.urls = urls
1622 self.d = d 1786 self.d = d
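
DummyUnpackTracer defines the contract; pointing BB_UNPACK_TRACER_CLASS at a dotted path such as the hypothetical "mymodule.LoggingUnpackTracer" swaps in a real implementation, instantiated via importlib in the next hunk. A minimal concrete tracer:

class LoggingUnpackTracer:
    """Hypothetical tracer: records which SRC_URI entry unpacked what,
    and where. A real tracer must also tolerate the nested Fetch.unpack
    calls made by the gitsm and npmsw fetchers."""
    def start(self, unpackdir, urldata_dict, d):
        self.unpackdir = unpackdir
        self.current = None
        self.index = {}

    def start_url(self, url):
        self.current = url

    def unpack(self, unpack_type, destdir):
        self.index.setdefault(self.current, []).append((unpack_type, destdir))

    def finish_url(self, url):
        self.current = None

    def complete(self):
        for url, entries in self.index.items():
            print("%s -> %s" % (url, entries))
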
@@ -1631,10 +1795,30 @@ class Fetch(object):
         if key in urldata_cache:
             self.ud = urldata_cache[key]
 
+        # the unpack_tracer object needs to be made available to possible nested
+        # Fetch instances (when those are created by gitsm and npmsw fetchers)
+        # so we set it as a global variable
+        global unpack_tracer
+        try:
+            unpack_tracer
+        except NameError:
+            class_path = d.getVar("BB_UNPACK_TRACER_CLASS")
+            if class_path:
+                # use user-defined unpack tracer class
+                import importlib
+                module_name, _, class_name = class_path.rpartition(".")
+                module = importlib.import_module(module_name)
+                class_ = getattr(module, class_name)
+                unpack_tracer = class_()
+            else:
+                # fall back to the dummy/abstract class
+                unpack_tracer = DummyUnpackTracer()
+
         for url in urls:
             if url not in self.ud:
                 try:
                     self.ud[url] = FetchData(url, d, localonly)
+                    self.ud[url].unpack_tracer = unpack_tracer
                 except NonLocalMethod:
                     if localonly:
                         self.ud[url] = None
@@ -1648,7 +1832,7 @@ class Fetch(object):
             self.ud[url] = FetchData(url, self.d)
 
         self.ud[url].setup_localpath(self.d)
-        return self.d.expand(self.ud[url].localpath)
+        return self.ud[url].localpath
 
     def localpaths(self):
         """
@@ -1673,6 +1857,7 @@ class Fetch(object):
         network = self.d.getVar("BB_NO_NETWORK")
         premirroronly = bb.utils.to_boolean(self.d.getVar("BB_FETCH_PREMIRRORONLY"))
 
+        checksum_missing_messages = []
         for u in urls:
             ud = self.ud[u]
             ud.setup_localpath(self.d)
@@ -1684,7 +1869,6 @@ class Fetch(object):
 
             try:
                 self.d.setVar("BB_NO_NETWORK", network)
-
                 if m.verify_donestamp(ud, self.d) and not m.need_update(ud, self.d):
                     done = True
                 elif m.try_premirror(ud, self.d):
@@ -1701,23 +1885,28 @@ class Fetch(object):
                             logger.debug(str(e))
                             done = False
 
+                d = self.d
                 if premirroronly:
-                    self.d.setVar("BB_NO_NETWORK", "1")
+                    # Only disable the network in a copy
+                    d = bb.data.createCopy(self.d)
+                    d.setVar("BB_NO_NETWORK", "1")
 
                 firsterr = None
-                verified_stamp = m.verify_donestamp(ud, self.d)
-                if not done and (not verified_stamp or m.need_update(ud, self.d)):
+                verified_stamp = False
+                if done:
+                    verified_stamp = m.verify_donestamp(ud, d)
+                if not done and (not verified_stamp or m.need_update(ud, d)):
                     try:
-                        if not trusted_network(self.d, ud.url):
+                        if not trusted_network(d, ud.url):
                             raise UntrustedUrl(ud.url)
                         logger.debug("Trying Upstream")
-                        m.download(ud, self.d)
+                        m.download(ud, d)
                         if hasattr(m, "build_mirror_data"):
-                            m.build_mirror_data(ud, self.d)
+                            m.build_mirror_data(ud, d)
                         done = True
                         # early checksum verify, so that if checksum mismatched,
                         # fetcher still have chance to fetch from mirror
-                        m.update_donestamp(ud, self.d)
+                        m.update_donestamp(ud, d)
 
                     except bb.fetch2.NetworkAccess:
                         raise
@@ -1735,18 +1924,18 @@ class Fetch(object):
                         logger.debug(str(e))
                         firsterr = e
                         # Remove any incomplete fetch
-                        if not verified_stamp:
-                            m.clean(ud, self.d)
+                        if not verified_stamp and m.cleanup_upon_failure():
+                            m.clean(ud, d)
                         logger.debug("Trying MIRRORS")
-                        mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
-                        done = m.try_mirrors(self, ud, self.d, mirrors)
+                        mirrors = mirror_from_string(d.getVar('MIRRORS'))
+                        done = m.try_mirrors(self, ud, d, mirrors)
 
-                    if not done or not m.done(ud, self.d):
+                    if not done or not m.done(ud, d):
                         if firsterr:
                             logger.error(str(firsterr))
                         raise FetchError("Unable to fetch URL from any source.", u)
 
-                    m.update_donestamp(ud, self.d)
+                    m.update_donestamp(ud, d)
 
                 except IOError as e:
                     if e.errno in [errno.ESTALE]:
@@ -1754,17 +1943,28 @@ class Fetch(object):
                         raise ChecksumError("Stale Error Detected")
 
                 except BBFetchException as e:
-                    if isinstance(e, ChecksumError):
+                    if isinstance(e, NoChecksumError):
+                        (message, _) = e.args
+                        checksum_missing_messages.append(message)
+                        continue
+                    elif isinstance(e, ChecksumError):
                         logger.error("Checksum failure fetching %s" % u)
                     raise
 
                 finally:
                     if ud.lockfile:
                         bb.utils.unlockfile(lf)
+        if checksum_missing_messages:
+            logger.error("Missing SRC_URI checksum, please add those to the recipe: \n%s", "\n".join(checksum_missing_messages))
+            raise BBFetchException("There were missing checksums in the recipe")
 
     def checkstatus(self, urls=None):
         """
-        Check all urls exist upstream
+        Check all URLs exist upstream.
+
+        Returns None if the URLs exist, raises FetchError if the check wasn't
+        successful but there wasn't an error (such as file not found), and
+        raises other exceptions in error cases.
         """
 
         if not urls:
@@ -1787,7 +1987,7 @@ class Fetch(object):
                 ret = m.try_mirrors(self, ud, self.d, mirrors, True)
 
             if not ret:
-                raise FetchError("URL %s doesn't work" % u, u)
+                raise FetchError("URL doesn't work", u)
 
     def unpack(self, root, urls=None):
         """
@@ -1797,6 +1997,8 @@ class Fetch(object):
         if not urls:
             urls = self.urls
 
+        unpack_tracer.start(root, self.ud, self.d)
+
         for u in urls:
             ud = self.ud[u]
             ud.setup_localpath(self.d)
@@ -1804,11 +2006,15 @@ class Fetch(object):
             if ud.lockfile:
                 lf = bb.utils.lockfile(ud.lockfile)
 
+            unpack_tracer.start_url(u)
             ud.method.unpack(ud, root, self.d)
+            unpack_tracer.finish_url(u)
 
             if ud.lockfile:
                 bb.utils.unlockfile(lf)
 
+        unpack_tracer.complete()
+
     def clean(self, urls=None):
         """
         Clean files that the fetcher gets or places
@@ -1908,6 +2114,10 @@ from . import repo
 from . import clearcase
 from . import npm
 from . import npmsw
+from . import az
+from . import crate
+from . import gcp
+from . import gomod
 
 methods.append(local.Local())
 methods.append(wget.Wget())
@@ -1927,3 +2137,8 @@ methods.append(repo.Repo())
 methods.append(clearcase.ClearCase())
 methods.append(npm.Npm())
 methods.append(npmsw.NpmShrinkWrap())
+methods.append(az.Az())
+methods.append(crate.Crate())
+methods.append(gcp.GCP())
+methods.append(gomod.GoMod())
+methods.append(gomod.GoModGit())
diff --git a/bitbake/lib/bb/fetch2/az.py b/bitbake/lib/bb/fetch2/az.py
new file mode 100644
index 0000000000..1d3664f213
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/az.py
@@ -0,0 +1,98 @@
1"""
2BitBake 'Fetch' Azure Storage implementation
3
4"""
5
6# Copyright (C) 2021 Alejandro Hernandez Samaniego
7#
8# Based on bb.fetch2.wget:
9# Copyright (C) 2003, 2004 Chris Larson
10#
11# SPDX-License-Identifier: GPL-2.0-only
12#
13# Based on functions from the base bb module, Copyright 2003 Holger Schurig
14
15import shlex
16import os
17import bb
18from bb.fetch2 import FetchError
19from bb.fetch2 import logger
20from bb.fetch2.wget import Wget
21
22
23class Az(Wget):
24
25 def supports(self, ud, d):
26 """
27 Check to see if a given url can be fetched from Azure Storage
28 """
29 return ud.type in ['az']
30
31
32 def checkstatus(self, fetch, ud, d, try_again=True):
33
34 # checkstatus discards parameters either way, we need to do this before adding the SAS
35 ud.url = ud.url.replace('az://','https://').split(';')[0]
36
37 az_sas = d.getVar('AZ_SAS')
38 if az_sas and az_sas not in ud.url:
39 if not az_sas.startswith('?'):
40 raise FetchError("When using AZ_SAS, it must start with a '?' character to mark the start of the query-parameters.")
41 ud.url += az_sas
42
43 return Wget.checkstatus(self, fetch, ud, d, try_again)
44
45 # Override download method, include retries
46 def download(self, ud, d, retries=3):
47 """Fetch urls"""
48
49 # If were reaching the account transaction limit we might be refused a connection,
50 # retrying allows us to avoid false negatives since the limit changes over time
51 fetchcmd = self.basecmd + ' --retry-connrefused --waitretry=5'
52
53 # We need to provide a localpath to avoid wget using the SAS
54 # ud.localfile either has the downloadfilename or ud.path
55 localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile)
56 bb.utils.mkdirhier(os.path.dirname(localpath))
57 fetchcmd += " -O %s" % shlex.quote(localpath)
58
59
60 if ud.user and ud.pswd:
61 fetchcmd += " --user=%s --password=%s --auth-no-challenge" % (ud.user, ud.pswd)
62
63 # Check if a Shared Access Signature was given and use it
64 az_sas = d.getVar('AZ_SAS')
65
66 if az_sas:
67 if not az_sas.startswith('?'):
68 raise FetchError("When using AZ_SAS, it must start with a '?' character to mark the start of the query-parameters.")
69 azuri = '%s%s%s%s' % ('https://', ud.host, ud.path, az_sas)
70 else:
71 azuri = '%s%s%s' % ('https://', ud.host, ud.path)
72
73 dldir = d.getVar("DL_DIR")
74 if os.path.exists(ud.localpath):
75 # file exists, but we didnt complete it.. trying again.
76 fetchcmd += " -c -P %s '%s'" % (dldir, azuri)
77 else:
78 fetchcmd += " -P %s '%s'" % (dldir, azuri)
79
80 try:
81 self._runwget(ud, d, fetchcmd, False)
82 except FetchError as e:
83 # Azure fails on handshake sometimes when using wget after some stress, producing a
84 # FetchError from the fetcher, if the artifact exists retyring should succeed
85 if 'Unable to establish SSL connection' in str(e):
86 logger.debug2('Unable to establish SSL connection: Retries remaining: %s, Retrying...' % retries)
87 self.download(ud, d, retries -1)
88
89 # Sanity check since wget can pretend it succeed when it didn't
90 # Also, this used to happen if sourceforge sent us to the mirror page
91 if not os.path.exists(ud.localpath):
92 raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (azuri, ud.localpath), azuri)
93
94 if os.path.getsize(ud.localpath) == 0:
95 os.remove(ud.localpath)
96 raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (azuri), azuri)
97
98 return True
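
The fetcher rewrites az:// to https:// and appends AZ_SAS (which must keep its leading '?') to the request URL only, so the SAS never leaks into the on-disk filename. The URL composition in isolation (build_azuri is hypothetical, as are the account and token):

def build_azuri(host, path, az_sas=None):
    # AZ_SAS must carry its leading '?' so it can be appended verbatim.
    if az_sas:
        if not az_sas.startswith('?'):
            raise ValueError("AZ_SAS must start with '?'")
        return 'https://%s%s%s' % (host, path, az_sas)
    return 'https://%s%s' % (host, path)

# Hypothetical account, container and token:
print(build_azuri("myaccount.blob.core.windows.net", "/container/file.tar.gz", "?sv=2022-11-02&sig=abc"))
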
diff --git a/bitbake/lib/bb/fetch2/clearcase.py b/bitbake/lib/bb/fetch2/clearcase.py
index 1a9c863769..17500daf95 100644
--- a/bitbake/lib/bb/fetch2/clearcase.py
+++ b/bitbake/lib/bb/fetch2/clearcase.py
@@ -108,7 +108,7 @@ class ClearCase(FetchMethod):
                                                 ud.module.replace("/", "."),
                                                 ud.label.replace("/", "."))
 
-        ud.viewname = "%s-view%s" % (ud.identifier, d.getVar("DATETIME", d, True))
+        ud.viewname = "%s-view%s" % (ud.identifier, d.getVar("DATETIME"))
         ud.csname = "%s-config-spec" % (ud.identifier)
         ud.ccasedir = os.path.join(d.getVar("DL_DIR"), ud.type)
         ud.viewdir = os.path.join(ud.ccasedir, ud.viewname)
@@ -130,8 +130,6 @@ class ClearCase(FetchMethod):
         self.debug("configspecfile = %s" % ud.configspecfile)
         self.debug("localfile      = %s" % ud.localfile)
 
-        ud.localfile = os.path.join(d.getVar("DL_DIR"), ud.localfile)
-
     def _build_ccase_command(self, ud, command):
         """
         Build up a commandline based on ud
@@ -196,7 +194,7 @@ class ClearCase(FetchMethod):
 
     def need_update(self, ud, d):
         if ("LATEST" in ud.label) or (ud.customspec and "LATEST" in ud.customspec):
-            ud.identifier += "-%s" % d.getVar("DATETIME",d, True)
+            ud.identifier += "-%s" % d.getVar("DATETIME")
             return True
         if os.path.exists(ud.localpath):
             return False
diff --git a/bitbake/lib/bb/fetch2/crate.py b/bitbake/lib/bb/fetch2/crate.py
new file mode 100644
index 0000000000..e611736f06
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/crate.py
@@ -0,0 +1,150 @@
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+"""
+BitBake 'Fetch' implementation for crates.io
+"""
+
+# Copyright (C) 2016 Doug Goldstein
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+
+import hashlib
+import json
+import os
+import subprocess
+import bb
+from bb.fetch2 import logger, subprocess_setup, UnpackError
+from bb.fetch2.wget import Wget
+
+
+class Crate(Wget):
+
+    """Class to fetch crates via wget"""
+
+    def _cargo_bitbake_path(self, rootdir):
+        return os.path.join(rootdir, "cargo_home", "bitbake")
+
+    def supports(self, ud, d):
+        """
+        Check to see if a given url is for this fetcher
+        """
+        return ud.type in ['crate']
+
+    def recommends_checksum(self, urldata):
+        return True
+
+    def urldata_init(self, ud, d):
+        """
+        Sets up to download the respective crate from crates.io
+        """
+
+        if ud.type == 'crate':
+            self._crate_urldata_init(ud, d)
+
+        super(Crate, self).urldata_init(ud, d)
+
+    def _crate_urldata_init(self, ud, d):
+        """
+        Sets up the download for a crate
+        """
+
+        # URL syntax is: crate://NAME/VERSION
+        # break the URL apart by /
+        parts = ud.url.split('/')
+        if len(parts) < 5:
+            raise bb.fetch2.ParameterError("Invalid URL: Must be crate://HOST/NAME/VERSION", ud.url)
+
+        # version is expected to be the last token
+        # but ignore possible url parameters which will be used
+        # by the top fetcher class
+        version = parts[-1].split(";")[0]
+        # second to last field is name
+        name = parts[-2]
+        # host (this is to allow custom crate registries to be specified)
+        host = '/'.join(parts[2:-2])
+
+        # if using upstream just fix it up nicely
+        if host == 'crates.io':
+            host = 'crates.io/api/v1/crates'
+
+        ud.url = "https://%s/%s/%s/download" % (host, name, version)
+        ud.versionsurl = "https://%s/%s/versions" % (host, name)
+        ud.parm['downloadfilename'] = "%s-%s.crate" % (name, version)
+        if 'name' not in ud.parm:
+            ud.parm['name'] = '%s-%s' % (name, version)
+
+        logger.debug2("Fetching %s to %s" % (ud.url, ud.parm['downloadfilename']))
+
+    def unpack(self, ud, rootdir, d):
+        """
+        Uses the crate to build the necessary paths for cargo to utilize it
+        """
+        if ud.type == 'crate':
+            return self._crate_unpack(ud, rootdir, d)
+        else:
+            super(Crate, self).unpack(ud, rootdir, d)
+
+    def _crate_unpack(self, ud, rootdir, d):
+        """
+        Unpacks a crate
+        """
+        thefile = ud.localpath
+
+        # possible metadata we need to write out
+        metadata = {}
+
+        # change to the rootdir to unpack but save the old working dir
+        save_cwd = os.getcwd()
+        os.chdir(rootdir)
+
+        bp = d.getVar('BP')
+        if bp == ud.parm.get('name'):
+            cmd = "tar -xz --no-same-owner -f %s" % thefile
+            ud.unpack_tracer.unpack("crate-extract", rootdir)
+        else:
+            cargo_bitbake = self._cargo_bitbake_path(rootdir)
+            ud.unpack_tracer.unpack("cargo-extract", cargo_bitbake)
+
+            cmd = "tar -xz --no-same-owner -f %s -C %s" % (thefile, cargo_bitbake)
+
+            # ensure we've got these paths made
+            bb.utils.mkdirhier(cargo_bitbake)
+
+            # generate metadata necessary
+            with open(thefile, 'rb') as f:
+                # get the SHA256 of the original tarball
+                tarhash = hashlib.sha256(f.read()).hexdigest()
+
+            metadata['files'] = {}
+            metadata['package'] = tarhash
+
+        path = d.getVar('PATH')
+        if path:
+            cmd = "PATH=\"%s\" %s" % (path, cmd)
+        bb.note("Unpacking %s to %s/" % (thefile, os.getcwd()))
+
+        ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
+
+        os.chdir(save_cwd)
+
+        if ret != 0:
+            raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), ud.url)
+
+        # if we have metadata to write out..
+        if len(metadata) > 0:
+            cratepath = os.path.splitext(os.path.basename(thefile))[0]
+            bbpath = self._cargo_bitbake_path(rootdir)
+            mdfile = '.cargo-checksum.json'
+            mdpath = os.path.join(bbpath, cratepath, mdfile)
+            with open(mdpath, "w") as f:
+                json.dump(metadata, f)
+
+    def latest_versionstring(self, ud, d):
+        from functools import cmp_to_key
+        json_data = json.loads(self._fetch_index(ud.versionsurl, ud, d))
+        versions = [(0, i["num"], "") for i in json_data["versions"]]
+        versions = sorted(versions, key=cmp_to_key(bb.utils.vercmp))
+
+        return (versions[-1][1], "")
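
A crate:// URL is split into host, name and version, then rewritten to the registry's download endpoint, with crates.io expanded to its API path. The mapping in isolation (crate_download_url is hypothetical):

def crate_download_url(url):
    # crate://HOST/NAME/VERSION; anything after ';' belongs to the
    # generic fetcher and is ignored here.
    parts = url.split('/')
    if len(parts) < 5:
        raise ValueError("Must be crate://HOST/NAME/VERSION")
    version = parts[-1].split(";")[0]
    name = parts[-2]
    host = '/'.join(parts[2:-2])
    if host == 'crates.io':
        host = 'crates.io/api/v1/crates'
    return "https://%s/%s/%s/download" % (host, name, version)

print(crate_download_url("crate://crates.io/glob/0.2.11"))
# -> https://crates.io/api/v1/crates/glob/0.2.11/download
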
diff --git a/bitbake/lib/bb/fetch2/gcp.py b/bitbake/lib/bb/fetch2/gcp.py
new file mode 100644
index 0000000000..86546d40bf
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/gcp.py
@@ -0,0 +1,102 @@
1"""
2BitBake 'Fetch' implementation for Google Cloup Platform Storage.
3
4Class for fetching files from Google Cloud Storage using the
5Google Cloud Storage Python Client. The GCS Python Client must
6be correctly installed, configured and authenticated prior to use.
7Additionally, gsutil must also be installed.
8
9"""
10
11# Copyright (C) 2023, Snap Inc.
12#
13# Based in part on bb.fetch2.s3:
14# Copyright (C) 2017 Andre McCurdy
15#
16# SPDX-License-Identifier: GPL-2.0-only
17#
18# Based on functions from the base bb module, Copyright 2003 Holger Schurig
19
20import os
21import bb
22import urllib.parse, urllib.error
23from bb.fetch2 import FetchMethod
24from bb.fetch2 import FetchError
25from bb.fetch2 import logger
26
27class GCP(FetchMethod):
28 """
29 Class to fetch urls via GCP's Python API.
30 """
31 def __init__(self):
32 self.gcp_client = None
33
34 def supports(self, ud, d):
35 """
36 Check to see if a given url can be fetched with GCP.
37 """
38 return ud.type in ['gs']
39
40 def recommends_checksum(self, urldata):
41 return True
42
43 def urldata_init(self, ud, d):
44 if 'downloadfilename' in ud.parm:
45 ud.basename = ud.parm['downloadfilename']
46 else:
47 ud.basename = os.path.basename(ud.path)
48
49 ud.localfile = ud.basename
50
51 def get_gcp_client(self):
52 from google.cloud import storage
53 self.gcp_client = storage.Client(project=None)
54
55 def download(self, ud, d):
56 """
57 Fetch urls using the GCP API.
58 Assumes localpath was called first.
59 """
60 from google.api_core.exceptions import NotFound
61 logger.debug2(f"Trying to download gs://{ud.host}{ud.path} to {ud.localpath}")
62 if self.gcp_client is None:
63 self.get_gcp_client()
64
65 bb.fetch2.check_network_access(d, "blob.download_to_filename", f"gs://{ud.host}{ud.path}")
66
67 # Path sometimes has leading slash, so strip it
68 path = ud.path.lstrip("/")
69 blob = self.gcp_client.bucket(ud.host).blob(path)
70 try:
71 blob.download_to_filename(ud.localpath)
72 except NotFound:
73 raise FetchError("The GCP API threw a NotFound exception")
74
75 # Additional sanity checks copied from the wget class (although there
76 # are no known issues which mean these are required, treat the GCP API
77 # tool with a little healthy suspicion).
78 if not os.path.exists(ud.localpath):
79 raise FetchError(f"The GCP API returned success for gs://{ud.host}{ud.path} but {ud.localpath} doesn't exist?!")
80
81 if os.path.getsize(ud.localpath) == 0:
82 os.remove(ud.localpath)
83 raise FetchError(f"The downloaded file for gs://{ud.host}{ud.path} resulted in a zero size file?! Deleting and failing since this isn't right.")
84
85 return True
86
87 def checkstatus(self, fetch, ud, d):
88 """
89 Check the status of a URL.
90 """
91 logger.debug2(f"Checking status of gs://{ud.host}{ud.path}")
92 if self.gcp_client is None:
93 self.get_gcp_client()
94
95 bb.fetch2.check_network_access(d, "gcp_client.bucket(ud.host).blob(path).exists()", f"gs://{ud.host}{ud.path}")
96
97 # Path sometimes has leading slash, so strip it
98 path = ud.path.lstrip("/")
99 if self.gcp_client.bucket(ud.host).blob(path).exists() == False:
100 raise FetchError(f"The GCP API reported that gs://{ud.host}{ud.path} does not exist")
101 else:
102 return True
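
The fetcher leans on the google-cloud-storage client with ambient credentials via storage.Client(project=None). The same calls outside BitBake, assuming the library is installed and authenticated (fetch_gs is hypothetical):

from google.cloud import storage

def fetch_gs(bucket_name, blob_path, dest):
    client = storage.Client(project=None)   # ambient/default credentials
    blob = client.bucket(bucket_name).blob(blob_path.lstrip("/"))
    if not blob.exists():
        raise FileNotFoundError("gs://%s/%s" % (bucket_name, blob_path.lstrip("/")))
    blob.download_to_filename(dest)

# fetch_gs("my-bucket", "downloads/file.tar.gz", "/tmp/file.tar.gz")
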
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py
index e3ba80a3f5..14ec45a3f6 100644
--- a/bitbake/lib/bb/fetch2/git.py
+++ b/bitbake/lib/bb/fetch2/git.py
@@ -9,15 +9,6 @@ Supported SRC_URI options are:
 - branch
    The git branch to retrieve from. The default is "master"
 
-   This option also supports multiple branch fetching, with branches
-   separated by commas. In multiple branches case, the name option
-   must have the same number of names to match the branches, which is
-   used to specify the SRC_REV for the branch
-   e.g:
-   SRC_URI="git://some.host/somepath;branch=branchX,branchY;name=nameX,nameY"
-   SRCREV_nameX = "xxxxxxxxxxxxxxxxxxxx"
-   SRCREV_nameY = "YYYYYYYYYYYYYYYYYYYY"
-
 - tag
    The git tag to retrieve. The default is "master"
 
@@ -44,13 +35,27 @@ Supported SRC_URI options are:
 
 - nobranch
    Don't check the SHA validation for branch. set this option for the recipe
-   referring to commit which is valid in tag instead of branch.
+   referring to commit which is valid in any namespace (branch, tag, ...)
+   instead of branch.
    The default is "0", set nobranch=1 if needed.
 
+- subpath
+   Limit the checkout to a specific subpath of the tree.
+   By default, checkout the whole tree, set subpath=<path> if needed
+
+- destsuffix
+   The name of the path in which to place the checkout.
+   By default, the path is git/, set destsuffix=<suffix> if needed
+
 - usehead
    For local git:// urls to use the current branch HEAD as the revision for use with
    AUTOREV. Implies nobranch.
 
+- lfs
+   Enable the checkout to use LFS for large files. This will download all LFS files
+   in the download step, as the unpack step does not have network access.
+   The default is "1", set lfs=0 to skip.
+
 """
 
 # Copyright (C) 2005 Richard Purdie
@@ -64,15 +69,22 @@ import fnmatch
 import os
 import re
 import shlex
+import shutil
 import subprocess
 import tempfile
+import urllib
 import bb
 import bb.progress
+from contextlib import contextmanager
 from bb.fetch2 import FetchMethod
 from bb.fetch2 import runfetchcmd
 from bb.fetch2 import logger
+from bb.fetch2 import trusted_network
 
 
+sha1_re = re.compile(r'^[0-9a-f]{40}$')
+slash_re = re.compile(r"/+")
+
 class GitProgressHandler(bb.progress.LineFilterProgressHandler):
     """Extract progress information from git output"""
     def __init__(self, d):
@@ -130,6 +142,9 @@ class Git(FetchMethod):
     def supports_checksum(self, urldata):
         return False
 
+    def cleanup_upon_failure(self):
+        return False
+
     def urldata_init(self, ud, d):
         """
         init git specific variable within url data
@@ -141,6 +156,11 @@ class Git(FetchMethod):
             ud.proto = 'file'
         else:
             ud.proto = "git"
+        if ud.host == "github.com" and ud.proto == "git":
+            # github stopped supporting git protocol
+            # https://github.blog/2021-09-01-improving-git-protocol-security-github/#no-more-unauthenticated-git
+            ud.proto = "https"
+            bb.warn("URL: %s uses git protocol which is no longer supported by github. Please change to ;protocol=https in the url." % ud.url)
 
         if not ud.proto in ('git', 'file', 'ssh', 'http', 'https', 'rsync'):
             raise bb.fetch2.ParameterError("Invalid protocol type", ud.url)
@@ -162,18 +182,25 @@ class Git(FetchMethod):
         ud.bareclone = ud.parm.get("bareclone","0") == "1"
         if ud.bareclone:
             ud.nocheckout = 1
-
-        ud.unresolvedrev = {}
-        branches = ud.parm.get("branch", "master").split(',')
-        if len(branches) != len(ud.names):
-            raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url)
 
-        ud.cloneflags = "-s -n"
+        ud.unresolvedrev = ""
+        ud.branch = ud.parm.get("branch", "")
+        if not ud.branch and not ud.nobranch:
+            raise bb.fetch2.ParameterError("The url must set a branch parameter or set nobranch=1.", ud.url)
+
+        ud.noshared = d.getVar("BB_GIT_NOSHARED") == "1"
+
+        ud.cloneflags = "-n"
+        if not ud.noshared:
+            ud.cloneflags += " -s"
         if ud.bareclone:
             ud.cloneflags += " --mirror"
 
+        ud.shallow_skip_fast = False
         ud.shallow = d.getVar("BB_GIT_SHALLOW") == "1"
         ud.shallow_extra_refs = (d.getVar("BB_GIT_SHALLOW_EXTRA_REFS") or "").split()
+        if 'tag' in ud.parm:
+            ud.shallow_extra_refs.append("refs/tags/" + ud.parm['tag'])
 
         depth_default = d.getVar("BB_GIT_SHALLOW_DEPTH")
         if depth_default is not None:
@@ -190,32 +217,27 @@ class Git(FetchMethod):
190 217
191 revs_default = d.getVar("BB_GIT_SHALLOW_REVS") 218 revs_default = d.getVar("BB_GIT_SHALLOW_REVS")
192 ud.shallow_revs = [] 219 ud.shallow_revs = []
193 ud.branches = {} 220
194 for pos, name in enumerate(ud.names): 221 ud.unresolvedrev = ud.branch
195 branch = branches[pos] 222
196 ud.branches[name] = branch 223 shallow_depth = d.getVar("BB_GIT_SHALLOW_DEPTH_%s" % ud.name)
197 ud.unresolvedrev[name] = branch 224 if shallow_depth is not None:
198 225 try:
199 shallow_depth = d.getVar("BB_GIT_SHALLOW_DEPTH_%s" % name) 226 shallow_depth = int(shallow_depth or 0)
200 if shallow_depth is not None: 227 except ValueError:
201 try: 228 raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (ud.name, shallow_depth))
202 shallow_depth = int(shallow_depth or 0) 229 else:
203 except ValueError: 230 if shallow_depth < 0:
204 raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (name, shallow_depth)) 231 raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (ud.name, shallow_depth))
205 else: 232 ud.shallow_depths[ud.name] = shallow_depth
206 if shallow_depth < 0: 233
207 raise bb.fetch2.FetchError("Invalid depth for BB_GIT_SHALLOW_DEPTH_%s: %s" % (name, shallow_depth)) 234 revs = d.getVar("BB_GIT_SHALLOW_REVS_%s" % ud.name)
208 ud.shallow_depths[name] = shallow_depth 235 if revs is not None:
209 236 ud.shallow_revs.extend(revs.split())
210 revs = d.getVar("BB_GIT_SHALLOW_REVS_%s" % name) 237 elif revs_default is not None:
211 if revs is not None: 238 ud.shallow_revs.extend(revs_default.split())
212 ud.shallow_revs.extend(revs.split()) 239
213 elif revs_default is not None: 240 if ud.shallow and not ud.shallow_revs and ud.shallow_depths[ud.name] == 0:
214 ud.shallow_revs.extend(revs_default.split())
215
216 if (ud.shallow and
217 not ud.shallow_revs and
218 all(ud.shallow_depths[n] == 0 for n in ud.names)):
219 # Shallow disabled for this URL 241 # Shallow disabled for this URL
220 ud.shallow = False 242 ud.shallow = False
221 243
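A minimal configuration sketch using only the variables handled above (values illustrative; note that a depth and BB_GIT_SHALLOW_REVS cannot be combined, as enforced later in clone_shallow_local):

    BB_GIT_SHALLOW = "1"
    BB_GIT_SHALLOW_DEPTH = "1"
    # or, instead of a depth, cut away history reachable from given revisions:
    # BB_GIT_SHALLOW_REVS = "<rev>"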
@@ -224,10 +246,9 @@ class Git(FetchMethod):
224 # rev of this repository. This will get resolved into a revision 246 # rev of this repository. This will get resolved into a revision
225 # later. If an actual revision happens to have also been provided 247 # later. If an actual revision happens to have also been provided
226 # then this setting will be overridden. 248 # then this setting will be overridden.
227 for name in ud.names: 249 ud.unresolvedrev = 'HEAD'
228 ud.unresolvedrev[name] = 'HEAD'
229 250
230 ud.basecmd = d.getVar("FETCHCMD_git") or "git -c core.fsyncobjectfiles=0" 251 ud.basecmd = d.getVar("FETCHCMD_git") or "git -c gc.autoDetach=false -c core.pager=cat -c safe.bareRepository=all -c clone.defaultRemoteName=origin"
231 252
232 write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0" 253 write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0"
233 ud.write_tarballs = write_tarballs != "0" or ud.rebaseable 254 ud.write_tarballs = write_tarballs != "0" or ud.rebaseable
@@ -235,24 +256,22 @@ class Git(FetchMethod):
235 256
236 ud.setup_revisions(d) 257 ud.setup_revisions(d)
237 258
238 for name in ud.names: 259 # Ensure any revision that doesn't look like a SHA-1 is translated into one
239 # Ensure anything that doesn't look like a sha256 checksum/revision is translated into one 260 if not sha1_re.match(ud.revision or ''):
240 if not ud.revisions[name] or len(ud.revisions[name]) != 40 or (False in [c in "abcdef0123456789" for c in ud.revisions[name]]): 261 if ud.revision:
241 if ud.revisions[name]: 262 ud.unresolvedrev = ud.revision
242 ud.unresolvedrev[name] = ud.revisions[name] 263 ud.revision = self.latest_revision(ud, d, ud.name)
243 ud.revisions[name] = self.latest_revision(ud, d, name)
244 264
245 gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.').replace(' ','_')) 265 gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.').replace(' ','_').replace('(', '_').replace(')', '_'))
246 if gitsrcname.startswith('.'): 266 if gitsrcname.startswith('.'):
247 gitsrcname = gitsrcname[1:] 267 gitsrcname = gitsrcname[1:]
248 268
249 # for rebaseable git repo, it is necessary to keep mirror tar ball 269 # For a rebaseable git repo, it is necessary to keep a mirror tar ball
250 # per revision, so that even the revision disappears from the 270 # per revision, so that even if the revision disappears from the
251 # upstream repo in the future, the mirror will remain intact and still 271 # upstream repo in the future, the mirror will remain intact and still
252 # contains the revision 272 # contain the revision
253 if ud.rebaseable: 273 if ud.rebaseable:
254 for name in ud.names: 274 gitsrcname = gitsrcname + '_' + ud.revision
255 gitsrcname = gitsrcname + '_' + ud.revisions[name]
256 275
257 dl_dir = d.getVar("DL_DIR") 276 dl_dir = d.getVar("DL_DIR")
258 gitdir = d.getVar("GITDIR") or (dl_dir + "/git2") 277 gitdir = d.getVar("GITDIR") or (dl_dir + "/git2")
@@ -270,15 +289,14 @@ class Git(FetchMethod):
270 if ud.shallow_revs: 289 if ud.shallow_revs:
271 tarballname = "%s_%s" % (tarballname, "_".join(sorted(ud.shallow_revs))) 290 tarballname = "%s_%s" % (tarballname, "_".join(sorted(ud.shallow_revs)))
272 291
273 for name, revision in sorted(ud.revisions.items()): 292 tarballname = "%s_%s" % (tarballname, ud.revision[:7])
274 tarballname = "%s_%s" % (tarballname, ud.revisions[name][:7]) 293 depth = ud.shallow_depths[ud.name]
275 depth = ud.shallow_depths[name] 294 if depth:
276 if depth: 295 tarballname = "%s-%s" % (tarballname, depth)
277 tarballname = "%s-%s" % (tarballname, depth)
278 296
279 shallow_refs = [] 297 shallow_refs = []
280 if not ud.nobranch: 298 if not ud.nobranch:
281 shallow_refs.extend(ud.branches.values()) 299 shallow_refs.append(ud.branch)
282 if ud.shallow_extra_refs: 300 if ud.shallow_extra_refs:
283 shallow_refs.extend(r.replace('refs/heads/', '').replace('*', 'ALL') for r in ud.shallow_extra_refs) 301 shallow_refs.extend(r.replace('refs/heads/', '').replace('*', 'ALL') for r in ud.shallow_extra_refs)
284 if shallow_refs: 302 if shallow_refs:
@@ -293,16 +311,29 @@ class Git(FetchMethod):
293 return ud.clonedir 311 return ud.clonedir
294 312
295 def need_update(self, ud, d): 313 def need_update(self, ud, d):
296 return self.clonedir_need_update(ud, d) or self.shallow_tarball_need_update(ud) or self.tarball_need_update(ud) 314 return self.clonedir_need_update(ud, d) \
315 or self.shallow_tarball_need_update(ud) \
316 or self.tarball_need_update(ud) \
317 or self.lfs_need_update(ud, d)
297 318
298 def clonedir_need_update(self, ud, d): 319 def clonedir_need_update(self, ud, d):
299 if not os.path.exists(ud.clonedir): 320 if not os.path.exists(ud.clonedir):
300 return True 321 return True
301 if ud.shallow and ud.write_shallow_tarballs and self.clonedir_need_shallow_revs(ud, d): 322 if ud.shallow and ud.write_shallow_tarballs and self.clonedir_need_shallow_revs(ud, d):
302 return True 323 return True
303 for name in ud.names: 324 if not self._contains_ref(ud, d, ud.name, ud.clonedir):
304 if not self._contains_ref(ud, d, name, ud.clonedir): 325 return True
305 return True 326 return False
327
328 def lfs_need_update(self, ud, d):
329 if not self._need_lfs(ud):
330 return False
331
332 if self.clonedir_need_update(ud, d):
333 return True
334
335 if not self._lfs_objects_downloaded(ud, d, ud.clonedir):
336 return True
306 return False 337 return False
307 338
308 def clonedir_need_shallow_revs(self, ud, d): 339 def clonedir_need_shallow_revs(self, ud, d):
@@ -319,11 +350,28 @@ class Git(FetchMethod):
319 def tarball_need_update(self, ud): 350 def tarball_need_update(self, ud):
320 return ud.write_tarballs and not os.path.exists(ud.fullmirror) 351 return ud.write_tarballs and not os.path.exists(ud.fullmirror)
321 352
353 def update_mirror_links(self, ud, origud):
354 super().update_mirror_links(ud, origud)
355 # When using shallow mode, add a symlink to the original fullshallow
356 # path to ensure a valid symlink even in the `PREMIRRORS` case
357 if ud.shallow and not os.path.exists(origud.fullshallow):
358 self.ensure_symlink(ud.localpath, origud.fullshallow)
359
322 def try_premirror(self, ud, d): 360 def try_premirror(self, ud, d):
323 # If we don't do this, updating an existing checkout with only premirrors 361 # If we don't do this, updating an existing checkout with only premirrors
324 # is not possible 362 # is not possible
325 if bb.utils.to_boolean(d.getVar("BB_FETCH_PREMIRRORONLY")): 363 if bb.utils.to_boolean(d.getVar("BB_FETCH_PREMIRRORONLY")):
326 return True 364 return True
365 # If the url is not in a trusted network, that is, BB_NO_NETWORK is set to 0
366 # and BB_ALLOWED_NETWORKS does not contain the host that ud.url uses, then
367 # we need to try premirrors first as using upstream is destined to fail.
368 if not trusted_network(d, ud.url):
369 return True
370 # The following check ensures incremental fetches of existing downloads: the
371 # premirror might be stale and lack the new rev that is required, which would
372 # cause a total removal and a fresh clone. So if we can reach the network, we
373 # prefer upstream over the premirror, even though the premirror might already
374 # contain the new rev.
327 if os.path.exists(ud.clonedir): 375 if os.path.exists(ud.clonedir):
328 return False 376 return False
329 return True 377 return True
@@ -337,21 +385,76 @@ class Git(FetchMethod):
337 if ud.shallow and os.path.exists(ud.fullshallow) and self.need_update(ud, d): 385 if ud.shallow and os.path.exists(ud.fullshallow) and self.need_update(ud, d):
338 ud.localpath = ud.fullshallow 386 ud.localpath = ud.fullshallow
339 return 387 return
340 elif os.path.exists(ud.fullmirror) and not os.path.exists(ud.clonedir): 388 elif os.path.exists(ud.fullmirror) and self.need_update(ud, d):
341 bb.utils.mkdirhier(ud.clonedir) 389 if not os.path.exists(ud.clonedir):
342 runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=ud.clonedir) 390 bb.utils.mkdirhier(ud.clonedir)
343 391 runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=ud.clonedir)
392 else:
393 tmpdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR'))
394 runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=tmpdir)
395 output = runfetchcmd("%s remote" % ud.basecmd, d, quiet=True, workdir=ud.clonedir)
396 if 'mirror' in output:
397 runfetchcmd("%s remote rm mirror" % ud.basecmd, d, workdir=ud.clonedir)
398 runfetchcmd("%s remote add --mirror=fetch mirror %s" % (ud.basecmd, tmpdir), d, workdir=ud.clonedir)
399 fetch_cmd = "LANG=C %s fetch -f --update-head-ok --progress mirror " % (ud.basecmd)
400 runfetchcmd(fetch_cmd, d, workdir=ud.clonedir)
344 repourl = self._get_repo_url(ud) 401 repourl = self._get_repo_url(ud)
345 402
403 needs_clone = False
404 if os.path.exists(ud.clonedir):
405 # The directory may exist, but not be the top level of a bare git
406 # repository, in which case it needs to be deleted and re-cloned.
407 try:
408 # Since clones can be bare, use --absolute-git-dir instead of --show-toplevel
409 output = runfetchcmd("LANG=C %s rev-parse --absolute-git-dir" % ud.basecmd, d, workdir=ud.clonedir)
410 toplevel = output.rstrip()
411
412 if not bb.utils.path_is_descendant(toplevel, ud.clonedir):
413 logger.warning("Top level directory '%s' is not a descendant of '%s'. Re-cloning", toplevel, ud.clonedir)
414 needs_clone = True
415 except bb.fetch2.FetchError as e:
416 logger.warning("Unable to get top level for %s (not a git directory?): %s", ud.clonedir, e)
417 needs_clone = True
418 except FileNotFoundError as e:
419 logger.warning("%s", e)
420 needs_clone = True
421
422 if needs_clone:
423 shutil.rmtree(ud.clonedir)
424 else:
425 needs_clone = True
426
346 # If the repo still doesn't exist, fallback to cloning it 427 # If the repo still doesn't exist, fallback to cloning it
347 if not os.path.exists(ud.clonedir): 428 if needs_clone:
348 # We do this since git will use a "-l" option automatically for local urls where possible 429 # We do this since git will use a "-l" option automatically for local urls where possible,
430 # but it doesn't work when git/objects is a symlink; it only works when it is a directory.
349 if repourl.startswith("file://"): 431 if repourl.startswith("file://"):
350 repourl = repourl[7:] 432 repourl_path = repourl[7:]
433 objects = os.path.join(repourl_path, 'objects')
434 if os.path.isdir(objects) and not os.path.islink(objects):
435 repourl = repourl_path
351 clone_cmd = "LANG=C %s clone --bare --mirror %s %s --progress" % (ud.basecmd, shlex.quote(repourl), ud.clonedir) 436 clone_cmd = "LANG=C %s clone --bare --mirror %s %s --progress" % (ud.basecmd, shlex.quote(repourl), ud.clonedir)
352 if ud.proto.lower() != 'file': 437 if ud.proto.lower() != 'file':
353 bb.fetch2.check_network_access(d, clone_cmd, ud.url) 438 bb.fetch2.check_network_access(d, clone_cmd, ud.url)
354 progresshandler = GitProgressHandler(d) 439 progresshandler = GitProgressHandler(d)
440
441 # Try creating a fast initial shallow clone
442 # Enabling ud.shallow_skip_fast will skip this
443 # If the Git error "Server does not allow request for unadvertised object"
444 # occurs, shallow_skip_fast is enabled automatically.
445 # This may happen if the Git server does not allow the request
446 # or if the Git client has issues with this functionality.
447 if ud.shallow and not ud.shallow_skip_fast:
448 try:
449 self.clone_shallow_with_tarball(ud, d)
450 # When the shallow clone has succeeded, use the shallow tarball
451 ud.localpath = ud.fullshallow
452 return
453 except:
454 logger.warning("Creating fast initial shallow clone failed, trying an initial regular clone now.")
455
456 # When the fast initial shallow clone is skipped or has failed:
457 # Try again with an initial regular clone
355 runfetchcmd(clone_cmd, d, log=progresshandler) 458 runfetchcmd(clone_cmd, d, log=progresshandler)
356 459
357 # Update the checkout if needed 460 # Update the checkout if needed
@@ -361,7 +464,11 @@ class Git(FetchMethod):
361 runfetchcmd("%s remote rm origin" % ud.basecmd, d, workdir=ud.clonedir) 464 runfetchcmd("%s remote rm origin" % ud.basecmd, d, workdir=ud.clonedir)
362 465
363 runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=ud.clonedir) 466 runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=ud.clonedir)
364 fetch_cmd = "LANG=C %s fetch -f --progress %s refs/*:refs/*" % (ud.basecmd, shlex.quote(repourl)) 467
468 if ud.nobranch:
469 fetch_cmd = "LANG=C %s fetch -f --progress %s refs/*:refs/*" % (ud.basecmd, shlex.quote(repourl))
470 else:
471 fetch_cmd = "LANG=C %s fetch -f --progress %s refs/heads/*:refs/heads/* refs/tags/*:refs/tags/*" % (ud.basecmd, shlex.quote(repourl))
365 if ud.proto.lower() != 'file': 472 if ud.proto.lower() != 'file':
366 bb.fetch2.check_network_access(d, fetch_cmd, ud.url) 473 bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
367 progresshandler = GitProgressHandler(d) 474 progresshandler = GitProgressHandler(d)
@@ -375,138 +482,206 @@ class Git(FetchMethod):
375 if exc.errno != errno.ENOENT: 482 if exc.errno != errno.ENOENT:
376 raise 483 raise
377 484
378 for name in ud.names: 485 if not self._contains_ref(ud, d, ud.name, ud.clonedir):
379 if not self._contains_ref(ud, d, name, ud.clonedir): 486 raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revision, ud.branch))
380 raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revisions[name], ud.branches[name]))
381 487
382 if ud.shallow and ud.write_shallow_tarballs: 488 if ud.shallow and ud.write_shallow_tarballs:
383 missing_rev = self.clonedir_need_shallow_revs(ud, d) 489 missing_rev = self.clonedir_need_shallow_revs(ud, d)
384 if missing_rev: 490 if missing_rev:
385 raise bb.fetch2.FetchError("Unable to find revision %s even from upstream" % missing_rev) 491 raise bb.fetch2.FetchError("Unable to find revision %s even from upstream" % missing_rev)
386 492
387 if self._contains_lfs(ud, d, ud.clonedir) and self._need_lfs(ud): 493 if self.lfs_need_update(ud, d):
388 # Unpack temporary working copy, use it to run 'git checkout' to force pre-fetching 494 self.lfs_fetch(ud, d, ud.clonedir, ud.revision)
389 # of all LFS blobs needed at the the srcrev. 495
390 # 496 def lfs_fetch(self, ud, d, clonedir, revision, fetchall=False, progresshandler=None):
391 # It would be nice to just do this inline here by running 'git-lfs fetch' 497 """Helper method for fetching Git LFS data"""
392 # on the bare clonedir, but that operation requires a working copy on some 498 try:
393 # releases of Git LFS. 499 if self._need_lfs(ud) and self._contains_lfs(ud, d, clonedir) and len(revision):
394 tmpdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR')) 500 self._ensure_git_lfs(d, ud)
395 try: 501
396 # Do the checkout. This implicitly involves a Git LFS fetch. 502 # Use a worktree at the revision because a .lfsconfig may exist
397 self.unpack(ud, tmpdir, d) 503 worktree_add_cmd = "%s worktree add wt %s" % (ud.basecmd, revision)
398 504 runfetchcmd(worktree_add_cmd, d, log=progresshandler, workdir=clonedir)
399 # Scoop up a copy of any stuff that Git LFS downloaded. Merge them into 505 lfs_fetch_cmd = "%s lfs fetch %s" % (ud.basecmd, "--all" if fetchall else "")
400 # the bare clonedir. 506 runfetchcmd(lfs_fetch_cmd, d, log=progresshandler, workdir=(clonedir + "/wt"))
401 # 507 worktree_rem_cmd = "%s worktree remove -f wt" % ud.basecmd
402 # As this procedure is invoked repeatedly on incremental fetches as 508 runfetchcmd(worktree_rem_cmd, d, log=progresshandler, workdir=clonedir)
403 # a recipe's SRCREV is bumped throughout its lifetime, this will 509 except:
404 # result in a gradual accumulation of LFS blobs in <ud.clonedir>/lfs 510 logger.warning("Fetching LFS did not succeed.")
405 # corresponding to all the blobs reachable from the different revs 511
406 # fetched across time. 512 @contextmanager
407 # 513 def create_atomic(self, filename):
408 # Only do this if the unpack resulted in a .git/lfs directory being 514 """Create as a temp file and move atomically into position to avoid races"""
409 # created; this only happens if at least one blob needed to be 515 fd, tfile = tempfile.mkstemp(dir=os.path.dirname(filename))
410 # downloaded. 516 try:
411 if os.path.exists(os.path.join(tmpdir, "git", ".git", "lfs")): 517 yield tfile
412 runfetchcmd("tar -cf - lfs | tar -xf - -C %s" % ud.clonedir, d, workdir="%s/git/.git" % tmpdir) 518 umask = os.umask(0o666)
413 finally: 519 os.umask(umask)
414 bb.utils.remove(tmpdir, recurse=True) 520 os.chmod(tfile, (0o666 & ~umask))
521 os.rename(tfile, filename)
522 finally:
523 os.close(fd)
415 524
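create_atomic() is the usual tempfile-plus-rename pattern: the temporary file is created in the destination's own directory, so the final os.rename() stays on one filesystem and is atomic, and concurrent fetchers never observe a half-written tarball. It is used below in build_mirror_data(), roughly as:

    with self.create_atomic(ud.fullmirror) as tfile:
        runfetchcmd("tar -czf %s ." % tfile, d, workdir=ud.clonedir)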
416 def build_mirror_data(self, ud, d): 525 def build_mirror_data(self, ud, d):
417 if ud.shallow and ud.write_shallow_tarballs: 526 if ud.shallow and ud.write_shallow_tarballs:
418 if not os.path.exists(ud.fullshallow): 527 if not os.path.exists(ud.fullshallow):
419 if os.path.islink(ud.fullshallow): 528 if os.path.islink(ud.fullshallow):
420 os.unlink(ud.fullshallow) 529 os.unlink(ud.fullshallow)
421 tempdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR')) 530 self.clone_shallow_with_tarball(ud, d)
422 shallowclone = os.path.join(tempdir, 'git')
423 try:
424 self.clone_shallow_local(ud, shallowclone, d)
425
426 logger.info("Creating tarball of git repository")
427 runfetchcmd("tar -czf %s ." % ud.fullshallow, d, workdir=shallowclone)
428 runfetchcmd("touch %s.done" % ud.fullshallow, d)
429 finally:
430 bb.utils.remove(tempdir, recurse=True)
431 elif ud.write_tarballs and not os.path.exists(ud.fullmirror): 531 elif ud.write_tarballs and not os.path.exists(ud.fullmirror):
432 if os.path.islink(ud.fullmirror): 532 if os.path.islink(ud.fullmirror):
433 os.unlink(ud.fullmirror) 533 os.unlink(ud.fullmirror)
434 534
435 logger.info("Creating tarball of git repository") 535 logger.info("Creating tarball of git repository")
436 runfetchcmd("tar -czf %s ." % ud.fullmirror, d, workdir=ud.clonedir) 536 with self.create_atomic(ud.fullmirror) as tfile:
537 mtime = runfetchcmd("{} log --all -1 --format=%cD".format(ud.basecmd), d,
538 quiet=True, workdir=ud.clonedir)
539 runfetchcmd("tar -czf %s --owner oe:0 --group oe:0 --mtime \"%s\" ."
540 % (tfile, mtime), d, workdir=ud.clonedir)
437 runfetchcmd("touch %s.done" % ud.fullmirror, d) 541 runfetchcmd("touch %s.done" % ud.fullmirror, d)
438 542
543 def clone_shallow_with_tarball(self, ud, d):
544 ret = False
545 tempdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR'))
546 shallowclone = os.path.join(tempdir, 'git')
547 try:
548 try:
549 self.clone_shallow_local(ud, shallowclone, d)
550 except:
551 logger.warning("Fast shallow clone failed, trying again with fast mode skipped.")
552 bb.utils.remove(tempdir, recurse=True)
553 os.mkdir(tempdir)
554 ud.shallow_skip_fast = True
555 self.clone_shallow_local(ud, shallowclone, d)
556 logger.info("Creating tarball of git repository")
557 with self.create_atomic(ud.fullshallow) as tfile:
558 runfetchcmd("tar -czf %s ." % tfile, d, workdir=shallowclone)
559 runfetchcmd("touch %s.done" % ud.fullshallow, d)
560 ret = True
561 finally:
562 bb.utils.remove(tempdir, recurse=True)
563
564 return ret
565
439 def clone_shallow_local(self, ud, dest, d): 566 def clone_shallow_local(self, ud, dest, d):
440 """Clone the repo and make it shallow. 567 """
568 Shallow fetch from ud.clonedir (${DL_DIR}/git2/<gitrepo> by default):
569 - For BB_GIT_SHALLOW_DEPTH: git fetch --depth <depth> rev
570 - For BB_GIT_SHALLOW_REVS: git fetch --shallow-exclude=<revs> rev
571 """
441 572
442 The upstream url of the new clone isn't set at this time, as it'll be 573 progresshandler = GitProgressHandler(d)
443 set correctly when unpacked.""" 574 repourl = self._get_repo_url(ud)
444 runfetchcmd("%s clone %s %s %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, dest), d) 575 bb.utils.mkdirhier(dest)
576 init_cmd = "%s init -q" % ud.basecmd
577 if ud.bareclone:
578 init_cmd += " --bare"
579 runfetchcmd(init_cmd, d, workdir=dest)
580 # Use repourl when creating a fast initial shallow clone
581 # Prefer already existing full bare clones if available
582 if not ud.shallow_skip_fast and not os.path.exists(ud.clonedir):
583 remote = shlex.quote(repourl)
584 else:
585 remote = ud.clonedir
586 runfetchcmd("%s remote add origin %s" % (ud.basecmd, remote), d, workdir=dest)
445 587
446 to_parse, shallow_branches = [], [] 588 # Check the histories which should be excluded
447 for name in ud.names: 589 shallow_exclude = ''
448 revision = ud.revisions[name] 590 for revision in ud.shallow_revs:
449 depth = ud.shallow_depths[name] 591 shallow_exclude += " --shallow-exclude=%s" % revision
450 if depth:
451 to_parse.append('%s~%d^{}' % (revision, depth - 1))
452 592
453 # For nobranch, we need a ref, otherwise the commits will be 593 revision = ud.revision
454 # removed, and for non-nobranch, we truncate the branch to our 594 depth = ud.shallow_depths[ud.name]
455 # srcrev, to avoid keeping unnecessary history beyond that.
456 branch = ud.branches[name]
457 if ud.nobranch:
458 ref = "refs/shallow/%s" % name
459 elif ud.bareclone:
460 ref = "refs/heads/%s" % branch
461 else:
462 ref = "refs/remotes/origin/%s" % branch
463 595
464 shallow_branches.append(ref) 596 # The --depth and --shallow-exclude can't be used together
465 runfetchcmd("%s update-ref %s %s" % (ud.basecmd, ref, revision), d, workdir=dest) 597 if depth and shallow_exclude:
598 raise bb.fetch2.FetchError("BB_GIT_SHALLOW_REVS is set, but BB_GIT_SHALLOW_DEPTH is not 0.")
466 599
467 # Map srcrev+depths to revisions 600 # For nobranch, we need a ref, otherwise the commits will be
468 parsed_depths = runfetchcmd("%s rev-parse %s" % (ud.basecmd, " ".join(to_parse)), d, workdir=dest) 601 # removed, and for non-nobranch, we truncate the branch to our
602 # srcrev, to avoid keeping unnecessary history beyond that.
603 branch = ud.branch
604 if ud.nobranch:
605 ref = "refs/shallow/%s" % ud.name
606 elif ud.bareclone:
607 ref = "refs/heads/%s" % branch
608 else:
609 ref = "refs/remotes/origin/%s" % branch
610
611 fetch_cmd = "%s fetch origin %s" % (ud.basecmd, revision)
612 if depth:
613 fetch_cmd += " --depth %s" % depth
614
615 if shallow_exclude:
616 fetch_cmd += shallow_exclude
469 617
470 # Resolve specified revisions 618 # Advertise the revision for older git versions, such as 2.25.1, which otherwise fail with:
471 parsed_revs = runfetchcmd("%s rev-parse %s" % (ud.basecmd, " ".join('"%s^{}"' % r for r in ud.shallow_revs)), d, workdir=dest) 619 # error: Server does not allow request for unadvertised object.
472 shallow_revisions = parsed_depths.splitlines() + parsed_revs.splitlines() 620 # The ud.clonedir is a local temporary dir and will be removed when the
621 # fetch is done, so we can do anything to it.
622 adv_cmd = 'git branch -f advertise-%s %s' % (revision, revision)
623 if ud.shallow_skip_fast:
624 runfetchcmd(adv_cmd, d, workdir=ud.clonedir)
625
626 runfetchcmd(fetch_cmd, d, workdir=dest)
627 runfetchcmd("%s update-ref %s %s" % (ud.basecmd, ref, revision), d, workdir=dest)
628 # Fetch Git LFS data
629 self.lfs_fetch(ud, d, dest, ud.revision)
473 630
474 # Apply extra ref wildcards 631 # Apply extra ref wildcards
475 all_refs = runfetchcmd('%s for-each-ref "--format=%%(refname)"' % ud.basecmd, 632 all_refs_remote = runfetchcmd("%s ls-remote origin 'refs/*'" % ud.basecmd, \
476 d, workdir=dest).splitlines() 633 d, workdir=dest).splitlines()
634 all_refs = []
635 for line in all_refs_remote:
636 all_refs.append(line.split()[-1])
637 extra_refs = []
477 for r in ud.shallow_extra_refs: 638 for r in ud.shallow_extra_refs:
478 if not ud.bareclone: 639 if not ud.bareclone:
479 r = r.replace('refs/heads/', 'refs/remotes/origin/') 640 r = r.replace('refs/heads/', 'refs/remotes/origin/')
480 641
481 if '*' in r: 642 if '*' in r:
482 matches = filter(lambda a: fnmatch.fnmatchcase(a, r), all_refs) 643 matches = filter(lambda a: fnmatch.fnmatchcase(a, r), all_refs)
483 shallow_branches.extend(matches) 644 extra_refs.extend(matches)
484 else: 645 else:
485 shallow_branches.append(r) 646 extra_refs.append(r)
647
648 for ref in extra_refs:
649 ref_fetch = ref.replace('refs/heads/', '').replace('refs/remotes/origin/', '').replace('refs/tags/', '')
650 runfetchcmd("%s fetch origin --depth 1 %s" % (ud.basecmd, ref_fetch), d, workdir=dest)
651 revision = runfetchcmd("%s rev-parse FETCH_HEAD" % ud.basecmd, d, workdir=dest)
652 runfetchcmd("%s update-ref %s %s" % (ud.basecmd, ref, revision), d, workdir=dest)
486 653
487 # Make the repository shallow 654 # The url is local ud.clonedir, set it to upstream one
488 shallow_cmd = [self.make_shallow_path, '-s'] 655 runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=dest)
489 for b in shallow_branches:
490 shallow_cmd.append('-r')
491 shallow_cmd.append(b)
492 shallow_cmd.extend(shallow_revisions)
493 runfetchcmd(subprocess.list2cmdline(shallow_cmd), d, workdir=dest)
494 656
495 def unpack(self, ud, destdir, d): 657 def unpack(self, ud, destdir, d):
496 """ unpack the downloaded src to destdir""" 658 """ unpack the downloaded src to destdir"""
497 659
498 subdir = ud.parm.get("subpath", "") 660 subdir = ud.parm.get("subdir")
499 if subdir != "": 661 subpath = ud.parm.get("subpath")
500 readpathspec = ":%s" % subdir 662 readpathspec = ""
501 def_destsuffix = "%s/" % os.path.basename(subdir.rstrip('/')) 663 def_destsuffix = (d.getVar("BB_GIT_DEFAULT_DESTSUFFIX") or "git") + "/"
502 else: 664
503 readpathspec = "" 665 if subpath:
504 def_destsuffix = "git/" 666 readpathspec = ":%s" % subpath
667 def_destsuffix = "%s/" % os.path.basename(subpath.rstrip('/'))
668
669 if subdir:
670 # If 'subdir' param exists, create a dir and use it as destination for unpack cmd
671 if os.path.isabs(subdir):
672 if not os.path.realpath(subdir).startswith(os.path.realpath(destdir)):
673 raise bb.fetch2.UnpackError("subdir argument isn't a subdirectory of unpack root %s" % destdir, ud.url)
674 destdir = subdir
675 else:
676 destdir = os.path.join(destdir, subdir)
677 def_destsuffix = ""
505 678
506 destsuffix = ud.parm.get("destsuffix", def_destsuffix) 679 destsuffix = ud.parm.get("destsuffix", def_destsuffix)
507 destdir = ud.destdir = os.path.join(destdir, destsuffix) 680 destdir = ud.destdir = os.path.join(destdir, destsuffix)
508 if os.path.exists(destdir): 681 if os.path.exists(destdir):
509 bb.utils.prunedir(destdir) 682 bb.utils.prunedir(destdir)
683 if not ud.bareclone:
684 ud.unpack_tracer.unpack("git", destdir)
510 685
511 need_lfs = self._need_lfs(ud) 686 need_lfs = self._need_lfs(ud)
512 687
@@ -516,13 +691,12 @@ class Git(FetchMethod):
516 source_found = False 691 source_found = False
517 source_error = [] 692 source_error = []
518 693
519 if not source_found: 694 clonedir_is_up_to_date = not self.clonedir_need_update(ud, d)
520 clonedir_is_up_to_date = not self.clonedir_need_update(ud, d) 695 if clonedir_is_up_to_date:
521 if clonedir_is_up_to_date: 696 runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, destdir), d)
522 runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, destdir), d) 697 source_found = True
523 source_found = True 698 else:
524 else: 699 source_error.append("clone directory not available or not up to date: " + ud.clonedir)
525 source_error.append("clone directory not available or not up to date: " + ud.clonedir)
526 700
527 if not source_found: 701 if not source_found:
528 if ud.shallow: 702 if ud.shallow:
@@ -538,28 +712,43 @@ class Git(FetchMethod):
538 if not source_found: 712 if not source_found:
539 raise bb.fetch2.UnpackError("No up to date source found: " + "; ".join(source_error), ud.url) 713 raise bb.fetch2.UnpackError("No up to date source found: " + "; ".join(source_error), ud.url)
540 714
715 # If there is a tag parameter in the url and we also have a fixed srcrev, check the tag
716 # matches the revision
717 if 'tag' in ud.parm and sha1_re.match(ud.revision):
718 output = runfetchcmd("%s rev-list -n 1 %s" % (ud.basecmd, ud.parm['tag']), d, workdir=destdir)
719 output = output.strip()
720 if output != ud.revision:
721 # It is possible ud.revision is the revision on an annotated tag which won't match the output of rev-list
722 # If it resolves to the same thing there isn't a problem.
723 output2 = runfetchcmd("%s rev-list -n 1 %s" % (ud.basecmd, ud.revision), d, workdir=destdir)
724 output2 = output2.strip()
725 if output != output2:
726 raise bb.fetch2.FetchError("The revision the git tag '%s' resolved to didn't match the SRCREV in use (%s vs %s)" % (ud.parm['tag'], output, ud.revision), ud.url)
727
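The double rev-list above covers annotated tags: the tag object has its own SHA-1, while rev-list resolves to the tagged commit. A hypothetical sketch:

    # annotated tag v1.0 is tag object 9f8e..., pointing at commit 0123ab...
    # git rev-list -n 1 v1.0     -> 0123ab...
    # git rev-list -n 1 <SRCREV> -> 0123ab...  (same commit, so no error is raised)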
541 repourl = self._get_repo_url(ud) 728 repourl = self._get_repo_url(ud)
542 runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=destdir) 729 runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, shlex.quote(repourl)), d, workdir=destdir)
543 730
544 if self._contains_lfs(ud, d, destdir): 731 if self._contains_lfs(ud, d, destdir):
545 if need_lfs and not self._find_git_lfs(d): 732 if not need_lfs:
546 raise bb.fetch2.FetchError("Repository %s has LFS content, install git-lfs on host to download (or set lfs=0 to ignore it)" % (repourl))
547 elif not need_lfs:
548 bb.note("Repository %s has LFS content but it is not being fetched" % (repourl)) 733 bb.note("Repository %s has LFS content but it is not being fetched" % (repourl))
734 else:
735 self._ensure_git_lfs(d, ud)
736
737 runfetchcmd("%s lfs install --local" % ud.basecmd, d, workdir=destdir)
549 738
550 if not ud.nocheckout: 739 if not ud.nocheckout:
551 if subdir != "": 740 if subpath:
552 runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d, 741 runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revision, readpathspec), d,
553 workdir=destdir) 742 workdir=destdir)
554 runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d, workdir=destdir) 743 runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d, workdir=destdir)
555 elif not ud.nobranch: 744 elif not ud.nobranch:
556 branchname = ud.branches[ud.names[0]] 745 branchname = ud.branch
557 runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \ 746 runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \
558 ud.revisions[ud.names[0]]), d, workdir=destdir) 747 ud.revision), d, workdir=destdir)
559 runfetchcmd("%s branch %s --set-upstream-to origin/%s" % (ud.basecmd, branchname, \ 748 runfetchcmd("%s branch %s --set-upstream-to origin/%s" % (ud.basecmd, branchname, \
560 branchname), d, workdir=destdir) 749 branchname), d, workdir=destdir)
561 else: 750 else:
562 runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=destdir) 751 runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revision), d, workdir=destdir)
563 752
564 return True 753 return True
565 754
@@ -573,8 +762,13 @@ class Git(FetchMethod):
573 clonedir = os.path.realpath(ud.localpath) 762 clonedir = os.path.realpath(ud.localpath)
574 to_remove.append(clonedir) 763 to_remove.append(clonedir)
575 764
765 # Remove shallow mirror tarball
766 if ud.shallow:
767 to_remove.append(ud.fullshallow)
768 to_remove.append(ud.fullshallow + ".done")
769
576 for r in to_remove: 770 for r in to_remove:
577 if os.path.exists(r): 771 if os.path.exists(r) or os.path.islink(r):
578 bb.note('Removing %s' % r) 772 bb.note('Removing %s' % r)
579 bb.utils.remove(r, True) 773 bb.utils.remove(r, True)
580 774
@@ -585,10 +779,10 @@ class Git(FetchMethod):
585 cmd = "" 779 cmd = ""
586 if ud.nobranch: 780 if ud.nobranch:
587 cmd = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % ( 781 cmd = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (
588 ud.basecmd, ud.revisions[name]) 782 ud.basecmd, ud.revision)
589 else: 783 else:
590 cmd = "%s branch --contains %s --list %s 2> /dev/null | wc -l" % ( 784 cmd = "%s branch --contains %s --list %s 2> /dev/null | wc -l" % (
591 ud.basecmd, ud.revisions[name], ud.branches[name]) 785 ud.basecmd, ud.revision, ud.branch)
592 try: 786 try:
593 output = runfetchcmd(cmd, d, quiet=True, workdir=wd) 787 output = runfetchcmd(cmd, d, quiet=True, workdir=wd)
594 except bb.fetch2.FetchError: 788 except bb.fetch2.FetchError:
@@ -597,6 +791,37 @@ class Git(FetchMethod):
597 raise bb.fetch2.FetchError("The command '%s' gave output with more than 1 line unexpectedly, output: '%s'" % (cmd, output)) 791 raise bb.fetch2.FetchError("The command '%s' gave output with more than 1 line unexpectedly, output: '%s'" % (cmd, output))
598 return output.split()[0] != "0" 792 return output.split()[0] != "0"
599 793
794 def _lfs_objects_downloaded(self, ud, d, wd):
795 """
796 Verifies whether the LFS objects for requested revisions have already been downloaded
797 """
798 # Bail out early if this repository doesn't use LFS
799 if not self._contains_lfs(ud, d, wd):
800 return True
801
802 self._ensure_git_lfs(d, ud)
803
804 # The Git LFS specification ([1]) defines the LFS folder layout, so it should be safe to check for file
805 # existence.
806 # [1] https://github.com/git-lfs/git-lfs/blob/main/docs/spec.md#intercepting-git
807 cmd = "%s lfs ls-files -l %s" \
808 % (ud.basecmd, ud.revision)
809 output = runfetchcmd(cmd, d, quiet=True, workdir=wd).rstrip()
810 # Do not do any further matching if no objects are managed by LFS
811 if not output:
812 return True
813
814 # Match all lines beginning with the hexadecimal OID
815 oid_regex = re.compile("^(([a-fA-F0-9]{2})([a-fA-F0-9]{2})[A-Fa-f0-9]+)")
816 for line in output.split("\n"):
817 oid = re.search(oid_regex, line)
818 if not oid:
819 bb.warn("git lfs ls-files output '%s' did not match expected format." % line)
820 if not oid or not os.path.exists(os.path.join(wd, "lfs", "objects", oid.group(2), oid.group(3), oid.group(1))):
821 return False
822
823 return True
824
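To illustrate the layout being checked above: for a downloaded object with OID 'ab12cd...' (hypothetical), the file is expected at <wd>/lfs/objects/ab/12/ab12cd..., i.e. in Python:

    path = os.path.join(wd, "lfs", "objects", oid[0:2], oid[2:4], oid)  # oid is the full hex string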
600 def _need_lfs(self, ud): 825 def _need_lfs(self, ud):
601 return ud.parm.get("lfs", "1") == "1" 826 return ud.parm.get("lfs", "1") == "1"
602 827
@@ -604,20 +829,8 @@ class Git(FetchMethod):
604 """ 829 """
605 Check if the repository has 'lfs' (large file) content 830 Check if the repository has 'lfs' (large file) content
606 """ 831 """
607
608 if not ud.nobranch:
609 branchname = ud.branches[ud.names[0]]
610 else:
611 branchname = "master"
612
613 # The bare clonedir doesn't use the remote names; it has the branch immediately.
614 if wd == ud.clonedir:
615 refname = ud.branches[ud.names[0]]
616 else:
617 refname = "origin/%s" % ud.branches[ud.names[0]]
618
619 cmd = "%s grep lfs %s:.gitattributes | wc -l" % ( 832 cmd = "%s grep lfs %s:.gitattributes | wc -l" % (
620 ud.basecmd, refname) 833 ud.basecmd, ud.revision)
621 834
622 try: 835 try:
623 output = runfetchcmd(cmd, d, quiet=True, workdir=wd) 836 output = runfetchcmd(cmd, d, quiet=True, workdir=wd)
@@ -627,12 +840,14 @@ class Git(FetchMethod):
627 pass 840 pass
628 return False 841 return False
629 842
630 def _find_git_lfs(self, d): 843 def _ensure_git_lfs(self, d, ud):
631 """ 844 """
632 Return True if git-lfs can be found, False otherwise. 845 Ensures that git-lfs is available, raising a FetchError if it isn't.
633 """ 846 """
634 import shutil 847 if shutil.which("git-lfs", path=d.getVar('PATH')) is None:
635 return shutil.which("git-lfs", path=d.getVar('PATH')) is not None 848 raise bb.fetch2.FetchError(
849 "Repository %s has LFS content, install git-lfs on host to download (or set lfs=0 "
850 "to ignore it)" % self._get_repo_url(ud))
636 851
637 def _get_repo_url(self, ud): 852 def _get_repo_url(self, ud):
638 """ 853 """
@@ -640,22 +855,21 @@ class Git(FetchMethod):
640 """ 855 """
641 # Note that we do not support passwords directly in the git urls. There are several 856 # Note that we do not support passwords directly in the git urls. There are several
642 # reasons. SRC_URI can be written out to things like buildhistory and people don't 857 # reasons. SRC_URI can be written out to things like buildhistory and people don't
643 # want to leak passwords like that. It's also all too easy to share metadata without 858 # want to leak passwords like that. It's also all too easy to share metadata without
644 # removing the password. ssh keys, ~/.netrc and ~/.ssh/config files can be used as 859 # removing the password. ssh keys, ~/.netrc and ~/.ssh/config files can be used as
645 # alternatives so we will not take patches adding password support here. 860 # alternatives so we will not take patches adding password support here.
646 if ud.user: 861 if ud.user:
647 username = ud.user + '@' 862 username = ud.user + '@'
648 else: 863 else:
649 username = "" 864 username = ""
650 return "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path) 865 return "%s://%s%s%s" % (ud.proto, username, ud.host, urllib.parse.quote(ud.path))
651 866
652 def _revision_key(self, ud, d, name): 867 def _revision_key(self, ud, d, name):
653 """ 868 """
654 Return a unique key for the url 869 Return a unique key for the url
655 """ 870 """
656 # Collapse adjacent slashes 871 # Collapse adjacent slashes
657 slash_re = re.compile(r"/+") 872 return "git:" + ud.host + slash_re.sub(".", ud.path) + ud.unresolvedrev
658 return "git:" + ud.host + slash_re.sub(".", ud.path) + ud.unresolvedrev[name]
659 873
660 def _lsremote(self, ud, d, search): 874 def _lsremote(self, ud, d, search):
661 """ 875 """
@@ -687,21 +901,27 @@ class Git(FetchMethod):
687 """ 901 """
688 Compute the HEAD revision for the url 902 Compute the HEAD revision for the url
689 """ 903 """
904 if not d.getVar("__BBSRCREV_SEEN"):
905 raise bb.fetch2.FetchError("Recipe uses a floating tag/branch '%s' for repo '%s' without a fixed SRCREV yet doesn't call bb.fetch2.get_srcrev() (use SRCPV in PV for OE)." % (ud.unresolvedrev, ud.host+ud.path))
906
907 # Ensure we mark as not cached
908 bb.fetch2.mark_recipe_nocache(d)
909
690 output = self._lsremote(ud, d, "") 910 output = self._lsremote(ud, d, "")
691 # Tags of the form ^{} may not work, need to fallback to other form 911 # Tags of the form ^{} may not work, need to fallback to other form
692 if ud.unresolvedrev[name][:5] == "refs/" or ud.usehead: 912 if ud.unresolvedrev[:5] == "refs/" or ud.usehead:
693 head = ud.unresolvedrev[name] 913 head = ud.unresolvedrev
694 tag = ud.unresolvedrev[name] 914 tag = ud.unresolvedrev
695 else: 915 else:
696 head = "refs/heads/%s" % ud.unresolvedrev[name] 916 head = "refs/heads/%s" % ud.unresolvedrev
697 tag = "refs/tags/%s" % ud.unresolvedrev[name] 917 tag = "refs/tags/%s" % ud.unresolvedrev
698 for s in [head, tag + "^{}", tag]: 918 for s in [head, tag + "^{}", tag]:
699 for l in output.strip().split('\n'): 919 for l in output.strip().split('\n'):
700 sha1, ref = l.split() 920 sha1, ref = l.split()
701 if s == ref: 921 if s == ref:
702 return sha1 922 return sha1
703 raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output for %s" % \ 923 raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output for %s" % \
704 (ud.unresolvedrev[name], ud.host+ud.path)) 924 (ud.unresolvedrev, ud.host+ud.path))
705 925
706 def latest_versionstring(self, ud, d): 926 def latest_versionstring(self, ud, d):
707 """ 927 """
@@ -711,60 +931,63 @@ class Git(FetchMethod):
711 """ 931 """
712 pupver = ('', '') 932 pupver = ('', '')
713 933
714 tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or r"(?P<pver>([0-9][\.|_]?)+)")
715 try: 934 try:
716 output = self._lsremote(ud, d, "refs/tags/*") 935 output = self._lsremote(ud, d, "refs/tags/*")
717 except (bb.fetch2.FetchError, bb.fetch2.NetworkAccess) as e: 936 except (bb.fetch2.FetchError, bb.fetch2.NetworkAccess) as e:
718 bb.note("Could not list remote: %s" % str(e)) 937 bb.note("Could not list remote: %s" % str(e))
719 return pupver 938 return pupver
720 939
940 rev_tag_re = re.compile(r"([0-9a-f]{40})\s+refs/tags/(.*)")
941 pver_re = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or r"(?P<pver>([0-9][\.|_]?)+)")
942 nonrel_re = re.compile(r"(alpha|beta|rc|final)+")
943
721 verstring = "" 944 verstring = ""
722 revision = ""
723 for line in output.split("\n"): 945 for line in output.split("\n"):
724 if not line: 946 if not line:
725 break 947 break
726 948
727 tag_head = line.split("/")[-1] 949 m = rev_tag_re.match(line)
950 if not m:
951 continue
952
953 (revision, tag) = m.groups()
954
728 # Ignore non-released branches 955 # Ignore non-released branches
729 m = re.search(r"(alpha|beta|rc|final)+", tag_head) 956 if nonrel_re.search(tag):
730 if m:
731 continue 957 continue
732 958
733 # search for version in the line 959 # search for version in the line
734 tag = tagregex.search(tag_head) 960 m = pver_re.search(tag)
735 if tag is None: 961 if not m:
736 continue 962 continue
737 963
738 tag = tag.group('pver') 964 pver = m.group('pver').replace("_", ".")
739 tag = tag.replace("_", ".")
740 965
741 if verstring and bb.utils.vercmp(("0", tag, ""), ("0", verstring, "")) < 0: 966 if verstring and bb.utils.vercmp(("0", pver, ""), ("0", verstring, "")) < 0:
742 continue 967 continue
743 968
744 verstring = tag 969 verstring = pver
745 revision = line.split()[0]
746 pupver = (verstring, revision) 970 pupver = (verstring, revision)
747 971
748 return pupver 972 return pupver
749 973
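A recipe can steer the tag matching above through UPSTREAM_CHECK_GITTAGREGEX; the regex must define a named group 'pver'. A hypothetical setting accepting tags like v1.2.3 would be:

    UPSTREAM_CHECK_GITTAGREGEX = "v(?P<pver>\d+(\.\d+)+)"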
750 def _build_revision(self, ud, d, name): 974 def _build_revision(self, ud, d, name):
751 return ud.revisions[name] 975 return ud.revision
752 976
753 def gitpkgv_revision(self, ud, d, name): 977 def gitpkgv_revision(self, ud, d, name):
754 """ 978 """
755 Return a sortable revision number by counting commits in the history 979 Return a sortable revision number by counting commits in the history
756 Based on gitpkgv.bbclass in meta-openembedded 980 Based on gitpkgv.bbclass in meta-openembedded
757 """ 981 """
758 rev = self._build_revision(ud, d, name) 982 rev = ud.revision
759 localpath = ud.localpath 983 localpath = ud.localpath
760 rev_file = os.path.join(localpath, "oe-gitpkgv_" + rev) 984 rev_file = os.path.join(localpath, "oe-gitpkgv_" + rev)
761 if not os.path.exists(localpath): 985 if not os.path.exists(localpath):
762 commits = None 986 commits = None
763 else: 987 else:
764 if not os.path.exists(rev_file) or not os.path.getsize(rev_file): 988 if not os.path.exists(rev_file) or not os.path.getsize(rev_file):
765 from pipes import quote
766 commits = bb.fetch2.runfetchcmd( 989 commits = bb.fetch2.runfetchcmd(
767 "git rev-list %s -- | wc -l" % quote(rev), 990 "git rev-list %s -- | wc -l" % shlex.quote(rev),
768 d, quiet=True).strip().lstrip('0') 991 d, quiet=True).strip().lstrip('0')
769 if commits: 992 if commits:
770 open(rev_file, "w").write("%d\n" % int(commits)) 993 open(rev_file, "w").write("%d\n" % int(commits))
diff --git a/bitbake/lib/bb/fetch2/gitsm.py b/bitbake/lib/bb/fetch2/gitsm.py
index a4527bf364..5869e1b99b 100644
--- a/bitbake/lib/bb/fetch2/gitsm.py
+++ b/bitbake/lib/bb/fetch2/gitsm.py
@@ -62,36 +62,35 @@ class GitSM(Git):
62 return modules 62 return modules
63 63
64 # Collect the defined submodules, and their attributes 64 # Collect the defined submodules, and their attributes
65 for name in ud.names: 65 try:
66 gitmodules = runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revision), d, quiet=True, workdir=workdir)
67 except:
68 # No submodules to update
69 gitmodules = ""
70
71 for m, md in parse_gitmodules(gitmodules).items():
66 try: 72 try:
67 gitmodules = runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True, workdir=workdir) 73 module_hash = runfetchcmd("%s ls-tree -z -d %s %s" % (ud.basecmd, ud.revision, md['path']), d, quiet=True, workdir=workdir)
68 except: 74 except:
69 # No submodules to update 75 # If the command fails, we don't have a valid file to check. If it doesn't
76 # fail -- it still might be a failure, see next check...
77 module_hash = ""
78
79 if not module_hash:
80 logger.debug("submodule %s is defined, but is not initialized in the repository. Skipping", m)
70 continue 81 continue
71 82
72 for m, md in parse_gitmodules(gitmodules).items(): 83 submodules.append(m)
73 try: 84 paths[m] = md['path']
74 module_hash = runfetchcmd("%s ls-tree -z -d %s %s" % (ud.basecmd, ud.revisions[name], md['path']), d, quiet=True, workdir=workdir) 85 revision[m] = ud.revision
75 except: 86 uris[m] = md['url']
76 # If the command fails, we don't have a valid file to check. If it doesn't 87 subrevision[m] = module_hash.split()[2]
77 # fail -- it still might be a failure, see next check... 88
78 module_hash = "" 89 # Convert relative to absolute uri based on parent uri
79 90 if uris[m].startswith('..') or uris[m].startswith('./'):
80 if not module_hash: 91 newud = copy.copy(ud)
81 logger.debug("submodule %s is defined, but is not initialized in the repository. Skipping", m) 92 newud.path = os.path.normpath(os.path.join(newud.path, uris[m]))
82 continue 93 uris[m] = Git._get_repo_url(self, newud)
83
84 submodules.append(m)
85 paths[m] = md['path']
86 revision[m] = ud.revisions[name]
87 uris[m] = md['url']
88 subrevision[m] = module_hash.split()[2]
89
90 # Convert relative to absolute uri based on parent uri
91 if uris[m].startswith('..'):
92 newud = copy.copy(ud)
93 newud.path = os.path.realpath(os.path.join(newud.path, uris[m]))
94 uris[m] = Git._get_repo_url(self, newud)
95 94
96 for module in submodules: 95 for module in submodules:
97 # Translate the module url into a SRC_URI 96 # Translate the module url into a SRC_URI
@@ -115,10 +114,21 @@ class GitSM(Git):
115 # This has to be a file reference 114 # This has to be a file reference
116 proto = "file" 115 proto = "file"
117 url = "gitsm://" + uris[module] 116 url = "gitsm://" + uris[module]
117 if url.endswith("{}{}".format(ud.host, ud.path)):
118 raise bb.fetch2.FetchError("Submodule refers to the parent repository. This will cause a deadlock in the current version of BitBake. " \
119 "Consider using the git fetcher instead.")
118 120
119 url += ';protocol=%s' % proto 121 url += ';protocol=%s' % proto
120 url += ";name=%s" % module 122 url += ";name=%s" % module
121 url += ";subpath=%s" % module 123 url += ";subpath=%s" % module
124 url += ";nobranch=1"
125 url += ";lfs=%s" % ("1" if self._need_lfs(ud) else "0")
126 # Note that adding "user=" here to give credentials to the
127 # submodule is not supported. Since using SRC_URI to give git://
128 # URL a password is not supported, one have to use one of the
129 # recommended way (eg. ~/.netrc or SSH config) which does specify
130 # the user (See comment in git.py).
131 # So, we will not take patches adding "user=" support here.
122 132
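Putting the URL construction above together, a submodule named 'thirdparty' hosted at a hypothetical https URL would be handed to the nested fetcher roughly as:

    gitsm://git.example.com/thirdparty.git;protocol=https;name=thirdparty;subpath=thirdparty;nobranch=1;lfs=1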
123 ld = d.createCopy() 133 ld = d.createCopy()
124 # Not necessary to set SRC_URI, since we're passing the URI to 134 # Not necessary to set SRC_URI, since we're passing the URI to
@@ -136,20 +146,26 @@ class GitSM(Git):
136 146
137 return submodules != [] 147 return submodules != []
138 148
149 def call_process_submodules(self, ud, d, extra_check, subfunc):
150 # If we're using a shallow mirror tarball it needs to be
151 # unpacked temporarily so that we can examine the .gitmodules file
152 # Unpack even when ud.clonedir is not available,
153 # which may occur during a fast shallow clone
154 unpack = extra_check or not os.path.exists(ud.clonedir)
155 if ud.shallow and os.path.exists(ud.fullshallow) and unpack:
156 tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
157 try:
158 runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=tmpdir)
159 self.process_submodules(ud, tmpdir, subfunc, d)
160 finally:
161 shutil.rmtree(tmpdir)
162 else:
163 self.process_submodules(ud, ud.clonedir, subfunc, d)
164
139 def need_update(self, ud, d): 165 def need_update(self, ud, d):
140 if Git.need_update(self, ud, d): 166 if Git.need_update(self, ud, d):
141 return True 167 return True
142 168
143 try:
144 # Check for the nugget dropped by the download operation
145 known_srcrevs = runfetchcmd("%s config --get-all bitbake.srcrev" % \
146 (ud.basecmd), d, workdir=ud.clonedir)
147
148 if ud.revisions[ud.names[0]] in known_srcrevs.split():
149 return False
150 except bb.fetch2.FetchError:
151 pass
152
153 need_update_list = [] 169 need_update_list = []
154 def need_update_submodule(ud, url, module, modpath, workdir, d): 170 def need_update_submodule(ud, url, module, modpath, workdir, d):
155 url += ";bareclone=1;nobranch=1" 171 url += ";bareclone=1;nobranch=1"
@@ -163,22 +179,9 @@ class GitSM(Git):
163 logger.error('gitsm: submodule update check failed: %s %s' % (type(e).__name__, str(e))) 179 logger.error('gitsm: submodule update check failed: %s %s' % (type(e).__name__, str(e)))
164 need_update_result = True 180 need_update_result = True
165 181
166 # If we're using a shallow mirror tarball it needs to be unpacked 182 self.call_process_submodules(ud, d, not os.path.exists(ud.clonedir), need_update_submodule)
167 # temporarily so that we can examine the .gitmodules file 183
168 if ud.shallow and os.path.exists(ud.fullshallow) and not os.path.exists(ud.clonedir): 184 if need_update_list:
169 tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
170 runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=tmpdir)
171 self.process_submodules(ud, tmpdir, need_update_submodule, d)
172 shutil.rmtree(tmpdir)
173 else:
174 self.process_submodules(ud, ud.clonedir, need_update_submodule, d)
175 if len(need_update_list) == 0:
176 # We already have the required commits of all submodules. Drop
177 # a nugget so we don't need to check again.
178 runfetchcmd("%s config --add bitbake.srcrev %s" % \
179 (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir)
180
181 if len(need_update_list) > 0:
182 logger.debug('gitsm: Submodules requiring update: %s' % (' '.join(need_update_list))) 185 logger.debug('gitsm: Submodules requiring update: %s' % (' '.join(need_update_list)))
183 return True 186 return True
184 187
@@ -199,19 +202,7 @@ class GitSM(Git):
199 raise 202 raise
200 203
201 Git.download(self, ud, d) 204 Git.download(self, ud, d)
202 205 self.call_process_submodules(ud, d, self.need_update(ud, d), download_submodule)
203 # If we're using a shallow mirror tarball it needs to be unpacked
204 # temporarily so that we can examine the .gitmodules file
205 if ud.shallow and os.path.exists(ud.fullshallow) and self.need_update(ud, d):
206 tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
207 runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=tmpdir)
208 self.process_submodules(ud, tmpdir, download_submodule, d)
209 shutil.rmtree(tmpdir)
210 else:
-            self.process_submodules(ud, ud.clonedir, download_submodule, d)
-        # Drop a nugget for the srcrev we've fetched (used by need_update)
-        runfetchcmd("%s config --add bitbake.srcrev %s" % \
-                (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir)
 
     def unpack(self, ud, destdir, d):
         def unpack_submodules(ud, url, module, modpath, workdir, d):
@@ -225,6 +216,10 @@ class GitSM(Git):
 
             try:
                 newfetch = Fetch([url], d, cache=False)
+                # modpath is needed by unpack tracer to calculate submodule
+                # checkout dir
+                new_ud = newfetch.ud[url]
+                new_ud.modpath = modpath
                 newfetch.unpack(root=os.path.dirname(os.path.join(repo_conf, 'modules', module)))
             except Exception as e:
                 logger.error('gitsm: submodule unpack failed: %s %s' % (type(e).__name__, str(e)))
@@ -250,13 +245,27 @@ class GitSM(Git):
         ret = self.process_submodules(ud, ud.destdir, unpack_submodules, d)
 
         if not ud.bareclone and ret:
-            # All submodules should already be downloaded and configured in the tree. This simply sets
-            # up the configuration and checks out the files. The main project config should remain
-            # unmodified, and no download from the internet should occur.
-            runfetchcmd("%s submodule update --recursive --no-fetch" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)
+            cmdprefix = ""
+            # Avoid LFS smudging (replacing the LFS pointers with the actual content) when LFS shouldn't be used but git-lfs is installed.
+            if not self._need_lfs(ud):
+                cmdprefix = "GIT_LFS_SKIP_SMUDGE=1 "
+            runfetchcmd("%s%s submodule update --recursive --no-fetch" % (cmdprefix, ud.basecmd), d, quiet=True, workdir=ud.destdir)
+
+    def clean(self, ud, d):
+        def clean_submodule(ud, url, module, modpath, workdir, d):
+            url += ";bareclone=1;nobranch=1"
+            try:
+                newfetch = Fetch([url], d, cache=False)
+                newfetch.clean()
+            except Exception as e:
+                logger.warning('gitsm: submodule clean failed: %s %s' % (type(e).__name__, str(e)))
+
+        self.call_process_submodules(ud, d, True, clean_submodule)
+
+        # Clean top git dir
+        Git.clean(self, ud, d)
 
     def implicit_urldata(self, ud, d):
-        import shutil, subprocess, tempfile
+        import subprocess
 
         urldata = []
         def add_submodule(ud, url, module, modpath, workdir, d):
@@ -264,14 +273,6 @@ class GitSM(Git):
             newfetch = Fetch([url], d, cache=False)
             urldata.extend(newfetch.expanded_urldata())
 
-        # If we're using a shallow mirror tarball it needs to be unpacked
-        # temporarily so that we can examine the .gitmodules file
-        if ud.shallow and os.path.exists(ud.fullshallow) and ud.method.need_update(ud, d):
-            tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
-            subprocess.check_call("tar -xzf %s" % ud.fullshallow, cwd=tmpdir, shell=True)
-            self.process_submodules(ud, tmpdir, add_submodule, d)
-            shutil.rmtree(tmpdir)
-        else:
-            self.process_submodules(ud, ud.clonedir, add_submodule, d)
+        self.call_process_submodules(ud, d, ud.method.need_update(ud, d), add_submodule)
 
         return urldata
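The GIT_LFS_SKIP_SMUDGE=1 prefix added above keeps git from replacing LFS pointer files with their content during the submodule checkout. A minimal sketch of that prefix logic in isolation, where a need_lfs flag and a bare "git" command stand in for self._need_lfs(ud) and ud.basecmd (both substitutions are assumptions for illustration):

    def submodule_update_cmd(need_lfs):
        # Leave LFS pointers unsmudged when LFS isn't wanted, even if
        # git-lfs happens to be installed on the host
        cmdprefix = "" if need_lfs else "GIT_LFS_SKIP_SMUDGE=1 "
        return cmdprefix + "git submodule update --recursive --no-fetch"

    assert submodule_update_cmd(False).startswith("GIT_LFS_SKIP_SMUDGE=1")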
diff --git a/bitbake/lib/bb/fetch2/gomod.py b/bitbake/lib/bb/fetch2/gomod.py
new file mode 100644
index 0000000000..53c1d8d115
--- /dev/null
+++ b/bitbake/lib/bb/fetch2/gomod.py
@@ -0,0 +1,273 @@
1"""
2BitBake 'Fetch' implementation for Go modules
3
4The gomod/gomodgit fetchers are used to download Go modules to the module cache
5from a module proxy or directly from a version control repository.
6
7Example SRC_URI:
8
9SRC_URI += "gomod://golang.org/x/net;version=v0.9.0;sha256sum=..."
10SRC_URI += "gomodgit://golang.org/x/net;version=v0.9.0;repo=go.googlesource.com/net;srcrev=..."
11
12Required SRC_URI parameters:
13
14- version
15 The version of the module.
16
17Optional SRC_URI parameters:
18
19- mod
20 Fetch and unpack the go.mod file only instead of the complete module.
21 The go command may need to download go.mod files for many different modules
22 when computing the build list, and go.mod files are much smaller than
23 module zip files.
24 The default is "0", set mod=1 for the go.mod file only.
25
26- sha256sum
27 The checksum of the module zip file, or the go.mod file in case of fetching
28 only the go.mod file. Alternatively, set the SRC_URI varible flag for
29 "module@version.sha256sum".
30
31- protocol
32 The method used when fetching directly from a version control repository.
33 The default is "https" for git.
34
35- repo
36 The URL when fetching directly from a version control repository. Required
37 when the URL is different from the module path.
38
39- srcrev
40 The revision identifier used when fetching directly from a version control
41 repository. Alternatively, set the SRCREV varible for "module@version".
42
43- subdir
44 The module subdirectory when fetching directly from a version control
45 repository. Required when the module is not located in the root of the
46 repository.
47
48Related variables:
49
50- GO_MOD_PROXY
51 The module proxy used by the fetcher.
52
53- GO_MOD_CACHE_DIR
54 The directory where the module cache is located.
55 This must match the exported GOMODCACHE variable for the go command to find
56 the downloaded modules.
57
58See the Go modules reference, https://go.dev/ref/mod, for more information
59about the module cache, module proxies and version control systems.
60"""
+
+import hashlib
+import os
+import re
+import shutil
+import subprocess
+import zipfile
+
+import bb
+from bb.fetch2 import FetchError
+from bb.fetch2 import MissingParameterError
+from bb.fetch2 import runfetchcmd
+from bb.fetch2 import subprocess_setup
+from bb.fetch2.git import Git
+from bb.fetch2.wget import Wget
+
+
+def escape(path):
+    """Escape capital letters using exclamation points."""
+    return re.sub(r'([A-Z])', lambda m: '!' + m.group(1).lower(), path)
+
+
+class GoMod(Wget):
+    """Class to fetch Go modules from a Go module proxy via wget"""
+
+    def supports(self, ud, d):
+        """Check to see if a given URL is for this fetcher."""
+        return ud.type == 'gomod'
+
+    def urldata_init(self, ud, d):
+        """Set up to download the module from the module proxy.
+
+        Set up to download the module zip file to the module cache directory
+        and unpack the go.mod file (unless downloading only the go.mod file):
+
+        cache/download/<module>/@v/<version>.zip: The module zip file.
+        cache/download/<module>/@v/<version>.mod: The go.mod file.
+        """
+
+        proxy = d.getVar('GO_MOD_PROXY') or 'proxy.golang.org'
+        moddir = d.getVar('GO_MOD_CACHE_DIR') or 'pkg/mod'
+
+        if 'version' not in ud.parm:
+            raise MissingParameterError('version', ud.url)
+
+        module = ud.host
+        if ud.path != '/':
+            module += ud.path
+        ud.parm['module'] = module
+        version = ud.parm['version']
+
+        # Set URL and filename for wget download
+        if ud.parm.get('mod', '0') == '1':
+            ext = '.mod'
+        else:
+            ext = '.zip'
+        path = escape(f"{module}/@v/{version}{ext}")
+        ud.url = bb.fetch2.encodeurl(
+            ('https', proxy, '/' + path, None, None, None))
+        ud.parm['downloadfilename'] = f"{module.replace('/', '.')}@{version}{ext}"
+
+        # Set name for checksum verification
+        ud.parm['name'] = f"{module}@{version}"
+
+        # Set path for unpack
+        ud.parm['unpackpath'] = os.path.join(moddir, 'cache/download', path)
+
+        super().urldata_init(ud, d)
+
+    def unpack(self, ud, rootdir, d):
+        """Unpack the module in the module cache."""
+
+        # Unpack the module zip file or go.mod file
+        unpackpath = os.path.join(rootdir, ud.parm['unpackpath'])
+        unpackdir = os.path.dirname(unpackpath)
+        bb.utils.mkdirhier(unpackdir)
+        ud.unpack_tracer.unpack("file-copy", unpackdir)
+        cmd = f"cp {ud.localpath} {unpackpath}"
+        path = d.getVar('PATH')
+        if path:
+            cmd = f"PATH={path} {cmd}"
+        name = os.path.basename(unpackpath)
+        bb.note(f"Unpacking {name} to {unpackdir}/")
+        subprocess.check_call(cmd, shell=True, preexec_fn=subprocess_setup)
+
+        if name.endswith('.zip'):
+            # Unpack the go.mod file from the zip file
+            module = ud.parm['module']
+            name = name.rsplit('.', 1)[0] + '.mod'
+            bb.note(f"Unpacking {name} to {unpackdir}/")
+            with zipfile.ZipFile(ud.localpath) as zf:
+                with open(os.path.join(unpackdir, name), mode='wb') as mf:
+                    try:
+                        f = module + '@' + ud.parm['version'] + '/go.mod'
+                        shutil.copyfileobj(zf.open(f), mf)
+                    except KeyError:
+                        # If the module does not have a go.mod file, synthesize
+                        # one containing only a module statement.
+                        mf.write(f'module {module}\n'.encode())
+
+
+class GoModGit(Git):
+    """Class to fetch Go modules directly from a git repository"""
+
+    def supports(self, ud, d):
+        """Check to see if a given URL is for this fetcher."""
+        return ud.type == 'gomodgit'
+
+    def urldata_init(self, ud, d):
+        """Set up to download the module from the git repository.
+
+        Set up to download the git repository to the module cache directory and
+        unpack the module zip file and the go.mod file:
+
+        cache/vcs/<hash>: The bare git repository.
+        cache/download/<module>/@v/<version>.zip: The module zip file.
+        cache/download/<module>/@v/<version>.mod: The go.mod file.
+        """
+
+        moddir = d.getVar('GO_MOD_CACHE_DIR') or 'pkg/mod'
+
+        if 'version' not in ud.parm:
+            raise MissingParameterError('version', ud.url)
+
+        module = ud.host
+        if ud.path != '/':
+            module += ud.path
+        ud.parm['module'] = module
+
+        # Set host, path and srcrev for git download
+        if 'repo' in ud.parm:
+            repo = ud.parm['repo']
+            idx = repo.find('/')
+            if idx != -1:
+                ud.host = repo[:idx]
+                ud.path = repo[idx:]
+            else:
+                ud.host = repo
+                ud.path = ''
+        if 'protocol' not in ud.parm:
+            ud.parm['protocol'] = 'https'
+        ud.name = f"{module}@{ud.parm['version']}"
+        srcrev = d.getVar('SRCREV_' + ud.name)
+        if srcrev:
+            if 'srcrev' not in ud.parm:
+                ud.parm['srcrev'] = srcrev
+        else:
+            if 'srcrev' in ud.parm:
+                d.setVar('SRCREV_' + ud.name, ud.parm['srcrev'])
+        if 'branch' not in ud.parm:
+            ud.parm['nobranch'] = '1'
+
+        # Set subpath, subdir and bareclone for git unpack
+        if 'subdir' in ud.parm:
+            ud.parm['subpath'] = ud.parm['subdir']
+        key = f"git3:{ud.parm['protocol']}://{ud.host}{ud.path}".encode()
+        ud.parm['key'] = key
+        ud.parm['subdir'] = os.path.join(moddir, 'cache/vcs',
+                                         hashlib.sha256(key).hexdigest())
+        ud.parm['bareclone'] = '1'
+
+        super().urldata_init(ud, d)
+
+    def unpack(self, ud, rootdir, d):
+        """Unpack the module in the module cache."""
+
+        # Unpack the bare git repository
+        super().unpack(ud, rootdir, d)
+
+        moddir = d.getVar('GO_MOD_CACHE_DIR') or 'pkg/mod'
+
+        # Create the info file
+        module = ud.parm['module']
+        repodir = os.path.join(rootdir, ud.parm['subdir'])
+        with open(repodir + '.info', 'wb') as f:
+            f.write(ud.parm['key'])
+
+        # Unpack the go.mod file from the repository
+        unpackdir = os.path.join(rootdir, moddir, 'cache/download',
+                                 escape(module), '@v')
+        bb.utils.mkdirhier(unpackdir)
+        srcrev = ud.parm['srcrev']
+        version = ud.parm['version']
+        escaped_version = escape(version)
+        cmd = f"git ls-tree -r --name-only '{srcrev}'"
+        if 'subpath' in ud.parm:
+            cmd += f" '{ud.parm['subpath']}'"
+        files = runfetchcmd(cmd, d, workdir=repodir).split()
+        name = escaped_version + '.mod'
+        bb.note(f"Unpacking {name} to {unpackdir}/")
+        with open(os.path.join(unpackdir, name), mode='wb') as mf:
+            f = 'go.mod'
+            if 'subpath' in ud.parm:
+                f = os.path.join(ud.parm['subpath'], f)
+            if f in files:
+                cmd = ['git', 'cat-file', 'blob', srcrev + ':' + f]
+                subprocess.check_call(cmd, stdout=mf, cwd=repodir,
+                                      preexec_fn=subprocess_setup)
+            else:
+                # If the module does not have a go.mod file, synthesize one
+                # containing only a module statement.
+                mf.write(f'module {module}\n'.encode())
+
+        # Synthesize the module zip file from the repository
+        name = escaped_version + '.zip'
+        bb.note(f"Unpacking {name} to {unpackdir}/")
+        with zipfile.ZipFile(os.path.join(unpackdir, name), mode='w') as zf:
+            prefix = module + '@' + version + '/'
+            for f in files:
+                cmd = ['git', 'cat-file', 'blob', srcrev + ':' + f]
+                data = subprocess.check_output(cmd, cwd=repodir,
+                                               preexec_fn=subprocess_setup)
+                zf.writestr(prefix + f, data)
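The escape() helper above implements the Go module proxy's case-encoding, which keeps cache paths unambiguous on case-insensitive filesystems. A small self-contained demonstration (the module path is a made-up example):

    import re

    def escape(path):
        """Escape capital letters using exclamation points."""
        return re.sub(r'([A-Z])', lambda m: '!' + m.group(1).lower(), path)

    print(escape("github.com/BurntSushi/toml/@v/v1.3.2.zip"))
    # github.com/!burnt!sushi/toml/@v/v1.3.2.zip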
diff --git a/bitbake/lib/bb/fetch2/hg.py b/bitbake/lib/bb/fetch2/hg.py
index 063e13008a..cbff8c490c 100644
--- a/bitbake/lib/bb/fetch2/hg.py
+++ b/bitbake/lib/bb/fetch2/hg.py
@@ -242,6 +242,7 @@ class Hg(FetchMethod):
             revflag = "-r %s" % ud.revision
         subdir = ud.parm.get("destsuffix", ud.module)
         codir = "%s/%s" % (destdir, subdir)
+        ud.unpack_tracer.unpack("hg", codir)
 
         scmdata = ud.parm.get("scmdata", "")
         if scmdata != "nokeep":
diff --git a/bitbake/lib/bb/fetch2/local.py b/bitbake/lib/bb/fetch2/local.py
index e7d1c8c58f..fda56a564e 100644
--- a/bitbake/lib/bb/fetch2/local.py
+++ b/bitbake/lib/bb/fetch2/local.py
@@ -29,11 +29,10 @@ class Local(FetchMethod):
 
     def urldata_init(self, ud, d):
         # We don't set localfile as for this fetcher the file is already local!
-        ud.decodedurl = urllib.parse.unquote(ud.url.split("://")[1].split(";")[0])
-        ud.basename = os.path.basename(ud.decodedurl)
-        ud.basepath = ud.decodedurl
+        ud.basename = os.path.basename(ud.path)
+        ud.basepath = ud.path
         ud.needdonestamp = False
-        if "*" in ud.decodedurl:
+        if "*" in ud.path:
             raise bb.fetch2.ParameterError("file:// urls using globbing are no longer supported. Please place the files in a directory and reference that instead.", ud.url)
         return
 
@@ -41,28 +40,24 @@ class Local(FetchMethod):
41 """ 40 """
42 Return the local filename of a given url assuming a successful fetch. 41 Return the local filename of a given url assuming a successful fetch.
43 """ 42 """
44 return self.localpaths(urldata, d)[-1] 43 return self.localfile_searchpaths(urldata, d)[-1]
45 44
46 def localpaths(self, urldata, d): 45 def localfile_searchpaths(self, urldata, d):
47 """ 46 """
48 Return the local filename of a given url assuming a successful fetch. 47 Return the local filename of a given url assuming a successful fetch.
49 """ 48 """
50 searched = [] 49 searched = []
51 path = urldata.decodedurl 50 path = urldata.path
52 newpath = path 51 newpath = path
53 if path[0] == "/": 52 if path[0] == "/":
53 logger.debug2("Using absolute %s" % (path))
54 return [path] 54 return [path]
55 filespath = d.getVar('FILESPATH') 55 filespath = d.getVar('FILESPATH')
56 if filespath: 56 if filespath:
57 logger.debug2("Searching for %s in paths:\n %s" % (path, "\n ".join(filespath.split(":")))) 57 logger.debug2("Searching for %s in paths:\n %s" % (path, "\n ".join(filespath.split(":"))))
58 newpath, hist = bb.utils.which(filespath, path, history=True) 58 newpath, hist = bb.utils.which(filespath, path, history=True)
59 logger.debug2("Using %s for %s" % (newpath, path))
59 searched.extend(hist) 60 searched.extend(hist)
60 if not os.path.exists(newpath):
61 dldirfile = os.path.join(d.getVar("DL_DIR"), path)
62 logger.debug2("Defaulting to %s for %s" % (dldirfile, path))
63 bb.utils.mkdirhier(os.path.dirname(dldirfile))
64 searched.append(dldirfile)
65 return searched
66 return searched 61 return searched
67 62
68 def need_update(self, ud, d): 63 def need_update(self, ud, d):
@@ -78,9 +73,7 @@ class Local(FetchMethod):
         filespath = d.getVar('FILESPATH')
         if filespath:
             locations = filespath.split(":")
-            locations.append(d.getVar("DL_DIR"))
-
-        msg = "Unable to find file " + urldata.url + " anywhere. The paths that were searched were:\n    " + "\n    ".join(locations)
+        msg = "Unable to find file " + urldata.url + " anywhere to download to " + urldata.localpath + ". The paths that were searched were:\n    " + "\n    ".join(locations)
         raise FetchError(msg)
 
         return True
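With the DL_DIR fallback removed above, a file:// URL now resolves only through the FILESPATH search path. A rough sketch of that lookup, loosely modelled on what bb.utils.which does with a history (directory names are hypothetical):

    import os

    def find_file(filespath, relpath):
        searched = []
        for directory in filespath.split(":"):
            candidate = os.path.join(directory, relpath)
            searched.append(candidate)
            if os.path.exists(candidate):
                return candidate, searched
        return None, searched

    # e.g. find_file("/layer/files:/layer/recipe-name", "defconfig")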
diff --git a/bitbake/lib/bb/fetch2/npm.py b/bitbake/lib/bb/fetch2/npm.py
index 47898509ff..e469d66768 100644
--- a/bitbake/lib/bb/fetch2/npm.py
+++ b/bitbake/lib/bb/fetch2/npm.py
@@ -42,19 +42,27 @@ from bb.utils import is_semver
 
 def npm_package(package):
     """Convert the npm package name to remove unsupported character"""
-    # Scoped package names (with the @) use the same naming convention
-    # as the 'npm pack' command.
-    if package.startswith("@"):
-        return re.sub("/", "-", package[1:])
-    return package
+    # For scoped package names ('@user/package') the '/' is replaced by a '-'.
+    # This is similar to what 'npm pack' does, but 'npm pack' also strips the
+    # leading '@', which can lead to ambiguous package names.
+    name = re.sub("/", "-", package)
+    name = name.lower()
+    name = re.sub(r"[^\-a-z0-9@]", "", name)
+    name = name.strip("-")
+    return name
+
 
 def npm_filename(package, version):
     """Get the filename of a npm package"""
     return npm_package(package) + "-" + version + ".tgz"
 
-def npm_localfile(package, version):
+def npm_localfile(package, version=None):
     """Get the local filename of a npm package"""
-    return os.path.join("npm2", npm_filename(package, version))
+    if version is not None:
+        filename = npm_filename(package, version)
+    else:
+        filename = package
+    return os.path.join("npm2", filename)
 
 def npm_integrity(integrity):
     """
@@ -69,41 +77,67 @@ def npm_unpack(tarball, destdir, d):
     bb.utils.mkdirhier(destdir)
     cmd = "tar --extract --gzip --file=%s" % shlex.quote(tarball)
     cmd += " --no-same-owner"
+    cmd += " --delay-directory-restore"
     cmd += " --strip-components=1"
     runfetchcmd(cmd, d, workdir=destdir)
+    runfetchcmd("chmod -R +X '%s'" % (destdir), d, quiet=True, workdir=destdir)
 
 class NpmEnvironment(object):
     """
     Using a npm config file seems more reliable than using cli arguments.
     This class allows to create a controlled environment for npm commands.
     """
-    def __init__(self, d, configs=None):
+    def __init__(self, d, configs=[], npmrc=None):
         self.d = d
-        self.configs = configs
+
+        self.user_config = tempfile.NamedTemporaryFile(mode="w", buffering=1)
+
+        hn = self._home_npmrc(d)
+        if hn is not None:
+            with open(hn, 'r') as hnf:
+                self.user_config.write(hnf.read())
+
+        for key, value in configs:
+            self.user_config.write("%s=%s\n" % (key, value))
+
+        if npmrc:
+            self.global_config_name = npmrc
+        else:
+            self.global_config_name = "/dev/null"
+
+    def __del__(self):
+        if self.user_config:
+            self.user_config.close()
+
+    def _home_npmrc(self, d):
+        """Function to return user's HOME .npmrc file (or None if it doesn't exist)"""
+        home_npmrc_file = os.path.join(os.environ.get("HOME"), ".npmrc")
+        if d.getVar("BB_USE_HOME_NPMRC") == "1" and os.path.exists(home_npmrc_file):
+            bb.warn(f"BB_USE_HOME_NPMRC flag set and valid .npmrc detected - "\
+                    f"npm fetcher will use {home_npmrc_file}")
+            return home_npmrc_file
+        return None
 
     def run(self, cmd, args=None, configs=None, workdir=None):
         """Run npm command in a controlled environment"""
         with tempfile.TemporaryDirectory() as tmpdir:
             d = bb.data.createCopy(self.d)
+            d.setVar("PATH", d.getVar("PATH")) # PATH might contain $HOME - evaluate it before patching
             d.setVar("HOME", tmpdir)
 
-            cfgfile = os.path.join(tmpdir, "npmrc")
-
             if not workdir:
                 workdir = tmpdir
 
             def _run(cmd):
-                cmd = "NPM_CONFIG_USERCONFIG=%s " % cfgfile + cmd
-                cmd = "NPM_CONFIG_GLOBALCONFIG=%s " % cfgfile + cmd
+                cmd = "NPM_CONFIG_USERCONFIG=%s " % (self.user_config.name) + cmd
+                cmd = "NPM_CONFIG_GLOBALCONFIG=%s " % (self.global_config_name) + cmd
                 return runfetchcmd(cmd, d, workdir=workdir)
 
-            if self.configs:
-                for key, value in self.configs:
-                    _run("npm config set %s %s" % (key, shlex.quote(value)))
-
             if configs:
+                bb.warn("Use of configs argument of NpmEnvironment.run() function"
+                        " is deprecated. Please use args argument instead.")
                 for key, value in configs:
-                    _run("npm config set %s %s" % (key, shlex.quote(value)))
+                    cmd += " --%s=%s" % (key, shlex.quote(value))
 
             if args:
                 for key, value in args:
@@ -142,12 +176,12 @@ class Npm(FetchMethod):
             raise ParameterError("Invalid 'version' parameter", ud.url)
 
         # Extract the 'registry' part of the url
-        ud.registry = re.sub(r"^npm://", "http://", ud.url.split(";")[0])
+        ud.registry = re.sub(r"^npm://", "https://", ud.url.split(";")[0])
 
         # Using the 'downloadfilename' parameter as local filename
         # or the npm package name.
         if "downloadfilename" in ud.parm:
-            ud.localfile = d.expand(ud.parm["downloadfilename"])
+            ud.localfile = npm_localfile(ud.parm["downloadfilename"])
         else:
             ud.localfile = npm_localfile(ud.package, ud.version)
 
@@ -165,14 +199,14 @@ class Npm(FetchMethod):
 
     def _resolve_proxy_url(self, ud, d):
         def _npm_view():
-            configs = []
-            configs.append(("json", "true"))
-            configs.append(("registry", ud.registry))
+            args = []
+            args.append(("json", "true"))
+            args.append(("registry", ud.registry))
             pkgver = shlex.quote(ud.package + "@" + ud.version)
             cmd = ud.basecmd + " view %s" % pkgver
             env = NpmEnvironment(d)
             check_network_access(d, cmd, ud.registry)
-            view_string = env.run(cmd, configs=configs)
+            view_string = env.run(cmd, args=args)
 
             if not view_string:
                 raise FetchError("Unavailable package %s" % pkgver, ud.url)
@@ -280,6 +314,7 @@ class Npm(FetchMethod):
         destsuffix = ud.parm.get("destsuffix", "npm")
         destdir = os.path.join(rootdir, destsuffix)
         npm_unpack(ud.localpath, destdir, d)
+        ud.unpack_tracer.unpack("npm", destdir)
 
     def clean(self, ud, d):
         """Clean any existing full or partial download"""
diff --git a/bitbake/lib/bb/fetch2/npmsw.py b/bitbake/lib/bb/fetch2/npmsw.py
index 0c3511d8ab..2f9599ee9e 100644
--- a/bitbake/lib/bb/fetch2/npmsw.py
+++ b/bitbake/lib/bb/fetch2/npmsw.py
@@ -24,34 +24,39 @@ import bb
 from bb.fetch2 import Fetch
 from bb.fetch2 import FetchMethod
 from bb.fetch2 import ParameterError
+from bb.fetch2 import runfetchcmd
 from bb.fetch2 import URI
 from bb.fetch2.npm import npm_integrity
 from bb.fetch2.npm import npm_localfile
 from bb.fetch2.npm import npm_unpack
 from bb.utils import is_semver
+from bb.utils import lockfile
+from bb.utils import unlockfile
 
 def foreach_dependencies(shrinkwrap, callback=None, dev=False):
     """
     Run a callback for each dependencies of a shrinkwrap file.
     The callback is using the format:
-        callback(name, params, deptree)
+        callback(name, data, location)
     with:
         name = the package name (string)
-        params = the package parameters (dictionary)
-        deptree = the package dependency tree (array of strings)
+        data = the package data (dictionary)
+        location = the location of the package (string)
     """
-    def _walk_deps(deps, deptree):
-        for name in deps:
-            subtree = [*deptree, name]
-            _walk_deps(deps[name].get("dependencies", {}), subtree)
-            if callback is not None:
-                if deps[name].get("dev", False) and not dev:
-                    continue
-                elif deps[name].get("bundled", False):
-                    continue
-                callback(name, deps[name], subtree)
-
-    _walk_deps(shrinkwrap.get("dependencies", {}), [])
+    packages = shrinkwrap.get("packages")
+    if not packages:
+        raise FetchError("Invalid shrinkwrap file format")
+
+    for location, data in packages.items():
+        # Skip empty main and local link target packages
+        if not location.startswith('node_modules/'):
+            continue
+        elif not dev and data.get("dev", False):
+            continue
+        elif data.get("inBundle", False):
+            continue
+        name = location.split('node_modules/')[-1]
+        callback(name, data, location)
 
 class NpmShrinkWrap(FetchMethod):
     """Class to fetch all package from a shrinkwrap file"""
@@ -72,19 +77,28 @@ class NpmShrinkWrap(FetchMethod):
         # Resolve the dependencies
         ud.deps = []
 
-        def _resolve_dependency(name, params, deptree):
+        def _resolve_dependency(name, params, destsuffix):
             url = None
             localpath = None
             extrapaths = []
-            destsubdirs = [os.path.join("node_modules", dep) for dep in deptree]
-            destsuffix = os.path.join(*destsubdirs)
+            unpack = True
 
-            integrity = params.get("integrity", None)
-            resolved = params.get("resolved", None)
-            version = params.get("version", None)
+            integrity = params.get("integrity")
+            resolved = params.get("resolved")
+            version = params.get("version")
+            link = params.get("link", False)
+
+            # Handle link sources
+            if link:
+                localpath = resolved
+                unpack = False
 
             # Handle registry sources
-            if is_semver(version) and resolved and integrity:
+            elif version and is_semver(version) and integrity:
+                # Handle duplicate dependencies without url
+                if not resolved:
+                    return
+
                 localfile = npm_localfile(name, version)
 
                 uri = URI(resolved)
@@ -108,10 +122,10 @@ class NpmShrinkWrap(FetchMethod):
                 extrapaths.append(resolvefile)
 
             # Handle http tarball sources
-            elif version.startswith("http") and integrity:
-                localfile = os.path.join("npm2", os.path.basename(version))
+            elif resolved.startswith("http") and integrity:
+                localfile = npm_localfile(os.path.basename(resolved))
 
-                uri = URI(version)
+                uri = URI(resolved)
                 uri.params["downloadfilename"] = localfile
 
                 checksum_name, checksum_expected = npm_integrity(integrity)
@@ -121,8 +135,12 @@ class NpmShrinkWrap(FetchMethod):
 
                 localpath = os.path.join(d.getVar("DL_DIR"), localfile)
 
+            # Handle local tarball sources
+            elif resolved.startswith("file"):
+                localpath = resolved[5:]
+
             # Handle git sources
-            elif version.startswith("git"):
+            elif resolved.startswith("git"):
                 regex = re.compile(r"""
                     ^
                     git\+
@@ -134,29 +152,31 @@ class NpmShrinkWrap(FetchMethod):
                     $
                     """, re.VERBOSE)
 
-                match = regex.match(version)
-
+                match = regex.match(resolved)
                 if not match:
-                    raise ParameterError("Invalid git url: %s" % version, ud.url)
+                    raise ParameterError("Invalid git url: %s" % resolved, ud.url)
 
                 groups = match.groupdict()
 
                 uri = URI("git://" + str(groups["url"]))
                 uri.params["protocol"] = str(groups["protocol"])
                 uri.params["rev"] = str(groups["rev"])
+                uri.params["nobranch"] = "1"
                 uri.params["destsuffix"] = destsuffix
 
                 url = str(uri)
 
-            # local tarball sources and local link sources are unsupported
             else:
                 raise ParameterError("Unsupported dependency: %s" % name, ud.url)
 
+            # name is needed by unpack tracer for module mapping
             ud.deps.append({
+                "name": name,
                 "url": url,
                 "localpath": localpath,
                 "extrapaths": extrapaths,
                 "destsuffix": destsuffix,
+                "unpack": unpack,
             })
 
         try:
@@ -177,17 +197,23 @@ class NpmShrinkWrap(FetchMethod):
         # This fetcher resolves multiple URIs from a shrinkwrap file and then
         # forwards it to a proxy fetcher. The management of the donestamp file,
         # the lockfile and the checksums are forwarded to the proxy fetcher.
-        ud.proxy = Fetch([dep["url"] for dep in ud.deps], data)
+        shrinkwrap_urls = [dep["url"] for dep in ud.deps if dep["url"]]
+        if shrinkwrap_urls:
+            ud.proxy = Fetch(shrinkwrap_urls, data)
         ud.needdonestamp = False
 
     @staticmethod
     def _foreach_proxy_method(ud, handle):
         returns = []
-        for proxy_url in ud.proxy.urls:
-            proxy_ud = ud.proxy.ud[proxy_url]
-            proxy_d = ud.proxy.d
-            proxy_ud.setup_localpath(proxy_d)
-            returns.append(handle(proxy_ud.method, proxy_ud, proxy_d))
+        # Check if there are dependencies before trying to fetch them
+        if len(ud.deps) > 0:
+            for proxy_url in ud.proxy.urls:
+                proxy_ud = ud.proxy.ud[proxy_url]
+                proxy_d = ud.proxy.d
+                proxy_ud.setup_localpath(proxy_d)
+                lf = lockfile(proxy_ud.lockfile)
+                returns.append(handle(proxy_ud.method, proxy_ud, proxy_d))
+                unlockfile(lf)
         return returns
 
     def verify_donestamp(self, ud, d):
@@ -220,10 +246,11 @@ class NpmShrinkWrap(FetchMethod):
 
     def unpack(self, ud, rootdir, d):
         """Unpack the downloaded dependencies"""
-        destdir = d.getVar("S")
+        destdir = rootdir
         destsuffix = ud.parm.get("destsuffix")
         if destsuffix:
             destdir = os.path.join(rootdir, destsuffix)
+        ud.unpack_tracer.unpack("npm-shrinkwrap", destdir)
 
         bb.utils.mkdirhier(destdir)
         bb.utils.copyfile(ud.shrinkwrap_file,
@@ -237,7 +264,16 @@ class NpmShrinkWrap(FetchMethod):
 
         for dep in manual:
             depdestdir = os.path.join(destdir, dep["destsuffix"])
-            npm_unpack(dep["localpath"], depdestdir, d)
+            if dep["url"]:
+                npm_unpack(dep["localpath"], depdestdir, d)
+            else:
+                depsrcdir = os.path.join(destdir, dep["localpath"])
+                if dep["unpack"]:
+                    npm_unpack(depsrcdir, depdestdir, d)
+                else:
+                    bb.utils.mkdirhier(depdestdir)
+                    cmd = 'cp -fpPRH "%s/." .' % (depsrcdir)
+                    runfetchcmd(cmd, d, workdir=depdestdir)
 
     def clean(self, ud, d):
         """Clean any existing full or partial download"""
diff --git a/bitbake/lib/bb/fetch2/osc.py b/bitbake/lib/bb/fetch2/osc.py
index d9ce44390c..495ac8a30a 100644
--- a/bitbake/lib/bb/fetch2/osc.py
+++ b/bitbake/lib/bb/fetch2/osc.py
@@ -1,4 +1,6 @@
 #
+# Copyright BitBake Contributors
+#
 # SPDX-License-Identifier: GPL-2.0-only
 #
 """
@@ -9,6 +11,7 @@ Based on the svn "Fetch" implementation.
 
 import logging
 import os
+import re
 import bb
 from bb.fetch2 import FetchMethod
 from bb.fetch2 import FetchError
@@ -36,6 +39,7 @@ class Osc(FetchMethod):
         # Create paths to osc checkouts
         oscdir = d.getVar("OSCDIR") or (d.getVar("DL_DIR") + "/osc")
         relpath = self._strip_leading_slashes(ud.path)
+        ud.oscdir = oscdir
         ud.pkgdir = os.path.join(oscdir, ud.host)
         ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)
 
@@ -43,13 +47,13 @@ class Osc(FetchMethod):
             ud.revision = ud.parm['rev']
         else:
             pv = d.getVar("PV", False)
-            rev = bb.fetch2.srcrev_internal_helper(ud, d)
+            rev = bb.fetch2.srcrev_internal_helper(ud, d, '')
             if rev:
                 ud.revision = rev
             else:
                 ud.revision = ""
 
-        ud.localfile = d.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision))
+        ud.localfile = d.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), relpath.replace('/', '.'), ud.revision))
 
     def _buildosccommand(self, ud, d, command):
         """
@@ -59,26 +63,49 @@ class Osc(FetchMethod):
 
         basecmd = d.getVar("FETCHCMD_osc") or "/usr/bin/env osc"
 
-        proto = ud.parm.get('protocol', 'ocs')
+        proto = ud.parm.get('protocol', 'https')
 
         options = []
 
         config = "-c %s" % self.generate_config(ud, d)
 
-        if ud.revision:
+        if getattr(ud, 'revision', ''):
             options.append("-r %s" % ud.revision)
 
         coroot = self._strip_leading_slashes(ud.path)
 
         if command == "fetch":
-            osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options))
+            osccmd = "%s %s -A %s://%s co %s/%s %s" % (basecmd, config, proto, ud.host, coroot, ud.module, " ".join(options))
         elif command == "update":
-            osccmd = "%s %s up %s" % (basecmd, config, " ".join(options))
+            osccmd = "%s %s -A %s://%s up %s" % (basecmd, config, proto, ud.host, " ".join(options))
+        elif command == "api_source":
+            osccmd = "%s %s -A %s://%s api source/%s/%s" % (basecmd, config, proto, ud.host, coroot, ud.module)
         else:
             raise FetchError("Invalid osc command %s" % command, ud.url)
 
         return osccmd
 
+    def _latest_revision(self, ud, d, name):
+        """
+        Fetch latest revision for the given package
+        """
+        api_source_cmd = self._buildosccommand(ud, d, "api_source")
+
+        output = runfetchcmd(api_source_cmd, d)
+        match = re.match(r'<directory ?.* rev="(\d+)".*>', output)
+        if match is None:
+            raise FetchError("Unable to parse osc response", ud.url)
+        return match.groups()[0]
+
+    def _revision_key(self, ud, d, name):
+        """
+        Return a unique key for the url
+        """
+        # Collapse adjacent slashes
+        slash_re = re.compile(r"/+")
+        rev = getattr(ud, 'revision', "latest")
+        return "osc:%s%s.%s.%s" % (ud.host, slash_re.sub(".", ud.path), name, rev)
+
     def download(self, ud, d):
         """
         Fetch url
@@ -86,7 +113,7 @@ class Osc(FetchMethod):
 
         logger.debug2("Fetch: checking for module directory '" + ud.moddir + "'")
 
-        if os.access(os.path.join(d.getVar('OSCDIR'), ud.path, ud.module), os.R_OK):
+        if os.access(ud.moddir, os.R_OK):
             oscupdatecmd = self._buildosccommand(ud, d, "update")
             logger.info("Update "+ ud.url)
             # update sources there
@@ -114,20 +141,23 @@ class Osc(FetchMethod):
         Generate a .oscrc to be used for this run.
         """
 
-        config_path = os.path.join(d.getVar('OSCDIR'), "oscrc")
+        config_path = os.path.join(ud.oscdir, "oscrc")
+        if not os.path.exists(ud.oscdir):
+            bb.utils.mkdirhier(ud.oscdir)
+
         if (os.path.exists(config_path)):
             os.remove(config_path)
 
         f = open(config_path, 'w')
+        proto = ud.parm.get('protocol', 'https')
         f.write("[general]\n")
-        f.write("apisrv = %s\n" % ud.host)
-        f.write("scheme = http\n")
+        f.write("apiurl = %s://%s\n" % (proto, ud.host))
         f.write("su-wrapper = su -c\n")
         f.write("build-root = %s\n" % d.getVar('WORKDIR'))
         f.write("urllist = %s\n" % d.getVar("OSCURLLIST"))
         f.write("extra-pkgs = gzip\n")
         f.write("\n")
-        f.write("[%s]\n" % ud.host)
+        f.write("[%s://%s]\n" % (proto, ud.host))
         f.write("user = %s\n" % ud.parm["user"])
         f.write("pass = %s\n" % ud.parm["pswd"])
         f.close()
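The new _latest_revision() extracts the rev attribute from the XML returned by the added "api_source" command. The same match against a fabricated response of the expected shape:

    import re

    output = '<directory name="pkg" rev="42" vrev="7" srcmd5="abc123">'
    match = re.match(r'<directory ?.* rev="(\d+)".*>', output)
    assert match is not None and match.groups()[0] == "42"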
diff --git a/bitbake/lib/bb/fetch2/perforce.py b/bitbake/lib/bb/fetch2/perforce.py
index e2a41a4a12..3b6fa4b1ec 100644
--- a/bitbake/lib/bb/fetch2/perforce.py
+++ b/bitbake/lib/bb/fetch2/perforce.py
@@ -134,7 +134,7 @@ class Perforce(FetchMethod):
 
         ud.setup_revisions(d)
 
-        ud.localfile = d.expand('%s_%s_%s_%s.tar.gz' % (cleanedhost, cleanedpath, cleandedmodule, ud.revision))
+        ud.localfile = d.expand('%s_%s_%s_%s.tar.gz' % (cleanedhost, cleanedpath, cleanedmodule, ud.revision))
 
     def _buildp4command(self, ud, d, command, depot_filename=None):
         """
diff --git a/bitbake/lib/bb/fetch2/s3.py b/bitbake/lib/bb/fetch2/s3.py
index ffca73c8e4..22c0538139 100644
--- a/bitbake/lib/bb/fetch2/s3.py
+++ b/bitbake/lib/bb/fetch2/s3.py
@@ -18,10 +18,47 @@ The aws tool must be correctly installed and configured prior to use.
 import os
 import bb
 import urllib.request, urllib.parse, urllib.error
+import re
 from bb.fetch2 import FetchMethod
 from bb.fetch2 import FetchError
 from bb.fetch2 import runfetchcmd
 
+def convertToBytes(value, unit):
+    value = float(value)
+    if (unit == "KiB"):
+        value = value*1024.0
+    elif (unit == "MiB"):
+        value = value*1024.0*1024.0
+    elif (unit == "GiB"):
+        value = value*1024.0*1024.0*1024.0
+    return value
+
+class S3ProgressHandler(bb.progress.LineFilterProgressHandler):
+    """
+    Extract progress information from s3 cp output, e.g.:
+    Completed 5.1 KiB/8.8 GiB (12.0 MiB/s) with 1 file(s) remaining
+    """
+    def __init__(self, d):
+        super(S3ProgressHandler, self).__init__(d)
+        # Send an initial progress event so the bar gets shown
+        self._fire_progress(0)
+
+    def writeline(self, line):
+        percs = re.findall(r'^Completed (\d+.{0,1}\d*) (\w+)\/(\d+.{0,1}\d*) (\w+) (\(.+\)) with\s+', line)
+        if percs:
+            completed = (percs[-1][0])
+            completedUnit = (percs[-1][1])
+            total = (percs[-1][2])
+            totalUnit = (percs[-1][3])
+            completed = convertToBytes(completed, completedUnit)
+            total = convertToBytes(total, totalUnit)
+            progress = (completed/total)*100.0
+            rate = percs[-1][4]
+            self.update(progress, rate)
+            return False
+        return True
+
+
 class S3(FetchMethod):
     """Class to fetch urls via 'aws s3'"""
 
@@ -40,7 +77,7 @@ class S3(FetchMethod):
         else:
             ud.basename = os.path.basename(ud.path)
 
-        ud.localfile = d.expand(urllib.parse.unquote(ud.basename))
+        ud.localfile = ud.basename
 
         ud.basecmd = d.getVar("FETCHCMD_s3") or "/usr/bin/env aws s3"
 
@@ -52,7 +89,9 @@ class S3(FetchMethod):
 
         cmd = '%s cp s3://%s%s %s' % (ud.basecmd, ud.host, ud.path, ud.localpath)
         bb.fetch2.check_network_access(d, cmd, ud.url)
-        runfetchcmd(cmd, d)
+
+        progresshandler = S3ProgressHandler(d)
+        runfetchcmd(cmd, d, False, log=progresshandler)
 
         # Additional sanity checks copied from the wget class (although there
         # are no known issues which mean these are required, treat the aws cli
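S3ProgressHandler above scrapes the status lines "aws s3 cp" prints; the regex captures the completed/total sizes with their units plus the rate. Running the same pattern against a sample line of that shape:

    import re

    line = "Completed 5.1 KiB/8.8 GiB (12.0 MiB/s) with 1 file(s) remaining"
    percs = re.findall(r'^Completed (\d+.{0,1}\d*) (\w+)\/(\d+.{0,1}\d*) (\w+) (\(.+\)) with\s+', line)
    print(percs)  # [('5.1', 'KiB', '8.8', 'GiB', '(12.0 MiB/s)')]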
diff --git a/bitbake/lib/bb/fetch2/sftp.py b/bitbake/lib/bb/fetch2/sftp.py
index f87f292e5d..bee71a0d0d 100644
--- a/bitbake/lib/bb/fetch2/sftp.py
+++ b/bitbake/lib/bb/fetch2/sftp.py
@@ -77,7 +77,7 @@ class SFTP(FetchMethod):
         else:
             ud.basename = os.path.basename(ud.path)
 
-        ud.localfile = d.expand(urllib.parse.unquote(ud.basename))
+        ud.localfile = ud.basename
 
     def download(self, ud, d):
         """Fetch urls"""
@@ -103,7 +103,7 @@ class SFTP(FetchMethod):
         if path[:3] == '/~/':
             path = path[3:]
 
-        remote = '%s%s:%s' % (user, urlo.hostname, path)
+        remote = '"%s%s:%s"' % (user, urlo.hostname, path)
 
         cmd = '%s %s %s %s' % (basecmd, port, remote, lpath)
 
diff --git a/bitbake/lib/bb/fetch2/ssh.py b/bitbake/lib/bb/fetch2/ssh.py
index 2c8557e1f8..2a0f2cb44b 100644
--- a/bitbake/lib/bb/fetch2/ssh.py
+++ b/bitbake/lib/bb/fetch2/ssh.py
@@ -32,6 +32,7 @@ IETF secsh internet draft:
 
 import re, os
 from bb.fetch2 import check_network_access, FetchMethod, ParameterError, runfetchcmd
+import urllib
 
 
 __pattern__ = re.compile(r'''
@@ -40,9 +41,9 @@ __pattern__ = re.compile(r'''
     (                          # Optional username/password block
      (?P<user>\S+)             # username
      (:(?P<pass>\S+))?         # colon followed by the password (optional)
-    )?
     (?P<cparam>(;[^;]+)*)?     # connection parameters block (optional)
     @
+    )?
     (?P<host>\S+?)             # non-greedy match of the host
     (:(?P<port>[0-9]+))?       # colon followed by the port (optional)
     /
@@ -70,9 +71,9 @@ class SSH(FetchMethod):
70 "git:// prefix with protocol=ssh", urldata.url) 71 "git:// prefix with protocol=ssh", urldata.url)
71 m = __pattern__.match(urldata.url) 72 m = __pattern__.match(urldata.url)
72 path = m.group('path') 73 path = m.group('path')
74 path = urllib.parse.unquote(path)
73 host = m.group('host') 75 host = m.group('host')
74 urldata.localpath = os.path.join(d.getVar('DL_DIR'), 76 urldata.localfile = os.path.basename(os.path.normpath(path))
75 os.path.basename(os.path.normpath(path)))
76 77
77 def download(self, urldata, d): 78 def download(self, urldata, d):
78 dldir = d.getVar('DL_DIR') 79 dldir = d.getVar('DL_DIR')
@@ -96,6 +97,11 @@ class SSH(FetchMethod):
             fr += '@%s' % host
         else:
             fr = host
+
+        if path[0] != '~':
+            path = '/%s' % path
+        path = urllib.parse.unquote(path)
+
         fr += ':%s' % path
 
         cmd = 'scp -B -r %s %s %s/' % (
@@ -108,3 +114,41 @@ class SSH(FetchMethod):
 
         runfetchcmd(cmd, d)
 
+    def checkstatus(self, fetch, urldata, d):
+        """
+        Check the status of the url
+        """
+        m = __pattern__.match(urldata.url)
+        path = m.group('path')
+        host = m.group('host')
+        port = m.group('port')
+        user = m.group('user')
+        password = m.group('pass')
+
+        if port:
+            portarg = '-P %s' % port
+        else:
+            portarg = ''
+
+        if user:
+            fr = user
+            if password:
+                fr += ':%s' % password
+            fr += '@%s' % host
+        else:
+            fr = host
+
+        if path[0] != '~':
+            path = '/%s' % path
+        path = urllib.parse.unquote(path)
+
+        cmd = 'ssh -o BatchMode=true %s %s [ -f %s ]' % (
+            portarg,
+            fr,
+            path
+        )
+
+        check_network_access(d, cmd, urldata.url)
+        runfetchcmd(cmd, d)
+
+        return True
diff --git a/bitbake/lib/bb/fetch2/svn.py b/bitbake/lib/bb/fetch2/svn.py
index 8856ef1c62..0852108e7d 100644
--- a/bitbake/lib/bb/fetch2/svn.py
+++ b/bitbake/lib/bb/fetch2/svn.py
@@ -57,7 +57,12 @@ class Svn(FetchMethod):
         if 'rev' in ud.parm:
             ud.revision = ud.parm['rev']
 
-        ud.localfile = d.expand('%s_%s_%s_%s_.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision))
+        # Whether to use the @REV peg-revision syntax in the svn command or not
+        ud.pegrevision = True
+        if 'nopegrevision' in ud.parm:
+            ud.pegrevision = False
+
+        ud.localfile = d.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ["0", "1"][ud.pegrevision]))
 
     def _buildsvncommand(self, ud, d, command):
         """
@@ -86,7 +91,7 @@ class Svn(FetchMethod):
         if command == "info":
             svncmd = "%s info %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module)
         elif command == "log1":
-            svncmd = "%s log --limit 1 %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module)
+            svncmd = "%s log --limit 1 --quiet %s %s://%s/%s/" % (ud.basecmd, " ".join(options), proto, svnroot, ud.module)
         else:
             suffix = ""
 
@@ -98,7 +103,8 @@ class Svn(FetchMethod):
 
             if ud.revision:
                 options.append("-r %s" % ud.revision)
-                suffix = "@%s" % (ud.revision)
+                if ud.pegrevision:
+                    suffix = "@%s" % (ud.revision)
 
             if command == "fetch":
                 transportuser = ud.parm.get("transportuser", "")
@@ -204,3 +210,6 @@ class Svn(FetchMethod):
 
     def _build_revision(self, ud, d):
         return ud.revision
+
+    def supports_checksum(self, urldata):
+        return False
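Because pegrevision now changes how svn is invoked, it is also folded into the cached tarball name so peg and no-peg checkouts cannot share a stale archive. With placeholder module/host/revision values:

    pegrevision = True  # becomes False when ;nopegrevision=1 is in SRC_URI
    localfile = '%s_%s_%s_%s_%s.tar.gz' % (
        "trunk", "svn.example.com", ".repo.module", "1234",
        ["0", "1"][pegrevision])
    print(localfile)  # trunk_svn.example.com_.repo.module_1234_1.tar.gz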
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py
index 6d82f3af07..7e43d3bc97 100644
--- a/bitbake/lib/bb/fetch2/wget.py
+++ b/bitbake/lib/bb/fetch2/wget.py
@@ -26,7 +26,6 @@ from bb.fetch2 import FetchMethod
 from bb.fetch2 import FetchError
 from bb.fetch2 import logger
 from bb.fetch2 import runfetchcmd
-from bb.utils import export_proxies
 from bs4 import BeautifulSoup
 from bs4 import SoupStrainer
 
@@ -52,18 +51,19 @@ class WgetProgressHandler(bb.progress.LineFilterProgressHandler):
 
 
 class Wget(FetchMethod):
+    """Class to fetch urls via 'wget'"""
 
-    # CDNs like CloudFlare may do a 'browser integrity test' which can fail
-    # with the standard wget/urllib User-Agent, so pretend to be a modern
-    # browser.
-    user_agent = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:84.0) Gecko/20100101 Firefox/84.0"
+    def check_certs(self, d):
+        """
+        Should certificates be checked?
+        """
+        return (d.getVar("BB_CHECK_SSL_CERTS") or "1") != "0"
 
-    """Class to fetch urls via 'wget'"""
     def supports(self, ud, d):
         """
         Check to see if a given url can be fetched with wget.
         """
-        return ud.type in ['http', 'https', 'ftp']
+        return ud.type in ['http', 'https', 'ftp', 'ftps']
 
     def recommends_checksum(self, urldata):
         return True
@@ -78,11 +78,17 @@ class Wget(FetchMethod):
         else:
             ud.basename = os.path.basename(ud.path)
 
-        ud.localfile = d.expand(urllib.parse.unquote(ud.basename))
+        ud.localfile = ud.basename
         if not ud.localfile:
-            ud.localfile = d.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", "."))
+            ud.localfile = ud.host + ud.path.replace("/", ".")
 
-        self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 30 --passive-ftp --no-check-certificate"
+        self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget --tries=2 --timeout=100"
+
+        if ud.type == 'ftp' or ud.type == 'ftps':
+            self.basecmd += " --passive-ftp"
+
+        if not self.check_certs(d):
+            self.basecmd += " --no-check-certificate"
 
     def _runwget(self, ud, d, command, quiet, workdir=None):
 
@@ -90,39 +96,53 @@ class Wget(FetchMethod):
 
         logger.debug2("Fetching %s using command '%s'" % (ud.url, command))
         bb.fetch2.check_network_access(d, command, ud.url)
-        runfetchcmd(command + ' --progress=dot -v', d, quiet, log=progresshandler, workdir=workdir)
+        runfetchcmd(command + ' --progress=dot --verbose', d, quiet, log=progresshandler, workdir=workdir)
 
     def download(self, ud, d):
         """Fetch urls"""
 
         fetchcmd = self.basecmd
 
-        if 'downloadfilename' in ud.parm:
-            localpath = os.path.join(d.getVar("DL_DIR"), ud.localfile)
-            bb.utils.mkdirhier(os.path.dirname(localpath))
-            fetchcmd += " -O %s" % shlex.quote(localpath)
+        dldir = os.path.realpath(d.getVar("DL_DIR"))
+        localpath = os.path.join(dldir, ud.localfile) + ".tmp"
+        bb.utils.mkdirhier(os.path.dirname(localpath))
+        fetchcmd += " --output-document=%s" % shlex.quote(localpath)
 
         if ud.user and ud.pswd:
-            fetchcmd += " --user=%s --password=%s --auth-no-challenge" % (ud.user, ud.pswd)
+            fetchcmd += " --auth-no-challenge"
+            if ud.parm.get("redirectauth", "1") == "1":
+                # An undocumented feature of wget is that if the
+                # username/password are specified on the URI, wget will only
+                # send the Authorization header to the first host and not to
+                # any hosts that it is redirected to. With the increasing
+                # usage of temporary AWS URLs, this difference now matters as
+                # AWS will reject any request that has authentication both in
+                # the query parameters (from the redirect) and in the
+                # Authorization header.
+                fetchcmd += " --user=%s --password=%s" % (ud.user, ud.pswd)
 
         uri = ud.url.split(";")[0]
-        if os.path.exists(ud.localpath):
-            # file exists, but we didnt complete it.. trying again..
-            fetchcmd += d.expand(" -c -P ${DL_DIR} '%s'" % uri)
-        else:
-            fetchcmd += d.expand(" -P ${DL_DIR} '%s'" % uri)
-
+        fetchcmd += " --continue --directory-prefix=%s '%s'" % (dldir, uri)
         self._runwget(ud, d, fetchcmd, False)
 
         # Sanity check since wget can pretend it succeeded when it didn't
         # Also, this used to happen if sourceforge sent us to the mirror page
-        if not os.path.exists(ud.localpath):
-            raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)
+        if not os.path.exists(localpath):
+            raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, localpath), uri)
 
-        if os.path.getsize(ud.localpath) == 0:
-            os.remove(ud.localpath)
+        if os.path.getsize(localpath) == 0:
+            os.remove(localpath)
             raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (uri), uri)
 
+        # Try and verify any checksum now, meaning if it isn't correct, we don't remove the
+        # original file, which might be a race (imagine two recipes referencing the same
+        # source, one with an incorrect checksum)
+        bb.fetch2.verify_checksum(ud, d, localpath=localpath, fatal_nochecksum=False)
+
+        # Remove the ".tmp" and move the file into position atomically
+        # Our lock prevents multiple writers but mirroring code may grab incomplete files
+        os.rename(localpath, localpath[:-4])
+
         return True
 
     def checkstatus(self, fetch, ud, d, try_again=True):
@@ -209,12 +229,17 @@ class Wget(FetchMethod):
                     # We let the request fail and expect it to be
                     # tried once more ("try_again" in check_status()),
                     # with the dead connection removed from the cache.
-                    # If it still fails, we give up, which can happend for bad
+                    # If it still fails, we give up, which can happen for bad
                     # HTTP proxy settings.
                     fetch.connection_cache.remove_connection(h.host, h.port)
                     raise urllib.error.URLError(err)
                 else:
-                    r = h.getresponse()
+                    try:
+                        r = h.getresponse()
+                    except TimeoutError as e:
+                        if fetch.connection_cache:
+                            fetch.connection_cache.remove_connection(h.host, h.port)
+                        raise TimeoutError(e)
 
                 # Pick apart the HTTPResponse object to get the addinfourl
                 # object initialized properly.
@@ -275,71 +300,115 @@ class Wget(FetchMethod):
275 300
276 class FixedHTTPRedirectHandler(urllib.request.HTTPRedirectHandler): 301 class FixedHTTPRedirectHandler(urllib.request.HTTPRedirectHandler):
277 """ 302 """
278 urllib2.HTTPRedirectHandler resets the method to GET on redirect, 303 urllib2.HTTPRedirectHandler before 3.13 has two flaws:
279 when we want to follow redirects using the original method. 304
305 It resets the method to GET on redirect when we want to follow
306 redirects using the original method (typically HEAD). This was fixed
307 in 759e8e7.
308
309 It also doesn't handle 308 (Permanent Redirect). This was fixed in
310 c379bc5.
311
312 Until we depend on Python 3.13 onwards, copy the redirect_request
313 method to fix these issues.
280 """ 314 """
281 def redirect_request(self, req, fp, code, msg, headers, newurl): 315 def redirect_request(self, req, fp, code, msg, headers, newurl):
282 newreq = urllib.request.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl) 316 m = req.get_method()
283 newreq.get_method = req.get_method 317 if (not (code in (301, 302, 303, 307, 308) and m in ("GET", "HEAD")
284 return newreq 318 or code in (301, 302, 303) and m == "POST")):
285 exported_proxies = export_proxies(d) 319 raise urllib.HTTPError(req.full_url, code, msg, headers, fp)
286 320
287 handlers = [FixedHTTPRedirectHandler, HTTPMethodFallback] 321 # Strictly (according to RFC 2616), 301 or 302 in response to
288 if exported_proxies: 322 # a POST MUST NOT cause a redirection without confirmation
289 handlers.append(urllib.request.ProxyHandler()) 323 # from the user (of urllib.request, in this case). In practice,
290 handlers.append(CacheHTTPHandler()) 324 # essentially all clients do redirect in this case, so we do
291 # Since Python 2.7.9 ssl cert validation is enabled by default 325 # the same.
292 # see PEP-0476, this causes verification errors on some https servers 326
293 # so disable by default. 327 # Be conciliant with URIs containing a space. This is mainly
294 import ssl 328 # redundant with the more complete encoding done in http_error_302(),
295 if hasattr(ssl, '_create_unverified_context'): 329 # but it is kept for compatibility with other callers.
296 handlers.append(urllib.request.HTTPSHandler(context=ssl._create_unverified_context())) 330 newurl = newurl.replace(' ', '%20')
297 opener = urllib.request.build_opener(*handlers) 331
298 332 CONTENT_HEADERS = ("content-length", "content-type")
299 try: 333 newheaders = {k: v for k, v in req.headers.items()
300 uri = ud.url.split(";")[0] 334 if k.lower() not in CONTENT_HEADERS}
301 r = urllib.request.Request(uri) 335 return urllib.request.Request(newurl,
302 r.get_method = lambda: "HEAD" 336 method="HEAD" if m == "HEAD" else "GET",
303 # Some servers (FusionForge, as used on Alioth) require that the 337 headers=newheaders,
304 # optional Accept header is set. 338 origin_req_host=req.origin_req_host,
305 r.add_header("Accept", "*/*") 339 unverifiable=True)
306 r.add_header("User-Agent", self.user_agent) 340
307 def add_basic_auth(login_str, request): 341 http_error_308 = urllib.request.HTTPRedirectHandler.http_error_302
308 '''Adds Basic auth to http request, pass in login:password as string''' 342
309 import base64 343 # We need to update the environment here as both the proxy and HTTPS
310 encodeuser = base64.b64encode(login_str.encode('utf-8')).decode("utf-8") 344 # handlers need variables set. The proxy needs http_proxy and friends to
311 authheader = "Basic %s" % encodeuser 345 # be set, and HTTPSHandler ends up calling into openssl to load the
312 r.add_header("Authorization", authheader) 346 # certificates. In buildtools configurations this will be looking at the
313 347 # wrong place for certificates by default: we set SSL_CERT_FILE to the
314 if ud.user and ud.pswd: 348 # right location in the buildtools environment script but as BitBake
315 add_basic_auth(ud.user + ':' + ud.pswd, r) 349 # prunes the environment this is lost. When binaries are executed
350 # runfetchcmd ensures these values are in the environment, but this is
351 # pure Python so we need to update the environment.
352 #
353 # Avoid tramping the environment too much by using bb.utils.environment
354 # to scope the changes to the build_opener request, which is when the
355 # environment lookups happen.
356 newenv = bb.fetch2.get_fetcher_environment(d)
357
358 with bb.utils.environment(**newenv):
359 import ssl
360
361 if self.check_certs(d):
362 context = ssl.create_default_context()
363 else:
364 context = ssl._create_unverified_context()
365
366 handlers = [FixedHTTPRedirectHandler,
367 HTTPMethodFallback,
368 urllib.request.ProxyHandler(),
369 CacheHTTPHandler(),
370 urllib.request.HTTPSHandler(context=context)]
371 opener = urllib.request.build_opener(*handlers)
316 372
317 try: 373 try:
318 import netrc 374 parts = urllib.parse.urlparse(ud.url.split(";")[0])
319 n = netrc.netrc() 375 uri = "{}://{}{}".format(parts.scheme, parts.netloc, parts.path)
320 login, unused, password = n.authenticators(urllib.parse.urlparse(uri).hostname) 376 r = urllib.request.Request(uri)
321 add_basic_auth("%s:%s" % (login, password), r) 377 r.get_method = lambda: "HEAD"
322 except (TypeError, ImportError, IOError, netrc.NetrcParseError): 378 # Some servers (FusionForge, as used on Alioth) require that the
323 pass 379 # optional Accept header is set.
324 380 r.add_header("Accept", "*/*")
325 with opener.open(r) as response: 381 r.add_header("User-Agent", "bitbake/{}".format(bb.__version__))
326 pass 382 def add_basic_auth(login_str, request):
327 except urllib.error.URLError as e: 383 '''Adds Basic auth to http request, pass in login:password as string'''
328 if try_again: 384 import base64
329 logger.debug2("checkstatus: trying again") 385 encodeuser = base64.b64encode(login_str.encode('utf-8')).decode("utf-8")
330 return self.checkstatus(fetch, ud, d, False) 386 authheader = "Basic %s" % encodeuser
331 else: 387 r.add_header("Authorization", authheader)
332 # debug for now to avoid spamming the logs in e.g. remote sstate searches 388
333 logger.debug2("checkstatus() urlopen failed: %s" % e) 389 if ud.user and ud.pswd:
334 return False 390 add_basic_auth(ud.user + ':' + ud.pswd, r)
335 except ConnectionResetError as e: 391
336 if try_again: 392 try:
337 logger.debug2("checkstatus: trying again") 393 import netrc
338 return self.checkstatus(fetch, ud, d, False) 394 auth_data = netrc.netrc().authenticators(urllib.parse.urlparse(uri).hostname)
339 else: 395 if auth_data:
340 # debug for now to avoid spamming the logs in e.g. remote sstate searches 396 login, _, password = auth_data
341 logger.debug2("checkstatus() urlopen failed: %s" % e) 397 add_basic_auth("%s:%s" % (login, password), r)
342 return False 398 except (FileNotFoundError, netrc.NetrcParseError):
399 pass
400
401 with opener.open(r, timeout=100) as response:
402 pass
403 except (urllib.error.URLError, ConnectionResetError, TimeoutError) as e:
404 if try_again:
405 logger.debug2("checkstatus: trying again")
406 return self.checkstatus(fetch, ud, d, False)
407 else:
408 # debug for now to avoid spamming the logs in e.g. remote sstate searches
341 logger.debug2("checkstatus() urlopen failed: %s" % e) 409 logger.debug2("checkstatus() urlopen failed for %s: %s" % (uri, e))
410 return False
411
343 return True 412 return True
344 413
345 def _parse_path(self, regex, s): 414 def _parse_path(self, regex, s):
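bb.utils.environment() used above applies the fetcher environment only for the duration of the block in which urllib reads proxy settings and certificate paths. A simplified stand-in with the same semantics (not the bitbake implementation):

    import contextlib
    import os

    @contextlib.contextmanager
    def scoped_environment(**updates):
        saved = {k: os.environ.get(k) for k in updates}
        os.environ.update(updates)
        try:
            yield
        finally:
            # Restore prior values; remove keys that did not exist before
            for key, value in saved.items():
                if value is None:
                    del os.environ[key]
                else:
                    os.environ[key] = value
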
@@ -416,7 +485,7 @@ class Wget(FetchMethod):
416 f = tempfile.NamedTemporaryFile() 485 f = tempfile.NamedTemporaryFile()
417 with tempfile.TemporaryDirectory(prefix="wget-index-") as workdir, tempfile.NamedTemporaryFile(dir=workdir, prefix="wget-listing-") as f: 486 with tempfile.TemporaryDirectory(prefix="wget-index-") as workdir, tempfile.NamedTemporaryFile(dir=workdir, prefix="wget-listing-") as f:
418 fetchcmd = self.basecmd 487 fetchcmd = self.basecmd
419 fetchcmd += " -O " + f.name + " --user-agent='" + self.user_agent + "' '" + uri + "'" 488 fetchcmd += " --output-document=%s '%s'" % (f.name, uri)
420 try: 489 try:
421 self._runwget(ud, d, fetchcmd, True, workdir=workdir) 490 self._runwget(ud, d, fetchcmd, True, workdir=workdir)
422 fetchresult = f.read() 491 fetchresult = f.read()
@@ -472,7 +541,7 @@ class Wget(FetchMethod):
472 version_dir = ['', '', ''] 541 version_dir = ['', '', '']
473 version = ['', '', ''] 542 version = ['', '', '']
474 543
475 dirver_regex = re.compile(r"(?P<pfx>\D*)(?P<ver>(\d+[\.\-_])+(\d+))") 544 dirver_regex = re.compile(r"(?P<pfx>\D*)(?P<ver>(\d+[\.\-_])*(\d+))")
476 s = dirver_regex.search(dirver) 545 s = dirver_regex.search(dirver)
477 if s: 546 if s:
478 version_dir[1] = s.group('ver') 547 version_dir[1] = s.group('ver')
@@ -548,7 +617,7 @@ class Wget(FetchMethod):
548 617
549 # src.rpm extension was added only for rpm package. Can be removed if the rpm 618 # src.rpm extension was added only for rpm package. Can be removed if the rpm
550 # packaged will always be considered as having to be manually upgraded 619 # packaged will always be considered as having to be manually upgraded
551 psuffix_regex = r"(tar\.gz|tgz|tar\.bz2|zip|xz|tar\.lz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)" 620 psuffix_regex = r"(tar\.\w+|tgz|zip|xz|rpm|bz2|orig\.tar\.\w+|src\.tar\.\w+|src\.tgz|svnr\d+\.tar\.\w+|stable\.tar\.\w+|src\.rpm)"
552 621
553 # match name, version and archive type of a package 622 # match name, version and archive type of a package
554 package_regex_comp = re.compile(r"(?P<name>%s?\.?v?)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s$)" 623 package_regex_comp = re.compile(r"(?P<name>%s?\.?v?)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s$)"
@@ -576,13 +645,17 @@ class Wget(FetchMethod):
576 645
577 sanity check to ensure same name and type. 646 sanity check to ensure same name and type.
578 """ 647 """
579 package = ud.path.split("/")[-1] 648 if 'downloadfilename' in ud.parm:
649 package = ud.parm['downloadfilename']
650 else:
651 package = ud.path.split("/")[-1]
580 current_version = ['', d.getVar('PV'), ''] 652 current_version = ['', d.getVar('PV'), '']
581 653
582 """possible to have no version in pkg name, such as spectrum-fw""" 654 """possible to have no version in pkg name, such as spectrum-fw"""
583 if not re.search(r"\d+", package): 655 if not re.search(r"\d+", package):
584 current_version[1] = re.sub('_', '.', current_version[1]) 656 current_version[1] = re.sub('_', '.', current_version[1])
585 current_version[1] = re.sub('-', '.', current_version[1]) 657 current_version[1] = re.sub('-', '.', current_version[1])
658 bb.debug(3, "latest_versionstring: no version found in %s" % package)
586 return (current_version[1], '') 659 return (current_version[1], '')
587 660
588 package_regex = self._init_regexes(package, ud, d) 661 package_regex = self._init_regexes(package, ud, d)
@@ -599,10 +672,10 @@ class Wget(FetchMethod):
599 # search for version matches on folders inside the path, like: 672 # search for version matches on folders inside the path, like:
600 # "5.7" in http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz 673 # "5.7" in http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
601 dirver_regex = re.compile(r"(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/") 674 dirver_regex = re.compile(r"(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
602 m = dirver_regex.search(path) 675 m = dirver_regex.findall(path)
603 if m: 676 if m:
604 pn = d.getVar('PN') 677 pn = d.getVar('PN')
605 dirver = m.group('dirver') 678 dirver = m[-1][0]
606 679
607 dirver_pn_regex = re.compile(r"%s\d?" % (re.escape(pn))) 680 dirver_pn_regex = re.compile(r"%s\d?" % (re.escape(pn)))
608 if not dirver_pn_regex.search(dirver): 681 if not dirver_pn_regex.search(dirver):
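Switching from search() to findall() and taking the last match means the version directory nearest the file wins when a path contains several candidates, e.g.:

    import re

    dirver_regex = re.compile(r"(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
    path = "/sources/5.7/gtk/2.10/"
    matches = dirver_regex.findall(path)
    # findall() returns one tuple per match; index 0 is the <dirver> group.
    print(matches[-1][0])   # "2.10", not the earlier "5.7"
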
diff --git a/bitbake/lib/bb/main.py b/bitbake/lib/bb/main.py
index 06bad495ac..bca8ebfa09 100755
--- a/bitbake/lib/bb/main.py
+++ b/bitbake/lib/bb/main.py
@@ -12,11 +12,12 @@
12import os 12import os
13import sys 13import sys
14import logging 14import logging
15import optparse 15import argparse
16import warnings 16import warnings
17import fcntl 17import fcntl
18import time 18import time
19import traceback 19import traceback
20import datetime
20 21
21import bb 22import bb
22from bb import event 23from bb import event
@@ -43,18 +44,18 @@ def present_options(optionlist):
43 else: 44 else:
44 return optionlist[0] 45 return optionlist[0]
45 46
46class BitbakeHelpFormatter(optparse.IndentedHelpFormatter): 47class BitbakeHelpFormatter(argparse.HelpFormatter):
47 def format_option(self, option): 48 def _get_help_string(self, action):
48 # We need to do this here rather than in the text we supply to 49 # We need to do this here rather than in the text we supply to
49 # add_option() because we don't want to call list_extension_modules() 50 # add_option() because we don't want to call list_extension_modules()
50 # on every execution (since it imports all of the modules) 51 # on every execution (since it imports all of the modules)
51 # Note also that we modify option.help rather than the returned text 52 # Note also that we modify option.help rather than the returned text
52 # - this is so that we don't have to re-format the text ourselves 53 # - this is so that we don't have to re-format the text ourselves
53 if option.dest == 'ui': 54 if action.dest == 'ui':
54 valid_uis = list_extension_modules(bb.ui, 'main') 55 valid_uis = list_extension_modules(bb.ui, 'main')
55 option.help = option.help.replace('@CHOICES@', present_options(valid_uis)) 56 return action.help.replace('@CHOICES@', present_options(valid_uis))
56 57
57 return optparse.IndentedHelpFormatter.format_option(self, option) 58 return action.help
58 59
59def list_extension_modules(pkg, checkattr): 60def list_extension_modules(pkg, checkattr):
60 """ 61 """
@@ -112,189 +113,209 @@ def _showwarning(message, category, filename, lineno, file=None, line=None):
112 warnlog.warning(s) 113 warnlog.warning(s)
113 114
114warnings.showwarning = _showwarning 115warnings.showwarning = _showwarning
115warnings.filterwarnings("ignore")
116warnings.filterwarnings("default", module="(<string>$|(oe|bb)\.)")
117warnings.filterwarnings("ignore", category=PendingDeprecationWarning)
118warnings.filterwarnings("ignore", category=ImportWarning)
119warnings.filterwarnings("ignore", category=DeprecationWarning, module="<string>$")
120warnings.filterwarnings("ignore", message="With-statements now directly support multiple context managers")
121
122 116
123def create_bitbake_parser(): 117def create_bitbake_parser():
124 parser = optparse.OptionParser( 118 parser = argparse.ArgumentParser(
125 formatter=BitbakeHelpFormatter(), 119 description="""\
126 version="BitBake Build Tool Core version %s" % bb.__version__, 120 It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which
127 usage="""%prog [options] [recipename/target recipe:do_task ...] 121 will provide the layer, BBFILES and other configuration information.
128 122 """,
129 Executes the specified task (default is 'build') for a given set of target recipes (.bb files). 123 formatter_class=BitbakeHelpFormatter,
130 It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which 124 allow_abbrev=False,
131 will provide the layer, BBFILES and other configuration information.""") 125 add_help=False, # help is manually added below in a specific argument group
132 126 )
133 parser.add_option("-b", "--buildfile", action="store", dest="buildfile", default=None, 127
134 help="Execute tasks from a specific .bb recipe directly. WARNING: Does " 128 general_group = parser.add_argument_group('General options')
135 "not handle any dependencies from other recipes.") 129 task_group = parser.add_argument_group('Task control options')
136 130 exec_group = parser.add_argument_group('Execution control options')
137 parser.add_option("-k", "--continue", action="store_false", dest="abort", default=True, 131 logging_group = parser.add_argument_group('Logging/output control options')
138 help="Continue as much as possible after an error. While the target that " 132 server_group = parser.add_argument_group('Server options')
139 "failed and anything depending on it cannot be built, as much as " 133 config_group = parser.add_argument_group('Configuration options')
140 "possible will be built before stopping.") 134
141 135 general_group.add_argument("targets", nargs="*", metavar="recipename/target",
142 parser.add_option("-f", "--force", action="store_true", dest="force", default=False, 136 help="Execute the specified task (default is 'build') for these target "
143 help="Force the specified targets/task to run (invalidating any " 137 "recipes (.bb files).")
144 "existing stamp file).") 138
145 139 general_group.add_argument("-s", "--show-versions", action="store_true",
146 parser.add_option("-c", "--cmd", action="store", dest="cmd", 140 help="Show current and preferred versions of all recipes.")
147 help="Specify the task to execute. The exact options available " 141
148 "depend on the metadata. Some examples might be 'compile'" 142 general_group.add_argument("-e", "--environment", action="store_true",
149 " or 'populate_sysroot' or 'listtasks' may give a list of " 143 dest="show_environment",
150 "the tasks available.") 144 help="Show the global or per-recipe environment complete with information"
151 145 " about where variables were set/changed.")
152 parser.add_option("-C", "--clear-stamp", action="store", dest="invalidate_stamp", 146
153 help="Invalidate the stamp for the specified task such as 'compile' " 147 general_group.add_argument("-g", "--graphviz", action="store_true", dest="dot_graph",
154 "and then run the default task for the specified target(s).") 148 help="Save dependency tree information for the specified "
155 149 "targets in the dot syntax.")
156 parser.add_option("-r", "--read", action="append", dest="prefile", default=[],
157 help="Read the specified file before bitbake.conf.")
158
159 parser.add_option("-R", "--postread", action="append", dest="postfile", default=[],
160 help="Read the specified file after bitbake.conf.")
161
162 parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False,
163 help="Enable tracing of shell tasks (with 'set -x'). "
164 "Also print bb.note(...) messages to stdout (in "
165 "addition to writing them to ${T}/log.do_<task>).")
166
167 parser.add_option("-D", "--debug", action="count", dest="debug", default=0,
168 help="Increase the debug level. You can specify this "
169 "more than once. -D sets the debug level to 1, "
170 "where only bb.debug(1, ...) messages are printed "
171 "to stdout; -DD sets the debug level to 2, where "
172 "both bb.debug(1, ...) and bb.debug(2, ...) "
173 "messages are printed; etc. Without -D, no debug "
174 "messages are printed. Note that -D only affects "
175 "output to stdout. All debug messages are written "
176 "to ${T}/log.do_taskname, regardless of the debug "
177 "level.")
178
179 parser.add_option("-q", "--quiet", action="count", dest="quiet", default=0,
180 help="Output less log message data to the terminal. You can specify this more than once.")
181
182 parser.add_option("-n", "--dry-run", action="store_true", dest="dry_run", default=False,
183 help="Don't execute, just go through the motions.")
184
185 parser.add_option("-S", "--dump-signatures", action="append", dest="dump_signatures",
186 default=[], metavar="SIGNATURE_HANDLER",
187 help="Dump out the signature construction information, with no task "
188 "execution. The SIGNATURE_HANDLER parameter is passed to the "
189 "handler. Two common values are none and printdiff but the handler "
190 "may define more/less. none means only dump the signature, printdiff"
191 " means compare the dumped signature with the cached one.")
192
193 parser.add_option("-p", "--parse-only", action="store_true",
194 dest="parse_only", default=False,
195 help="Quit after parsing the BB recipes.")
196
197 parser.add_option("-s", "--show-versions", action="store_true",
198 dest="show_versions", default=False,
199 help="Show current and preferred versions of all recipes.")
200
201 parser.add_option("-e", "--environment", action="store_true",
202 dest="show_environment", default=False,
203 help="Show the global or per-recipe environment complete with information"
204 " about where variables were set/changed.")
205
206 parser.add_option("-g", "--graphviz", action="store_true", dest="dot_graph", default=False,
207 help="Save dependency tree information for the specified "
208 "targets in the dot syntax.")
209
210 parser.add_option("-I", "--ignore-deps", action="append",
211 dest="extra_assume_provided", default=[],
212 help="Assume these dependencies don't exist and are already provided "
213 "(equivalent to ASSUME_PROVIDED). Useful to make dependency "
214 "graphs more appealing")
215
216 parser.add_option("-l", "--log-domains", action="append", dest="debug_domains", default=[],
217 help="Show debug logging for the specified logging domains")
218
219 parser.add_option("-P", "--profile", action="store_true", dest="profile", default=False,
220 help="Profile the command and save reports.")
221 150
222 # @CHOICES@ is substituted out by BitbakeHelpFormatter above 151 # @CHOICES@ is substituted out by BitbakeHelpFormatter above
223 parser.add_option("-u", "--ui", action="store", dest="ui", 152 general_group.add_argument("-u", "--ui",
224 default=os.environ.get('BITBAKE_UI', 'knotty'), 153 default=os.environ.get('BITBAKE_UI', 'knotty'),
225 help="The user interface to use (@CHOICES@ - default %default).") 154 help="The user interface to use (@CHOICES@ - default %(default)s).")
226 155
227 parser.add_option("", "--token", action="store", dest="xmlrpctoken", 156 general_group.add_argument("--version", action="store_true",
228 default=os.environ.get("BBTOKEN"), 157 help="Show program's version and exit.")
229 help="Specify the connection token to be used when connecting " 158
230 "to a remote server.") 159 general_group.add_argument('-h', '--help', action='help',
231 160 help='Show this help message and exit.')
232 parser.add_option("", "--revisions-changed", action="store_true", 161
233 dest="revisions_changed", default=False, 162
234 help="Set the exit code depending on whether upstream floating " 163 task_group.add_argument("-f", "--force", action="store_true",
235 "revisions have changed or not.") 164 help="Force the specified targets/task to run (invalidating any "
236 165 "existing stamp file).")
237 parser.add_option("", "--server-only", action="store_true", 166
238 dest="server_only", default=False, 167 task_group.add_argument("-c", "--cmd",
239 help="Run bitbake without a UI, only starting a server " 168 help="Specify the task to execute. The exact options available "
240 "(cooker) process.") 169 "depend on the metadata. Some examples might be 'compile'"
241 170 " or 'populate_sysroot'; 'listtasks' may give a list of "
242 parser.add_option("-B", "--bind", action="store", dest="bind", default=False, 171 "the tasks available.")
243 help="The name/address for the bitbake xmlrpc server to bind to.") 172
244 173 task_group.add_argument("-C", "--clear-stamp", dest="invalidate_stamp",
245 parser.add_option("-T", "--idle-timeout", type=float, dest="server_timeout", 174 help="Invalidate the stamp for the specified task such as 'compile' "
246 default=os.getenv("BB_SERVER_TIMEOUT"), 175 "and then run the default task for the specified target(s).")
247 help="Set timeout to unload bitbake server due to inactivity, " 176
248 "set to -1 means no unload, " 177 task_group.add_argument("--runall", action="append", default=[],
249 "default: Environment variable BB_SERVER_TIMEOUT.") 178 help="Run the specified task for any recipe in the taskgraph of the "
250 179 "specified target (even if it wouldn't otherwise have run).")
251 parser.add_option("", "--no-setscene", action="store_true", 180
252 dest="nosetscene", default=False, 181 task_group.add_argument("--runonly", action="append",
253 help="Do not run any setscene tasks. sstate will be ignored and " 182 help="Run only the specified task within the taskgraph of the "
254 "everything needed, built.") 183 "specified targets (and any task dependencies those tasks may have).")
255 184
256 parser.add_option("", "--skip-setscene", action="store_true", 185 task_group.add_argument("--no-setscene", action="store_true",
257 dest="skipsetscene", default=False, 186 dest="nosetscene",
258 help="Skip setscene tasks if they would be executed. Tasks previously " 187 help="Do not run any setscene tasks. sstate will be ignored and "
259 "restored from sstate will be kept, unlike --no-setscene") 188 "everything needed, built.")
260 189
261 parser.add_option("", "--setscene-only", action="store_true", 190 task_group.add_argument("--skip-setscene", action="store_true",
262 dest="setsceneonly", default=False, 191 dest="skipsetscene",
263 help="Only run setscene tasks, don't run any real tasks.") 192 help="Skip setscene tasks if they would be executed. Tasks previously "
264 193 "restored from sstate will be kept, unlike --no-setscene.")
265 parser.add_option("", "--remote-server", action="store", dest="remote_server", 194
266 default=os.environ.get("BBSERVER"), 195 task_group.add_argument("--setscene-only", action="store_true",
267 help="Connect to the specified server.") 196 dest="setsceneonly",
268 197 help="Only run setscene tasks, don't run any real tasks.")
269 parser.add_option("-m", "--kill-server", action="store_true", 198
270 dest="kill_server", default=False, 199
271 help="Terminate any running bitbake server.") 200 exec_group.add_argument("-n", "--dry-run", action="store_true",
272 201 help="Don't execute, just go through the motions.")
273 parser.add_option("", "--observe-only", action="store_true", 202
274 dest="observe_only", default=False, 203 exec_group.add_argument("-p", "--parse-only", action="store_true",
275 help="Connect to a server as an observing-only client.") 204 help="Quit after parsing the BB recipes.")
276 205
277 parser.add_option("", "--status-only", action="store_true", 206 exec_group.add_argument("-k", "--continue", action="store_false", dest="halt",
278 dest="status_only", default=False, 207 help="Continue as much as possible after an error. While the target that "
279 help="Check the status of the remote bitbake server.") 208 "failed and anything depending on it cannot be built, as much as "
280 209 "possible will be built before stopping.")
281 parser.add_option("-w", "--write-log", action="store", dest="writeeventlog", 210
282 default=os.environ.get("BBEVENTLOG"), 211 exec_group.add_argument("-P", "--profile", action="store_true",
283 help="Writes the event log of the build to a bitbake event json file. " 212 help="Profile the command and save reports.")
284 "Use '' (empty string) to assign the name automatically.") 213
285 214 exec_group.add_argument("-S", "--dump-signatures", action="append",
286 parser.add_option("", "--runall", action="append", dest="runall", 215 default=[], metavar="SIGNATURE_HANDLER",
287 help="Run the specified task for any recipe in the taskgraph of the specified target (even if it wouldn't otherwise have run).") 216 help="Dump out the signature construction information, with no task "
288 217 "execution. The SIGNATURE_HANDLER parameter is passed to the "
289 parser.add_option("", "--runonly", action="append", dest="runonly", 218 "handler. Two common values are none and printdiff but the handler "
290 help="Run only the specified task within the taskgraph of the specified targets (and any task dependencies those tasks may have).") 219 "may define more/less. none means only dump the signature, printdiff"
220 " means recursively compare the dumped signature with the most recent"
221 " one in a local build or sstate cache (can be used to find out why tasks re-run"
222 " when that is not expected)")
223
224 exec_group.add_argument("--revisions-changed", action="store_true",
225 help="Set the exit code depending on whether upstream floating "
226 "revisions have changed or not.")
227
228 exec_group.add_argument("-b", "--buildfile",
229 help="Execute tasks from a specific .bb recipe directly. WARNING: Does "
230 "not handle any dependencies from other recipes.")
231
232 logging_group.add_argument("-D", "--debug", action="count", default=0,
233 help="Increase the debug level. You can specify this "
234 "more than once. -D sets the debug level to 1, "
235 "where only bb.debug(1, ...) messages are printed "
236 "to stdout; -DD sets the debug level to 2, where "
237 "both bb.debug(1, ...) and bb.debug(2, ...) "
238 "messages are printed; etc. Without -D, no debug "
239 "messages are printed. Note that -D only affects "
240 "output to stdout. All debug messages are written "
241 "to ${T}/log.do_taskname, regardless of the debug "
242 "level.")
243
244 logging_group.add_argument("-l", "--log-domains", action="append", dest="debug_domains",
245 default=[],
246 help="Show debug logging for the specified logging domains.")
247
248 logging_group.add_argument("-v", "--verbose", action="store_true",
249 help="Enable tracing of shell tasks (with 'set -x'). "
250 "Also print bb.note(...) messages to stdout (in "
251 "addition to writing them to ${T}/log.do_<task>).")
252
253 logging_group.add_argument("-q", "--quiet", action="count", default=0,
254 help="Output less log message data to the terminal. You can specify this "
255 "more than once.")
256
257 logging_group.add_argument("-w", "--write-log", dest="writeeventlog",
258 default=os.environ.get("BBEVENTLOG"),
259 help="Writes the event log of the build to a bitbake event json file. "
260 "Use '' (empty string) to assign the name automatically.")
261
262
263 server_group.add_argument("-B", "--bind", default=False,
264 help="The name/address for the bitbake xmlrpc server to bind to.")
265
266 server_group.add_argument("-T", "--idle-timeout", type=float, dest="server_timeout",
267 default=os.getenv("BB_SERVER_TIMEOUT"),
268 help="Set timeout to unload bitbake server due to inactivity, "
269 "set to -1 means no unload, "
270 "default: Environment variable BB_SERVER_TIMEOUT.")
271
272 server_group.add_argument("--remote-server",
273 default=os.environ.get("BBSERVER"),
274 help="Connect to the specified server.")
275
276 server_group.add_argument("-m", "--kill-server", action="store_true",
277 help="Terminate any running bitbake server.")
278
279 server_group.add_argument("--token", dest="xmlrpctoken",
280 default=os.environ.get("BBTOKEN"),
281 help="Specify the connection token to be used when connecting "
282 "to a remote server.")
283
284 server_group.add_argument("--observe-only", action="store_true",
285 help="Connect to a server as an observing-only client.")
286
287 server_group.add_argument("--status-only", action="store_true",
288 help="Check the status of the remote bitbake server.")
289
290 server_group.add_argument("--server-only", action="store_true",
291 help="Run bitbake without a UI, only starting a server "
292 "(cooker) process.")
293
294
295 config_group.add_argument("-r", "--read", action="append", dest="prefile", default=[],
296 help="Read the specified file before bitbake.conf.")
297
298 config_group.add_argument("-R", "--postread", action="append", dest="postfile", default=[],
299 help="Read the specified file after bitbake.conf.")
300
301
302 config_group.add_argument("-I", "--ignore-deps", action="append",
303 dest="extra_assume_provided", default=[],
304 help="Assume these dependencies don't exist and are already provided "
305 "(equivalent to ASSUME_PROVIDED). Useful to make dependency "
306 "graphs more appealing.")
307
291 return parser 308 return parser
292 309
293 310
294class BitBakeConfigParameters(cookerdata.ConfigParameters): 311class BitBakeConfigParameters(cookerdata.ConfigParameters):
295 def parseCommandLine(self, argv=sys.argv): 312 def parseCommandLine(self, argv=sys.argv):
296 parser = create_bitbake_parser() 313 parser = create_bitbake_parser()
297 options, targets = parser.parse_args(argv) 314 options = parser.parse_intermixed_args(argv[1:])
315
316 if options.version:
317 print("BitBake Build Tool Core version %s" % bb.__version__)
318 sys.exit(0)
298 319
299 if options.quiet and options.verbose: 320 if options.quiet and options.verbose:
300 parser.error("options --quiet and --verbose are mutually exclusive") 321 parser.error("options --quiet and --verbose are mutually exclusive")
@@ -326,7 +347,7 @@ class BitBakeConfigParameters(cookerdata.ConfigParameters):
326 else: 347 else:
327 options.xmlrpcinterface = (None, 0) 348 options.xmlrpcinterface = (None, 0)
328 349
329 return options, targets[1:] 350 return options, options.targets
330 351
331 352
332def bitbake_main(configParams, configuration): 353def bitbake_main(configParams, configuration):
@@ -391,6 +412,9 @@ def bitbake_main(configParams, configuration):
391 412
392 return 1 413 return 1
393 414
415def timestamp():
416 return datetime.datetime.now().strftime('%H:%M:%S.%f')
417
394def setup_bitbake(configParams, extrafeatures=None): 418def setup_bitbake(configParams, extrafeatures=None):
395 # Ensure logging messages get sent to the UI as events 419 # Ensure logging messages get sent to the UI as events
396 handler = bb.event.LogHandler() 420 handler = bb.event.LogHandler()
@@ -398,6 +422,11 @@ def setup_bitbake(configParams, extrafeatures=None):
398 # In status only mode there are no logs and no UI 422 # In status only mode there are no logs and no UI
399 logger.addHandler(handler) 423 logger.addHandler(handler)
400 424
425 if configParams.dump_signatures:
426 if extrafeatures is None:
427 extrafeatures = []
428 extrafeatures.append(bb.cooker.CookerFeatures.RECIPE_SIGGEN_INFO)
429
401 if configParams.server_only: 430 if configParams.server_only:
402 featureset = [] 431 featureset = []
403 ui_module = None 432 ui_module = None
@@ -425,7 +454,7 @@ def setup_bitbake(configParams, extrafeatures=None):
425 retries = 8 454 retries = 8
426 while retries: 455 while retries:
427 try: 456 try:
428 topdir, lock = lockBitbake() 457 topdir, lock, lockfile = lockBitbake()
429 sockname = topdir + "/bitbake.sock" 458 sockname = topdir + "/bitbake.sock"
430 if lock: 459 if lock:
431 if configParams.status_only or configParams.kill_server: 460 if configParams.status_only or configParams.kill_server:
@@ -436,18 +465,22 @@ def setup_bitbake(configParams, extrafeatures=None):
436 logger.info("Starting bitbake server...") 465 logger.info("Starting bitbake server...")
437 # Clear the event queue since we already displayed messages 466 # Clear the event queue since we already displayed messages
438 bb.event.ui_queue = [] 467 bb.event.ui_queue = []
439 server = bb.server.process.BitBakeServer(lock, sockname, featureset, configParams.server_timeout, configParams.xmlrpcinterface) 468 server = bb.server.process.BitBakeServer(lock, sockname, featureset, configParams.server_timeout, configParams.xmlrpcinterface, configParams.profile)
440 469
441 else: 470 else:
442 logger.info("Reconnecting to bitbake server...") 471 logger.info("Reconnecting to bitbake server...")
443 if not os.path.exists(sockname): 472 if not os.path.exists(sockname):
444 logger.info("Previous bitbake instance shutting down?, waiting to retry...") 473 logger.info("Previous bitbake instance shutting down? Waiting to retry... (%s)" % timestamp())
474 procs = bb.server.process.get_lockfile_process_msg(lockfile)
475 if procs:
476 logger.info("Processes holding bitbake.lock (missing socket %s):\n%s" % (sockname, procs))
477 logger.info("Directory listing: %s" % (str(os.listdir(topdir))))
445 i = 0 478 i = 0
446 lock = None 479 lock = None
447 # Wait for 5s or until we can get the lock 480 # Wait for 5s or until we can get the lock
448 while not lock and i < 50: 481 while not lock and i < 50:
449 time.sleep(0.1) 482 time.sleep(0.1)
450 _, lock = lockBitbake() 483 _, lock, _ = lockBitbake()
451 i += 1 484 i += 1
452 if lock: 485 if lock:
453 bb.utils.unlockfile(lock) 486 bb.utils.unlockfile(lock)
@@ -466,9 +499,9 @@ def setup_bitbake(configParams, extrafeatures=None):
466 retries -= 1 499 retries -= 1
467 tryno = 8 - retries 500 tryno = 8 - retries
468 if isinstance(e, (bb.server.process.ProcessTimeout, BrokenPipeError, EOFError, SystemExit)): 501 if isinstance(e, (bb.server.process.ProcessTimeout, BrokenPipeError, EOFError, SystemExit)):
469 logger.info("Retrying server connection (#%d)..." % tryno) 502 logger.info("Retrying server connection (#%d)... (%s)" % (tryno, timestamp()))
470 else: 503 else:
471 logger.info("Retrying server connection (#%d)... (%s)" % (tryno, traceback.format_exc())) 504 logger.info("Retrying server connection (#%d)... (%s, %s)" % (tryno, traceback.format_exc(), timestamp()))
472 505
473 if not retries: 506 if not retries:
474 bb.fatal("Unable to connect to bitbake server, or start one (server startup failures would be in bitbake-cookerdaemon.log).") 507 bb.fatal("Unable to connect to bitbake server, or start one (server startup failures would be in bitbake-cookerdaemon.log).")
@@ -497,5 +530,5 @@ def lockBitbake():
497 bb.error("Unable to find conf/bblayers.conf or conf/bitbake.conf. BBPATH is unset and/or not in a build directory?") 530 bb.error("Unable to find conf/bblayers.conf or conf/bitbake.conf. BBPATH is unset and/or not in a build directory?")
498 raise BBMainFatal 531 raise BBMainFatal
499 lockfile = topdir + "/bitbake.lock" 532 lockfile = topdir + "/bitbake.lock"
500 return topdir, bb.utils.lockfile(lockfile, False, False) 533 return topdir, bb.utils.lockfile(lockfile, False, False), lockfile
501 534
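The move to argparse relies on parse_intermixed_args() (Python 3.7+), which, like the old optparse parser, accepts options and positional targets in any order. A self-contained illustration:

    import argparse

    parser = argparse.ArgumentParser(allow_abbrev=False)
    parser.add_argument("targets", nargs="*")
    parser.add_argument("-c", "--cmd")

    # Options may appear between positional targets:
    opts = parser.parse_intermixed_args(["quilt", "-c", "fetch", "zlib"])
    print(opts.targets, opts.cmd)   # ['quilt', 'zlib'] fetch
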
diff --git a/bitbake/lib/bb/monitordisk.py b/bitbake/lib/bb/monitordisk.py
index 98f2109ed2..f928210351 100644
--- a/bitbake/lib/bb/monitordisk.py
+++ b/bitbake/lib/bb/monitordisk.py
@@ -76,7 +76,12 @@ def getDiskData(BBDirs):
76 return None 76 return None
77 77
78 action = pathSpaceInodeRe.group(1) 78 action = pathSpaceInodeRe.group(1)
79 if action not in ("ABORT", "STOPTASKS", "WARN"): 79 if action == "ABORT":
80 # Emit a deprecation warning
81 logger.warnonce("The BB_DISKMON_DIRS \"ABORT\" action has been renamed to \"HALT\", please update the configuration")
82 action = "HALT"
83
84 if action not in ("HALT", "STOPTASKS", "WARN"):
80 printErr("Unknown disk space monitor action: %s" % action) 85 printErr("Unknown disk space monitor action: %s" % action)
81 return None 86 return None
82 87
@@ -177,7 +182,7 @@ class diskMonitor:
177 # use them to avoid printing too many warning messages 182 # use them to avoid printing too many warning messages
178 self.preFreeS = {} 183 self.preFreeS = {}
179 self.preFreeI = {} 184 self.preFreeI = {}
180 # This is for STOPTASKS and ABORT, to avoid printing the message 185 # This is for STOPTASKS and HALT, to avoid printing the message
181 # repeatedly while waiting for the tasks to finish 186 # repeatedly while waiting for the tasks to finish
182 self.checked = {} 187 self.checked = {}
183 for k in self.devDict: 188 for k in self.devDict:
@@ -219,8 +224,8 @@ class diskMonitor:
219 self.checked[k] = True 224 self.checked[k] = True
220 rq.finish_runqueue(False) 225 rq.finish_runqueue(False)
221 bb.event.fire(bb.event.DiskFull(dev, 'disk', freeSpace, path), self.configuration) 226 bb.event.fire(bb.event.DiskFull(dev, 'disk', freeSpace, path), self.configuration)
222 elif action == "ABORT" and not self.checked[k]: 227 elif action == "HALT" and not self.checked[k]:
223 logger.error("Immediately abort since the disk space monitor action is \"ABORT\"!") 228 logger.error("Immediately halt since the disk space monitor action is \"HALT\"!")
224 self.checked[k] = True 229 self.checked[k] = True
225 rq.finish_runqueue(True) 230 rq.finish_runqueue(True)
226 bb.event.fire(bb.event.DiskFull(dev, 'disk', freeSpace, path), self.configuration) 231 bb.event.fire(bb.event.DiskFull(dev, 'disk', freeSpace, path), self.configuration)
@@ -229,9 +234,10 @@ class diskMonitor:
229 freeInode = st.f_favail 234 freeInode = st.f_favail
230 235
231 if minInode and freeInode < minInode: 236 if minInode and freeInode < minInode:
232 # Some filesystems use dynamic inodes so can't run out 237 # Some filesystems use dynamic inodes so can't run out.
233 # (e.g. btrfs). This is reported by the inode count being 0. 238 # This is reported by the inode count being 0 (btrfs) or the free
234 if st.f_files == 0: 239 # inode count being -1 (cephfs).
240 if st.f_files == 0 or st.f_favail == -1:
235 self.devDict[k][2] = None 241 self.devDict[k][2] = None
236 continue 242 continue
237 # Always show warning, the self.checked would always be False if the action is WARN 243 # Always show warning, the self.checked would always be False if the action is WARN
@@ -245,8 +251,8 @@ class diskMonitor:
245 self.checked[k] = True 251 self.checked[k] = True
246 rq.finish_runqueue(False) 252 rq.finish_runqueue(False)
247 bb.event.fire(bb.event.DiskFull(dev, 'inode', freeInode, path), self.configuration) 253 bb.event.fire(bb.event.DiskFull(dev, 'inode', freeInode, path), self.configuration)
248 elif action == "ABORT" and not self.checked[k]: 254 elif action == "HALT" and not self.checked[k]:
249 logger.error("Immediately abort since the disk space monitor action is \"ABORT\"!") 255 logger.error("Immediately halt since the disk space monitor action is \"HALT\"!")
250 self.checked[k] = True 256 self.checked[k] = True
251 rq.finish_runqueue(True) 257 rq.finish_runqueue(True)
252 bb.event.fire(bb.event.DiskFull(dev, 'inode', freeInode, path), self.configuration) 258 bb.event.fire(bb.event.DiskFull(dev, 'inode', freeInode, path), self.configuration)
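With the rename, a monitor entry that previously used ABORT is written with HALT; the thresholds below are illustrative values only:

    BB_DISKMON_DIRS = "\
        STOPTASKS,${TMPDIR},1G,100K \
        HALT,${TMPDIR},100M,1K"
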
diff --git a/bitbake/lib/bb/msg.py b/bitbake/lib/bb/msg.py
index 291b38ff7f..4f616ff42e 100644
--- a/bitbake/lib/bb/msg.py
+++ b/bitbake/lib/bb/msg.py
@@ -30,7 +30,9 @@ class BBLogFormatter(logging.Formatter):
30 PLAIN = logging.INFO + 1 30 PLAIN = logging.INFO + 1
31 VERBNOTE = logging.INFO + 2 31 VERBNOTE = logging.INFO + 2
32 ERROR = logging.ERROR 32 ERROR = logging.ERROR
33 ERRORONCE = logging.ERROR - 1
33 WARNING = logging.WARNING 34 WARNING = logging.WARNING
35 WARNONCE = logging.WARNING - 1
34 CRITICAL = logging.CRITICAL 36 CRITICAL = logging.CRITICAL
35 37
36 levelnames = { 38 levelnames = {
@@ -42,7 +44,9 @@ class BBLogFormatter(logging.Formatter):
42 PLAIN : '', 44 PLAIN : '',
43 VERBNOTE: 'NOTE', 45 VERBNOTE: 'NOTE',
44 WARNING : 'WARNING', 46 WARNING : 'WARNING',
47 WARNONCE : 'WARNING',
45 ERROR : 'ERROR', 48 ERROR : 'ERROR',
49 ERRORONCE : 'ERROR',
46 CRITICAL: 'ERROR', 50 CRITICAL: 'ERROR',
47 } 51 }
48 52
@@ -58,7 +62,9 @@ class BBLogFormatter(logging.Formatter):
58 PLAIN : BASECOLOR, 62 PLAIN : BASECOLOR,
59 VERBNOTE: BASECOLOR, 63 VERBNOTE: BASECOLOR,
60 WARNING : YELLOW, 64 WARNING : YELLOW,
65 WARNONCE : YELLOW,
61 ERROR : RED, 66 ERROR : RED,
67 ERRORONCE : RED,
62 CRITICAL: RED, 68 CRITICAL: RED,
63 } 69 }
64 70
@@ -83,10 +89,6 @@ class BBLogFormatter(logging.Formatter):
83 msg = logging.Formatter.format(self, record) 89 msg = logging.Formatter.format(self, record)
84 if hasattr(record, 'bb_exc_formatted'): 90 if hasattr(record, 'bb_exc_formatted'):
85 msg += '\n' + ''.join(record.bb_exc_formatted) 91 msg += '\n' + ''.join(record.bb_exc_formatted)
86 elif hasattr(record, 'bb_exc_info'):
87 etype, value, tb = record.bb_exc_info
88 formatted = bb.exceptions.format_exception(etype, value, tb, limit=5)
89 msg += '\n' + ''.join(formatted)
90 return msg 92 return msg
91 93
92 def colorize(self, record): 94 def colorize(self, record):
@@ -121,6 +123,22 @@ class BBLogFilter(object):
121 return True 123 return True
122 return False 124 return False
123 125
126class LogFilterShowOnce(logging.Filter):
127 def __init__(self):
128 self.seen_warnings = set()
129 self.seen_errors = set()
130
131 def filter(self, record):
132 if record.levelno == bb.msg.BBLogFormatter.WARNONCE:
133 if record.msg in self.seen_warnings:
134 return False
135 self.seen_warnings.add(record.msg)
136 if record.levelno == bb.msg.BBLogFormatter.ERRORONCE:
137 if record.msg in self.seen_errors:
138 return False
139 self.seen_errors.add(record.msg)
140 return True
141
124class LogFilterGEQLevel(logging.Filter): 142class LogFilterGEQLevel(logging.Filter):
125 def __init__(self, level): 143 def __init__(self, level):
126 self.strlevel = str(level) 144 self.strlevel = str(level)
@@ -206,8 +224,9 @@ def logger_create(name, output=sys.stderr, level=logging.INFO, preserve_handlers
206 """Standalone logger creation function""" 224 """Standalone logger creation function"""
207 logger = logging.getLogger(name) 225 logger = logging.getLogger(name)
208 console = logging.StreamHandler(output) 226 console = logging.StreamHandler(output)
227 console.addFilter(bb.msg.LogFilterShowOnce())
209 format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s") 228 format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
210 if color == 'always' or (color == 'auto' and output.isatty()): 229 if color == 'always' or (color == 'auto' and output.isatty() and os.environ.get('NO_COLOR', '') == ''):
211 format.enable_color() 230 format.enable_color()
212 console.setFormatter(format) 231 console.setFormatter(format)
213 if preserve_handlers: 232 if preserve_handlers:
@@ -293,10 +312,17 @@ def setLoggingConfig(defaultconfig, userconfigfile=None):
293 312
294 # Convert all level parameters to integers in case users want to use the 313 # Convert all level parameters to integers in case users want to use the
295 # bitbake defined level names 314 # bitbake defined level names
296 for h in logconfig["handlers"].values(): 315 for name, h in logconfig["handlers"].items():
297 if "level" in h: 316 if "level" in h:
298 h["level"] = bb.msg.stringToLevel(h["level"]) 317 h["level"] = bb.msg.stringToLevel(h["level"])
299 318
319 # Every handler needs its own instance of the once filter.
320 once_filter_name = name + ".showonceFilter"
321 logconfig.setdefault("filters", {})[once_filter_name] = {
322 "()": "bb.msg.LogFilterShowOnce",
323 }
324 h.setdefault("filters", []).append(once_filter_name)
325
300 for l in logconfig["loggers"].values(): 326 for l in logconfig["loggers"].values():
301 if "level" in l: 327 if "level" in l:
302 l["level"] = bb.msg.stringToLevel(l["level"]) 328 l["level"] = bb.msg.stringToLevel(l["level"])
diff --git a/bitbake/lib/bb/parse/__init__.py b/bitbake/lib/bb/parse/__init__.py
index c01807ba87..d428d8a4b4 100644
--- a/bitbake/lib/bb/parse/__init__.py
+++ b/bitbake/lib/bb/parse/__init__.py
@@ -49,20 +49,32 @@ class SkipPackage(SkipRecipe):
49__mtime_cache = {} 49__mtime_cache = {}
50def cached_mtime(f): 50def cached_mtime(f):
51 if f not in __mtime_cache: 51 if f not in __mtime_cache:
52 __mtime_cache[f] = os.stat(f)[stat.ST_MTIME] 52 res = os.stat(f)
53 __mtime_cache[f] = (res.st_mtime_ns, res.st_size, res.st_ino)
53 return __mtime_cache[f] 54 return __mtime_cache[f]
54 55
55def cached_mtime_noerror(f): 56def cached_mtime_noerror(f):
56 if f not in __mtime_cache: 57 if f not in __mtime_cache:
57 try: 58 try:
58 __mtime_cache[f] = os.stat(f)[stat.ST_MTIME] 59 res = os.stat(f)
60 __mtime_cache[f] = (res.st_mtime_ns, res.st_size, res.st_ino)
59 except OSError: 61 except OSError:
60 return 0 62 return 0
61 return __mtime_cache[f] 63 return __mtime_cache[f]
62 64
65def check_mtime(f, mtime):
66 try:
67 res = os.stat(f)
68 current_mtime = (res.st_mtime_ns, res.st_size, res.st_ino)
69 __mtime_cache[f] = current_mtime
70 except OSError:
71 current_mtime = 0
72 return current_mtime == mtime
73
63def update_mtime(f): 74def update_mtime(f):
64 try: 75 try:
65 __mtime_cache[f] = os.stat(f)[stat.ST_MTIME] 76 res = os.stat(f)
77 __mtime_cache[f] = (res.st_mtime_ns, res.st_size, res.st_ino)
66 except OSError: 78 except OSError:
67 if f in __mtime_cache: 79 if f in __mtime_cache:
68 del __mtime_cache[f] 80 del __mtime_cache[f]
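Caching (st_mtime_ns, st_size, st_ino) instead of a seconds-granularity mtime catches two cases a bare mtime misses: an in-place edit within the same second, and replacement via rename (which changes the inode). In sketch form:

    import os

    def stat_identity(path):
        st = os.stat(path)
        # Nanosecond mtime plus size and inode number: a rename-over swaps
        # the inode, and rapid successive edits still differ in mtime_ns.
        return (st.st_mtime_ns, st.st_size, st.st_ino)
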
@@ -99,12 +111,12 @@ def supports(fn, data):
99 return 1 111 return 1
100 return 0 112 return 0
101 113
102def handle(fn, data, include = 0): 114def handle(fn, data, include=0, baseconfig=False):
103 """Call the handler that is appropriate for this file""" 115 """Call the handler that is appropriate for this file"""
104 for h in handlers: 116 for h in handlers:
105 if h['supports'](fn, data): 117 if h['supports'](fn, data):
106 with data.inchistory.include(fn): 118 with data.inchistory.include(fn):
107 return h['handle'](fn, data, include) 119 return h['handle'](fn, data, include, baseconfig)
108 raise ParseError("not a BitBake file", fn) 120 raise ParseError("not a BitBake file", fn)
109 121
110def init(fn, data): 122def init(fn, data):
@@ -113,6 +125,8 @@ def init(fn, data):
113 return h['init'](data) 125 return h['init'](data)
114 126
115def init_parser(d): 127def init_parser(d):
128 if hasattr(bb.parse, "siggen"):
129 bb.parse.siggen.exit()
116 bb.parse.siggen = bb.siggen.init(d) 130 bb.parse.siggen = bb.siggen.init(d)
117 131
118def resolve_file(fn, d): 132def resolve_file(fn, d):
@@ -162,4 +176,41 @@ def get_file_depends(d):
162 dep_files.append(os.path.abspath(fn)) 176 dep_files.append(os.path.abspath(fn))
163 return " ".join(dep_files) 177 return " ".join(dep_files)
164 178
179def vardeps(*varnames):
180 """
181 Function decorator that can be used to instruct the bitbake dependency
182 parsing to add a dependency on the specified variables names
183
184 Example:
185
186 @bb.parse.vardeps("FOO", "BAR")
187 def my_function():
188 ...
189
190 """
191 def inner(f):
192 if not hasattr(f, "bb_vardeps"):
193 f.bb_vardeps = set()
194 f.bb_vardeps |= set(varnames)
195 return f
196 return inner
197
198def vardepsexclude(*varnames):
199 """
200 Function decorator that can be used to instruct the bitbake dependency
201 parsing to ignore dependencies on the specified variable names in the code
202
203 Example:
204
205 @bb.parse.vardepsexclude("FOO", "BAR")
206 def my_function():
207 ...
208 """
209 def inner(f):
210 if not hasattr(f, "bb_vardepsexclude"):
211 f.bb_vardepsexclude = set()
212 f.bb_vardepsexclude |= set(varnames)
213 return f
214 return inner
215
165from bb.parse.parse_py import __version__, ConfHandler, BBHandler 216from bb.parse.parse_py import __version__, ConfHandler, BBHandler
diff --git a/bitbake/lib/bb/parse/ast.py b/bitbake/lib/bb/parse/ast.py
index 50a88f7da7..49a0788038 100644
--- a/bitbake/lib/bb/parse/ast.py
+++ b/bitbake/lib/bb/parse/ast.py
@@ -9,6 +9,7 @@
9# SPDX-License-Identifier: GPL-2.0-only 9# SPDX-License-Identifier: GPL-2.0-only
10# 10#
11 11
12import sys
12import bb 13import bb
13from bb import methodpool 14from bb import methodpool
14from bb.parse import logger 15from bb.parse import logger
@@ -42,6 +43,21 @@ class IncludeNode(AstNode):
42 else: 43 else:
43 bb.parse.ConfHandler.include(self.filename, s, self.lineno, data, False) 44 bb.parse.ConfHandler.include(self.filename, s, self.lineno, data, False)
44 45
46class IncludeAllNode(AstNode):
47 def __init__(self, filename, lineno, what_file):
48 AstNode.__init__(self, filename, lineno)
49 self.what_file = what_file
50
51 def eval(self, data):
52 """
53 Include the file and evaluate the statements
54 """
55 s = data.expand(self.what_file)
56 logger.debug2("CONF %s:%s: including %s", self.filename, self.lineno, s)
57
58 for path in data.getVar("BBPATH").split(":"):
59 bb.parse.ConfHandler.include(self.filename, os.path.join(path, s), self.lineno, data, False)
60
45class ExportNode(AstNode): 61class ExportNode(AstNode):
46 def __init__(self, filename, lineno, var): 62 def __init__(self, filename, lineno, var):
47 AstNode.__init__(self, filename, lineno) 63 AstNode.__init__(self, filename, lineno)
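IncludeAllNode evaluates its directive once per BBPATH entry, so every layer that provides the named file gets included, rather than only the first hit. Assuming the directive keyword is include_all (the parser hookup is outside this hunk), usage looks like:

    include_all conf/distro/include/maintainers.inc
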
@@ -130,9 +146,16 @@ class DataNode(AstNode):
130 else: 146 else:
131 val = groupd["value"] 147 val = groupd["value"]
132 148
149 if ":append" in key or ":remove" in key or ":prepend" in key:
150 if op in ["append", "prepend", "postdot", "predot", "ques"]:
151 bb.warn(key + " " + groupd[op] + " is not a recommended operator combination; please replace it.")
152
133 flag = None 153 flag = None
134 if 'flag' in groupd and groupd['flag'] is not None: 154 if 'flag' in groupd and groupd['flag'] is not None:
135 flag = groupd['flag'] 155 if groupd["lazyques"]:
156 flag = "_defaultval_flag_"+groupd['flag']
157 else:
158 flag = groupd['flag']
136 elif groupd["lazyques"]: 159 elif groupd["lazyques"]:
137 flag = "_defaultval" 160 flag = "_defaultval"
138 161
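The new warning fires when an override-style suffix such as :append is combined with an operator that already has append/prepend semantics, stacking two mechanisms, for example:

    FOO:append += "bar"     # warned: ":append" plus "+=" double-appends
    FOO:append = " bar"     # the recommended form
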
@@ -145,7 +168,7 @@ class DataNode(AstNode):
145 data.setVar(key, val, parsing=True, **loginfo) 168 data.setVar(key, val, parsing=True, **loginfo)
146 169
147class MethodNode(AstNode): 170class MethodNode(AstNode):
148 tr_tbl = str.maketrans('/.+-@%&', '_______') 171 tr_tbl = str.maketrans('/.+-@%&~', '________')
149 172
150 def __init__(self, filename, lineno, func_name, body, python, fakeroot): 173 def __init__(self, filename, lineno, func_name, body, python, fakeroot):
151 AstNode.__init__(self, filename, lineno) 174 AstNode.__init__(self, filename, lineno)
@@ -206,10 +229,12 @@ class ExportFuncsNode(AstNode):
206 229
207 def eval(self, data): 230 def eval(self, data):
208 231
232 sentinel = " # Export function set\n"
209 for func in self.n: 233 for func in self.n:
210 calledfunc = self.classname + "_" + func 234 calledfunc = self.classname + "_" + func
211 235
212 if data.getVar(func, False) and not data.getVarFlag(func, 'export_func', False): 236 basevar = data.getVar(func, False)
237 if basevar and sentinel not in basevar:
213 continue 238 continue
214 239
215 if data.getVar(func, False): 240 if data.getVar(func, False):
@@ -219,29 +244,30 @@ class ExportFuncsNode(AstNode):
219 for flag in [ "func", "python" ]: 244 for flag in [ "func", "python" ]:
220 if data.getVarFlag(calledfunc, flag, False): 245 if data.getVarFlag(calledfunc, flag, False):
221 data.setVarFlag(func, flag, data.getVarFlag(calledfunc, flag, False)) 246 data.setVarFlag(func, flag, data.getVarFlag(calledfunc, flag, False))
222 for flag in [ "dirs" ]: 247 for flag in ["dirs", "cleandirs", "fakeroot"]:
223 if data.getVarFlag(func, flag, False): 248 if data.getVarFlag(func, flag, False):
224 data.setVarFlag(calledfunc, flag, data.getVarFlag(func, flag, False)) 249 data.setVarFlag(calledfunc, flag, data.getVarFlag(func, flag, False))
225 data.setVarFlag(func, "filename", "autogenerated") 250 data.setVarFlag(func, "filename", "autogenerated")
226 data.setVarFlag(func, "lineno", 1) 251 data.setVarFlag(func, "lineno", 1)
227 252
228 if data.getVarFlag(calledfunc, "python", False): 253 if data.getVarFlag(calledfunc, "python", False):
229 data.setVar(func, " bb.build.exec_func('" + calledfunc + "', d)\n", parsing=True) 254 data.setVar(func, sentinel + " bb.build.exec_func('" + calledfunc + "', d)\n", parsing=True)
230 else: 255 else:
231 if "-" in self.classname: 256 if "-" in self.classname:
232 bb.fatal("The classname %s contains a dash character and is calling an sh function %s using EXPORT_FUNCTIONS. Since a dash is illegal in sh function names, this cannot work, please rename the class or don't use EXPORT_FUNCTIONS." % (self.classname, calledfunc)) 257 bb.fatal("The classname %s contains a dash character and is calling an sh function %s using EXPORT_FUNCTIONS. Since a dash is illegal in sh function names, this cannot work, please rename the class or don't use EXPORT_FUNCTIONS." % (self.classname, calledfunc))
233 data.setVar(func, " " + calledfunc + "\n", parsing=True) 258 data.setVar(func, sentinel + " " + calledfunc + "\n", parsing=True)
234 data.setVarFlag(func, 'export_func', '1')
235 259
236class AddTaskNode(AstNode): 260class AddTaskNode(AstNode):
237 def __init__(self, filename, lineno, func, before, after): 261 def __init__(self, filename, lineno, tasks, before, after):
238 AstNode.__init__(self, filename, lineno) 262 AstNode.__init__(self, filename, lineno)
239 self.func = func 263 self.tasks = tasks
240 self.before = before 264 self.before = before
241 self.after = after 265 self.after = after
242 266
243 def eval(self, data): 267 def eval(self, data):
244 bb.build.addtask(self.func, self.before, self.after, data) 268 tasks = self.tasks.split()
269 for task in tasks:
270 bb.build.addtask(task, self.before, self.after, data)
245 271
246class DelTaskNode(AstNode): 272class DelTaskNode(AstNode):
247 def __init__(self, filename, lineno, tasks): 273 def __init__(self, filename, lineno, tasks):
@@ -265,6 +291,41 @@ class BBHandlerNode(AstNode):
265 data.setVarFlag(h, "handler", 1) 291 data.setVarFlag(h, "handler", 1)
266 data.setVar('__BBHANDLERS', bbhands) 292 data.setVar('__BBHANDLERS', bbhands)
267 293
294class PyLibNode(AstNode):
295 def __init__(self, filename, lineno, libdir, namespace):
296 AstNode.__init__(self, filename, lineno)
297 self.libdir = libdir
298 self.namespace = namespace
299
300 def eval(self, data):
301 global_mods = (data.getVar("BB_GLOBAL_PYMODULES") or "").split()
302 for m in global_mods:
303 if m not in bb.utils._context:
304 bb.utils._context[m] = __import__(m)
305
306 libdir = data.expand(self.libdir)
307 if libdir not in sys.path:
308 sys.path.append(libdir)
309 try:
310 bb.utils._context[self.namespace] = __import__(self.namespace)
311 toimport = getattr(bb.utils._context[self.namespace], "BBIMPORTS", [])
312 for i in toimport:
313 bb.utils._context[self.namespace] = __import__(self.namespace + "." + i)
314 mod = getattr(bb.utils._context[self.namespace], i)
315 fn = getattr(mod, "__file__")
316 funcs = {}
317 for f in dir(mod):
318 if f.startswith("_"):
319 continue
320 fcall = getattr(mod, f)
321 if not callable(fcall):
322 continue
323 funcs[f] = fcall
324 bb.codeparser.add_module_functions(fn, funcs, "%s.%s" % (self.namespace, i))
325
326 except AttributeError as e:
327 bb.error("Error importing OE modules: %s" % str(e))
328
268class InheritNode(AstNode): 329class InheritNode(AstNode):
269 def __init__(self, filename, lineno, classes): 330 def __init__(self, filename, lineno, classes):
270 AstNode.__init__(self, filename, lineno) 331 AstNode.__init__(self, filename, lineno)
@@ -273,9 +334,68 @@ class InheritNode(AstNode):
273 def eval(self, data): 334 def eval(self, data):
274 bb.parse.BBHandler.inherit(self.classes, self.filename, self.lineno, data) 335 bb.parse.BBHandler.inherit(self.classes, self.filename, self.lineno, data)
275 336
337class InheritDeferredNode(AstNode):
338 def __init__(self, filename, lineno, classes):
339 AstNode.__init__(self, filename, lineno)
340 self.inherit = (classes, filename, lineno)
341
342 def eval(self, data):
343 bb.parse.BBHandler.inherit_defer(*self.inherit, data)
344
345class AddFragmentsNode(AstNode):
346 def __init__(self, filename, lineno, fragments_path_prefix, fragments_variable, flagged_variables_list_variable, builtin_fragments_variable):
347 AstNode.__init__(self, filename, lineno)
348 self.fragments_path_prefix = fragments_path_prefix
349 self.fragments_variable = fragments_variable
350 self.flagged_variables_list_variable = flagged_variables_list_variable
351 self.builtin_fragments_variable = builtin_fragments_variable
352
353 def eval(self, data):
354 # No need to use mark_dependency since we would only match a fragment
355 # from a specific layer and there can only be a single layer with a
356 # given namespace.
357 def find_fragment(layers, layerid, full_fragment_name):
358 for layerpath in layers.split():
359 candidate_fragment_path = os.path.join(layerpath, full_fragment_name)
360 if os.path.exists(candidate_fragment_path) and bb.utils.get_file_layer(candidate_fragment_path, data) == layerid:
361 return candidate_fragment_path
362 return None
363
364 def check_and_set_builtin_fragment(fragment, data, builtin_fragments):
365 prefix, value = fragment.split('/', 1)
366 if prefix in builtin_fragments.keys():
367 data.setVar(builtin_fragments[prefix], value)
368 return True
369 return False
370
371 fragments = data.getVar(self.fragments_variable)
372 layers = data.getVar('BBLAYERS')
373 flagged_variables = data.getVar(self.flagged_variables_list_variable).split()
374 builtin_fragments = {f[0]:f[1] for f in [f.split(':') for f in data.getVar(self.builtin_fragments_variable).split()] }
375
376 if not fragments:
377 return
378 for f in fragments.split():
379 if check_and_set_builtin_fragment(f, data, builtin_fragments):
380 continue
381 layerid, fragment_name = f.split('/', 1)
382 full_fragment_name = data.expand("{}/{}.conf".format(self.fragments_path_prefix, fragment_name))
383 fragment_path = find_fragment(layers, layerid, full_fragment_name)
384 if fragment_path:
385 bb.parse.ConfHandler.include(self.filename, fragment_path, self.lineno, data, "include fragment")
386 for flagged_var in flagged_variables:
387 val = data.getVar(flagged_var)
388 data.setVarFlag(flagged_var, f, val)
389 data.setVar(flagged_var, None)
390 else:
391 bb.error("Could not find fragment {} in enabled layers: {}".format(f, layers))
392
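A small sketch of the fragment-name handling above; the builtin mapping and fragment names are illustrative only:

    def check_builtin(fragment, builtin_fragments, setvar):
        # "machine/qemux86-64" with {"machine": "MACHINE"} sets MACHINE directly;
        # anything else falls through to the per-layer .conf file search
        prefix, value = fragment.split('/', 1)
        if prefix in builtin_fragments:
            setvar(builtin_fragments[prefix], value)
            return True
        return False

    seen = {}
    assert check_builtin("machine/qemux86-64", {"machine": "MACHINE"}, seen.__setitem__)
    assert seen == {"MACHINE": "qemux86-64"}
    assert not check_builtin("core/debug", {"machine": "MACHINE"}, seen.__setitem__)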
276def handleInclude(statements, filename, lineno, m, force): 393def handleInclude(statements, filename, lineno, m, force):
277 statements.append(IncludeNode(filename, lineno, m.group(1), force)) 394 statements.append(IncludeNode(filename, lineno, m.group(1), force))
278 395
396def handleIncludeAll(statements, filename, lineno, m):
397 statements.append(IncludeAllNode(filename, lineno, m.group(1)))
398
279def handleExport(statements, filename, lineno, m): 399def handleExport(statements, filename, lineno, m):
280 statements.append(ExportNode(filename, lineno, m.group(1))) 400 statements.append(ExportNode(filename, lineno, m.group(1)))
281 401
@@ -297,38 +417,81 @@ def handlePythonMethod(statements, filename, lineno, funcname, modulename, body)
297def handleExportFuncs(statements, filename, lineno, m, classname): 417def handleExportFuncs(statements, filename, lineno, m, classname):
298 statements.append(ExportFuncsNode(filename, lineno, m.group(1), classname)) 418 statements.append(ExportFuncsNode(filename, lineno, m.group(1), classname))
299 419
300def handleAddTask(statements, filename, lineno, m): 420def handleAddTask(statements, filename, lineno, tasks, before, after):
301 func = m.group("func") 421 statements.append(AddTaskNode(filename, lineno, tasks, before, after))
302 before = m.group("before")
303 after = m.group("after")
304 if func is None:
305 return
306
307 statements.append(AddTaskNode(filename, lineno, func, before, after))
308 422
309def handleDelTask(statements, filename, lineno, m): 423def handleDelTask(statements, filename, lineno, tasks):
310 func = m.group(1) 424 statements.append(DelTaskNode(filename, lineno, tasks))
311 if func is None:
312 return
313
314 statements.append(DelTaskNode(filename, lineno, func))
315 425
316def handleBBHandlers(statements, filename, lineno, m): 426def handleBBHandlers(statements, filename, lineno, m):
317 statements.append(BBHandlerNode(filename, lineno, m.group(1))) 427 statements.append(BBHandlerNode(filename, lineno, m.group(1)))
318 428
429def handlePyLib(statements, filename, lineno, m):
430 statements.append(PyLibNode(filename, lineno, m.group(1), m.group(2)))
431
319def handleInherit(statements, filename, lineno, m): 432def handleInherit(statements, filename, lineno, m):
320 classes = m.group(1) 433 classes = m.group(1)
321 statements.append(InheritNode(filename, lineno, classes)) 434 statements.append(InheritNode(filename, lineno, classes))
322 435
436def handleInheritDeferred(statements, filename, lineno, m):
437 classes = m.group(1)
438 statements.append(InheritDeferredNode(filename, lineno, classes))
439
440def handleAddFragments(statements, filename, lineno, m):
441 fragments_path_prefix = m.group(1)
442 fragments_variable = m.group(2)
443 flagged_variables_list_variable = m.group(3)
444 builtin_fragments_variable = m.group(4)
445 statements.append(AddFragmentsNode(filename, lineno, fragments_path_prefix, fragments_variable, flagged_variables_list_variable, builtin_fragments_variable))
446
323def runAnonFuncs(d): 447def runAnonFuncs(d):
324 code = [] 448 code = []
325 for funcname in d.getVar("__BBANONFUNCS", False) or []: 449 for funcname in d.getVar("__BBANONFUNCS", False) or []:
326 code.append("%s(d)" % funcname) 450 code.append("%s(d)" % funcname)
327 bb.utils.better_exec("\n".join(code), {"d": d}) 451 bb.utils.better_exec("\n".join(code), {"d": d})
328 452
453# Handle recipe level PREFERRED_PROVIDERs
454def handleVirtRecipeProviders(tasklist, d):
455 depends = (d.getVar("DEPENDS") or "").split()
456 virtprovs = (d.getVar("BB_RECIPE_VIRTUAL_PROVIDERS") or "").split()
457 newdeps = []
458 for dep in depends:
459 if dep in virtprovs:
460 newdep = d.getVar("PREFERRED_PROVIDER_" + dep)
461 if not newdep:
462 bb.fatal("Error, recipe virtual provider PREFERRED_PROVIDER_%s not set" % dep)
463 newdeps.append(newdep)
464 else:
465 newdeps.append(dep)
466 d.setVar("DEPENDS", " ".join(newdeps))
467 for task in tasklist:
468 taskdeps = (d.getVarFlag(task, "depends") or "").split()
469 remapped = []
470 for entry in taskdeps:
471 r, t = entry.split(":")
472 if r in virtprovs:
473 r = d.getVar("PREFERRED_PROVIDER_" + r)
474 remapped.append("%s:%s" % (r, t))
475 d.setVarFlag(task, "depends", " ".join(remapped))
476
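A standalone sketch of the DEPENDS remapping performed above; the provider names are illustrative:

    def remap_depends(depends, virtprovs, preferred):
        newdeps = []
        for dep in depends.split():
            # a missing PREFERRED_PROVIDER_<dep> is a fatal error in the real code
            newdeps.append(preferred[dep] if dep in virtprovs else dep)
        return " ".join(newdeps)

    assert remap_depends("virtual/kernel zlib",
                         {"virtual/kernel"},
                         {"virtual/kernel": "linux-yocto"}) == "linux-yocto zlib"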
329def finalize(fn, d, variant = None): 477def finalize(fn, d, variant = None):
330 saved_handlers = bb.event.get_handlers().copy() 478 saved_handlers = bb.event.get_handlers().copy()
331 try: 479 try:
480 # Found renamed variables. Exit immediately
481 if d.getVar("_FAILPARSINGERRORHANDLED", False) == True:
482 raise bb.BBHandledException()
483
484 inherits = [x[0] for x in (d.getVar('__BBDEFINHERITS', False) or [('',)])]
485 bb.event.fire(bb.event.RecipePreDeferredInherits(fn, inherits), d)
486
487 while True:
488 inherits = d.getVar('__BBDEFINHERITS', False) or []
489 if not inherits:
490 break
491 inherit, filename, lineno = inherits.pop(0)
492 d.setVar('__BBDEFINHERITS', inherits)
493 bb.parse.BBHandler.inherit(inherit, filename, lineno, d, deferred=True)
494
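The loop re-reads __BBDEFINHERITS on every pass because an inherited class may itself call inherit_defer and grow the queue. The same shape with a plain list:

    queue = [("classA", "recipe.bb", 1)]
    processed = []

    def fake_inherit(name):
        processed.append(name)
        if name == "classA":            # classA defers classB in turn
            queue.append(("classB", "classA.bbclass", 5))

    while queue:
        name, fn, lineno = queue.pop(0)
        fake_inherit(name)

    assert processed == ["classA", "classB"]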
332 for var in d.getVar('__BBHANDLERS', False) or []: 495 for var in d.getVar('__BBHANDLERS', False) or []:
333 # try to add the handler 496 # try to add the handler
334 handlerfn = d.getVarFlag(var, "filename", False) 497 handlerfn = d.getVarFlag(var, "filename", False)
@@ -347,12 +510,16 @@ def finalize(fn, d, variant = None):
347 510
348 tasklist = d.getVar('__BBTASKS', False) or [] 511 tasklist = d.getVar('__BBTASKS', False) or []
349 bb.event.fire(bb.event.RecipeTaskPreProcess(fn, list(tasklist)), d) 512 bb.event.fire(bb.event.RecipeTaskPreProcess(fn, list(tasklist)), d)
513 handleVirtRecipeProviders(tasklist, d)
350 bb.build.add_tasks(tasklist, d) 514 bb.build.add_tasks(tasklist, d)
351 515
352 bb.parse.siggen.finalise(fn, d, variant) 516 bb.parse.siggen.finalise(fn, d, variant)
353 517
354 d.setVar('BBINCLUDED', bb.parse.get_file_depends(d)) 518 d.setVar('BBINCLUDED', bb.parse.get_file_depends(d))
355 519
520 if d.getVar('__BBAUTOREV_SEEN') and d.getVar('__BBSRCREV_SEEN') and not d.getVar("__BBAUTOREV_ACTED_UPON"):
521 bb.fatal("AUTOREV/SRCPV set too late for the fetcher to work properly, please set the variables earlier in parsing. Erroring now rather than failing obscurely later in the build.")
522
356 bb.event.fire(bb.event.RecipeParsed(fn), d) 523 bb.event.fire(bb.event.RecipeParsed(fn), d)
357 finally: 524 finally:
358 bb.event.set_handlers(saved_handlers) 525 bb.event.set_handlers(saved_handlers)
@@ -414,7 +581,7 @@ def multi_finalize(fn, d):
414 d.setVar("BBEXTENDVARIANT", variantmap[name]) 581 d.setVar("BBEXTENDVARIANT", variantmap[name])
415 else: 582 else:
416 d.setVar("PN", "%s-%s" % (pn, name)) 583 d.setVar("PN", "%s-%s" % (pn, name))
417 bb.parse.BBHandler.inherit(extendedmap[name], fn, 0, d) 584 bb.parse.BBHandler.inherit_defer(extendedmap[name], fn, 0, d)
418 585
419 safe_d.setVar("BBCLASSEXTEND", extended) 586 safe_d.setVar("BBCLASSEXTEND", extended)
420 _create_variants(datastores, extendedmap.keys(), extendfunc, onlyfinalise) 587 _create_variants(datastores, extendedmap.keys(), extendfunc, onlyfinalise)
diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py
index f8988b8631..008fec2308 100644
--- a/bitbake/lib/bb/parse/parse_py/BBHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/BBHandler.py
@@ -19,14 +19,12 @@ from . import ConfHandler
19from .. import resolve_file, ast, logger, ParseError 19from .. import resolve_file, ast, logger, ParseError
20from .ConfHandler import include, init 20from .ConfHandler import include, init
21 21
22# For compatibility 22__func_start_regexp__ = re.compile(r"(((?P<py>python(?=(\s|\()))|(?P<fr>fakeroot(?=\s)))\s*)*(?P<func>[\w\.\-\+\{\}\$:]+)?\s*\(\s*\)\s*{$" )
23bb.deprecate_import(__name__, "bb.parse", ["vars_from_file"])
24
25__func_start_regexp__ = re.compile(r"(((?P<py>python(?=(\s|\()))|(?P<fr>fakeroot(?=\s)))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$" )
26__inherit_regexp__ = re.compile(r"inherit\s+(.+)" ) 23__inherit_regexp__ = re.compile(r"inherit\s+(.+)" )
24__inherit_def_regexp__ = re.compile(r"inherit_defer\s+(.+)" )
27__export_func_regexp__ = re.compile(r"EXPORT_FUNCTIONS\s+(.+)" ) 25__export_func_regexp__ = re.compile(r"EXPORT_FUNCTIONS\s+(.+)" )
28__addtask_regexp__ = re.compile(r"addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*") 26__addtask_regexp__ = re.compile(r"addtask\s+([^#\n]+)(?P<comment>#.*|.*?)")
29__deltask_regexp__ = re.compile(r"deltask\s+(.+)") 27__deltask_regexp__ = re.compile(r"deltask\s+([^#\n]+)(?P<comment>#.*|.*?)")
30__addhandler_regexp__ = re.compile(r"addhandler\s+(.+)" ) 28__addhandler_regexp__ = re.compile(r"addhandler\s+(.+)" )
31__def_regexp__ = re.compile(r"def\s+(\w+).*:" ) 29__def_regexp__ = re.compile(r"def\s+(\w+).*:" )
32__python_func_regexp__ = re.compile(r"(\s+.*)|(^$)|(^#)" ) 30__python_func_regexp__ = re.compile(r"(\s+.*)|(^$)|(^#)" )
@@ -36,6 +34,7 @@ __infunc__ = []
36__inpython__ = False 34__inpython__ = False
37__body__ = [] 35__body__ = []
38__classname__ = "" 36__classname__ = ""
37__residue__ = []
39 38
40cached_statements = {} 39cached_statements = {}
41 40
@@ -43,31 +42,56 @@ def supports(fn, d):
43 """Return True if fn has a supported extension""" 42 """Return True if fn has a supported extension"""
44 return os.path.splitext(fn)[-1] in [".bb", ".bbclass", ".inc"] 43 return os.path.splitext(fn)[-1] in [".bb", ".bbclass", ".inc"]
45 44
46def inherit(files, fn, lineno, d): 45def inherit_defer(expression, fn, lineno, d):
46 inherit = (expression, fn, lineno)
47 inherits = d.getVar('__BBDEFINHERITS', False) or []
48 inherits.append(inherit)
49 d.setVar('__BBDEFINHERITS', inherits)
50
51def inherit(files, fn, lineno, d, deferred=False):
47 __inherit_cache = d.getVar('__inherit_cache', False) or [] 52 __inherit_cache = d.getVar('__inherit_cache', False) or []
53 #if "${" in files and not deferred:
54 # bb.warn("%s:%s has non deferred conditional inherit" % (fn, lineno))
48 files = d.expand(files).split() 55 files = d.expand(files).split()
49 for file in files: 56 for file in files:
50 if not os.path.isabs(file) and not file.endswith(".bbclass"): 57 defer = (d.getVar("BB_DEFER_BBCLASSES") or "").split()
51 file = os.path.join('classes', '%s.bbclass' % file) 58 if not deferred and file in defer:
52 59 inherit_defer(file, fn, lineno, d)
53 if not os.path.isabs(file): 60 continue
54 bbpath = d.getVar("BBPATH") 61 classtype = d.getVar("__bbclasstype", False)
55 abs_fn, attempts = bb.utils.which(bbpath, file, history=True) 62 origfile = file
56 for af in attempts: 63 for t in ["classes-" + classtype, "classes"]:
57 if af != abs_fn: 64 file = origfile
58 bb.parse.mark_dependency(d, af) 65 if not os.path.isabs(file) and not file.endswith(".bbclass"):
59 if abs_fn: 66 file = os.path.join(t, '%s.bbclass' % file)
60 file = abs_fn 67
68 if not os.path.isabs(file):
69 bbpath = d.getVar("BBPATH")
70 abs_fn, attempts = bb.utils.which(bbpath, file, history=True)
71 for af in attempts:
72 if af != abs_fn:
73 bb.parse.mark_dependency(d, af)
74 if abs_fn:
75 file = abs_fn
76
77 if os.path.exists(file):
78 break
79
80 if not os.path.exists(file):
81 raise ParseError("Could not inherit file %s" % (file), fn, lineno)
61 82
62 if not file in __inherit_cache: 83 if not file in __inherit_cache:
63 logger.debug("Inheriting %s (from %s:%d)" % (file, fn, lineno)) 84 logger.debug("Inheriting %s (from %s:%d)" % (file, fn, lineno))
64 __inherit_cache.append( file ) 85 __inherit_cache.append( file )
65 d.setVar('__inherit_cache', __inherit_cache) 86 d.setVar('__inherit_cache', __inherit_cache)
66 include(fn, file, lineno, d, "inherit") 87 try:
88 bb.parse.handle(file, d, True)
89 except (IOError, OSError) as exc:
90 raise ParseError("Could not inherit file %s: %s" % (fn, exc.strerror), fn, lineno)
67 __inherit_cache = d.getVar('__inherit_cache', False) or [] 91 __inherit_cache = d.getVar('__inherit_cache', False) or []
68 92
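The rewritten inherit() searches a class-type-specific directory before the generic one. A sketch of the candidate order for __bbclasstype = "recipe":

    def candidate_paths(name, classtype):
        for t in ["classes-" + classtype, "classes"]:
            yield "%s/%s.bbclass" % (t, name)

    assert list(candidate_paths("autotools", "recipe")) == [
        "classes-recipe/autotools.bbclass",
        "classes/autotools.bbclass",
    ]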
69def get_statements(filename, absolute_filename, base_name): 93def get_statements(filename, absolute_filename, base_name):
70 global cached_statements 94 global cached_statements, __residue__, __body__
71 95
72 try: 96 try:
73 return cached_statements[absolute_filename] 97 return cached_statements[absolute_filename]
@@ -87,12 +111,17 @@ def get_statements(filename, absolute_filename, base_name):
87 # add a blank line to close out any python definition 111 # add a blank line to close out any python definition
88 feeder(lineno, "", filename, base_name, statements, eof=True) 112 feeder(lineno, "", filename, base_name, statements, eof=True)
89 113
114 if __residue__:
115 raise ParseError("Unparsed lines %s: %s" % (filename, str(__residue__)), filename, lineno)
116 if __body__:
117 raise ParseError("Unparsed lines from unclosed function %s: %s" % (filename, str(__body__)), filename, lineno)
118
90 if filename.endswith(".bbclass") or filename.endswith(".inc"): 119 if filename.endswith(".bbclass") or filename.endswith(".inc"):
91 cached_statements[absolute_filename] = statements 120 cached_statements[absolute_filename] = statements
92 return statements 121 return statements
93 122
94def handle(fn, d, include): 123def handle(fn, d, include, baseconfig=False):
95 global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __residue__, __classname__ 124 global __infunc__, __body__, __residue__, __classname__
96 __body__ = [] 125 __body__ = []
97 __infunc__ = [] 126 __infunc__ = []
98 __classname__ = "" 127 __classname__ = ""
@@ -144,7 +173,7 @@ def handle(fn, d, include):
144 return d 173 return d
145 174
146def feeder(lineno, s, fn, root, statements, eof=False): 175def feeder(lineno, s, fn, root, statements, eof=False):
147 global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __def_regexp__, __python_func_regexp__, __inpython__, __infunc__, __body__, bb, __residue__, __classname__ 176 global __inpython__, __infunc__, __body__, __residue__, __classname__
148 177
149 # Check tabs in python functions: 178 # Check tabs in python functions:
150 # - def py_funcname(): covered by __inpython__ 179 # - def py_funcname(): covered by __inpython__
@@ -181,10 +210,10 @@ def feeder(lineno, s, fn, root, statements, eof=False):
181 210
182 if s and s[0] == '#': 211 if s and s[0] == '#':
183 if len(__residue__) != 0 and __residue__[0][0] != "#": 212 if len(__residue__) != 0 and __residue__[0][0] != "#":
184 bb.fatal("There is a comment on line %s of file %s (%s) which is in the middle of a multiline expression.\nBitbake used to ignore these but no longer does so, please fix your metadata as errors are likely as a result of this change." % (lineno, fn, s)) 213 bb.fatal("There is a comment on line %s of file %s:\n'''\n%s\n'''\nwhich is in the middle of a multiline expression. This syntax is invalid, please correct it." % (lineno, fn, s))
185 214
186 if len(__residue__) != 0 and __residue__[0][0] == "#" and (not s or s[0] != "#"): 215 if len(__residue__) != 0 and __residue__[0][0] == "#" and (not s or s[0] != "#"):
187 bb.fatal("There is a confusing multiline, partially commented expression on line %s of file %s (%s).\nPlease clarify whether this is all a comment or should be parsed." % (lineno, fn, s)) 216 bb.fatal("There is a confusing multiline partially commented expression on line %s of file %s:\n%s\nPlease clarify whether this is all a comment or should be parsed." % (lineno - len(__residue__), fn, "\n".join(__residue__)))
188 217
189 if s and s[-1] == '\\': 218 if s and s[-1] == '\\':
190 __residue__.append(s[:-1]) 219 __residue__.append(s[:-1])
@@ -220,29 +249,38 @@ def feeder(lineno, s, fn, root, statements, eof=False):
220 249
221 m = __addtask_regexp__.match(s) 250 m = __addtask_regexp__.match(s)
222 if m: 251 if m:
223 if len(m.group().split()) == 2: 252 after = ""
224 # Check and warn for "addtask task1 task2" 253 before = ""
225 m2 = re.match(r"addtask\s+(?P<func>\w+)(?P<ignores>.*)", s) 254
226 if m2 and m2.group('ignores'): 255 # This code splits on 'before' and 'after' instead of on whitespace so we can defer
227 logger.warning('addtask ignored: "%s"' % m2.group('ignores')) 256 # evaluation to as late as possible.
228 257 tasks = m.group(1).split(" before ")[0].split(" after ")[0]
229 # Check and warn for "addtask task1 before task2 before task3", the 258
230 # similar to "after" 259 for exp in m.group(1).split(" before "):
231 taskexpression = s.split() 260 exp2 = exp.split(" after ")
232 for word in ('before', 'after'): 261 if len(exp2) > 1:
233 if taskexpression.count(word) > 1: 262 after = after + " ".join(exp2[1:])
234 logger.warning("addtask contained multiple '%s' keywords, only one is supported" % word)
235 263
236 # Check and warn for having task with exprssion as part of task name 264 for exp in m.group(1).split(" after "):
265 exp2 = exp.split(" before ")
266 if len(exp2) > 1:
267 before = before + " ".join(exp2[1:])
268
269 # Check and warn for having task with a keyword as part of task name
270 taskexpression = s.split()
237 for te in taskexpression: 271 for te in taskexpression:
238 if any( ( "%s_" % keyword ) in te for keyword in bb.data_smart.__setvar_keyword__ ): 272 if any( ( "%s_" % keyword ) in te for keyword in bb.data_smart.__setvar_keyword__ ):
239 raise ParseError("Task name '%s' contains a keyword which is not recommended/supported.\nPlease rename the task not to include the keyword.\n%s" % (te, ("\n".join(map(str, bb.data_smart.__setvar_keyword__)))), fn) 273 raise ParseError("Task name '%s' contains a keyword which is not recommended/supported.\nPlease rename the task not to include the keyword.\n%s" % (te, ("\n".join(map(str, bb.data_smart.__setvar_keyword__)))), fn)
240 ast.handleAddTask(statements, fn, lineno, m) 274
275 if tasks is not None:
276 ast.handleAddTask(statements, fn, lineno, tasks, before, after)
241 return 277 return
242 278
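A standalone check of the keyword splitting above for a line such as "addtask do_deploy after do_install before do_build"; this mirrors the split-on-keyword idea rather than the exact code:

    expr = "do_deploy after do_install before do_build"
    tasks = expr.split(" before ")[0].split(" after ")[0]
    after = " ".join(c.split(" after ", 1)[1]
                     for c in expr.split(" before ") if " after " in c)
    before = " ".join(c.split(" before ", 1)[1]
                      for c in expr.split(" after ") if " before " in c)
    assert (tasks, after, before) == ("do_deploy", "do_install", "do_build")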
243 m = __deltask_regexp__.match(s) 279 m = __deltask_regexp__.match(s)
244 if m: 280 if m:
245 ast.handleDelTask(statements, fn, lineno, m) 281 task = m.group(1)
282 if task is not None:
283 ast.handleDelTask(statements, fn, lineno, task)
246 return 284 return
247 285
248 m = __addhandler_regexp__.match(s) 286 m = __addhandler_regexp__.match(s)
@@ -255,7 +293,12 @@ def feeder(lineno, s, fn, root, statements, eof=False):
255 ast.handleInherit(statements, fn, lineno, m) 293 ast.handleInherit(statements, fn, lineno, m)
256 return 294 return
257 295
258 return ConfHandler.feeder(lineno, s, fn, statements) 296 m = __inherit_def_regexp__.match(s)
297 if m:
298 ast.handleInheritDeferred(statements, fn, lineno, m)
299 return
300
301 return ConfHandler.feeder(lineno, s, fn, statements, conffile=False)
259 302
260# Add us to the handlers list 303# Add us to the handlers list
261from .. import handlers 304from .. import handlers
diff --git a/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
index f171c5c932..9ddbae123d 100644
--- a/bitbake/lib/bb/parse/parse_py/ConfHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
@@ -20,10 +20,10 @@ from bb.parse import ParseError, resolve_file, ast, logger, handle
20__config_regexp__ = re.compile( r""" 20__config_regexp__ = re.compile( r"""
21 ^ 21 ^
22 (?P<exp>export\s+)? 22 (?P<exp>export\s+)?
23 (?P<var>[a-zA-Z0-9\-_+.${}/~]+?) 23 (?P<var>[a-zA-Z0-9\-_+.${}/~:]*?)
24 (\[(?P<flag>[a-zA-Z0-9\-_+.]+)\])? 24 (\[(?P<flag>[a-zA-Z0-9\-_+.][a-zA-Z0-9\-_+.@/]*)\])?
25 25
26 \s* ( 26 (?P<whitespace>\s*) (
27 (?P<colon>:=) | 27 (?P<colon>:=) |
28 (?P<lazyques>\?\?=) | 28 (?P<lazyques>\?\?=) |
29 (?P<ques>\?=) | 29 (?P<ques>\?=) |
@@ -32,7 +32,7 @@ __config_regexp__ = re.compile( r"""
32 (?P<predot>=\.) | 32 (?P<predot>=\.) |
33 (?P<postdot>\.=) | 33 (?P<postdot>\.=) |
34 = 34 =
35 ) \s* 35 ) (?P<whitespace2>\s*)
36 36
37 (?!'[^']*'[^']*'$) 37 (?!'[^']*'[^']*'$)
38 (?!\"[^\"]*\"[^\"]*\"$) 38 (?!\"[^\"]*\"[^\"]*\"$)
@@ -43,15 +43,15 @@ __config_regexp__ = re.compile( r"""
43 """, re.X) 43 """, re.X)
44__include_regexp__ = re.compile( r"include\s+(.+)" ) 44__include_regexp__ = re.compile( r"include\s+(.+)" )
45__require_regexp__ = re.compile( r"require\s+(.+)" ) 45__require_regexp__ = re.compile( r"require\s+(.+)" )
46__includeall_regexp__ = re.compile( r"include_all\s+(.+)" )
46__export_regexp__ = re.compile( r"export\s+([a-zA-Z0-9\-_+.${}/~]+)$" ) 47__export_regexp__ = re.compile( r"export\s+([a-zA-Z0-9\-_+.${}/~]+)$" )
47__unset_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)$" ) 48__unset_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)$" )
48__unset_flag_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)\[([a-zA-Z0-9\-_+.]+)\]$" ) 49__unset_flag_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)\[([a-zA-Z0-9\-_+.][a-zA-Z0-9\-_+.@]+)\]$" )
50__addpylib_regexp__ = re.compile(r"addpylib\s+(.+)\s+(.+)" )
51__addfragments_regexp__ = re.compile(r"addfragments\s+(.+)\s+(.+)\s+(.+)\s+(.+)" )
49 52
50def init(data): 53def init(data):
51 topdir = data.getVar('TOPDIR', False) 54 return
52 if not topdir:
53 data.setVar('TOPDIR', os.getcwd())
54
55 55
56def supports(fn, d): 56def supports(fn, d):
57 return fn[-5:] == ".conf" 57 return fn[-5:] == ".conf"
@@ -105,12 +105,12 @@ def include_single_file(parentfn, fn, lineno, data, error_out):
105# We have an issue where a UI might want to enforce particular settings such as 105# We have an issue where a UI might want to enforce particular settings such as
106# an empty DISTRO variable. If configuration files do something like assigning 106# an empty DISTRO variable. If configuration files do something like assigning
107# a weak default, it turns out to be very difficult to filter out these changes, 107# a weak default, it turns out to be very difficult to filter out these changes,
108# particularly when the weak default might appear halfway through parsing a chain 108# particularly when the weak default might appear halfway through parsing a chain
109# of configuration files. We therefore let the UIs hook into configuration file 109# of configuration files. We therefore let the UIs hook into configuration file
110# parsing. This turns out to be a hard problem to solve any other way. 110# parsing. This turns out to be a hard problem to solve any other way.
111confFilters = [] 111confFilters = []
112 112
113def handle(fn, data, include): 113def handle(fn, data, include, baseconfig=False):
114 init(data) 114 init(data)
115 115
116 if include == 0: 116 if include == 0:
@@ -128,21 +128,26 @@ def handle(fn, data, include):
128 s = f.readline() 128 s = f.readline()
129 if not s: 129 if not s:
130 break 130 break
131 origlineno = lineno
132 origline = s
131 w = s.strip() 133 w = s.strip()
132 # skip empty lines 134 # skip empty lines
133 if not w: 135 if not w:
134 continue 136 continue
135 s = s.rstrip() 137 s = s.rstrip()
136 while s[-1] == '\\': 138 while s[-1] == '\\':
137 s2 = f.readline().rstrip() 139 line = f.readline()
140 origline += line
141 s2 = line.rstrip()
138 lineno = lineno + 1 142 lineno = lineno + 1
139 if (not s2 or s2 and s2[0] != "#") and s[0] == "#" : 143 if (not s2 or s2 and s2[0] != "#") and s[0] == "#" :
140 bb.fatal("There is a confusing multiline, partially commented expression on line %s of file %s (%s).\nPlease clarify whether this is all a comment or should be parsed." % (lineno, fn, s)) 144 bb.fatal("There is a confusing multiline, partially commented expression starting on line %s of file %s:\n%s\nPlease clarify whether this is all a comment or should be parsed." % (origlineno, fn, origline))
145
141 s = s[:-1] + s2 146 s = s[:-1] + s2
142 # skip comments 147 # skip comments
143 if s[0] == '#': 148 if s[0] == '#':
144 continue 149 continue
145 feeder(lineno, s, abs_fn, statements) 150 feeder(lineno, s, abs_fn, statements, baseconfig=baseconfig)
146 151
147 # DONE WITH PARSING... time to evaluate 152 # DONE WITH PARSING... time to evaluate
148 data.setVar('FILE', abs_fn) 153 data.setVar('FILE', abs_fn)
@@ -150,17 +155,21 @@ def handle(fn, data, include):
150 if oldfile: 155 if oldfile:
151 data.setVar('FILE', oldfile) 156 data.setVar('FILE', oldfile)
152 157
153 f.close()
154
155 for f in confFilters: 158 for f in confFilters:
156 f(fn, data) 159 f(fn, data)
157 160
158 return data 161 return data
159 162
160def feeder(lineno, s, fn, statements): 163# baseconfig is set for the bblayers/layer.conf cookerdata config parsing
164# The function is also used by BBHandler, in which case conffile is False
165def feeder(lineno, s, fn, statements, baseconfig=False, conffile=True):
161 m = __config_regexp__.match(s) 166 m = __config_regexp__.match(s)
162 if m: 167 if m:
163 groupd = m.groupdict() 168 groupd = m.groupdict()
169 if groupd['var'] == "":
170 raise ParseError("Empty variable name in assignment: '%s'" % s, fn, lineno);
171 if not groupd['whitespace'] or not groupd['whitespace2']:
172 logger.warning("%s:%s has a lack of whitespace around the assignment: '%s'" % (fn, lineno, s))
164 ast.handleData(statements, fn, lineno, groupd) 173 ast.handleData(statements, fn, lineno, groupd)
165 return 174 return
166 175
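A reduced illustration of the two new checks; the real __config_regexp__ also handles flags, export and every assignment operator, so the pattern below is a simplification:

    import re
    pat = re.compile(r'^(?P<var>[\w${}/~:.+-]*?)(?P<whitespace>\s*)=(?P<whitespace2>\s*)".*"$')

    m = pat.match('FOO = "bar"')
    assert m and m.group("var") == "FOO" and m.group("whitespace") == " "

    m = pat.match('FOO="bar"')          # parses, but now draws a whitespace warning
    assert m and not m.group("whitespace")

    m = pat.match('="bar"')             # empty variable name: now a ParseError
    assert m and m.group("var") == ""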
@@ -174,6 +183,11 @@ def feeder(lineno, s, fn, statements):
174 ast.handleInclude(statements, fn, lineno, m, True) 183 ast.handleInclude(statements, fn, lineno, m, True)
175 return 184 return
176 185
186 m = __includeall_regexp__.match(s)
187 if m:
188 ast.handleIncludeAll(statements, fn, lineno, m)
189 return
190
177 m = __export_regexp__.match(s) 191 m = __export_regexp__.match(s)
178 if m: 192 if m:
179 ast.handleExport(statements, fn, lineno, m) 193 ast.handleExport(statements, fn, lineno, m)
@@ -189,6 +203,16 @@ def feeder(lineno, s, fn, statements):
189 ast.handleUnsetFlag(statements, fn, lineno, m) 203 ast.handleUnsetFlag(statements, fn, lineno, m)
190 return 204 return
191 205
206 m = __addpylib_regexp__.match(s)
207 if baseconfig and conffile and m:
208 ast.handlePyLib(statements, fn, lineno, m)
209 return
210
211 m = __addfragments_regexp__.match(s)
212 if m:
213 ast.handleAddFragments(statements, fn, lineno, m)
214 return
215
192 raise ParseError("unparsed line: '%s'" % s, fn, lineno); 216 raise ParseError("unparsed line: '%s'" % s, fn, lineno);
193 217
194# Add us to the handlers list 218# Add us to the handlers list
diff --git a/bitbake/lib/bb/persist_data.py b/bitbake/lib/bb/persist_data.py
deleted file mode 100644
index c6a209fb3f..0000000000
--- a/bitbake/lib/bb/persist_data.py
+++ /dev/null
@@ -1,301 +0,0 @@
1"""BitBake Persistent Data Store
2
3Used to store data in a central location such that other threads/tasks can
4access them at some future date. Acts as a convenience wrapper around sqlite,
5currently, providing a key/value store accessed by 'domain'.
6"""
7
8# Copyright (C) 2007 Richard Purdie
9# Copyright (C) 2010 Chris Larson <chris_larson@mentor.com>
10#
11# SPDX-License-Identifier: GPL-2.0-only
12#
13
14import collections
15import contextlib
16import functools
17import logging
18import os.path
19import sqlite3
20import sys
21import warnings
22from collections import Mapping
23
24sqlversion = sqlite3.sqlite_version_info
25if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
26 raise Exception("sqlite3 version 3.3.0 or later is required.")
27
28
29logger = logging.getLogger("BitBake.PersistData")
30
31@functools.total_ordering
32class SQLTable(collections.MutableMapping):
33 class _Decorators(object):
34 @staticmethod
35 def retry(*, reconnect=True):
36 """
37 Decorator that restarts a function if a database locked sqlite
38 exception occurs. If reconnect is True, the database connection
39 will be closed and reopened each time a failure occurs
40 """
41 def retry_wrapper(f):
42 def wrap_func(self, *args, **kwargs):
43 # Reconnect if necessary
44 if self.connection is None and reconnect:
45 self.reconnect()
46
47 count = 0
48 while True:
49 try:
50 return f(self, *args, **kwargs)
51 except sqlite3.OperationalError as exc:
52 if count < 500 and ('is locked' in str(exc) or 'locking protocol' in str(exc)):
53 count = count + 1
54 if reconnect:
55 self.reconnect()
56 continue
57 raise
58 return wrap_func
59 return retry_wrapper
60
61 @staticmethod
62 def transaction(f):
63 """
64 Decorator that starts a database transaction and creates a database
65 cursor for performing queries. If no exception is thrown, the
66 database results are commited. If an exception occurs, the database
67 is rolled back. In all cases, the cursor is closed after the
68 function ends.
69
70 Note that the cursor is passed as an extra argument to the function
71 after `self` and before any of the normal arguments
72 """
73 def wrap_func(self, *args, **kwargs):
74 # Context manager will COMMIT the database on success,
75 # or ROLLBACK on an exception
76 with self.connection:
77 # Automatically close the cursor when done
78 with contextlib.closing(self.connection.cursor()) as cursor:
79 return f(self, cursor, *args, **kwargs)
80 return wrap_func
81
82 """Object representing a table/domain in the database"""
83 def __init__(self, cachefile, table):
84 self.cachefile = cachefile
85 self.table = table
86
87 self.connection = None
88 self._execute_single("CREATE TABLE IF NOT EXISTS %s(key TEXT PRIMARY KEY NOT NULL, value TEXT);" % table)
89
90 @_Decorators.retry(reconnect=False)
91 @_Decorators.transaction
92 def _setup_database(self, cursor):
93 cursor.execute("pragma synchronous = off;")
94 # Enable WAL and keep the autocheckpoint length small (the default is
95 # usually 1000). Persistent caches are usually read-mostly, so keeping
96 # this short will keep readers running quickly
97 cursor.execute("pragma journal_mode = WAL;")
98 cursor.execute("pragma wal_autocheckpoint = 100;")
99
100 def reconnect(self):
101 if self.connection is not None:
102 self.connection.close()
103 self.connection = sqlite3.connect(self.cachefile, timeout=5)
104 self.connection.text_factory = str
105 self._setup_database()
106
107 @_Decorators.retry()
108 @_Decorators.transaction
109 def _execute_single(self, cursor, *query):
110 """
111 Executes a single query and discards the results. This correctly closes
112 the database cursor when finished
113 """
114 cursor.execute(*query)
115
116 @_Decorators.retry()
117 def _row_iter(self, f, *query):
118 """
119 Helper function that returns a row iterator. Each time __next__ is
120 called on the iterator, the provided function is evaluated to determine
121 the return value
122 """
123 class CursorIter(object):
124 def __init__(self, cursor):
125 self.cursor = cursor
126
127 def __iter__(self):
128 return self
129
130 def __next__(self):
131 row = self.cursor.fetchone()
132 if row is None:
133 self.cursor.close()
134 raise StopIteration
135 return f(row)
136
137 def __enter__(self):
138 return self
139
140 def __exit__(self, typ, value, traceback):
141 self.cursor.close()
142 return False
143
144 cursor = self.connection.cursor()
145 try:
146 cursor.execute(*query)
147 return CursorIter(cursor)
148 except:
149 cursor.close()
150
151 def __enter__(self):
152 self.connection.__enter__()
153 return self
154
155 def __exit__(self, *excinfo):
156 self.connection.__exit__(*excinfo)
157
158 @_Decorators.retry()
159 @_Decorators.transaction
160 def __getitem__(self, cursor, key):
161 cursor.execute("SELECT * from %s where key=?;" % self.table, [key])
162 row = cursor.fetchone()
163 if row is not None:
164 return row[1]
165 raise KeyError(key)
166
167 @_Decorators.retry()
168 @_Decorators.transaction
169 def __delitem__(self, cursor, key):
170 if key not in self:
171 raise KeyError(key)
172 cursor.execute("DELETE from %s where key=?;" % self.table, [key])
173
174 @_Decorators.retry()
175 @_Decorators.transaction
176 def __setitem__(self, cursor, key, value):
177 if not isinstance(key, str):
178 raise TypeError('Only string keys are supported')
179 elif not isinstance(value, str):
180 raise TypeError('Only string values are supported')
181
182 # Ensure the entire transaction (including SELECT) executes under write lock
183 cursor.execute("BEGIN EXCLUSIVE")
184
185 cursor.execute("SELECT * from %s where key=?;" % self.table, [key])
186 row = cursor.fetchone()
187 if row is not None:
188 cursor.execute("UPDATE %s SET value=? WHERE key=?;" % self.table, [value, key])
189 else:
190 cursor.execute("INSERT into %s(key, value) values (?, ?);" % self.table, [key, value])
191
192 @_Decorators.retry()
193 @_Decorators.transaction
194 def __contains__(self, cursor, key):
195 cursor.execute('SELECT * from %s where key=?;' % self.table, [key])
196 return cursor.fetchone() is not None
197
198 @_Decorators.retry()
199 @_Decorators.transaction
200 def __len__(self, cursor):
201 cursor.execute("SELECT COUNT(key) FROM %s;" % self.table)
202 row = cursor.fetchone()
203 if row is not None:
204 return row[0]
205
206 def __iter__(self):
207 return self._row_iter(lambda row: row[0], "SELECT key from %s;" % self.table)
208
209 def __lt__(self, other):
210 if not isinstance(other, Mapping):
211 raise NotImplemented
212
213 return len(self) < len(other)
214
215 def get_by_pattern(self, pattern):
216 return self._row_iter(lambda row: row[1], "SELECT * FROM %s WHERE key LIKE ?;" %
217 self.table, [pattern])
218
219 def values(self):
220 return list(self.itervalues())
221
222 def itervalues(self):
223 return self._row_iter(lambda row: row[0], "SELECT value FROM %s;" %
224 self.table)
225
226 def items(self):
227 return list(self.iteritems())
228
229 def iteritems(self):
230 return self._row_iter(lambda row: (row[0], row[1]), "SELECT * FROM %s;" %
231 self.table)
232
233 @_Decorators.retry()
234 @_Decorators.transaction
235 def clear(self, cursor):
236 cursor.execute("DELETE FROM %s;" % self.table)
237
238 def has_key(self, key):
239 return key in self
240
241
242class PersistData(object):
243 """Deprecated representation of the bitbake persistent data store"""
244 def __init__(self, d):
245 warnings.warn("Use of PersistData is deprecated. Please use "
246 "persist(domain, d) instead.",
247 category=DeprecationWarning,
248 stacklevel=2)
249
250 self.data = persist(d)
251 logger.debug("Using '%s' as the persistent data cache",
252 self.data.filename)
253
254 def addDomain(self, domain):
255 """
256 Add a domain (pending deprecation)
257 """
258 return self.data[domain]
259
260 def delDomain(self, domain):
261 """
262 Removes a domain and all the data it contains
263 """
264 del self.data[domain]
265
266 def getKeyValues(self, domain):
267 """
268 Return a list of key + value pairs for a domain
269 """
270 return list(self.data[domain].items())
271
272 def getValue(self, domain, key):
273 """
274 Return the value of a key for a domain
275 """
276 return self.data[domain][key]
277
278 def setValue(self, domain, key, value):
279 """
280 Sets the value of a key for a domain
281 """
282 self.data[domain][key] = value
283
284 def delValue(self, domain, key):
285 """
286 Deletes a key/value pair
287 """
288 del self.data[domain][key]
289
290def persist(domain, d):
291 """Convenience factory for SQLTable objects based upon metadata"""
292 import bb.utils
293 cachedir = (d.getVar("PERSISTENT_DIR") or
294 d.getVar("CACHE"))
295 if not cachedir:
296 logger.critical("Please set the 'PERSISTENT_DIR' or 'CACHE' variable")
297 sys.exit(1)
298
299 bb.utils.mkdirhier(cachedir)
300 cachefile = os.path.join(cachedir, "bb_persist_data.sqlite3")
301 return SQLTable(cachefile, domain)
diff --git a/bitbake/lib/bb/process.py b/bitbake/lib/bb/process.py
index 7c3995cce5..4c7b6d39df 100644
--- a/bitbake/lib/bb/process.py
+++ b/bitbake/lib/bb/process.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright BitBake Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -60,7 +62,7 @@ class Popen(subprocess.Popen):
60 "close_fds": True, 62 "close_fds": True,
61 "preexec_fn": subprocess_setup, 63 "preexec_fn": subprocess_setup,
62 "stdout": subprocess.PIPE, 64 "stdout": subprocess.PIPE,
63 "stderr": subprocess.STDOUT, 65 "stderr": subprocess.PIPE,
64 "stdin": subprocess.PIPE, 66 "stdin": subprocess.PIPE,
65 "shell": False, 67 "shell": False,
66 } 68 }
@@ -142,7 +144,7 @@ def _logged_communicate(pipe, log, input, extrafiles):
142 while pipe.poll() is None: 144 while pipe.poll() is None:
143 read_all_pipes(log, rin, outdata, errdata) 145 read_all_pipes(log, rin, outdata, errdata)
144 146
145 # Pocess closed, drain all pipes... 147 # Process closed, drain all pipes...
146 read_all_pipes(log, rin, outdata, errdata) 148 read_all_pipes(log, rin, outdata, errdata)
147 finally: 149 finally:
148 log.flush() 150 log.flush()
@@ -181,5 +183,8 @@ def run(cmd, input=None, log=None, extrafiles=None, **options):
181 stderr = stderr.decode("utf-8") 183 stderr = stderr.decode("utf-8")
182 184
183 if pipe.returncode != 0: 185 if pipe.returncode != 0:
186 if log:
187 # Don't duplicate the output in the exception if logging it
188 raise ExecutionError(cmd, pipe.returncode, None, None)
184 raise ExecutionError(cmd, pipe.returncode, stdout, stderr) 189 raise ExecutionError(cmd, pipe.returncode, stdout, stderr)
185 return stdout, stderr 190 return stdout, stderr
diff --git a/bitbake/lib/bb/progress.py b/bitbake/lib/bb/progress.py
index d051ba0198..9518be77fb 100644
--- a/bitbake/lib/bb/progress.py
+++ b/bitbake/lib/bb/progress.py
@@ -94,12 +94,15 @@ class LineFilterProgressHandler(ProgressHandler):
94 while True: 94 while True:
95 breakpos = self._linebuffer.find('\n') + 1 95 breakpos = self._linebuffer.find('\n') + 1
96 if breakpos == 0: 96 if breakpos == 0:
97 break 97 # fall back for the case when the progress line ends with only '\r'
98 breakpos = self._linebuffer.find('\r') + 1
99 if breakpos == 0:
100 break
98 line = self._linebuffer[:breakpos] 101 line = self._linebuffer[:breakpos]
99 self._linebuffer = self._linebuffer[breakpos:] 102 self._linebuffer = self._linebuffer[breakpos:]
100 # Drop any line feeds and anything that precedes them 103 # Drop any line feeds and anything that precedes them
101 lbreakpos = line.rfind('\r') + 1 104 lbreakpos = line.rfind('\r') + 1
102 if lbreakpos: 105 if lbreakpos and lbreakpos != breakpos:
103 line = line[lbreakpos:] 106 line = line[lbreakpos:]
104 if self.writeline(filter_color(line)): 107 if self.writeline(filter_color(line)):
105 super().write(line) 108 super().write(line)
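The '\r'-only case in miniature; the added lbreakpos != breakpos guard keeps a line whose terminator is the carriage return itself from being trimmed to nothing:

    buf = "downloading 45%\rdownloading 46%\r"
    breakpos = buf.find('\n') + 1
    if breakpos == 0:                   # no '\n': fall back to '\r'
        breakpos = buf.find('\r') + 1
    line = buf[:breakpos]               # "downloading 45%\r"
    lbreakpos = line.rfind('\r') + 1
    if lbreakpos and lbreakpos != breakpos:
        line = line[lbreakpos:]
    assert line == "downloading 45%\r"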
@@ -145,7 +148,7 @@ class MultiStageProgressReporter:
145 for tasks made up of python code spread across multiple 148 for tasks made up of python code spread across multiple
146 classes / functions - the progress reporter object can 149 classes / functions - the progress reporter object can
147 be passed around or stored at the object level and calls 150 be passed around or stored at the object level and calls
148 to next_stage() and update() made whereever needed. 151 to next_stage() and update() made wherever needed.
149 """ 152 """
150 def __init__(self, d, stage_weights, debug=False): 153 def __init__(self, d, stage_weights, debug=False):
151 """ 154 """
diff --git a/bitbake/lib/bb/providers.py b/bitbake/lib/bb/providers.py
index b5a6cd0090..e11a4637d1 100644
--- a/bitbake/lib/bb/providers.py
+++ b/bitbake/lib/bb/providers.py
@@ -38,16 +38,17 @@ def findProviders(cfgData, dataCache, pkg_pn = None):
38 localdata = data.createCopy(cfgData) 38 localdata = data.createCopy(cfgData)
39 bb.data.expandKeys(localdata) 39 bb.data.expandKeys(localdata)
40 40
41 required = {}
41 preferred_versions = {} 42 preferred_versions = {}
42 latest_versions = {} 43 latest_versions = {}
43 44
44 for pn in pkg_pn: 45 for pn in pkg_pn:
45 (last_ver, last_file, pref_ver, pref_file) = findBestProvider(pn, localdata, dataCache, pkg_pn) 46 (last_ver, last_file, pref_ver, pref_file, req) = findBestProvider(pn, localdata, dataCache, pkg_pn)
46 preferred_versions[pn] = (pref_ver, pref_file) 47 preferred_versions[pn] = (pref_ver, pref_file)
47 latest_versions[pn] = (last_ver, last_file) 48 latest_versions[pn] = (last_ver, last_file)
49 required[pn] = req
48 50
49 return (latest_versions, preferred_versions) 51 return (latest_versions, preferred_versions, required)
50
51 52
52def allProviders(dataCache): 53def allProviders(dataCache):
53 """ 54 """
@@ -59,7 +60,6 @@ def allProviders(dataCache):
59 all_providers[pn].append((ver, fn)) 60 all_providers[pn].append((ver, fn))
60 return all_providers 61 return all_providers
61 62
62
63def sortPriorities(pn, dataCache, pkg_pn = None): 63def sortPriorities(pn, dataCache, pkg_pn = None):
64 """ 64 """
65 Reorder pkg_pn by file priority and default preference 65 Reorder pkg_pn by file priority and default preference
@@ -87,6 +87,21 @@ def sortPriorities(pn, dataCache, pkg_pn = None):
87 87
88 return tmp_pn 88 return tmp_pn
89 89
90def versionVariableMatch(cfgData, keyword, pn):
91 """
92 Return the value of the <keyword>_VERSION variable if set.
93 """
94
95 # pn can contain '_', e.g. gcc-cross-x86_64 and an override cannot
96 # hence we do this manually rather than use OVERRIDES
97 ver = cfgData.getVar("%s_VERSION:pn-%s" % (keyword, pn))
98 if not ver:
99 ver = cfgData.getVar("%s_VERSION_%s" % (keyword, pn))
100 if not ver:
101 ver = cfgData.getVar("%s_VERSION" % keyword)
102
103 return ver
104
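The lookup order, most specific first, expressed as a sketch (getvar stands in for cfgData.getVar):

    def version_variable_match(getvar, keyword, pn):
        # keyword="REQUIRED", pn="gcc-cross-x86_64" consults, in order:
        #   REQUIRED_VERSION:pn-gcc-cross-x86_64
        #   REQUIRED_VERSION_gcc-cross-x86_64
        #   REQUIRED_VERSION
        for name in ("%s_VERSION:pn-%s" % (keyword, pn),
                     "%s_VERSION_%s" % (keyword, pn),
                     "%s_VERSION" % keyword):
            ver = getvar(name)
            if ver:
                return ver
        return None

    assert version_variable_match({"PREFERRED_VERSION": "1.0"}.get, "PREFERRED", "zlib") == "1.0"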
90def preferredVersionMatch(pe, pv, pr, preferred_e, preferred_v, preferred_r): 105def preferredVersionMatch(pe, pv, pr, preferred_e, preferred_v, preferred_r):
91 """ 106 """
92 Check if the version pe,pv,pr is the preferred one. 107 Check if the version pe,pv,pr is the preferred one.
@@ -102,19 +117,28 @@ def preferredVersionMatch(pe, pv, pr, preferred_e, preferred_v, preferred_r):
102 117
103def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None): 118def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
104 """ 119 """
105 Find the first provider in pkg_pn with a PREFERRED_VERSION set. 120 Find the first provider in pkg_pn with REQUIRED_VERSION or PREFERRED_VERSION set.
106 """ 121 """
107 122
108 preferred_file = None 123 preferred_file = None
109 preferred_ver = None 124 preferred_ver = None
125 required = False
110 126
111 # pn can contain '_', e.g. gcc-cross-x86_64 and an override cannot 127 required_v = versionVariableMatch(cfgData, "REQUIRED", pn)
112 # hence we do this manually rather than use OVERRIDES 128 preferred_v = versionVariableMatch(cfgData, "PREFERRED", pn)
113 preferred_v = cfgData.getVar("PREFERRED_VERSION_pn-%s" % pn) 129
114 if not preferred_v: 130 itemstr = ""
115 preferred_v = cfgData.getVar("PREFERRED_VERSION_%s" % pn) 131 if item:
116 if not preferred_v: 132 itemstr = " (for item %s)" % item
117 preferred_v = cfgData.getVar("PREFERRED_VERSION") 133
134 if required_v is not None:
135 if preferred_v is not None:
136 logger.warning("REQUIRED_VERSION and PREFERRED_VERSION for package %s%s are both set using REQUIRED_VERSION %s", pn, itemstr, required_v)
137 else:
138 logger.debug("REQUIRED_VERSION is set for package %s%s", pn, itemstr)
139 # REQUIRED_VERSION always takes precedence over PREFERRED_VERSION
140 preferred_v = required_v
141 required = True
118 142
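The precedence rule in miniature: REQUIRED_VERSION wins over PREFERRED_VERSION and marks the choice as a hard requirement rather than a preference that can fall back to the latest version:

    def pick_version(required_v, preferred_v):
        if required_v is not None:
            return required_v, True     # (version to match, hard requirement)
        return preferred_v, False

    assert pick_version("1.2", "1.0") == ("1.2", True)
    assert pick_version(None, "1.0") == ("1.0", False)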
119 if preferred_v: 143 if preferred_v:
120 m = re.match(r'(\d+:)*(.*)(_.*)*', preferred_v) 144 m = re.match(r'(\d+:)*(.*)(_.*)*', preferred_v)
@@ -147,11 +171,9 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
147 pv_str = preferred_v 171 pv_str = preferred_v
148 if not (preferred_e is None): 172 if not (preferred_e is None):
149 pv_str = '%s:%s' % (preferred_e, pv_str) 173 pv_str = '%s:%s' % (preferred_e, pv_str)
150 itemstr = ""
151 if item:
152 itemstr = " (for item %s)" % item
153 if preferred_file is None: 174 if preferred_file is None:
154 logger.warn("preferred version %s of %s not available%s", pv_str, pn, itemstr) 175 if not required:
176 logger.warning("preferred version %s of %s not available%s", pv_str, pn, itemstr)
155 available_vers = [] 177 available_vers = []
156 for file_set in pkg_pn: 178 for file_set in pkg_pn:
157 for f in file_set: 179 for f in file_set:
@@ -163,12 +185,16 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
163 available_vers.append(ver_str) 185 available_vers.append(ver_str)
164 if available_vers: 186 if available_vers:
165 available_vers.sort() 187 available_vers.sort()
166 logger.warn("versions of %s available: %s", pn, ' '.join(available_vers)) 188 logger.warning("versions of %s available: %s", pn, ' '.join(available_vers))
189 if required:
190 logger.error("required version %s of %s not available%s", pv_str, pn, itemstr)
167 else: 191 else:
168 logger.debug("selecting %s as PREFERRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr) 192 if required:
169 193 logger.debug("selecting %s as REQUIRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr)
170 return (preferred_ver, preferred_file) 194 else:
195 logger.debug("selecting %s as PREFERRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr)
171 196
197 return (preferred_ver, preferred_file, required)
172 198
173def findLatestProvider(pn, cfgData, dataCache, file_set): 199def findLatestProvider(pn, cfgData, dataCache, file_set):
174 """ 200 """
@@ -189,7 +215,6 @@ def findLatestProvider(pn, cfgData, dataCache, file_set):
189 215
190 return (latest, latest_f) 216 return (latest, latest_f)
191 217
192
193def findBestProvider(pn, cfgData, dataCache, pkg_pn = None, item = None): 218def findBestProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
194 """ 219 """
195 If there is a PREFERRED_VERSION, find the highest-priority bbfile 220 If there is a PREFERRED_VERSION, find the highest-priority bbfile
@@ -198,17 +223,16 @@ def findBestProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
198 """ 223 """
199 224
200 sortpkg_pn = sortPriorities(pn, dataCache, pkg_pn) 225 sortpkg_pn = sortPriorities(pn, dataCache, pkg_pn)
201 # Find the highest priority provider with a PREFERRED_VERSION set 226 # Find the highest priority provider with a REQUIRED_VERSION or PREFERRED_VERSION set
202 (preferred_ver, preferred_file) = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn, item) 227 (preferred_ver, preferred_file, required) = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn, item)
203 # Find the latest version of the highest priority provider 228 # Find the latest version of the highest priority provider
204 (latest, latest_f) = findLatestProvider(pn, cfgData, dataCache, sortpkg_pn[0]) 229 (latest, latest_f) = findLatestProvider(pn, cfgData, dataCache, sortpkg_pn[0])
205 230
206 if preferred_file is None: 231 if not required and preferred_file is None:
207 preferred_file = latest_f 232 preferred_file = latest_f
208 preferred_ver = latest 233 preferred_ver = latest
209 234
210 return (latest, latest_f, preferred_ver, preferred_file) 235 return (latest, latest_f, preferred_ver, preferred_file, required)
211
212 236
213def _filterProviders(providers, item, cfgData, dataCache): 237def _filterProviders(providers, item, cfgData, dataCache):
214 """ 238 """
@@ -234,10 +258,13 @@ def _filterProviders(providers, item, cfgData, dataCache):
234 258
235 logger.debug("providers for %s are: %s", item, list(sorted(pkg_pn.keys()))) 259 logger.debug("providers for %s are: %s", item, list(sorted(pkg_pn.keys())))
236 260
237 # First add PREFERRED_VERSIONS 261 # First add REQUIRED_VERSIONS or PREFERRED_VERSIONS
238 for pn in sorted(pkg_pn): 262 for pn in sorted(pkg_pn):
239 sortpkg_pn[pn] = sortPriorities(pn, dataCache, pkg_pn) 263 sortpkg_pn[pn] = sortPriorities(pn, dataCache, pkg_pn)
240 preferred_versions[pn] = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn[pn], item) 264 preferred_ver, preferred_file, required = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn[pn], item)
265 if required and preferred_file is None:
266 return eligible
267 preferred_versions[pn] = (preferred_ver, preferred_file)
241 if preferred_versions[pn][1]: 268 if preferred_versions[pn][1]:
242 eligible.append(preferred_versions[pn][1]) 269 eligible.append(preferred_versions[pn][1])
243 270
@@ -249,7 +276,6 @@ def _filterProviders(providers, item, cfgData, dataCache):
249 eligible.append(preferred_versions[pn][1]) 276 eligible.append(preferred_versions[pn][1])
250 277
251 if not eligible: 278 if not eligible:
252 logger.error("no eligible providers for %s", item)
253 return eligible 279 return eligible
254 280
255 # If pn == item, give it a slight default preference 281 # If pn == item, give it a slight default preference
@@ -266,7 +292,6 @@ def _filterProviders(providers, item, cfgData, dataCache):
266 292
267 return eligible 293 return eligible
268 294
269
270def filterProviders(providers, item, cfgData, dataCache): 295def filterProviders(providers, item, cfgData, dataCache):
271 """ 296 """
272 Take a list of providers and filter/reorder according to the 297 Take a list of providers and filter/reorder according to the
@@ -371,8 +396,8 @@ def getRuntimeProviders(dataCache, rdepend):
371 return rproviders 396 return rproviders
372 397
373 # Only search dynamic packages if we can't find anything in other variables 398 # Only search dynamic packages if we can't find anything in other variables
374 for pattern in dataCache.packages_dynamic: 399 for pat_key in dataCache.packages_dynamic:
375 pattern = pattern.replace(r'+', r"\+") 400 pattern = pat_key.replace(r'+', r"\+")
376 if pattern in regexp_cache: 401 if pattern in regexp_cache:
377 regexp = regexp_cache[pattern] 402 regexp = regexp_cache[pattern]
378 else: 403 else:
@@ -383,12 +408,11 @@ def getRuntimeProviders(dataCache, rdepend):
383 raise 408 raise
384 regexp_cache[pattern] = regexp 409 regexp_cache[pattern] = regexp
385 if regexp.match(rdepend): 410 if regexp.match(rdepend):
386 rproviders += dataCache.packages_dynamic[pattern] 411 rproviders += dataCache.packages_dynamic[pat_key]
387 logger.debug("Assuming %s is a dynamic package, but it may not exist" % rdepend) 412 logger.debug("Assuming %s is a dynamic package, but it may not exist" % rdepend)
388 413
389 return rproviders 414 return rproviders
390 415
391
392def buildWorldTargetList(dataCache, task=None): 416def buildWorldTargetList(dataCache, task=None):
393 """ 417 """
394 Build package list for "bitbake world" 418 Build package list for "bitbake world"
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
index 54ef245a63..80f3d3282f 100644
--- a/bitbake/lib/bb/runqueue.py
+++ b/bitbake/lib/bb/runqueue.py
@@ -14,6 +14,7 @@ import os
14import sys 14import sys
15import stat 15import stat
16import errno 16import errno
17import itertools
17import logging 18import logging
18import re 19import re
19import bb 20import bb
@@ -24,6 +25,7 @@ import pickle
24from multiprocessing import Process 25from multiprocessing import Process
25import shlex 26import shlex
26import pprint 27import pprint
28import time
27 29
28bblogger = logging.getLogger("BitBake") 30bblogger = logging.getLogger("BitBake")
29logger = logging.getLogger("BitBake.RunQueue") 31logger = logging.getLogger("BitBake.RunQueue")
@@ -85,15 +87,19 @@ class RunQueueStats:
85 """ 87 """
86 Holds statistics on the tasks handled by the associated runQueue 88 Holds statistics on the tasks handled by the associated runQueue
87 """ 89 """
88 def __init__(self, total): 90 def __init__(self, total, setscene_total):
89 self.completed = 0 91 self.completed = 0
90 self.skipped = 0 92 self.skipped = 0
91 self.failed = 0 93 self.failed = 0
92 self.active = 0 94 self.active = 0
95 self.setscene_active = 0
96 self.setscene_covered = 0
97 self.setscene_notcovered = 0
98 self.setscene_total = setscene_total
93 self.total = total 99 self.total = total
94 100
95 def copy(self): 101 def copy(self):
96 obj = self.__class__(self.total) 102 obj = self.__class__(self.total, self.setscene_total)
97 obj.__dict__.update(self.__dict__) 103 obj.__dict__.update(self.__dict__)
98 return obj 104 return obj
99 105
@@ -112,10 +118,18 @@ class RunQueueStats:
112 def taskActive(self): 118 def taskActive(self):
113 self.active = self.active + 1 119 self.active = self.active + 1
114 120
121 def updateCovered(self, covered, notcovered):
122 self.setscene_covered = covered
123 self.setscene_notcovered = notcovered
124
125 def updateActiveSetscene(self, active):
126 self.setscene_active = active
127
115# These values indicate the next step due to be run in the 128# These values indicate the next step due to be run in the
116# runQueue state machine 129# runQueue state machine
117runQueuePrepare = 2 130runQueuePrepare = 2
118runQueueSceneInit = 3 131runQueueSceneInit = 3
132runQueueDumpSigs = 4
119runQueueRunning = 6 133runQueueRunning = 6
120runQueueFailed = 7 134runQueueFailed = 7
121runQueueCleanUp = 8 135runQueueCleanUp = 8
@@ -143,11 +157,82 @@ class RunQueueScheduler(object):
143 self.stamps = {} 157 self.stamps = {}
144 for tid in self.rqdata.runtaskentries: 158 for tid in self.rqdata.runtaskentries:
145 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) 159 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
146 self.stamps[tid] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True) 160 self.stamps[tid] = bb.parse.siggen.stampfile_mcfn(taskname, taskfn, extrainfo=False)
147 if tid in self.rq.runq_buildable: 161 if tid in self.rq.runq_buildable:
148 self.buildable.append(tid) 162 self.buildable.add(tid)
149 163
150 self.rev_prio_map = None 164 self.rev_prio_map = None
165 self.is_pressure_usable()
166
167 def is_pressure_usable(self):
168 """
169 If monitoring pressure, return True if the pressure files can be opened and read. For example,
170 openSUSE /proc/pressure/* files have readable file permissions, but reading them returns the error
171 EOPNOTSUPP (Operation not supported).
172 """
173 if self.rq.max_cpu_pressure or self.rq.max_io_pressure or self.rq.max_memory_pressure:
174 try:
175 with open("/proc/pressure/cpu") as cpu_pressure_fds, \
176 open("/proc/pressure/io") as io_pressure_fds, \
177 open("/proc/pressure/memory") as memory_pressure_fds:
178
179 self.prev_cpu_pressure = cpu_pressure_fds.readline().split()[4].split("=")[1]
180 self.prev_io_pressure = io_pressure_fds.readline().split()[4].split("=")[1]
181 self.prev_memory_pressure = memory_pressure_fds.readline().split()[4].split("=")[1]
182 self.prev_pressure_time = time.time()
183 self.check_pressure = True
184 except:
185 bb.note("The /proc/pressure files can't be read. Continuing build without monitoring pressure")
186 self.check_pressure = False
187 else:
188 self.check_pressure = False
189
190 def exceeds_max_pressure(self):
191 """
192 Monitor the difference in total pressure at least once per second. If any of
193 BB_PRESSURE_MAX_{CPU|IO|MEMORY} is set, return True when its threshold is exceeded.
194 """
195 if self.check_pressure:
196 with open("/proc/pressure/cpu") as cpu_pressure_fds, \
197 open("/proc/pressure/io") as io_pressure_fds, \
198 open("/proc/pressure/memory") as memory_pressure_fds:
199 # extract "total" from /proc/pressure/{cpu|io}
200 curr_cpu_pressure = cpu_pressure_fds.readline().split()[4].split("=")[1]
201 curr_io_pressure = io_pressure_fds.readline().split()[4].split("=")[1]
202 curr_memory_pressure = memory_pressure_fds.readline().split()[4].split("=")[1]
203 now = time.time()
204 tdiff = now - self.prev_pressure_time
205 psi_accumulation_interval = 1.0
206 cpu_pressure = (float(curr_cpu_pressure) - float(self.prev_cpu_pressure)) / tdiff
207 io_pressure = (float(curr_io_pressure) - float(self.prev_io_pressure)) / tdiff
208 memory_pressure = (float(curr_memory_pressure) - float(self.prev_memory_pressure)) / tdiff
209 exceeds_cpu_pressure = self.rq.max_cpu_pressure and cpu_pressure > self.rq.max_cpu_pressure
210 exceeds_io_pressure = self.rq.max_io_pressure and io_pressure > self.rq.max_io_pressure
211 exceeds_memory_pressure = self.rq.max_memory_pressure and memory_pressure > self.rq.max_memory_pressure
212
213 if tdiff > psi_accumulation_interval:
214 self.prev_cpu_pressure = curr_cpu_pressure
215 self.prev_io_pressure = curr_io_pressure
216 self.prev_memory_pressure = curr_memory_pressure
217 self.prev_pressure_time = now
218
219 pressure_state = (exceeds_cpu_pressure, exceeds_io_pressure, exceeds_memory_pressure)
220 pressure_values = (round(cpu_pressure,1), self.rq.max_cpu_pressure, round(io_pressure,1), self.rq.max_io_pressure, round(memory_pressure,1), self.rq.max_memory_pressure)
221 if hasattr(self, "pressure_state") and pressure_state != self.pressure_state:
222 bb.note("Pressure status changed to CPU: %s, IO: %s, Mem: %s (CPU: %s/%s, IO: %s/%s, Mem: %s/%s) - using %s/%s bitbake threads" % (pressure_state + pressure_values + (len(self.rq.runq_running.difference(self.rq.runq_complete)), self.rq.number_tasks)))
223 self.pressure_state = pressure_state
224 return (exceeds_cpu_pressure or exceeds_io_pressure or exceeds_memory_pressure)
225 elif self.rq.max_loadfactor:
226 limit = False
227 loadfactor = float(os.getloadavg()[0]) / os.cpu_count()
228 # bb.warn("Comparing %s to %s" % (loadfactor, self.rq.max_loadfactor))
229 if loadfactor > self.rq.max_loadfactor:
230 limit = True
231 if hasattr(self, "loadfactor_limit") and limit != self.loadfactor_limit:
232 bb.note("Load average limiting set to %s as load average: %s - using %s/%s bitbake threads" % (limit, loadfactor, len(self.rq.runq_running.difference(self.rq.runq_complete)), self.rq.number_tasks))
233 self.loadfactor_limit = limit
234 return limit
235 return False
151 236
152 def next_buildable_task(self): 237 def next_buildable_task(self):
153 """ 238 """
@@ -161,6 +246,12 @@ class RunQueueScheduler(object):
         if not buildable:
             return None
 
+        # Bitbake requires that at least one task be active. Only check for pressure if
+        # this is the case, otherwise the pressure limitation could result in no tasks
+        # being active and no new tasks started, thereby at times breaking the scheduler.
+        if self.rq.stats.active and self.exceeds_max_pressure():
+            return None
+
         # Filter out tasks that have a max number of threads that have been exceeded
         skip_buildable = {}
         for running in self.rq.runq_running.difference(self.rq.runq_complete):
@@ -191,11 +282,11 @@ class RunQueueScheduler(object):
         best = None
         bestprio = None
         for tid in buildable:
-            taskname = taskname_from_tid(tid)
-            if taskname in skip_buildable and skip_buildable[taskname] >= int(self.skip_maxthread[taskname]):
-                continue
             prio = self.rev_prio_map[tid]
             if bestprio is None or bestprio > prio:
+                taskname = taskname_from_tid(tid)
+                if taskname in skip_buildable and skip_buildable[taskname] >= int(self.skip_maxthread[taskname]):
+                    continue
                 stamp = self.stamps[tid]
                 if stamp in self.rq.build_stamps.values():
                     continue
@@ -374,10 +465,9 @@ class RunQueueData:
         self.rq = rq
         self.warn_multi_bb = False
 
-        self.stampwhitelist = cfgData.getVar("BB_STAMP_WHITELIST") or ""
-        self.multi_provider_whitelist = (cfgData.getVar("MULTI_PROVIDER_WHITELIST") or "").split()
-        self.setscenewhitelist = get_setscene_enforce_whitelist(cfgData, targets)
-        self.setscenewhitelist_checked = False
+        self.multi_provider_allowed = (cfgData.getVar("BB_MULTI_PROVIDER_ALLOWED") or "").split()
+        self.setscene_ignore_tasks = get_setscene_enforce_ignore_tasks(cfgData, targets)
+        self.setscene_ignore_tasks_checked = False
         self.setscene_enforce = (cfgData.getVar('BB_SETSCENE_ENFORCE') == "1")
         self.init_progress_reporter = bb.progress.DummyMultiStageProcessProgressReporter()
 
@@ -387,7 +477,6 @@ class RunQueueData:
         self.runtaskentries = {}
 
     def runq_depends_names(self, ids):
-        import re
         ret = []
         for id in ids:
             nam = os.path.basename(id)
@@ -475,7 +564,7 @@ class RunQueueData:
                 msgs.append(" Task %s (dependent Tasks %s)\n" % (dep, self.runq_depends_names(self.runtaskentries[dep].depends)))
             msgs.append("\n")
             if len(valid_chains) > 10:
-                msgs.append("Aborted dependency loops search after 10 matches.\n")
+                msgs.append("Halted dependency loops search after 10 matches.\n")
                 raise TooManyLoops
             continue
         scan = False
@@ -536,7 +625,7 @@ class RunQueueData:
                     next_points.append(revdep)
                     task_done[revdep] = True
             endpoints = next_points
-            if len(next_points) == 0:
+            if not next_points:
                 break
 
         # Circular dependency sanity check
@@ -578,15 +667,18 @@ class RunQueueData:
 
         found = False
         for mc in self.taskData:
-            if len(taskData[mc].taskentries) > 0:
+            if taskData[mc].taskentries:
                 found = True
                 break
         if not found:
             # Nothing to do
            return 0
 
+        bb.parse.siggen.setup_datacache(self.dataCaches)
+
         self.init_progress_reporter.start()
         self.init_progress_reporter.next_stage()
+        bb.event.check_for_interrupts(self.cooker.data)
 
         # Step A - Work out a list of tasks to run
         #
@@ -632,9 +724,13 @@ class RunQueueData:
                         frommc = mcdependency[1]
                         mcdep = mcdependency[2]
                         deptask = mcdependency[4]
+                        if mcdep not in taskData:
+                            bb.fatal("Multiconfig '%s' is referenced in multiconfig dependency '%s' but not enabled in BBMULTICONFIG?" % (mcdep, dep))
                         if mc == frommc:
                             fn = taskData[mcdep].build_targets[pn][0]
                             newdep = '%s:%s' % (fn,deptask)
+                            if newdep not in taskData[mcdep].taskentries:
+                                bb.fatal("Task mcdepends on non-existent task %s" % (newdep))
                             taskData[mc].taskentries[tid].tdepends.append(newdep)
 
         for mc in taskData:
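For context on the hunk above: an mcdepends entry is a five-field colon-separated string, and the indices used select the originating multiconfig ([1]), the providing multiconfig ([2]) and the task name ([4]); the recipe name at index [3] is assigned just before this excerpt. A hedged sketch with an illustrative value (the field layout is inferred from the indices shown above):

    dep = "mc:initial:target:core-image-minimal:do_image_complete"
    mcdependency = dep.split(':')
    frommc = mcdependency[1]   # multiconfig the dependency originates from
    mcdep = mcdependency[2]    # multiconfig that must provide the task
    pn = mcdependency[3]       # recipe name in the providing multiconfig
    deptask = mcdependency[4]  # task name, e.g. do_image_complete

    # The checks added in this hunk guard against two failure modes:
    # 1) mcdep not being enabled in BBMULTICONFIG at all
    # 2) the resolved "<fn>:<task>" tid not existing in that multiconfig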
@@ -733,6 +829,7 @@ class RunQueueData:
         #self.dump_data()
 
         self.init_progress_reporter.next_stage()
+        bb.event.check_for_interrupts(self.cooker.data)
 
         # Resolve recursive 'recrdeptask' dependencies (Part B)
         #
@@ -762,7 +859,7 @@ class RunQueueData:
         # Find the dependency chain endpoints
         endpoints = set()
         for tid in self.runtaskentries:
-            if len(deps[tid]) == 0:
+            if not deps[tid]:
                 endpoints.add(tid)
         # Iterate the chains collating dependencies
         while endpoints:
@@ -773,11 +870,11 @@ class RunQueueData:
                     cumulativedeps[dep].update(cumulativedeps[tid])
                 if tid in deps[dep]:
                     deps[dep].remove(tid)
-                if len(deps[dep]) == 0:
+                if not deps[dep]:
                     next.add(dep)
             endpoints = next
         #for tid in deps:
-        #    if len(deps[tid]) != 0:
+        #    if deps[tid]:
         #        bb.warn("Sanity test failure, dependencies left for %s (%s)" % (tid, deps[tid]))
 
         # Loop here since recrdeptasks can depend upon other recrdeptasks and we have to
@@ -829,6 +926,7 @@ class RunQueueData:
             self.runtaskentries[tid].depends.difference_update(recursivetasksselfref)
 
         self.init_progress_reporter.next_stage()
+        bb.event.check_for_interrupts(self.cooker.data)
 
         #self.dump_data()
 
@@ -867,7 +965,7 @@ class RunQueueData:
                 bb.debug(1, "Task %s is marked nostamp, cannot invalidate this task" % taskname)
             else:
                 logger.verbose("Invalidate task %s, %s", taskname, fn)
-                bb.parse.siggen.invalidate_task(taskname, self.dataCaches[mc], taskfn)
+                bb.parse.siggen.invalidate_task(taskname, taskfn)
 
         self.target_tids = []
         for (mc, target, task, fn) in self.targets:
@@ -910,47 +1008,54 @@ class RunQueueData:
                 mark_active(tid, 1)
 
         self.init_progress_reporter.next_stage()
+        bb.event.check_for_interrupts(self.cooker.data)
 
         # Step C - Prune all inactive tasks
         #
         # Once all active tasks are marked, prune the ones we don't need.
 
-        delcount = {}
-        for tid in list(self.runtaskentries.keys()):
-            if tid not in runq_build:
-                delcount[tid] = self.runtaskentries[tid]
-                del self.runtaskentries[tid]
-
         # Handle --runall
         if self.cooker.configuration.runall:
             # re-run the mark_active and then drop unused tasks from new list
-            runq_build = {}
 
-            for task in self.cooker.configuration.runall:
-                if not task.startswith("do_"):
-                    task = "do_{0}".format(task)
-                runall_tids = set()
-                for tid in list(self.runtaskentries):
-                    wanttid = "{0}:{1}".format(fn_from_tid(tid), task)
-                    if wanttid in delcount:
-                        self.runtaskentries[wanttid] = delcount[wanttid]
-                    if wanttid in self.runtaskentries:
-                        runall_tids.add(wanttid)
-
-                for tid in list(runall_tids):
-                    mark_active(tid,1)
-                    if self.cooker.configuration.force:
-                        invalidate_task(tid, False)
+            runall_tids = set()
+            added = True
+            while added:
+                reduced_tasklist = set(self.runtaskentries.keys())
+                for tid in list(self.runtaskentries.keys()):
+                    if tid not in runq_build:
+                        reduced_tasklist.remove(tid)
+                runq_build = {}
 
-            for tid in list(self.runtaskentries.keys()):
-                if tid not in runq_build:
-                    delcount[tid] = self.runtaskentries[tid]
-                    del self.runtaskentries[tid]
+                orig = runall_tids
+                runall_tids = set()
+                for task in self.cooker.configuration.runall:
+                    if not task.startswith("do_"):
+                        task = "do_{0}".format(task)
+                    for tid in reduced_tasklist:
+                        wanttid = "{0}:{1}".format(fn_from_tid(tid), task)
+                        if wanttid in self.runtaskentries:
+                            runall_tids.add(wanttid)
+
+                for tid in list(runall_tids):
+                    mark_active(tid, 1)
+                    self.target_tids.append(tid)
+                    if self.cooker.configuration.force:
+                        invalidate_task(tid, False)
+                added = runall_tids - orig
+
+        delcount = set()
+        for tid in list(self.runtaskentries.keys()):
+            if tid not in runq_build:
+                delcount.add(tid)
+                del self.runtaskentries[tid]
 
-        if len(self.runtaskentries) == 0:
+        if self.cooker.configuration.runall:
+            if not self.runtaskentries:
                 bb.msg.fatal("RunQueue", "Could not find any tasks with the tasknames %s to run within the recipes of the taskgraphs of the targets %s" % (str(self.cooker.configuration.runall), str(self.targets)))
 
         self.init_progress_reporter.next_stage()
+        bb.event.check_for_interrupts(self.cooker.data)
 
         # Handle runonly
         if self.cooker.configuration.runonly:
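The rewritten --runall handling above is a fixed-point loop: marking one matching task active can pull new recipes into the pruned task list, which can in turn contain further matches, so the loop repeats until an iteration adds no new tids. A toy model of the same idea (illustrative data structures, not BitBake's):

    def expand(runnable, wanted_task, depends):
        # runnable: set of known tids; depends: tid -> set of dependency tids
        selected = set()
        added = True
        while added:
            orig = set(selected)
            for tid in list(runnable):
                if tid.endswith(":" + wanted_task):
                    selected.add(tid)
                    # selecting a task may reveal previously unseen tids
                    runnable.update(depends.get(tid, ()))
            added = selected - orig  # empty set ends the loop
        return selected

    deps = {"a:do_fetch": {"b:do_fetch"}}
    print(expand({"a:do_fetch"}, "do_fetch", deps))
    # {'a:do_fetch', 'b:do_fetch'}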
@@ -960,19 +1065,19 @@ class RunQueueData:
             for task in self.cooker.configuration.runonly:
                 if not task.startswith("do_"):
                     task = "do_{0}".format(task)
-                runonly_tids = { k: v for k, v in self.runtaskentries.items() if taskname_from_tid(k) == task }
+                runonly_tids = [k for k in self.runtaskentries.keys() if taskname_from_tid(k) == task]
 
-                for tid in list(runonly_tids):
-                    mark_active(tid,1)
+                for tid in runonly_tids:
+                    mark_active(tid, 1)
                     if self.cooker.configuration.force:
                         invalidate_task(tid, False)
 
             for tid in list(self.runtaskentries.keys()):
                 if tid not in runq_build:
-                    delcount[tid] = self.runtaskentries[tid]
+                    delcount.add(tid)
                     del self.runtaskentries[tid]
 
-            if len(self.runtaskentries) == 0:
+            if not self.runtaskentries:
                 bb.msg.fatal("RunQueue", "Could not find any tasks with the tasknames %s to run within the taskgraphs of the targets %s" % (str(self.cooker.configuration.runonly), str(self.targets)))
 
         #
@@ -980,8 +1085,8 @@ class RunQueueData:
         #
 
         # Check to make sure we still have tasks to run
-        if len(self.runtaskentries) == 0:
-            if not taskData[''].abort:
+        if not self.runtaskentries:
+            if not taskData[''].halt:
                 bb.msg.fatal("RunQueue", "All buildable tasks have been run but the build is incomplete (--continue mode). Errors for the tasks that failed will have been printed above.")
             else:
                 bb.msg.fatal("RunQueue", "No active tasks and not in --continue mode?! Please report this bug.")
@@ -991,6 +1096,7 @@ class RunQueueData:
         logger.verbose("Assign Weightings")
 
         self.init_progress_reporter.next_stage()
+        bb.event.check_for_interrupts(self.cooker.data)
 
         # Generate a list of reverse dependencies to ease future calculations
         for tid in self.runtaskentries:
@@ -998,13 +1104,14 @@ class RunQueueData:
                 self.runtaskentries[dep].revdeps.add(tid)
 
         self.init_progress_reporter.next_stage()
+        bb.event.check_for_interrupts(self.cooker.data)
 
         # Identify tasks at the end of dependency chains
         # Error on circular dependency loops (length two)
         endpoints = []
         for tid in self.runtaskentries:
             revdeps = self.runtaskentries[tid].revdeps
-            if len(revdeps) == 0:
+            if not revdeps:
                 endpoints.append(tid)
             for dep in revdeps:
                 if dep in self.runtaskentries[tid].depends:
@@ -1014,12 +1121,14 @@ class RunQueueData:
         logger.verbose("Compute totals (have %s endpoint(s))", len(endpoints))
 
         self.init_progress_reporter.next_stage()
+        bb.event.check_for_interrupts(self.cooker.data)
 
         # Calculate task weights
         # Check for longer circular dependencies
         self.runq_weight = self.calculate_task_weights(endpoints)
 
         self.init_progress_reporter.next_stage()
+        bb.event.check_for_interrupts(self.cooker.data)
 
         # Sanity Check - Check for multiple tasks building the same provider
         for mc in self.dataCaches:
@@ -1040,7 +1149,7 @@ class RunQueueData:
             for prov in prov_list:
                 if len(prov_list[prov]) < 2:
                     continue
-                if prov in self.multi_provider_whitelist:
+                if prov in self.multi_provider_allowed:
                     continue
                 seen_pn = []
                 # If two versions of the same PN are being built, it's fatal; we don't support it.
@@ -1050,12 +1159,12 @@ class RunQueueData:
                         seen_pn.append(pn)
                     else:
                         bb.fatal("Multiple versions of %s are due to be built (%s). Only one version of a given PN should be built in any given build. You likely need to set PREFERRED_VERSION_%s to select the correct version or don't depend on multiple versions." % (pn, " ".join(prov_list[prov]), pn))
-                msg = "Multiple .bb files are due to be built which each provide %s:\n %s" % (prov, "\n ".join(prov_list[prov]))
+                msgs = ["Multiple .bb files are due to be built which each provide %s:\n %s" % (prov, "\n ".join(prov_list[prov]))]
                 #
                 # Construct a list of things which uniquely depend on each provider
                 # since this may help the user figure out which dependency is triggering this warning
                 #
-                msg += "\nA list of tasks depending on these providers is shown and may help explain where the dependency comes from."
+                msgs.append("\nA list of tasks depending on these providers is shown and may help explain where the dependency comes from.")
                 deplist = {}
                 commondeps = None
                 for provfn in prov_list[prov]:
@@ -1075,12 +1184,12 @@ class RunQueueData:
                         commondeps &= deps
                     deplist[provfn] = deps
                 for provfn in deplist:
-                    msg += "\n%s has unique dependees:\n %s" % (provfn, "\n ".join(deplist[provfn] - commondeps))
+                    msgs.append("\n%s has unique dependees:\n %s" % (provfn, "\n ".join(deplist[provfn] - commondeps)))
                 #
                 # Construct a list of provides and runtime providers for each recipe
                 # (rprovides has to cover RPROVIDES, PACKAGES, PACKAGES_DYNAMIC)
                 #
-                msg += "\nIt could be that one recipe provides something the other doesn't and should. The following provider and runtime provider differences may be helpful."
+                msgs.append("\nIt could be that one recipe provides something the other doesn't and should. The following provider and runtime provider differences may be helpful.")
                 provide_results = {}
                 rprovide_results = {}
                 commonprovs = None
@@ -1107,30 +1216,20 @@ class RunQueueData:
                     else:
                         commonrprovs &= rprovides
                     rprovide_results[provfn] = rprovides
-                #msg += "\nCommon provides:\n %s" % ("\n ".join(commonprovs))
-                #msg += "\nCommon rprovides:\n %s" % ("\n ".join(commonrprovs))
+                #msgs.append("\nCommon provides:\n %s" % ("\n ".join(commonprovs)))
+                #msgs.append("\nCommon rprovides:\n %s" % ("\n ".join(commonrprovs)))
                 for provfn in prov_list[prov]:
-                    msg += "\n%s has unique provides:\n %s" % (provfn, "\n ".join(provide_results[provfn] - commonprovs))
-                    msg += "\n%s has unique rprovides:\n %s" % (provfn, "\n ".join(rprovide_results[provfn] - commonrprovs))
+                    msgs.append("\n%s has unique provides:\n %s" % (provfn, "\n ".join(provide_results[provfn] - commonprovs)))
+                    msgs.append("\n%s has unique rprovides:\n %s" % (provfn, "\n ".join(rprovide_results[provfn] - commonrprovs)))
 
                 if self.warn_multi_bb:
-                    logger.verbnote(msg)
+                    logger.verbnote("".join(msgs))
                 else:
-                    logger.error(msg)
+                    logger.error("".join(msgs))
 
         self.init_progress_reporter.next_stage()
-
-        # Create a whitelist usable by the stamp checks
-        self.stampfnwhitelist = {}
-        for mc in self.taskData:
-            self.stampfnwhitelist[mc] = []
-            for entry in self.stampwhitelist.split():
-                if entry not in self.taskData[mc].build_targets:
-                    continue
-                fn = self.taskData.build_targets[entry][0]
-                self.stampfnwhitelist[mc].append(fn)
-
         self.init_progress_reporter.next_stage()
+        bb.event.check_for_interrupts(self.cooker.data)
 
         # Iterate over the task list looking for tasks with a 'setscene' function
         self.runq_setscene_tids = set()
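A side note on the msg-to-msgs change in the hunks above: appending fragments to a list and joining once is linear in the total output size, whereas repeated string += concatenation re-copies the accumulated message on every append. A minimal demonstration:

    pieces = []
    pieces.append("Multiple .bb files are due to be built which each provide foo:")
    pieces.append("\n /path/a_1.0.bb")
    pieces.append("\n /path/b_1.0.bb")
    message = "".join(pieces)  # single allocation at the end
    print(message)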
@@ -1143,6 +1242,7 @@ class RunQueueData:
                     self.runq_setscene_tids.add(tid)
 
         self.init_progress_reporter.next_stage()
+        bb.event.check_for_interrupts(self.cooker.data)
 
         # Invalidate task if force mode active
         if self.cooker.configuration.force:
@@ -1159,6 +1259,7 @@ class RunQueueData:
                 invalidate_task(fn + ":" + st, True)
 
         self.init_progress_reporter.next_stage()
+        bb.event.check_for_interrupts(self.cooker.data)
 
         # Create and print to the logs a virtual/xxxx -> PN (fn) table
         for mc in taskData:
@@ -1171,30 +1272,45 @@ class RunQueueData:
             bb.parse.siggen.tasks_resolved(virtmap, virtpnmap, self.dataCaches[mc])
 
         self.init_progress_reporter.next_stage()
+        bb.event.check_for_interrupts(self.cooker.data)
 
         bb.parse.siggen.set_setscene_tasks(self.runq_setscene_tids)
 
+        starttime = time.time()
+        lasttime = starttime
+
         # Iterate over the task list and call into the siggen code
         dealtwith = set()
         todeal = set(self.runtaskentries)
-        while len(todeal) > 0:
+        while todeal:
+            ready = set()
             for tid in todeal.copy():
-                if len(self.runtaskentries[tid].depends - dealtwith) == 0:
-                    dealtwith.add(tid)
-                    todeal.remove(tid)
-                    self.prepare_task_hash(tid)
+                if not (self.runtaskentries[tid].depends - dealtwith):
+                    self.runtaskentries[tid].taskhash_deps = bb.parse.siggen.prep_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches)
+                    # get_taskhash for a given tid *must* be called before get_unihash* below
+                    self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches)
+                    ready.add(tid)
+            unihashes = bb.parse.siggen.get_unihashes(ready)
+            for tid in ready:
+                dealtwith.add(tid)
+                todeal.remove(tid)
+                self.runtaskentries[tid].unihash = unihashes[tid]
+
+            bb.event.check_for_interrupts(self.cooker.data)
+
+            if time.time() > (lasttime + 30):
+                lasttime = time.time()
+                hashequiv_logger.verbose("Initial setup loop progress: %s of %s in %s" % (len(todeal), len(self.runtaskentries), lasttime - starttime))
+
+        endtime = time.time()
+        if (endtime-starttime > 60):
+            hashequiv_logger.verbose("Initial setup loop took: %s" % (endtime-starttime))
 
         bb.parse.siggen.writeout_file_checksum_cache()
 
         #self.dump_data()
         return len(self.runtaskentries)
 
-    def prepare_task_hash(self, tid):
-        dc = bb.parse.siggen.get_data_caches(self.dataCaches, mc_from_tid(tid))
-        bb.parse.siggen.prep_taskhash(tid, self.runtaskentries[tid].depends, dc)
-        self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(tid, self.runtaskentries[tid].depends, dc)
-        self.runtaskentries[tid].unihash = bb.parse.siggen.get_unihash(tid)
-
     def dump_data(self):
         """
         Dump some debug information on the internal data structures
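The reworked loop above walks the task graph in dependency order: each pass collects a "ready" wave whose dependencies have all been hashed, computes those tasks' hashes, then resolves the whole wave's unihashes in one get_unihashes() batch rather than a per-task get_unihash() call. A toy model of the wave traversal (stand-in hash strings, not siggen):

    deps = {"a": set(), "b": {"a"}, "c": {"a", "b"}}
    hashes, dealtwith, todeal = {}, set(), set(deps)

    while todeal:
        # tasks whose dependencies are all done form the next wave
        ready = {t for t in todeal if not (deps[t] - dealtwith)}
        for t in ready:
            # order matters in the real code: get_taskhash before get_unihashes
            hashes[t] = "hash(%s|%s)" % (t, ",".join(sorted(hashes[d] for d in deps[t])))
        # one batched lookup per wave (get_unihashes in the real code)
        unihashes = {t: hashes[t] for t in ready}
        dealtwith |= ready
        todeal -= ready
        print("wave:", sorted(ready), "->", unihashes)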
@@ -1218,7 +1334,6 @@ class RunQueue:
         self.cfgData = cfgData
         self.rqdata = RunQueueData(self, cooker, cfgData, dataCaches, taskData, targets)
 
-        self.stamppolicy = cfgData.getVar("BB_STAMP_POLICY") or "perfile"
         self.hashvalidate = cfgData.getVar("BB_HASHCHECK_FUNCTION") or None
         self.depvalidate = cfgData.getVar("BB_SETSCENE_DEPVALID") or None
 
@@ -1237,30 +1352,40 @@ class RunQueue:
         self.worker = {}
         self.fakeworker = {}
 
+    @staticmethod
+    def send_pickled_data(worker, data, name):
+        msg = bytearray()
+        msg.extend(b"<" + name.encode() + b">")
+        pickled_data = pickle.dumps(data)
+        msg.extend(len(pickled_data).to_bytes(4, 'big'))
+        msg.extend(pickled_data)
+        msg.extend(b"</" + name.encode() + b">")
+        worker.stdin.write(msg)
+
     def _start_worker(self, mc, fakeroot = False, rqexec = None):
         logger.debug("Starting bitbake-worker")
         magic = "decafbad"
         if self.cooker.configuration.profile:
             magic = "decafbadbad"
+        fakerootlogs = None
+
+        workerscript = os.path.realpath(os.path.dirname(__file__) + "/../../bin/bitbake-worker")
         if fakeroot:
             magic = magic + "beef"
             mcdata = self.cooker.databuilder.mcdata[mc]
             fakerootcmd = shlex.split(mcdata.getVar("FAKEROOTCMD"))
             fakerootenv = (mcdata.getVar("FAKEROOTBASEENV") or "").split()
             env = os.environ.copy()
-            for key, value in (var.split('=') for var in fakerootenv):
+            for key, value in (var.split('=',1) for var in fakerootenv):
                 env[key] = value
-            worker = subprocess.Popen(fakerootcmd + ["bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE, env=env)
+            worker = subprocess.Popen(fakerootcmd + [sys.executable, workerscript, magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE, env=env)
+            fakerootlogs = self.rqdata.dataCaches[mc].fakerootlogs
         else:
-            worker = subprocess.Popen(["bitbake-worker", magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
+            worker = subprocess.Popen([sys.executable, workerscript, magic], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
         bb.utils.nonblockingfd(worker.stdout)
-        workerpipe = runQueuePipe(worker.stdout, None, self.cfgData, self, rqexec)
+        workerpipe = runQueuePipe(worker.stdout, None, self.cfgData, self, rqexec, fakerootlogs=fakerootlogs)
 
         workerdata = {
-            "taskdeps" : self.rqdata.dataCaches[mc].task_deps,
-            "fakerootenv" : self.rqdata.dataCaches[mc].fakerootenv,
-            "fakerootdirs" : self.rqdata.dataCaches[mc].fakerootdirs,
-            "fakerootnoenv" : self.rqdata.dataCaches[mc].fakerootnoenv,
             "sigdata" : bb.parse.siggen.get_taskdata(),
             "logdefaultlevel" : bb.msg.loggerDefaultLogLevel,
             "build_verbose_shell" : self.cooker.configuration.build_verbose_shell,
@@ -1274,9 +1399,9 @@ class RunQueue:
             "umask" : self.cfgData.getVar("BB_DEFAULT_UMASK"),
         }
 
-        worker.stdin.write(b"<cookerconfig>" + pickle.dumps(self.cooker.configuration) + b"</cookerconfig>")
-        worker.stdin.write(b"<extraconfigdata>" + pickle.dumps(self.cooker.extraconfigdata) + b"</extraconfigdata>")
-        worker.stdin.write(b"<workerdata>" + pickle.dumps(workerdata) + b"</workerdata>")
+        RunQueue.send_pickled_data(worker, self.cooker.configuration, "cookerconfig")
+        RunQueue.send_pickled_data(worker, self.cooker.extraconfigdata, "extraconfigdata")
+        RunQueue.send_pickled_data(worker, workerdata, "workerdata")
         worker.stdin.flush()
 
         return RunQueueWorker(worker, workerpipe)
@@ -1286,7 +1411,7 @@ class RunQueue:
             return
         logger.debug("Teardown for bitbake-worker")
         try:
-            worker.process.stdin.write(b"<quit></quit>")
+            RunQueue.send_pickled_data(worker.process, b"", "quit")
             worker.process.stdin.flush()
             worker.process.stdin.close()
         except IOError:
@@ -1298,12 +1423,12 @@ class RunQueue:
                 continue
             worker.pipe.close()
 
-    def start_worker(self):
+    def start_worker(self, rqexec):
         if self.worker:
             self.teardown_workers()
         self.teardown = False
         for mc in self.rqdata.dataCaches:
-            self.worker[mc] = self._start_worker(mc)
+            self.worker[mc] = self._start_worker(mc, False, rqexec)
 
     def start_fakeworker(self, rqexec, mc):
         if not mc in self.fakeworker:
@@ -1345,15 +1470,7 @@ class RunQueue:
         if taskname is None:
             taskname = tn
 
-        if self.stamppolicy == "perfile":
-            fulldeptree = False
-        else:
-            fulldeptree = True
-        stampwhitelist = []
-        if self.stamppolicy == "whitelist":
-            stampwhitelist = self.rqdata.stampfnwhitelist[mc]
-
-        stampfile = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn)
+        stampfile = bb.parse.siggen.stampfile_mcfn(taskname, taskfn)
 
         # If the stamp is missing, it's not current
         if not os.access(stampfile, os.F_OK):
@@ -1365,7 +1482,7 @@ class RunQueue:
             logger.debug2("%s.%s is nostamp\n", fn, taskname)
             return False
 
-        if taskname != "do_setscene" and taskname.endswith("_setscene"):
+        if taskname.endswith("_setscene"):
             return True
 
         if cache is None:
@@ -1376,15 +1493,15 @@ class RunQueue:
         for dep in self.rqdata.runtaskentries[tid].depends:
             if iscurrent:
                 (mc2, fn2, taskname2, taskfn2) = split_tid_mcfn(dep)
-                stampfile2 = bb.build.stampfile(taskname2, self.rqdata.dataCaches[mc2], taskfn2)
-                stampfile3 = bb.build.stampfile(taskname2 + "_setscene", self.rqdata.dataCaches[mc2], taskfn2)
+                stampfile2 = bb.parse.siggen.stampfile_mcfn(taskname2, taskfn2)
+                stampfile3 = bb.parse.siggen.stampfile_mcfn(taskname2 + "_setscene", taskfn2)
                 t2 = get_timestamp(stampfile2)
                 t3 = get_timestamp(stampfile3)
                 if t3 and not t2:
                     continue
                 if t3 and t3 > t2:
                     continue
-                if fn == fn2 or (fulldeptree and fn2 not in stampwhitelist):
+                if fn == fn2:
                     if not t2:
                         logger.debug2('Stampfile %s does not exist', stampfile2)
                         iscurrent = False
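A restatement of the dependency stamp rule applied above, using plain file mtimes: a dependency is considered covered when only its setscene stamp exists, or when the setscene stamp (t3) is newer than the real stamp (t2); otherwise the real stamp itself must exist. A simplified sketch (file paths are placeholders):

    import os

    def get_timestamp(path):
        try:
            return os.stat(path).st_mtime
        except OSError:
            return None

    def dep_stamp_ok(dep_stamp, dep_setscene_stamp):
        t2 = get_timestamp(dep_stamp)            # real task stamp
        t3 = get_timestamp(dep_setscene_stamp)   # _setscene variant stamp
        if t3 and not t2:
            return True   # only the setscene variant ran; that covers the dep
        if t3 and t3 > t2:
            return True   # setscene stamp is newer than the real stamp
        return t2 is not None  # otherwise the real stamp itself must exist

    print(dep_stamp_ok("/tmp/stamps/demo.do_compile",
                       "/tmp/stamps/demo.do_compile_setscene"))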
@@ -1434,10 +1551,11 @@ class RunQueue:
         """
         Run the tasks in a queue prepared by rqdata.prepare()
         Upon failure, optionally try to recover the build using any alternate providers
-        (if the abort on failure configuration option isn't set)
+        (if the halt on failure configuration option isn't set)
         """
 
         retval = True
+        bb.event.check_for_interrupts(self.cooker.data)
 
         if self.state is runQueuePrepare:
             # NOTE: if you add, remove or significantly refactor the stages of this
@@ -1466,31 +1584,37 @@ class RunQueue:
 
         if not self.dm_event_handler_registered:
             res = bb.event.register(self.dm_event_handler_name,
-                             lambda x: self.dm.check(self) if self.state in [runQueueRunning, runQueueCleanUp] else False,
+                             lambda x, y: self.dm.check(self) if self.state in [runQueueRunning, runQueueCleanUp] else False,
                              ('bb.event.HeartbeatEvent',), data=self.cfgData)
             self.dm_event_handler_registered = True
 
-        dump = self.cooker.configuration.dump_signatures
-        if dump:
+        self.rqdata.init_progress_reporter.next_stage()
+        self.rqexe = RunQueueExecute(self)
+
+        dumpsigs = self.cooker.configuration.dump_signatures
+        if dumpsigs:
             self.rqdata.init_progress_reporter.finish()
-            if 'printdiff' in dump:
-                invalidtasks = self.print_diffscenetasks()
-            self.dump_signatures(dump)
-            if 'printdiff' in dump:
-                self.write_diffscenetasks(invalidtasks)
+            if 'printdiff' in dumpsigs:
+                self.invalidtasks_dump = self.print_diffscenetasks()
+            self.state = runQueueDumpSigs
+
+        if self.state is runQueueDumpSigs:
+            dumpsigs = self.cooker.configuration.dump_signatures
+            retval = self.dump_signatures(dumpsigs)
+            if retval is False:
+                if 'printdiff' in dumpsigs:
+                    self.write_diffscenetasks(self.invalidtasks_dump)
                 self.state = runQueueComplete
 
         if self.state is runQueueSceneInit:
-            self.rqdata.init_progress_reporter.next_stage()
-            self.start_worker()
-            self.rqdata.init_progress_reporter.next_stage()
-            self.rqexe = RunQueueExecute(self)
+            self.start_worker(self.rqexe)
+            self.rqdata.init_progress_reporter.finish()
 
             # If we don't have any setscene functions, skip execution
-            if len(self.rqdata.runq_setscene_tids) == 0:
+            if not self.rqdata.runq_setscene_tids:
                 logger.info('No setscene tasks')
                 for tid in self.rqdata.runtaskentries:
-                    if len(self.rqdata.runtaskentries[tid].depends) == 0:
+                    if not self.rqdata.runtaskentries[tid].depends:
                         self.rqexe.setbuildable(tid)
                     self.rqexe.tasks_notcovered.add(tid)
                 self.rqexe.sqdone = True
@@ -1563,43 +1687,62 @@ class RunQueue:
         else:
             self.rqexe.finish()
 
-    def rq_dump_sigfn(self, fn, options):
-        bb_cache = bb.cache.NoCache(self.cooker.databuilder)
-        mc = bb.runqueue.mc_from_tid(fn)
-        the_data = bb_cache.loadDataFull(fn, self.cooker.collections[mc].get_file_appends(fn))
-        siggen = bb.parse.siggen
-        dataCaches = self.rqdata.dataCaches
-        siggen.dump_sigfn(fn, dataCaches, options)
+    def _rq_dump_sigtid(self, tids):
+        for tid in tids:
+            (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
+            dataCaches = self.rqdata.dataCaches
+            bb.parse.siggen.dump_sigtask(taskfn, taskname, dataCaches[mc].stamp[taskfn], True)
 
     def dump_signatures(self, options):
-        fns = set()
-        bb.note("Reparsing files to collect dependency data")
+        if not hasattr(self, "dumpsigs_launched"):
+            if bb.cooker.CookerFeatures.RECIPE_SIGGEN_INFO not in self.cooker.featureset:
+                bb.fatal("The dump signatures functionality needs the RECIPE_SIGGEN_INFO feature enabled")
 
-        for tid in self.rqdata.runtaskentries:
-            fn = fn_from_tid(tid)
-            fns.add(fn)
-
-        max_process = int(self.cfgData.getVar("BB_NUMBER_PARSE_THREADS") or os.cpu_count() or 1)
-        # We cannot use the real multiprocessing.Pool easily due to some local data
-        # that can't be pickled. This is a cheap multi-process solution.
-        launched = []
-        while fns:
-            if len(launched) < max_process:
-                p = Process(target=self.rq_dump_sigfn, args=(fns.pop(), options))
+            bb.note("Writing task signature files")
+
+            max_process = int(self.cfgData.getVar("BB_NUMBER_PARSE_THREADS") or os.cpu_count() or 1)
+            def chunkify(l, n):
+                return [l[i::n] for i in range(n)]
+            dumpsigs_tids = chunkify(list(self.rqdata.runtaskentries), max_process)
+
+            # We cannot use the real multiprocessing.Pool easily due to some local data
+            # that can't be pickled. This is a cheap multi-process solution.
+            self.dumpsigs_launched = []
+
+            for tids in dumpsigs_tids:
+                p = Process(target=self._rq_dump_sigtid, args=(tids, ))
                 p.start()
-                launched.append(p)
-            for q in launched:
-                # The finished processes are joined when calling is_alive()
-                if not q.is_alive():
-                    launched.remove(q)
-        for p in launched:
+                self.dumpsigs_launched.append(p)
+
+            return 1.0
+
+        for q in self.dumpsigs_launched:
+            # The finished processes are joined when calling is_alive()
+            if not q.is_alive():
+                self.dumpsigs_launched.remove(q)
+
+        if self.dumpsigs_launched:
+            return 1.0
+
+        for p in self.dumpsigs_launched:
             p.join()
 
         bb.parse.siggen.dump_sigs(self.rqdata.dataCaches, options)
 
-        return
+        return False
 
     def print_diffscenetasks(self):
+        def get_root_invalid_tasks(task, taskdepends, valid, noexec, visited_invalid):
+            invalidtasks = []
+            for t in taskdepends[task].depends:
+                if t not in valid and t not in visited_invalid:
+                    invalidtasks.extend(get_root_invalid_tasks(t, taskdepends, valid, noexec, visited_invalid))
+                visited_invalid.add(t)
+
+            direct_invalid = [t for t in taskdepends[task].depends if t not in valid]
+            if not direct_invalid and task not in noexec:
+                invalidtasks = [task]
+            return invalidtasks
 
         noexec = []
         tocheck = set()
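chunkify() above splits the task list into n round-robin slices via extended slicing (element i lands in bucket i % n), giving one child process per slice. dump_signatures() now also returns 1.0 while any children are alive, so the new runQueueDumpSigs state keeps polling it, and returns False once dump_sigs() has run. A quick demonstration of the slicing:

    def chunkify(l, n):
        return [l[i::n] for i in range(n)]

    tids = ["t0", "t1", "t2", "t3", "t4", "t5", "t6"]
    print(chunkify(tids, 3))
    # [['t0', 't3', 't6'], ['t1', 't4'], ['t2', 't5']]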
@@ -1633,46 +1776,49 @@ class RunQueue:
                     valid_new.add(dep)
 
         invalidtasks = set()
-        for tid in self.rqdata.runtaskentries:
-            if tid not in valid_new and tid not in noexec:
-                invalidtasks.add(tid)
 
-        found = set()
-        processed = set()
-        for tid in invalidtasks:
+        toptasks = set(["{}:{}".format(t[3], t[2]) for t in self.rqdata.targets])
+        for tid in toptasks:
             toprocess = set([tid])
             while toprocess:
                 next = set()
+                visited_invalid = set()
                 for t in toprocess:
-                    for dep in self.rqdata.runtaskentries[t].depends:
-                        if dep in invalidtasks:
-                            found.add(tid)
-                        if dep not in processed:
-                            processed.add(dep)
-                            next.add(dep)
+                    if t not in valid_new and t not in noexec:
+                        invalidtasks.update(get_root_invalid_tasks(t, self.rqdata.runtaskentries, valid_new, noexec, visited_invalid))
+                        continue
+                    if t in self.rqdata.runq_setscene_tids:
+                        for dep in self.rqexe.sqdata.sq_deps[t]:
+                            next.add(dep)
+                        continue
+
+                    for dep in self.rqdata.runtaskentries[t].depends:
+                        next.add(dep)
+
                 toprocess = next
-                if tid in found:
-                    toprocess = set()
 
         tasklist = []
-        for tid in invalidtasks.difference(found):
+        for tid in invalidtasks:
             tasklist.append(tid)
 
         if tasklist:
             bb.plain("The differences between the current build and any cached tasks start at the following tasks:\n" + "\n".join(tasklist))
 
-        return invalidtasks.difference(found)
+        return invalidtasks
 
     def write_diffscenetasks(self, invalidtasks):
+        bb.siggen.check_siggen_version(bb.siggen)
 
         # Define recursion callback
         def recursecb(key, hash1, hash2):
             hashes = [hash1, hash2]
+            bb.debug(1, "Recursively looking for recipe {} hashes {}".format(key, hashes))
             hashfiles = bb.siggen.find_siginfo(key, None, hashes, self.cfgData)
+            bb.debug(1, "Found hashfiles:\n{}".format(hashfiles))
 
             recout = []
             if len(hashfiles) == 2:
-                out2 = bb.siggen.compare_sigfiles(hashfiles[hash1], hashfiles[hash2], recursecb)
+                out2 = bb.siggen.compare_sigfiles(hashfiles[hash1]['path'], hashfiles[hash2]['path'], recursecb)
                 recout.extend(list(' ' + l for l in out2))
             else:
                 recout.append("Unable to find matching sigdata for %s with hashes %s or %s" % (key, hash1, hash2))
@@ -1683,20 +1829,25 @@ class RunQueue:
         for tid in invalidtasks:
             (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
             pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
-            h = self.rqdata.runtaskentries[tid].hash
-            matches = bb.siggen.find_siginfo(pn, taskname, [], self.cfgData)
+            h = self.rqdata.runtaskentries[tid].unihash
+            bb.debug(1, "Looking for recipe {} task {}".format(pn, taskname))
+            matches = bb.siggen.find_siginfo(pn, taskname, [], self.cooker.databuilder.mcdata[mc])
+            bb.debug(1, "Found hashfiles:\n{}".format(matches))
             match = None
-            for m in matches:
-                if h in m:
-                    match = m
+            for m in matches.values():
+                if h in m['path']:
+                    match = m['path']
             if match is None:
-                bb.fatal("Can't find a task we're supposed to have written out? (hash: %s)?" % h)
+                bb.fatal("Can't find a task we're supposed to have written out? (hash: %s tid: %s)?" % (h, tid))
             matches = {k : v for k, v in iter(matches.items()) if h not in k}
+            matches_local = {k : v for k, v in iter(matches.items()) if h not in k and not v['sstate']}
+            if matches_local:
+                matches = matches_local
             if matches:
-                latestmatch = sorted(matches.keys(), key=lambda f: matches[f])[-1]
+                latestmatch = matches[sorted(matches.keys(), key=lambda h: matches[h]['time'])[-1]]['path']
                 prevh = __find_sha256__.search(latestmatch).group(0)
                 output = bb.siggen.compare_sigfiles(latestmatch, match, recursecb)
-                bb.plain("\nTask %s:%s couldn't be used from the cache because:\n We need hash %s, closest matching task was %s\n " % (pn, taskname, h, prevh) + '\n '.join(output))
+                bb.plain("\nTask %s:%s couldn't be used from the cache because:\n We need hash %s, most recent matching task was %s\n " % (pn, taskname, h, prevh) + '\n '.join(output))
 
 
 class RunQueueExecute:
@@ -1709,6 +1860,10 @@ class RunQueueExecute:
 
         self.number_tasks = int(self.cfgData.getVar("BB_NUMBER_THREADS") or 1)
         self.scheduler = self.cfgData.getVar("BB_SCHEDULER") or "speed"
+        self.max_cpu_pressure = self.cfgData.getVar("BB_PRESSURE_MAX_CPU")
+        self.max_io_pressure = self.cfgData.getVar("BB_PRESSURE_MAX_IO")
+        self.max_memory_pressure = self.cfgData.getVar("BB_PRESSURE_MAX_MEMORY")
+        self.max_loadfactor = self.cfgData.getVar("BB_LOADFACTOR_MAX")
 
         self.sq_buildable = set()
         self.sq_running = set()
@@ -1726,6 +1881,8 @@ class RunQueueExecute:
         self.build_stamps2 = []
         self.failed_tids = []
         self.sq_deferred = {}
+        self.sq_needed_harddeps = set()
+        self.sq_harddep_deferred = set()
 
         self.stampcache = {}
 
@@ -1733,17 +1890,39 @@ class RunQueueExecute:
         self.holdoff_need_update = True
         self.sqdone = False
 
-        self.stats = RunQueueStats(len(self.rqdata.runtaskentries))
-        self.sq_stats = RunQueueStats(len(self.rqdata.runq_setscene_tids))
-
-        for mc in rq.worker:
-            rq.worker[mc].pipe.setrunqueueexec(self)
-        for mc in rq.fakeworker:
-            rq.fakeworker[mc].pipe.setrunqueueexec(self)
+        self.stats = RunQueueStats(len(self.rqdata.runtaskentries), len(self.rqdata.runq_setscene_tids))
 
         if self.number_tasks <= 0:
             bb.fatal("Invalid BB_NUMBER_THREADS %s" % self.number_tasks)
 
+        lower_limit = 1.0
+        upper_limit = 1000000.0
+        if self.max_cpu_pressure:
+            self.max_cpu_pressure = float(self.max_cpu_pressure)
+            if self.max_cpu_pressure < lower_limit:
+                bb.fatal("Invalid BB_PRESSURE_MAX_CPU %s, minimum value is %s." % (self.max_cpu_pressure, lower_limit))
+            if self.max_cpu_pressure > upper_limit:
+                bb.warn("Your build will be largely unregulated since BB_PRESSURE_MAX_CPU is set to %s. It is very unlikely that such high pressure will be experienced." % (self.max_cpu_pressure))
+
+        if self.max_io_pressure:
+            self.max_io_pressure = float(self.max_io_pressure)
+            if self.max_io_pressure < lower_limit:
+                bb.fatal("Invalid BB_PRESSURE_MAX_IO %s, minimum value is %s." % (self.max_io_pressure, lower_limit))
+            if self.max_io_pressure > upper_limit:
+                bb.warn("Your build will be largely unregulated since BB_PRESSURE_MAX_IO is set to %s. It is very unlikely that such high pressure will be experienced." % (self.max_io_pressure))
+
+        if self.max_memory_pressure:
+            self.max_memory_pressure = float(self.max_memory_pressure)
+            if self.max_memory_pressure < lower_limit:
+                bb.fatal("Invalid BB_PRESSURE_MAX_MEMORY %s, minimum value is %s." % (self.max_memory_pressure, lower_limit))
+            if self.max_memory_pressure > upper_limit:
+                bb.warn("Your build will be largely unregulated since BB_PRESSURE_MAX_MEMORY is set to %s. It is very unlikely that such high pressure will be experienced." % (self.max_memory_pressure))
+
+        if self.max_loadfactor:
+            self.max_loadfactor = float(self.max_loadfactor)
+            if self.max_loadfactor <= 0:
+                bb.fatal("Invalid BB_LOADFACTOR_MAX %s, needs to be greater than zero." % (self.max_loadfactor))
+
         # List of setscene tasks which we've covered
         self.scenequeue_covered = set()
         # List of tasks which are covered (including setscene ones)
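Usage note, hedged: the four knobs above are read as strings from the datastore and validated once here. The pressure values are compared against the same accumulated-PSI-per-second rates computed by exceeds_max_pressure(), so workable settings are typically in the thousands, while BB_LOADFACTOR_MAX is a load average divided by CPU count. The concrete numbers below are illustrative, not recommendations:

    # Values a user might set in their configuration; the checks mirror
    # the bounds enforced above.
    cfg = {
        "BB_PRESSURE_MAX_CPU": "15000",
        "BB_PRESSURE_MAX_IO": "15000",
        "BB_PRESSURE_MAX_MEMORY": "15000",
        "BB_LOADFACTOR_MAX": "1.5",   # load average / cpu count
    }

    lower_limit, upper_limit = 1.0, 1000000.0
    for key in ("BB_PRESSURE_MAX_CPU", "BB_PRESSURE_MAX_IO", "BB_PRESSURE_MAX_MEMORY"):
        value = float(cfg[key])
        assert value >= lower_limit, "%s below %s is rejected" % (key, lower_limit)
        if value > upper_limit:
            print("%s=%s is effectively no limit" % (key, value))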
@@ -1753,11 +1932,6 @@ class RunQueueExecute:
1753 self.tasks_notcovered = set() 1932 self.tasks_notcovered = set()
1754 self.scenequeue_notneeded = set() 1933 self.scenequeue_notneeded = set()
1755 1934
1756 # We can't skip specified target tasks which aren't setscene tasks
1757 self.cantskip = set(self.rqdata.target_tids)
1758 self.cantskip.difference_update(self.rqdata.runq_setscene_tids)
1759 self.cantskip.intersection_update(self.rqdata.runtaskentries)
1760
1761 schedulers = self.get_schedulers() 1935 schedulers = self.get_schedulers()
1762 for scheduler in schedulers: 1936 for scheduler in schedulers:
1763 if self.scheduler == scheduler.name: 1937 if self.scheduler == scheduler.name:
@@ -1768,11 +1942,29 @@ class RunQueueExecute:
1768 bb.fatal("Invalid scheduler '%s'. Available schedulers: %s" % 1942 bb.fatal("Invalid scheduler '%s'. Available schedulers: %s" %
1769 (self.scheduler, ", ".join(obj.name for obj in schedulers))) 1943 (self.scheduler, ", ".join(obj.name for obj in schedulers)))
1770 1944
1771 #if len(self.rqdata.runq_setscene_tids) > 0: 1945 #if self.rqdata.runq_setscene_tids:
1772 self.sqdata = SQData() 1946 self.sqdata = SQData()
1773 build_scenequeue_data(self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self) 1947 build_scenequeue_data(self.sqdata, self.rqdata, self)
1948
1949 update_scenequeue_data(self.sqdata.sq_revdeps, self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=True)
1950
1951 # Compute a list of 'stale' sstate tasks where the current hash does not match the one
1952 # in any stamp files. Pass the list out to metadata as an event.
1953 found = {}
1954 for tid in self.rqdata.runq_setscene_tids:
1955 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
1956 stamps = bb.build.find_stale_stamps(taskname, taskfn)
1957 if stamps:
1958 if mc not in found:
1959 found[mc] = {}
1960 found[mc][tid] = stamps
1961 for mc in found:
1962 event = bb.event.StaleSetSceneTasks(found[mc])
1963 bb.event.fire(event, self.cooker.databuilder.mcdata[mc])
1964
1965 self.build_taskdepdata_cache()
1774 1966
1775 def runqueue_process_waitpid(self, task, status): 1967 def runqueue_process_waitpid(self, task, status, fakerootlog=None):
1776 1968
1777 # self.build_stamps[pid] may not exist when use shared work directory. 1969 # self.build_stamps[pid] may not exist when use shared work directory.
1778 if task in self.build_stamps: 1970 if task in self.build_stamps:
@@ -1785,9 +1977,10 @@ class RunQueueExecute:
1785 else: 1977 else:
1786 self.sq_task_complete(task) 1978 self.sq_task_complete(task)
1787 self.sq_live.remove(task) 1979 self.sq_live.remove(task)
1980 self.stats.updateActiveSetscene(len(self.sq_live))
1788 else: 1981 else:
1789 if status != 0: 1982 if status != 0:
1790 self.task_fail(task, status) 1983 self.task_fail(task, status, fakerootlog=fakerootlog)
1791 else: 1984 else:
1792 self.task_complete(task) 1985 self.task_complete(task)
1793 return True 1986 return True
@@ -1795,20 +1988,20 @@ class RunQueueExecute:
1795 def finish_now(self): 1988 def finish_now(self):
1796 for mc in self.rq.worker: 1989 for mc in self.rq.worker:
1797 try: 1990 try:
1798 self.rq.worker[mc].process.stdin.write(b"<finishnow></finishnow>") 1991 RunQueue.send_pickled_data(self.rq.worker[mc].process, b"", "finishnow")
1799 self.rq.worker[mc].process.stdin.flush() 1992 self.rq.worker[mc].process.stdin.flush()
1800 except IOError: 1993 except IOError:
1801 # worker must have died? 1994 # worker must have died?
1802 pass 1995 pass
1803 for mc in self.rq.fakeworker: 1996 for mc in self.rq.fakeworker:
1804 try: 1997 try:
1805 self.rq.fakeworker[mc].process.stdin.write(b"<finishnow></finishnow>") 1998 RunQueue.send_pickled_data(self.rq.fakeworker[mc].process, b"", "finishnow")
1806 self.rq.fakeworker[mc].process.stdin.flush() 1999 self.rq.fakeworker[mc].process.stdin.flush()
1807 except IOError: 2000 except IOError:
1808 # worker must have died? 2001 # worker must have died?
1809 pass 2002 pass
1810 2003
1811 if len(self.failed_tids) != 0: 2004 if self.failed_tids:
1812 self.rq.state = runQueueFailed 2005 self.rq.state = runQueueFailed
1813 return 2006 return
1814 2007
@@ -1818,13 +2011,13 @@ class RunQueueExecute:
1818 def finish(self): 2011 def finish(self):
1819 self.rq.state = runQueueCleanUp 2012 self.rq.state = runQueueCleanUp
1820 2013
1821 active = self.stats.active + self.sq_stats.active 2014 active = self.stats.active + len(self.sq_live)
1822 if active > 0: 2015 if active > 0:
1823 bb.event.fire(runQueueExitWait(active), self.cfgData) 2016 bb.event.fire(runQueueExitWait(active), self.cfgData)
1824 self.rq.read_workers() 2017 self.rq.read_workers()
1825 return self.rq.active_fds() 2018 return self.rq.active_fds()
1826 2019
1827 if len(self.failed_tids) != 0: 2020 if self.failed_tids:
1828 self.rq.state = runQueueFailed 2021 self.rq.state = runQueueFailed
1829 return True 2022 return True
1830 2023
@@ -1851,7 +2044,7 @@ class RunQueueExecute:
1851 return valid 2044 return valid
1852 2045
1853 def can_start_task(self): 2046 def can_start_task(self):
1854 active = self.stats.active + self.sq_stats.active 2047 active = self.stats.active + len(self.sq_live)
1855 can_start = active < self.number_tasks 2048 can_start = active < self.number_tasks
1856 return can_start 2049 return can_start
1857 2050
@@ -1871,8 +2064,7 @@ class RunQueueExecute:
1871 try: 2064 try:
1872 module = __import__(modname, fromlist=(name,)) 2065 module = __import__(modname, fromlist=(name,))
1873 except ImportError as exc: 2066 except ImportError as exc:
1874 logger.critical("Unable to import scheduler '%s' from '%s': %s" % (name, modname, exc)) 2067 bb.fatal("Unable to import scheduler '%s' from '%s': %s" % (name, modname, exc))
1875 raise SystemExit(1)
1876 else: 2068 else:
1877 schedulers.add(getattr(module, name)) 2069 schedulers.add(getattr(module, name))
1878 return schedulers 2070 return schedulers
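get_schedulers() accepts extra scheduler classes by dotted "module.Class" name, and the hunk above collapses the old logger.critical() plus explicit SystemExit into a single bb.fatal() call. A rough sketch of the dotted-name import idiom involved, demonstrated with a standard-library class rather than a real scheduler:

import sys

def load_class(dotted):
    # Split "pkg.mod.ClassName" into module path and class name, then import.
    modname, _, clsname = dotted.rpartition(".")
    try:
        module = __import__(modname, fromlist=(clsname,))
    except ImportError as exc:
        sys.exit("Unable to import scheduler '%s' from '%s': %s" % (clsname, modname, exc))
    return getattr(module, clsname)

OrderedDict = load_class("collections.OrderedDict")
print(OrderedDict([("a", 1)]))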
@@ -1902,21 +2094,52 @@ class RunQueueExecute:
1902 self.setbuildable(revdep) 2094 self.setbuildable(revdep)
1903 logger.debug("Marking task %s as buildable", revdep) 2095 logger.debug("Marking task %s as buildable", revdep)
1904 2096
2097 found = None
2098 for t in sorted(self.sq_deferred.copy()):
2099 if self.sq_deferred[t] == task:
2100 # Allow the next deferred task to run. Any other deferred tasks should be deferred after that task.
2101 # We shouldn't allow all to run at once as it is prone to races.
2102 if not found:
2103 bb.debug(1, "Deferred task %s now buildable" % t)
2104 del self.sq_deferred[t]
2105 update_scenequeue_data([t], self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=False)
2106 found = t
2107 else:
2108 bb.debug(1, "Deferring %s after %s" % (t, found))
2109 self.sq_deferred[t] = found
2110
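The block above releases exactly one deferred setscene task when the task it waited on completes, and re-points every other waiter at the newly released task, keeping the chain serialized rather than letting all waiters race. A toy model of that hand-off, assuming sq_deferred maps a waiting tid to the tid it waits on (tids invented):

def release_one(sq_deferred, completed):
    # Release the first waiter (sorted for determinism); re-defer the rest
    # behind it so only one task in the chain can run at a time.
    found = None
    for t in sorted(sq_deferred):
        if sq_deferred[t] != completed:
            continue
        if found is None:
            del sq_deferred[t]      # t may now run
            found = t
        else:
            sq_deferred[t] = found  # everyone else queues behind t
    return found

deferred = {"b:do_x": "a:do_x", "c:do_x": "a:do_x", "d:do_x": "z:do_y"}
print(release_one(deferred, "a:do_x"))  # -> "b:do_x"
print(deferred)                         # {'c:do_x': 'b:do_x', 'd:do_x': 'z:do_y'}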
1905 def task_complete(self, task): 2111 def task_complete(self, task):
1906 self.stats.taskCompleted() 2112 self.stats.taskCompleted()
1907 bb.event.fire(runQueueTaskCompleted(task, self.stats, self.rq), self.cfgData) 2113 bb.event.fire(runQueueTaskCompleted(task, self.stats, self.rq), self.cfgData)
1908 self.task_completeoutright(task) 2114 self.task_completeoutright(task)
1909 self.runq_tasksrun.add(task) 2115 self.runq_tasksrun.add(task)
1910 2116
1911 def task_fail(self, task, exitcode): 2117 def task_fail(self, task, exitcode, fakerootlog=None):
1912 """ 2118 """
1913 Called when a task has failed 2119 Called when a task has failed
1914 Updates the state engine with the failure 2120 Updates the state engine with the failure
1915 """ 2121 """
1916 self.stats.taskFailed() 2122 self.stats.taskFailed()
1917 self.failed_tids.append(task) 2123 self.failed_tids.append(task)
1918 bb.event.fire(runQueueTaskFailed(task, self.stats, exitcode, self.rq), self.cfgData) 2124
1919 if self.rqdata.taskData[''].abort: 2125 fakeroot_log = []
2126 if fakerootlog and os.path.exists(fakerootlog):
2127 with open(fakerootlog) as fakeroot_log_file:
2128 fakeroot_failed = False
2129 for line in reversed(fakeroot_log_file.readlines()):
2130 for fakeroot_error in ['mismatch', 'error', 'fatal']:
2131 if fakeroot_error in line.lower():
2132 fakeroot_failed = True
2133 if 'doing new pid setup and server start' in line:
2134 break
2135 fakeroot_log.append(line)
2136
2137 if not fakeroot_failed:
2138 fakeroot_log = []
2139
2140 bb.event.fire(runQueueTaskFailed(task, self.stats, exitcode, self.rq, fakeroot_log=("".join(fakeroot_log) or None)), self.cfgData)
2141
2142 if self.rqdata.taskData[''].halt:
1920 self.rq.state = runQueueCleanUp 2143 self.rq.state = runQueueCleanUp
1921 2144
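task_fail() now walks the pseudo (fakeroot) log backwards, collecting lines until the most recent server start, and attaches them to the failure event only if an error marker was actually seen. A self-contained sketch of that reverse scan; the marker strings follow the hunk above, the sample log lines are invented:

def extract_fakeroot_failure(lines):
    collected, failed = [], False
    for line in reversed(lines):
        if any(marker in line.lower() for marker in ("mismatch", "error", "fatal")):
            failed = True
        if "doing new pid setup and server start" in line:
            break
        collected.append(line)
    # Lines come out newest-first, matching the hunk above.
    return "".join(collected) if failed else None

log = ["doing new pid setup and server start\n",
       "path mismatch for /usr/bin/foo\n",
       "abort()ing pseudo client\n"]
print(extract_fakeroot_failure(log) or "no fakeroot error found")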
1922 def task_skip(self, task, reason): 2145 def task_skip(self, task, reason):
@@ -1931,7 +2154,7 @@ class RunQueueExecute:
1931 err = False 2154 err = False
1932 if not self.sqdone: 2155 if not self.sqdone:
1933 logger.debug('We could skip tasks %s', "\n".join(sorted(self.scenequeue_covered))) 2156 logger.debug('We could skip tasks %s', "\n".join(sorted(self.scenequeue_covered)))
1934 completeevent = sceneQueueComplete(self.sq_stats, self.rq) 2157 completeevent = sceneQueueComplete(self.stats, self.rq)
1935 bb.event.fire(completeevent, self.cfgData) 2158 bb.event.fire(completeevent, self.cfgData)
1936 if self.sq_deferred: 2159 if self.sq_deferred:
1937 logger.error("Scenequeue had deferred entries: %s" % pprint.pformat(self.sq_deferred)) 2160 logger.error("Scenequeue had deferred entries: %s" % pprint.pformat(self.sq_deferred))
@@ -1943,6 +2166,10 @@ class RunQueueExecute:
1943 logger.error("Scenequeue had holdoff tasks: %s" % pprint.pformat(self.holdoff_tasks)) 2166 logger.error("Scenequeue had holdoff tasks: %s" % pprint.pformat(self.holdoff_tasks))
1944 err = True 2167 err = True
1945 2168
2169 for tid in self.scenequeue_covered.intersection(self.scenequeue_notcovered):
2170 # No task should end up in both covered and uncovered, that is a bug.
2171 logger.error("Setscene task %s in both covered and notcovered." % tid)
2172
1946 for tid in self.rqdata.runq_setscene_tids: 2173 for tid in self.rqdata.runq_setscene_tids:
1947 if tid not in self.scenequeue_covered and tid not in self.scenequeue_notcovered: 2174 if tid not in self.scenequeue_covered and tid not in self.scenequeue_notcovered:
1948 err = True 2175 err = True
@@ -1961,7 +2188,7 @@ class RunQueueExecute:
1961 if x not in self.tasks_scenequeue_done: 2188 if x not in self.tasks_scenequeue_done:
1962 logger.error("Task %s was never processed by the setscene code" % x) 2189 logger.error("Task %s was never processed by the setscene code" % x)
1963 err = True 2190 err = True
1964 if len(self.rqdata.runtaskentries[x].depends) == 0 and x not in self.runq_buildable: 2191 if not self.rqdata.runtaskentries[x].depends and x not in self.runq_buildable:
1965 logger.error("Task %s was never marked as buildable by the setscene code" % x) 2192 logger.error("Task %s was never marked as buildable by the setscene code" % x)
1966 err = True 2193 err = True
1967 return err 2194 return err
@@ -1979,13 +2206,24 @@ class RunQueueExecute:
1979 if not hasattr(self, "sorted_setscene_tids"): 2206 if not hasattr(self, "sorted_setscene_tids"):
1980 # Don't want to sort this set every execution 2207 # Don't want to sort this set every execution
1981 self.sorted_setscene_tids = sorted(self.rqdata.runq_setscene_tids) 2208 self.sorted_setscene_tids = sorted(self.rqdata.runq_setscene_tids)
2209 # Resume looping where we left off when we returned to feed the mainloop
2210 self.setscene_tids_generator = itertools.cycle(self.rqdata.runq_setscene_tids)
1982 2211
1983 task = None 2212 task = None
1984 if not self.sqdone and self.can_start_task(): 2213 if not self.sqdone and self.can_start_task():
1985 # Find the next setscene to run 2214 loopcount = 0
1986 for nexttask in self.sorted_setscene_tids: 2215 # Find the next setscene to run, exit the loop when we've processed all tids or found something to execute
1987 if nexttask in self.sq_buildable and nexttask not in self.sq_running and self.sqdata.stamps[nexttask] not in self.build_stamps.values(): 2216 while loopcount < len(self.rqdata.runq_setscene_tids):
1988 if nexttask not in self.sqdata.unskippable and len(self.sqdata.sq_revdeps[nexttask]) > 0 and self.sqdata.sq_revdeps[nexttask].issubset(self.scenequeue_covered) and self.check_dependencies(nexttask, self.sqdata.sq_revdeps[nexttask]): 2217 loopcount += 1
2218 nexttask = next(self.setscene_tids_generator)
2219 if nexttask in self.sq_buildable and nexttask not in self.sq_running and self.sqdata.stamps[nexttask] not in self.build_stamps.values() and nexttask not in self.sq_harddep_deferred:
2220 if nexttask in self.sq_deferred and self.sq_deferred[nexttask] not in self.runq_complete:
2221 # Skip deferred tasks quickly before the 'expensive' tests below - this is key to performant multiconfig builds
2222 continue
2223 if nexttask not in self.sqdata.unskippable and self.sqdata.sq_revdeps[nexttask] and \
2224 nexttask not in self.sq_needed_harddeps and \
2225 self.sqdata.sq_revdeps[nexttask].issubset(self.scenequeue_covered) and \
2226 self.check_dependencies(nexttask, self.sqdata.sq_revdeps[nexttask]):
1989 if nexttask not in self.rqdata.target_tids: 2227 if nexttask not in self.rqdata.target_tids:
1990 logger.debug2("Skipping setscene for task %s" % nexttask) 2228 logger.debug2("Skipping setscene for task %s" % nexttask)
1991 self.sq_task_skip(nexttask) 2229 self.sq_task_skip(nexttask)
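Replacing the per-call for loop over sorted_setscene_tids with a persistent itertools.cycle generator lets each dispatcher call resume scanning where the previous one stopped, while loopcount bounds it to one full pass so the main loop is never starved. The resumable round-robin pattern in isolation:

import itertools

class Scanner:
    def __init__(self, tids):
        self.tids = tids
        # cycle() remembers its position between find_next() calls
        self.gen = itertools.cycle(tids)

    def find_next(self, runnable):
        # At most one full pass per call, mirroring the loopcount guard above.
        for _ in range(len(self.tids)):
            tid = next(self.gen)
            if tid in runnable:
                return tid
        return None

s = Scanner(["a", "b", "c"])
print(s.find_next({"b"}))       # "b"
print(s.find_next({"a", "c"}))  # "c" - resumes after "b", not from the start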
@@ -1993,13 +2231,25 @@ class RunQueueExecute:
1993 if nexttask in self.sq_deferred: 2231 if nexttask in self.sq_deferred:
1994 del self.sq_deferred[nexttask] 2232 del self.sq_deferred[nexttask]
1995 return True 2233 return True
2234 if nexttask in self.sqdata.sq_harddeps_rev and not self.sqdata.sq_harddeps_rev[nexttask].issubset(self.scenequeue_covered | self.scenequeue_notcovered):
2235 logger.debug2("Deferring %s due to hard dependencies" % nexttask)
2236 updated = False
2237 for dep in self.sqdata.sq_harddeps_rev[nexttask]:
2238 if dep not in self.sq_needed_harddeps:
2239 logger.debug2("Enabling task %s as it is a hard dependency" % dep)
2240 self.sq_buildable.add(dep)
2241 self.sq_needed_harddeps.add(dep)
2242 updated = True
2243 self.sq_harddep_deferred.add(nexttask)
2244 if updated:
2245 return True
2246 continue
1996 # If covered tasks are running, need to wait for them to complete 2247 # If covered tasks are running, need to wait for them to complete
1997 for t in self.sqdata.sq_covered_tasks[nexttask]: 2248 for t in self.sqdata.sq_covered_tasks[nexttask]:
1998 if t in self.runq_running and t not in self.runq_complete: 2249 if t in self.runq_running and t not in self.runq_complete:
1999 continue 2250 continue
2000 if nexttask in self.sq_deferred: 2251 if nexttask in self.sq_deferred:
2001 if self.sq_deferred[nexttask] not in self.runq_complete: 2252 # Deferred tasks that were still deferred were skipped above so we now need to process
2002 continue
2003 logger.debug("Task %s no longer deferred" % nexttask) 2253 logger.debug("Task %s no longer deferred" % nexttask)
2004 del self.sq_deferred[nexttask] 2254 del self.sq_deferred[nexttask]
2005 valid = self.rq.validate_hashes(set([nexttask]), self.cooker.data, 0, False, summary=False) 2255 valid = self.rq.validate_hashes(set([nexttask]), self.cooker.data, 0, False, summary=False)
@@ -2007,8 +2257,6 @@ class RunQueueExecute:
2007 logger.debug("%s didn't become valid, skipping setscene" % nexttask) 2257 logger.debug("%s didn't become valid, skipping setscene" % nexttask)
2008 self.sq_task_failoutright(nexttask) 2258 self.sq_task_failoutright(nexttask)
2009 return True 2259 return True
2010 else:
2011 self.sqdata.outrightfail.remove(nexttask)
2012 if nexttask in self.sqdata.outrightfail: 2260 if nexttask in self.sqdata.outrightfail:
2013 logger.debug2('No package found, so skipping setscene task %s', nexttask) 2261 logger.debug2('No package found, so skipping setscene task %s', nexttask)
2014 self.sq_task_failoutright(nexttask) 2262 self.sq_task_failoutright(nexttask)
@@ -2040,28 +2288,42 @@ class RunQueueExecute:
2040 self.sq_task_failoutright(task) 2288 self.sq_task_failoutright(task)
2041 return True 2289 return True
2042 2290
2043 startevent = sceneQueueTaskStarted(task, self.sq_stats, self.rq) 2291 startevent = sceneQueueTaskStarted(task, self.stats, self.rq)
2044 bb.event.fire(startevent, self.cfgData) 2292 bb.event.fire(startevent, self.cfgData)
2045 2293
2046 taskdepdata = self.sq_build_taskdepdata(task)
2047
2048 taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn] 2294 taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn]
2049 taskhash = self.rqdata.get_task_hash(task) 2295 realfn = bb.cache.virtualfn2realfn(taskfn)[0]
2050 unihash = self.rqdata.get_task_unihash(task) 2296 runtask = {
2297 'fn' : taskfn,
2298 'task' : task,
2299 'taskname' : taskname,
2300 'taskhash' : self.rqdata.get_task_hash(task),
2301 'unihash' : self.rqdata.get_task_unihash(task),
2302 'quieterrors' : True,
2303 'appends' : self.cooker.collections[mc].get_file_appends(taskfn),
2304 'layername' : self.cooker.collections[mc].calc_bbfile_priority(realfn)[2],
2305 'taskdepdata' : self.sq_build_taskdepdata(task),
2306 'dry_run' : False,
2307 'taskdep': taskdep,
2308 'fakerootenv' : self.rqdata.dataCaches[mc].fakerootenv[taskfn],
2309 'fakerootdirs' : self.rqdata.dataCaches[mc].fakerootdirs[taskfn],
2310 'fakerootnoenv' : self.rqdata.dataCaches[mc].fakerootnoenv[taskfn]
2311 }
2312
2051 if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not self.cooker.configuration.dry_run: 2313 if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not self.cooker.configuration.dry_run:
2052 if not mc in self.rq.fakeworker: 2314 if not mc in self.rq.fakeworker:
2053 self.rq.start_fakeworker(self, mc) 2315 self.rq.start_fakeworker(self, mc)
2054 self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, True, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, False)) + b"</runtask>") 2316 RunQueue.send_pickled_data(self.rq.fakeworker[mc].process, runtask, "runtask")
2055 self.rq.fakeworker[mc].process.stdin.flush() 2317 self.rq.fakeworker[mc].process.stdin.flush()
2056 else: 2318 else:
2057 self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, True, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, False)) + b"</runtask>") 2319 RunQueue.send_pickled_data(self.rq.worker[mc].process, runtask, "runtask")
2058 self.rq.worker[mc].process.stdin.flush() 2320 self.rq.worker[mc].process.stdin.flush()
2059 2321
2060 self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True) 2322 self.build_stamps[task] = bb.parse.siggen.stampfile_mcfn(taskname, taskfn, extrainfo=False)
2061 self.build_stamps2.append(self.build_stamps[task]) 2323 self.build_stamps2.append(self.build_stamps[task])
2062 self.sq_running.add(task) 2324 self.sq_running.add(task)
2063 self.sq_live.add(task) 2325 self.sq_live.add(task)
2064 self.sq_stats.taskActive() 2326 self.stats.updateActiveSetscene(len(self.sq_live))
2065 if self.can_start_task(): 2327 if self.can_start_task():
2066 return True 2328 return True
2067 2329
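The inline b"<runtask>" + pickle.dumps(tuple) writes are replaced by a named-field runtask dict handed to a RunQueue.send_pickled_data() helper, which centralizes the worker protocol and makes the payload self-describing. A rough sketch of what tag-framed pickle writing of this kind might look like; the helper's actual wire format in this commit may differ (it could add a length header, for instance):

import io
import pickle

def send_pickled_data(stream, data, name):
    # Hypothetical framing: <name> + pickled payload + </name>
    tag = name.encode("utf-8")
    stream.write(b"<" + tag + b">" + pickle.dumps(data) + b"</" + tag + b">")

buf = io.BytesIO()
runtask = {"fn": "/r/a.bb", "task": "/r/a.bb:do_compile", "dry_run": False}
send_pickled_data(buf, runtask, "runtask")
print(buf.getvalue()[:9])  # the b'<runtask>' frame opener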
@@ -2092,9 +2354,9 @@ class RunQueueExecute:
2092 if task is not None: 2354 if task is not None:
2093 (mc, fn, taskname, taskfn) = split_tid_mcfn(task) 2355 (mc, fn, taskname, taskfn) = split_tid_mcfn(task)
2094 2356
2095 if self.rqdata.setscenewhitelist is not None: 2357 if self.rqdata.setscene_ignore_tasks is not None:
2096 if self.check_setscenewhitelist(task): 2358 if self.check_setscene_ignore_tasks(task):
2097 self.task_fail(task, "setscene whitelist") 2359 self.task_fail(task, "setscene ignore_tasks")
2098 return True 2360 return True
2099 2361
2100 if task in self.tasks_covered: 2362 if task in self.tasks_covered:
@@ -2117,18 +2379,32 @@ class RunQueueExecute:
2117 self.runq_running.add(task) 2379 self.runq_running.add(task)
2118 self.stats.taskActive() 2380 self.stats.taskActive()
2119 if not (self.cooker.configuration.dry_run or self.rqdata.setscene_enforce): 2381 if not (self.cooker.configuration.dry_run or self.rqdata.setscene_enforce):
2120 bb.build.make_stamp(taskname, self.rqdata.dataCaches[mc], taskfn) 2382 bb.build.make_stamp_mcfn(taskname, taskfn)
2121 self.task_complete(task) 2383 self.task_complete(task)
2122 return True 2384 return True
2123 else: 2385 else:
2124 startevent = runQueueTaskStarted(task, self.stats, self.rq) 2386 startevent = runQueueTaskStarted(task, self.stats, self.rq)
2125 bb.event.fire(startevent, self.cfgData) 2387 bb.event.fire(startevent, self.cfgData)
2126 2388
2127 taskdepdata = self.build_taskdepdata(task)
2128
2129 taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn] 2389 taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn]
2130 taskhash = self.rqdata.get_task_hash(task) 2390 realfn = bb.cache.virtualfn2realfn(taskfn)[0]
2131 unihash = self.rqdata.get_task_unihash(task) 2391 runtask = {
2392 'fn' : taskfn,
2393 'task' : task,
2394 'taskname' : taskname,
2395 'taskhash' : self.rqdata.get_task_hash(task),
2396 'unihash' : self.rqdata.get_task_unihash(task),
2397 'quieterrors' : False,
2398 'appends' : self.cooker.collections[mc].get_file_appends(taskfn),
2399 'layername' : self.cooker.collections[mc].calc_bbfile_priority(realfn)[2],
2400 'taskdepdata' : self.build_taskdepdata(task),
2401 'dry_run' : self.rqdata.setscene_enforce,
2402 'taskdep': taskdep,
2403 'fakerootenv' : self.rqdata.dataCaches[mc].fakerootenv[taskfn],
2404 'fakerootdirs' : self.rqdata.dataCaches[mc].fakerootdirs[taskfn],
2405 'fakerootnoenv' : self.rqdata.dataCaches[mc].fakerootnoenv[taskfn]
2406 }
2407
2132 if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not (self.cooker.configuration.dry_run or self.rqdata.setscene_enforce): 2408 if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not (self.cooker.configuration.dry_run or self.rqdata.setscene_enforce):
2133 if not mc in self.rq.fakeworker: 2409 if not mc in self.rq.fakeworker:
2134 try: 2410 try:
@@ -2138,31 +2414,31 @@ class RunQueueExecute:
2138 self.rq.state = runQueueFailed 2414 self.rq.state = runQueueFailed
2139 self.stats.taskFailed() 2415 self.stats.taskFailed()
2140 return True 2416 return True
2141 self.rq.fakeworker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, False, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, self.rqdata.setscene_enforce)) + b"</runtask>") 2417 RunQueue.send_pickled_data(self.rq.fakeworker[mc].process, runtask, "runtask")
2142 self.rq.fakeworker[mc].process.stdin.flush() 2418 self.rq.fakeworker[mc].process.stdin.flush()
2143 else: 2419 else:
2144 self.rq.worker[mc].process.stdin.write(b"<runtask>" + pickle.dumps((taskfn, task, taskname, taskhash, unihash, False, self.cooker.collections[mc].get_file_appends(taskfn), taskdepdata, self.rqdata.setscene_enforce)) + b"</runtask>") 2420 RunQueue.send_pickled_data(self.rq.worker[mc].process, runtask, "runtask")
2145 self.rq.worker[mc].process.stdin.flush() 2421 self.rq.worker[mc].process.stdin.flush()
2146 2422
2147 self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True) 2423 self.build_stamps[task] = bb.parse.siggen.stampfile_mcfn(taskname, taskfn, extrainfo=False)
2148 self.build_stamps2.append(self.build_stamps[task]) 2424 self.build_stamps2.append(self.build_stamps[task])
2149 self.runq_running.add(task) 2425 self.runq_running.add(task)
2150 self.stats.taskActive() 2426 self.stats.taskActive()
2151 if self.can_start_task(): 2427 if self.can_start_task():
2152 return True 2428 return True
2153 2429
2154 if self.stats.active > 0 or self.sq_stats.active > 0: 2430 if self.stats.active > 0 or self.sq_live:
2155 self.rq.read_workers() 2431 self.rq.read_workers()
2156 return self.rq.active_fds() 2432 return self.rq.active_fds()
2157 2433
2158 # No more tasks can be run. If we have deferred setscene tasks we should run them. 2434 # No more tasks can be run. If we have deferred setscene tasks we should run them.
2159 if self.sq_deferred: 2435 if self.sq_deferred:
2160 tid = self.sq_deferred.pop(list(self.sq_deferred.keys())[0]) 2436 deferred_tid = list(self.sq_deferred.keys())[0]
2161 logger.warning("Runqeueue deadlocked on deferred tasks, forcing task %s" % tid) 2437 blocking_tid = self.sq_deferred.pop(deferred_tid)
2162 self.sq_task_failoutright(tid) 2438 logger.warning("Runqueue deadlocked on deferred tasks, forcing task %s blocked by %s" % (deferred_tid, blocking_tid))
2163 return True 2439 return True
2164 2440
2165 if len(self.failed_tids) != 0: 2441 if self.failed_tids:
2166 self.rq.state = runQueueFailed 2442 self.rq.state = runQueueFailed
2167 return True 2443 return True
2168 2444
@@ -2195,6 +2471,25 @@ class RunQueueExecute:
2195 ret.add(dep) 2471 ret.add(dep)
2196 return ret 2472 return ret
2197 2473
2474 # Build the individual cache entries in advance once to save time
2475 def build_taskdepdata_cache(self):
2476 taskdepdata_cache = {}
2477 for task in self.rqdata.runtaskentries:
2478 (mc, fn, taskname, taskfn) = split_tid_mcfn(task)
2479 taskdepdata_cache[task] = bb.TaskData(
2480 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn],
2481 taskname = taskname,
2482 fn = fn,
2483 deps = self.filtermcdeps(task, mc, self.rqdata.runtaskentries[task].depends),
2484 provides = self.rqdata.dataCaches[mc].fn_provides[taskfn],
2485 taskhash = self.rqdata.runtaskentries[task].hash,
2486 unihash = self.rqdata.runtaskentries[task].unihash,
2487 hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn],
2488 taskhash_deps = self.rqdata.runtaskentries[task].taskhash_deps,
2489 )
2490
2491 self.taskdepdata_cache = taskdepdata_cache
2492
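Since every field of a task's entry except its unihash is stable once the runqueue is built, build_taskdepdata_cache() computes the entries a single time and build_taskdepdata() below only patches the unihash via namedtuple._replace(). The cache-then-patch pattern with a cut-down tuple (the real bb.TaskData carries more fields):

from collections import namedtuple

# Cut-down stand-in for bb.TaskData
TaskData = namedtuple("TaskData", "pn taskname deps unihash")

cache = {"a.bb:do_compile": TaskData("a", "do_compile", [], "uni-old")}

def refresh(tid, new_unihash):
    # _replace() returns a new tuple; only the volatile field is rewritten
    cache[tid] = cache[tid]._replace(unihash=new_unihash)
    return cache[tid]

print(refresh("a.bb:do_compile", "uni-new"))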
2198 # We filter out multiconfig dependencies from taskdepdata we pass to the tasks 2493 # We filter out multiconfig dependencies from taskdepdata we pass to the tasks
2199 # as most code can't handle them 2494 # as most code can't handle them
2200 def build_taskdepdata(self, task): 2495 def build_taskdepdata(self, task):
@@ -2206,15 +2501,11 @@ class RunQueueExecute:
2206 while next: 2501 while next:
2207 additional = [] 2502 additional = []
2208 for revdep in next: 2503 for revdep in next:
2209 (mc, fn, taskname, taskfn) = split_tid_mcfn(revdep) 2504 self.taskdepdata_cache[revdep] = self.taskdepdata_cache[revdep]._replace(
2210 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn] 2505 unihash=self.rqdata.runtaskentries[revdep].unihash
2211 deps = self.rqdata.runtaskentries[revdep].depends 2506 )
2212 provides = self.rqdata.dataCaches[mc].fn_provides[taskfn] 2507 taskdepdata[revdep] = self.taskdepdata_cache[revdep]
2213 taskhash = self.rqdata.runtaskentries[revdep].hash 2508 for revdep2 in self.taskdepdata_cache[revdep].deps:
2214 unihash = self.rqdata.runtaskentries[revdep].unihash
2215 deps = self.filtermcdeps(task, mc, deps)
2216 taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash, unihash]
2217 for revdep2 in deps:
2218 if revdep2 not in taskdepdata: 2509 if revdep2 not in taskdepdata:
2219 additional.append(revdep2) 2510 additional.append(revdep2)
2220 next = additional 2511 next = additional
@@ -2228,7 +2519,7 @@ class RunQueueExecute:
2228 return 2519 return
2229 2520
2230 notcovered = set(self.scenequeue_notcovered) 2521 notcovered = set(self.scenequeue_notcovered)
2231 notcovered |= self.cantskip 2522 notcovered |= self.sqdata.cantskip
2232 for tid in self.scenequeue_notcovered: 2523 for tid in self.scenequeue_notcovered:
2233 notcovered |= self.sqdata.sq_covered_tasks[tid] 2524 notcovered |= self.sqdata.sq_covered_tasks[tid]
2234 notcovered |= self.sqdata.unskippable.difference(self.rqdata.runq_setscene_tids) 2525 notcovered |= self.sqdata.unskippable.difference(self.rqdata.runq_setscene_tids)
@@ -2241,7 +2532,7 @@ class RunQueueExecute:
2241 covered.intersection_update(self.tasks_scenequeue_done) 2532 covered.intersection_update(self.tasks_scenequeue_done)
2242 2533
2243 for tid in notcovered | covered: 2534 for tid in notcovered | covered:
2244 if len(self.rqdata.runtaskentries[tid].depends) == 0: 2535 if not self.rqdata.runtaskentries[tid].depends:
2245 self.setbuildable(tid) 2536 self.setbuildable(tid)
2246 elif self.rqdata.runtaskentries[tid].depends.issubset(self.runq_complete): 2537 elif self.rqdata.runtaskentries[tid].depends.issubset(self.runq_complete):
2247 self.setbuildable(tid) 2538 self.setbuildable(tid)
@@ -2273,10 +2564,16 @@ class RunQueueExecute:
2273 self.updated_taskhash_queue.remove((tid, unihash)) 2564 self.updated_taskhash_queue.remove((tid, unihash))
2274 2565
2275 if unihash != self.rqdata.runtaskentries[tid].unihash: 2566 if unihash != self.rqdata.runtaskentries[tid].unihash:
2276 hashequiv_logger.verbose("Task %s unihash changed to %s" % (tid, unihash)) 2567 # Make sure we rehash any other tasks with the same task hash that we're deferred against.
2277 self.rqdata.runtaskentries[tid].unihash = unihash 2568 torehash = [tid]
2278 bb.parse.siggen.set_unihash(tid, unihash) 2569 for deftid in self.sq_deferred:
2279 toprocess.add(tid) 2570 if self.sq_deferred[deftid] == tid:
2571 torehash.append(deftid)
2572 for hashtid in torehash:
2573 hashequiv_logger.verbose("Task %s unihash changed to %s" % (hashtid, unihash))
2574 self.rqdata.runtaskentries[hashtid].unihash = unihash
2575 bb.parse.siggen.set_unihash(hashtid, unihash)
2576 toprocess.add(hashtid)
2280 2577
2281 # Work out all tasks which depend upon these 2578 # Work out all tasks which depend upon these
2282 total = set() 2579 total = set()
@@ -2294,23 +2591,33 @@ class RunQueueExecute:
2294 # Now iterate those tasks in dependency order to regenerate their taskhash/unihash 2591 # Now iterate those tasks in dependency order to regenerate their taskhash/unihash
2295 next = set() 2592 next = set()
2296 for p in total: 2593 for p in total:
2297 if len(self.rqdata.runtaskentries[p].depends) == 0: 2594 if not self.rqdata.runtaskentries[p].depends:
2298 next.add(p) 2595 next.add(p)
2299 elif self.rqdata.runtaskentries[p].depends.isdisjoint(total): 2596 elif self.rqdata.runtaskentries[p].depends.isdisjoint(total):
2300 next.add(p) 2597 next.add(p)
2301 2598
2599 starttime = time.time()
2600 lasttime = starttime
2601
2302 # When an item doesn't have dependencies in total, we can process it. Drop items from total when handled 2602 # When an item doesn't have dependencies in total, we can process it. Drop items from total when handled
2303 while next: 2603 while next:
2304 current = next.copy() 2604 current = next.copy()
2305 next = set() 2605 next = set()
2606 ready = {}
2306 for tid in current: 2607 for tid in current:
2307 if len(self.rqdata.runtaskentries[p].depends) and not self.rqdata.runtaskentries[tid].depends.isdisjoint(total): 2608 if self.rqdata.runtaskentries[p].depends and not self.rqdata.runtaskentries[tid].depends.isdisjoint(total):
2308 continue 2609 continue
2610 # get_taskhash for a given tid *must* be called before get_unihash* below
2611 ready[tid] = bb.parse.siggen.get_taskhash(tid, self.rqdata.runtaskentries[tid].depends, self.rqdata.dataCaches)
2612
2613 unihashes = bb.parse.siggen.get_unihashes(ready.keys())
2614
2615 for tid in ready:
2309 orighash = self.rqdata.runtaskentries[tid].hash 2616 orighash = self.rqdata.runtaskentries[tid].hash
2310 dc = bb.parse.siggen.get_data_caches(self.rqdata.dataCaches, mc_from_tid(tid)) 2617 newhash = ready[tid]
2311 newhash = bb.parse.siggen.get_taskhash(tid, self.rqdata.runtaskentries[tid].depends, dc)
2312 origuni = self.rqdata.runtaskentries[tid].unihash 2618 origuni = self.rqdata.runtaskentries[tid].unihash
2313 newuni = bb.parse.siggen.get_unihash(tid) 2619 newuni = unihashes[tid]
2620
2314 # FIXME, need to check it can come from sstate at all for determinism? 2621 # FIXME, need to check it can come from sstate at all for determinism?
2315 remapped = False 2622 remapped = False
2316 if newuni == origuni: 2623 if newuni == origuni:
@@ -2331,12 +2638,21 @@ class RunQueueExecute:
2331 next |= self.rqdata.runtaskentries[tid].revdeps 2638 next |= self.rqdata.runtaskentries[tid].revdeps
2332 total.remove(tid) 2639 total.remove(tid)
2333 next.intersection_update(total) 2640 next.intersection_update(total)
2641 bb.event.check_for_interrupts(self.cooker.data)
2642
2643 if time.time() > (lasttime + 30):
2644 lasttime = time.time()
2645 hashequiv_logger.verbose("Rehash loop slow progress: %s in %s" % (len(total), lasttime - starttime))
2646
2647 endtime = time.time()
2648 if (endtime-starttime > 60):
2649 hashequiv_logger.verbose("Rehash loop took more than 60s: %s" % (endtime-starttime))
2334 2650
2335 if changed: 2651 if changed:
2336 for mc in self.rq.worker: 2652 for mc in self.rq.worker:
2337 self.rq.worker[mc].process.stdin.write(b"<newtaskhashes>" + pickle.dumps(bb.parse.siggen.get_taskhashes()) + b"</newtaskhashes>") 2653 RunQueue.send_pickled_data(self.rq.worker[mc].process, bb.parse.siggen.get_taskhashes(), "newtaskhashes")
2338 for mc in self.rq.fakeworker: 2654 for mc in self.rq.fakeworker:
2339 self.rq.fakeworker[mc].process.stdin.write(b"<newtaskhashes>" + pickle.dumps(bb.parse.siggen.get_taskhashes()) + b"</newtaskhashes>") 2655 RunQueue.send_pickled_data(self.rq.fakeworker[mc].process, bb.parse.siggen.get_taskhashes(), "newtaskhashes")
2340 2656
2341 hashequiv_logger.debug(pprint.pformat("Tasks changed:\n%s" % (changed))) 2657 hashequiv_logger.debug(pprint.pformat("Tasks changed:\n%s" % (changed)))
2342 2658
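The reworked loop rehashes tasks in dependency waves: each pass computes taskhashes for every currently ready tid, then resolves the whole batch with a single get_unihashes() call rather than one server round-trip per task, and reports progress if it runs long. A simplified model of the wave scheduling, with the hashing itself elided:

import time

def rehash_waves(deps, total):
    # deps: tid -> set of direct dependencies; total: tids needing a rehash
    done, start = set(), time.time()
    while total:
        wave = {t for t in total if deps[t].isdisjoint(total)}
        if not wave:
            raise RuntimeError("dependency cycle in rehash set")
        # The runqueue calls get_taskhash() per tid here, then one batched
        # get_unihashes() query for the whole wave.
        done |= wave
        total -= wave
        if time.time() > start + 30:
            print("Rehash loop slow progress: %d left" % len(total))
    return done

deps = {"a": set(), "b": {"a"}, "c": {"a", "b"}}
print(sorted(rehash_waves(deps, {"a", "b", "c"})))  # ['a', 'b', 'c']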
@@ -2370,7 +2686,7 @@ class RunQueueExecute:
2370 self.tasks_scenequeue_done.remove(tid) 2686 self.tasks_scenequeue_done.remove(tid)
2371 for dep in self.sqdata.sq_covered_tasks[tid]: 2687 for dep in self.sqdata.sq_covered_tasks[tid]:
2372 if dep in self.runq_complete and dep not in self.runq_tasksrun: 2688 if dep in self.runq_complete and dep not in self.runq_tasksrun:
2373 bb.error("Task %s marked as completed but now needing to rerun? Aborting build." % dep) 2689 bb.error("Task %s marked as completed but now needing to rerun? Halting build." % dep)
2374 self.failed_tids.append(tid) 2690 self.failed_tids.append(tid)
2375 self.rq.state = runQueueCleanUp 2691 self.rq.state = runQueueCleanUp
2376 return 2692 return
@@ -2383,17 +2699,6 @@ class RunQueueExecute:
2383 self.sq_buildable.remove(tid) 2699 self.sq_buildable.remove(tid)
2384 if tid in self.sq_running: 2700 if tid in self.sq_running:
2385 self.sq_running.remove(tid) 2701 self.sq_running.remove(tid)
2386 harddepfail = False
2387 for t in self.sqdata.sq_harddeps:
2388 if tid in self.sqdata.sq_harddeps[t] and t in self.scenequeue_notcovered:
2389 harddepfail = True
2390 break
2391 if not harddepfail and self.sqdata.sq_revdeps[tid].issubset(self.scenequeue_covered | self.scenequeue_notcovered):
2392 if tid not in self.sq_buildable:
2393 self.sq_buildable.add(tid)
2394 if len(self.sqdata.sq_revdeps[tid]) == 0:
2395 self.sq_buildable.add(tid)
2396
2397 if tid in self.sqdata.outrightfail: 2702 if tid in self.sqdata.outrightfail:
2398 self.sqdata.outrightfail.remove(tid) 2703 self.sqdata.outrightfail.remove(tid)
2399 if tid in self.scenequeue_notcovered: 2704 if tid in self.scenequeue_notcovered:
@@ -2404,7 +2709,7 @@ class RunQueueExecute:
2404 self.scenequeue_notneeded.remove(tid) 2709 self.scenequeue_notneeded.remove(tid)
2405 2710
2406 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) 2711 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
2407 self.sqdata.stamps[tid] = bb.build.stampfile(taskname + "_setscene", self.rqdata.dataCaches[mc], taskfn, noextra=True) 2712 self.sqdata.stamps[tid] = bb.parse.siggen.stampfile_mcfn(taskname, taskfn, extrainfo=False)
2408 2713
2409 if tid in self.stampcache: 2714 if tid in self.stampcache:
2410 del self.stampcache[tid] 2715 del self.stampcache[tid]
@@ -2412,29 +2717,67 @@ class RunQueueExecute:
2412 if tid in self.build_stamps: 2717 if tid in self.build_stamps:
2413 del self.build_stamps[tid] 2718 del self.build_stamps[tid]
2414 2719
2415 update_tasks.append((tid, harddepfail, tid in self.sqdata.valid)) 2720 update_tasks.append(tid)
2416 2721
2417 if update_tasks: 2722 update_tasks2 = []
2723 for tid in update_tasks:
2724 harddepfail = False
2725 for t in self.sqdata.sq_harddeps_rev[tid]:
2726 if t in self.scenequeue_notcovered:
2727 harddepfail = True
2728 break
2729 if not harddepfail and self.sqdata.sq_revdeps[tid].issubset(self.scenequeue_covered | self.scenequeue_notcovered):
2730 if tid not in self.sq_buildable:
2731 self.sq_buildable.add(tid)
2732 if not self.sqdata.sq_revdeps[tid]:
2733 self.sq_buildable.add(tid)
2734
2735 update_tasks2.append((tid, harddepfail, tid in self.sqdata.valid))
2736
2737 if update_tasks2:
2418 self.sqdone = False 2738 self.sqdone = False
2419 update_scenequeue_data([t[0] for t in update_tasks], self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=False) 2739 for mc in sorted(self.sqdata.multiconfigs):
2740 for tid in sorted([t[0] for t in update_tasks2]):
2741 if mc_from_tid(tid) != mc:
2742 continue
2743 h = pending_hash_index(tid, self.rqdata)
2744 if h in self.sqdata.hashes and tid != self.sqdata.hashes[h]:
2745 self.sq_deferred[tid] = self.sqdata.hashes[h]
2746 bb.note("Deferring %s after %s" % (tid, self.sqdata.hashes[h]))
2747 update_scenequeue_data([t[0] for t in update_tasks2], self.sqdata, self.rqdata, self.rq, self.cooker, self.stampcache, self, summary=False)
2420 2748
2421 for (tid, harddepfail, origvalid) in update_tasks: 2749 for (tid, harddepfail, origvalid) in update_tasks2:
2422 if tid in self.sqdata.valid and not origvalid: 2750 if tid in self.sqdata.valid and not origvalid:
2423 hashequiv_logger.verbose("Setscene task %s became valid" % tid) 2751 hashequiv_logger.verbose("Setscene task %s became valid" % tid)
2424 if harddepfail: 2752 if harddepfail:
2753 logger.debug2("%s has an unavailable hard dependency so skipping" % (tid))
2425 self.sq_task_failoutright(tid) 2754 self.sq_task_failoutright(tid)
2426 2755
2427 if changed: 2756 if changed:
2757 self.stats.updateCovered(len(self.scenequeue_covered), len(self.scenequeue_notcovered))
2758 self.sq_needed_harddeps = set()
2759 self.sq_harddep_deferred = set()
2428 self.holdoff_need_update = True 2760 self.holdoff_need_update = True
2429 2761
2430 def scenequeue_updatecounters(self, task, fail=False): 2762 def scenequeue_updatecounters(self, task, fail=False):
2431 2763
2432 for dep in sorted(self.sqdata.sq_deps[task]): 2764 if fail and task in self.sqdata.sq_harddeps:
2433 if fail and task in self.sqdata.sq_harddeps and dep in self.sqdata.sq_harddeps[task]: 2765 for dep in sorted(self.sqdata.sq_harddeps[task]):
2766 if dep in self.scenequeue_covered or dep in self.scenequeue_notcovered:
2767 # dependency could be already processed, e.g. noexec setscene task
2768 continue
2769 noexec, stamppresent = check_setscene_stamps(dep, self.rqdata, self.rq, self.stampcache)
2770 if noexec or stamppresent:
2771 continue
2434 logger.debug2("%s was unavailable and is a hard dependency of %s so skipping" % (task, dep)) 2772 logger.debug2("%s was unavailable and is a hard dependency of %s so skipping" % (task, dep))
2435 self.sq_task_failoutright(dep) 2773 self.sq_task_failoutright(dep)
2436 continue 2774 continue
2437 if self.sqdata.sq_revdeps[dep].issubset(self.scenequeue_covered | self.scenequeue_notcovered): 2775
2776 # For performance, only compute allcovered once if needed
2777 if self.sqdata.sq_deps[task]:
2778 allcovered = self.scenequeue_covered | self.scenequeue_notcovered
2779 for dep in sorted(self.sqdata.sq_deps[task]):
2780 if self.sqdata.sq_revdeps[dep].issubset(allcovered):
2438 if dep not in self.sq_buildable: 2781 if dep not in self.sq_buildable:
2439 self.sq_buildable.add(dep) 2782 self.sq_buildable.add(dep)
2440 2783
@@ -2452,6 +2795,14 @@ class RunQueueExecute:
2452 new.add(dep) 2795 new.add(dep)
2453 next = new 2796 next = new
2454 2797
2798 # If this task was one which other setscene tasks have a hard dependency upon, we need
2799 # to walk through the hard dependencies and allow execution of those which have completed dependencies.
2800 if task in self.sqdata.sq_harddeps:
2801 for dep in self.sq_harddep_deferred.copy():
2802 if self.sqdata.sq_harddeps_rev[dep].issubset(self.scenequeue_covered | self.scenequeue_notcovered):
2803 self.sq_harddep_deferred.remove(dep)
2804
2805 self.stats.updateCovered(len(self.scenequeue_covered), len(self.scenequeue_notcovered))
2455 self.holdoff_need_update = True 2806 self.holdoff_need_update = True
2456 2807
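scenequeue_updatecounters() now also revisits sq_harddep_deferred: once every hard dependency of a deferred setscene task has been processed (covered or not covered), the task is released for execution. A toy version of that release test, assuming harddeps_rev maps each tid to the set of its hard dependencies (tids invented):

def release_harddeps(deferred, harddeps_rev, processed):
    # Release tasks whose hard dependencies are all processed, whether they
    # were covered by sstate or definitively not covered.
    for tid in sorted(deferred.copy()):
        if harddeps_rev[tid] <= processed:
            deferred.discard(tid)
    return deferred

deferred = {"img:do_rootfs"}
harddeps_rev = {"img:do_rootfs": {"pkg:do_package_write_rpm"}}
print(release_harddeps(deferred, harddeps_rev, {"pkg:do_package_write_rpm"}))
# -> set(): the deferred setscene task may now be attempted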
2457 def sq_task_completeoutright(self, task): 2808 def sq_task_completeoutright(self, task):
@@ -2466,22 +2817,20 @@ class RunQueueExecute:
2466 self.scenequeue_updatecounters(task) 2817 self.scenequeue_updatecounters(task)
2467 2818
2468 def sq_check_taskfail(self, task): 2819 def sq_check_taskfail(self, task):
2469 if self.rqdata.setscenewhitelist is not None: 2820 if self.rqdata.setscene_ignore_tasks is not None:
2470 realtask = task.split('_setscene')[0] 2821 realtask = task.split('_setscene')[0]
2471 (mc, fn, taskname, taskfn) = split_tid_mcfn(realtask) 2822 (mc, fn, taskname, taskfn) = split_tid_mcfn(realtask)
2472 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn] 2823 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
2473 if not check_setscene_enforce_whitelist(pn, taskname, self.rqdata.setscenewhitelist): 2824 if not check_setscene_enforce_ignore_tasks(pn, taskname, self.rqdata.setscene_ignore_tasks):
2474 logger.error('Task %s.%s failed' % (pn, taskname + "_setscene")) 2825 logger.error('Task %s.%s failed' % (pn, taskname + "_setscene"))
2475 self.rq.state = runQueueCleanUp 2826 self.rq.state = runQueueCleanUp
2476 2827
2477 def sq_task_complete(self, task): 2828 def sq_task_complete(self, task):
2478 self.sq_stats.taskCompleted() 2829 bb.event.fire(sceneQueueTaskCompleted(task, self.stats, self.rq), self.cfgData)
2479 bb.event.fire(sceneQueueTaskCompleted(task, self.sq_stats, self.rq), self.cfgData)
2480 self.sq_task_completeoutright(task) 2830 self.sq_task_completeoutright(task)
2481 2831
2482 def sq_task_fail(self, task, result): 2832 def sq_task_fail(self, task, result):
2483 self.sq_stats.taskFailed() 2833 bb.event.fire(sceneQueueTaskFailed(task, self.stats, result, self), self.cfgData)
2484 bb.event.fire(sceneQueueTaskFailed(task, self.sq_stats, result, self), self.cfgData)
2485 self.scenequeue_notcovered.add(task) 2834 self.scenequeue_notcovered.add(task)
2486 self.scenequeue_updatecounters(task, True) 2835 self.scenequeue_updatecounters(task, True)
2487 self.sq_check_taskfail(task) 2836 self.sq_check_taskfail(task)
@@ -2489,8 +2838,6 @@ class RunQueueExecute:
2489 def sq_task_failoutright(self, task): 2838 def sq_task_failoutright(self, task):
2490 self.sq_running.add(task) 2839 self.sq_running.add(task)
2491 self.sq_buildable.add(task) 2840 self.sq_buildable.add(task)
2492 self.sq_stats.taskSkipped()
2493 self.sq_stats.taskCompleted()
2494 self.scenequeue_notcovered.add(task) 2841 self.scenequeue_notcovered.add(task)
2495 self.scenequeue_updatecounters(task, True) 2842 self.scenequeue_updatecounters(task, True)
2496 2843
@@ -2498,8 +2845,6 @@ class RunQueueExecute:
2498 self.sq_running.add(task) 2845 self.sq_running.add(task)
2499 self.sq_buildable.add(task) 2846 self.sq_buildable.add(task)
2500 self.sq_task_completeoutright(task) 2847 self.sq_task_completeoutright(task)
2501 self.sq_stats.taskSkipped()
2502 self.sq_stats.taskCompleted()
2503 2848
2504 def sq_build_taskdepdata(self, task): 2849 def sq_build_taskdepdata(self, task):
2505 def getsetscenedeps(tid): 2850 def getsetscenedeps(tid):
@@ -2525,12 +2870,19 @@ class RunQueueExecute:
2525 additional = [] 2870 additional = []
2526 for revdep in next: 2871 for revdep in next:
2527 (mc, fn, taskname, taskfn) = split_tid_mcfn(revdep) 2872 (mc, fn, taskname, taskfn) = split_tid_mcfn(revdep)
2528 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
2529 deps = getsetscenedeps(revdep) 2873 deps = getsetscenedeps(revdep)
2530 provides = self.rqdata.dataCaches[mc].fn_provides[taskfn] 2874
2531 taskhash = self.rqdata.runtaskentries[revdep].hash 2875 taskdepdata[revdep] = bb.TaskData(
2532 unihash = self.rqdata.runtaskentries[revdep].unihash 2876 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn],
2533 taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash, unihash] 2877 taskname = taskname,
2878 fn = fn,
2879 deps = deps,
2880 provides = self.rqdata.dataCaches[mc].fn_provides[taskfn],
2881 taskhash = self.rqdata.runtaskentries[revdep].hash,
2882 unihash = self.rqdata.runtaskentries[revdep].unihash,
2883 hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn],
2884 taskhash_deps = self.rqdata.runtaskentries[revdep].taskhash_deps,
2885 )
2534 for revdep2 in deps: 2886 for revdep2 in deps:
2535 if revdep2 not in taskdepdata: 2887 if revdep2 not in taskdepdata:
2536 additional.append(revdep2) 2888 additional.append(revdep2)
@@ -2539,8 +2891,8 @@ class RunQueueExecute:
2539 #bb.note("Task %s: " % task + str(taskdepdata).replace("], ", "],\n")) 2891 #bb.note("Task %s: " % task + str(taskdepdata).replace("], ", "],\n"))
2540 return taskdepdata 2892 return taskdepdata
2541 2893
2542 def check_setscenewhitelist(self, tid): 2894 def check_setscene_ignore_tasks(self, tid):
2543 # Check task that is going to run against the whitelist 2895 # Check task that is going to run against the ignore tasks list
2544 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) 2896 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
2545 # Ignore covered tasks 2897 # Ignore covered tasks
2546 if tid in self.tasks_covered: 2898 if tid in self.tasks_covered:
@@ -2554,14 +2906,15 @@ class RunQueueExecute:
2554 return False 2906 return False
2555 2907
2556 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn] 2908 pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn]
2557 if not check_setscene_enforce_whitelist(pn, taskname, self.rqdata.setscenewhitelist): 2909 if not check_setscene_enforce_ignore_tasks(pn, taskname, self.rqdata.setscene_ignore_tasks):
2558 if tid in self.rqdata.runq_setscene_tids: 2910 if tid in self.rqdata.runq_setscene_tids:
2559 msg = 'Task %s.%s attempted to execute unexpectedly and should have been setscened' % (pn, taskname) 2911 msg = ['Task %s.%s attempted to execute unexpectedly and should have been setscened' % (pn, taskname)]
2560 else: 2912 else:
2561 msg = 'Task %s.%s attempted to execute unexpectedly' % (pn, taskname) 2913 msg = ['Task %s.%s attempted to execute unexpectedly' % (pn, taskname)]
2562 for t in self.scenequeue_notcovered: 2914 for t in self.scenequeue_notcovered:
2563 msg = msg + "\nTask %s, unihash %s, taskhash %s" % (t, self.rqdata.runtaskentries[t].unihash, self.rqdata.runtaskentries[t].hash) 2915 msg.append("\nTask %s, unihash %s, taskhash %s" % (t, self.rqdata.runtaskentries[t].unihash, self.rqdata.runtaskentries[t].hash))
2564 logger.error(msg + '\nThis is usually due to missing setscene tasks. Those missing in this build were: %s' % pprint.pformat(self.scenequeue_notcovered)) 2916 msg.append('\nThis is usually due to missing setscene tasks. Those missing in this build were: %s' % pprint.pformat(self.scenequeue_notcovered))
2917 logger.error("".join(msg))
2565 return True 2918 return True
2566 return False 2919 return False
2567 2920
@@ -2573,6 +2926,7 @@ class SQData(object):
2573 self.sq_revdeps = {} 2926 self.sq_revdeps = {}
2574 # Injected inter-setscene task dependencies 2927 # Injected inter-setscene task dependencies
2575 self.sq_harddeps = {} 2928 self.sq_harddeps = {}
2929 self.sq_harddeps_rev = {}
2576 # Cache of stamp files so duplicates can't run in parallel 2930 # Cache of stamp files so duplicates can't run in parallel
2577 self.stamps = {} 2931 self.stamps = {}
2578 # Setscene tasks directly depended upon by the build 2932 # Setscene tasks directly depended upon by the build
@@ -2582,12 +2936,17 @@ class SQData(object):
2582 # A list of normal tasks a setscene task covers 2936 # A list of normal tasks a setscene task covers
2583 self.sq_covered_tasks = {} 2937 self.sq_covered_tasks = {}
2584 2938
2585 def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq): 2939 def build_scenequeue_data(sqdata, rqdata, sqrq):
2586 2940
2587 sq_revdeps = {} 2941 sq_revdeps = {}
2588 sq_revdeps_squash = {} 2942 sq_revdeps_squash = {}
2589 sq_collated_deps = {} 2943 sq_collated_deps = {}
2590 2944
2945 # We can't skip specified target tasks which aren't setscene tasks
2946 sqdata.cantskip = set(rqdata.target_tids)
2947 sqdata.cantskip.difference_update(rqdata.runq_setscene_tids)
2948 sqdata.cantskip.intersection_update(rqdata.runtaskentries)
2949
2591 # We need to construct a dependency graph for the setscene functions. Intermediate 2950 # We need to construct a dependency graph for the setscene functions. Intermediate
2592 # dependencies between the setscene tasks only complicate the code. This code 2951 # dependencies between the setscene tasks only complicate the code. This code
2593 # therefore aims to collapse the huge runqueue dependency tree into a smaller one 2952 # therefore aims to collapse the huge runqueue dependency tree into a smaller one
@@ -2600,7 +2959,7 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
2600 for tid in rqdata.runtaskentries: 2959 for tid in rqdata.runtaskentries:
2601 sq_revdeps[tid] = copy.copy(rqdata.runtaskentries[tid].revdeps) 2960 sq_revdeps[tid] = copy.copy(rqdata.runtaskentries[tid].revdeps)
2602 sq_revdeps_squash[tid] = set() 2961 sq_revdeps_squash[tid] = set()
2603 if (len(sq_revdeps[tid]) == 0) and tid not in rqdata.runq_setscene_tids: 2962 if not sq_revdeps[tid] and tid not in rqdata.runq_setscene_tids:
2604 #bb.warn("Added endpoint %s" % (tid)) 2963 #bb.warn("Added endpoint %s" % (tid))
2605 endpoints[tid] = set() 2964 endpoints[tid] = set()
2606 2965
@@ -2634,16 +2993,15 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
2634 sq_revdeps_squash[point] = set() 2993 sq_revdeps_squash[point] = set()
2635 if point in rqdata.runq_setscene_tids: 2994 if point in rqdata.runq_setscene_tids:
2636 sq_revdeps_squash[point] = tasks 2995 sq_revdeps_squash[point] = tasks
2637 tasks = set()
2638 continue 2996 continue
2639 for dep in rqdata.runtaskentries[point].depends: 2997 for dep in rqdata.runtaskentries[point].depends:
2640 if point in sq_revdeps[dep]: 2998 if point in sq_revdeps[dep]:
2641 sq_revdeps[dep].remove(point) 2999 sq_revdeps[dep].remove(point)
2642 if tasks: 3000 if tasks:
2643 sq_revdeps_squash[dep] |= tasks 3001 sq_revdeps_squash[dep] |= tasks
2644 if len(sq_revdeps[dep]) == 0 and dep not in rqdata.runq_setscene_tids: 3002 if not sq_revdeps[dep] and dep not in rqdata.runq_setscene_tids:
2645 newendpoints[dep] = task 3003 newendpoints[dep] = task
2646 if len(newendpoints) != 0: 3004 if newendpoints:
2647 process_endpoints(newendpoints) 3005 process_endpoints(newendpoints)
2648 3006
2649 process_endpoints(endpoints) 3007 process_endpoints(endpoints)
@@ -2655,16 +3013,16 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
2655 # Take the build endpoints (no revdeps) and find the sstate tasks they depend upon 3013 # Take the build endpoints (no revdeps) and find the sstate tasks they depend upon
2656 new = True 3014 new = True
2657 for tid in rqdata.runtaskentries: 3015 for tid in rqdata.runtaskentries:
2658 if len(rqdata.runtaskentries[tid].revdeps) == 0: 3016 if not rqdata.runtaskentries[tid].revdeps:
2659 sqdata.unskippable.add(tid) 3017 sqdata.unskippable.add(tid)
2660 sqdata.unskippable |= sqrq.cantskip 3018 sqdata.unskippable |= sqdata.cantskip
2661 while new: 3019 while new:
2662 new = False 3020 new = False
2663 orig = sqdata.unskippable.copy() 3021 orig = sqdata.unskippable.copy()
2664 for tid in sorted(orig, reverse=True): 3022 for tid in sorted(orig, reverse=True):
2665 if tid in rqdata.runq_setscene_tids: 3023 if tid in rqdata.runq_setscene_tids:
2666 continue 3024 continue
2667 if len(rqdata.runtaskentries[tid].depends) == 0: 3025 if not rqdata.runtaskentries[tid].depends:
2668 # These are tasks which have no setscene tasks in their chain, need to mark as directly buildable 3026 # These are tasks which have no setscene tasks in their chain, need to mark as directly buildable
2669 sqrq.setbuildable(tid) 3027 sqrq.setbuildable(tid)
2670 sqdata.unskippable |= rqdata.runtaskentries[tid].depends 3028 sqdata.unskippable |= rqdata.runtaskentries[tid].depends
@@ -2676,14 +3034,13 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
2676 rqdata.init_progress_reporter.next_stage(len(rqdata.runtaskentries)) 3034 rqdata.init_progress_reporter.next_stage(len(rqdata.runtaskentries))
2677 3035
2678 # Sanity check all dependencies could be changed to setscene task references 3036 # Sanity check all dependencies could be changed to setscene task references
2679 for taskcounter, tid in enumerate(rqdata.runtaskentries): 3037 for tid in rqdata.runtaskentries:
2680 if tid in rqdata.runq_setscene_tids: 3038 if tid in rqdata.runq_setscene_tids:
2681 pass 3039 pass
2682 elif len(sq_revdeps_squash[tid]) != 0: 3040 elif sq_revdeps_squash[tid]:
2683 bb.msg.fatal("RunQueue", "Something went badly wrong during scenequeue generation, aborting. Please report this problem.") 3041 bb.msg.fatal("RunQueue", "Something went badly wrong during scenequeue generation, halting. Please report this problem.")
2684 else: 3042 else:
2685 del sq_revdeps_squash[tid] 3043 del sq_revdeps_squash[tid]
2686 rqdata.init_progress_reporter.update(taskcounter)
2687 3044
2688 rqdata.init_progress_reporter.next_stage() 3045 rqdata.init_progress_reporter.next_stage()
2689 3046
@@ -2694,7 +3051,9 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
2694 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) 3051 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
2695 realtid = tid + "_setscene" 3052 realtid = tid + "_setscene"
2696 idepends = rqdata.taskData[mc].taskentries[realtid].idepends 3053 idepends = rqdata.taskData[mc].taskentries[realtid].idepends
2697 sqdata.stamps[tid] = bb.build.stampfile(taskname + "_setscene", rqdata.dataCaches[mc], taskfn, noextra=True) 3054 sqdata.stamps[tid] = bb.parse.siggen.stampfile_mcfn(taskname, taskfn, extrainfo=False)
3055
3056 sqdata.sq_harddeps_rev[tid] = set()
2698 for (depname, idependtask) in idepends: 3057 for (depname, idependtask) in idepends:
2699 3058
2700 if depname not in rqdata.taskData[mc].build_targets: 3059 if depname not in rqdata.taskData[mc].build_targets:
@@ -2707,20 +3066,15 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
2707 if deptid not in rqdata.runtaskentries: 3066 if deptid not in rqdata.runtaskentries:
2708 bb.msg.fatal("RunQueue", "Task %s depends upon non-existent task %s:%s" % (realtid, depfn, idependtask)) 3067 bb.msg.fatal("RunQueue", "Task %s depends upon non-existent task %s:%s" % (realtid, depfn, idependtask))
2709 3068
3069 logger.debug2("Adding hard setscene dependency %s for %s" % (deptid, tid))
3070
2710 if not deptid in sqdata.sq_harddeps: 3071 if not deptid in sqdata.sq_harddeps:
2711 sqdata.sq_harddeps[deptid] = set() 3072 sqdata.sq_harddeps[deptid] = set()
2712 sqdata.sq_harddeps[deptid].add(tid) 3073 sqdata.sq_harddeps[deptid].add(tid)
2713 3074 sqdata.sq_harddeps_rev[tid].add(deptid)
2714 sq_revdeps_squash[tid].add(deptid)
2715 # Have to zero this to avoid circular dependencies
2716 sq_revdeps_squash[deptid] = set()
2717 3075
2718 rqdata.init_progress_reporter.next_stage() 3076 rqdata.init_progress_reporter.next_stage()
2719 3077
2720 for task in sqdata.sq_harddeps:
2721 for dep in sqdata.sq_harddeps[task]:
2722 sq_revdeps_squash[dep].add(task)
2723
2724 rqdata.init_progress_reporter.next_stage() 3078 rqdata.init_progress_reporter.next_stage()
2725 3079
2726 #for tid in sq_revdeps_squash: 3080 #for tid in sq_revdeps_squash:
@@ -2744,16 +3098,47 @@ def build_scenequeue_data(sqdata, rqdata, rq, cooker, stampcache, sqrq):
2744 sqdata.multiconfigs = set() 3098 sqdata.multiconfigs = set()
2745 for tid in sqdata.sq_revdeps: 3099 for tid in sqdata.sq_revdeps:
2746 sqdata.multiconfigs.add(mc_from_tid(tid)) 3100 sqdata.multiconfigs.add(mc_from_tid(tid))
2747 if len(sqdata.sq_revdeps[tid]) == 0: 3101 if not sqdata.sq_revdeps[tid]:
2748 sqrq.sq_buildable.add(tid) 3102 sqrq.sq_buildable.add(tid)
2749 3103
2750 rqdata.init_progress_reporter.finish() 3104 rqdata.init_progress_reporter.next_stage()
2751 3105
2752 sqdata.noexec = set() 3106 sqdata.noexec = set()
2753 sqdata.stamppresent = set() 3107 sqdata.stamppresent = set()
2754 sqdata.valid = set() 3108 sqdata.valid = set()
2755 3109
2756 update_scenequeue_data(sqdata.sq_revdeps, sqdata, rqdata, rq, cooker, stampcache, sqrq, summary=True) 3110 sqdata.hashes = {}
3111 sqrq.sq_deferred = {}
3112 for mc in sorted(sqdata.multiconfigs):
3113 for tid in sorted(sqdata.sq_revdeps):
3114 if mc_from_tid(tid) != mc:
3115 continue
3116 h = pending_hash_index(tid, rqdata)
3117 if h not in sqdata.hashes:
3118 sqdata.hashes[h] = tid
3119 else:
3120 sqrq.sq_deferred[tid] = sqdata.hashes[h]
3121 bb.debug(1, "Deferring %s after %s" % (tid, sqdata.hashes[h]))
3122
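Hoisting this block into build_scenequeue_data() builds the duplicate-work index once, up front: the first tid seen for a given pending hash owns it, and any later tid with the same hash is deferred behind the owner, which is what collapses identical sstate work across multiconfigs. A sketch of the index with a stand-in for pending_hash_index():

def pending_hash_index(tid, taskhashes):
    # Stand-in: key tasks by their pending hash so identical work collapses
    return taskhashes[tid]

def build_deferrals(tids, taskhashes):
    owners, deferred = {}, {}
    for tid in sorted(tids):
        h = pending_hash_index(tid, taskhashes)
        if h not in owners:
            owners[h] = tid            # first tid seen owns this hash
        else:
            deferred[tid] = owners[h]  # later duplicates wait on the owner
    return owners, deferred

hashes = {"a.bb:do_populate_sysroot": "h1",
          "mc:qemu:a.bb:do_populate_sysroot": "h1"}
print(build_deferrals(hashes, hashes)[1])
# {'mc:qemu:a.bb:do_populate_sysroot': 'a.bb:do_populate_sysroot'}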
3123 def check_setscene_stamps(tid, rqdata, rq, stampcache, noexecstamp=False):
3124
3125 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid)
3126
3127 taskdep = rqdata.dataCaches[mc].task_deps[taskfn]
3128
3129 if 'noexec' in taskdep and taskname in taskdep['noexec']:
3130 bb.build.make_stamp_mcfn(taskname + "_setscene", taskfn)
3131 return True, False
3132
3133 if rq.check_stamp_task(tid, taskname + "_setscene", cache=stampcache):
3134 logger.debug2('Setscene stamp current for task %s', tid)
3135 return False, True
3136
3137 if rq.check_stamp_task(tid, taskname, recurse = True, cache=stampcache):
3138 logger.debug2('Normal stamp current for task %s', tid)
3139 return False, True
3140
3141 return False, False
2757 3142
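Factoring the stamp checks out into check_setscene_stamps() gives a single (noexec, stamppresent) contract that both the hard-dependency handling above and update_scenequeue_data() below consume. A mock checker with the same contract, the three checks reduced to set membership for illustration:

def check_stamps_sketch(tid, noexec_tasks, setscene_stamps, normal_stamps):
    if tid in noexec_tasks:
        return True, False   # nothing to run; a setscene stamp gets written
    if tid in setscene_stamps:
        return False, True   # setscene already done
    if tid in normal_stamps:
        return False, True   # the full task already ran
    return False, False      # a real setscene attempt is needed

print(check_stamps_sketch("a.bb:do_fetch", set(), {"a.bb:do_fetch"}, set()))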
2758 def update_scenequeue_data(tids, sqdata, rqdata, rq, cooker, stampcache, sqrq, summary=True): 3143 def update_scenequeue_data(tids, sqdata, rqdata, rq, cooker, stampcache, sqrq, summary=True):
2759 3144
@@ -2764,55 +3149,42 @@ def update_scenequeue_data(tids, sqdata, rqdata, rq, cooker, stampcache, sqrq, s
2764 sqdata.stamppresent.remove(tid) 3149 sqdata.stamppresent.remove(tid)
2765 if tid in sqdata.valid: 3150 if tid in sqdata.valid:
2766 sqdata.valid.remove(tid) 3151 sqdata.valid.remove(tid)
3152 if tid in sqdata.outrightfail:
3153 sqdata.outrightfail.remove(tid)
2767 3154
2768 (mc, fn, taskname, taskfn) = split_tid_mcfn(tid) 3155 noexec, stamppresent = check_setscene_stamps(tid, rqdata, rq, stampcache, noexecstamp=True)
2769
2770 taskdep = rqdata.dataCaches[mc].task_deps[taskfn]
2771 3156
2772 if 'noexec' in taskdep and taskname in taskdep['noexec']: 3157 if noexec:
2773 sqdata.noexec.add(tid) 3158 sqdata.noexec.add(tid)
2774 sqrq.sq_task_skip(tid) 3159 sqrq.sq_task_skip(tid)
2775 bb.build.make_stamp(taskname + "_setscene", rqdata.dataCaches[mc], taskfn) 3160 logger.debug2("%s is noexec so skipping setscene" % (tid))
2776 continue
2777
2778 if rq.check_stamp_task(tid, taskname + "_setscene", cache=stampcache):
2779 logger.debug2('Setscene stamp current for task %s', tid)
2780 sqdata.stamppresent.add(tid)
2781 sqrq.sq_task_skip(tid)
2782 continue 3161 continue
2783 3162
2784 if rq.check_stamp_task(tid, taskname, recurse = True, cache=stampcache): 3163 if stamppresent:
2785 logger.debug2('Normal stamp current for task %s', tid)
2786 sqdata.stamppresent.add(tid) 3164 sqdata.stamppresent.add(tid)
2787 sqrq.sq_task_skip(tid) 3165 sqrq.sq_task_skip(tid)
3166 logger.debug2("%s has a valid stamp, skipping" % (tid))
2788 continue 3167 continue
2789 3168
2790 tocheck.add(tid) 3169 tocheck.add(tid)
2791 3170
2792 sqdata.valid |= rq.validate_hashes(tocheck, cooker.data, len(sqdata.stamppresent), False, summary=summary) 3171 sqdata.valid |= rq.validate_hashes(tocheck, cooker.data, len(sqdata.stamppresent), False, summary=summary)
2793 3172
2794 sqdata.hashes = {} 3173 for tid in tids:
2795 for mc in sorted(sqdata.multiconfigs): 3174 if tid in sqdata.stamppresent:
2796 for tid in sorted(sqdata.sq_revdeps): 3175 continue
2797 if mc_from_tid(tid) != mc: 3176 if tid in sqdata.valid:
2798 continue 3177 continue
2799 if tid in sqdata.stamppresent: 3178 if tid in sqdata.noexec:
2800 continue 3179 continue
2801 if tid in sqdata.valid: 3180 if tid in sqrq.scenequeue_covered:
2802 continue 3181 continue
2803 if tid in sqdata.noexec: 3182 if tid in sqrq.scenequeue_notcovered:
2804 continue 3183 continue
2805 if tid in sqrq.scenequeue_notcovered: 3184 if tid in sqrq.sq_deferred:
2806 continue 3185 continue
2807 sqdata.outrightfail.add(tid) 3186 sqdata.outrightfail.add(tid)
2808 3187 logger.debug2("%s already handled (fallthrough), skipping" % (tid))
2809 h = pending_hash_index(tid, rqdata)
2810 if h not in sqdata.hashes:
2811 sqdata.hashes[h] = tid
2812 else:
2813 sqrq.sq_deferred[tid] = sqdata.hashes[h]
2814 bb.note("Deferring %s after %s" % (tid, sqdata.hashes[h]))
2815
2816 3188
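The rewritten loop above classifies every task id exactly once: anything not skipped by an earlier bucket is marked as an outright failure. A minimal standalone sketch of the same triage pattern (names are illustrative, not BitBake API):

    # Each tid falls into exactly one bucket; whatever survives every
    # "continue" is an outright failure, mirroring the loop above.
    def triage(tids, stamppresent, valid, noexec, covered, notcovered, deferred):
        outrightfail = set()
        for tid in tids:
            if tid in stamppresent or tid in valid or tid in noexec:
                continue
            if tid in covered or tid in notcovered or tid in deferred:
                continue
            outrightfail.add(tid)
        return outrightfail
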
2817class TaskFailure(Exception): 3189class TaskFailure(Exception):
2818 """ 3190 """
@@ -2876,12 +3248,16 @@ class runQueueTaskFailed(runQueueEvent):
2876 """ 3248 """
2877 Event notifying a task failed 3249 Event notifying a task failed
2878 """ 3250 """
2879 def __init__(self, task, stats, exitcode, rq): 3251 def __init__(self, task, stats, exitcode, rq, fakeroot_log=None):
2880 runQueueEvent.__init__(self, task, stats, rq) 3252 runQueueEvent.__init__(self, task, stats, rq)
2881 self.exitcode = exitcode 3253 self.exitcode = exitcode
3254 self.fakeroot_log = fakeroot_log
2882 3255
2883 def __str__(self): 3256 def __str__(self):
2884 return "Task (%s) failed with exit code '%s'" % (self.taskstring, self.exitcode) 3257 if self.fakeroot_log:
3258 return "Task (%s) failed with exit code '%s' \nPseudo log:\n%s" % (self.taskstring, self.exitcode, self.fakeroot_log)
3259 else:
3260 return "Task (%s) failed with exit code '%s'" % (self.taskstring, self.exitcode)
2885 3261
2886class sceneQueueTaskFailed(sceneQueueEvent): 3262class sceneQueueTaskFailed(sceneQueueEvent):
2887 """ 3263 """
@@ -2933,18 +3309,16 @@ class runQueuePipe():
2933 """ 3309 """
2934 Abstraction for a pipe between a worker thread and the server 3310 Abstraction for a pipe between a worker thread and the server
2935 """ 3311 """
2936 def __init__(self, pipein, pipeout, d, rq, rqexec): 3312 def __init__(self, pipein, pipeout, d, rq, rqexec, fakerootlogs=None):
2937 self.input = pipein 3313 self.input = pipein
2938 if pipeout: 3314 if pipeout:
2939 pipeout.close() 3315 pipeout.close()
2940 bb.utils.nonblockingfd(self.input) 3316 bb.utils.nonblockingfd(self.input)
2941 self.queue = b"" 3317 self.queue = bytearray()
2942 self.d = d 3318 self.d = d
2943 self.rq = rq 3319 self.rq = rq
2944 self.rqexec = rqexec 3320 self.rqexec = rqexec
2945 3321 self.fakerootlogs = fakerootlogs
2946 def setrunqueueexec(self, rqexec):
2947 self.rqexec = rqexec
2948 3322
2949 def read(self): 3323 def read(self):
2950 for workers, name in [(self.rq.worker, "Worker"), (self.rq.fakeworker, "Fakeroot")]: 3324 for workers, name in [(self.rq.worker, "Worker"), (self.rq.fakeworker, "Fakeroot")]:
@@ -2956,13 +3330,13 @@ class runQueuePipe():
2956 3330
2957 start = len(self.queue) 3331 start = len(self.queue)
2958 try: 3332 try:
2959 self.queue = self.queue + (self.input.read(102400) or b"") 3333 self.queue.extend(self.input.read(512 * 1024) or b"")
2960 except (OSError, IOError) as e: 3334 except (OSError, IOError) as e:
2961 if e.errno != errno.EAGAIN: 3335 if e.errno != errno.EAGAIN:
2962 raise 3336 raise
2963 end = len(self.queue) 3337 end = len(self.queue)
2964 found = True 3338 found = True
2965 while found and len(self.queue): 3339 while found and self.queue:
2966 found = False 3340 found = False
2967 index = self.queue.find(b"</event>") 3341 index = self.queue.find(b"</event>")
2968 while index != -1 and self.queue.startswith(b"<event>"): 3342 while index != -1 and self.queue.startswith(b"<event>"):
@@ -2987,7 +3361,11 @@ class runQueuePipe():
2987 task, status = pickle.loads(self.queue[10:index]) 3361 task, status = pickle.loads(self.queue[10:index])
2988 except (ValueError, pickle.UnpicklingError, AttributeError, IndexError) as e: 3362 except (ValueError, pickle.UnpicklingError, AttributeError, IndexError) as e:
2989 bb.msg.fatal("RunQueue", "failed load pickle '%s': '%s'" % (e, self.queue[10:index])) 3363 bb.msg.fatal("RunQueue", "failed load pickle '%s': '%s'" % (e, self.queue[10:index]))
2990 self.rqexec.runqueue_process_waitpid(task, status) 3364 (_, _, _, taskfn) = split_tid_mcfn(task)
3365 fakerootlog = None
3366 if self.fakerootlogs and taskfn and taskfn in self.fakerootlogs:
3367 fakerootlog = self.fakerootlogs[taskfn]
3368 self.rqexec.runqueue_process_waitpid(task, status, fakerootlog=fakerootlog)
2991 found = True 3369 found = True
2992 self.queue = self.queue[index+11:] 3370 self.queue = self.queue[index+11:]
2993 index = self.queue.find(b"</exitcode>") 3371 index = self.queue.find(b"</exitcode>")
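Two details in the runQueuePipe hunks above are easy to miss: self.queue becomes a bytearray, so each read is an in-place extend() rather than a full reallocation of an immutable bytes object, and complete <event>...</event> frames are peeled off the front of the buffer. A self-contained sketch of that framing loop, under the same markers (names are illustrative, not BitBake API):

    import io

    # Frames are "<event>...</event>"; the buffer is a bytearray so the
    # extend() is amortized O(1) and consumed frames are deleted in place.
    def drain_events(stream, queue):
        queue.extend(stream.read(512 * 1024) or b"")
        events = []
        index = queue.find(b"</event>")
        while index != -1 and queue.startswith(b"<event>"):
            events.append(bytes(queue[7:index]))      # payload between the tags
            del queue[:index + len(b"</event>")]      # drop the consumed frame
            index = queue.find(b"</event>")
        return events

    buf = bytearray()
    got = drain_events(io.BytesIO(b"<event>one</event><event>two</event><ev"), buf)
    assert got == [b"one", b"two"] and buf == bytearray(b"<ev")
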
@@ -2996,16 +3374,16 @@ class runQueuePipe():
2996 def close(self): 3374 def close(self):
2997 while self.read(): 3375 while self.read():
2998 continue 3376 continue
2999 if len(self.queue) > 0: 3377 if self.queue:
3000 print("Warning, worker left partial message: %s" % self.queue) 3378 print("Warning, worker left partial message: %s" % self.queue)
3001 self.input.close() 3379 self.input.close()
3002 3380
3003def get_setscene_enforce_whitelist(d, targets): 3381def get_setscene_enforce_ignore_tasks(d, targets):
3004 if d.getVar('BB_SETSCENE_ENFORCE') != '1': 3382 if d.getVar('BB_SETSCENE_ENFORCE') != '1':
3005 return None 3383 return None
3006 whitelist = (d.getVar("BB_SETSCENE_ENFORCE_WHITELIST") or "").split() 3384 ignore_tasks = (d.getVar("BB_SETSCENE_ENFORCE_IGNORE_TASKS") or "").split()
3007 outlist = [] 3385 outlist = []
3008 for item in whitelist[:]: 3386 for item in ignore_tasks[:]:
3009 if item.startswith('%:'): 3387 if item.startswith('%:'):
3010 for (mc, target, task, fn) in targets: 3388 for (mc, target, task, fn) in targets:
3011 outlist.append(target + ':' + item.split(':')[1]) 3389 outlist.append(target + ':' + item.split(':')[1])
@@ -3013,12 +3391,12 @@ def get_setscene_enforce_whitelist(d, targets):
3013 outlist.append(item) 3391 outlist.append(item)
3014 return outlist 3392 return outlist
3015 3393
3016def check_setscene_enforce_whitelist(pn, taskname, whitelist): 3394def check_setscene_enforce_ignore_tasks(pn, taskname, ignore_tasks):
3017 import fnmatch 3395 import fnmatch
3018 if whitelist is not None: 3396 if ignore_tasks is not None:
3019 item = '%s:%s' % (pn, taskname) 3397 item = '%s:%s' % (pn, taskname)
3017 for whitelist_item in whitelist: 3398 for ignore_task in ignore_tasks:
3018 if fnmatch.fnmatch(item, whitelist_item): 3399 if fnmatch.fnmatch(item, ignore_task):
3022 return True 3400 return True
3023 return False 3401 return False
3024 return True 3402 return True
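The two renamed helpers above keep their original semantics: entries are "pn:taskname" patterns matched with fnmatch, so shell-style globs work per entry. A worked example with illustrative patterns:

    import fnmatch

    # "pn:taskname" is matched against each glob in the ignore list.
    def matches(pn, taskname, ignore_tasks):
        item = "%s:%s" % (pn, taskname)
        return any(fnmatch.fnmatch(item, pattern) for pattern in ignore_tasks)

    assert matches("busybox", "do_install", ["*:do_install"])
    assert not matches("busybox", "do_compile", ["*:do_install"])
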
diff --git a/bitbake/lib/bb/server/process.py b/bitbake/lib/bb/server/process.py
index b27b4aefe0..4b35be62cd 100644
--- a/bitbake/lib/bb/server/process.py
+++ b/bitbake/lib/bb/server/process.py
@@ -26,6 +26,9 @@ import errno
26import re 26import re
27import datetime 27import datetime
28import pickle 28import pickle
29import traceback
30import gc
31import stat
29import bb.server.xmlrpcserver 32import bb.server.xmlrpcserver
30from bb import daemonize 33from bb import daemonize
31from multiprocessing import queues 34from multiprocessing import queues
@@ -35,9 +38,46 @@ logger = logging.getLogger('BitBake')
35class ProcessTimeout(SystemExit): 38class ProcessTimeout(SystemExit):
36 pass 39 pass
37 40
41def currenttime():
42 return datetime.datetime.now().strftime('%H:%M:%S.%f')
43
38def serverlog(msg): 44def serverlog(msg):
39 print(str(os.getpid()) + " " + datetime.datetime.now().strftime('%H:%M:%S.%f') + " " + msg) 45 print(str(os.getpid()) + " " + currenttime() + " " + msg)
 40 sys.stdout.flush() 46 #Seems a flush here triggers filesystem sync-like behaviour and long hangs in the server
47 #sys.stdout.flush()
48
49#
50# When we have lockfile issues, try and find infomation about which process is
51# using the lockfile
52#
53def get_lockfile_process_msg(lockfile):
54 # Some systems may not have lsof available
55 procs = None
56 try:
57 procs = subprocess.check_output(["lsof", '-w', lockfile], stderr=subprocess.STDOUT)
58 except subprocess.CalledProcessError:
59 # File was deleted?
60 pass
61 except OSError as e:
62 if e.errno != errno.ENOENT:
63 raise
64 if procs is None:
65 # Fall back to fuser if lsof is unavailable
66 try:
67 procs = subprocess.check_output(["fuser", '-v', lockfile], stderr=subprocess.STDOUT)
68 except subprocess.CalledProcessError:
69 # File was deleted?
70 pass
71 except OSError as e:
72 if e.errno != errno.ENOENT:
73 raise
74 if procs:
75 return procs.decode("utf-8")
76 return None
77
78class idleFinish():
79 def __init__(self, msg):
80 self.msg = msg
41 81
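idleFinish is a sentinel return type for idle callbacks: returning idleFinish(msg) asks the idle thread (see idle_thread_internal further down) to deregister the callback and complete the current async command. A hypothetical callback following that return-value protocol, assuming the idleFinish definition above:

    # Return-value protocol: a float means "call again after at most this
    # many seconds", True means "call again as soon as possible", False
    # means "deregister me", and idleFinish(msg) additionally finishes
    # the running async command.
    def watch_build(server, data, abort):
        if data["runs_left"] == 0:
            return idleFinish("Build completed")
        data["runs_left"] -= 1
        return 0.1
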
42class ProcessServer(): 82class ProcessServer():
43 profile_filename = "profile.log" 83 profile_filename = "profile.log"
@@ -56,12 +96,19 @@ class ProcessServer():
56 self.maxuiwait = 30 96 self.maxuiwait = 30
57 self.xmlrpc = False 97 self.xmlrpc = False
58 98
99 self.idle = None
100 # Need a lock for _idlefuns changes
59 self._idlefuns = {} 101 self._idlefuns = {}
102 self._idlefuncsLock = threading.Lock()
103 self.idle_cond = threading.Condition(self._idlefuncsLock)
60 104
61 self.bitbake_lock = lock 105 self.bitbake_lock = lock
62 self.bitbake_lock_name = lockname 106 self.bitbake_lock_name = lockname
63 self.sock = sock 107 self.sock = sock
64 self.sockname = sockname 108 self.sockname = sockname
109 # It is possible the directory may be renamed. Cache the inode of the socket file
110 # so we can tell if things changed.
111 self.sockinode = os.stat(self.sockname)[stat.ST_INO]
65 112
66 self.server_timeout = server_timeout 113 self.server_timeout = server_timeout
67 self.timeout = self.server_timeout 114 self.timeout = self.server_timeout
@@ -70,7 +117,9 @@ class ProcessServer():
70 def register_idle_function(self, function, data): 117 def register_idle_function(self, function, data):
71 """Register a function to be called while the server is idle""" 118 """Register a function to be called while the server is idle"""
72 assert hasattr(function, '__call__') 119 assert hasattr(function, '__call__')
73 self._idlefuns[function] = data 120 with bb.utils.lock_timeout(self._idlefuncsLock):
121 self._idlefuns[function] = data
122 serverlog("Registering idle function %s" % str(function))
74 123
75 def run(self): 124 def run(self):
76 125
@@ -109,6 +158,31 @@ class ProcessServer():
109 158
110 return ret 159 return ret
111 160
161 def _idle_check(self):
162 return len(self._idlefuns) == 0 and self.cooker.command.currentAsyncCommand is None
163
164 def wait_for_idle(self, timeout=30):
165 # Wait for the idle loop to have cleared
166 with bb.utils.lock_timeout(self._idlefuncsLock):
167 return self.idle_cond.wait_for(self._idle_check, timeout) is not False
168
169 def set_async_cmd(self, cmd):
170 with bb.utils.lock_timeout(self._idlefuncsLock):
171 ret = self.idle_cond.wait_for(self._idle_check, 30)
172 if ret is False:
173 return False
174 self.cooker.command.currentAsyncCommand = cmd
175 return True
176
177 def clear_async_cmd(self):
178 with bb.utils.lock_timeout(self._idlefuncsLock):
179 self.cooker.command.currentAsyncCommand = None
180 self.idle_cond.notify_all()
181
182 def get_async_cmd(self):
183 with bb.utils.lock_timeout(self._idlefuncsLock):
184 return self.cooker.command.currentAsyncCommand
185
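The async-command accessors above are a textbook condition-variable gate. A standalone sketch using a plain threading.Lock where the patch uses bb.utils.lock_timeout():

    import threading

    class CommandGate:
        def __init__(self):
            self.lock = threading.Lock()
            self.cond = threading.Condition(self.lock)
            self.current = None

        def set_async_cmd(self, cmd, timeout=30):
            with self.lock:
                # wait_for() returns False if the predicate never became
                # true within the timeout, i.e. a command is still running.
                if not self.cond.wait_for(lambda: self.current is None, timeout):
                    return False
                self.current = cmd
                return True

        def clear_async_cmd(self):
            with self.lock:
                self.current = None
                self.cond.notify_all()  # wake any waiters in set_async_cmd()
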
112 def main(self): 186 def main(self):
113 self.cooker.pre_serve() 187 self.cooker.pre_serve()
114 188
@@ -123,14 +197,19 @@ class ProcessServer():
123 fds.append(self.xmlrpc) 197 fds.append(self.xmlrpc)
124 seendata = False 198 seendata = False
125 serverlog("Entering server connection loop") 199 serverlog("Entering server connection loop")
200 serverlog("Lockfile is: %s\nSocket is %s (%s)" % (self.bitbake_lock_name, self.sockname, os.path.exists(self.sockname)))
126 201
127 def disconnect_client(self, fds): 202 def disconnect_client(self, fds):
128 serverlog("Disconnecting Client") 203 serverlog("Disconnecting Client (socket: %s)" % os.path.exists(self.sockname))
129 if self.controllersock: 204 if self.controllersock:
130 fds.remove(self.controllersock) 205 fds.remove(self.controllersock)
131 self.controllersock.close() 206 self.controllersock.close()
132 self.controllersock = False 207 self.controllersock = False
133 if self.haveui: 208 if self.haveui:
209 # Wait for the idle loop to have cleared (30s max)
210 if not self.wait_for_idle(30):
211 serverlog("Idle loop didn't finish queued commands after 30s, exiting.")
212 self.quit = True
134 fds.remove(self.command_channel) 213 fds.remove(self.command_channel)
135 bb.event.unregister_UIHhandler(self.event_handle, True) 214 bb.event.unregister_UIHhandler(self.event_handle, True)
136 self.command_channel_reply.writer.close() 215 self.command_channel_reply.writer.close()
@@ -142,12 +221,12 @@ class ProcessServer():
142 self.cooker.clientComplete() 221 self.cooker.clientComplete()
143 self.haveui = False 222 self.haveui = False
144 ready = select.select(fds,[],[],0)[0] 223 ready = select.select(fds,[],[],0)[0]
145 if newconnections: 224 if newconnections and not self.quit:
146 serverlog("Starting new client") 225 serverlog("Starting new client")
147 conn = newconnections.pop(-1) 226 conn = newconnections.pop(-1)
148 fds.append(conn) 227 fds.append(conn)
149 self.controllersock = conn 228 self.controllersock = conn
150 elif self.timeout is None and not ready: 229 elif not self.timeout and not ready:
151 serverlog("No timeout, exiting.") 230 serverlog("No timeout, exiting.")
152 self.quit = True 231 self.quit = True
153 232
@@ -214,11 +293,14 @@ class ProcessServer():
214 continue 293 continue
215 try: 294 try:
216 serverlog("Running command %s" % command) 295 serverlog("Running command %s" % command)
217 self.command_channel_reply.send(self.cooker.command.runCommand(command)) 296 reply = self.cooker.command.runCommand(command, self)
218 serverlog("Command Completed") 297 serverlog("Sending reply %s" % repr(reply))
298 self.command_channel_reply.send(reply)
299 serverlog("Command Completed (socket: %s)" % os.path.exists(self.sockname))
219 except Exception as e: 300 except Exception as e:
220 serverlog('Exception in server main event loop running command %s (%s)' % (command, str(e))) 301 stack = traceback.format_exc()
221 logger.exception('Exception in server main event loop running command %s (%s)' % (command, str(e))) 302 serverlog('Exception in server main event loop running command %s (%s)' % (command, stack))
303 logger.exception('Exception in server main event loop running command %s (%s)' % (command, stack))
222 304
223 if self.xmlrpc in ready: 305 if self.xmlrpc in ready:
224 self.xmlrpc.handle_requests() 306 self.xmlrpc.handle_requests()
@@ -239,21 +321,42 @@ class ProcessServer():
239 bb.warn('Ignoring invalid BB_SERVER_TIMEOUT=%s, must be a float specifying seconds.' % self.timeout) 321 bb.warn('Ignoring invalid BB_SERVER_TIMEOUT=%s, must be a float specifying seconds.' % self.timeout)
240 seendata = True 322 seendata = True
241 323
242 ready = self.idle_commands(.1, fds) 324 if not self.idle:
325 self.idle = threading.Thread(target=self.idle_thread)
326 self.idle.start()
327 elif self.idle and not self.idle.is_alive():
328 serverlog("Idle thread terminated, main thread exiting too")
329 bb.error("Idle thread terminated, main thread exiting too")
330 self.quit = True
243 331
244 if len(threading.enumerate()) != 1: 332 nextsleep = 1.0
245 serverlog("More than one thread left?: " + str(threading.enumerate())) 333 if self.xmlrpc:
334 nextsleep = self.xmlrpc.get_timeout(nextsleep)
335 try:
336 ready = select.select(fds,[],[],nextsleep)[0]
337 except InterruptedError:
338 # Ignore EINTR
339 ready = []
340
341 if self.idle:
342 self.idle.join()
246 343
247 serverlog("Exiting") 344 serverlog("Exiting (socket: %s)" % os.path.exists(self.sockname))
248 # Remove the socket file so we don't get any more connections to avoid races 345 # Remove the socket file so we don't get any more connections to avoid races
346 # The build directory could have been renamed so if the file isn't the one we created
347 # we shouldn't delete it.
249 try: 348 try:
250 os.unlink(self.sockname) 349 sockinode = os.stat(self.sockname)[stat.ST_INO]
251 except: 350 if sockinode == self.sockinode:
252 pass 351 os.unlink(self.sockname)
352 else:
353 serverlog("bitbake.sock inode mismatch (%s vs %s), not deleting." % (sockinode, self.sockinode))
354 except Exception as err:
355 serverlog("Removing socket file '%s' failed (%s)" % (self.sockname, err))
253 self.sock.close() 356 self.sock.close()
254 357
255 try: 358 try:
256 self.cooker.shutdown(True) 359 self.cooker.shutdown(True, idle=False)
257 self.cooker.notifier.stop() 360 self.cooker.notifier.stop()
258 self.cooker.confignotifier.stop() 361 self.cooker.confignotifier.stop()
259 except: 362 except:
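The socket-removal logic above guards against a renamed build directory: the inode cached at startup (self.sockinode) is compared with whatever file currently sits at the path, and the unlink only happens when they match. The same check as a standalone helper (the path handling is illustrative):

    import os
    import stat

    def remove_if_unchanged(sockname, created_inode):
        # Only delete the socket if it is still the file we created;
        # a new server may own a fresh socket at the same path.
        try:
            if os.stat(sockname)[stat.ST_INO] == created_inode:
                os.unlink(sockname)
                return True
        except FileNotFoundError:
            pass
        return False
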
@@ -261,6 +364,9 @@ class ProcessServer():
261 364
262 self.cooker.post_serve() 365 self.cooker.post_serve()
263 366
367 if len(threading.enumerate()) != 1:
368 serverlog("More than one thread left?: " + str(threading.enumerate()))
369
264 # Flush logs before we release the lock 370 # Flush logs before we release the lock
265 sys.stdout.flush() 371 sys.stdout.flush()
266 sys.stderr.flush() 372 sys.stderr.flush()
@@ -276,20 +382,21 @@ class ProcessServer():
276 except FileNotFoundError: 382 except FileNotFoundError:
277 return None 383 return None
278 384
279 lockcontents = get_lock_contents(lockfile)
280 serverlog("Original lockfile contents: " + str(lockcontents))
281
282 lock.close() 385 lock.close()
283 lock = None 386 lock = None
284 387
285 while not lock: 388 while not lock:
286 i = 0 389 i = 0
287 lock = None 390 lock = None
391 if not os.path.exists(os.path.dirname(lockfile)):
392 serverlog("Lockfile directory gone, exiting.")
393 return
394
288 while not lock and i < 30: 395 while not lock and i < 30:
289 lock = bb.utils.lockfile(lockfile, shared=False, retry=False, block=False) 396 lock = bb.utils.lockfile(lockfile, shared=False, retry=False, block=False)
290 if not lock: 397 if not lock:
291 newlockcontents = get_lock_contents(lockfile) 398 newlockcontents = get_lock_contents(lockfile)
292 if newlockcontents != lockcontents: 399 if not newlockcontents or not newlockcontents[0].startswith((f"{os.getpid()}\n", f"{os.getpid()} ")):
293 # A new server was started, the lockfile contents changed, we can exit 400 # A new server was started, the lockfile contents changed, we can exit
294 serverlog("Lockfile now contains different contents, exiting: " + str(newlockcontents)) 401 serverlog("Lockfile now contains different contents, exiting: " + str(newlockcontents))
295 return 402 return
@@ -303,87 +410,95 @@ class ProcessServer():
303 return 410 return
304 411
305 if not lock: 412 if not lock:
306 # Some systems may not have lsof available 413 procs = get_lockfile_process_msg(lockfile)
307 procs = None 414 msg = ["Delaying shutdown due to active processes which appear to be holding bitbake.lock"]
308 try:
309 procs = subprocess.check_output(["lsof", '-w', lockfile], stderr=subprocess.STDOUT)
310 except subprocess.CalledProcessError:
311 # File was deleted?
312 continue
313 except OSError as e:
314 if e.errno != errno.ENOENT:
315 raise
316 if procs is None:
317 # Fall back to fuser if lsof is unavailable
318 try:
319 procs = subprocess.check_output(["fuser", '-v', lockfile], stderr=subprocess.STDOUT)
320 except subprocess.CalledProcessError:
321 # File was deleted?
322 continue
323 except OSError as e:
324 if e.errno != errno.ENOENT:
325 raise
326
327 msg = "Delaying shutdown due to active processes which appear to be holding bitbake.lock"
328 if procs: 415 if procs:
329 msg += ":\n%s" % str(procs.decode("utf-8")) 416 msg.append(":\n%s" % procs)
330 serverlog(msg) 417 serverlog("".join(msg))
331 418
332 def idle_commands(self, delay, fds=None): 419 def idle_thread(self):
333 nextsleep = delay 420 if self.cooker.configuration.profile:
334 if not fds:
335 fds = []
336
337 for function, data in list(self._idlefuns.items()):
338 try: 421 try:
339 retval = function(self, data, False) 422 import cProfile as profile
340 if retval is False: 423 except:
341 del self._idlefuns[function] 424 import profile
342 nextsleep = None 425 prof = profile.Profile()
343 elif retval is True:
344 nextsleep = None
345 elif isinstance(retval, float) and nextsleep:
346 if (retval < nextsleep):
347 nextsleep = retval
348 elif nextsleep is None:
349 continue
350 else:
351 fds = fds + retval
352 except SystemExit:
353 raise
354 except Exception as exc:
355 if not isinstance(exc, bb.BBHandledException):
356 logger.exception('Running idle function')
357 del self._idlefuns[function]
358 self.quit = True
359 426
360 # Create new heartbeat event? 427 ret = profile.Profile.runcall(prof, self.idle_thread_internal)
361 now = time.time() 428
362 if now >= self.next_heartbeat: 429 prof.dump_stats("profile-mainloop.log")
363 # We might have missed heartbeats. Just trigger once in 430 bb.utils.process_profilelog("profile-mainloop.log")
364 # that case and continue after the usual delay. 431 serverlog("Raw profiling information saved to profile-mainloop.log and processed statistics to profile-mainloop.log.processed")
365 self.next_heartbeat += self.heartbeat_seconds
366 if self.next_heartbeat <= now:
367 self.next_heartbeat = now + self.heartbeat_seconds
368 if hasattr(self.cooker, "data"):
369 heartbeat = bb.event.HeartbeatEvent(now)
370 bb.event.fire(heartbeat, self.cooker.data)
371 if nextsleep and now + nextsleep > self.next_heartbeat:
372 # Shorten timeout so that we we wake up in time for
373 # the heartbeat.
374 nextsleep = self.next_heartbeat - now
375
376 if nextsleep is not None:
377 if self.xmlrpc:
378 nextsleep = self.xmlrpc.get_timeout(nextsleep)
379 try:
380 return select.select(fds,[],[],nextsleep)[0]
381 except InterruptedError:
382 # Ignore EINTR
383 return []
384 else: 432 else:
385 return select.select(fds,[],[],0)[0] 433 self.idle_thread_internal()
434
435 def idle_thread_internal(self):
436 def remove_idle_func(function):
437 with bb.utils.lock_timeout(self._idlefuncsLock):
438 del self._idlefuns[function]
439 self.idle_cond.notify_all()
440
441 while not self.quit:
442 nextsleep = 1.0
443 fds = []
444
445 with bb.utils.lock_timeout(self._idlefuncsLock):
446 items = list(self._idlefuns.items())
386 447
448 for function, data in items:
449 try:
450 retval = function(self, data, False)
451 if isinstance(retval, idleFinish):
452 serverlog("Removing idle function %s at idleFinish" % str(function))
453 remove_idle_func(function)
454 self.cooker.command.finishAsyncCommand(retval.msg)
455 nextsleep = None
456 elif retval is False:
457 serverlog("Removing idle function %s" % str(function))
458 remove_idle_func(function)
459 nextsleep = None
460 elif retval is True:
461 nextsleep = None
462 elif isinstance(retval, float) and nextsleep:
463 if (retval < nextsleep):
464 nextsleep = retval
465 elif nextsleep is None:
466 continue
467 else:
468 fds = fds + retval
469 except SystemExit:
470 raise
471 except Exception as exc:
472 if not isinstance(exc, bb.BBHandledException):
473 logger.exception('Running idle function')
474 remove_idle_func(function)
475 serverlog("Exception %s broke the idle_thread, exiting" % traceback.format_exc())
476 self.quit = True
477
478 # Create new heartbeat event?
479 now = time.time()
480 if items and bb.event._heartbeat_enabled and now >= self.next_heartbeat:
481 # We might have missed heartbeats. Just trigger once in
482 # that case and continue after the usual delay.
483 self.next_heartbeat += self.heartbeat_seconds
484 if self.next_heartbeat <= now:
485 self.next_heartbeat = now + self.heartbeat_seconds
486 if hasattr(self.cooker, "data"):
487 heartbeat = bb.event.HeartbeatEvent(now)
488 try:
489 bb.event.fire(heartbeat, self.cooker.data)
490 except Exception as exc:
491 if not isinstance(exc, bb.BBHandledException):
492 logger.exception('Running heartbeat function')
 493 serverlog("Exception %s broke the heartbeat handler in idle_thread, exiting" % traceback.format_exc())
494 self.quit = True
495 if nextsleep and bb.event._heartbeat_enabled and now + nextsleep > self.next_heartbeat:
 496 # Shorten timeout so that we wake up in time for
497 # the heartbeat.
498 nextsleep = self.next_heartbeat - now
499
500 if nextsleep is not None:
501 select.select(fds,[],[],nextsleep)[0]
387 502
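The profiling branch in idle_thread() above is the standard cProfile wrapper pattern: Profile.runcall() runs the target under the profiler and dump_stats() writes the raw data for later post-processing. A minimal standalone equivalent:

    import cProfile as profile

    def profiled(func, logname="profile-mainloop.log"):
        prof = profile.Profile()
        ret = prof.runcall(func)          # run func under the profiler
        prof.dump_stats(logname)          # raw stats, post-process later
        return ret
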
388class ServerCommunicator(): 503class ServerCommunicator():
389 def __init__(self, connection, recv): 504 def __init__(self, connection, recv):
@@ -391,12 +506,18 @@ class ServerCommunicator():
391 self.recv = recv 506 self.recv = recv
392 507
393 def runCommand(self, command): 508 def runCommand(self, command):
394 self.connection.send(command) 509 try:
510 self.connection.send(command)
511 except BrokenPipeError as e:
512 raise BrokenPipeError("bitbake-server might have died or been forcibly stopped, ie. OOM killed") from e
395 if not self.recv.poll(30): 513 if not self.recv.poll(30):
396 logger.info("No reply from server in 30s") 514 logger.info("No reply from server in 30s (for command %s at %s)" % (command[0], currenttime()))
397 if not self.recv.poll(30): 515 if not self.recv.poll(30):
398 raise ProcessTimeout("Timeout while waiting for a reply from the bitbake server (60s)") 516 raise ProcessTimeout("Timeout while waiting for a reply from the bitbake server (60s at %s)" % currenttime())
399 ret, exc = self.recv.get() 517 try:
518 ret, exc = self.recv.get()
519 except EOFError as e:
520 raise EOFError("bitbake-server might have died or been forcibly stopped, ie. OOM killed") from e
400 # Should probably turn all exceptions in exc back into exceptions? 521 # Should probably turn all exceptions in exc back into exceptions?
401 # For now, at least handle BBHandledException 522 # For now, at least handle BBHandledException
402 if exc and ("BBHandledException" in exc or "SystemExit" in exc): 523 if exc and ("BBHandledException" in exc or "SystemExit" in exc):
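The wrapped send/receive above uses exception chaining so the user-facing message names the likely cause while the original error stays attached as __cause__ in the traceback. In isolation:

    def send_command(conn, command):
        try:
            conn.send(command)
        except BrokenPipeError as e:
            # The original BrokenPipeError is preserved as __cause__.
            raise BrokenPipeError("bitbake-server might have died or been "
                                  "forcibly stopped, ie. OOM killed") from e
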
@@ -429,6 +550,7 @@ class BitBakeProcessServerConnection(object):
429 self.socket_connection = sock 550 self.socket_connection = sock
430 551
431 def terminate(self): 552 def terminate(self):
553 self.events.close()
432 self.socket_connection.close() 554 self.socket_connection.close()
433 self.connection.connection.close() 555 self.connection.connection.close()
434 self.connection.recv.close() 556 self.connection.recv.close()
@@ -439,13 +561,14 @@ start_log_datetime_format = '%Y-%m-%d %H:%M:%S.%f'
439 561
440class BitBakeServer(object): 562class BitBakeServer(object):
441 563
442 def __init__(self, lock, sockname, featureset, server_timeout, xmlrpcinterface): 564 def __init__(self, lock, sockname, featureset, server_timeout, xmlrpcinterface, profile):
443 565
444 self.server_timeout = server_timeout 566 self.server_timeout = server_timeout
445 self.xmlrpcinterface = xmlrpcinterface 567 self.xmlrpcinterface = xmlrpcinterface
446 self.featureset = featureset 568 self.featureset = featureset
447 self.sockname = sockname 569 self.sockname = sockname
448 self.bitbake_lock = lock 570 self.bitbake_lock = lock
571 self.profile = profile
449 self.readypipe, self.readypipein = os.pipe() 572 self.readypipe, self.readypipein = os.pipe()
450 573
451 # Place the log in the builddirectory alongside the lock file 574 # Place the log in the builddirectory alongside the lock file
@@ -466,7 +589,7 @@ class BitBakeServer(object):
466 try: 589 try:
467 r = ready.get() 590 r = ready.get()
468 except EOFError: 591 except EOFError:
469 # Trap the child exitting/closing the pipe and error out 592 # Trap the child exiting/closing the pipe and error out
470 r = None 593 r = None
471 if not r or r[0] != "r": 594 if not r or r[0] != "r":
472 ready.close() 595 ready.close()
@@ -509,9 +632,9 @@ class BitBakeServer(object):
509 os.set_inheritable(self.bitbake_lock.fileno(), True) 632 os.set_inheritable(self.bitbake_lock.fileno(), True)
510 os.set_inheritable(self.readypipein, True) 633 os.set_inheritable(self.readypipein, True)
511 serverscript = os.path.realpath(os.path.dirname(__file__) + "/../../../bin/bitbake-server") 634 serverscript = os.path.realpath(os.path.dirname(__file__) + "/../../../bin/bitbake-server")
512 os.execl(sys.executable, "bitbake-server", serverscript, "decafbad", str(self.bitbake_lock.fileno()), str(self.readypipein), self.logfile, self.bitbake_lock.name, self.sockname, str(self.server_timeout), str(self.xmlrpcinterface[0]), str(self.xmlrpcinterface[1])) 635 os.execl(sys.executable, sys.executable, serverscript, "decafbad", str(self.bitbake_lock.fileno()), str(self.readypipein), self.logfile, self.bitbake_lock.name, self.sockname, str(self.server_timeout or 0), str(int(self.profile)), str(self.xmlrpcinterface[0]), str(self.xmlrpcinterface[1]))
513 636
514def execServer(lockfd, readypipeinfd, lockname, sockname, server_timeout, xmlrpcinterface): 637def execServer(lockfd, readypipeinfd, lockname, sockname, server_timeout, xmlrpcinterface, profile):
515 638
516 import bb.cookerdata 639 import bb.cookerdata
517 import bb.cooker 640 import bb.cooker
@@ -523,6 +646,7 @@ def execServer(lockfd, readypipeinfd, lockname, sockname, server_timeout, xmlrpc
523 646
524 # Create server control socket 647 # Create server control socket
525 if os.path.exists(sockname): 648 if os.path.exists(sockname):
649 serverlog("WARNING: removing existing socket file '%s'" % sockname)
526 os.unlink(sockname) 650 os.unlink(sockname)
527 651
528 sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) 652 sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
@@ -539,7 +663,8 @@ def execServer(lockfd, readypipeinfd, lockname, sockname, server_timeout, xmlrpc
539 writer = ConnectionWriter(readypipeinfd) 663 writer = ConnectionWriter(readypipeinfd)
540 try: 664 try:
541 featureset = [] 665 featureset = []
542 cooker = bb.cooker.BBCooker(featureset, server.register_idle_function) 666 cooker = bb.cooker.BBCooker(featureset, server)
667 cooker.configuration.profile = profile
543 except bb.BBHandledException: 668 except bb.BBHandledException:
544 return None 669 return None
545 writer.send("r") 670 writer.send("r")
@@ -549,7 +674,7 @@ def execServer(lockfd, readypipeinfd, lockname, sockname, server_timeout, xmlrpc
549 674
550 server.run() 675 server.run()
551 finally: 676 finally:
552 # Flush any ,essages/errors to the logfile before exit 677 # Flush any messages/errors to the logfile before exit
553 sys.stdout.flush() 678 sys.stdout.flush()
554 sys.stderr.flush() 679 sys.stderr.flush()
555 680
@@ -654,23 +779,18 @@ class BBUIEventQueue:
654 self.reader = ConnectionReader(readfd) 779 self.reader = ConnectionReader(readfd)
655 780
656 self.t = threading.Thread() 781 self.t = threading.Thread()
657 self.t.setDaemon(True)
658 self.t.run = self.startCallbackHandler 782 self.t.run = self.startCallbackHandler
659 self.t.start() 783 self.t.start()
660 784
661 def getEvent(self): 785 def getEvent(self):
662 self.eventQueueLock.acquire() 786 with bb.utils.lock_timeout(self.eventQueueLock):
663 787 if len(self.eventQueue) == 0:
664 if len(self.eventQueue) == 0: 788 return None
665 self.eventQueueLock.release()
666 return None
667
668 item = self.eventQueue.pop(0)
669 789
670 if len(self.eventQueue) == 0: 790 item = self.eventQueue.pop(0)
671 self.eventQueueNotify.clear() 791 if len(self.eventQueue) == 0:
792 self.eventQueueNotify.clear()
672 793
673 self.eventQueueLock.release()
674 return item 794 return item
675 795
676 def waitEvent(self, delay): 796 def waitEvent(self, delay):
@@ -678,10 +798,9 @@ class BBUIEventQueue:
678 return self.getEvent() 798 return self.getEvent()
679 799
680 def queue_event(self, event): 800 def queue_event(self, event):
681 self.eventQueueLock.acquire() 801 with bb.utils.lock_timeout(self.eventQueueLock):
682 self.eventQueue.append(event) 802 self.eventQueue.append(event)
683 self.eventQueueNotify.set() 803 self.eventQueueNotify.set()
684 self.eventQueueLock.release()
685 804
686 def send_event(self, event): 805 def send_event(self, event):
687 self.queue_event(pickle.loads(event)) 806 self.queue_event(pickle.loads(event))
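The getEvent()/queue_event() rewrite above swaps manual acquire()/release() pairs for a context manager, which releases the lock on every exit path, including the early return that previously needed its own release() call. The resulting shape:

    import threading

    _lock = threading.Lock()
    _queue = []

    def get_event():
        with _lock:              # released on both return paths below
            if not _queue:
                return None
            return _queue.pop(0)
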
@@ -690,13 +809,17 @@ class BBUIEventQueue:
690 bb.utils.set_process_name("UIEventQueue") 809 bb.utils.set_process_name("UIEventQueue")
691 while True: 810 while True:
692 try: 811 try:
693 self.reader.wait() 812 ready = self.reader.wait(0.25)
694 event = self.reader.get() 813 if ready:
695 self.queue_event(event) 814 event = self.reader.get()
696 except EOFError: 815 self.queue_event(event)
816 except (EOFError, OSError, TypeError):
697 # Easiest way to exit is to close the file descriptor to cause an exit 817 # Easiest way to exit is to close the file descriptor to cause an exit
698 break 818 break
819
820 def close(self):
699 self.reader.close() 821 self.reader.close()
822 self.t.join()
700 823
701class ConnectionReader(object): 824class ConnectionReader(object):
702 825
@@ -711,7 +834,7 @@ class ConnectionReader(object):
711 return self.reader.poll(timeout) 834 return self.reader.poll(timeout)
712 835
713 def get(self): 836 def get(self):
714 with self.rlock: 837 with bb.utils.lock_timeout(self.rlock):
715 res = self.reader.recv_bytes() 838 res = self.reader.recv_bytes()
716 return multiprocessing.reduction.ForkingPickler.loads(res) 839 return multiprocessing.reduction.ForkingPickler.loads(res)
717 840
@@ -730,10 +853,31 @@ class ConnectionWriter(object):
730 # Why bb.event needs this I have no idea 853 # Why bb.event needs this I have no idea
731 self.event = self 854 self.event = self
732 855
856 def _send(self, obj):
857 gc.disable()
858 with bb.utils.lock_timeout(self.wlock):
859 self.writer.send_bytes(obj)
860 gc.enable()
861
733 def send(self, obj): 862 def send(self, obj):
734 obj = multiprocessing.reduction.ForkingPickler.dumps(obj) 863 obj = multiprocessing.reduction.ForkingPickler.dumps(obj)
735 with self.wlock: 864 # See notes/code in CookerParser
736 self.writer.send_bytes(obj) 865 # We must not terminate holding this lock else processes will hang.
866 # For SIGTERM, raising afterwards avoids this.
867 # For SIGINT, we don't want to have written partial data to the pipe.
868 # pthread_sigmask block/unblock would be nice but doesn't work, https://bugs.python.org/issue47139
869 process = multiprocessing.current_process()
870 if process and hasattr(process, "queue_signals"):
871 with bb.utils.lock_timeout(process.signal_threadlock):
872 process.queue_signals = True
873 self._send(obj)
874 process.queue_signals = False
875
876 while len(process.signal_received) > 0:
877 sig = process.signal_received.pop()
878 process.handle_sig(sig, None)
879 else:
880 self._send(obj)
737 881
738 def fileno(self): 882 def fileno(self):
739 return self.writer.fileno() 883 return self.writer.fileno()
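The _send() helper above disables the garbage collector around the pipe write, since a collection pass in the middle of send_bytes() can stall the server. A sketch of the same idea, hardened with try/finally (the patch itself re-enables unconditionally after the send):

    import gc

    def send_bytes_nogc(writer, payload):
        gc.disable()
        try:
            writer.send_bytes(payload)
        finally:
            gc.enable()   # never leave GC off if the write raises
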
diff --git a/bitbake/lib/bb/server/xmlrpcserver.py b/bitbake/lib/bb/server/xmlrpcserver.py
index 2fa71be667..ebc271aca4 100644
--- a/bitbake/lib/bb/server/xmlrpcserver.py
+++ b/bitbake/lib/bb/server/xmlrpcserver.py
@@ -11,8 +11,11 @@ import hashlib
11import time 11import time
12import inspect 12import inspect
13from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler 13from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
14import bb.server.xmlrpcclient
14 15
15import bb 16import bb
17import bb.cooker
18import bb.event
16 19
17# This request handler checks if the request has a "Bitbake-token" header 20# This request handler checks if the request has a "Bitbake-token" header
18# field (this comes from the client side) and compares it with its internal 21# field (this comes from the client side) and compares it with its internal
@@ -53,7 +56,7 @@ class BitBakeXMLRPCServer(SimpleXMLRPCServer):
53 56
54 def __init__(self, interface, cooker, parent): 57 def __init__(self, interface, cooker, parent):
55 # Use auto port configuration 58 # Use auto port configuration
56 if (interface[1] == -1): 59 if interface[1] == -1:
57 interface = (interface[0], 0) 60 interface = (interface[0], 0)
58 SimpleXMLRPCServer.__init__(self, interface, 61 SimpleXMLRPCServer.__init__(self, interface,
59 requestHandler=BitBakeXMLRPCRequestHandler, 62 requestHandler=BitBakeXMLRPCRequestHandler,
@@ -86,11 +89,12 @@ class BitBakeXMLRPCServer(SimpleXMLRPCServer):
86 def handle_requests(self): 89 def handle_requests(self):
87 self._handle_request_noblock() 90 self._handle_request_noblock()
88 91
89class BitBakeXMLRPCServerCommands(): 92class BitBakeXMLRPCServerCommands:
90 93
91 def __init__(self, server): 94 def __init__(self, server):
92 self.server = server 95 self.server = server
93 self.has_client = False 96 self.has_client = False
97 self.event_handle = None
94 98
95 def registerEventHandler(self, host, port): 99 def registerEventHandler(self, host, port):
96 """ 100 """
@@ -99,8 +103,8 @@ class BitBakeXMLRPCServerCommands():
99 s, t = bb.server.xmlrpcclient._create_server(host, port) 103 s, t = bb.server.xmlrpcclient._create_server(host, port)
100 104
101 # we don't allow connections if the cooker is running 105 # we don't allow connections if the cooker is running
102 if (self.server.cooker.state in [bb.cooker.state.parsing, bb.cooker.state.running]): 106 if self.server.cooker.state in [bb.cooker.State.PARSING, bb.cooker.State.RUNNING]:
103 return None, "Cooker is busy: %s" % bb.cooker.state.get_name(self.server.cooker.state) 107 return None, f"Cooker is busy: {self.server.cooker.state.name}"
104 108
105 self.event_handle = bb.event.register_UIHhandler(s, True) 109 self.event_handle = bb.event.register_UIHhandler(s, True)
106 return self.event_handle, 'OK' 110 return self.event_handle, 'OK'
@@ -117,7 +121,7 @@ class BitBakeXMLRPCServerCommands():
117 """ 121 """
118 Run a cooker command on the server 122 Run a cooker command on the server
119 """ 123 """
120 return self.server.cooker.command.runCommand(command, self.server.readonly) 124 return self.server.cooker.command.runCommand(command, self.server.parent, self.server.readonly)
121 125
122 def getEventHandle(self): 126 def getEventHandle(self):
123 return self.event_handle 127 return self.event_handle
diff --git a/bitbake/lib/bb/siggen.py b/bitbake/lib/bb/siggen.py
index 0d88c6ec68..a6163b55ea 100644
--- a/bitbake/lib/bb/siggen.py
+++ b/bitbake/lib/bb/siggen.py
@@ -1,4 +1,6 @@
1# 1#
2# Copyright BitBake Contributors
3#
2# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
3# 5#
4 6
@@ -11,6 +13,10 @@ import pickle
11import bb.data 13import bb.data
12import difflib 14import difflib
13import simplediff 15import simplediff
16import json
17import types
18from contextlib import contextmanager
19import bb.compress.zstd
14from bb.checksum import FileChecksumCache 20from bb.checksum import FileChecksumCache
15from bb import runqueue 21from bb import runqueue
16import hashserv 22import hashserv
@@ -19,6 +25,35 @@ import hashserv.client
19logger = logging.getLogger('BitBake.SigGen') 25logger = logging.getLogger('BitBake.SigGen')
20hashequiv_logger = logging.getLogger('BitBake.SigGen.HashEquiv') 26hashequiv_logger = logging.getLogger('BitBake.SigGen.HashEquiv')
21 27
 28# find_siginfo and find_siginfo_version are set by the metadata siggen
29# The minimum version of the find_siginfo function we need
30find_siginfo_minversion = 2
31
32HASHSERV_ENVVARS = [
33 "SSL_CERT_DIR",
34 "SSL_CERT_FILE",
35 "NO_PROXY",
36 "HTTPS_PROXY",
37 "HTTP_PROXY"
38]
39
40def check_siggen_version(siggen):
41 if not hasattr(siggen, "find_siginfo_version"):
42 bb.fatal("Siggen from metadata (OE-Core?) is too old, please update it (no version found)")
43 if siggen.find_siginfo_version < siggen.find_siginfo_minversion:
44 bb.fatal("Siggen from metadata (OE-Core?) is too old, please update it (%s vs %s)" % (siggen.find_siginfo_version, siggen.find_siginfo_minversion))
45
46class SetEncoder(json.JSONEncoder):
47 def default(self, obj):
48 if isinstance(obj, set) or isinstance(obj, frozenset):
49 return dict(_set_object=list(sorted(obj)))
50 return json.JSONEncoder.default(self, obj)
51
52def SetDecoder(dct):
53 if '_set_object' in dct:
54 return frozenset(dct['_set_object'])
55 return dct
56
22def init(d): 57def init(d):
23 siggens = [obj for obj in globals().values() 58 siggens = [obj for obj in globals().values()
24 if type(obj) is type and issubclass(obj, SignatureGenerator)] 59 if type(obj) is type and issubclass(obj, SignatureGenerator)]
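SetEncoder/SetDecoder give sets a stable JSON form: a set serializes as {"_set_object": [...sorted...]} and deserializes as a frozenset. A round-trip demo (definitions repeated so the snippet runs standalone):

    import json

    class SetEncoder(json.JSONEncoder):
        def default(self, obj):
            if isinstance(obj, (set, frozenset)):
                return dict(_set_object=sorted(obj))
            return json.JSONEncoder.default(self, obj)

    def SetDecoder(dct):
        if '_set_object' in dct:
            return frozenset(dct['_set_object'])
        return dct

    text = json.dumps({"deps": {"b", "a"}}, cls=SetEncoder)
    assert json.loads(text, object_hook=SetDecoder) == {"deps": frozenset({"a", "b"})}
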
@@ -27,7 +62,6 @@ def init(d):
27 for sg in siggens: 62 for sg in siggens:
28 if desired == sg.name: 63 if desired == sg.name:
29 return sg(d) 64 return sg(d)
30 break
31 else: 65 else:
32 logger.error("Invalid signature generator '%s', using default 'noop'\n" 66 logger.error("Invalid signature generator '%s', using default 'noop'\n"
33 "Available generators: %s", desired, 67 "Available generators: %s", desired,
@@ -39,11 +73,6 @@ class SignatureGenerator(object):
39 """ 73 """
40 name = "noop" 74 name = "noop"
41 75
42 # If the derived class supports multiconfig datacaches, set this to True
43 # The default is False for backward compatibility with derived signature
44 # generators that do not understand multiconfig caches
45 supports_multiconfig_datacaches = False
46
47 def __init__(self, data): 76 def __init__(self, data):
48 self.basehash = {} 77 self.basehash = {}
49 self.taskhash = {} 78 self.taskhash = {}
@@ -61,9 +90,39 @@ class SignatureGenerator(object):
61 def postparsing_clean_cache(self): 90 def postparsing_clean_cache(self):
62 return 91 return
63 92
93 def setup_datacache(self, datacaches):
94 self.datacaches = datacaches
95
96 def setup_datacache_from_datastore(self, mcfn, d):
97 # In task context we have no cache so setup internal data structures
98 # from the fully parsed data store provided
99
100 mc = d.getVar("__BBMULTICONFIG", False) or ""
101 tasks = d.getVar('__BBTASKS', False)
102
103 self.datacaches = {}
104 self.datacaches[mc] = types.SimpleNamespace()
105 setattr(self.datacaches[mc], "stamp", {})
106 self.datacaches[mc].stamp[mcfn] = d.getVar('STAMP')
107 setattr(self.datacaches[mc], "stamp_extrainfo", {})
108 self.datacaches[mc].stamp_extrainfo[mcfn] = {}
109 for t in tasks:
110 flag = d.getVarFlag(t, "stamp-extra-info")
111 if flag:
112 self.datacaches[mc].stamp_extrainfo[mcfn][t] = flag
113
114 def get_cached_unihash(self, tid):
115 return None
116
64 def get_unihash(self, tid): 117 def get_unihash(self, tid):
118 unihash = self.get_cached_unihash(tid)
119 if unihash:
120 return unihash
65 return self.taskhash[tid] 121 return self.taskhash[tid]
66 122
123 def get_unihashes(self, tids):
124 return {tid: self.get_unihash(tid) for tid in tids}
125
67 def prep_taskhash(self, tid, deps, dataCaches): 126 def prep_taskhash(self, tid, deps, dataCaches):
68 return 127 return
69 128
@@ -75,17 +134,51 @@ class SignatureGenerator(object):
75 """Write/update the file checksum cache onto disk""" 134 """Write/update the file checksum cache onto disk"""
76 return 135 return
77 136
137 def stampfile_base(self, mcfn):
138 mc = bb.runqueue.mc_from_tid(mcfn)
139 return self.datacaches[mc].stamp[mcfn]
140
141 def stampfile_mcfn(self, taskname, mcfn, extrainfo=True):
142 mc = bb.runqueue.mc_from_tid(mcfn)
143 stamp = self.datacaches[mc].stamp[mcfn]
144 if not stamp:
145 return
146
147 stamp_extrainfo = ""
148 if extrainfo:
149 taskflagname = taskname
150 if taskname.endswith("_setscene"):
151 taskflagname = taskname.replace("_setscene", "")
152 stamp_extrainfo = self.datacaches[mc].stamp_extrainfo[mcfn].get(taskflagname) or ""
153
154 return self.stampfile(stamp, mcfn, taskname, stamp_extrainfo)
155
78 def stampfile(self, stampbase, file_name, taskname, extrainfo): 156 def stampfile(self, stampbase, file_name, taskname, extrainfo):
79 return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.') 157 return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')
80 158
159 def stampcleanmask_mcfn(self, taskname, mcfn):
160 mc = bb.runqueue.mc_from_tid(mcfn)
161 stamp = self.datacaches[mc].stamp[mcfn]
162 if not stamp:
163 return []
164
165 taskflagname = taskname
166 if taskname.endswith("_setscene"):
167 taskflagname = taskname.replace("_setscene", "")
168 stamp_extrainfo = self.datacaches[mc].stamp_extrainfo[mcfn].get(taskflagname) or ""
169
170 return self.stampcleanmask(stamp, mcfn, taskname, stamp_extrainfo)
171
81 def stampcleanmask(self, stampbase, file_name, taskname, extrainfo): 172 def stampcleanmask(self, stampbase, file_name, taskname, extrainfo):
82 return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.') 173 return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')
83 174
84 def dump_sigtask(self, fn, task, stampbase, runtime): 175 def dump_sigtask(self, mcfn, task, stampbase, runtime):
85 return 176 return
86 177
87 def invalidate_task(self, task, d, fn): 178 def invalidate_task(self, task, mcfn):
88 bb.build.del_stamp(task, d, fn) 179 mc = bb.runqueue.mc_from_tid(mcfn)
180 stamp = self.datacaches[mc].stamp[mcfn]
181 bb.utils.remove(stamp)
89 182
90 def dump_sigs(self, dataCache, options): 183 def dump_sigs(self, dataCache, options):
91 return 184 return
@@ -111,37 +204,13 @@ class SignatureGenerator(object):
111 def set_setscene_tasks(self, setscene_tasks): 204 def set_setscene_tasks(self, setscene_tasks):
112 return 205 return
113 206
114 @classmethod 207 def exit(self):
115 def get_data_caches(cls, dataCaches, mc): 208 return
116 """
117 This function returns the datacaches that should be passed to signature
118 generator functions. If the signature generator supports multiconfig
119 caches, the entire dictionary of data caches is sent, otherwise a
120 special proxy is sent that support both index access to all
121 multiconfigs, and also direct access for the default multiconfig.
122
123 The proxy class allows code in this class itself to always use
124 multiconfig aware code (to ease maintenance), but derived classes that
125 are unaware of multiconfig data caches can still access the default
126 multiconfig as expected.
127
128 Do not override this function in derived classes; it will be removed in
129 the future when support for multiconfig data caches is mandatory
130 """
131 class DataCacheProxy(object):
132 def __init__(self):
133 pass
134
135 def __getitem__(self, key):
136 return dataCaches[key]
137
138 def __getattr__(self, name):
139 return getattr(dataCaches[mc], name)
140
141 if cls.supports_multiconfig_datacaches:
142 return dataCaches
143 209
144 return DataCacheProxy() 210def build_pnid(mc, pn, taskname):
211 if mc:
212 return "mc:" + mc + ":" + pn + ":" + taskname
213 return pn + ":" + taskname
145 214
146class SignatureGeneratorBasic(SignatureGenerator): 215class SignatureGeneratorBasic(SignatureGenerator):
147 """ 216 """
@@ -152,15 +221,12 @@ class SignatureGeneratorBasic(SignatureGenerator):
152 self.basehash = {} 221 self.basehash = {}
153 self.taskhash = {} 222 self.taskhash = {}
154 self.unihash = {} 223 self.unihash = {}
155 self.taskdeps = {}
156 self.runtaskdeps = {} 224 self.runtaskdeps = {}
157 self.file_checksum_values = {} 225 self.file_checksum_values = {}
158 self.taints = {} 226 self.taints = {}
159 self.gendeps = {}
160 self.lookupcache = {}
161 self.setscenetasks = set() 227 self.setscenetasks = set()
162 self.basewhitelist = set((data.getVar("BB_HASHBASE_WHITELIST") or "").split()) 228 self.basehash_ignore_vars = set((data.getVar("BB_BASEHASH_IGNORE_VARS") or "").split())
163 self.taskwhitelist = None 229 self.taskhash_ignore_tasks = None
164 self.init_rundepcheck(data) 230 self.init_rundepcheck(data)
165 checksum_cache_file = data.getVar("BB_HASH_CHECKSUM_CACHE_FILE") 231 checksum_cache_file = data.getVar("BB_HASH_CHECKSUM_CACHE_FILE")
166 if checksum_cache_file: 232 if checksum_cache_file:
@@ -175,21 +241,21 @@ class SignatureGeneratorBasic(SignatureGenerator):
175 self.tidtopn = {} 241 self.tidtopn = {}
176 242
177 def init_rundepcheck(self, data): 243 def init_rundepcheck(self, data):
178 self.taskwhitelist = data.getVar("BB_HASHTASK_WHITELIST") or None 244 self.taskhash_ignore_tasks = data.getVar("BB_TASKHASH_IGNORE_TASKS") or None
179 if self.taskwhitelist: 245 if self.taskhash_ignore_tasks:
180 self.twl = re.compile(self.taskwhitelist) 246 self.twl = re.compile(self.taskhash_ignore_tasks)
181 else: 247 else:
182 self.twl = None 248 self.twl = None
183 249
184 def _build_data(self, fn, d): 250 def _build_data(self, mcfn, d):
185 251
186 ignore_mismatch = ((d.getVar("BB_HASH_IGNORE_MISMATCH") or '') == '1') 252 ignore_mismatch = ((d.getVar("BB_HASH_IGNORE_MISMATCH") or '') == '1')
187 tasklist, gendeps, lookupcache = bb.data.generate_dependencies(d, self.basewhitelist) 253 tasklist, gendeps, lookupcache = bb.data.generate_dependencies(d, self.basehash_ignore_vars)
188 254
189 taskdeps, basehash = bb.data.generate_dependency_hash(tasklist, gendeps, lookupcache, self.basewhitelist, fn) 255 taskdeps, basehash = bb.data.generate_dependency_hash(tasklist, gendeps, lookupcache, self.basehash_ignore_vars, mcfn)
190 256
191 for task in tasklist: 257 for task in tasklist:
192 tid = fn + ":" + task 258 tid = mcfn + ":" + task
193 if not ignore_mismatch and tid in self.basehash and self.basehash[tid] != basehash[tid]: 259 if not ignore_mismatch and tid in self.basehash and self.basehash[tid] != basehash[tid]:
194 bb.error("When reparsing %s, the basehash value changed from %s to %s. The metadata is not deterministic and this needs to be fixed." % (tid, self.basehash[tid], basehash[tid])) 260 bb.error("When reparsing %s, the basehash value changed from %s to %s. The metadata is not deterministic and this needs to be fixed." % (tid, self.basehash[tid], basehash[tid]))
195 bb.error("The following commands may help:") 261 bb.error("The following commands may help:")
@@ -200,11 +266,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
200 bb.error("%s -Sprintdiff\n" % cmd) 266 bb.error("%s -Sprintdiff\n" % cmd)
201 self.basehash[tid] = basehash[tid] 267 self.basehash[tid] = basehash[tid]
202 268
203 self.taskdeps[fn] = taskdeps 269 return taskdeps, gendeps, lookupcache
204 self.gendeps[fn] = gendeps
205 self.lookupcache[fn] = lookupcache
206
207 return taskdeps
208 270
209 def set_setscene_tasks(self, setscene_tasks): 271 def set_setscene_tasks(self, setscene_tasks):
210 self.setscenetasks = set(setscene_tasks) 272 self.setscenetasks = set(setscene_tasks)
@@ -212,35 +274,47 @@ class SignatureGeneratorBasic(SignatureGenerator):
212 def finalise(self, fn, d, variant): 274 def finalise(self, fn, d, variant):
213 275
214 mc = d.getVar("__BBMULTICONFIG", False) or "" 276 mc = d.getVar("__BBMULTICONFIG", False) or ""
277 mcfn = fn
215 if variant or mc: 278 if variant or mc:
216 fn = bb.cache.realfn2virtual(fn, variant, mc) 279 mcfn = bb.cache.realfn2virtual(fn, variant, mc)
217 280
218 try: 281 try:
219 taskdeps = self._build_data(fn, d) 282 taskdeps, gendeps, lookupcache = self._build_data(mcfn, d)
220 except bb.parse.SkipRecipe: 283 except bb.parse.SkipRecipe:
221 raise 284 raise
222 except: 285 except:
223 bb.warn("Error during finalise of %s" % fn) 286 bb.warn("Error during finalise of %s" % mcfn)
224 raise 287 raise
225 288
289 basehashes = {}
290 for task in taskdeps:
291 basehashes[task] = self.basehash[mcfn + ":" + task]
292
293 d.setVar("__siggen_basehashes", basehashes)
294 d.setVar("__siggen_gendeps", gendeps)
295 d.setVar("__siggen_varvals", lookupcache)
296 d.setVar("__siggen_taskdeps", taskdeps)
297
226 #Slow but can be useful for debugging mismatched basehashes 298 #Slow but can be useful for debugging mismatched basehashes
227 #for task in self.taskdeps[fn]: 299 #self.setup_datacache_from_datastore(mcfn, d)
228 # self.dump_sigtask(fn, task, d.getVar("STAMP"), False) 300 #for task in taskdeps:
301 # self.dump_sigtask(mcfn, task, d.getVar("STAMP"), False)
229 302
230 for task in taskdeps: 303 def setup_datacache_from_datastore(self, mcfn, d):
231 d.setVar("BB_BASEHASH_task-%s" % task, self.basehash[fn + ":" + task]) 304 super().setup_datacache_from_datastore(mcfn, d)
232 305
233 def postparsing_clean_cache(self): 306 mc = bb.runqueue.mc_from_tid(mcfn)
234 # 307 for attr in ["siggen_varvals", "siggen_taskdeps", "siggen_gendeps"]:
235 # After parsing we can remove some things from memory to reduce our memory footprint 308 if not hasattr(self.datacaches[mc], attr):
236 # 309 setattr(self.datacaches[mc], attr, {})
237 self.gendeps = {} 310 self.datacaches[mc].siggen_varvals[mcfn] = d.getVar("__siggen_varvals")
238 self.lookupcache = {} 311 self.datacaches[mc].siggen_taskdeps[mcfn] = d.getVar("__siggen_taskdeps")
239 self.taskdeps = {} 312 self.datacaches[mc].siggen_gendeps[mcfn] = d.getVar("__siggen_gendeps")
240 313
241 def rundep_check(self, fn, recipename, task, dep, depname, dataCaches): 314 def rundep_check(self, fn, recipename, task, dep, depname, dataCaches):
242 # Return True if we should keep the dependency, False to drop it 315 # Return True if we should keep the dependency, False to drop it
243 # We only manipulate the dependencies for packages not in the whitelist 316 # We only manipulate the dependencies for packages not in the ignore
317 # list
244 if self.twl and not self.twl.search(recipename): 318 if self.twl and not self.twl.search(recipename):
245 # then process the actual dependencies 319 # then process the actual dependencies
246 if self.twl.search(depname): 320 if self.twl.search(depname):
@@ -258,38 +332,37 @@ class SignatureGeneratorBasic(SignatureGenerator):
258 332
259 def prep_taskhash(self, tid, deps, dataCaches): 333 def prep_taskhash(self, tid, deps, dataCaches):
260 334
261 (mc, _, task, fn) = bb.runqueue.split_tid_mcfn(tid) 335 (mc, _, task, mcfn) = bb.runqueue.split_tid_mcfn(tid)
262 336
263 self.basehash[tid] = dataCaches[mc].basetaskhash[tid] 337 self.basehash[tid] = dataCaches[mc].basetaskhash[tid]
264 self.runtaskdeps[tid] = [] 338 self.runtaskdeps[tid] = []
265 self.file_checksum_values[tid] = [] 339 self.file_checksum_values[tid] = []
266 recipename = dataCaches[mc].pkg_fn[fn] 340 recipename = dataCaches[mc].pkg_fn[mcfn]
267 341
268 self.tidtopn[tid] = recipename 342 self.tidtopn[tid] = recipename
343 # save hashfn for deps into siginfo?
344 for dep in deps:
345 (depmc, _, deptask, depmcfn) = bb.runqueue.split_tid_mcfn(dep)
346 dep_pn = dataCaches[depmc].pkg_fn[depmcfn]
269 347
270 for dep in sorted(deps, key=clean_basepath): 348 if not self.rundep_check(mcfn, recipename, task, dep, dep_pn, dataCaches):
271 (depmc, _, _, depmcfn) = bb.runqueue.split_tid_mcfn(dep)
272 depname = dataCaches[depmc].pkg_fn[depmcfn]
273 if not self.supports_multiconfig_datacaches and mc != depmc:
274 # If the signature generator doesn't understand multiconfig
275 # data caches, any dependency not in the same multiconfig must
276 # be skipped for backward compatibility
277 continue
278 if not self.rundep_check(fn, recipename, task, dep, depname, dataCaches):
279 continue 349 continue
350
280 if dep not in self.taskhash: 351 if dep not in self.taskhash:
281 bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?" % dep) 352 bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?" % dep)
282 self.runtaskdeps[tid].append(dep)
283 353
284 if task in dataCaches[mc].file_checksums[fn]: 354 dep_pnid = build_pnid(depmc, dep_pn, deptask)
355 self.runtaskdeps[tid].append((dep_pnid, dep))
356
357 if task in dataCaches[mc].file_checksums[mcfn]:
285 if self.checksum_cache: 358 if self.checksum_cache:
286 checksums = self.checksum_cache.get_checksums(dataCaches[mc].file_checksums[fn][task], recipename, self.localdirsexclude) 359 checksums = self.checksum_cache.get_checksums(dataCaches[mc].file_checksums[mcfn][task], recipename, self.localdirsexclude)
287 else: 360 else:
288 checksums = bb.fetch2.get_file_checksums(dataCaches[mc].file_checksums[fn][task], recipename, self.localdirsexclude) 361 checksums = bb.fetch2.get_file_checksums(dataCaches[mc].file_checksums[mcfn][task], recipename, self.localdirsexclude)
289 for (f,cs) in checksums: 362 for (f,cs) in checksums:
290 self.file_checksum_values[tid].append((f,cs)) 363 self.file_checksum_values[tid].append((f,cs))
291 364
292 taskdep = dataCaches[mc].task_deps[fn] 365 taskdep = dataCaches[mc].task_deps[mcfn]
293 if 'nostamp' in taskdep and task in taskdep['nostamp']: 366 if 'nostamp' in taskdep and task in taskdep['nostamp']:
294 # Nostamp tasks need an implicit taint so that they force any dependent tasks to run 367 # Nostamp tasks need an implicit taint so that they force any dependent tasks to run
295 if tid in self.taints and self.taints[tid].startswith("nostamp:"): 368 if tid in self.taints and self.taints[tid].startswith("nostamp:"):
@@ -300,32 +373,34 @@ class SignatureGeneratorBasic(SignatureGenerator):
                 taint = str(uuid.uuid4())
                 self.taints[tid] = "nostamp:" + taint
 
-        taint = self.read_taint(fn, task, dataCaches[mc].stamp[fn])
+        taint = self.read_taint(mcfn, task, dataCaches[mc].stamp[mcfn])
         if taint:
             self.taints[tid] = taint
             logger.warning("%s is tainted from a forced run" % tid)
 
-        return
+        return set(dep for _, dep in self.runtaskdeps[tid])
 
     def get_taskhash(self, tid, deps, dataCaches):
 
         data = self.basehash[tid]
-        for dep in self.runtaskdeps[tid]:
-            data = data + self.get_unihash(dep)
+        for dep in sorted(self.runtaskdeps[tid]):
+            data += self.get_unihash(dep[1])
 
-        for (f, cs) in self.file_checksum_values[tid]:
+        for (f, cs) in sorted(self.file_checksum_values[tid], key=clean_checksum_file_path):
             if cs:
-                data = data + cs
+                if "/./" in f:
+                    data += "./" + f.split("/./")[1]
+                data += cs
 
         if tid in self.taints:
             if self.taints[tid].startswith("nostamp:"):
-                data = data + self.taints[tid][8:]
+                data += self.taints[tid][8:]
             else:
-                data = data + self.taints[tid]
+                data += self.taints[tid]
 
         h = hashlib.sha256(data.encode("utf-8")).hexdigest()
         self.taskhash[tid] = h
-        #d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task])
+        #d.setVar("BB_TASKHASH:task-%s" % task, taskhash[task])
         return h
 
     def writeout_file_checksum_cache(self):
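
Aside: the reworked get_taskhash() above boils down to hashing a deterministic concatenation of its inputs. A minimal sketch of that composition (plain lists stand in for BitBake's real structures, and the checksum sort key is simplified):

    import hashlib

    def taskhash_sketch(basehash, dep_unihashes, file_checksums, taint=None):
        # dep_unihashes: list of (pnid, unihash); file_checksums: list of (path, checksum)
        data = basehash
        for _, unihash in sorted(dep_unihashes):
            data += unihash
        for f, cs in sorted(file_checksums):
            if cs:
                # only the reproducible "/./"-relative tail of the path feeds the hash
                if "/./" in f:
                    data += "./" + f.split("/./")[1]
                data += cs
        if taint:
            data += taint[8:] if taint.startswith("nostamp:") else taint
        return hashlib.sha256(data.encode("utf-8")).hexdigest()

Sorting the dependency and checksum lists is what makes the resulting hash independent of discovery order.
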
@@ -340,9 +415,9 @@ class SignatureGeneratorBasic(SignatureGenerator):
     def save_unitaskhashes(self):
         self.unihash_cache.save(self.unitaskhashes)
 
-    def dump_sigtask(self, fn, task, stampbase, runtime):
-
-        tid = fn + ":" + task
+    def dump_sigtask(self, mcfn, task, stampbase, runtime):
+        tid = mcfn + ":" + task
+        mc = bb.runqueue.mc_from_tid(mcfn)
         referencestamp = stampbase
         if isinstance(runtime, str) and runtime.startswith("customfile"):
             sigfile = stampbase
@@ -357,29 +432,34 @@ class SignatureGeneratorBasic(SignatureGenerator):
 
         data = {}
         data['task'] = task
-        data['basewhitelist'] = self.basewhitelist
-        data['taskwhitelist'] = self.taskwhitelist
-        data['taskdeps'] = self.taskdeps[fn][task]
+        data['basehash_ignore_vars'] = self.basehash_ignore_vars
+        data['taskhash_ignore_tasks'] = self.taskhash_ignore_tasks
+        data['taskdeps'] = self.datacaches[mc].siggen_taskdeps[mcfn][task]
         data['basehash'] = self.basehash[tid]
         data['gendeps'] = {}
         data['varvals'] = {}
-        data['varvals'][task] = self.lookupcache[fn][task]
-        for dep in self.taskdeps[fn][task]:
-            if dep in self.basewhitelist:
+        data['varvals'][task] = self.datacaches[mc].siggen_varvals[mcfn][task]
+        for dep in self.datacaches[mc].siggen_taskdeps[mcfn][task]:
+            if dep in self.basehash_ignore_vars:
                 continue
-            data['gendeps'][dep] = self.gendeps[fn][dep]
-            data['varvals'][dep] = self.lookupcache[fn][dep]
+            data['gendeps'][dep] = self.datacaches[mc].siggen_gendeps[mcfn][dep]
+            data['varvals'][dep] = self.datacaches[mc].siggen_varvals[mcfn][dep]
 
         if runtime and tid in self.taskhash:
-            data['runtaskdeps'] = self.runtaskdeps[tid]
-            data['file_checksum_values'] = [(os.path.basename(f), cs) for f,cs in self.file_checksum_values[tid]]
+            data['runtaskdeps'] = [dep[0] for dep in sorted(self.runtaskdeps[tid])]
+            data['file_checksum_values'] = []
+            for f,cs in sorted(self.file_checksum_values[tid], key=clean_checksum_file_path):
+                if "/./" in f:
+                    data['file_checksum_values'].append(("./" + f.split("/./")[1], cs))
+                else:
+                    data['file_checksum_values'].append((os.path.basename(f), cs))
             data['runtaskhashes'] = {}
-            for dep in data['runtaskdeps']:
-                data['runtaskhashes'][dep] = self.get_unihash(dep)
+            for dep in self.runtaskdeps[tid]:
+                data['runtaskhashes'][dep[0]] = self.get_unihash(dep[1])
             data['taskhash'] = self.taskhash[tid]
             data['unihash'] = self.get_unihash(tid)
 
-        taint = self.read_taint(fn, task, referencestamp)
+        taint = self.read_taint(mcfn, task, referencestamp)
         if taint:
             data['taint'] = taint
 
@@ -396,13 +476,13 @@ class SignatureGeneratorBasic(SignatureGenerator):
             bb.error("Taskhash mismatch %s versus %s for %s" % (computed_taskhash, self.taskhash[tid], tid))
             sigfile = sigfile.replace(self.taskhash[tid], computed_taskhash)
 
-        fd, tmpfile = tempfile.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.")
+        fd, tmpfile = bb.utils.mkstemp(dir=os.path.dirname(sigfile), prefix="sigtask.")
         try:
-            with os.fdopen(fd, "wb") as stream:
-                p = pickle.dump(data, stream, -1)
-                stream.flush()
+            with bb.compress.zstd.open(fd, "wt", encoding="utf-8", num_threads=1) as f:
+                json.dump(data, f, sort_keys=True, separators=(",", ":"), cls=SetEncoder)
+                f.flush()
             os.chmod(tmpfile, 0o664)
-            os.rename(tmpfile, sigfile)
+            bb.utils.rename(tmpfile, sigfile)
         except (OSError, IOError) as err:
             try:
                 os.unlink(tmpfile)
@@ -410,18 +490,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
                 pass
             raise err
 
-    def dump_sigfn(self, fn, dataCaches, options):
-        if fn in self.taskdeps:
-            for task in self.taskdeps[fn]:
-                tid = fn + ":" + task
-                mc = bb.runqueue.mc_from_tid(tid)
-                if tid not in self.taskhash:
-                    continue
-                if dataCaches[mc].basetaskhash[tid] != self.basehash[tid]:
-                    bb.error("Bitbake's cached basehash does not match the one we just generated (%s)!" % tid)
-                    bb.error("The mismatched hashes were %s and %s" % (dataCaches[mc].basetaskhash[tid], self.basehash[tid]))
-                self.dump_sigtask(fn, task, dataCaches[mc].stamp[fn], True)
-
 class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
     name = "basichash"
 
@@ -432,11 +500,11 @@ class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
         # If task is not in basehash, then error
         return self.basehash[tid]
 
-    def stampfile(self, stampbase, fn, taskname, extrainfo, clean=False):
-        if taskname != "do_setscene" and taskname.endswith("_setscene"):
-            tid = fn + ":" + taskname[:-9]
+    def stampfile(self, stampbase, mcfn, taskname, extrainfo, clean=False):
+        if taskname.endswith("_setscene"):
+            tid = mcfn + ":" + taskname[:-9]
         else:
-            tid = fn + ":" + taskname
+            tid = mcfn + ":" + taskname
         if clean:
             h = "*"
         else:
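
Aside: stampfile() composes stamp names as stampbase.taskname.hash.extrainfo and strips trailing dots; a quick illustration with hypothetical values:

    stampbase = "tmp/stamps/example/1.0-r0"
    name = ("%s.%s.%s.%s" % (stampbase, "do_compile", "1a2b3c", "")).rstrip('.')
    # -> "tmp/stamps/example/1.0-r0.do_compile.1a2b3c"

With clean=True the hash is replaced by "*", yielding a glob that matches every hash variant of the stamp (see stampcleanmask() in the next hunk).
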
@@ -444,29 +512,99 @@ class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
 
         return ("%s.%s.%s.%s" % (stampbase, taskname, h, extrainfo)).rstrip('.')
 
-    def stampcleanmask(self, stampbase, fn, taskname, extrainfo):
-        return self.stampfile(stampbase, fn, taskname, extrainfo, clean=True)
+    def stampcleanmask(self, stampbase, mcfn, taskname, extrainfo):
+        return self.stampfile(stampbase, mcfn, taskname, extrainfo, clean=True)
 
-    def invalidate_task(self, task, d, fn):
-        bb.note("Tainting hash to force rebuild of task %s, %s" % (fn, task))
-        bb.build.write_taint(task, d, fn)
+    def invalidate_task(self, task, mcfn):
+        bb.note("Tainting hash to force rebuild of task %s, %s" % (mcfn, task))
+
+        mc = bb.runqueue.mc_from_tid(mcfn)
+        stamp = self.datacaches[mc].stamp[mcfn]
+
+        taintfn = stamp + '.' + task + '.taint'
+
+        import uuid
+        bb.utils.mkdirhier(os.path.dirname(taintfn))
+        # The specific content of the taint file is not really important,
+        # we just need it to be random, so a random UUID is used
+        with open(taintfn, 'w') as taintf:
+            taintf.write(str(uuid.uuid4()))
 
 class SignatureGeneratorUniHashMixIn(object):
     def __init__(self, data):
         self.extramethod = {}
+        # NOTE: The cache only tracks hashes that exist. Hashes that don't
+        # exist are always queried from the server since it is possible for
+        # hashes to appear over time, but much less likely for them to
+        # disappear
+        self.unihash_exists_cache = set()
+        self.username = None
+        self.password = None
+        self.env = {}
+
+        origenv = data.getVar("BB_ORIGENV")
+        for e in HASHSERV_ENVVARS:
+            value = data.getVar(e)
+            if not value and origenv:
+                value = origenv.getVar(e)
+            if value:
+                self.env[e] = value
         super().__init__(data)
 
     def get_taskdata(self):
-        return (self.server, self.method, self.extramethod) + super().get_taskdata()
+        return (self.server, self.method, self.extramethod, self.username, self.password, self.env) + super().get_taskdata()
 
     def set_taskdata(self, data):
-        self.server, self.method, self.extramethod = data[:3]
-        super().set_taskdata(data[3:])
+        self.server, self.method, self.extramethod, self.username, self.password, self.env = data[:6]
+        super().set_taskdata(data[6:])
+
+    def get_hashserv_creds(self):
+        if self.username and self.password:
+            return {
+                "username": self.username,
+                "password": self.password,
+            }
+
+        return {}
+
+    @contextmanager
+    def _client_env(self):
+        orig_env = os.environ.copy()
+        try:
+            for k, v in self.env.items():
+                os.environ[k] = v
+
+            yield
+        finally:
+            for k, v in self.env.items():
+                if k in orig_env:
+                    os.environ[k] = orig_env[k]
+                else:
+                    del os.environ[k]
 
+    @contextmanager
     def client(self):
-        if getattr(self, '_client', None) is None:
-            self._client = hashserv.create_client(self.server)
-        return self._client
+        with self._client_env():
+            if getattr(self, '_client', None) is None:
+                self._client = hashserv.create_client(self.server, **self.get_hashserv_creds())
+            yield self._client
+
+    def reset(self, data):
+        self.__close_clients()
+        return super().reset(data)
+
+    def exit(self):
+        self.__close_clients()
+        return super().exit()
+
+    def __close_clients(self):
+        with self._client_env():
+            if getattr(self, '_client', None) is not None:
+                self._client.close()
+                self._client = None
+            if getattr(self, '_client_pool', None) is not None:
+                self._client_pool.close()
+                self._client_pool = None
 
     def get_stampfile_hash(self, tid):
         if tid in self.taskhash:
@@ -499,7 +637,7 @@ class SignatureGeneratorUniHashMixIn(object):
             return None
         return unihash
 
-    def get_unihash(self, tid):
+    def get_cached_unihash(self, tid):
         taskhash = self.taskhash[tid]
 
         # If its not a setscene task we can return
@@ -514,40 +652,96 @@ class SignatureGeneratorUniHashMixIn(object):
             self.unihash[tid] = unihash
             return unihash
 
-        # In the absence of being able to discover a unique hash from the
-        # server, make it be equivalent to the taskhash. The unique "hash" only
-        # really needs to be a unique string (not even necessarily a hash), but
-        # making it match the taskhash has a few advantages:
-        #
-        # 1) All of the sstate code that assumes hashes can be the same
-        # 2) It provides maximal compatibility with builders that don't use
-        #    an equivalency server
-        # 3) The value is easy for multiple independent builders to derive the
-        #    same unique hash from the same input. This means that if the
-        #    independent builders find the same taskhash, but it isn't reported
-        #    to the server, there is a better chance that they will agree on
-        #    the unique hash.
-        unihash = taskhash
+        return None
 
-        try:
-            method = self.method
-            if tid in self.extramethod:
-                method = method + self.extramethod[tid]
-            data = self.client().get_unihash(method, self.taskhash[tid])
-            if data:
-                unihash = data
+    def _get_method(self, tid):
+        method = self.method
+        if tid in self.extramethod:
+            method = method + self.extramethod[tid]
+
+        return method
+
+    def unihashes_exist(self, query):
+        if len(query) == 0:
+            return {}
+
+        query_keys = []
+        result = {}
+        for key, unihash in query.items():
+            if unihash in self.unihash_exists_cache:
+                result[key] = True
+            else:
+                query_keys.append(key)
+
+        if query_keys:
+            with self.client() as client:
+                query_result = client.unihash_exists_batch(query[k] for k in query_keys)
+
+            for idx, key in enumerate(query_keys):
+                exists = query_result[idx]
+                if exists:
+                    self.unihash_exists_cache.add(query[key])
+                result[key] = exists
+
+        return result
+
+    def get_unihash(self, tid):
+        return self.get_unihashes([tid])[tid]
+
+    def get_unihashes(self, tids):
+        """
+        For a iterable of tids, returns a dictionary that maps each tid to a
+        unihash
+        """
+        result = {}
+        query_tids = []
+
+        for tid in tids:
+            unihash = self.get_cached_unihash(tid)
+            if unihash:
+                result[tid] = unihash
+            else:
+                query_tids.append(tid)
+
+        if query_tids:
+            unihashes = []
+            try:
+                with self.client() as client:
+                    unihashes = client.get_unihash_batch((self._get_method(tid), self.taskhash[tid]) for tid in query_tids)
+            except (ConnectionError, FileNotFoundError) as e:
+                bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
+
+        for idx, tid in enumerate(query_tids):
+            # In the absence of being able to discover a unique hash from the
+            # server, make it be equivalent to the taskhash. The unique "hash" only
+            # really needs to be a unique string (not even necessarily a hash), but
+            # making it match the taskhash has a few advantages:
+            #
+            # 1) All of the sstate code that assumes hashes can be the same
+            # 2) It provides maximal compatibility with builders that don't use
+            #    an equivalency server
+            # 3) The value is easy for multiple independent builders to derive the
+            #    same unique hash from the same input. This means that if the
+            #    independent builders find the same taskhash, but it isn't reported
+            #    to the server, there is a better chance that they will agree on
+            #    the unique hash.
+            taskhash = self.taskhash[tid]
+
+            if unihashes and unihashes[idx]:
+                unihash = unihashes[idx]
                 # A unique hash equal to the taskhash is not very interesting,
                 # so it is reported it at debug level 2. If they differ, that
                 # is much more interesting, so it is reported at debug level 1
-                hashequiv_logger.debug((1, 2)[unihash == taskhash], 'Found unihash %s in place of %s for %s from %s' % (unihash, taskhash, tid, self.server))
+                hashequiv_logger.bbdebug((1, 2)[unihash == taskhash], 'Found unihash %s in place of %s for %s from %s' % (unihash, taskhash, tid, self.server))
             else:
                 hashequiv_logger.debug2('No reported unihash for %s:%s from %s' % (tid, taskhash, self.server))
-        except hashserv.client.HashConnectionError as e:
-            bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
+                unihash = taskhash
 
-        self.set_unihash(tid, unihash)
-        self.unihash[tid] = unihash
-        return unihash
+            self.set_unihash(tid, unihash)
+            self.unihash[tid] = unihash
+            result[tid] = unihash
+
+        return result
 
     def report_unihash(self, path, task, d):
         import importlib
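
Aside: get_unihashes() above follows a cache-then-batch pattern: answer what it can locally, then resolve the rest in a single server round trip. The shape of that logic, as a standalone sketch (cached_get, taskhash_of and batch_query are stand-ins for the real lookups):

    def batched_lookup(tids, cached_get, taskhash_of, batch_query):
        result = {}
        missing = []
        for tid in tids:
            hit = cached_get(tid)            # cheap local answers first
            if hit:
                result[tid] = hit
            else:
                missing.append(tid)
        answers = batch_query(missing) if missing else []
        for tid, answer in zip(missing, answers):
            # fall back to the task's own hash when the server has no equivalent
            result[tid] = answer or taskhash_of(tid)
        return result

Batching matters here because runqueue can ask about thousands of tasks; one request replaces one round trip per task.
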
@@ -556,14 +750,14 @@ class SignatureGeneratorUniHashMixIn(object):
         unihash = d.getVar('BB_UNIHASH')
         report_taskdata = d.getVar('SSTATE_HASHEQUIV_REPORT_TASKDATA') == '1'
         tempdir = d.getVar('T')
-        fn = d.getVar('BB_FILENAME')
-        tid = fn + ':do_' + task
+        mcfn = d.getVar('BB_FILENAME')
+        tid = mcfn + ':do_' + task
         key = tid + ':' + taskhash
 
         if self.setscenetasks and tid not in self.setscenetasks:
             return
 
-        # This can happen if locked sigs are in action. Detect and just abort
+        # This can happen if locked sigs are in action. Detect and just exit
         if taskhash != self.taskhash[tid]:
             return
 
@@ -611,17 +805,19 @@ class SignatureGeneratorUniHashMixIn(object):
                 if tid in self.extramethod:
                     method = method + self.extramethod[tid]
 
-                data = self.client().report_unihash(taskhash, method, outhash, unihash, extra_data)
+                with self.client() as client:
+                    data = client.report_unihash(taskhash, method, outhash, unihash, extra_data)
+
                 new_unihash = data['unihash']
 
                 if new_unihash != unihash:
                     hashequiv_logger.debug('Task %s unihash changed %s -> %s by server %s' % (taskhash, unihash, new_unihash, self.server))
-                    bb.event.fire(bb.runqueue.taskUniHashUpdate(fn + ':do_' + task, new_unihash), d)
+                    bb.event.fire(bb.runqueue.taskUniHashUpdate(mcfn + ':do_' + task, new_unihash), d)
                     self.set_unihash(tid, new_unihash)
                     d.setVar('BB_UNIHASH', new_unihash)
                 else:
                     hashequiv_logger.debug('Reported task %s as unihash %s to %s' % (taskhash, unihash, self.server))
-            except hashserv.client.HashConnectionError as e:
+            except (ConnectionError, FileNotFoundError) as e:
                 bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
             finally:
                 if sigfile:
@@ -642,7 +838,9 @@ class SignatureGeneratorUniHashMixIn(object):
             if tid in self.extramethod:
                 method = method + self.extramethod[tid]
 
-            data = self.client().report_unihash_equiv(taskhash, method, wanted_unihash, extra_data)
+            with self.client() as client:
+                data = client.report_unihash_equiv(taskhash, method, wanted_unihash, extra_data)
+
             hashequiv_logger.verbose('Reported task %s as unihash %s to %s (%s)' % (tid, wanted_unihash, self.server, str(data)))
 
             if data is None:
@@ -661,7 +859,7 @@ class SignatureGeneratorUniHashMixIn(object):
                 # TODO: What to do here?
                 hashequiv_logger.verbose('Task %s unihash reported as unwanted hash %s' % (tid, finalunihash))
 
-        except hashserv.client.HashConnectionError as e:
+        except (ConnectionError, FileNotFoundError) as e:
             bb.warn('Error contacting Hash Equivalence Server %s: %s' % (self.server, str(e)))
 
         return False
@@ -676,19 +874,18 @@ class SignatureGeneratorTestEquivHash(SignatureGeneratorUniHashMixIn, SignatureG
         self.server = data.getVar('BB_HASHSERVE')
         self.method = "sstate_output_hash"
 
-#
-# Dummy class used for bitbake-selftest
-#
-class SignatureGeneratorTestMulticonfigDepends(SignatureGeneratorBasicHash):
-    name = "TestMulticonfigDepends"
-    supports_multiconfig_datacaches = True
+def clean_checksum_file_path(file_checksum_tuple):
+    f, cs = file_checksum_tuple
+    if "/./" in f:
+        return "./" + f.split("/./")[1]
+    return os.path.basename(f)
 
 def dump_this_task(outfile, d):
     import bb.parse
-    fn = d.getVar("BB_FILENAME")
+    mcfn = d.getVar("BB_FILENAME")
     task = "do_" + d.getVar("BB_CURRENTTASK")
-    referencestamp = bb.build.stamp_internal(task, d, None, True)
-    bb.parse.siggen.dump_sigtask(fn, task, outfile, "customfile:" + referencestamp)
+    referencestamp = bb.parse.siggen.stampfile_base(mcfn)
+    bb.parse.siggen.dump_sigtask(mcfn, task, outfile, "customfile:" + referencestamp)
 
 def init_colors(enable_color):
     """Initialise colour dict for passing to compare_sigfiles()"""
@@ -741,38 +938,15 @@ def list_inline_diff(oldlist, newlist, colors=None):
             ret.append(item)
     return '[%s]' % (', '.join(ret))
 
-def clean_basepath(basepath):
-    basepath, dir, recipe_task = basepath.rsplit("/", 2)
-    cleaned = dir + '/' + recipe_task
-
-    if basepath[0] == '/':
-        return cleaned
-
-    if basepath.startswith("mc:") and basepath.count(':') >= 2:
-        mc, mc_name, basepath = basepath.split(":", 2)
-        mc_suffix = ':mc:' + mc_name
-    else:
-        mc_suffix = ''
-
-    # mc stuff now removed from basepath. Whatever was next, if present will be the first
-    # suffix. ':/', recipe path start, marks the end of this. Something like
-    # 'virtual:a[:b[:c]]:/path...' (b and c being optional)
-    if basepath[0] != '/':
-        cleaned += ':' + basepath.split(':/', 1)[0]
-
-    return cleaned + mc_suffix
-
-def clean_basepaths(a):
-    b = {}
-    for x in a:
-        b[clean_basepath(x)] = a[x]
-    return b
-
-def clean_basepaths_list(a):
-    b = []
-    for x in a:
-        b.append(clean_basepath(x))
-    return b
+# Handled renamed fields
+def handle_renames(data):
+    if 'basewhitelist' in data:
+        data['basehash_ignore_vars'] = data['basewhitelist']
+        del data['basewhitelist']
+    if 'taskwhitelist' in data:
+        data['taskhash_ignore_tasks'] = data['taskwhitelist']
+        del data['taskwhitelist']
 
 def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
     output = []
@@ -794,20 +968,29 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
         formatparams.update(values)
         return formatstr.format(**formatparams)
 
-    with open(a, 'rb') as f:
-        p1 = pickle.Unpickler(f)
-        a_data = p1.load()
-    with open(b, 'rb') as f:
-        p2 = pickle.Unpickler(f)
-        b_data = p2.load()
-
-    def dict_diff(a, b, whitelist=set()):
+    try:
+        with bb.compress.zstd.open(a, "rt", encoding="utf-8", num_threads=1) as f:
+            a_data = json.load(f, object_hook=SetDecoder)
+    except (TypeError, OSError) as err:
+        bb.error("Failed to open sigdata file '%s': %s" % (a, str(err)))
+        raise err
+    try:
+        with bb.compress.zstd.open(b, "rt", encoding="utf-8", num_threads=1) as f:
+            b_data = json.load(f, object_hook=SetDecoder)
+    except (TypeError, OSError) as err:
+        bb.error("Failed to open sigdata file '%s': %s" % (b, str(err)))
+        raise err
+
+    for data in [a_data, b_data]:
+        handle_renames(data)
+
+    def dict_diff(a, b, ignored_vars=set()):
         sa = set(a.keys())
         sb = set(b.keys())
         common = sa & sb
         changed = set()
         for i in common:
-            if a[i] != b[i] and i not in whitelist:
+            if a[i] != b[i] and i not in ignored_vars:
                 changed.add(i)
         added = sb - sa
         removed = sa - sb
@@ -815,11 +998,11 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
 
     def file_checksums_diff(a, b):
         from collections import Counter
-        # Handle old siginfo format
-        if isinstance(a, dict):
-            a = [(os.path.basename(f), cs) for f, cs in a.items()]
-        if isinstance(b, dict):
-            b = [(os.path.basename(f), cs) for f, cs in b.items()]
+
+        # Convert lists back to tuples
+        a = [(f[0], f[1]) for f in a]
+        b = [(f[0], f[1]) for f in b]
+
         # Compare lists, ensuring we can handle duplicate filenames if they exist
         removedcount = Counter(a)
         removedcount.subtract(b)
@@ -846,15 +1029,15 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
         removed = [x[0] for x in removed]
         return changed, added, removed
 
-    if 'basewhitelist' in a_data and a_data['basewhitelist'] != b_data['basewhitelist']:
-        output.append(color_format("{color_title}basewhitelist changed{color_default} from '%s' to '%s'") % (a_data['basewhitelist'], b_data['basewhitelist']))
-        if a_data['basewhitelist'] and b_data['basewhitelist']:
-            output.append("changed items: %s" % a_data['basewhitelist'].symmetric_difference(b_data['basewhitelist']))
-
-    if 'taskwhitelist' in a_data and a_data['taskwhitelist'] != b_data['taskwhitelist']:
-        output.append(color_format("{color_title}taskwhitelist changed{color_default} from '%s' to '%s'") % (a_data['taskwhitelist'], b_data['taskwhitelist']))
-        if a_data['taskwhitelist'] and b_data['taskwhitelist']:
-            output.append("changed items: %s" % a_data['taskwhitelist'].symmetric_difference(b_data['taskwhitelist']))
+    if 'basehash_ignore_vars' in a_data and a_data['basehash_ignore_vars'] != b_data['basehash_ignore_vars']:
+        output.append(color_format("{color_title}basehash_ignore_vars changed{color_default} from '%s' to '%s'") % (a_data['basehash_ignore_vars'], b_data['basehash_ignore_vars']))
+        if a_data['basehash_ignore_vars'] and b_data['basehash_ignore_vars']:
+            output.append("changed items: %s" % a_data['basehash_ignore_vars'].symmetric_difference(b_data['basehash_ignore_vars']))
+
+    if 'taskhash_ignore_tasks' in a_data and a_data['taskhash_ignore_tasks'] != b_data['taskhash_ignore_tasks']:
+        output.append(color_format("{color_title}taskhash_ignore_tasks changed{color_default} from '%s' to '%s'") % (a_data['taskhash_ignore_tasks'], b_data['taskhash_ignore_tasks']))
+        if a_data['taskhash_ignore_tasks'] and b_data['taskhash_ignore_tasks']:
+            output.append("changed items: %s" % a_data['taskhash_ignore_tasks'].symmetric_difference(b_data['taskhash_ignore_tasks']))
 
     if a_data['taskdeps'] != b_data['taskdeps']:
         output.append(color_format("{color_title}Task dependencies changed{color_default} from:\n%s\nto:\n%s") % (sorted(a_data['taskdeps']), sorted(b_data['taskdeps'])))
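
Aside: handle_renames() is what keeps this comparison able to read sigdata written before the whitelist keys were renamed; given a pre-rename dictionary it rewrites the keys in place, e.g. (values made up):

    old = {'basewhitelist': {'PATH', 'PWD'}, 'basehash': 'aa11'}
    handle_renames(old)
    assert old['basehash_ignore_vars'] == {'PATH', 'PWD'}
    assert 'basewhitelist' not in old
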
@@ -862,23 +1045,23 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
     if a_data['basehash'] != b_data['basehash'] and not collapsed:
         output.append(color_format("{color_title}basehash changed{color_default} from %s to %s") % (a_data['basehash'], b_data['basehash']))
 
-    changed, added, removed = dict_diff(a_data['gendeps'], b_data['gendeps'], a_data['basewhitelist'] & b_data['basewhitelist'])
+    changed, added, removed = dict_diff(a_data['gendeps'], b_data['gendeps'], a_data['basehash_ignore_vars'] & b_data['basehash_ignore_vars'])
     if changed:
-        for dep in changed:
+        for dep in sorted(changed):
             output.append(color_format("{color_title}List of dependencies for variable %s changed from '{color_default}%s{color_title}' to '{color_default}%s{color_title}'") % (dep, a_data['gendeps'][dep], b_data['gendeps'][dep]))
             if a_data['gendeps'][dep] and b_data['gendeps'][dep]:
                 output.append("changed items: %s" % a_data['gendeps'][dep].symmetric_difference(b_data['gendeps'][dep]))
     if added:
-        for dep in added:
+        for dep in sorted(added):
             output.append(color_format("{color_title}Dependency on variable %s was added") % (dep))
     if removed:
-        for dep in removed:
+        for dep in sorted(removed):
             output.append(color_format("{color_title}Dependency on Variable %s was removed") % (dep))
 
 
     changed, added, removed = dict_diff(a_data['varvals'], b_data['varvals'])
     if changed:
-        for dep in changed:
+        for dep in sorted(changed):
             oldval = a_data['varvals'][dep]
             newval = b_data['varvals'][dep]
             if newval and oldval and ('\n' in oldval or '\n' in newval):
@@ -902,9 +1085,9 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
                 output.append(color_format("{color_title}Variable {var} value changed from '{color_default}{oldval}{color_title}' to '{color_default}{newval}{color_title}'{color_default}", var=dep, oldval=oldval, newval=newval))
 
     if not 'file_checksum_values' in a_data:
-        a_data['file_checksum_values'] = {}
+        a_data['file_checksum_values'] = []
     if not 'file_checksum_values' in b_data:
-        b_data['file_checksum_values'] = {}
+        b_data['file_checksum_values'] = []
 
     changed, added, removed = file_checksums_diff(a_data['file_checksum_values'], b_data['file_checksum_values'])
     if changed:
@@ -931,11 +1114,11 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
             a = a_data['runtaskdeps'][idx]
             b = b_data['runtaskdeps'][idx]
             if a_data['runtaskhashes'][a] != b_data['runtaskhashes'][b] and not collapsed:
-                changed.append("%s with hash %s\n changed to\n%s with hash %s" % (clean_basepath(a), a_data['runtaskhashes'][a], clean_basepath(b), b_data['runtaskhashes'][b]))
+                changed.append("%s with hash %s\n changed to\n%s with hash %s" % (a, a_data['runtaskhashes'][a], b, b_data['runtaskhashes'][b]))
 
         if changed:
-            clean_a = clean_basepaths_list(a_data['runtaskdeps'])
-            clean_b = clean_basepaths_list(b_data['runtaskdeps'])
+            clean_a = a_data['runtaskdeps']
+            clean_b = b_data['runtaskdeps']
             if clean_a != clean_b:
                 output.append(color_format("{color_title}runtaskdeps changed:{color_default}\n%s") % list_inline_diff(clean_a, clean_b, colors))
             else:
@@ -948,7 +1131,7 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
         b = b_data['runtaskhashes']
         changed, added, removed = dict_diff(a, b)
         if added:
-            for dep in added:
+            for dep in sorted(added):
                 bdep_found = False
                 if removed:
                     for bdep in removed:
@@ -956,9 +1139,9 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
                             #output.append("Dependency on task %s was replaced by %s with same hash" % (dep, bdep))
                             bdep_found = True
                 if not bdep_found:
-                    output.append(color_format("{color_title}Dependency on task %s was added{color_default} with hash %s") % (clean_basepath(dep), b[dep]))
+                    output.append(color_format("{color_title}Dependency on task %s was added{color_default} with hash %s") % (dep, b[dep]))
         if removed:
-            for dep in removed:
+            for dep in sorted(removed):
                 adep_found = False
                 if added:
                     for adep in added:
@@ -966,11 +1149,11 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
                             #output.append("Dependency on task %s was replaced by %s with same hash" % (adep, dep))
                             adep_found = True
                 if not adep_found:
-                    output.append(color_format("{color_title}Dependency on task %s was removed{color_default} with hash %s") % (clean_basepath(dep), a[dep]))
+                    output.append(color_format("{color_title}Dependency on task %s was removed{color_default} with hash %s") % (dep, a[dep]))
         if changed:
-            for dep in changed:
+            for dep in sorted(changed):
                 if not collapsed:
-                    output.append(color_format("{color_title}Hash for dependent task %s changed{color_default} from %s to %s") % (clean_basepath(dep), a[dep], b[dep]))
+                    output.append(color_format("{color_title}Hash for task dependency %s changed{color_default} from %s to %s") % (dep, a[dep], b[dep]))
                 if callable(recursecb):
                     recout = recursecb(dep, a[dep], b[dep])
                     if recout:
@@ -980,6 +1163,7 @@ def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False):
                             # If a dependent hash changed, might as well print the line above and then defer to the changes in
                             # that hash since in all likelyhood, they're the same changes this task also saw.
                             output = [output[-1]] + recout
+                            break
 
     a_taint = a_data.get('taint', None)
     b_taint = b_data.get('taint', None)
@@ -1001,7 +1185,7 @@ def calc_basehash(sigdata):
     basedata = ''
 
     alldeps = sigdata['taskdeps']
-    for dep in alldeps:
+    for dep in sorted(alldeps):
         basedata = basedata + dep
         val = sigdata['varvals'][dep]
         if val is not None:
@@ -1017,6 +1201,8 @@ def calc_taskhash(sigdata):
 
     for c in sigdata['file_checksum_values']:
         if c[1]:
+            if "./" in c[0]:
+                data = data + c[0]
             data = data + c[1]
 
     if 'taint' in sigdata:
@@ -1031,32 +1217,37 @@ def calc_taskhash(sigdata):
 def dump_sigfile(a):
     output = []
 
-    with open(a, 'rb') as f:
-        p1 = pickle.Unpickler(f)
-        a_data = p1.load()
+    try:
+        with bb.compress.zstd.open(a, "rt", encoding="utf-8", num_threads=1) as f:
+            a_data = json.load(f, object_hook=SetDecoder)
+    except (TypeError, OSError) as err:
+        bb.error("Failed to open sigdata file '%s': %s" % (a, str(err)))
+        raise err
+
+    handle_renames(a_data)
 
-    output.append("basewhitelist: %s" % (a_data['basewhitelist']))
+    output.append("basehash_ignore_vars: %s" % (sorted(a_data['basehash_ignore_vars'])))
 
-    output.append("taskwhitelist: %s" % (a_data['taskwhitelist']))
+    output.append("taskhash_ignore_tasks: %s" % (sorted(a_data['taskhash_ignore_tasks'] or [])))
 
     output.append("Task dependencies: %s" % (sorted(a_data['taskdeps'])))
 
     output.append("basehash: %s" % (a_data['basehash']))
 
-    for dep in a_data['gendeps']:
-        output.append("List of dependencies for variable %s is %s" % (dep, a_data['gendeps'][dep]))
+    for dep in sorted(a_data['gendeps']):
+        output.append("List of dependencies for variable %s is %s" % (dep, sorted(a_data['gendeps'][dep])))
 
-    for dep in a_data['varvals']:
+    for dep in sorted(a_data['varvals']):
         output.append("Variable %s value is %s" % (dep, a_data['varvals'][dep]))
 
     if 'runtaskdeps' in a_data:
-        output.append("Tasks this task depends on: %s" % (a_data['runtaskdeps']))
+        output.append("Tasks this task depends on: %s" % (sorted(a_data['runtaskdeps'])))
 
     if 'file_checksum_values' in a_data:
-        output.append("This task depends on the checksums of files: %s" % (a_data['file_checksum_values']))
+        output.append("This task depends on the checksums of files: %s" % (sorted(a_data['file_checksum_values'])))
 
     if 'runtaskhashes' in a_data:
-        for dep in a_data['runtaskhashes']:
+        for dep in sorted(a_data['runtaskhashes']):
             output.append("Hash for dependent task %s is %s" % (dep, a_data['runtaskhashes'][dep]))
 
     if 'taint' in a_data:
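
Aside: with dump_sigtask() now writing zstd-compressed JSON instead of pickle, a siginfo file can be inspected along these lines (filename hypothetical; SetDecoder is the JSON object hook defined alongside SetEncoder in siggen.py):

    import json
    import bb.compress.zstd

    with bb.compress.zstd.open("example.siginfo", "rt", encoding="utf-8", num_threads=1) as f:
        sigdata = json.load(f, object_hook=SetDecoder)
    print(sigdata["task"], sigdata["basehash"])

The format switch also makes siginfo files safe to load from untrusted sstate mirrors, which pickle was not.
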
diff --git a/bitbake/lib/bb/taskdata.py b/bitbake/lib/bb/taskdata.py
index 47bad6d1fa..66545a65af 100644
--- a/bitbake/lib/bb/taskdata.py
+++ b/bitbake/lib/bb/taskdata.py
@@ -39,7 +39,7 @@ class TaskData:
39 """ 39 """
40 BitBake Task Data implementation 40 BitBake Task Data implementation
41 """ 41 """
42 def __init__(self, abort = True, skiplist = None, allowincomplete = False): 42 def __init__(self, halt = True, skiplist = None, allowincomplete = False):
43 self.build_targets = {} 43 self.build_targets = {}
44 self.run_targets = {} 44 self.run_targets = {}
45 45
@@ -57,7 +57,7 @@ class TaskData:
         self.failed_rdeps = []
         self.failed_fns = []
 
-        self.abort = abort
+        self.halt = halt
         self.allowincomplete = allowincomplete
 
         self.skiplist = skiplist
@@ -328,7 +328,7 @@ class TaskData:
             try:
                 self.add_provider_internal(cfgData, dataCache, item)
             except bb.providers.NoProvider:
-                if self.abort:
+                if self.halt:
                     raise
                 self.remove_buildtarget(item)
 
@@ -451,12 +451,12 @@ class TaskData:
         for target in self.build_targets:
             if fn in self.build_targets[target]:
                 self.build_targets[target].remove(fn)
-                if len(self.build_targets[target]) == 0:
+                if not self.build_targets[target]:
                     self.remove_buildtarget(target, missing_list)
         for target in self.run_targets:
             if fn in self.run_targets[target]:
                 self.run_targets[target].remove(fn)
-                if len(self.run_targets[target]) == 0:
+                if not self.run_targets[target]:
                     self.remove_runtarget(target, missing_list)
 
     def remove_buildtarget(self, target, missing_list=None):
@@ -479,7 +479,7 @@ class TaskData:
                 fn = tid.rsplit(":",1)[0]
                 self.fail_fn(fn, missing_list)
 
-        if self.abort and target in self.external_targets:
+        if self.halt and target in self.external_targets:
             logger.error("Required build target '%s' has no buildable providers.\nMissing or unbuildable dependency chain was: %s", target, missing_list)
             raise bb.providers.NoProvider(target)
 
@@ -516,7 +516,7 @@ class TaskData:
                 self.add_provider_internal(cfgData, dataCache, target)
                 added = added + 1
             except bb.providers.NoProvider:
-                if self.abort and target in self.external_targets and not self.allowincomplete:
+                if self.halt and target in self.external_targets and not self.allowincomplete:
                     raise
                 if not self.allowincomplete:
                     self.remove_buildtarget(target)
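
Aside: the abort to halt rename is visible to every TaskData caller; construction now reads:

    td = bb.taskdata.TaskData(halt=False, skiplist=None, allowincomplete=True)
    # halt=False: unbuildable targets are pruned rather than raising bb.providers.NoProvider

The behaviour is unchanged; only the keyword and attribute names move to the new terminology.
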
diff --git a/bitbake/lib/bb/tests/codeparser.py b/bitbake/lib/bb/tests/codeparser.py
index 826a2d2f6d..c0d1362a0c 100644
--- a/bitbake/lib/bb/tests/codeparser.py
+++ b/bitbake/lib/bb/tests/codeparser.py
@@ -44,6 +44,7 @@ class VariableReferenceTest(ReferenceTest):
     def parseExpression(self, exp):
         parsedvar = self.d.expandWithRefs(exp, None)
         self.references = parsedvar.references
+        self.execs = parsedvar.execs
 
     def test_simple_reference(self):
         self.setEmptyVars(["FOO"])
@@ -61,6 +62,11 @@ class VariableReferenceTest(ReferenceTest):
         self.parseExpression("${@d.getVar('BAR') + 'foo'}")
         self.assertReferences(set(["BAR"]))
 
+    def test_python_exec_reference(self):
+        self.parseExpression("${@eval('3 * 5')}")
+        self.assertReferences(set())
+        self.assertExecs(set(["eval"]))
+
 class ShellReferenceTest(ReferenceTest):
 
     def parseExpression(self, exp):
@@ -100,6 +106,46 @@ ${D}${libdir}/pkgconfig/*.pc
         self.parseExpression("foo=$(echo bar)")
         self.assertExecs(set(["echo"]))
 
+    def test_assign_subshell_expansion_quotes(self):
+        self.parseExpression('foo="$(echo bar)"')
+        self.assertExecs(set(["echo"]))
+
+    def test_assign_subshell_expansion_nested(self):
+        self.parseExpression('foo="$(func1 "$(func2 bar$(func3))")"')
+        self.assertExecs(set(["func1", "func2", "func3"]))
+
+    def test_assign_subshell_expansion_multiple(self):
+        self.parseExpression('foo="$(func1 "$(func2)") $(func3)"')
+        self.assertExecs(set(["func1", "func2", "func3"]))
+
+    def test_assign_subshell_expansion_escaped_quotes(self):
+        self.parseExpression('foo="\\"fo\\"o$(func1)"')
+        self.assertExecs(set(["func1"]))
+
+    def test_assign_subshell_expansion_empty(self):
+        self.parseExpression('foo="bar$()foo"')
+        self.assertExecs(set())
+
+    def test_assign_subshell_backticks(self):
+        self.parseExpression("foo=`echo bar`")
+        self.assertExecs(set(["echo"]))
+
+    def test_assign_subshell_backticks_quotes(self):
+        self.parseExpression('foo="`echo bar`"')
+        self.assertExecs(set(["echo"]))
+
+    def test_assign_subshell_backticks_multiple(self):
+        self.parseExpression('foo="`func1 bar` `func2`"')
+        self.assertExecs(set(["func1", "func2"]))
+
+    def test_assign_subshell_backticks_escaped_quotes(self):
+        self.parseExpression('foo="\\"fo\\"o`func1`"')
+        self.assertExecs(set(["func1"]))
+
+    def test_assign_subshell_backticks_empty(self):
+        self.parseExpression('foo="bar``foo"')
+        self.assertExecs(set())
+
     def test_shell_unexpanded(self):
         self.setEmptyVars(["QT_BASE_NAME"])
         self.parseExpression('echo "${QT_BASE_NAME}"')
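
Aside: the new cases exercise exec extraction from command substitutions and backticks inside double quotes; the parser they drive can also be used directly, roughly like this (logger choice is an assumption, not part of the tests):

    import logging
    import bb.codeparser

    parser = bb.codeparser.ShellParser("example", logging.getLogger("BitBake"))
    parser.parse_shell('foo="$(func1 "$(func2)") `func3`"')
    print(parser.execs)   # expected: {'func1', 'func2', 'func3'}
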
@@ -111,9 +157,9 @@ ${D}${libdir}/pkgconfig/*.pc
         self.assertExecs(set(["sed"]))
 
     def test_parameter_expansion_modifiers(self):
-        # - and + are also valid modifiers for parameter expansion, but are
+        # -,+ and : are also valid modifiers for parameter expansion, but are
         # valid characters in bitbake variable names, so are not included here
-        for i in ('=', ':-', ':=', '?', ':?', ':+', '#', '%', '##', '%%'):
+        for i in ('=', '?', '#', '%', '##', '%%'):
             name = "foo%sbar" % i
             self.parseExpression("${%s}" % name)
             self.assertNotIn(name, self.references)
@@ -318,7 +364,7 @@ d.getVar(a(), False)
318 "filename": "example.bb", 364 "filename": "example.bb",
319 }) 365 })
320 366
321 deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d) 367 deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
322 368
323 self.assertEqual(deps, set(["somevar", "bar", "something", "inexpand", "test", "test2", "a"])) 369 self.assertEqual(deps, set(["somevar", "bar", "something", "inexpand", "test", "test2", "a"]))
324 370
@@ -365,7 +411,7 @@ esac
         self.d.setVarFlags("FOO", {"func": True})
         self.setEmptyVars(execs)
 
-        deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)
+        deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
 
         self.assertEqual(deps, set(["somevar", "inverted"] + execs))
 
@@ -375,7 +421,7 @@ esac
         self.d.setVar("FOO", "foo=oe_libinstall; eval $foo")
         self.d.setVarFlag("FOO", "vardeps", "oe_libinstall")
 
-        deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)
+        deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
 
         self.assertEqual(deps, set(["oe_libinstall"]))
 
@@ -384,7 +430,7 @@ esac
         self.d.setVar("FOO", "foo=oe_libinstall; eval $foo")
         self.d.setVarFlag("FOO", "vardeps", "${@'oe_libinstall'}")
 
-        deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), self.d)
+        deps, values = bb.data.build_dependencies("FOO", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
 
         self.assertEqual(deps, set(["oe_libinstall"]))
 
@@ -399,7 +445,7 @@ esac
         # Check dependencies
         self.d.setVar('ANOTHERVAR', expr)
         self.d.setVar('TESTVAR', 'anothervalue testval testval2')
-        deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), self.d)
+        deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
         self.assertEqual(sorted(values.splitlines()),
                          sorted([expr,
                                  'TESTVAR{anothervalue} = Set',
@@ -412,11 +458,55 @@ esac
         # Check final value
         self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['anothervalue', 'yetanothervalue', 'lastone'])
 
+    def test_contains_vardeps_excluded(self):
+        # Check the ignored_vars option to build_dependencies is handled by contains functionality
+        varval = '${TESTVAR2} ${@bb.utils.filter("TESTVAR", "somevalue anothervalue", d)}'
+        self.d.setVar('ANOTHERVAR', varval)
+        self.d.setVar('TESTVAR', 'anothervalue testval testval2')
+        self.d.setVar('TESTVAR2', 'testval3')
+        deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), set(), set(["TESTVAR"]), self.d, self.d)
+        self.assertEqual(sorted(values.splitlines()), sorted([varval]))
+        self.assertEqual(deps, set(["TESTVAR2"]))
+        self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['testval3', 'anothervalue'])
+
+        # Check the vardepsexclude flag is handled by contains functionality
+        self.d.setVarFlag('ANOTHERVAR', 'vardepsexclude', 'TESTVAR')
+        deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
+        self.assertEqual(sorted(values.splitlines()), sorted([varval]))
+        self.assertEqual(deps, set(["TESTVAR2"]))
+        self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['testval3', 'anothervalue'])
+
+    def test_contains_vardeps_override_operators(self):
+        # Check override operators handle dependencies correctly with the contains functionality
+        expr_plain = 'testval'
+        expr_prepend = '${@bb.utils.filter("TESTVAR1", "testval1", d)} '
+        expr_append = ' ${@bb.utils.filter("TESTVAR2", "testval2", d)}'
+        expr_remove = '${@bb.utils.contains("TESTVAR3", "no-testval", "testval", "", d)}'
+        # Check dependencies
+        self.d.setVar('ANOTHERVAR', expr_plain)
+        self.d.prependVar('ANOTHERVAR', expr_prepend)
+        self.d.appendVar('ANOTHERVAR', expr_append)
+        self.d.setVar('ANOTHERVAR:remove', expr_remove)
+        self.d.setVar('TESTVAR1', 'blah')
+        self.d.setVar('TESTVAR2', 'testval2')
+        self.d.setVar('TESTVAR3', 'no-testval')
+        deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), set(), set(), self.d, self.d)
+        self.assertEqual(sorted(values.splitlines()),
+                         sorted([
+                             expr_prepend + expr_plain + expr_append,
+                             '_remove of ' + expr_remove,
+                             'TESTVAR1{testval1} = Unset',
+                             'TESTVAR2{testval2} = Set',
+                             'TESTVAR3{no-testval} = Set',
+                         ]))
+        # Check final value
+        self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['testval2'])
+
     #Currently no wildcard support
     #def test_vardeps_wildcards(self):
     #    self.d.setVar("oe_libinstall", "echo test")
     #    self.d.setVar("FOO", "foo=oe_libinstall; eval $foo")
     #    self.d.setVarFlag("FOO", "vardeps", "oe_*")
-    #    self.assertEquals(deps, set(["oe_libinstall"]))
+    #    self.assertEqual(deps, set(["oe_libinstall"]))
 
 
diff --git a/bitbake/lib/bb/tests/color.py b/bitbake/lib/bb/tests/color.py
index bf03750c69..bb70cb393d 100644
--- a/bitbake/lib/bb/tests/color.py
+++ b/bitbake/lib/bb/tests/color.py
@@ -20,7 +20,7 @@ class ProgressWatcher:
     def __init__(self):
         self._reports = []
 
-    def handle_event(self, event):
+    def handle_event(self, event, d):
         self._reports.append((event.progress, event.rate))
 
     def reports(self):
@@ -31,7 +31,7 @@ class ColorCodeTests(unittest.TestCase):
     def setUp(self):
         self.d = bb.data.init()
         self._progress_watcher = ProgressWatcher()
-        bb.event.register("bb.build.TaskProgress", self._progress_watcher.handle_event, data=self.d)
+        bb.event.register("bb.build.TaskProgress", self._progress_watcher.handle_event, data=self.d)
 
     def tearDown(self):
         bb.event.remove("bb.build.TaskProgress", None)
diff --git a/bitbake/lib/bb/tests/compression.py b/bitbake/lib/bb/tests/compression.py
new file mode 100644
index 0000000000..16c297b315
--- /dev/null
+++ b/bitbake/lib/bb/tests/compression.py
@@ -0,0 +1,100 @@
1#
2# Copyright BitBake Contributors
3#
4# SPDX-License-Identifier: GPL-2.0-only
5#
6
7from pathlib import Path
8import bb.compress.lz4
9import bb.compress.zstd
10import contextlib
11import os
12import shutil
13import tempfile
14import unittest
15import subprocess
16
17
18class CompressionTests(object):
19 def setUp(self):
20 self._t = tempfile.TemporaryDirectory()
21 self.tmpdir = Path(self._t.name)
22 self.addCleanup(self._t.cleanup)
23
24 def _file_helper(self, mode_suffix, data):
25 tmp_file = self.tmpdir / "compressed"
26
27 with self.do_open(tmp_file, mode="w" + mode_suffix) as f:
28 f.write(data)
29
30 with self.do_open(tmp_file, mode="r" + mode_suffix) as f:
31 read_data = f.read()
32
33 self.assertEqual(read_data, data)
34
35 def test_text_file(self):
36 self._file_helper("t", "Hello")
37
38 def test_binary_file(self):
39 self._file_helper("b", "Hello".encode("utf-8"))
40
41 def _pipe_helper(self, mode_suffix, data):
42 rfd, wfd = os.pipe()
43 with open(rfd, "rb") as r, open(wfd, "wb") as w:
44 with self.do_open(r, mode="r" + mode_suffix) as decompress:
45 with self.do_open(w, mode="w" + mode_suffix) as compress:
46 compress.write(data)
47 read_data = decompress.read()
48
49 self.assertEqual(read_data, data)
50
51 def test_text_pipe(self):
52 self._pipe_helper("t", "Hello")
53
54 def test_binary_pipe(self):
55 self._pipe_helper("b", "Hello".encode("utf-8"))
56
57 def test_bad_decompress(self):
58 tmp_file = self.tmpdir / "compressed"
59 with tmp_file.open("wb") as f:
60 f.write(b"\x00")
61
62 with self.assertRaises(OSError):
63 with self.do_open(tmp_file, mode="rb", stderr=subprocess.DEVNULL) as f:
64 data = f.read()
65
66
67class LZ4Tests(CompressionTests, unittest.TestCase):
68 def setUp(self):
69 if shutil.which("lz4") is None:
70 self.skipTest("'lz4' not found")
71 super().setUp()
72
73 @contextlib.contextmanager
74 def do_open(self, *args, **kwargs):
75 with bb.compress.lz4.open(*args, **kwargs) as f:
76 yield f
77
78
79class ZStdTests(CompressionTests, unittest.TestCase):
80 def setUp(self):
81 if shutil.which("zstd") is None:
82 self.skipTest("'zstd' not found")
83 super().setUp()
84
85 @contextlib.contextmanager
86 def do_open(self, *args, **kwargs):
87 with bb.compress.zstd.open(*args, **kwargs) as f:
88 yield f
89
90
91class PZStdTests(CompressionTests, unittest.TestCase):
92 def setUp(self):
93 if shutil.which("pzstd") is None:
94 self.skipTest("'pzstd' not found")
95 super().setUp()
96
97 @contextlib.contextmanager
98 def do_open(self, *args, **kwargs):
99 with bb.compress.zstd.open(*args, num_threads=2, **kwargs) as f:
100 yield f
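The new test module drives bb.compress.lz4 and bb.compress.zstd, which wrap the external lz4/zstd/pzstd binaries behind a file-like open(). A minimal usage sketch, assuming the zstd tool is on PATH (the path below is illustrative):

    import bb.compress.zstd

    # The mode suffixes mirror the tests: "t" for text, "b" for binary.
    with bb.compress.zstd.open("/tmp/example.zst", "wt") as f:
        f.write("Hello")

    with bb.compress.zstd.open("/tmp/example.zst", "rt") as f:
        assert f.read() == "Hello"

Passing num_threads=2, as the PZStdTests class does, selects pzstd for parallel (de)compression.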
diff --git a/bitbake/lib/bb/tests/cooker.py b/bitbake/lib/bb/tests/cooker.py
index c82d4b7b81..9e524ae345 100644
--- a/bitbake/lib/bb/tests/cooker.py
+++ b/bitbake/lib/bb/tests/cooker.py
@@ -1,6 +1,8 @@
1# 1#
2# BitBake Tests for cooker.py 2# BitBake Tests for cooker.py
3# 3#
4# Copyright BitBake Contributors
5#
4# SPDX-License-Identifier: GPL-2.0-only 6# SPDX-License-Identifier: GPL-2.0-only
5# 7#
6 8
diff --git a/bitbake/lib/bb/tests/data.py b/bitbake/lib/bb/tests/data.py
index 1d4a64b109..a895f6a58e 100644
--- a/bitbake/lib/bb/tests/data.py
+++ b/bitbake/lib/bb/tests/data.py
@@ -60,6 +60,15 @@ class DataExpansions(unittest.TestCase):
60 val = self.d.expand("${@5*12}") 60 val = self.d.expand("${@5*12}")
61 self.assertEqual(str(val), "60") 61 self.assertEqual(str(val), "60")
62 62
63 def test_python_snippet_w_dict(self):
64 val = self.d.expand("${@{ 'green': 1, 'blue': 2 }['green']}")
65 self.assertEqual(str(val), "1")
66
67 def test_python_unexpanded_multi(self):
68 self.d.setVar("bar", "${unsetvar}")
69 val = self.d.expand("${@2*2},${foo},${@d.getVar('foo') + ' ${bar}'},${foo}")
70 self.assertEqual(str(val), "4,value_of_foo,${@d.getVar('foo') + ' ${unsetvar}'},value_of_foo")
71
63 def test_expand_in_python_snippet(self): 72 def test_expand_in_python_snippet(self):
64 val = self.d.expand("${@'boo ' + '${foo}'}") 73 val = self.d.expand("${@'boo ' + '${foo}'}")
65 self.assertEqual(str(val), "boo value_of_foo") 74 self.assertEqual(str(val), "boo value_of_foo")
@@ -68,6 +77,18 @@ class DataExpansions(unittest.TestCase):
68 val = self.d.expand("${@d.getVar('foo') + ' ${bar}'}") 77 val = self.d.expand("${@d.getVar('foo') + ' ${bar}'}")
69 self.assertEqual(str(val), "value_of_foo value_of_bar") 78 self.assertEqual(str(val), "value_of_foo value_of_bar")
70 79
80 def test_python_snippet_function_reference(self):
81 self.d.setVar("TESTVAL", "testvalue")
82 self.d.setVar("testfunc", 'd.getVar("TESTVAL")')
83 context = bb.utils.get_context()
84 context["testfunc"] = lambda d: d.getVar("TESTVAL")
85 val = self.d.expand("${@testfunc(d)}")
86 self.assertEqual(str(val), "testvalue")
87
88 def test_python_snippet_builtin_metadata(self):
89 self.d.setVar("eval", "INVALID")
90 self.d.expand("${@eval('3')}")
91
71 def test_python_unexpanded(self): 92 def test_python_unexpanded(self):
72 self.d.setVar("bar", "${unsetvar}") 93 self.d.setVar("bar", "${unsetvar}")
73 val = self.d.expand("${@d.getVar('foo') + ' ${bar}'}") 94 val = self.d.expand("${@d.getVar('foo') + ' ${bar}'}")
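test_python_snippet_function_reference is the interesting addition here: names placed in bb.utils.get_context() become visible inside ${@...} python snippets. A minimal sketch, taken straight from the test:

    import bb.data
    import bb.utils

    d = bb.data.init()
    d.setVar("TESTVAL", "testvalue")

    # Anything added to the shared context can be called from a snippet;
    # the datastore is passed explicitly as 'd'.
    bb.utils.get_context()["testfunc"] = lambda d: d.getVar("TESTVAL")
    print(d.expand("${@testfunc(d)}"))   # -> "testvalue"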
@@ -245,35 +266,35 @@ class TestConcatOverride(unittest.TestCase):
245 266
246 def test_prepend(self): 267 def test_prepend(self):
247 self.d.setVar("TEST", "${VAL}") 268 self.d.setVar("TEST", "${VAL}")
248 self.d.setVar("TEST_prepend", "${FOO}:") 269 self.d.setVar("TEST:prepend", "${FOO}:")
249 self.assertEqual(self.d.getVar("TEST"), "foo:val") 270 self.assertEqual(self.d.getVar("TEST"), "foo:val")
250 271
251 def test_append(self): 272 def test_append(self):
252 self.d.setVar("TEST", "${VAL}") 273 self.d.setVar("TEST", "${VAL}")
253 self.d.setVar("TEST_append", ":${BAR}") 274 self.d.setVar("TEST:append", ":${BAR}")
254 self.assertEqual(self.d.getVar("TEST"), "val:bar") 275 self.assertEqual(self.d.getVar("TEST"), "val:bar")
255 276
256 def test_multiple_append(self): 277 def test_multiple_append(self):
257 self.d.setVar("TEST", "${VAL}") 278 self.d.setVar("TEST", "${VAL}")
258 self.d.setVar("TEST_prepend", "${FOO}:") 279 self.d.setVar("TEST:prepend", "${FOO}:")
259 self.d.setVar("TEST_append", ":val2") 280 self.d.setVar("TEST:append", ":val2")
260 self.d.setVar("TEST_append", ":${BAR}") 281 self.d.setVar("TEST:append", ":${BAR}")
261 self.assertEqual(self.d.getVar("TEST"), "foo:val:val2:bar") 282 self.assertEqual(self.d.getVar("TEST"), "foo:val:val2:bar")
262 283
263 def test_append_unset(self): 284 def test_append_unset(self):
264 self.d.setVar("TEST_prepend", "${FOO}:") 285 self.d.setVar("TEST:prepend", "${FOO}:")
265 self.d.setVar("TEST_append", ":val2") 286 self.d.setVar("TEST:append", ":val2")
266 self.d.setVar("TEST_append", ":${BAR}") 287 self.d.setVar("TEST:append", ":${BAR}")
267 self.assertEqual(self.d.getVar("TEST"), "foo::val2:bar") 288 self.assertEqual(self.d.getVar("TEST"), "foo::val2:bar")
268 289
269 def test_remove(self): 290 def test_remove(self):
270 self.d.setVar("TEST", "${VAL} ${BAR}") 291 self.d.setVar("TEST", "${VAL} ${BAR}")
271 self.d.setVar("TEST_remove", "val") 292 self.d.setVar("TEST:remove", "val")
272 self.assertEqual(self.d.getVar("TEST"), " bar") 293 self.assertEqual(self.d.getVar("TEST"), " bar")
273 294
274 def test_remove_cleared(self): 295 def test_remove_cleared(self):
275 self.d.setVar("TEST", "${VAL} ${BAR}") 296 self.d.setVar("TEST", "${VAL} ${BAR}")
276 self.d.setVar("TEST_remove", "val") 297 self.d.setVar("TEST:remove", "val")
277 self.d.setVar("TEST", "${VAL} ${BAR}") 298 self.d.setVar("TEST", "${VAL} ${BAR}")
278 self.assertEqual(self.d.getVar("TEST"), "val bar") 299 self.assertEqual(self.d.getVar("TEST"), "val bar")
279 300
@@ -281,42 +302,42 @@ class TestConcatOverride(unittest.TestCase):
281 # (including that whitespace is preserved) 302 # (including that whitespace is preserved)
282 def test_remove_inactive_override(self): 303 def test_remove_inactive_override(self):
283 self.d.setVar("TEST", "${VAL} ${BAR} 123") 304 self.d.setVar("TEST", "${VAL} ${BAR} 123")
284 self.d.setVar("TEST_remove_inactiveoverride", "val") 305 self.d.setVar("TEST:remove:inactiveoverride", "val")
285 self.assertEqual(self.d.getVar("TEST"), "val bar 123") 306 self.assertEqual(self.d.getVar("TEST"), "val bar 123")
286 307
287 def test_doubleref_remove(self): 308 def test_doubleref_remove(self):
288 self.d.setVar("TEST", "${VAL} ${BAR}") 309 self.d.setVar("TEST", "${VAL} ${BAR}")
289 self.d.setVar("TEST_remove", "val") 310 self.d.setVar("TEST:remove", "val")
290 self.d.setVar("TEST_TEST", "${TEST} ${TEST}") 311 self.d.setVar("TEST_TEST", "${TEST} ${TEST}")
291 self.assertEqual(self.d.getVar("TEST_TEST"), " bar bar") 312 self.assertEqual(self.d.getVar("TEST_TEST"), " bar bar")
292 313
293 def test_empty_remove(self): 314 def test_empty_remove(self):
294 self.d.setVar("TEST", "") 315 self.d.setVar("TEST", "")
295 self.d.setVar("TEST_remove", "val") 316 self.d.setVar("TEST:remove", "val")
296 self.assertEqual(self.d.getVar("TEST"), "") 317 self.assertEqual(self.d.getVar("TEST"), "")
297 318
298 def test_remove_expansion(self): 319 def test_remove_expansion(self):
299 self.d.setVar("BAR", "Z") 320 self.d.setVar("BAR", "Z")
300 self.d.setVar("TEST", "${BAR}/X Y") 321 self.d.setVar("TEST", "${BAR}/X Y")
301 self.d.setVar("TEST_remove", "${BAR}/X") 322 self.d.setVar("TEST:remove", "${BAR}/X")
302 self.assertEqual(self.d.getVar("TEST"), " Y") 323 self.assertEqual(self.d.getVar("TEST"), " Y")
303 324
304 def test_remove_expansion_items(self): 325 def test_remove_expansion_items(self):
305 self.d.setVar("TEST", "A B C D") 326 self.d.setVar("TEST", "A B C D")
306 self.d.setVar("BAR", "B D") 327 self.d.setVar("BAR", "B D")
307 self.d.setVar("TEST_remove", "${BAR}") 328 self.d.setVar("TEST:remove", "${BAR}")
308 self.assertEqual(self.d.getVar("TEST"), "A C ") 329 self.assertEqual(self.d.getVar("TEST"), "A C ")
309 330
310 def test_remove_preserve_whitespace(self): 331 def test_remove_preserve_whitespace(self):
311 # When the removal isn't active, the original value should be preserved 332 # When the removal isn't active, the original value should be preserved
312 self.d.setVar("TEST", " A B") 333 self.d.setVar("TEST", " A B")
313 self.d.setVar("TEST_remove", "C") 334 self.d.setVar("TEST:remove", "C")
314 self.assertEqual(self.d.getVar("TEST"), " A B") 335 self.assertEqual(self.d.getVar("TEST"), " A B")
315 336
316 def test_remove_preserve_whitespace2(self): 337 def test_remove_preserve_whitespace2(self):
317 # When the removal is active preserve the whitespace 338 # When the removal is active preserve the whitespace
318 self.d.setVar("TEST", " A B") 339 self.d.setVar("TEST", " A B")
319 self.d.setVar("TEST_remove", "B") 340 self.d.setVar("TEST:remove", "B")
320 self.assertEqual(self.d.getVar("TEST"), " A ") 341 self.assertEqual(self.d.getVar("TEST"), " A ")
321 342
322class TestOverrides(unittest.TestCase): 343class TestOverrides(unittest.TestCase):
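This block is the mechanical half of BitBake's switch from underscore to colon override syntax: TEST_append, TEST_prepend and TEST_remove become TEST:append, TEST:prepend and TEST:remove. A short sketch of the new spellings, using setVar as the tests do (in a recipe the equivalent would be e.g. TEST:append = " extra"):

    import bb.data

    d = bb.data.init()
    d.setVar("TEST", "val other")
    d.setVar("TEST:append", " extra")    # old spelling: TEST_append
    d.setVar("TEST:remove", "val")       # old spelling: TEST_remove
    # remove operates on whitespace-separated tokens and preserves the
    # surrounding whitespace, as test_remove above demonstrates.
    print(d.getVar("TEST"))              # -> " other extra"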
@@ -329,81 +350,86 @@ class TestOverrides(unittest.TestCase):
329 self.assertEqual(self.d.getVar("TEST"), "testvalue") 350 self.assertEqual(self.d.getVar("TEST"), "testvalue")
330 351
331 def test_one_override(self): 352 def test_one_override(self):
332 self.d.setVar("TEST_bar", "testvalue2") 353 self.d.setVar("TEST:bar", "testvalue2")
333 self.assertEqual(self.d.getVar("TEST"), "testvalue2") 354 self.assertEqual(self.d.getVar("TEST"), "testvalue2")
334 355
335 def test_one_override_unset(self): 356 def test_one_override_unset(self):
336 self.d.setVar("TEST2_bar", "testvalue2") 357 self.d.setVar("TEST2:bar", "testvalue2")
337 358
338 self.assertEqual(self.d.getVar("TEST2"), "testvalue2") 359 self.assertEqual(self.d.getVar("TEST2"), "testvalue2")
339 self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST2', 'OVERRIDES', 'TEST2_bar']) 360 self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST2', 'OVERRIDES', 'TEST2:bar'])
340 361
341 def test_multiple_override(self): 362 def test_multiple_override(self):
342 self.d.setVar("TEST_bar", "testvalue2") 363 self.d.setVar("TEST:bar", "testvalue2")
343 self.d.setVar("TEST_local", "testvalue3") 364 self.d.setVar("TEST:local", "testvalue3")
344 self.d.setVar("TEST_foo", "testvalue4") 365 self.d.setVar("TEST:foo", "testvalue4")
345 self.assertEqual(self.d.getVar("TEST"), "testvalue3") 366 self.assertEqual(self.d.getVar("TEST"), "testvalue3")
346 self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST_foo', 'OVERRIDES', 'TEST_bar', 'TEST_local']) 367 self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST:foo', 'OVERRIDES', 'TEST:bar', 'TEST:local'])
347 368
348 def test_multiple_combined_overrides(self): 369 def test_multiple_combined_overrides(self):
349 self.d.setVar("TEST_local_foo_bar", "testvalue3") 370 self.d.setVar("TEST:local:foo:bar", "testvalue3")
350 self.assertEqual(self.d.getVar("TEST"), "testvalue3") 371 self.assertEqual(self.d.getVar("TEST"), "testvalue3")
351 372
352 def test_multiple_overrides_unset(self): 373 def test_multiple_overrides_unset(self):
353 self.d.setVar("TEST2_local_foo_bar", "testvalue3") 374 self.d.setVar("TEST2:local:foo:bar", "testvalue3")
354 self.assertEqual(self.d.getVar("TEST2"), "testvalue3") 375 self.assertEqual(self.d.getVar("TEST2"), "testvalue3")
355 376
356 def test_keyexpansion_override(self): 377 def test_keyexpansion_override(self):
357 self.d.setVar("LOCAL", "local") 378 self.d.setVar("LOCAL", "local")
358 self.d.setVar("TEST_bar", "testvalue2") 379 self.d.setVar("TEST:bar", "testvalue2")
359 self.d.setVar("TEST_${LOCAL}", "testvalue3") 380 self.d.setVar("TEST:${LOCAL}", "testvalue3")
360 self.d.setVar("TEST_foo", "testvalue4") 381 self.d.setVar("TEST:foo", "testvalue4")
361 bb.data.expandKeys(self.d) 382 bb.data.expandKeys(self.d)
362 self.assertEqual(self.d.getVar("TEST"), "testvalue3") 383 self.assertEqual(self.d.getVar("TEST"), "testvalue3")
363 384
364 def test_rename_override(self): 385 def test_rename_override(self):
365 self.d.setVar("ALTERNATIVE_ncurses-tools_class-target", "a") 386 self.d.setVar("ALTERNATIVE:ncurses-tools:class-target", "a")
366 self.d.setVar("OVERRIDES", "class-target") 387 self.d.setVar("OVERRIDES", "class-target")
367 self.d.renameVar("ALTERNATIVE_ncurses-tools", "ALTERNATIVE_lib32-ncurses-tools") 388 self.d.renameVar("ALTERNATIVE:ncurses-tools", "ALTERNATIVE:lib32-ncurses-tools")
368 self.assertEqual(self.d.getVar("ALTERNATIVE_lib32-ncurses-tools"), "a") 389 self.assertEqual(self.d.getVar("ALTERNATIVE:lib32-ncurses-tools"), "a")
369 390
370 def test_underscore_override(self): 391 def test_underscore_override(self):
371 self.d.setVar("TEST_bar", "testvalue2") 392 self.d.setVar("TEST:bar", "testvalue2")
372 self.d.setVar("TEST_some_val", "testvalue3") 393 self.d.setVar("TEST:some_val", "testvalue3")
373 self.d.setVar("TEST_foo", "testvalue4") 394 self.d.setVar("TEST:foo", "testvalue4")
374 self.d.setVar("OVERRIDES", "foo:bar:some_val") 395 self.d.setVar("OVERRIDES", "foo:bar:some_val")
375 self.assertEqual(self.d.getVar("TEST"), "testvalue3") 396 self.assertEqual(self.d.getVar("TEST"), "testvalue3")
376 397
398 # Test an override with _<numeric> in it based on a real world OE issue
399 def test_underscore_override_2(self):
400 self.d.setVar("TARGET_ARCH", "x86_64")
401 self.d.setVar("PN", "test-${TARGET_ARCH}")
402 self.d.setVar("VERSION", "1")
403 self.d.setVar("VERSION:pn-test-${TARGET_ARCH}", "2")
404 self.d.setVar("OVERRIDES", "pn-${PN}")
405 bb.data.expandKeys(self.d)
406 self.assertEqual(self.d.getVar("VERSION"), "2")
407
377 def test_remove_with_override(self): 408 def test_remove_with_override(self):
378 self.d.setVar("TEST_bar", "testvalue2") 409 self.d.setVar("TEST:bar", "testvalue2")
379 self.d.setVar("TEST_some_val", "testvalue3 testvalue5") 410 self.d.setVar("TEST:some_val", "testvalue3 testvalue5")
380 self.d.setVar("TEST_some_val_remove", "testvalue3") 411 self.d.setVar("TEST:some_val:remove", "testvalue3")
381 self.d.setVar("TEST_foo", "testvalue4") 412 self.d.setVar("TEST:foo", "testvalue4")
382 self.d.setVar("OVERRIDES", "foo:bar:some_val") 413 self.d.setVar("OVERRIDES", "foo:bar:some_val")
383 self.assertEqual(self.d.getVar("TEST"), " testvalue5") 414 self.assertEqual(self.d.getVar("TEST"), " testvalue5")
384 415
385 def test_append_and_override_1(self): 416 def test_append_and_override_1(self):
386 self.d.setVar("TEST_append", "testvalue2") 417 self.d.setVar("TEST:append", "testvalue2")
387 self.d.setVar("TEST_bar", "testvalue3") 418 self.d.setVar("TEST:bar", "testvalue3")
388 self.assertEqual(self.d.getVar("TEST"), "testvalue3testvalue2") 419 self.assertEqual(self.d.getVar("TEST"), "testvalue3testvalue2")
389 420
390 def test_append_and_override_2(self): 421 def test_append_and_override_2(self):
391 self.d.setVar("TEST_append_bar", "testvalue2") 422 self.d.setVar("TEST:append:bar", "testvalue2")
392 self.assertEqual(self.d.getVar("TEST"), "testvaluetestvalue2") 423 self.assertEqual(self.d.getVar("TEST"), "testvaluetestvalue2")
393 424
394 def test_append_and_override_3(self): 425 def test_append_and_override_3(self):
395 self.d.setVar("TEST_bar_append", "testvalue2") 426 self.d.setVar("TEST:bar:append", "testvalue2")
396 self.assertEqual(self.d.getVar("TEST"), "testvalue2") 427 self.assertEqual(self.d.getVar("TEST"), "testvalue2")
397 428
398 # Test an override with _<numeric> in it based on a real world OE issue 429 def test_append_and_unused_override(self):
399 def test_underscore_override(self): 430 # Had a bug where an unused override append could return "" instead of None
400 self.d.setVar("TARGET_ARCH", "x86_64") 431 self.d.setVar("BAR:append:unusedoverride", "testvalue2")
401 self.d.setVar("PN", "test-${TARGET_ARCH}") 432 self.assertEqual(self.d.getVar("BAR"), None)
402 self.d.setVar("VERSION", "1")
403 self.d.setVar("VERSION_pn-test-${TARGET_ARCH}", "2")
404 self.d.setVar("OVERRIDES", "pn-${PN}")
405 bb.data.expandKeys(self.d)
406 self.assertEqual(self.d.getVar("VERSION"), "2")
407 433
408class TestKeyExpansion(unittest.TestCase): 434class TestKeyExpansion(unittest.TestCase):
409 def setUp(self): 435 def setUp(self):
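test_append_and_override_2/3 and the new test_append_and_unused_override pin down two subtle points: the position of :append relative to an override changes the result, and an append guarded by an override that never fires leaves the variable unset rather than empty. A sketch, assuming OVERRIDES contains "bar" as in the test fixture's setUp:

    import bb.data

    d = bb.data.init()
    d.setVar("OVERRIDES", "bar")
    d.setVar("TEST", "testvalue")

    # ":append:bar" appends to TEST only while the override is active.
    d.setVar("TEST:append:bar", "2")
    print(d.getVar("TEST"))      # -> "testvalue2"

    # An append behind an override that never fires returns None, not "".
    d.setVar("BAR:append:unusedoverride", "x")
    print(d.getVar("BAR"))       # -> None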
@@ -424,17 +450,64 @@ class TestFlags(unittest.TestCase):
424 self.d = bb.data.init() 450 self.d = bb.data.init()
425 self.d.setVar("foo", "value of foo") 451 self.d.setVar("foo", "value of foo")
426 self.d.setVarFlag("foo", "flag1", "value of flag1") 452 self.d.setVarFlag("foo", "flag1", "value of flag1")
453 self.d.setVarFlag("foo", "_defaultval_flag_flag1", "default of flag1")
427 self.d.setVarFlag("foo", "flag2", "value of flag2") 454 self.d.setVarFlag("foo", "flag2", "value of flag2")
455 self.d.setVarFlag("foo", "_defaultval_flag_flag2", "default of flag2")
456 self.d.setVarFlag("foo", "flag3", "value of flag3")
457 self.d.setVarFlag("foo", "_defaultval_flag_flagnovalue", "default of flagnovalue")
428 458
429 def test_setflag(self): 459 def test_setflag(self):
430 self.assertEqual(self.d.getVarFlag("foo", "flag1", False), "value of flag1") 460 self.assertEqual(self.d.getVarFlag("foo", "flag1", False), "value of flag1")
431 self.assertEqual(self.d.getVarFlag("foo", "flag2", False), "value of flag2") 461 self.assertEqual(self.d.getVarFlag("foo", "flag2", False), "value of flag2")
462 self.assertDictEqual(
463 self.d.getVarFlags("foo"),
464 {
465 "flag1": "value of flag1",
466 "flag2": "value of flag2",
467 "flag3": "value of flag3",
468 "flagnovalue": "default of flagnovalue",
469 }
470 )
471 self.assertDictEqual(
472 self.d.getVarFlags("foo", internalflags=True),
473 {
474 "_content": "value of foo",
475 "flag1": "value of flag1",
476 "flag2": "value of flag2",
477 "flag3": "value of flag3",
478 "_defaultval_flag_flag1": "default of flag1",
479 "_defaultval_flag_flag2": "default of flag2",
480 "_defaultval_flag_flagnovalue": "default of flagnovalue",
481 }
482 )
432 483
433 def test_delflag(self): 484 def test_delflag(self):
434 self.d.delVarFlag("foo", "flag2") 485 self.d.delVarFlag("foo", "flag2")
486 self.d.delVarFlag("foo", "flag3")
435 self.assertEqual(self.d.getVarFlag("foo", "flag1", False), "value of flag1") 487 self.assertEqual(self.d.getVarFlag("foo", "flag1", False), "value of flag1")
436 self.assertEqual(self.d.getVarFlag("foo", "flag2", False), None) 488 self.assertEqual(self.d.getVarFlag("foo", "flag2", False), None)
437 489 self.assertDictEqual(
490 self.d.getVarFlags("foo"),
491 {
492 "flag1": "value of flag1",
493 "flagnovalue": "default of flagnovalue",
494 }
495 )
496 self.assertDictEqual(
497 self.d.getVarFlags("foo", internalflags=True),
498 {
499 "_content": "value of foo",
500 "flag1": "value of flag1",
501 "_defaultval_flag_flag1": "default of flag1",
502 "_defaultval_flag_flagnovalue": "default of flagnovalue",
503 }
504 )
505
506 def test_delvar(self):
507 self.d.delVar("foo")
508 self.assertEqual(self.d.getVarFlag("foo", "flag1", False), None)
509 self.assertEqual(self.d.getVarFlag("foo", "flag2", False), None)
510 self.assertEqual(self.d.getVarFlags("foo", internalflags=True), None)
438 511
439class Contains(unittest.TestCase): 512class Contains(unittest.TestCase):
440 def setUp(self): 513 def setUp(self):
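The expanded TestFlags cases document the "_defaultval_flag_<name>" convention: such an entry supplies a default that getVarFlags() reports under <name> when no explicit flag value exists, while internalflags=True exposes the raw storage, including _content and the default entries themselves. A minimal sketch:

    import bb.data

    d = bb.data.init()
    d.setVar("foo", "value of foo")
    d.setVarFlag("foo", "flag1", "value of flag1")
    d.setVarFlag("foo", "_defaultval_flag_flagnovalue", "default of flagnovalue")

    print(d.getVarFlags("foo"))
    # -> {'flag1': 'value of flag1', 'flagnovalue': 'default of flagnovalue'}

    print(d.getVarFlags("foo", internalflags=True))
    # -> additionally includes '_content' and the raw '_defaultval_flag_*' keys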
@@ -498,7 +571,7 @@ class TaskHash(unittest.TestCase):
498 d.setVar("VAR", "val") 571 d.setVar("VAR", "val")
499 # Adding an inactive removal shouldn't change the hash 572 # Adding an inactive removal shouldn't change the hash
500 d.setVar("BAR", "notbar") 573 d.setVar("BAR", "notbar")
501 d.setVar("MYCOMMAND_remove", "${BAR}") 574 d.setVar("MYCOMMAND:remove", "${BAR}")
502 nexthash = gettask_bashhash("mytask", d) 575 nexthash = gettask_bashhash("mytask", d)
503 self.assertEqual(orighash, nexthash) 576 self.assertEqual(orighash, nexthash)
504 577
diff --git a/bitbake/lib/bb/tests/event.py b/bitbake/lib/bb/tests/event.py
index 9ca7e9bc8e..ef61891d30 100644
--- a/bitbake/lib/bb/tests/event.py
+++ b/bitbake/lib/bb/tests/event.py
@@ -13,6 +13,7 @@ import pickle
13import threading 13import threading
14import time 14import time
15import unittest 15import unittest
16import tempfile
16from unittest.mock import Mock 17from unittest.mock import Mock
17from unittest.mock import call 18from unittest.mock import call
18 19
@@ -157,7 +158,7 @@ class EventHandlingTest(unittest.TestCase):
157 self._test_process.event_handler, 158 self._test_process.event_handler,
158 event, 159 event,
159 None) 160 None)
160 self._test_process.event_handler.assert_called_once_with(event) 161 self._test_process.event_handler.assert_called_once_with(event, None)
161 162
162 def test_fire_class_handlers(self): 163 def test_fire_class_handlers(self):
163 """ Test fire_class_handlers method """ 164 """ Test fire_class_handlers method """
@@ -175,10 +176,10 @@ class EventHandlingTest(unittest.TestCase):
175 bb.event.fire_class_handlers(event1, None) 176 bb.event.fire_class_handlers(event1, None)
176 bb.event.fire_class_handlers(event2, None) 177 bb.event.fire_class_handlers(event2, None)
177 bb.event.fire_class_handlers(event2, None) 178 bb.event.fire_class_handlers(event2, None)
178 expected_event_handler1 = [call(event1)] 179 expected_event_handler1 = [call(event1, None)]
179 expected_event_handler2 = [call(event1), 180 expected_event_handler2 = [call(event1, None),
180 call(event2), 181 call(event2, None),
181 call(event2)] 182 call(event2, None)]
182 self.assertEqual(self._test_process.event_handler1.call_args_list, 183 self.assertEqual(self._test_process.event_handler1.call_args_list,
183 expected_event_handler1) 184 expected_event_handler1)
184 self.assertEqual(self._test_process.event_handler2.call_args_list, 185 self.assertEqual(self._test_process.event_handler2.call_args_list,
@@ -205,7 +206,7 @@ class EventHandlingTest(unittest.TestCase):
205 bb.event.fire_class_handlers(event2, None) 206 bb.event.fire_class_handlers(event2, None)
206 bb.event.fire_class_handlers(event2, None) 207 bb.event.fire_class_handlers(event2, None)
207 expected_event_handler1 = [] 208 expected_event_handler1 = []
208 expected_event_handler2 = [call(event1)] 209 expected_event_handler2 = [call(event1, None)]
209 self.assertEqual(self._test_process.event_handler1.call_args_list, 210 self.assertEqual(self._test_process.event_handler1.call_args_list,
210 expected_event_handler1) 211 expected_event_handler1)
211 self.assertEqual(self._test_process.event_handler2.call_args_list, 212 self.assertEqual(self._test_process.event_handler2.call_args_list,
@@ -223,7 +224,7 @@ class EventHandlingTest(unittest.TestCase):
223 self.assertEqual(result, bb.event.Registered) 224 self.assertEqual(result, bb.event.Registered)
224 bb.event.fire_class_handlers(event1, None) 225 bb.event.fire_class_handlers(event1, None)
225 bb.event.fire_class_handlers(event2, None) 226 bb.event.fire_class_handlers(event2, None)
226 expected = [call(event1), call(event2)] 227 expected = [call(event1, None), call(event2, None)]
227 self.assertEqual(self._test_process.event_handler1.call_args_list, 228 self.assertEqual(self._test_process.event_handler1.call_args_list,
228 expected) 229 expected)
229 230
@@ -237,7 +238,7 @@ class EventHandlingTest(unittest.TestCase):
237 self.assertEqual(result, bb.event.Registered) 238 self.assertEqual(result, bb.event.Registered)
238 bb.event.fire_class_handlers(event1, None) 239 bb.event.fire_class_handlers(event1, None)
239 bb.event.fire_class_handlers(event2, None) 240 bb.event.fire_class_handlers(event2, None)
240 expected = [call(event1), call(event2), call(event1)] 241 expected = [call(event1, None), call(event2, None), call(event1, None)]
241 self.assertEqual(self._test_process.event_handler1.call_args_list, 242 self.assertEqual(self._test_process.event_handler1.call_args_list,
242 expected) 243 expected)
243 244
@@ -251,7 +252,7 @@ class EventHandlingTest(unittest.TestCase):
251 self.assertEqual(result, bb.event.Registered) 252 self.assertEqual(result, bb.event.Registered)
252 bb.event.fire_class_handlers(event1, None) 253 bb.event.fire_class_handlers(event1, None)
253 bb.event.fire_class_handlers(event2, None) 254 bb.event.fire_class_handlers(event2, None)
254 expected = [call(event1), call(event2), call(event1), call(event2)] 255 expected = [call(event1, None), call(event2, None), call(event1, None), call(event2, None)]
255 self.assertEqual(self._test_process.event_handler1.call_args_list, 256 self.assertEqual(self._test_process.event_handler1.call_args_list,
256 expected) 257 expected)
257 258
@@ -359,9 +360,10 @@ class EventHandlingTest(unittest.TestCase):
359 360
360 event1 = bb.event.ConfigParsed() 361 event1 = bb.event.ConfigParsed()
361 bb.event.fire(event1, None) 362 bb.event.fire(event1, None)
362 expected = [call(event1)] 363 expected = [call(event1, None)]
363 self.assertEqual(self._test_process.event_handler1.call_args_list, 364 self.assertEqual(self._test_process.event_handler1.call_args_list,
364 expected) 365 expected)
366 expected = [call(event1)]
365 self.assertEqual(self._test_ui1.event.send.call_args_list, 367 self.assertEqual(self._test_ui1.event.send.call_args_list,
366 expected) 368 expected)
367 369
@@ -450,10 +452,9 @@ class EventHandlingTest(unittest.TestCase):
450 and disable threadlocks tests """ 452 and disable threadlocks tests """
451 bb.event.fire(bb.event.OperationStarted(), None) 453 bb.event.fire(bb.event.OperationStarted(), None)
452 454
453 def test_enable_threadlock(self): 455 def test_event_threadlock(self):
454 """ Test enable_threadlock method """ 456 """ Test enable_threadlock method """
455 self._set_threadlock_test_mockups() 457 self._set_threadlock_test_mockups()
456 bb.event.enable_threadlock()
457 self._set_and_run_threadlock_test_workers() 458 self._set_and_run_threadlock_test_workers()
458 # Calls to UI handlers should be in order as all the registered 459 # Calls to UI handlers should be in order as all the registered
459 # handlers for the event coming from the first worker should be 460 # handlers for the event coming from the first worker should be
@@ -461,20 +462,6 @@ class EventHandlingTest(unittest.TestCase):
461 self.assertEqual(self._threadlock_test_calls, 462 self.assertEqual(self._threadlock_test_calls,
462 ["w1_ui1", "w1_ui2", "w2_ui1", "w2_ui2"]) 463 ["w1_ui1", "w1_ui2", "w2_ui1", "w2_ui2"])
463 464
464
465 def test_disable_threadlock(self):
466 """ Test disable_threadlock method """
467 self._set_threadlock_test_mockups()
468 bb.event.disable_threadlock()
469 self._set_and_run_threadlock_test_workers()
470 # Calls to UI handlers should be intertwined together. Thanks to the
471 # delay in the registered handlers for the event coming from the first
472 # worker, the event coming from the second worker starts being
473 # processed before finishing handling the first worker event.
474 self.assertEqual(self._threadlock_test_calls,
475 ["w1_ui1", "w2_ui1", "w1_ui2", "w2_ui2"])
476
477
478class EventClassesTest(unittest.TestCase): 465class EventClassesTest(unittest.TestCase):
479 """ Event classes test class """ 466 """ Event classes test class """
480 467
@@ -482,6 +469,8 @@ class EventClassesTest(unittest.TestCase):
482 469
483 def setUp(self): 470 def setUp(self):
484 bb.event.worker_pid = EventClassesTest._worker_pid 471 bb.event.worker_pid = EventClassesTest._worker_pid
472 self.d = bb.data.init()
473 bb.parse.siggen = bb.siggen.init(self.d)
485 474
486 def test_Event(self): 475 def test_Event(self):
487 """ Test the Event base class """ 476 """ Test the Event base class """
@@ -964,3 +953,24 @@ class EventClassesTest(unittest.TestCase):
964 event = bb.event.FindSigInfoResult(result) 953 event = bb.event.FindSigInfoResult(result)
965 self.assertEqual(event.result, result) 954 self.assertEqual(event.result, result)
966 self.assertEqual(event.pid, EventClassesTest._worker_pid) 955 self.assertEqual(event.pid, EventClassesTest._worker_pid)
956
957 def test_lineno_in_eventhandler(self):
958 # The error lineno is 5, not 4 since the first line is '\n'
959 error_line = """
960# Comment line1
961# Comment line2
962python test_lineno_in_eventhandler() {
963 This is an error line
964}
965addhandler test_lineno_in_eventhandler
966test_lineno_in_eventhandler[eventmask] = "bb.event.ConfigParsed"
967"""
968
969 with self.assertLogs() as logs:
970 f = tempfile.NamedTemporaryFile(suffix = '.bb')
971 f.write(bytes(error_line, "utf-8"))
972 f.flush()
973 d = bb.parse.handle(f.name, self.d)['']
974
975 output = "".join(logs.output)
976 self.assertTrue(" line 5\n" in output)
diff --git a/bitbake/lib/bb/tests/fetch-testdata/debian/pool/main/m/minicom/index.html b/bitbake/lib/bb/tests/fetch-testdata/debian/pool/main/m/minicom/index.html
new file mode 100644
index 0000000000..4a1eb4de13
--- /dev/null
+++ b/bitbake/lib/bb/tests/fetch-testdata/debian/pool/main/m/minicom/index.html
@@ -0,0 +1,59 @@
1<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
2<html>
3 <head>
4 <title>Index of /debian/pool/main/m/minicom</title>
5 </head>
6 <body>
7<h1>Index of /debian/pool/main/m/minicom</h1>
8 <table>
9 <tr><th valign="top"><img src="/icons/blank.gif" alt="[ICO]"></th><th><a href="?C=N;O=D">Name</a></th><th><a href="?C=M;O=A">Last modified</a></th><th><a href="?C=S;O=A">Size</a></th></tr>
10 <tr><th colspan="4"><hr></th></tr>
11<tr><td valign="top"><img src="/icons/back.gif" alt="[PARENTDIR]"></td><td><a href="/debian/pool/main/m/">Parent Directory</a></td><td>&nbsp;</td><td align="right"> - </td></tr>
12<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1.debian.tar.xz">minicom_2.7-1+deb8u1.debian.tar.xz</a></td><td align="right">2017-04-24 08:22 </td><td align="right"> 14K</td></tr>
13<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1.dsc">minicom_2.7-1+deb8u1.dsc</a></td><td align="right">2017-04-24 08:22 </td><td align="right">1.9K</td></tr>
14<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1_amd64.deb">minicom_2.7-1+deb8u1_amd64.deb</a></td><td align="right">2017-04-25 21:10 </td><td align="right">257K</td></tr>
15<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1_armel.deb">minicom_2.7-1+deb8u1_armel.deb</a></td><td align="right">2017-04-26 00:58 </td><td align="right">246K</td></tr>
16<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1_armhf.deb">minicom_2.7-1+deb8u1_armhf.deb</a></td><td align="right">2017-04-26 00:58 </td><td align="right">245K</td></tr>
17<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1+deb8u1_i386.deb">minicom_2.7-1+deb8u1_i386.deb</a></td><td align="right">2017-04-25 21:41 </td><td align="right">258K</td></tr>
18<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1.debian.tar.xz">minicom_2.7-1.1.debian.tar.xz</a></td><td align="right">2017-04-22 09:34 </td><td align="right"> 14K</td></tr>
19<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1.dsc">minicom_2.7-1.1.dsc</a></td><td align="right">2017-04-22 09:34 </td><td align="right">1.9K</td></tr>
20<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_amd64.deb">minicom_2.7-1.1_amd64.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">261K</td></tr>
21<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_arm64.deb">minicom_2.7-1.1_arm64.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">250K</td></tr>
22<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_armel.deb">minicom_2.7-1.1_armel.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">255K</td></tr>
23<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_armhf.deb">minicom_2.7-1.1_armhf.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">254K</td></tr>
24<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_i386.deb">minicom_2.7-1.1_i386.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">266K</td></tr>
25<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_mips.deb">minicom_2.7-1.1_mips.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">258K</td></tr>
26<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_mips64el.deb">minicom_2.7-1.1_mips64el.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">259K</td></tr>
27<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_mipsel.deb">minicom_2.7-1.1_mipsel.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">259K</td></tr>
28<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_ppc64el.deb">minicom_2.7-1.1_ppc64el.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">253K</td></tr>
29<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7-1.1_s390x.deb">minicom_2.7-1.1_s390x.deb</a></td><td align="right">2017-04-22 15:29 </td><td align="right">261K</td></tr>
30<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_amd64.deb">minicom_2.7.1-1+b1_amd64.deb</a></td><td align="right">2018-05-06 08:14 </td><td align="right">262K</td></tr>
31<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_arm64.deb">minicom_2.7.1-1+b1_arm64.deb</a></td><td align="right">2018-05-06 07:58 </td><td align="right">250K</td></tr>
32<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_armel.deb">minicom_2.7.1-1+b1_armel.deb</a></td><td align="right">2018-05-06 08:45 </td><td align="right">253K</td></tr>
33<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_armhf.deb">minicom_2.7.1-1+b1_armhf.deb</a></td><td align="right">2018-05-06 10:42 </td><td align="right">253K</td></tr>
34<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_i386.deb">minicom_2.7.1-1+b1_i386.deb</a></td><td align="right">2018-05-06 08:55 </td><td align="right">266K</td></tr>
35<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_mips.deb">minicom_2.7.1-1+b1_mips.deb</a></td><td align="right">2018-05-06 08:14 </td><td align="right">258K</td></tr>
36<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_mipsel.deb">minicom_2.7.1-1+b1_mipsel.deb</a></td><td align="right">2018-05-06 12:13 </td><td align="right">259K</td></tr>
37<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_ppc64el.deb">minicom_2.7.1-1+b1_ppc64el.deb</a></td><td align="right">2018-05-06 09:10 </td><td align="right">260K</td></tr>
38<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b1_s390x.deb">minicom_2.7.1-1+b1_s390x.deb</a></td><td align="right">2018-05-06 08:14 </td><td align="right">257K</td></tr>
39<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1+b2_mips64el.deb">minicom_2.7.1-1+b2_mips64el.deb</a></td><td align="right">2018-05-06 09:41 </td><td align="right">260K</td></tr>
40<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1.debian.tar.xz">minicom_2.7.1-1.debian.tar.xz</a></td><td align="right">2017-08-13 15:40 </td><td align="right"> 14K</td></tr>
41<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.7.1-1.dsc">minicom_2.7.1-1.dsc</a></td><td align="right">2017-08-13 15:40 </td><td align="right">1.8K</td></tr>
42<tr><td valign="top"><img src="/icons/compressed.gif" alt="[ ]"></td><td><a href="minicom_2.7.1.orig.tar.gz">minicom_2.7.1.orig.tar.gz</a></td><td align="right">2017-08-13 15:40 </td><td align="right">855K</td></tr>
43<tr><td valign="top"><img src="/icons/compressed.gif" alt="[ ]"></td><td><a href="minicom_2.7.orig.tar.gz">minicom_2.7.orig.tar.gz</a></td><td align="right">2014-01-01 09:36 </td><td align="right">843K</td></tr>
44<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2.debian.tar.xz">minicom_2.8-2.debian.tar.xz</a></td><td align="right">2021-06-15 03:47 </td><td align="right"> 14K</td></tr>
45<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2.dsc">minicom_2.8-2.dsc</a></td><td align="right">2021-06-15 03:47 </td><td align="right">1.8K</td></tr>
46<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_amd64.deb">minicom_2.8-2_amd64.deb</a></td><td align="right">2021-06-15 03:58 </td><td align="right">280K</td></tr>
47<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_arm64.deb">minicom_2.8-2_arm64.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">275K</td></tr>
48<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_armel.deb">minicom_2.8-2_armel.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">271K</td></tr>
49<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_armhf.deb">minicom_2.8-2_armhf.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">272K</td></tr>
50<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_i386.deb">minicom_2.8-2_i386.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">285K</td></tr>
51<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_mips64el.deb">minicom_2.8-2_mips64el.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">277K</td></tr>
52<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_mipsel.deb">minicom_2.8-2_mipsel.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">278K</td></tr>
53<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_ppc64el.deb">minicom_2.8-2_ppc64el.deb</a></td><td align="right">2021-06-15 04:13 </td><td align="right">286K</td></tr>
54<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8-2_s390x.deb">minicom_2.8-2_s390x.deb</a></td><td align="right">2021-06-15 03:58 </td><td align="right">275K</td></tr>
55<tr><td valign="top"><img src="/icons/unknown.gif" alt="[ ]"></td><td><a href="minicom_2.8.orig.tar.bz2">minicom_2.8.orig.tar.bz2</a></td><td align="right">2021-01-03 12:44 </td><td align="right">598K</td></tr>
56 <tr><th colspan="4"><hr></th></tr>
57</table>
58<address>Apache Server at ftp.debian.org Port 80</address>
59</body></html>
diff --git a/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.10/index.html b/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.10/index.html
new file mode 100644
index 0000000000..4e41af6d6a
--- /dev/null
+++ b/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.10/index.html
@@ -0,0 +1,20 @@
1<!DOCTYPE html><html><head><meta http-equiv="content-type" content="text/html; charset=utf-8"><meta name="viewport" content="width=device-width"><style type="text/css">body,html {background:#fff;font-family:"Bitstream Vera Sans","Lucida Grande","Lucida Sans Unicode",Lucidux,Verdana,Lucida,sans-serif;}tr:nth-child(even) {background:#f4f4f4;}th,td {padding:0.1em 0.5em;}th {text-align:left;font-weight:bold;background:#eee;border-bottom:1px solid #aaa;}#list {border:1px solid #aaa;width:100%;}a {color:#a33;}a:hover {color:#e33;}</style>
2
3<title>Index of /sources/libxml2/2.10/</title>
4</head><body><h1>Index of /sources/libxml2/2.10/</h1>
5<table id="list"><thead><tr><th style="width:55%"><a href="?C=N&amp;O=A">File Name</a>&nbsp;<a href="?C=N&amp;O=D">&nbsp;&darr;&nbsp;</a></th><th style="width:20%"><a href="?C=S&amp;O=A">File Size</a>&nbsp;<a href="?C=S&amp;O=D">&nbsp;&darr;&nbsp;</a></th><th style="width:25%"><a href="?C=M&amp;O=A">Date</a>&nbsp;<a href="?C=M&amp;O=D">&nbsp;&darr;&nbsp;</a></th></tr></thead>
6<tbody><tr><td class="link"><a href="../">Parent directory/</a></td><td class="size">-</td><td class="date">-</td></tr>
7<tr><td class="link"><a href="LATEST-IS-2.10.3" title="LATEST-IS-2.10.3">LATEST-IS-2.10.3</a></td><td class="size">2.5 MiB</td><td class="date">2022-Oct-14 12:55</td></tr>
8<tr><td class="link"><a href="libxml2-2.10.0.news" title="libxml2-2.10.0.news">libxml2-2.10.0.news</a></td><td class="size">7.1 KiB</td><td class="date">2022-Aug-17 11:55</td></tr>
9<tr><td class="link"><a href="libxml2-2.10.0.sha256sum" title="libxml2-2.10.0.sha256sum">libxml2-2.10.0.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-Aug-17 11:55</td></tr>
10<tr><td class="link"><a href="libxml2-2.10.0.tar.xz" title="libxml2-2.10.0.tar.xz">libxml2-2.10.0.tar.xz</a></td><td class="size">2.6 MiB</td><td class="date">2022-Aug-17 11:55</td></tr>
11<tr><td class="link"><a href="libxml2-2.10.1.news" title="libxml2-2.10.1.news">libxml2-2.10.1.news</a></td><td class="size">455 B</td><td class="date">2022-Aug-25 11:33</td></tr>
12<tr><td class="link"><a href="libxml2-2.10.1.sha256sum" title="libxml2-2.10.1.sha256sum">libxml2-2.10.1.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-Aug-25 11:33</td></tr>
13<tr><td class="link"><a href="libxml2-2.10.1.tar.xz" title="libxml2-2.10.1.tar.xz">libxml2-2.10.1.tar.xz</a></td><td class="size">2.6 MiB</td><td class="date">2022-Aug-25 11:33</td></tr>
14<tr><td class="link"><a href="libxml2-2.10.2.news" title="libxml2-2.10.2.news">libxml2-2.10.2.news</a></td><td class="size">309 B</td><td class="date">2022-Aug-29 14:56</td></tr>
15<tr><td class="link"><a href="libxml2-2.10.2.sha256sum" title="libxml2-2.10.2.sha256sum">libxml2-2.10.2.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-Aug-29 14:56</td></tr>
16<tr><td class="link"><a href="libxml2-2.10.2.tar.xz" title="libxml2-2.10.2.tar.xz">libxml2-2.10.2.tar.xz</a></td><td class="size">2.5 MiB</td><td class="date">2022-Aug-29 14:56</td></tr>
17<tr><td class="link"><a href="libxml2-2.10.3.news" title="libxml2-2.10.3.news">libxml2-2.10.3.news</a></td><td class="size">294 B</td><td class="date">2022-Oct-14 12:55</td></tr>
18<tr><td class="link"><a href="libxml2-2.10.3.sha256sum" title="libxml2-2.10.3.sha256sum">libxml2-2.10.3.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-Oct-14 12:55</td></tr>
19<tr><td class="link"><a href="libxml2-2.10.3.tar.xz" title="libxml2-2.10.3.tar.xz">libxml2-2.10.3.tar.xz</a></td><td class="size">2.5 MiB</td><td class="date">2022-Oct-14 12:55</td></tr>
20</tbody></table></body></html>
diff --git a/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.9/index.html b/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.9/index.html
new file mode 100644
index 0000000000..abdfdd0fa2
--- /dev/null
+++ b/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/2.9/index.html
@@ -0,0 +1,40 @@
1<!DOCTYPE html><html><head><meta http-equiv="content-type" content="text/html; charset=utf-8"><meta name="viewport" content="width=device-width"><style type="text/css">body,html {background:#fff;font-family:"Bitstream Vera Sans","Lucida Grande","Lucida Sans Unicode",Lucidux,Verdana,Lucida,sans-serif;}tr:nth-child(even) {background:#f4f4f4;}th,td {padding:0.1em 0.5em;}th {text-align:left;font-weight:bold;background:#eee;border-bottom:1px solid #aaa;}#list {border:1px solid #aaa;width:100%;}a {color:#a33;}a:hover {color:#e33;}</style>
2
3<title>Index of /sources/libxml2/2.9/</title>
4</head><body><h1>Index of /sources/libxml2/2.9/</h1>
5<table id="list"><thead><tr><th style="width:55%"><a href="?C=N&amp;O=A">File Name</a>&nbsp;<a href="?C=N&amp;O=D">&nbsp;&darr;&nbsp;</a></th><th style="width:20%"><a href="?C=S&amp;O=A">File Size</a>&nbsp;<a href="?C=S&amp;O=D">&nbsp;&darr;&nbsp;</a></th><th style="width:25%"><a href="?C=M&amp;O=A">Date</a>&nbsp;<a href="?C=M&amp;O=D">&nbsp;&darr;&nbsp;</a></th></tr></thead>
6<tbody><tr><td class="link"><a href="../">Parent directory/</a></td><td class="size">-</td><td class="date">-</td></tr>
7<tr><td class="link"><a href="LATEST-IS-2.9.14" title="LATEST-IS-2.9.14">LATEST-IS-2.9.14</a></td><td class="size">3.0 MiB</td><td class="date">2022-May-02 12:03</td></tr>
8<tr><td class="link"><a href="libxml2-2.9.0.sha256sum" title="libxml2-2.9.0.sha256sum">libxml2-2.9.0.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:27</td></tr>
9<tr><td class="link"><a href="libxml2-2.9.0.tar.xz" title="libxml2-2.9.0.tar.xz">libxml2-2.9.0.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:27</td></tr>
10<tr><td class="link"><a href="libxml2-2.9.1.sha256sum" title="libxml2-2.9.1.sha256sum">libxml2-2.9.1.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:28</td></tr>
11<tr><td class="link"><a href="libxml2-2.9.1.tar.xz" title="libxml2-2.9.1.tar.xz">libxml2-2.9.1.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:28</td></tr>
12<tr><td class="link"><a href="libxml2-2.9.10.sha256sum" title="libxml2-2.9.10.sha256sum">libxml2-2.9.10.sha256sum</a></td><td class="size">88 B</td><td class="date">2022-Feb-14 18:42</td></tr>
13<tr><td class="link"><a href="libxml2-2.9.10.tar.xz" title="libxml2-2.9.10.tar.xz">libxml2-2.9.10.tar.xz</a></td><td class="size">3.2 MiB</td><td class="date">2022-Feb-14 18:42</td></tr>
14<tr><td class="link"><a href="libxml2-2.9.11.sha256sum" title="libxml2-2.9.11.sha256sum">libxml2-2.9.11.sha256sum</a></td><td class="size">88 B</td><td class="date">2022-Feb-14 18:43</td></tr>
15<tr><td class="link"><a href="libxml2-2.9.11.tar.xz" title="libxml2-2.9.11.tar.xz">libxml2-2.9.11.tar.xz</a></td><td class="size">3.2 MiB</td><td class="date">2022-Feb-14 18:43</td></tr>
16<tr><td class="link"><a href="libxml2-2.9.12.sha256sum" title="libxml2-2.9.12.sha256sum">libxml2-2.9.12.sha256sum</a></td><td class="size">88 B</td><td class="date">2022-Feb-14 18:45</td></tr>
17<tr><td class="link"><a href="libxml2-2.9.12.tar.xz" title="libxml2-2.9.12.tar.xz">libxml2-2.9.12.tar.xz</a></td><td class="size">3.2 MiB</td><td class="date">2022-Feb-14 18:45</td></tr>
18<tr><td class="link"><a href="libxml2-2.9.13.news" title="libxml2-2.9.13.news">libxml2-2.9.13.news</a></td><td class="size">26.6 KiB</td><td class="date">2022-Feb-20 12:42</td></tr>
19<tr><td class="link"><a href="libxml2-2.9.13.sha256sum" title="libxml2-2.9.13.sha256sum">libxml2-2.9.13.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-Feb-20 12:42</td></tr>
20<tr><td class="link"><a href="libxml2-2.9.13.tar.xz" title="libxml2-2.9.13.tar.xz">libxml2-2.9.13.tar.xz</a></td><td class="size">3.1 MiB</td><td class="date">2022-Feb-20 12:42</td></tr>
21<tr><td class="link"><a href="libxml2-2.9.14.news" title="libxml2-2.9.14.news">libxml2-2.9.14.news</a></td><td class="size">1.0 KiB</td><td class="date">2022-May-02 12:03</td></tr>
22<tr><td class="link"><a href="libxml2-2.9.14.sha256sum" title="libxml2-2.9.14.sha256sum">libxml2-2.9.14.sha256sum</a></td><td class="size">174 B</td><td class="date">2022-May-02 12:03</td></tr>
23<tr><td class="link"><a href="libxml2-2.9.14.tar.xz" title="libxml2-2.9.14.tar.xz">libxml2-2.9.14.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-May-02 12:03</td></tr>
24<tr><td class="link"><a href="libxml2-2.9.2.sha256sum" title="libxml2-2.9.2.sha256sum">libxml2-2.9.2.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:30</td></tr>
25<tr><td class="link"><a href="libxml2-2.9.2.tar.xz" title="libxml2-2.9.2.tar.xz">libxml2-2.9.2.tar.xz</a></td><td class="size">3.2 MiB</td><td class="date">2022-Feb-14 18:30</td></tr>
26<tr><td class="link"><a href="libxml2-2.9.3.sha256sum" title="libxml2-2.9.3.sha256sum">libxml2-2.9.3.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:31</td></tr>
27<tr><td class="link"><a href="libxml2-2.9.3.tar.xz" title="libxml2-2.9.3.tar.xz">libxml2-2.9.3.tar.xz</a></td><td class="size">3.2 MiB</td><td class="date">2022-Feb-14 18:31</td></tr>
28<tr><td class="link"><a href="libxml2-2.9.4.sha256sum" title="libxml2-2.9.4.sha256sum">libxml2-2.9.4.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:33</td></tr>
29<tr><td class="link"><a href="libxml2-2.9.4.tar.xz" title="libxml2-2.9.4.tar.xz">libxml2-2.9.4.tar.xz</a></td><td class="size">2.9 MiB</td><td class="date">2022-Feb-14 18:33</td></tr>
30<tr><td class="link"><a href="libxml2-2.9.5.sha256sum" title="libxml2-2.9.5.sha256sum">libxml2-2.9.5.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:35</td></tr>
31<tr><td class="link"><a href="libxml2-2.9.5.tar.xz" title="libxml2-2.9.5.tar.xz">libxml2-2.9.5.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:35</td></tr>
32<tr><td class="link"><a href="libxml2-2.9.6.sha256sum" title="libxml2-2.9.6.sha256sum">libxml2-2.9.6.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:36</td></tr>
33<tr><td class="link"><a href="libxml2-2.9.6.tar.xz" title="libxml2-2.9.6.tar.xz">libxml2-2.9.6.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:36</td></tr>
34<tr><td class="link"><a href="libxml2-2.9.7.sha256sum" title="libxml2-2.9.7.sha256sum">libxml2-2.9.7.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:37</td></tr>
35<tr><td class="link"><a href="libxml2-2.9.7.tar.xz" title="libxml2-2.9.7.tar.xz">libxml2-2.9.7.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:37</td></tr>
36<tr><td class="link"><a href="libxml2-2.9.8.sha256sum" title="libxml2-2.9.8.sha256sum">libxml2-2.9.8.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:39</td></tr>
37<tr><td class="link"><a href="libxml2-2.9.8.tar.xz" title="libxml2-2.9.8.tar.xz">libxml2-2.9.8.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:39</td></tr>
38<tr><td class="link"><a href="libxml2-2.9.9.sha256sum" title="libxml2-2.9.9.sha256sum">libxml2-2.9.9.sha256sum</a></td><td class="size">87 B</td><td class="date">2022-Feb-14 18:40</td></tr>
39<tr><td class="link"><a href="libxml2-2.9.9.tar.xz" title="libxml2-2.9.9.tar.xz">libxml2-2.9.9.tar.xz</a></td><td class="size">3.0 MiB</td><td class="date">2022-Feb-14 18:40</td></tr>
40</tbody></table></body></html>
diff --git a/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/index.html b/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/index.html
new file mode 100644
index 0000000000..c183e06a55
--- /dev/null
+++ b/bitbake/lib/bb/tests/fetch-testdata/software/libxml2/index.html
@@ -0,0 +1,19 @@
1<!DOCTYPE html><html><head><meta http-equiv="content-type" content="text/html; charset=utf-8"><meta name="viewport" content="width=device-width"><style type="text/css">body,html {background:#fff;font-family:"Bitstream Vera Sans","Lucida Grande","Lucida Sans Unicode",Lucidux,Verdana,Lucida,sans-serif;}tr:nth-child(even) {background:#f4f4f4;}th,td {padding:0.1em 0.5em;}th {text-align:left;font-weight:bold;background:#eee;border-bottom:1px solid #aaa;}#list {border:1px solid #aaa;width:100%;}a {color:#a33;}a:hover {color:#e33;}</style>
2
3<title>Index of /sources/libxml2/</title>
4</head><body><h1>Index of /sources/libxml2/</h1>
5<table id="list"><thead><tr><th style="width:55%"><a href="?C=N&amp;O=A">File Name</a>&nbsp;<a href="?C=N&amp;O=D">&nbsp;&darr;&nbsp;</a></th><th style="width:20%"><a href="?C=S&amp;O=A">File Size</a>&nbsp;<a href="?C=S&amp;O=D">&nbsp;&darr;&nbsp;</a></th><th style="width:25%"><a href="?C=M&amp;O=A">Date</a>&nbsp;<a href="?C=M&amp;O=D">&nbsp;&darr;&nbsp;</a></th></tr></thead>
6<tbody><tr><td class="link"><a href="../">Parent directory/</a></td><td class="size">-</td><td class="date">-</td></tr>
7<tr><td class="link"><a href="2.0/" title="2.0">2.0/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:04</td></tr>
8<tr><td class="link"><a href="2.1/" title="2.1">2.1/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:04</td></tr>
9<tr><td class="link"><a href="2.10/" title="2.10">2.10/</a></td><td class="size">-</td><td class="date">2022-Oct-14 12:55</td></tr>
10<tr><td class="link"><a href="2.2/" title="2.2">2.2/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:04</td></tr>
11<tr><td class="link"><a href="2.3/" title="2.3">2.3/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:05</td></tr>
12<tr><td class="link"><a href="2.4/" title="2.4">2.4/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:05</td></tr>
13<tr><td class="link"><a href="2.5/" title="2.5">2.5/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:05</td></tr>
14<tr><td class="link"><a href="2.6/" title="2.6">2.6/</a></td><td class="size">-</td><td class="date">2009-Jul-14 13:05</td></tr>
15<tr><td class="link"><a href="2.7/" title="2.7">2.7/</a></td><td class="size">-</td><td class="date">2022-Feb-14 18:24</td></tr>
16<tr><td class="link"><a href="2.8/" title="2.8">2.8/</a></td><td class="size">-</td><td class="date">2022-Feb-14 18:26</td></tr>
17<tr><td class="link"><a href="2.9/" title="2.9">2.9/</a></td><td class="size">-</td><td class="date">2022-May-02 12:04</td></tr>
18<tr><td class="link"><a href="cache.json" title="cache.json">cache.json</a></td><td class="size">22.8 KiB</td><td class="date">2022-Oct-14 12:55</td></tr>
19</tbody></table></body></html>
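These captured directory listings are fixture input for the wget fetcher's upstream-version checks in tests/fetch.py. Purely as an illustration (this is not BitBake's actual parser, which lives in fetch2/wget.py), the version directories can be pulled out of such a page with a regex:

    import re

    # Hypothetical sketch: extract the "2.x" series links from the index
    # page shown above.
    with open("bitbake/lib/bb/tests/fetch-testdata/software/libxml2/index.html") as f:
        html = f.read()
    print(re.findall(r'href="(\d+\.\d+)/"', html))
    # e.g. ['2.0', '2.1', '2.10', '2.2', ...]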
diff --git a/bitbake/lib/bb/tests/fetch-testdata/software/miniupnp/download.php b/bitbake/lib/bb/tests/fetch-testdata/software/miniupnp/download.php
new file mode 100644
index 0000000000..e27ee134f2
--- /dev/null
+++ b/bitbake/lib/bb/tests/fetch-testdata/software/miniupnp/download.php
@@ -0,0 +1,3528 @@
1<?xml version="1.0" encoding="UTF-8"?>
2<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
3 "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
4<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
5<head>
6<title>MiniUPnP download zone</title>
7<link href="../css/miniupnp.css" rel="stylesheet" type="text/css"/>
8<meta name="description" content="files download of the miniupnp project"/>
9<meta name="keywords" content="upnp,download,openbsd,freebsd,linux,windows"/>
10<meta name="viewport" content="width=device-width" />
11<link href="rss.php" title="MiniUPnPd, MiniUPnPc and MiniSSDPd Files" type="application/rss+xml" rel="alternate" />
12<link rel="canonical" href="http://miniupnp.free.fr/files/" />
13<link rel="alternate" hreflang="fr" href="/files/index_fr.php" />
14<script async="async" src="//pagead2.googlesyndication.com/pagead/js/adsbygoogle.js" type="text/javascript"></script>
15<script type="text/javascript">
16 (adsbygoogle = window.adsbygoogle || []).push({
17 google_ad_client: "ca-pub-6883148866513192",
18 enable_page_level_ads: true
19 });
20</script>
21</head>
22<body>
23<h2>MiniUPnP Project</h2>
24
25<p align="center">
26<a href="../">Home</a> |
27<b>Downloads</b> |
28<a href="../devicelist.php">Compatibility list</a> |
29<a href="../libnatpmp.html">libnatpmp</a> |
30<a href="../minissdpd.html">MiniSSDPd</a> |
31<a href="../xchat-upnp.html">xchat upnp patch</a> |
32<a href="../search.html">Search</a> |
33<a href="https://miniupnp.tuxfamily.org/forum/">Forum</a>
34</p>
35<p align="center">
36<b>English</b> | <a href="/files/index_fr.php">Fran&ccedil;ais</a>
37</p>
38
39<div align="center">
40<script type="text/javascript"><!--
41google_ad_client = "pub-6883148866513192";
42/* 728x90, created 7/10/08 */
43google_ad_slot = "0774293141";
44google_ad_width = 728;
45google_ad_height = 90;
46//-->
47</script>
48<script type="text/javascript"
49src="https://pagead2.googlesyndication.com/pagead/show_ads.js">
50</script>
51</div>
52
53<h2>MiniUPnP download zone</h2>
54<p>
55Find on this page the source of miniupnp and
56some related files. You will also find precompiled binaries
57of the UPnP client sample program for Windows compiled using
58<a href="https://mingw.osdn.io/">MinGW</a>. There are also Windows
59binaries (including a Python module) automatically built using
60<a href="https://ci.appveyor.com/project/miniupnp/miniupnp/build/artifacts">AppVeyor</a>.
61</p>
62<p>If you just need one of the software installed on your machine,
63you probably don't need to download and compile the source files.
64It is very likely that a package/port already exists for
65your system/distribution. Refer to your system documentation
66to learn how to search for and install a package/port.
67Mac OS X has port systems too: see
68<a href="http://www.macports.org/">MacPorts</a>,
69<a href="http://mxcl.github.com/homebrew/">Homebrew</a> or
70<a href="http://www.finkproject.org/">Fink</a>.
71</p>
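<p>For example, installing from a package manager on Mac OS X might look like the
sketch below; the package names are assumptions, so check your package manager's
search command for the exact name:</p>
<pre>
# MacPorts (hypothetical package name)
sudo port install miniupnpc
# Homebrew (hypothetical formula name)
brew install miniupnpc
</pre>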
72<p>
73The miniupnpc (client) sources have been successfully compiled
74under Windows XP/Vista/7/10/etc. (using
75<a href="https://mingw.osdn.io/">MinGW</a>,
76<a href="https://www.mingw-w64.org/">Mingw-w64</a>
77or <a href="http://www.cygwin.com/">Cygwin</a>),
78Linux, OpenBSD, FreeBSD, NetBSD, DragonFlyBSD,
79Solaris, Mac OS X and AmigaOS. <br/>
80The client Makefile is written for GNU make:
81check which version your system has
82with the command "make --version". On some systems, such as OpenBSD,
83you have to use "gmake". Under Windows with MinGW, GNU make is
84called "mingw32-make", and a slightly modified version of the Makefile,
85Makefile.mingw, should be used. Run "mingw32make.bat" to compile. <br/>
86If you have any compatibility problems, please post on the
87<a href="https://miniupnp.tuxfamily.org/forum/">forum</a>
88or contact me by email.
89</p>
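<p>As a concrete sketch, a typical client build on a Unix-like system follows
the steps below; the tarball name is only an example taken from the tables on
this page, so substitute the release you actually downloaded:</p>
<pre>
# confirm GNU make is available ("gmake" on OpenBSD;
# "mingw32-make" with Makefile.mingw under Windows/MinGW)
make --version
# unpack and build the client and the upnpc sample program
tar xzf miniupnpc-2.3.2.tar.gz
cd miniupnpc-2.3.2
make
</pre>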
96<p>Get miniupnpc for AmigaOS 4 from
97<a href="http://os4depot.net/index.php?function=showfile&amp;file=network/misc/miniupnpc.lha">OS4Depot</a>.
98</p>
99<p>
100Dario Meloni has made a Ruby Gem embedding miniupnpc:
101<a href="https://rubygems.org/gems/mupnp">https://rubygems.org/gems/mupnp</a>.
102</p>
103<p>
104The Python module is available on pypi.org:
105<a href="https://pypi.org/project/miniupnpc/">pip install miniupnpc</a>.
106</p>
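<p>A minimal install-and-import sketch; the miniupnpc.UPnP class name is taken
from the example scripts shipped with the source, so treat it as an assumption
for your installed version:</p>
<pre>
pip install miniupnpc
# quick smoke test: the module should import and expose the UPnP class
python -c "import miniupnpc; u = miniupnpc.UPnP(); print(u.discoverdelay)"
</pre>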
107<p>
108The daemon (since November 2006) compiles with BSD make under BSD
109and Solaris.<br/>
110To compile the daemon under Linux, use "make -f Makefile.linux".<br/>
111To compile for <a href="http://openwrt.org/">OpenWRT</a>
112please read the README.openwrt file, or use the packages
113<a href="https://openwrt.org/packages/pkgdata/miniupnpc">miniupnpc</a> and
114<a href="https://openwrt.org/packages/pkgdata/miniupnpd">miniupnpd</a>.
125<br/>
126<a href="http://pfsense.com">pfSense</a> users are advised to use the
127miniupnpd port available for their system. Recent versions of
128pfSense include MiniUPnPd in the base system.
129<br/>
130For <a href="http://en.wikipedia.org/wiki/WRT54G">Linksys WRT54G</a>
131and WRT54GL owners,
132<a href="http://sourceforge.net/projects/tarifa/">Tarifa firmware</a>
133is another way to get MiniUPnPd running on the router.
134</p>
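<p>A minimal Linux build sketch for the daemon, using the makefile named above
(the tarball name is an example from the tables on this page):</p>
<pre>
tar xzf miniupnpd-2.3.7.tar.gz
cd miniupnpd-2.3.7
# the Linux build uses its own makefile
make -f Makefile.linux
</pre>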
135<p>
136Please read the README and
137LICENCE files included with the distribution for further information.
138</p>
139<p>
140The MiniUPnP daemon (miniupnpd) works under
141<a href="http://www.openbsd.org/">OpenBSD</a>,
142<a href="http://www.netbsd.org/">NetBSD</a>,
143<a href="http://www.freebsd.org/">FreeBSD</a>,
144<a href="http://www.dragonflybsd.org/">DragonFlyBSD</a>,
145<a href="http://www.apple.com/macosx/">Mac OS X</a> and
146(<a href="https://en.wikipedia.org/wiki/OpenSolaris">Open</a>)<a href="http://www.oracle.com/us/products/servers-storage/solaris/solaris11/overview/index.html">Solaris</a>
147with <a href="http://www.openbsd.org/faq/pf/">pf</a>,
148with <a href="https://en.wikipedia.org/wiki/IPFilter">IP Filter</a> or
149with <a href="http://en.wikipedia.org/wiki/Ipfirewall">ipfw</a>.
150The Linux version uses either libiptc, which permits access to
151<a href="http://netfilter.org/">netfilter</a>
152rules inside the kernel in the same way as
153<a href="https://www.netfilter.org/projects/iptables/index.html">iptables</a>, or
154<a href="https://www.netfilter.org/projects/libnftnl/index.html">libnftnl</a>,
155which is the equivalent for
156<a href="https://www.netfilter.org/projects/nftables/index.html">nftables</a>.
157</p>
158
159<p>Releases are now GPG signed with the key <a href="../A31ACAAF.asc">A31ACAAF</a>.
160The previous signing key was <a href="../A5C0863C.asc">A5C0863C</a>.
161Get the current key from your favorite
162<a href="https://pgp.mit.edu/pks/lookup?search=0xA31ACAAF&amp;op=index&amp;fingerprint=on">key server</a>.</p>
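<p>A verification sketch using the key ID above; the key server address is only
an example, and any HKPS server carrying the key will do:</p>
<pre>
# import the signing key, then check a release tarball against its .sig file
gpg --keyserver hkps://pgp.mit.edu --recv-keys 0xA31ACAAF
gpg --verify miniupnpd-2.3.7.tar.gz.sig miniupnpd-2.3.7.tar.gz
</pre>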
163
164<h4>REST API</h4>
165<p>You can use the REST API to get the latest releases available (a curl sketch follows the list):</p>
166<ul>
167<li><a href="rest.php/tags/miniupnpd?count=1">rest.php/tags/miniupnpd?count=1</a>: latest miniupnpd.</li>
168<li><a href="rest.php/tags?count=1">rest.php/tags?count=1</a>: miniupnpc, miniupnpd and minissdpd.</li>
169</ul>
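<p>For example, with curl; the absolute base URL is assumed from this page's
canonical address:</p>
<pre>
# latest miniupnpd release tag
curl 'http://miniupnp.free.fr/files/rest.php/tags/miniupnpd?count=1'
# latest tags for miniupnpc, miniupnpd and minissdpd
curl 'http://miniupnp.free.fr/files/rest.php/tags?count=1'
</pre>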
170
171<h4>You can help!</h4>
172<p>If you make a package/port for your favorite OS distribution,
173let me know so I can upload the package here or add a link to your
174repository.
175</p>
176
177<h4>Latest files</h4>
178<table>
179<tr><th>name</th>
180<th>size</th>
181<th>date</th>
182<th>comment</th>
183<th><!-- Changelog --></th>
184<th><!-- Signature --></th>
185</tr>
186<tr>
187 <td class="filename"><a href='miniupnpc-2.3.2.tar.gz'>miniupnpc-2.3.2.tar.gz</a></td>
188 <td class="filesize">140137</td>
189 <td class="filedate">05/03/2025 10:31</td>
190 <td class="comment">MiniUPnP client release source code</td>
191 <td><a href="changelog.php?file=miniupnpc-2.3.2.tar.gz">changelog</a></td>
192 <td><a href="miniupnpc-2.3.2.tar.gz.sig">Signature</a></td>
193</tr>
194<tr>
195 <td class="filename"><a href='miniupnpd-2.3.7.tar.gz'>miniupnpd-2.3.7.tar.gz</a></td>
196 <td class="filesize">265329</td>
197 <td class="filedate">22/06/2024 22:31</td>
198 <td class="comment">MiniUPnP daemon release source code</td>
199 <td><a href="changelog.php?file=miniupnpd-2.3.7.tar.gz">changelog</a></td>
200 <td><a href="miniupnpd-2.3.7.tar.gz.sig">Signature</a></td>
201</tr>
202<tr>
203 <td class="filename"><a href='libnatpmp-20230423.tar.gz'>libnatpmp-20230423.tar.gz</a></td>
204 <td class="filesize">26506</td>
205 <td class="filedate">23/04/2023 11:02</td>
206 <td class="comment">latest libnatpmp source code</td>
207 <td><a href="changelog.php?file=libnatpmp-20230423.tar.gz">changelog</a></td>
208 <td><a href="libnatpmp-20230423.tar.gz.sig">Signature</a></td>
209</tr>
210<tr>
211 <td class="filename"><a href='minissdpd-1.6.0.tar.gz'>minissdpd-1.6.0.tar.gz</a></td>
212 <td class="filesize">39077</td>
213 <td class="filedate">22/10/2022 18:41</td>
214 <td class="comment">MiniSSDPd release source code</td>
215 <td><a href="changelog.php?file=minissdpd-1.6.0.tar.gz">changelog</a></td>
216 <td><a href="minissdpd-1.6.0.tar.gz.sig">Signature</a></td>
217</tr>
218<tr>
219 <td class="filename"><a href='upnpc-exe-win32-20220515.zip'>upnpc-exe-win32-20220515.zip</a></td>
220 <td class="filesize">69503</td>
221 <td class="filedate">15/05/2022 14:31</td>
222 <td class="comment">Windows executable</td>
223 <td><a href="changelog.php?file=upnpc-exe-win32-20220515.zip">changelog</a></td>
224 <td></td>
225</tr>
226<tr>
227 <td class="filename"><a href='minissdpd-1.5.20211105.tar.gz'>minissdpd-1.5.20211105.tar.gz</a></td>
228 <td class="filesize">38870</td>
229 <td class="filedate">04/11/2021 23:34</td>
230 <td class="comment">latest MiniSSDPd source code</td>
231 <td><a href="changelog.php?file=minissdpd-1.5.20211105.tar.gz">changelog</a></td>
232 <td><a href="minissdpd-1.5.20211105.tar.gz.sig">Signature</a></td>
233</tr>
234<tr>
235 <td class="filename"><a href='miniupnpc-2.1.20201016.tar.gz'>miniupnpc-2.1.20201016.tar.gz</a></td>
236 <td class="filesize">97682</td>
237 <td class="filedate">15/10/2020 22:31</td>
238 <td class="comment">latest MiniUPnP client source code</td>
239 <td><a href="changelog.php?file=miniupnpc-2.1.20201016.tar.gz">changelog</a></td>
240 <td><a href="miniupnpc-2.1.20201016.tar.gz.sig">Signature</a></td>
241</tr>
242<tr>
243 <td class="filename"><a href='miniupnpd-2.1.20200510.tar.gz'>miniupnpd-2.1.20200510.tar.gz</a></td>
244 <td class="filesize">245426</td>
245 <td class="filedate">10/05/2020 18:23</td>
246 <td class="comment">latest MiniUPnP daemon source code</td>
247 <td><a href="changelog.php?file=miniupnpd-2.1.20200510.tar.gz">changelog</a></td>
248 <td><a href="miniupnpd-2.1.20200510.tar.gz.sig">Signature</a></td>
249</tr>
250<tr>
251 <td class="filename"><a href='xchat-upnp20110811.patch'>xchat-upnp20110811.patch</a></td>
252 <td class="filesize">10329</td>
253 <td class="filedate">11/08/2011 15:18</td>
254 <td class="comment">Patch to add UPnP capabilities to xchat</td>
255 <td><a href="changelog.php?file=xchat-upnp20110811.patch">changelog</a></td>
256 <td></td>
257</tr>
258<tr>
259 <td class="filename"><a href='minidlna_1.0.21.minissdp1.patch'>minidlna_1.0.21.minissdp1.patch</a></td>
260 <td class="filesize">7598</td>
261 <td class="filedate">25/07/2011 14:57</td>
 262 <td class="comment">Patch for MiniDLNA to use MiniSSDPd</td>
263 <td><a href="changelog.php?file=minidlna_1.0.21.minissdp1.patch">changelog</a></td>
264 <td></td>
265</tr>
266<tr>
267 <td class="filename"><a href='miniupnpc-new20060630.tar.gz'>miniupnpc-new20060630.tar.gz</a></td>
268 <td class="filesize">14840</td>
269 <td class="filedate">04/11/2006 18:16</td>
 270 <td class="comment">Jo&atilde;o Paulo Barraca's version of the UPnP client</td>
271 <td><a href="changelog.php?file=miniupnpc-new20060630.tar.gz">changelog</a></td>
272 <td></td>
273</tr>
274</table>
275
276<h4>All files</h4>
277<table>
278<tr><th>name</th>
279<th>size</th>
280<th>date</th>
281<th>comment</th>
282<th><!-- signature --></th>
283</tr>
284<tr>
285 <td class="filename"><a href='download.php?file=miniupnpc-2.3.2.tar.gz'>miniupnpc-2.3.2.tar.gz</a></td>
286 <td class="filesize">140137</td>
287 <td class="filedate">05/03/2025 10:31:36 +0000</td>
288 <td class="comment">MiniUPnP client release source code</td>
289 <td><a href="miniupnpc-2.3.2.tar.gz.sig">Signature</a></td>
290</tr>
291<tr>
292 <td class="filename"><a href='download.php?file=miniupnpc-2.3.1.tar.gz'>miniupnpc-2.3.1.tar.gz</a></td>
293 <td class="filesize">139499</td>
294 <td class="filedate">23/02/2025 16:44:16 +0000</td>
295 <td class="comment">MiniUPnP client release source code</td>
296 <td><a href="miniupnpc-2.3.1.tar.gz.sig">Signature</a></td>
297</tr>
298<tr>
299 <td class="filename"><a href='download.php?file=miniupnpc-2.3.0.tar.gz'>miniupnpc-2.3.0.tar.gz</a></td>
300 <td class="filesize">105071</td>
301 <td class="filedate">10/01/2025 23:16:45 +0000</td>
302 <td class="comment">MiniUPnP client release source code</td>
303 <td><a href="miniupnpc-2.3.0.tar.gz.sig">Signature</a></td>
304</tr>
305<tr>
306 <td class="filename"><a href='download.php?file=miniupnpd-2.3.7.tar.gz'>miniupnpd-2.3.7.tar.gz</a></td>
307 <td class="filesize">265329</td>
308 <td class="filedate">22/06/2024 22:31:38 +0000</td>
309 <td class="comment">MiniUPnP daemon release source code</td>
310 <td><a href="miniupnpd-2.3.7.tar.gz.sig">Signature</a></td>
311</tr>
312<tr>
313 <td class="filename"><a href='download.php?file=miniupnpc-2.2.8.tar.gz'>miniupnpc-2.2.8.tar.gz</a></td>
314 <td class="filesize">104603</td>
315 <td class="filedate">08/06/2024 22:13:39 +0000</td>
316 <td class="comment">MiniUPnP client release source code</td>
317 <td><a href="miniupnpc-2.2.8.tar.gz.sig">Signature</a></td>
318</tr>
319<tr>
320 <td class="filename"><a href='download.php?file=miniupnpd-2.3.6.tar.gz'>miniupnpd-2.3.6.tar.gz</a></td>
321 <td class="filesize">263018</td>
322 <td class="filedate">19/03/2024 23:39:51 +0000</td>
323 <td class="comment">MiniUPnP daemon release source code</td>
324 <td><a href="miniupnpd-2.3.6.tar.gz.sig">Signature</a></td>
325</tr>
326<tr>
327 <td class="filename"><a href='download.php?file=miniupnpc-2.2.7.tar.gz'>miniupnpc-2.2.7.tar.gz</a></td>
328 <td class="filesize">104258</td>
329 <td class="filedate">19/03/2024 23:25:18 +0000</td>
330 <td class="comment">MiniUPnP client release source code</td>
331 <td><a href="miniupnpc-2.2.7.tar.gz.sig">Signature</a></td>
332</tr>
333<tr>
334 <td class="filename"><a href='download.php?file=miniupnpd-2.3.5.tar.gz'>miniupnpd-2.3.5.tar.gz</a></td>
335 <td class="filesize">261952</td>
336 <td class="filedate">02/03/2024 11:04:07 +0000</td>
337 <td class="comment">MiniUPnP daemon release source code</td>
338 <td><a href="miniupnpd-2.3.5.tar.gz.sig">Signature</a></td>
339</tr>
340<tr>
341 <td class="filename"><a href='download.php?file=miniupnpd-2.3.4.tar.gz'>miniupnpd-2.3.4.tar.gz</a></td>
342 <td class="filesize">260810</td>
343 <td class="filedate">04/01/2024 00:53:17 +0000</td>
344 <td class="comment">MiniUPnP daemon release source code</td>
345 <td><a href="miniupnpd-2.3.4.tar.gz.sig">Signature</a></td>
346</tr>
347<tr>
348 <td class="filename"><a href='download.php?file=miniupnpc-2.2.6.tar.gz'>miniupnpc-2.2.6.tar.gz</a></td>
349 <td class="filesize">103949</td>
350 <td class="filedate">04/01/2024 00:27:14 +0000</td>
351 <td class="comment">MiniUPnP client release source code</td>
352 <td><a href="miniupnpc-2.2.6.tar.gz.sig">Signature</a></td>
353</tr>
354<tr>
355 <td class="filename"><a href='download.php?file=miniupnpc-2.2.5.tar.gz'>miniupnpc-2.2.5.tar.gz</a></td>
356 <td class="filesize">103654</td>
357 <td class="filedate">11/06/2023 23:14:56 +0000</td>
358 <td class="comment">MiniUPnP client release source code</td>
359 <td><a href="miniupnpc-2.2.5.tar.gz.sig">Signature</a></td>
360</tr>
361<tr>
362 <td class="filename"><a href='download.php?file=libnatpmp-20230423.tar.gz'>libnatpmp-20230423.tar.gz</a></td>
363 <td class="filesize">26506</td>
364 <td class="filedate">23/04/2023 11:02:09 +0000</td>
365 <td class="comment">libnatpmp source code</td>
366 <td><a href="libnatpmp-20230423.tar.gz.sig">Signature</a></td>
367</tr>
368<tr>
369 <td class="filename"><a href='download.php?file=miniupnpd-2.3.3.tar.gz'>miniupnpd-2.3.3.tar.gz</a></td>
370 <td class="filesize">260079</td>
371 <td class="filedate">17/02/2023 03:07:46 +0000</td>
372 <td class="comment">MiniUPnP daemon release source code</td>
373 <td><a href="miniupnpd-2.3.3.tar.gz.sig">Signature</a></td>
374</tr>
375<tr>
376 <td class="filename"><a href='download.php?file=miniupnpd-2.3.2.tar.gz'>miniupnpd-2.3.2.tar.gz</a></td>
377 <td class="filesize">259686</td>
378 <td class="filedate">19/01/2023 23:18:08 +0000</td>
379 <td class="comment">MiniUPnP daemon release source code</td>
380 <td><a href="miniupnpd-2.3.2.tar.gz.sig">Signature</a></td>
381</tr>
382<tr>
383 <td class="filename"><a href='download.php?file=minissdpd-1.6.0.tar.gz'>minissdpd-1.6.0.tar.gz</a></td>
384 <td class="filesize">39077</td>
385 <td class="filedate">22/10/2022 18:41:54 +0000</td>
386 <td class="comment">MiniSSDPd release source code</td>
387 <td><a href="minissdpd-1.6.0.tar.gz.sig">Signature</a></td>
388</tr>
389<tr>
390 <td class="filename"><a href='download.php?file=miniupnpc-2.2.4.tar.gz'>miniupnpc-2.2.4.tar.gz</a></td>
391 <td class="filesize">102932</td>
392 <td class="filedate">21/10/2022 21:01:01 +0000</td>
393 <td class="comment">MiniUPnP client release source code</td>
394 <td><a href="miniupnpc-2.2.4.tar.gz.sig">Signature</a></td>
395</tr>
396<tr>
397 <td class="filename"><a href='download.php?file=miniupnpd-2.3.1.tar.gz'>miniupnpd-2.3.1.tar.gz</a></td>
398 <td class="filesize">258050</td>
399 <td class="filedate">16/10/2022 05:58:44 +0000</td>
400 <td class="comment">MiniUPnP daemon release source code</td>
401 <td><a href="miniupnpd-2.3.1.tar.gz.sig">Signature</a></td>
402</tr>
403<tr>
404 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20220515.zip'>upnpc-exe-win32-20220515.zip</a></td>
405 <td class="filesize">69503</td>
406 <td class="filedate">15/05/2022 14:31:25 +0000</td>
407 <td class="comment">Windows executable</td>
408 <td></td>
409</tr>
410<tr>
411 <td class="filename"><a href='download.php?file=hexchat-2.16.patch'>hexchat-2.16.patch</a></td>
412 <td class="filesize">8147</td>
413 <td class="filedate">19/03/2022 16:52:05 +0000</td>
414 <td class="comment"></td>
415 <td></td>
416</tr>
417<tr>
418 <td class="filename"><a href='download.php?file=miniupnpd-2.3.0.tar.gz'>miniupnpd-2.3.0.tar.gz</a></td>
419 <td class="filesize">256069</td>
420 <td class="filedate">23/01/2022 00:23:32 +0000</td>
421 <td class="comment">MiniUPnP daemon release source code</td>
422 <td><a href="miniupnpd-2.3.0.tar.gz.sig">Signature</a></td>
423</tr>
424<tr>
425 <td class="filename"><a href='download.php?file=minissdpd-1.5.20211105.tar.gz'>minissdpd-1.5.20211105.tar.gz</a></td>
426 <td class="filesize">38870</td>
427 <td class="filedate">04/11/2021 23:34:49 +0000</td>
428 <td class="comment">MiniSSDPd source code</td>
429 <td><a href="minissdpd-1.5.20211105.tar.gz.sig">Signature</a></td>
430</tr>
431<tr>
432 <td class="filename"><a href='download.php?file=miniupnpc-2.2.3.tar.gz'>miniupnpc-2.2.3.tar.gz</a></td>
433 <td class="filesize">101360</td>
434 <td class="filedate">28/09/2021 21:43:32 +0000</td>
435 <td class="comment">MiniUPnP client release source code</td>
436 <td><a href="miniupnpc-2.2.3.tar.gz.sig">Signature</a></td>
437</tr>
438<tr>
439 <td class="filename"><a href='download.php?file=miniupnpd-2.2.3.tar.gz'>miniupnpd-2.2.3.tar.gz</a></td>
440 <td class="filesize">254752</td>
441 <td class="filedate">21/08/2021 08:35:13 +0000</td>
442 <td class="comment">MiniUPnP daemon release source code</td>
443 <td><a href="miniupnpd-2.2.3.tar.gz.sig">Signature</a></td>
444</tr>
445<tr>
446 <td class="filename"><a href='download.php?file=miniupnpd-2.2.2.tar.gz'>miniupnpd-2.2.2.tar.gz</a></td>
447 <td class="filesize">250649</td>
448 <td class="filedate">13/05/2021 11:30:11 +0000</td>
449 <td class="comment">MiniUPnP daemon release source code</td>
450 <td><a href="miniupnpd-2.2.2.tar.gz.sig">Signature</a></td>
451</tr>
452<tr>
453 <td class="filename"><a href='download.php?file=miniupnpc-2.2.2.tar.gz'>miniupnpc-2.2.2.tar.gz</a></td>
454 <td class="filesize">100008</td>
455 <td class="filedate">02/03/2021 23:44:52 +0000</td>
456 <td class="comment">MiniUPnP client release source code</td>
457 <td><a href="miniupnpc-2.2.2.tar.gz.sig">Signature</a></td>
458</tr>
459<tr>
460 <td class="filename"><a href='download.php?file=miniupnpd-2.2.1.tar.gz'>miniupnpd-2.2.1.tar.gz</a></td>
461 <td class="filesize">250023</td>
462 <td class="filedate">20/12/2020 18:08:08 +0000</td>
463 <td class="comment">MiniUPnP daemon release source code</td>
464 <td><a href="miniupnpd-2.2.1.tar.gz.sig">Signature</a></td>
465</tr>
466<tr>
467 <td class="filename"><a href='download.php?file=miniupnpc-2.2.1.tar.gz'>miniupnpc-2.2.1.tar.gz</a></td>
468 <td class="filesize">99595</td>
469 <td class="filedate">20/12/2020 18:08:02 +0000</td>
470 <td class="comment">MiniUPnP client release source code</td>
471 <td><a href="miniupnpc-2.2.1.tar.gz.sig">Signature</a></td>
472</tr>
473<tr>
474 <td class="filename"><a href='download.php?file=miniupnpc-2.2.0.tar.gz'>miniupnpc-2.2.0.tar.gz</a></td>
475 <td class="filesize">98348</td>
476 <td class="filedate">09/11/2020 19:51:24 +0000</td>
477 <td class="comment">MiniUPnP client release source code</td>
478 <td><a href="miniupnpc-2.2.0.tar.gz.sig">Signature</a></td>
479</tr>
480<tr>
481 <td class="filename"><a href='download.php?file=miniupnpd-2.2.0.tar.gz'>miniupnpd-2.2.0.tar.gz</a></td>
482 <td class="filesize">249858</td>
483 <td class="filedate">31/10/2020 09:20:59 +0000</td>
484 <td class="comment">MiniUPnP daemon release source code</td>
485 <td><a href="miniupnpd-2.2.0.tar.gz.sig">Signature</a></td>
486</tr>
487<tr>
488 <td class="filename"><a href='download.php?file=miniupnpd-2.2.0-RC3.tar.gz'>miniupnpd-2.2.0-RC3.tar.gz</a></td>
489 <td class="filesize">249879</td>
490 <td class="filedate">30/10/2020 21:49:49 +0000</td>
491 <td class="comment">MiniUPnP daemon release source code</td>
492 <td><a href="miniupnpd-2.2.0-RC3.tar.gz.sig">Signature</a></td>
493</tr>
494<tr>
495 <td class="filename"><a href='download.php?file=miniupnpc-2.1.20201016.tar.gz'>miniupnpc-2.1.20201016.tar.gz</a></td>
496 <td class="filesize">97682</td>
497 <td class="filedate">15/10/2020 22:31:09 +0000</td>
498 <td class="comment">MiniUPnP client source code</td>
499 <td><a href="miniupnpc-2.1.20201016.tar.gz.sig">Signature</a></td>
500</tr>
501<tr>
502 <td class="filename"><a href='download.php?file=miniupnpd-2.2.0-RC2.tar.gz'>miniupnpd-2.2.0-RC2.tar.gz</a></td>
503 <td class="filesize">248756</td>
504 <td class="filedate">28/09/2020 21:57:22 +0000</td>
505 <td class="comment">MiniUPnP daemon release source code</td>
506 <td><a href="miniupnpd-2.2.0-RC2.tar.gz.sig">Signature</a></td>
507</tr>
508<tr>
509 <td class="filename"><a href='download.php?file=miniupnpc-2.1.20200928.tar.gz'>miniupnpc-2.1.20200928.tar.gz</a></td>
510 <td class="filesize">96508</td>
511 <td class="filedate">28/09/2020 21:56:09 +0000</td>
512 <td class="comment">MiniUPnP client source code</td>
513 <td><a href="miniupnpc-2.1.20200928.tar.gz.sig">Signature</a></td>
514</tr>
515<tr>
516 <td class="filename"><a href='download.php?file=minissdpd-1.5.20200928.tar.gz'>minissdpd-1.5.20200928.tar.gz</a></td>
517 <td class="filesize">37860</td>
518 <td class="filedate">28/09/2020 21:55:40 +0000</td>
519 <td class="comment">MiniSSDPd source code</td>
520 <td><a href="minissdpd-1.5.20200928.tar.gz.sig">Signature</a></td>
521</tr>
522<tr>
523 <td class="filename"><a href='download.php?file=miniupnpd-2.2.0-RC1.tar.gz'>miniupnpd-2.2.0-RC1.tar.gz</a></td>
524 <td class="filesize">247772</td>
525 <td class="filedate">06/06/2020 18:34:50 +0000</td>
526 <td class="comment">MiniUPnP daemon release source code</td>
527 <td><a href="miniupnpd-2.2.0-RC1.tar.gz.sig">Signature</a></td>
528</tr>
529<tr>
530 <td class="filename"><a href='download.php?file=miniupnpd-2.2.0-RC0.tar.gz'>miniupnpd-2.2.0-RC0.tar.gz</a></td>
531 <td class="filesize">245507</td>
532 <td class="filedate">16/05/2020 18:03:17 +0000</td>
533 <td class="comment">MiniUPnP daemon release source code</td>
534 <td><a href="miniupnpd-2.2.0-RC0.tar.gz.sig">Signature</a></td>
535</tr>
536<tr>
537 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20200510.tar.gz'>miniupnpd-2.1.20200510.tar.gz</a></td>
538 <td class="filesize">245426</td>
539 <td class="filedate">10/05/2020 18:23:13 +0000</td>
540 <td class="comment">MiniUPnP daemon source code</td>
541 <td><a href="miniupnpd-2.1.20200510.tar.gz.sig">Signature</a></td>
542</tr>
543<tr>
544 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20200329.tar.gz'>miniupnpd-2.1.20200329.tar.gz</a></td>
545 <td class="filesize">243725</td>
546 <td class="filedate">29/03/2020 09:11:02 +0000</td>
547 <td class="comment">MiniUPnP daemon source code</td>
548 <td><a href="miniupnpd-2.1.20200329.tar.gz.sig">Signature</a></td>
549</tr>
550<tr>
551 <td class="filename"><a href='download.php?file=miniupnpc-2.1.20191224.tar.gz'>miniupnpc-2.1.20191224.tar.gz</a></td>
552 <td class="filesize">94740</td>
553 <td class="filedate">23/12/2019 23:37:32 +0000</td>
554 <td class="comment">MiniUPnP client source code</td>
555 <td><a href="miniupnpc-2.1.20191224.tar.gz.sig">Signature</a></td>
556</tr>
557<tr>
558 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20191006.tar.gz'>miniupnpd-2.1.20191006.tar.gz</a></td>
559 <td class="filesize">243255</td>
560 <td class="filedate">06/10/2019 21:02:31 +0000</td>
561 <td class="comment">MiniUPnP daemon source code</td>
562 <td><a href="miniupnpd-2.1.20191006.tar.gz.sig">Signature</a></td>
563</tr>
564<tr>
565 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20191005.tar.gz'>miniupnpd-2.1.20191005.tar.gz</a></td>
566 <td class="filesize">244100</td>
567 <td class="filedate">05/10/2019 21:33:08 +0000</td>
568 <td class="comment">MiniUPnP daemon source code</td>
569 <td><a href="miniupnpd-2.1.20191005.tar.gz.sig">Signature</a></td>
570</tr>
571<tr>
572 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20191003.tar.gz'>miniupnpd-2.1.20191003.tar.gz</a></td>
573 <td class="filesize">243287</td>
574 <td class="filedate">02/10/2019 22:23:51 +0000</td>
575 <td class="comment">MiniUPnP daemon source code</td>
576 <td><a href="miniupnpd-2.1.20191003.tar.gz.sig">Signature</a></td>
577</tr>
578<tr>
579 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190924.tar.gz'>miniupnpd-2.1.20190924.tar.gz</a></td>
580 <td class="filesize">241008</td>
581 <td class="filedate">24/09/2019 11:58:15 +0000</td>
582 <td class="comment">MiniUPnP daemon source code</td>
583 <td><a href="miniupnpd-2.1.20190924.tar.gz.sig">Signature</a></td>
584</tr>
585<tr>
586 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190902.tar.gz'>miniupnpd-2.1.20190902.tar.gz</a></td>
587 <td class="filesize">240742</td>
588 <td class="filedate">01/09/2019 23:03:03 +0000</td>
589 <td class="comment">MiniUPnP daemon source code</td>
590 <td><a href="miniupnpd-2.1.20190902.tar.gz.sig">Signature</a></td>
591</tr>
592<tr>
593 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190824.tar.gz'>miniupnpd-2.1.20190824.tar.gz</a></td>
594 <td class="filesize">240490</td>
595 <td class="filedate">24/08/2019 09:21:52 +0000</td>
596 <td class="comment">MiniUPnP daemon source code</td>
597 <td><a href="miniupnpd-2.1.20190824.tar.gz.sig">Signature</a></td>
598</tr>
599<tr>
600 <td class="filename"><a href='download.php?file=minissdpd-1.5.20190824.tar.gz'>minissdpd-1.5.20190824.tar.gz</a></td>
601 <td class="filesize">37300</td>
602 <td class="filedate">24/08/2019 09:17:32 +0000</td>
603 <td class="comment">MiniSSDPd source code</td>
604 <td><a href="minissdpd-1.5.20190824.tar.gz.sig">Signature</a></td>
605</tr>
606<tr>
607 <td class="filename"><a href='download.php?file=miniupnpc-2.1.20190824.tar.gz'>miniupnpc-2.1.20190824.tar.gz</a></td>
608 <td class="filesize">94564</td>
609 <td class="filedate">24/08/2019 09:12:50 +0000</td>
610 <td class="comment">MiniUPnP client source code</td>
611 <td><a href="miniupnpc-2.1.20190824.tar.gz.sig">Signature</a></td>
612</tr>
613<tr>
614 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190630.tar.gz'>miniupnpd-2.1.20190630.tar.gz</a></td>
615 <td class="filesize">240466</td>
616 <td class="filedate">30/06/2019 20:27:38 +0000</td>
617 <td class="comment">MiniUPnP daemon source code</td>
618 <td><a href="miniupnpd-2.1.20190630.tar.gz.sig">Signature</a></td>
619</tr>
620<tr>
621 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190625.tar.gz'>miniupnpd-2.1.20190625.tar.gz</a></td>
622 <td class="filesize">240120</td>
623 <td class="filedate">25/06/2019 21:33:49 +0000</td>
624 <td class="comment">MiniUPnP daemon source code</td>
625 <td><a href="miniupnpd-2.1.20190625.tar.gz.sig">Signature</a></td>
626</tr>
627<tr>
628 <td class="filename"><a href='download.php?file=miniupnpc-2.1.20190625.tar.gz'>miniupnpc-2.1.20190625.tar.gz</a></td>
629 <td class="filesize">94461</td>
630 <td class="filedate">25/06/2019 21:33:26 +0000</td>
631 <td class="comment">MiniUPnP client source code</td>
632 <td><a href="miniupnpc-2.1.20190625.tar.gz.sig">Signature</a></td>
633</tr>
634<tr>
635 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190502.tar.gz'>miniupnpd-2.1.20190502.tar.gz</a></td>
636 <td class="filesize">236052</td>
637 <td class="filedate">02/05/2019 17:22:23 +0000</td>
638 <td class="comment">MiniUPnP daemon source code</td>
639 <td><a href="miniupnpd-2.1.20190502.tar.gz.sig">Signature</a></td>
640</tr>
641<tr>
642 <td class="filename"><a href='download.php?file=miniupnpc-2.1.20190408.tar.gz'>miniupnpc-2.1.20190408.tar.gz</a></td>
643 <td class="filesize">94216</td>
644 <td class="filedate">08/04/2019 12:50:21 +0000</td>
645 <td class="comment">MiniUPnP client source code</td>
646 <td><a href="miniupnpc-2.1.20190408.tar.gz.sig">Signature</a></td>
647</tr>
648<tr>
649 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190408.tar.gz'>miniupnpd-2.1.20190408.tar.gz</a></td>
650 <td class="filesize">235989</td>
651 <td class="filedate">08/04/2019 12:50:01 +0000</td>
652 <td class="comment">MiniUPnP daemon source code</td>
653 <td><a href="miniupnpd-2.1.20190408.tar.gz.sig">Signature</a></td>
654</tr>
655<tr>
656 <td class="filename"><a href='download.php?file=miniupnpc-2.1.20190403.tar.gz'>miniupnpc-2.1.20190403.tar.gz</a></td>
657 <td class="filesize">94204</td>
658 <td class="filedate">03/04/2019 15:41:36 +0000</td>
659 <td class="comment">MiniUPnP client source code</td>
660 <td><a href="miniupnpc-2.1.20190403.tar.gz.sig">Signature</a></td>
661</tr>
662<tr>
663 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190403.tar.gz'>miniupnpd-2.1.20190403.tar.gz</a></td>
664 <td class="filesize">235909</td>
665 <td class="filedate">03/04/2019 15:41:17 +0000</td>
666 <td class="comment">MiniUPnP daemon source code</td>
667 <td><a href="miniupnpd-2.1.20190403.tar.gz.sig">Signature</a></td>
668</tr>
669<tr>
670 <td class="filename"><a href='download.php?file=minissdpd-1.5.20190210.tar.gz'>minissdpd-1.5.20190210.tar.gz</a></td>
671 <td class="filesize">37227</td>
672 <td class="filedate">10/02/2019 15:21:49 +0000</td>
673 <td class="comment">MiniSSDPd source code</td>
674 <td><a href="minissdpd-1.5.20190210.tar.gz.sig">Signature</a></td>
675</tr>
676<tr>
677 <td class="filename"><a href='download.php?file=miniupnpc-2.1.20190210.tar.gz'>miniupnpc-2.1.20190210.tar.gz</a></td>
678 <td class="filesize">94125</td>
679 <td class="filedate">10/02/2019 12:46:09 +0000</td>
680 <td class="comment">MiniUPnP client source code</td>
681 <td><a href="miniupnpc-2.1.20190210.tar.gz.sig">Signature</a></td>
682</tr>
683<tr>
684 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20190210.tar.gz'>miniupnpd-2.1.20190210.tar.gz</a></td>
685 <td class="filesize">235093</td>
686 <td class="filedate">10/02/2019 11:20:11 +0000</td>
687 <td class="comment">MiniUPnP daemon source code</td>
688 <td><a href="miniupnpd-2.1.20190210.tar.gz.sig">Signature</a></td>
689</tr>
690<tr>
691 <td class="filename"><a href='download.php?file=miniupnpd-2.1.20180706.tar.gz'>miniupnpd-2.1.20180706.tar.gz</a></td>
692 <td class="filesize">233675</td>
693 <td class="filedate">06/07/2018 12:44:24 +0000</td>
694 <td class="comment">MiniUPnP daemon source code</td>
695 <td><a href="miniupnpd-2.1.20180706.tar.gz.sig">Signature</a></td>
696</tr>
697<tr>
698 <td class="filename"><a href='download.php?file=miniupnpd-2.1.tar.gz'>miniupnpd-2.1.tar.gz</a></td>
699 <td class="filesize">225458</td>
700 <td class="filedate">08/05/2018 21:50:32 +0000</td>
701 <td class="comment">MiniUPnP daemon release source code</td>
702 <td><a href="miniupnpd-2.1.tar.gz.sig">Signature</a></td>
703</tr>
704<tr>
705 <td class="filename"><a href='download.php?file=miniupnpc-2.1.tar.gz'>miniupnpc-2.1.tar.gz</a></td>
706 <td class="filesize">91914</td>
707 <td class="filedate">07/05/2018 11:10:59 +0000</td>
708 <td class="comment">MiniUPnP client release source code</td>
709 <td><a href="miniupnpc-2.1.tar.gz.sig">Signature</a></td>
710</tr>
711<tr>
712 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20180503.tar.gz'>miniupnpd-2.0.20180503.tar.gz</a></td>
713 <td class="filesize">225454</td>
714 <td class="filedate">03/05/2018 08:33:10 +0000</td>
715 <td class="comment">MiniUPnP daemon source code</td>
716 <td></td>
717</tr>
718<tr>
719 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20180503.tar.gz'>miniupnpc-2.0.20180503.tar.gz</a></td>
720 <td class="filesize">88207</td>
721 <td class="filedate">03/05/2018 08:31:22 +0000</td>
722 <td class="comment">MiniUPnP client source code</td>
723 <td></td>
724</tr>
725<tr>
726 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20180422.tar.gz'>miniupnpd-2.0.20180422.tar.gz</a></td>
727 <td class="filesize">224942</td>
728 <td class="filedate">22/04/2018 19:48:54 +0000</td>
729 <td class="comment">MiniUPnP daemon source code</td>
730 <td></td>
731</tr>
732<tr>
733 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20180412.tar.gz'>miniupnpd-2.0.20180412.tar.gz</a></td>
734 <td class="filesize">224831</td>
735 <td class="filedate">12/04/2018 08:16:25 +0000</td>
736 <td class="comment">MiniUPnP daemon source code</td>
737 <td></td>
738</tr>
739<tr>
740 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20180410.tar.gz'>miniupnpd-2.0.20180410.tar.gz</a></td>
741 <td class="filesize">224736</td>
742 <td class="filedate">10/04/2018 07:58:28 +0000</td>
743 <td class="comment">MiniUPnP daemon source code</td>
744 <td></td>
745</tr>
746<tr>
747 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20180410.tar.gz'>miniupnpc-2.0.20180410.tar.gz</a></td>
748 <td class="filesize">87363</td>
749 <td class="filedate">10/04/2018 07:52:55 +0000</td>
750 <td class="comment">MiniUPnP client source code</td>
751 <td></td>
752</tr>
753<tr>
754 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20180406.tar.gz'>miniupnpc-2.0.20180406.tar.gz</a></td>
755 <td class="filesize">87374</td>
756 <td class="filedate">06/04/2018 10:55:21 +0000</td>
757 <td class="comment">MiniUPnP client source code</td>
758 <td></td>
759</tr>
760<tr>
761 <td class="filename"><a href='download.php?file=minissdpd-1.5.20180223.tar.gz'>minissdpd-1.5.20180223.tar.gz</a></td>
762 <td class="filesize">36179</td>
763 <td class="filedate">23/02/2018 14:24:07 +0000</td>
764 <td class="comment">MiniSSDPd source code</td>
765 <td></td>
766</tr>
767<tr>
768 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20180222.tar.gz'>miniupnpc-2.0.20180222.tar.gz</a></td>
769 <td class="filesize">87018</td>
770 <td class="filedate">22/02/2018 15:09:24 +0000</td>
771 <td class="comment">MiniUPnP client source code</td>
772 <td></td>
773</tr>
774<tr>
775 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20180222.tar.gz'>miniupnpd-2.0.20180222.tar.gz</a></td>
776 <td class="filesize">223697</td>
777 <td class="filedate">22/02/2018 15:09:14 +0000</td>
778 <td class="comment">MiniUPnP daemon source code</td>
779 <td></td>
780</tr>
781<tr>
782 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20180203.tar.gz'>miniupnpd-2.0.20180203.tar.gz</a></td>
783 <td class="filesize">223084</td>
784 <td class="filedate">03/02/2018 22:34:46 +0000</td>
785 <td class="comment">MiniUPnP daemon source code</td>
786 <td></td>
787</tr>
788<tr>
789 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20180203.tar.gz'>miniupnpc-2.0.20180203.tar.gz</a></td>
790 <td class="filesize">86772</td>
791 <td class="filedate">03/02/2018 22:34:32 +0000</td>
792 <td class="comment">MiniUPnP client source code</td>
793 <td></td>
794</tr>
795<tr>
796 <td class="filename"><a href='download.php?file=minissdpd-1.5.20180203.tar.gz'>minissdpd-1.5.20180203.tar.gz</a></td>
797 <td class="filesize">35848</td>
798 <td class="filedate">03/02/2018 22:33:08 +0000</td>
799 <td class="comment">MiniSSDPd source code</td>
800 <td></td>
801</tr>
802<tr>
803 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20171212.tar.gz'>miniupnpc-2.0.20171212.tar.gz</a></td>
804 <td class="filesize">86607</td>
805 <td class="filedate">12/12/2017 12:03:38 +0000</td>
806 <td class="comment">MiniUPnP client source code</td>
807 <td></td>
808</tr>
809<tr>
810 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20171212.tar.gz'>miniupnpd-2.0.20171212.tar.gz</a></td>
811 <td class="filesize">222617</td>
812 <td class="filedate">12/12/2017 12:03:32 +0000</td>
813 <td class="comment">MiniUPnP daemon source code</td>
814 <td></td>
815</tr>
816<tr>
817 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20171102.tar.gz'>miniupnpc-2.0.20171102.tar.gz</a></td>
818 <td class="filesize">86363</td>
819 <td class="filedate">02/11/2017 17:58:34 +0000</td>
820 <td class="comment">MiniUPnP client source code</td>
821 <td></td>
822</tr>
823<tr>
824 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20170509.tar.gz'>miniupnpc-2.0.20170509.tar.gz</a></td>
825 <td class="filesize">86055</td>
826 <td class="filedate">09/05/2017 10:14:56 +0000</td>
827 <td class="comment">MiniUPnP client source code</td>
828 <td></td>
829</tr>
830<tr>
831 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20170421.tar.gz'>miniupnpc-2.0.20170421.tar.gz</a></td>
832 <td class="filesize">85984</td>
833 <td class="filedate">21/04/2017 12:02:26 +0000</td>
834 <td class="comment">MiniUPnP client source code</td>
835 <td></td>
836</tr>
837<tr>
838 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20170421.tar.gz'>miniupnpd-2.0.20170421.tar.gz</a></td>
839 <td class="filesize">219191</td>
840 <td class="filedate">21/04/2017 12:02:06 +0000</td>
841 <td class="comment">MiniUPnP daemon source code</td>
842 <td></td>
843</tr>
844<tr>
845 <td class="filename"><a href='download.php?file=miniupnpd-2.0.20161216.tar.gz'>miniupnpd-2.0.20161216.tar.gz</a></td>
846 <td class="filesize">218119</td>
847 <td class="filedate">16/12/2016 09:34:08 +0000</td>
848 <td class="comment">MiniUPnP daemon source code</td>
849 <td></td>
850</tr>
851<tr>
852 <td class="filename"><a href='download.php?file=miniupnpc-2.0.20161216.tar.gz'>miniupnpc-2.0.20161216.tar.gz</a></td>
853 <td class="filesize">85780</td>
854 <td class="filedate">16/12/2016 09:34:03 +0000</td>
855 <td class="comment">MiniUPnP client source code</td>
856 <td></td>
857</tr>
858<tr>
859 <td class="filename"><a href='download.php?file=minissdpd-1.5.20161216.tar.gz'>minissdpd-1.5.20161216.tar.gz</a></td>
860 <td class="filesize">35078</td>
861 <td class="filedate">16/12/2016 09:33:59 +0000</td>
862 <td class="comment">MiniSSDPd source code</td>
863 <td></td>
864</tr>
865<tr>
866 <td class="filename"><a href='download.php?file=miniupnpd-2.0.tar.gz'>miniupnpd-2.0.tar.gz</a></td>
867 <td class="filesize">217802</td>
868 <td class="filedate">19/04/2016 21:12:01 +0000</td>
869 <td class="comment">MiniUPnP daemon release source code</td>
870 <td><a href="miniupnpd-2.0.tar.gz.sig">Signature</a></td>
871</tr>
872<tr>
873 <td class="filename"><a href='download.php?file=miniupnpc-2.0.tar.gz'>miniupnpc-2.0.tar.gz</a></td>
874 <td class="filesize">85287</td>
875 <td class="filedate">19/04/2016 21:07:52 +0000</td>
876 <td class="comment">MiniUPnP client release source code</td>
877 <td></td>
878</tr>
879<tr>
880 <td class="filename"><a href='download.php?file=minissdpd-1.5.20160301.tar.gz'>minissdpd-1.5.20160301.tar.gz</a></td>
881 <td class="filesize">34827</td>
882 <td class="filedate">01/03/2016 18:08:23 +0000</td>
883 <td class="comment">MiniSSDPd source code</td>
884 <td></td>
885</tr>
886<tr>
887 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20160222.tar.gz'>miniupnpd-1.9.20160222.tar.gz</a></td>
888 <td class="filesize">217541</td>
889 <td class="filedate">22/02/2016 10:21:40 +0000</td>
890 <td class="comment">MiniUPnP daemon source code</td>
891 <td></td>
892</tr>
893<tr>
894 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20160216.tar.gz'>miniupnpd-1.9.20160216.tar.gz</a></td>
895 <td class="filesize">217007</td>
896 <td class="filedate">16/02/2016 12:41:44 +0000</td>
897 <td class="comment">MiniUPnP daemon source code</td>
898 <td></td>
899</tr>
900<tr>
901 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20160212.tar.gz'>miniupnpd-1.9.20160212.tar.gz</a></td>
902 <td class="filesize">215866</td>
903 <td class="filedate">12/02/2016 15:22:04 +0000</td>
904 <td class="comment">MiniUPnP daemon source code</td>
905 <td></td>
906</tr>
907<tr>
908 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20160209.tar.gz'>miniupnpd-1.9.20160209.tar.gz</a></td>
909 <td class="filesize">213416</td>
910 <td class="filedate">09/02/2016 09:47:03 +0000</td>
911 <td class="comment">MiniUPnP daemon source code</td>
912 <td></td>
913</tr>
914<tr>
915 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20160209.tar.gz'>miniupnpc-1.9.20160209.tar.gz</a></td>
916 <td class="filesize">85268</td>
917 <td class="filedate">09/02/2016 09:44:50 +0000</td>
918 <td class="comment">MiniUPnP client source code</td>
919 <td></td>
920</tr>
921<tr>
922 <td class="filename"><a href='download.php?file=minissdpd-1.5.20160119.tar.gz'>minissdpd-1.5.20160119.tar.gz</a></td>
923 <td class="filesize">34711</td>
924 <td class="filedate">19/01/2016 13:39:51 +0000</td>
925 <td class="comment">MiniSSDPd source code</td>
926 <td></td>
927</tr>
928<tr>
929 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20160113.tar.gz'>miniupnpd-1.9.20160113.tar.gz</a></td>
930 <td class="filesize">211437</td>
931 <td class="filedate">13/01/2016 16:03:14 +0000</td>
932 <td class="comment">MiniUPnP daemon source code</td>
933 <td></td>
934</tr>
935<tr>
936 <td class="filename"><a href='download.php?file=minissdpd-1.5.tar.gz'>minissdpd-1.5.tar.gz</a></td>
937 <td class="filesize">34404</td>
938 <td class="filedate">13/01/2016 15:26:53 +0000</td>
939 <td class="comment">MiniSSDPd release source code</td>
940 <td></td>
941</tr>
942<tr>
943 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20151212.tar.gz'>miniupnpd-1.9.20151212.tar.gz</a></td>
944 <td class="filesize">210912</td>
945 <td class="filedate">12/12/2015 10:06:07 +0000</td>
946 <td class="comment">MiniUPnP daemon source code</td>
947 <td></td>
948</tr>
949<tr>
950 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20151118.tar.gz'>miniupnpd-1.9.20151118.tar.gz</a></td>
951 <td class="filesize">210322</td>
952 <td class="filedate">18/11/2015 08:59:46 +0000</td>
953 <td class="comment">MiniUPnP daemon source code</td>
954 <td></td>
955</tr>
956<tr>
957 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20151026.tar.gz'>miniupnpc-1.9.20151026.tar.gz</a></td>
958 <td class="filesize">84208</td>
959 <td class="filedate">26/10/2015 17:07:34 +0000</td>
960 <td class="comment">MiniUPnP client source code</td>
961 <td></td>
962</tr>
963<tr>
964 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20151008.tar.gz'>miniupnpc-1.9.20151008.tar.gz</a></td>
965 <td class="filesize">83538</td>
966 <td class="filedate">08/10/2015 16:22:28 +0000</td>
967 <td class="comment">MiniUPnP client source code</td>
968 <td></td>
969</tr>
970<tr>
971 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20150922.tar.gz'>miniupnpd-1.9.20150922.tar.gz</a></td>
972 <td class="filesize">208700</td>
973 <td class="filedate">22/09/2015 10:21:50 +0000</td>
974 <td class="comment">MiniUPnP daemon source code</td>
975 <td></td>
976</tr>
977<tr>
978 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20150918.zip'>upnpc-exe-win32-20150918.zip</a></td>
979 <td class="filesize">100004</td>
980 <td class="filedate">18/09/2015 12:50:51 +0000</td>
981 <td class="comment">Windows executable</td>
982 <td></td>
983</tr>
984<tr>
985 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150917.tar.gz'>miniupnpc-1.9.20150917.tar.gz</a></td>
986 <td class="filesize">82609</td>
987 <td class="filedate">17/09/2015 14:09:14 +0000</td>
988 <td class="comment">MiniUPnP client source code</td>
989 <td></td>
990</tr>
991<tr>
992 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20150824.zip'>upnpc-exe-win32-20150824.zip</a></td>
993 <td class="filesize">99520</td>
994 <td class="filedate">24/08/2015 15:25:18 +0000</td>
995 <td class="comment">Windows executable</td>
996 <td></td>
997</tr>
998<tr>
999 <td class="filename"><a href='download.php?file=minissdpd-1.4.tar.gz'>minissdpd-1.4.tar.gz</a></td>
1000 <td class="filesize">32017</td>
1001 <td class="filedate">06/08/2015 13:38:37 +0000</td>
1002 <td class="comment">MiniSSDPd release source code</td>
1003 <td></td>
1004</tr>
1005<tr>
1006 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150730.tar.gz'>miniupnpc-1.9.20150730.tar.gz</a></td>
1007 <td class="filesize">81431</td>
1008 <td class="filedate">29/07/2015 22:10:00 +0000</td>
1009 <td class="comment">MiniUPnP client source code</td>
1010 <td></td>
1011</tr>
1012<tr>
1013 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20150721.tar.gz'>miniupnpd-1.9.20150721.tar.gz</a></td>
1014 <td class="filesize">207562</td>
1015 <td class="filedate">21/07/2015 13:35:51 +0000</td>
1016 <td class="comment">MiniUPnP daemon source code</td>
1017 <td></td>
1018</tr>
1019<tr>
1020 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150721.tar.gz'>miniupnpc-1.9.20150721.tar.gz</a></td>
1021 <td class="filesize">80521</td>
1022 <td class="filedate">21/07/2015 13:27:00 +0000</td>
1023 <td class="comment">MiniUPnP client source code</td>
1024 <td></td>
1025</tr>
1026<tr>
1027 <td class="filename"><a href='download.php?file=libnatpmp-20150609.tar.gz'>libnatpmp-20150609.tar.gz</a></td>
1028 <td class="filesize">24392</td>
1029 <td class="filedate">09/06/2015 15:40:28 +0000</td>
1030 <td class="comment">libnatpmp source code</td>
1031 <td></td>
1032</tr>
1033<tr>
1034 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150609.tar.gz'>miniupnpc-1.9.20150609.tar.gz</a></td>
1035 <td class="filesize">79311</td>
1036 <td class="filedate">09/06/2015 15:39:48 +0000</td>
1037 <td class="comment">MiniUPnP client source code</td>
1038 <td></td>
1039</tr>
1040<tr>
1041 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20150609.tar.gz'>miniupnpd-1.9.20150609.tar.gz</a></td>
1042 <td class="filesize">207088</td>
1043 <td class="filedate">09/06/2015 15:39:36 +0000</td>
1044 <td class="comment">MiniUPnP daemon source code</td>
1045 <td></td>
1046</tr>
1047<tr>
1048 <td class="filename"><a href='download.php?file=minissdpd-1.3.20150527.tar.gz'>minissdpd-1.3.20150527.tar.gz</a></td>
1049 <td class="filesize">31025</td>
1050 <td class="filedate">27/05/2015 09:17:15 +0000</td>
1051 <td class="comment">MiniSSDPd source code</td>
1052 <td></td>
1053</tr>
1054<tr>
1055 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150522.tar.gz'>miniupnpc-1.9.20150522.tar.gz</a></td>
1056 <td class="filesize">79080</td>
1057 <td class="filedate">22/05/2015 11:02:27 +0000</td>
1058 <td class="comment">MiniUPnP client source code</td>
1059 <td></td>
1060</tr>
1061<tr>
1062 <td class="filename"><a href='download.php?file=minissdpd-1.3.20150522.tar.gz'>minissdpd-1.3.20150522.tar.gz</a></td>
1063 <td class="filesize">30334</td>
1064 <td class="filedate">22/05/2015 11:02:04 +0000</td>
1065 <td class="comment">MiniSSDPd source code</td>
1066 <td></td>
1067</tr>
1068<tr>
1069 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20150430.tar.gz'>miniupnpd-1.9.20150430.tar.gz</a></td>
1070 <td class="filesize">205930</td>
1071 <td class="filedate">30/04/2015 09:09:27 +0000</td>
1072 <td class="comment">MiniUPnP daemon source code</td>
1073 <td></td>
1074</tr>
1075<tr>
1076 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150430.tar.gz'>miniupnpc-1.9.20150430.tar.gz</a></td>
1077 <td class="filesize">78459</td>
1078 <td class="filedate">30/04/2015 08:39:31 +0000</td>
1079 <td class="comment">MiniUPnP client source code</td>
1080 <td></td>
1081</tr>
1082<tr>
1083 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150427.tar.gz'>miniupnpc-1.9.20150427.tar.gz</a></td>
1084 <td class="filesize">78424</td>
1085 <td class="filedate">27/04/2015 16:08:42 +0000</td>
1086 <td class="comment">MiniUPnP client source code</td>
1087 <td></td>
1088</tr>
1089<tr>
1090 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20150427.tar.gz'>miniupnpd-1.9.20150427.tar.gz</a></td>
1091 <td class="filesize">191157</td>
1092 <td class="filedate">27/04/2015 16:08:27 +0000</td>
1093 <td class="comment">MiniUPnP daemon source code</td>
1094 <td></td>
1095</tr>
1096<tr>
1097 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20150307.tar.gz'>miniupnpd-1.9.20150307.tar.gz</a></td>
1098 <td class="filesize">190913</td>
1099 <td class="filedate">07/03/2015 16:11:51 +0000</td>
1100 <td class="comment">MiniUPnP daemon source code</td>
1101 <td></td>
1102</tr>
1103<tr>
1104 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20150206.tar.gz'>miniupnpc-1.9.20150206.tar.gz</a></td>
1105 <td class="filesize">76864</td>
1106 <td class="filedate">06/02/2015 14:38:00 +0000</td>
1107 <td class="comment">MiniUPnP client source code</td>
1108 <td></td>
1109</tr>
1110<tr>
1111 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20141209.tar.gz'>miniupnpd-1.9.20141209.tar.gz</a></td>
1112 <td class="filesize">193183</td>
1113 <td class="filedate">09/12/2014 09:58:34 +0000</td>
1114 <td class="comment">MiniUPnP daemon source code</td>
1115 <td></td>
1116</tr>
1117<tr>
1118 <td class="filename"><a href='download.php?file=minissdpd-1.3.tar.gz'>minissdpd-1.3.tar.gz</a></td>
1119 <td class="filesize">30326</td>
1120 <td class="filedate">09/12/2014 09:57:30 +0000</td>
1121 <td class="comment">MiniSSDPd release source code</td>
1122 <td></td>
1123</tr>
1124<tr>
1125 <td class="filename"><a href='download.php?file=minissdpd-1.2.20141204.tar.gz'>minissdpd-1.2.20141204.tar.gz</a></td>
1126 <td class="filesize">26978</td>
1127 <td class="filedate">04/12/2014 10:55:26 +0000</td>
1128 <td class="comment">MiniSSDPd source code</td>
1129 <td></td>
1130</tr>
1131<tr>
1132 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20141204.tar.gz'>miniupnpd-1.9.20141204.tar.gz</a></td>
1133 <td class="filesize">192597</td>
1134 <td class="filedate">04/12/2014 10:55:03 +0000</td>
1135 <td class="comment">MiniUPnP daemon source code</td>
1136 <td></td>
1137</tr>
1138<tr>
1139 <td class="filename"><a href='download.php?file=minissdpd-1.2.20141128.tar.gz'>minissdpd-1.2.20141128.tar.gz</a></td>
1140 <td class="filesize">26795</td>
1141 <td class="filedate">28/11/2014 16:33:10 +0000</td>
1142 <td class="comment">MiniSSDPd source code</td>
1143 <td></td>
1144</tr>
1145<tr>
1146 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20141128.tar.gz'>miniupnpd-1.9.20141128.tar.gz</a></td>
1147 <td class="filesize">192558</td>
1148 <td class="filedate">28/11/2014 13:31:36 +0000</td>
1149 <td class="comment">MiniUPnP daemon source code</td>
1150 <td></td>
1151</tr>
1152<tr>
1153 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20141128.tar.gz'>miniupnpc-1.9.20141128.tar.gz</a></td>
1154 <td class="filesize">76541</td>
1155 <td class="filedate">28/11/2014 13:31:15 +0000</td>
1156 <td class="comment">MiniUPnP client source code</td>
1157 <td></td>
1158</tr>
1159<tr>
1160 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20141117.tar.gz'>miniupnpc-1.9.20141117.tar.gz</a></td>
1161 <td class="filesize">73865</td>
1162 <td class="filedate">17/11/2014 09:51:36 +0000</td>
1163 <td class="comment">MiniUPnP client source code</td>
1164 <td></td>
1165</tr>
1166<tr>
1167 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20141113.tar.gz'>miniupnpc-1.9.20141113.tar.gz</a></td>
1168 <td class="filesize">72857</td>
1169 <td class="filedate">13/11/2014 10:36:44 +0000</td>
1170 <td class="comment">MiniUPnP client source code</td>
1171 <td></td>
1172</tr>
1173<tr>
1174 <td class="filename"><a href='download.php?file=minissdpd-1.2.20141108.tar.gz'>minissdpd-1.2.20141108.tar.gz</a></td>
1175 <td class="filesize">22001</td>
1176 <td class="filedate">08/11/2014 13:55:41 +0000</td>
1177 <td class="comment">MiniSSDPd source code</td>
1178 <td></td>
1179</tr>
1180<tr>
1181 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20141108.tar.gz'>miniupnpc-1.9.20141108.tar.gz</a></td>
1182 <td class="filesize">72781</td>
1183 <td class="filedate">08/11/2014 13:53:48 +0000</td>
1184 <td class="comment">MiniUPnP client source code</td>
1185 <td></td>
1186</tr>
1187<tr>
1188 <td class="filename"><a href='download.php?file=miniupnpd-1.9.20141108.tar.gz'>miniupnpd-1.9.20141108.tar.gz</a></td>
1189 <td class="filesize">192413</td>
1190 <td class="filedate">08/11/2014 13:53:38 +0000</td>
1191 <td class="comment">MiniUPnP daemon source code</td>
1192 <td></td>
1193</tr>
1194<tr>
1195 <td class="filename"><a href='download.php?file=miniupnpd-1.9.tar.gz'>miniupnpd-1.9.tar.gz</a></td>
1196 <td class="filesize">192183</td>
1197 <td class="filedate">27/10/2014 16:45:34 +0000</td>
1198 <td class="comment">MiniUPnP daemon release source code</td>
1199 <td></td>
1200</tr>
1201<tr>
1202 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20141027.tar.gz'>miniupnpc-1.9.20141027.tar.gz</a></td>
1203 <td class="filesize">76763</td>
1204 <td class="filedate">27/10/2014 16:45:25 +0000</td>
1205 <td class="comment">MiniUPnP client source code</td>
1206 <td></td>
1207</tr>
1208<tr>
1209 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20141022.tar.gz'>miniupnpd-1.8.20141022.tar.gz</a></td>
1210 <td class="filesize">191630</td>
1211 <td class="filedate">22/10/2014 09:17:41 +0000</td>
1212 <td class="comment">MiniUPnP daemon source code</td>
1213 <td></td>
1214</tr>
1215<tr>
1216 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20141021.tar.gz'>miniupnpd-1.8.20141021.tar.gz</a></td>
1217 <td class="filesize">191270</td>
1218 <td class="filedate">21/10/2014 14:18:58 +0000</td>
1219 <td class="comment">MiniUPnP daemon source code</td>
1220 <td></td>
1221</tr>
1222<tr>
1223 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20140911.tar.gz'>miniupnpc-1.9.20140911.tar.gz</a></td>
1224 <td class="filesize">76855</td>
1225 <td class="filedate">11/09/2014 14:15:23 +0000</td>
1226 <td class="comment">MiniUPnP client source code</td>
1227 <td></td>
1228</tr>
1229<tr>
1230 <td class="filename"><a href='download.php?file=minissdpd-1.2.20140906.tar.gz'>minissdpd-1.2.20140906.tar.gz</a></td>
1231 <td class="filesize">21956</td>
1232 <td class="filedate">06/09/2014 08:34:10 +0000</td>
1233 <td class="comment">MiniSSDPd source code</td>
1234 <td></td>
1235</tr>
1236<tr>
1237 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140906.tar.gz'>miniupnpd-1.8.20140906.tar.gz</a></td>
1238 <td class="filesize">191183</td>
1239 <td class="filedate">06/09/2014 08:34:02 +0000</td>
1240 <td class="comment">MiniUPnP daemon source code</td>
1241 <td></td>
1242</tr>
1243<tr>
1244 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20140906.tar.gz'>miniupnpc-1.9.20140906.tar.gz</a></td>
1245 <td class="filesize">76791</td>
1246 <td class="filedate">06/09/2014 08:33:45 +0000</td>
1247 <td class="comment">MiniUPnP client source code</td>
1248 <td></td>
1249</tr>
1250<tr>
1251 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20140701.tar.gz'>miniupnpc-1.9.20140701.tar.gz</a></td>
1252 <td class="filesize">76735</td>
1253 <td class="filedate">01/07/2014 13:06:51 +0000</td>
1254 <td class="comment">MiniUPnP client source code</td>
1255 <td></td>
1256</tr>
1257<tr>
1258 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20140610.tar.gz'>miniupnpc-1.9.20140610.tar.gz</a></td>
1259 <td class="filesize">76674</td>
1260 <td class="filedate">10/06/2014 10:28:27 +0000</td>
1261 <td class="comment">MiniUPnP client source code</td>
1262 <td></td>
1263</tr>
1264<tr>
1265 <td class="filename"><a href='download.php?file=minissdpd-1.2.20140610.tar.gz'>minissdpd-1.2.20140610.tar.gz</a></td>
1266 <td class="filesize">21909</td>
1267 <td class="filedate">10/06/2014 10:03:29 +0000</td>
1268 <td class="comment">MiniSSDPd source code</td>
1269 <td></td>
1270</tr>
1271<tr>
1272 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140523.tar.gz'>miniupnpd-1.8.20140523.tar.gz</a></td>
1273 <td class="filesize">190936</td>
1274 <td class="filedate">23/05/2014 15:48:03 +0000</td>
1275 <td class="comment">MiniUPnP daemon source code</td>
1276 <td></td>
1277</tr>
1278<tr>
1279 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20140422.zip'>upnpc-exe-win32-20140422.zip</a></td>
1280 <td class="filesize">97505</td>
1281 <td class="filedate">22/04/2014 10:10:07 +0000</td>
1282 <td class="comment">Windows executable</td>
1283 <td></td>
1284</tr>
1285<tr>
1286 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140422.tar.gz'>miniupnpd-1.8.20140422.tar.gz</a></td>
1287 <td class="filesize">187225</td>
1288 <td class="filedate">22/04/2014 08:58:56 +0000</td>
1289 <td class="comment">MiniUPnP daemon source code</td>
1290 <td></td>
1291</tr>
1292<tr>
1293 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140401.tar.gz'>miniupnpd-1.8.20140401.tar.gz</a></td>
1294 <td class="filesize">183131</td>
1295 <td class="filedate">01/04/2014 10:07:20 +0000</td>
1296 <td class="comment">MiniUPnP daemon source code</td>
1297 <td></td>
1298</tr>
1299<tr>
1300 <td class="filename"><a href='download.php?file=miniupnpc-1.9.20140401.tar.gz'>miniupnpc-1.9.20140401.tar.gz</a></td>
1301 <td class="filesize">74703</td>
1302 <td class="filedate">01/04/2014 09:49:46 +0000</td>
1303 <td class="comment">MiniUPnP client source code</td>
1304 <td></td>
1305</tr>
1306<tr>
1307 <td class="filename"><a href='download.php?file=libnatpmp-20140401.tar.gz'>libnatpmp-20140401.tar.gz</a></td>
1308 <td class="filesize">23302</td>
1309 <td class="filedate">01/04/2014 09:49:44 +0000</td>
1310 <td class="comment">libnatpmp source code</td>
1311 <td></td>
1312</tr>
1313<tr>
1314 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140313.tar.gz'>miniupnpd-1.8.20140313.tar.gz</a></td>
1315 <td class="filesize">177120</td>
1316 <td class="filedate">13/03/2014 10:39:11 +0000</td>
1317 <td class="comment">MiniUPnP daemon source code</td>
1318 <td></td>
1319</tr>
1320<tr>
1321 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140310.tar.gz'>miniupnpd-1.8.20140310.tar.gz</a></td>
1322 <td class="filesize">176585</td>
1323 <td class="filedate">09/03/2014 23:16:49 +0000</td>
1324 <td class="comment">MiniUPnP daemon source code</td>
1325 <td></td>
1326</tr>
1327<tr>
1328 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140225.tar.gz'>miniupnpd-1.8.20140225.tar.gz</a></td>
1329 <td class="filesize">175183</td>
1330 <td class="filedate">25/02/2014 11:01:29 +0000</td>
1331 <td class="comment">MiniUPnP daemon source code</td>
1332 <td></td>
1333</tr>
1334<tr>
1335 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140203.tar.gz'>miniupnpd-1.8.20140203.tar.gz</a></td>
1336 <td class="filesize">170112</td>
1337 <td class="filedate">03/02/2014 09:56:05 +0000</td>
1338 <td class="comment">MiniUPnP daemon source code</td>
1339 <td></td>
1340</tr>
1341<tr>
1342 <td class="filename"><a href='download.php?file=miniupnpc-1.9.tar.gz'>miniupnpc-1.9.tar.gz</a></td>
1343 <td class="filesize">74230</td>
1344 <td class="filedate">31/01/2014 13:57:40 +0000</td>
1345 <td class="comment">MiniUPnP client release source code</td>
1346 <td></td>
1347</tr>
1348<tr>
1349 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20140127.tar.gz'>miniupnpd-1.8.20140127.tar.gz</a></td>
1350 <td class="filesize">170467</td>
1351 <td class="filedate">27/01/2014 11:25:34 +0000</td>
1352 <td class="comment">MiniUPnP daemon source code</td>
1353 <td></td>
1354</tr>
1355<tr>
1356 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20140117.zip'>upnpc-exe-win32-20140117.zip</a></td>
1357 <td class="filesize">97270</td>
1358 <td class="filedate">17/01/2014 11:37:53 +0000</td>
1359 <td class="comment">Windows executable</td>
1360 <td></td>
1361</tr>
1362<tr>
1363 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20131216.tar.gz'>miniupnpd-1.8.20131216.tar.gz</a></td>
1364 <td class="filesize">170277</td>
1365 <td class="filedate">16/12/2013 16:15:40 +0000</td>
1366 <td class="comment">MiniUPnP daemon source code</td>
1367 <td></td>
1368</tr>
1369<tr>
1370 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20131213.tar.gz'>miniupnpd-1.8.20131213.tar.gz</a></td>
1371 <td class="filesize">169753</td>
1372 <td class="filedate">13/12/2013 16:18:10 +0000</td>
1373 <td class="comment">MiniUPnP daemon source code</td>
1374 <td></td>
1375</tr>
1376<tr>
1377 <td class="filename"><a href='download.php?file=miniupnpc-1.8.20131209.tar.gz'>miniupnpc-1.8.20131209.tar.gz</a></td>
1378 <td class="filesize">73900</td>
1379 <td class="filedate">09/12/2013 20:52:54 +0000</td>
1380 <td class="comment">MiniUPnP client source code</td>
1381 <td></td>
1382</tr>
1383<tr>
1384 <td class="filename"><a href='download.php?file=libnatpmp-20131126.tar.gz'>libnatpmp-20131126.tar.gz</a></td>
1385 <td class="filesize">22972</td>
1386 <td class="filedate">26/11/2013 08:51:36 +0000</td>
1387 <td class="comment">libnatpmp source code</td>
1388 <td></td>
1389</tr>
1390<tr>
1391 <td class="filename"><a href='download.php?file=miniupnpc-1.8.20131007.tar.gz'>miniupnpc-1.8.20131007.tar.gz</a></td>
1392 <td class="filesize">73750</td>
1393 <td class="filedate">07/10/2013 10:10:25 +0000</td>
1394 <td class="comment">MiniUPnP client source code</td>
1395 <td></td>
1396</tr>
1397<tr>
1398 <td class="filename"><a href='download.php?file=libnatpmp-20130911.tar.gz'>libnatpmp-20130911.tar.gz</a></td>
1399 <td class="filesize">18744</td>
1400 <td class="filedate">11/09/2013 07:35:51 +0000</td>
1401 <td class="comment">libnatpmp source code</td>
1402 <td></td>
1403</tr>
1404<tr>
1405 <td class="filename"><a href='download.php?file=libnatpmp-20130910.tar.gz'>libnatpmp-20130910.tar.gz</a></td>
1406 <td class="filesize">18734</td>
1407 <td class="filedate">10/09/2013 20:15:34 +0000</td>
1408 <td class="comment">libnatpmp source code</td>
1409 <td></td>
1410</tr>
1411<tr>
1412 <td class="filename"><a href='download.php?file=minissdpd-1.2.20130907.tar.gz'>minissdpd-1.2.20130907.tar.gz</a></td>
1413 <td class="filesize">20237</td>
1414 <td class="filedate">07/09/2013 06:46:31 +0000</td>
1415 <td class="comment">MiniSSDPd source code</td>
1416 <td></td>
1417</tr>
1418<tr>
1419 <td class="filename"><a href='download.php?file=minissdpd-1.2.20130819.tar.gz'>minissdpd-1.2.20130819.tar.gz</a></td>
1420 <td class="filesize">20772</td>
1421 <td class="filedate">19/08/2013 16:50:29 +0000</td>
1422 <td class="comment">MiniSSDPd source code</td>
1423 <td></td>
1424</tr>
1425<tr>
1426 <td class="filename"><a href='download.php?file=miniupnpc-1.8.20130801.tar.gz'>miniupnpc-1.8.20130801.tar.gz</a></td>
1427 <td class="filesize">73426</td>
1428 <td class="filedate">01/08/2013 21:38:05 +0000</td>
1429 <td class="comment">MiniUPnP client source code</td>
1430 <td></td>
1431</tr>
1432<tr>
1433 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20130730.tar.gz'>miniupnpd-1.8.20130730.tar.gz</a></td>
1434 <td class="filesize">149904</td>
1435 <td class="filedate">30/07/2013 11:37:48 +0000</td>
1436 <td class="comment">MiniUPnP daemon source code</td>
1437 <td></td>
1438</tr>
1439<tr>
1440 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20130607.tar.gz'>miniupnpd-1.8.20130607.tar.gz</a></td>
1441 <td class="filesize">149521</td>
1442 <td class="filedate">07/06/2013 08:46:17 +0000</td>
1443 <td class="comment">MiniUPnP daemon source code</td>
1444 <td></td>
1445</tr>
1446<tr>
1447 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20130521.tar.gz'>miniupnpd-1.8.20130521.tar.gz</a></td>
1448 <td class="filesize">149276</td>
1449 <td class="filedate">21/05/2013 09:01:33 +0000</td>
1450 <td class="comment">MiniUPnP daemon source code</td>
1451 <td></td>
1452</tr>
1453<tr>
1454 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20130503.tar.gz'>miniupnpd-1.8.20130503.tar.gz</a></td>
1455 <td class="filesize">148420</td>
1456 <td class="filedate">03/05/2013 19:27:16 +0000</td>
1457 <td class="comment">MiniUPnP daemon source code</td>
1458 <td></td>
1459</tr>
1460<tr>
1461 <td class="filename"><a href='download.php?file=miniupnpc-1.8.20130503.tar.gz'>miniupnpc-1.8.20130503.tar.gz</a></td>
1462 <td class="filesize">71858</td>
1463 <td class="filedate">03/05/2013 19:27:07 +0000</td>
1464 <td class="comment">MiniUPnP client source code</td>
1465 <td></td>
1466</tr>
1467<tr>
1468 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20130426.tar.gz'>miniupnpd-1.8.20130426.tar.gz</a></td>
1469 <td class="filesize">147890</td>
1470 <td class="filedate">26/04/2013 16:57:20 +0000</td>
1471 <td class="comment">MiniUPnP daemon source code</td>
1472 <td></td>
1473</tr>
1474<tr>
1475 <td class="filename"><a href='download.php?file=miniupnpc-1.8.20130211.tar.gz'>miniupnpc-1.8.20130211.tar.gz</a></td>
1476 <td class="filesize">70723</td>
1477 <td class="filedate">11/02/2013 10:32:44 +0000</td>
1478 <td class="comment">MiniUPnP client source code</td>
1479 <td></td>
1480</tr>
1481<tr>
1482 <td class="filename"><a href='download.php?file=miniupnpd-1.8.20130207.tar.gz'>miniupnpd-1.8.20130207.tar.gz</a></td>
1483 <td class="filesize">147325</td>
1484 <td class="filedate">07/02/2013 12:29:32 +0000</td>
1485 <td class="comment">MiniUPnP daemon source code</td>
1486 <td></td>
1487</tr>
1488<tr>
1489 <td class="filename"><a href='download.php?file=miniupnpc-1.8.tar.gz'>miniupnpc-1.8.tar.gz</a></td>
1490 <td class="filesize">70624</td>
1491 <td class="filedate">06/02/2013 14:31:06 +0000</td>
1492 <td class="comment">MiniUPnP client release source code</td>
1493 <td></td>
1494</tr>
1495<tr>
1496 <td class="filename"><a href='download.php?file=miniupnpd-1.8.tar.gz'>miniupnpd-1.8.tar.gz</a></td>
1497 <td class="filesize">146679</td>
1498 <td class="filedate">06/02/2013 14:30:59 +0000</td>
1499 <td class="comment">MiniUPnP daemon release source code</td>
1500 <td></td>
1501</tr>
1502<tr>
1503 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20121009.zip'>upnpc-exe-win32-20121009.zip</a></td>
1504 <td class="filesize">96513</td>
1505 <td class="filedate">09/10/2012 17:54:12 +0000</td>
1506 <td class="comment">Windows executable</td>
1507 <td></td>
1508</tr>
1509<tr>
1510 <td class="filename"><a href='download.php?file=miniupnpd-1.7.20121005.tar.gz'>miniupnpd-1.7.20121005.tar.gz</a></td>
1511 <td class="filesize">144393</td>
1512 <td class="filedate">04/10/2012 22:39:05 +0000</td>
1513 <td class="comment">MiniUPnP daemon source code</td>
1514 <td></td>
1515</tr>
1516<tr>
1517 <td class="filename"><a href='download.php?file=miniupnpc-1.7.20120830.tar.gz'>miniupnpc-1.7.20120830.tar.gz</a></td>
1518 <td class="filesize">70074</td>
1519 <td class="filedate">30/08/2012 08:41:51 +0000</td>
1520 <td class="comment">MiniUPnP client source code</td>
1521 <td></td>
1522</tr>
1523<tr>
1524 <td class="filename"><a href='download.php?file=miniupnpd-1.7.20120824.tar.gz'>miniupnpd-1.7.20120824.tar.gz</a></td>
1525 <td class="filesize">141960</td>
1526 <td class="filedate">24/08/2012 18:15:01 +0000</td>
1527 <td class="comment">MiniUPnP daemon source code</td>
1528 <td></td>
1529</tr>
1530<tr>
1531 <td class="filename"><a href='download.php?file=libnatpmp-20120821.tar.gz'>libnatpmp-20120821.tar.gz</a></td>
1532 <td class="filesize">17832</td>
1533 <td class="filedate">21/08/2012 17:24:46 +0000</td>
1534 <td class="comment">libnatpmp source code</td>
1535 <td></td>
1536</tr>
1537<tr>
1538 <td class="filename"><a href='download.php?file=miniupnpc-1.7.20120714.tar.gz'>miniupnpc-1.7.20120714.tar.gz</a></td>
1539 <td class="filesize">69570</td>
1540 <td class="filedate">14/07/2012 14:40:47 +0000</td>
1541 <td class="comment">MiniUPnP client source code</td>
1542 <td></td>
1543</tr>
1544<tr>
1545 <td class="filename"><a href='download.php?file=miniupnpc-1.7.20120711.tar.gz'>miniupnpc-1.7.20120711.tar.gz</a></td>
1546 <td class="filesize">69580</td>
1547 <td class="filedate">10/07/2012 22:27:05 +0000</td>
1548 <td class="comment">MiniUPnP client source code</td>
1549 <td></td>
1550</tr>
1551<tr>
1552 <td class="filename"><a href='download.php?file=miniupnpd-1.7.20120711.tar.gz'>miniupnpd-1.7.20120711.tar.gz</a></td>
1553 <td class="filesize">141380</td>
1554 <td class="filedate">10/07/2012 22:26:58 +0000</td>
1555 <td class="comment">MiniUPnP daemon source code</td>
1556 <td></td>
1557</tr>
1558<tr>
1559 <td class="filename"><a href='download.php?file=miniupnpd-1.7.tar.gz'>miniupnpd-1.7.tar.gz</a></td>
1560 <td class="filesize">138047</td>
1561 <td class="filedate">27/05/2012 23:13:30 +0000</td>
1562 <td class="comment">MiniUPnP daemon release source code</td>
1563 <td></td>
1564</tr>
1565<tr>
1566 <td class="filename"><a href='download.php?file=miniupnpc-1.7.tar.gz'>miniupnpc-1.7.tar.gz</a></td>
1567 <td class="filesize">68327</td>
1568 <td class="filedate">24/05/2012 18:17:48 +0000</td>
1569 <td class="comment">MiniUPnP client release source code</td>
1570 <td></td>
1571</tr>
1572<tr>
1573 <td class="filename"><a href='download.php?file=minissdpd-1.2.tar.gz'>minissdpd-1.2.tar.gz</a></td>
1574 <td class="filesize">19874</td>
1575 <td class="filedate">24/05/2012 18:06:24 +0000</td>
1576 <td class="comment">MiniSSDPd release source code</td>
1577 <td></td>
1578</tr>
1579<tr>
1580 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120509.tar.gz'>miniupnpd-1.6.20120509.tar.gz</a></td>
1581 <td class="filesize">137147</td>
1582 <td class="filedate">09/05/2012 10:45:44 +0000</td>
1583 <td class="comment">MiniUPnP daemon source code</td>
1584 <td></td>
1585</tr>
1586<tr>
1587 <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120509.tar.gz'>miniupnpc-1.6.20120509.tar.gz</a></td>
1588 <td class="filesize">68205</td>
1589 <td class="filedate">09/05/2012 10:45:41 +0000</td>
1590 <td class="comment">MiniUPnP client source code</td>
1591 <td></td>
1592</tr>
1593<tr>
1594 <td class="filename"><a href='download.php?file=minissdpd-1.1.20120509.tar.gz'>minissdpd-1.1.20120509.tar.gz</a></td>
1595 <td class="filesize">18123</td>
1596 <td class="filedate">09/05/2012 10:45:39 +0000</td>
1597 <td class="comment">MiniSSDPd source code</td>
1598 <td></td>
1599</tr>
1600<tr>
1601 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120502.tar.gz'>miniupnpd-1.6.20120502.tar.gz</a></td>
1602 <td class="filesize">136688</td>
1603 <td class="filedate">01/05/2012 22:51:18 +0000</td>
1604 <td class="comment">MiniUPnP daemon source code</td>
1605 <td></td>
1606</tr>
1607<tr>
1608 <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120502.tar.gz'>miniupnpc-1.6.20120502.tar.gz</a></td>
1609 <td class="filesize">68170</td>
1610 <td class="filedate">01/05/2012 22:51:11 +0000</td>
1611 <td class="comment">MiniUPnP client source code</td>
1612 <td></td>
1613</tr>
1614<tr>
1615 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120426.tar.gz'>miniupnpd-1.6.20120426.tar.gz</a></td>
1616 <td class="filesize">134764</td>
1617 <td class="filedate">26/04/2012 16:24:29 +0000</td>
1618 <td class="comment">MiniUPnP daemon source code</td>
1619 <td></td>
1620</tr>
1621<tr>
1622 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120424.tar.gz'>miniupnpd-1.6.20120424.tar.gz</a></td>
1623 <td class="filesize">132522</td>
1624 <td class="filedate">23/04/2012 22:43:17 +0000</td>
1625 <td class="comment">MiniUPnP daemon source code</td>
1626 <td></td>
1627</tr>
1628<tr>
1629 <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120424.tar.gz'>miniupnpc-1.6.20120424.tar.gz</a></td>
1630 <td class="filesize">68067</td>
1631 <td class="filedate">23/04/2012 22:43:10 +0000</td>
1632 <td class="comment">MiniUPnP client source code</td>
1633 <td></td>
1634</tr>
1635<tr>
1636 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120420.tar.gz'>miniupnpd-1.6.20120420.tar.gz</a></td>
1637 <td class="filesize">131972</td>
1638 <td class="filedate">20/04/2012 14:58:57 +0000</td>
1639 <td class="comment">MiniUPnP daemon source code</td>
1640 <td></td>
1641</tr>
1642<tr>
1643 <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120420.tar.gz'>miniupnpc-1.6.20120420.tar.gz</a></td>
1644 <td class="filesize">68068</td>
1645 <td class="filedate">20/04/2012 14:58:39 +0000</td>
1646 <td class="comment">MiniUPnP client source code</td>
1647 <td></td>
1648</tr>
1649<tr>
1650 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120419.tar.gz'>miniupnpd-1.6.20120419.tar.gz</a></td>
1651 <td class="filesize">131088</td>
1652 <td class="filedate">18/04/2012 23:41:36 +0000</td>
1653 <td class="comment">MiniUPnP daemon source code</td>
1654 <td></td>
1655</tr>
1656<tr>
1657 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120418.tar.gz'>miniupnpd-1.6.20120418.tar.gz</a></td>
1658 <td class="filesize">130879</td>
1659 <td class="filedate">18/04/2012 21:01:10 +0000</td>
1660 <td class="comment">MiniUPnP daemon source code</td>
1661 <td></td>
1662</tr>
1663<tr>
1664 <td class="filename"><a href='download.php?file=minissdpd-1.1.20120410.tar.gz'>minissdpd-1.1.20120410.tar.gz</a></td>
1665 <td class="filesize">18059</td>
1666 <td class="filedate">09/04/2012 22:45:38 +0000</td>
1667 <td class="comment">MiniSSDPd source code</td>
1668 <td></td>
1669</tr>
1670<tr>
1671 <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120410.tar.gz'>miniupnpc-1.6.20120410.tar.gz</a></td>
1672 <td class="filesize">67934</td>
1673 <td class="filedate">09/04/2012 22:45:10 +0000</td>
1674 <td class="comment">MiniUPnP client source code</td>
1675 <td></td>
1676</tr>
1677<tr>
1678 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120406.tar.gz'>miniupnpd-1.6.20120406.tar.gz</a></td>
1679 <td class="filesize">128992</td>
1680 <td class="filedate">06/04/2012 17:52:57 +0000</td>
1681 <td class="comment">MiniUPnP daemon source code</td>
1682 <td></td>
1683</tr>
1684<tr>
1685 <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120320.tar.gz'>miniupnpc-1.6.20120320.tar.gz</a></td>
1686 <td class="filesize">67374</td>
1687 <td class="filedate">20/03/2012 16:55:48 +0000</td>
1688 <td class="comment">MiniUPnP client source code</td>
1689 <td></td>
1690</tr>
1691<tr>
1692 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120320.tar.gz'>miniupnpd-1.6.20120320.tar.gz</a></td>
1693 <td class="filesize">127968</td>
1694 <td class="filedate">20/03/2012 16:46:07 +0000</td>
1695 <td class="comment">MiniUPnP daemon source code</td>
1696 <td></td>
1697</tr>
1698<tr>
1699 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120305.tar.gz'>miniupnpd-1.6.20120305.tar.gz</a></td>
1700 <td class="filesize">126985</td>
1701 <td class="filedate">05/03/2012 20:42:01 +0000</td>
1702 <td class="comment">MiniUPnP daemon source code</td>
1703 <td></td>
1704</tr>
1705<tr>
1706 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120207.tar.gz'>miniupnpd-1.6.20120207.tar.gz</a></td>
1707 <td class="filesize">127425</td>
1708 <td class="filedate">07/02/2012 10:21:16 +0000</td>
1709 <td class="comment">MiniUPnP daemon source code</td>
1710 <td></td>
1711</tr>
1712<tr>
1713 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120203.tar.gz'>miniupnpd-1.6.20120203.tar.gz</a></td>
1714 <td class="filesize">126599</td>
1715 <td class="filedate">03/02/2012 15:14:13 +0000</td>
1716 <td class="comment">MiniUPnP daemon source code</td>
1717 <td></td>
1718</tr>
1719<tr>
1720 <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120125.tar.gz'>miniupnpc-1.6.20120125.tar.gz</a></td>
1721 <td class="filesize">67354</td>
1722 <td class="filedate">25/01/2012 21:12:28 +0000</td>
1723 <td class="comment">MiniUPnP client source code</td>
1724 <td></td>
1725</tr>
1726<tr>
1727 <td class="filename"><a href='download.php?file=miniupnpc-1.6.20120121.tar.gz'>miniupnpc-1.6.20120121.tar.gz</a></td>
1728 <td class="filesize">67347</td>
1729 <td class="filedate">21/01/2012 14:07:41 +0000</td>
1730 <td class="comment">MiniUPnP client source code</td>
1731 <td></td>
1732</tr>
1733<tr>
1734 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20120121.tar.gz'>miniupnpd-1.6.20120121.tar.gz</a></td>
1735 <td class="filesize">126021</td>
1736 <td class="filedate">21/01/2012 14:07:33 +0000</td>
1737 <td class="comment">MiniUPnP daemon source code</td>
1738 <td></td>
1739</tr>
1740<tr>
1741 <td class="filename"><a href='download.php?file=minissdpd-1.1.20120121.tar.gz'>minissdpd-1.1.20120121.tar.gz</a></td>
1742 <td class="filesize">17762</td>
1743 <td class="filedate">21/01/2012 14:07:16 +0000</td>
1744 <td class="comment">MiniSSDPd source code</td>
1745 <td></td>
1746</tr>
1747<tr>
1748 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20120121.zip'>upnpc-exe-win32-20120121.zip</a></td>
1749 <td class="filesize">94575</td>
1750 <td class="filedate">21/01/2012 13:59:11 +0000</td>
1751 <td class="comment">Windows executable</td>
1752 <td></td>
1753</tr>
1754<tr>
1755 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20111212.zip'>upnpc-exe-win32-20111212.zip</a></td>
1756 <td class="filesize">94507</td>
1757 <td class="filedate">12/12/2011 12:33:48 +0000</td>
1758 <td class="comment">Windows executable</td>
1759 <td></td>
1760</tr>
1761<tr>
1762 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20111118.tar.gz'>miniupnpd-1.6.20111118.tar.gz</a></td>
1763 <td class="filesize">125683</td>
1764 <td class="filedate">18/11/2011 11:26:12 +0000</td>
1765 <td class="comment">MiniUPnP daemon source code</td>
1766 <td></td>
1767</tr>
1768<tr>
1769 <td class="filename"><a href='download.php?file=minissdpd-1.1.20111007.tar.gz'>minissdpd-1.1.20111007.tar.gz</a></td>
1770 <td class="filesize">17611</td>
1771 <td class="filedate">07/10/2011 09:47:51 +0000</td>
1772 <td class="comment">MiniSSDPd source code</td>
1773 <td></td>
1774</tr>
1775<tr>
1776 <td class="filename"><a href='download.php?file=xchat-upnp20110811.patch'>xchat-upnp20110811.patch</a></td>
1777 <td class="filesize">10329</td>
1778 <td class="filedate">11/08/2011 15:18:25 +0000</td>
1779 <td class="comment">Patch to add UPnP capabilities to xchat</td>
1780 <td></td>
1781</tr>
1782<tr>
1783 <td class="filename"><a href='download.php?file=xchat-upnp20110811-2.8.8.patch'>xchat-upnp20110811-2.8.8.patch</a></td>
1784 <td class="filesize">11529</td>
1785 <td class="filedate">11/08/2011 15:18:23 +0000</td>
1786 <td class="comment">Patch to add UPnP capabilities to xchat</td>
1787 <td></td>
1788</tr>
1789<tr>
1790 <td class="filename"><a href='download.php?file=libnatpmp-20110808.tar.gz'>libnatpmp-20110808.tar.gz</a></td>
1791 <td class="filesize">17762</td>
1792 <td class="filedate">08/08/2011 21:21:34 +0000</td>
1793 <td class="comment">libnatpmp source code</td>
1794 <td></td>
1795</tr>
1796<tr>
1797 <td class="filename"><a href='download.php?file=libnatpmp-20110730.tar.gz'>libnatpmp-20110730.tar.gz</a></td>
1798 <td class="filesize">17687</td>
1799 <td class="filedate">30/07/2011 13:19:31 +0000</td>
1800 <td class="comment">libnatpmp source code</td>
1801 <td></td>
1802</tr>
1803<tr>
1804 <td class="filename"><a href='download.php?file=minissdpd-1.1.tar.gz'>minissdpd-1.1.tar.gz</a></td>
1805 <td class="filesize">17481</td>
1806 <td class="filedate">30/07/2011 13:17:30 +0000</td>
1807 <td class="comment">MiniSSDPd release source code</td>
1808 <td></td>
1809</tr>
1810<tr>
1811 <td class="filename"><a href='download.php?file=miniupnpd-1.6.20110730.tar.gz'>miniupnpd-1.6.20110730.tar.gz</a></td>
1812 <td class="filesize">125583</td>
1813 <td class="filedate">30/07/2011 13:17:09 +0000</td>
1814 <td class="comment">MiniUPnP daemon source code</td>
1815 <td></td>
1816</tr>
1817<tr>
1818 <td class="filename"><a href='download.php?file=minissdpd-1.0.20110729.tar.gz'>minissdpd-1.0.20110729.tar.gz</a></td>
1819 <td class="filesize">15898</td>
1820 <td class="filedate">29/07/2011 08:47:26 +0000</td>
1821 <td class="comment">MiniSSDPd source code</td>
1822 <td></td>
1823</tr>
1824<tr>
1825 <td class="filename"><a href='download.php?file=miniupnpc-1.6.tar.gz'>miniupnpc-1.6.tar.gz</a></td>
1826 <td class="filesize">66454</td>
1827 <td class="filedate">25/07/2011 18:03:09 +0000</td>
1828 <td class="comment">MiniUPnP client release source code</td>
1829 <td></td>
1830</tr>
1831<tr>
1832 <td class="filename"><a href='download.php?file=miniupnpd-1.6.tar.gz'>miniupnpd-1.6.tar.gz</a></td>
1833 <td class="filesize">124917</td>
1834 <td class="filedate">25/07/2011 16:37:57 +0000</td>
1835 <td class="comment">MiniUPnP daemon release source code</td>
1836 <td></td>
1837</tr>
1838<tr>
1839 <td class="filename"><a href='download.php?file=minidlna_1.0.21.minissdp1.patch'>minidlna_1.0.21.minissdp1.patch</a></td>
1840 <td class="filesize">7598</td>
1841 <td class="filedate">25/07/2011 14:57:50 +0000</td>
1842 <td class="comment">Patch for MiniDLNA to use miniSSDPD</td>
1843 <td></td>
1844</tr>
1845<tr>
1846 <td class="filename"><a href='download.php?file=libnatpmp-20110715.tar.gz'>libnatpmp-20110715.tar.gz</a></td>
1847 <td class="filesize">17943</td>
1848 <td class="filedate">15/07/2011 08:31:40 +0000</td>
1849 <td class="comment">libnatpmp source code</td>
1850 <td></td>
1851</tr>
1852<tr>
1853 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110715.tar.gz'>miniupnpd-1.5.20110715.tar.gz</a></td>
1854 <td class="filesize">124519</td>
1855 <td class="filedate">15/07/2011 07:55:17 +0000</td>
1856 <td class="comment">MiniUPnP daemon source code</td>
1857 <td></td>
1858</tr>
1859<tr>
1860 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20110714.zip'>upnpc-exe-win32-20110714.zip</a></td>
1861 <td class="filesize">94236</td>
1862 <td class="filedate">13/07/2011 23:16:01 +0000</td>
1863 <td class="comment">Windows executable</td>
1864 <td></td>
1865</tr>
1866<tr>
1867 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110623.tar.gz'>miniupnpd-1.5.20110623.tar.gz</a></td>
1868 <td class="filesize">123529</td>
1869 <td class="filedate">22/06/2011 22:29:15 +0000</td>
1870 <td class="comment">MiniUPnP daemon source code</td>
1871 <td></td>
1872</tr>
1873<tr>
1874 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110620.tar.gz'>miniupnpd-1.5.20110620.tar.gz</a></td>
1875 <td class="filesize">123221</td>
1876 <td class="filedate">20/06/2011 14:11:11 +0000</td>
1877 <td class="comment">MiniUPnP daemon source code</td>
1878 <td></td>
1879</tr>
1880<tr>
1881 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110618.tar.gz'>miniupnpd-1.5.20110618.tar.gz</a></td>
1882 <td class="filesize">123176</td>
1883 <td class="filedate">17/06/2011 23:29:18 +0000</td>
1884 <td class="comment">MiniUPnP daemon source code</td>
1885 <td></td>
1886</tr>
1887<tr>
1888 <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110618.tar.gz'>miniupnpc-1.5.20110618.tar.gz</a></td>
1889 <td class="filesize">66401</td>
1890 <td class="filedate">17/06/2011 23:29:17 +0000</td>
1891 <td class="comment">MiniUPnP client source code</td>
1892 <td></td>
1893</tr>
1894<tr>
1895 <td class="filename"><a href='download.php?file=libnatpmp-20110618.tar.gz'>libnatpmp-20110618.tar.gz</a></td>
1896 <td class="filesize">17901</td>
1897 <td class="filedate">17/06/2011 23:29:16 +0000</td>
1898 <td class="comment">libnatpmp source code</td>
1899 <td></td>
1900</tr>
1901<tr>
1902 <td class="filename"><a href='download.php?file=minissdpd-1.0.20110618.tar.gz'>minissdpd-1.0.20110618.tar.gz</a></td>
1903 <td class="filesize">15193</td>
1904 <td class="filedate">17/06/2011 23:29:16 +0000</td>
1905 <td class="comment">MiniSSDPd source code</td>
1906 <td></td>
1907</tr>
1908<tr>
1909 <td class="filename" colspan="2"><a href='download.php?file=minidlna_cvs20110529_minissdp1.patch'>minidlna_cvs20110529_minissdp1.patch</a></td>
1910 <td class="filedate">29/05/2011 21:19:09 +0000</td>
1911 <td class="comment">Patch for MiniDLNA to use miniSSDPD</td>
1912 <td></td>
1913</tr>
1914<tr>
1915 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110528.tar.gz'>miniupnpd-1.5.20110528.tar.gz</a></td>
1916 <td class="filesize">121985</td>
1917 <td class="filedate">28/05/2011 09:39:04 +0000</td>
1918 <td class="comment">MiniUPnP daemon source code</td>
1919 <td></td>
1920</tr>
1921<tr>
1922 <td class="filename"><a href='download.php?file=minidlna_1.0.19_minissdp1.patch'>minidlna_1.0.19_minissdp1.patch</a></td>
1923 <td class="filesize">9080</td>
1924 <td class="filedate">27/05/2011 09:55:04 +0000</td>
1925 <td class="comment">Patch for MiniDLNA to use miniSSDPD</td>
1926 <td></td>
1927</tr>
1928<tr>
1929 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110527.tar.gz'>miniupnpd-1.5.20110527.tar.gz</a></td>
1930 <td class="filesize">120896</td>
1931 <td class="filedate">27/05/2011 08:28:35 +0000</td>
1932 <td class="comment">MiniUPnP daemon source code</td>
1933 <td></td>
1934</tr>
1935<tr>
1936 <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110527.tar.gz'>miniupnpc-1.5.20110527.tar.gz</a></td>
1937 <td class="filesize">66279</td>
1938 <td class="filedate">27/05/2011 08:28:34 +0000</td>
1939 <td class="comment">MiniUPnP client source code</td>
1940 <td></td>
1941</tr>
1942<tr>
1943 <td class="filename"><a href='download.php?file=libnatpmp-20110527.tar.gz'>libnatpmp-20110527.tar.gz</a></td>
1944 <td class="filesize">17627</td>
1945 <td class="filedate">27/05/2011 08:28:33 +0000</td>
1946 <td class="comment">libnatpmp source code</td>
1947 <td></td>
1948</tr>
1949<tr>
1950 <td class="filename"><a href='download.php?file=minissdpd-1.0.20110523.tar.gz'>minissdpd-1.0.20110523.tar.gz</a></td>
1951 <td class="filesize">15024</td>
1952 <td class="filedate">23/05/2011 12:55:31 +0000</td>
1953 <td class="comment">MiniSSDPd source code</td>
1954 <td></td>
1955</tr>
1956<tr>
1957 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110520.tar.gz'>miniupnpd-1.5.20110520.tar.gz</a></td>
1958 <td class="filesize">119227</td>
1959 <td class="filedate">20/05/2011 18:00:41 +0000</td>
1960 <td class="comment">MiniUPnP daemon source code</td>
1961 <td></td>
1962</tr>
1963<tr>
1964 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110519.tar.gz'>miniupnpd-1.5.20110519.tar.gz</a></td>
1965 <td class="filesize">114735</td>
1966 <td class="filedate">18/05/2011 22:29:06 +0000</td>
1967 <td class="comment">MiniUPnP daemon source code</td>
1968 <td></td>
1969</tr>
1970<tr>
1971 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110516.tar.gz'>miniupnpd-1.5.20110516.tar.gz</a></td>
1972 <td class="filesize">113348</td>
1973 <td class="filedate">16/05/2011 09:32:51 +0000</td>
1974 <td class="comment">MiniUPnP daemon source code</td>
1975 <td></td>
1976</tr>
1977<tr>
1978 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110515.tar.gz'>miniupnpd-1.5.20110515.tar.gz</a></td>
1979 <td class="filesize">113135</td>
1980 <td class="filedate">15/05/2011 21:51:29 +0000</td>
1981 <td class="comment">MiniUPnP daemon source code</td>
1982 <td></td>
1983</tr>
1984<tr>
1985 <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110515.tar.gz'>miniupnpc-1.5.20110515.tar.gz</a></td>
1986 <td class="filesize">66112</td>
1987 <td class="filedate">15/05/2011 21:51:28 +0000</td>
1988 <td class="comment">MiniUPnP client source code</td>
1989 <td></td>
1990</tr>
1991<tr>
1992 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110513.tar.gz'>miniupnpd-1.5.20110513.tar.gz</a></td>
1993 <td class="filesize">111029</td>
1994 <td class="filedate">13/05/2011 14:03:12 +0000</td>
1995 <td class="comment">MiniUPnP daemon source code</td>
1996 <td></td>
1997</tr>
1998<tr>
1999 <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110506.tar.gz'>miniupnpc-1.5.20110506.tar.gz</a></td>
2000 <td class="filesize">65536</td>
2001 <td class="filedate">06/05/2011 16:35:38 +0000</td>
2002 <td class="comment">MiniUPnP client source code</td>
2003 <td></td>
2004</tr>
2005<tr>
2006 <td class="filename"><a href='download.php?file=miniupnpc-1.4-v6.20100505.zip'>miniupnpc-1.4-v6.20100505.zip</a></td>
2007 <td class="filesize">91833</td>
2008 <td class="filedate">18/04/2011 20:14:11 +0000</td>
2009 <td class="comment"></td>
2010 <td></td>
2011</tr>
2012<tr>
2013 <td class="filename"><a href='download.php?file=miniupnpd-1.4-v6.20100823.zip'>miniupnpd-1.4-v6.20100823.zip</a></td>
2014 <td class="filesize">222235</td>
2015 <td class="filedate">18/04/2011 20:14:07 +0000</td>
2016 <td class="comment"></td>
2017 <td></td>
2018</tr>
2019<tr>
2020 <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110418.tar.gz'>miniupnpc-1.5.20110418.tar.gz</a></td>
2021 <td class="filesize">61820</td>
2022 <td class="filedate">18/04/2011 20:09:22 +0000</td>
2023 <td class="comment">MiniUPnP client source code</td>
2024 <td></td>
2025</tr>
2026<tr>
2027 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20110418.zip'>upnpc-exe-win32-20110418.zip</a></td>
2028 <td class="filesize">94183</td>
2029 <td class="filedate">18/04/2011 17:53:26 +0000</td>
2030 <td class="comment">Windows executable</td>
2031 <td></td>
2032</tr>
2033<tr>
2034 <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110314.tar.gz'>miniupnpc-1.5.20110314.tar.gz</a></td>
2035 <td class="filesize">57210</td>
2036 <td class="filedate">14/03/2011 14:27:29 +0000</td>
2037 <td class="comment">MiniUPnP client source code</td>
2038 <td></td>
2039</tr>
2040<tr>
2041 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110309.tar.gz'>miniupnpd-1.5.20110309.tar.gz</a></td>
2042 <td class="filesize">100073</td>
2043 <td class="filedate">09/03/2011 15:36:12 +0000</td>
2044 <td class="comment">MiniUPnP daemon source code</td>
2045 <td></td>
2046</tr>
2047<tr>
2048 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110302.tar.gz'>miniupnpd-1.5.20110302.tar.gz</a></td>
2049 <td class="filesize">100756</td>
2050 <td class="filedate">02/03/2011 16:17:44 +0000</td>
2051 <td class="comment">MiniUPnP daemon source code</td>
2052 <td></td>
2053</tr>
2054<tr>
2055 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110221.tar.gz'>miniupnpd-1.5.20110221.tar.gz</a></td>
2056 <td class="filesize">100092</td>
2057 <td class="filedate">20/02/2011 23:48:17 +0000</td>
2058 <td class="comment">MiniUPnP daemon source code</td>
2059 <td></td>
2060</tr>
2061<tr>
2062 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20110215.zip'>upnpc-exe-win32-20110215.zip</a></td>
2063 <td class="filesize">55409</td>
2064 <td class="filedate">15/02/2011 23:05:00 +0000</td>
2065 <td class="comment">Windows executable</td>
2066 <td></td>
2067</tr>
2068<tr>
2069 <td class="filename"><a href='download.php?file=miniupnpc-1.5.20110215.tar.gz'>miniupnpc-1.5.20110215.tar.gz</a></td>
2070 <td class="filesize">54880</td>
2071 <td class="filedate">15/02/2011 11:16:04 +0000</td>
2072 <td class="comment">MiniUPnP client source code</td>
2073 <td></td>
2074</tr>
2075<tr>
2076 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110214.tar.gz'>miniupnpd-1.5.20110214.tar.gz</a></td>
2077 <td class="filesize">99629</td>
2078 <td class="filedate">14/02/2011 18:00:43 +0000</td>
2079 <td class="comment">MiniUPnP daemon source code</td>
2080 <td></td>
2081</tr>
2082<tr>
2083 <td class="filename"><a href='download.php?file=minidlna_1.0.18_minissdp1.patch'>minidlna_1.0.18_minissdp1.patch</a></td>
2084 <td class="filesize">9747</td>
2085 <td class="filedate">02/02/2011 15:12:19 +0000</td>
2086 <td class="comment">Patch for MiniDLNA to use miniSSDPD</td>
2087 <td></td>
2088</tr>
2089<tr>
2090 <td class="filename"><a href='download.php?file=miniupnpd-1.5.20110127.tar.gz'>miniupnpd-1.5.20110127.tar.gz</a></td>
2091 <td class="filesize">97421</td>
2092 <td class="filedate">27/01/2011 17:51:25 +0000</td>
2093 <td class="comment">MiniUPnP daemon source code</td>
2094 <td></td>
2095</tr>
2096<tr>
2097 <td class="filename"><a href='download.php?file=miniupnpd-1.5.tar.gz'>miniupnpd-1.5.tar.gz</a></td>
2098 <td class="filesize">98993</td>
2099 <td class="filedate">04/01/2011 09:45:10 +0000</td>
2100 <td class="comment">MiniUPnP daemon release source code</td>
2101 <td></td>
2102</tr>
2103<tr>
2104 <td class="filename"><a href='download.php?file=miniupnpc-1.5.tar.gz'>miniupnpc-1.5.tar.gz</a></td>
2105 <td class="filesize">53309</td>
2106 <td class="filedate">04/01/2011 09:45:06 +0000</td>
2107 <td class="comment">MiniUPnP client release source code</td>
2108 <td></td>
2109</tr>
2110<tr>
2111 <td class="filename"><a href='download.php?file=libnatpmp-20110103.tar.gz'>libnatpmp-20110103.tar.gz</a></td>
2112 <td class="filesize">17529</td>
2113 <td class="filedate">03/01/2011 17:33:16 +0000</td>
2114 <td class="comment">libnatpmp source code</td>
2115 <td></td>
2116</tr>
2117<tr>
2118 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20101221.tar.gz'>miniupnpc-1.4.20101221.tar.gz</a></td>
2119 <td class="filesize">52342</td>
2120 <td class="filedate">21/12/2010 16:15:38 +0000</td>
2121 <td class="comment">MiniUPnP client source code</td>
2122 <td></td>
2123</tr>
2124<tr>
2125 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20101213.zip'>upnpc-exe-win32-20101213.zip</a></td>
2126 <td class="filesize">52359</td>
2127 <td class="filedate">12/12/2010 23:44:01 +0000</td>
2128 <td class="comment">Windows executable</td>
2129 <td></td>
2130</tr>
2131<tr>
2132 <td class="filename"><a href='download.php?file=libnatpmp-20101211.tar.gz'>libnatpmp-20101211.tar.gz</a></td>
2133 <td class="filesize">17324</td>
2134 <td class="filedate">11/12/2010 17:20:36 +0000</td>
2135 <td class="comment">libnatpmp source code</td>
2136 <td></td>
2137</tr>
2138<tr>
2139 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20101209.tar.gz'>miniupnpc-1.4.20101209.tar.gz</a></td>
2140 <td class="filesize">51900</td>
2141 <td class="filedate">09/12/2010 16:17:30 +0000</td>
2142 <td class="comment">MiniUPnP client source code</td>
2143 <td></td>
2144</tr>
2145<tr>
2146 <td class="filename"><a href='download.php?file=miniupnpd-1.4.20100921.tar.gz'>miniupnpd-1.4.20100921.tar.gz</a></td>
2147 <td class="filesize">95483</td>
2148 <td class="filedate">21/09/2010 15:50:00 +0000</td>
2149 <td class="comment">MiniUPnP daemon source code</td>
2150 <td></td>
2151</tr>
2152<tr>
2153 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20100825.zip'>upnpc-exe-win32-20100825.zip</a></td>
2154 <td class="filesize">50636</td>
2155 <td class="filedate">25/08/2010 08:42:59 +0000</td>
2156 <td class="comment">Windows executable</td>
2157 <td></td>
2158</tr>
2159<tr>
2160 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100609.tar.gz'>miniupnpc-1.4.20100609.tar.gz</a></td>
2161 <td class="filesize">50390</td>
2162 <td class="filedate">09/06/2010 11:03:11 +0000</td>
2163 <td class="comment">MiniUPnP client source code</td>
2164 <td></td>
2165</tr>
2166<tr>
2167 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20100513.zip'>upnpc-exe-win32-20100513.zip</a></td>
2168 <td class="filesize">50950</td>
2169 <td class="filedate">13/05/2010 16:54:33 +0000</td>
2170 <td class="comment">Windows executable</td>
2171 <td></td>
2172</tr>
2173<tr>
2174 <td class="filename"><a href='download.php?file=miniupnpd-1.4.20100511.tar.gz'>miniupnpd-1.4.20100511.tar.gz</a></td>
2175 <td class="filesize">93281</td>
2176 <td class="filedate">11/05/2010 16:22:33 +0000</td>
2177 <td class="comment">MiniUPnP daemon source code</td>
2178 <td></td>
2179</tr>
2180<tr>
2181 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20100418.zip'>upnpc-exe-win32-20100418.zip</a></td>
2182 <td class="filesize">40758</td>
2183 <td class="filedate">17/04/2010 23:00:37 +0000</td>
2184 <td class="comment">Windows executable</td>
2185 <td></td>
2186</tr>
2187<tr>
2188 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100418.tar.gz'>miniupnpc-1.4.20100418.tar.gz</a></td>
2189 <td class="filesize">50245</td>
2190 <td class="filedate">17/04/2010 22:18:31 +0000</td>
2191 <td class="comment">MiniUPnP client source code</td>
2192 <td></td>
2193</tr>
2194<tr>
2195 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100412.tar.gz'>miniupnpc-1.4.20100412.tar.gz</a></td>
2196 <td class="filesize">50145</td>
2197 <td class="filedate">12/04/2010 20:42:53 +0000</td>
2198 <td class="comment">MiniUPnP client source code</td>
2199 <td></td>
2200</tr>
2201<tr>
2202 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100407.tar.gz'>miniupnpc-1.4.20100407.tar.gz</a></td>
2203 <td class="filesize">49756</td>
2204 <td class="filedate">07/04/2010 10:05:08 +0000</td>
2205 <td class="comment">MiniUPnP client source code</td>
2206 <td></td>
2207</tr>
2208<tr>
2209 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100405.tar.gz'>miniupnpc-1.4.20100405.tar.gz</a></td>
2210 <td class="filesize">49549</td>
2211 <td class="filedate">05/04/2010 14:34:38 +0000</td>
2212 <td class="comment">MiniUPnP client source code</td>
2213 <td></td>
2214</tr>
2215<tr>
2216 <td class="filename"><a href='download.php?file=miniupnpd-1.4.20100308.tar.gz'>miniupnpd-1.4.20100308.tar.gz</a></td>
2217 <td class="filesize">92889</td>
2218 <td class="filedate">08/03/2010 17:18:00 +0000</td>
2219 <td class="comment">MiniUPnP daemon source code</td>
2220 <td></td>
2221</tr>
2222<tr>
2223 <td class="filename"><a href='download.php?file=libnatpmp-20100202.tar.gz'>libnatpmp-20100202.tar.gz</a></td>
2224 <td class="filesize">17231</td>
2225 <td class="filedate">02/02/2010 18:41:13 +0000</td>
2226 <td class="comment">libnatpmp source code</td>
2227 <td></td>
2228</tr>
2229<tr>
2230 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100202.tar.gz'>miniupnpc-1.4.20100202.tar.gz</a></td>
2231 <td class="filesize">46710</td>
2232 <td class="filedate">02/02/2010 18:41:13 +0000</td>
2233 <td class="comment">MiniUPnP client source code</td>
2234 <td></td>
2235</tr>
2236<tr>
2237 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20100106.tar.gz'>miniupnpc-1.4.20100106.tar.gz</a></td>
2238 <td class="filesize">46659</td>
2239 <td class="filedate">06/01/2010 10:08:21 +0000</td>
2240 <td class="comment">MiniUPnP client source code</td>
2241 <td></td>
2242</tr>
2243<tr>
2244 <td class="filename"><a href='download.php?file=miniupnpd-1.4.20091222.tar.gz'>miniupnpd-1.4.20091222.tar.gz</a></td>
2245 <td class="filesize">90993</td>
2246 <td class="filedate">22/12/2009 17:23:48 +0000</td>
2247 <td class="comment">MiniUPnP daemon source code</td>
2248 <td></td>
2249</tr>
2250<tr>
2251 <td class="filename"><a href='download.php?file=libnatpmp-20091219.tar.gz'>libnatpmp-20091219.tar.gz</a></td>
2252 <td class="filesize">16839</td>
2253 <td class="filedate">19/12/2009 14:35:22 +0000</td>
2254 <td class="comment">libnatpmp source code</td>
2255 <td></td>
2256</tr>
2257<tr>
2258 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20091213.tar.gz'>miniupnpc-1.4.20091213.tar.gz</a></td>
2259 <td class="filesize">46510</td>
2260 <td class="filedate">12/12/2009 23:05:40 +0000</td>
2261 <td class="comment">MiniUPnP client source code</td>
2262 <td></td>
2263</tr>
2264<tr>
2265 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20091211.tar.gz'>miniupnpc-1.4.20091211.tar.gz</a></td>
2266 <td class="filesize">45852</td>
2267 <td class="filedate">11/12/2009 16:43:01 +0000</td>
2268 <td class="comment">MiniUPnP client source code</td>
2269 <td></td>
2270</tr>
2271<tr>
2272 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20091210.zip'>upnpc-exe-win32-20091210.zip</a></td>
2273 <td class="filesize">38666</td>
2274 <td class="filedate">10/12/2009 18:50:27 +0000</td>
2275 <td class="comment">Windows executable</td>
2276 <td></td>
2277</tr>
2278<tr>
2279 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20091208.tar.gz'>miniupnpc-1.4.20091208.tar.gz</a></td>
2280 <td class="filesize">43392</td>
2281 <td class="filedate">08/12/2009 10:58:26 +0000</td>
2282 <td class="comment">MiniUPnP client source code</td>
2283 <td></td>
2284</tr>
2285<tr>
2286 <td class="filename"><a href='download.php?file=miniupnpc-1.4.20091203.tar.gz'>miniupnpc-1.4.20091203.tar.gz</a></td>
2287 <td class="filesize">42040</td>
2288 <td class="filedate">03/12/2009 13:56:28 +0000</td>
2289 <td class="comment">MiniUPnP client source code</td>
2290 <td></td>
2291</tr>
2292<tr>
2293 <td class="filename"><a href='download.php?file=miniupnpd-1.4.20091106.tar.gz'>miniupnpd-1.4.20091106.tar.gz</a></td>
2294 <td class="filesize">90787</td>
2295 <td class="filedate">06/11/2009 21:18:50 +0000</td>
2296 <td class="comment">MiniUPnP daemon source code</td>
2297 <td></td>
2298</tr>
2299<tr>
2300 <td class="filename"><a href='download.php?file=miniupnpd-1.4.tar.gz'>miniupnpd-1.4.tar.gz</a></td>
2301 <td class="filesize">90071</td>
2302 <td class="filedate">30/10/2009 09:20:05 +0000</td>
2303 <td class="comment">MiniUPnP daemon release source code</td>
2304 <td></td>
2305</tr>
2306<tr>
2307 <td class="filename"><a href='download.php?file=miniupnpc-1.4.tar.gz'>miniupnpc-1.4.tar.gz</a></td>
2308 <td class="filesize">41790</td>
2309 <td class="filedate">30/10/2009 09:20:04 +0000</td>
2310 <td class="comment">MiniUPnP client release source code</td>
2311 <td></td>
2312</tr>
2313<tr>
2314 <td class="filename"><a href='download.php?file=miniupnpc-20091016.tar.gz'>miniupnpc-20091016.tar.gz</a></td>
2315 <td class="filesize">41792</td>
2316 <td class="filedate">16/10/2009 09:04:35 +0000</td>
2317 <td class="comment">MiniUPnP client source code</td>
2318 <td></td>
2319</tr>
2320<tr>
2321 <td class="filename"><a href='download.php?file=miniupnpd-20091010.tar.gz'>miniupnpd-20091010.tar.gz</a></td>
2322 <td class="filesize">90043</td>
2323 <td class="filedate">10/10/2009 19:21:30 +0000</td>
2324 <td class="comment">MiniUPnP daemon source code</td>
2325 <td></td>
2326</tr>
2327<tr>
2328 <td class="filename"><a href='download.php?file=miniupnpc-20091010.tar.gz'>miniupnpc-20091010.tar.gz</a></td>
2329 <td class="filesize">41671</td>
2330 <td class="filedate">10/10/2009 19:21:28 +0000</td>
2331 <td class="comment">MiniUPnP client source code</td>
2332 <td></td>
2333</tr>
2334<tr>
2335 <td class="filename"><a href='download.php?file=miniupnpd-20090921.tar.gz'>miniupnpd-20090921.tar.gz</a></td>
2336 <td class="filesize">89476</td>
2337 <td class="filedate">21/09/2009 13:00:04 +0000</td>
2338 <td class="comment">MiniUPnP daemon source code</td>
2339 <td></td>
2340</tr>
2341<tr>
2342 <td class="filename"><a href='download.php?file=miniupnpc-20090921.tar.gz'>miniupnpc-20090921.tar.gz</a></td>
2343 <td class="filesize">41630</td>
2344 <td class="filedate">21/09/2009 13:00:03 +0000</td>
2345 <td class="comment">MiniUPnP client source code</td>
2346 <td></td>
2347</tr>
2348<tr>
2349 <td class="filename"><a href='download.php?file=miniupnpd-20090904.tar.gz'>miniupnpd-20090904.tar.gz</a></td>
2350 <td class="filesize">89344</td>
2351 <td class="filedate">04/09/2009 16:24:26 +0000</td>
2352 <td class="comment">MiniUPnP daemon source code</td>
2353 <td></td>
2354</tr>
2355<tr>
2356 <td class="filename"><a href='download.php?file=miniupnpd-20090820.tar.gz'>miniupnpd-20090820.tar.gz</a></td>
2357 <td class="filesize">89149</td>
2358 <td class="filedate">20/08/2009 09:35:58 +0000</td>
2359 <td class="comment">MiniUPnP daemon source code</td>
2360 <td></td>
2361</tr>
2362<tr>
2363 <td class="filename"><a href='download.php?file=miniupnpc-20090807.tar.gz'>miniupnpc-20090807.tar.gz</a></td>
2364 <td class="filesize">41288</td>
2365 <td class="filedate">07/08/2009 14:46:11 +0000</td>
2366 <td class="comment">MiniUPnP client source code</td>
2367 <td></td>
2368</tr>
2369<tr>
2370 <td class="filename"><a href='download.php?file=miniupnpc-20090729.tar.gz'>miniupnpc-20090729.tar.gz</a></td>
2371 <td class="filesize">40588</td>
2372 <td class="filedate">29/07/2009 08:47:43 +0000</td>
2373 <td class="comment">MiniUPnP client source code</td>
2374 <td></td>
2375</tr>
2376<tr>
2377 <td class="filename"><a href='download.php?file=xchat-upnp20061022.patch'>xchat-upnp20061022.patch</a></td>
2378 <td class="filesize">10258</td>
2379 <td class="filedate">17/07/2009 15:49:46 +0000</td>
2380 <td class="comment">Patch to add UPnP capabilities to xchat</td>
2381 <td></td>
2382</tr>
2383<tr>
2384 <td class="filename"><a href='download.php?file=miniupnpc-20090713.tar.gz'>miniupnpc-20090713.tar.gz</a></td>
2385 <td class="filesize">40206</td>
2386 <td class="filedate">13/07/2009 08:53:49 +0000</td>
2387 <td class="comment">MiniUPnP client source code</td>
2388 <td></td>
2389</tr>
2390<tr>
2391 <td class="filename"><a href='download.php?file=libnatpmp-20090713.tar.gz'>libnatpmp-20090713.tar.gz</a></td>
2392 <td class="filesize">14262</td>
2393 <td class="filedate">13/07/2009 08:53:49 +0000</td>
2394 <td class="comment">libnatpmp source code</td>
2395 <td></td>
2396</tr>
2397<tr>
2398 <td class="filename"><a href='download.php?file=miniupnpd-20090605.tar.gz'>miniupnpd-20090605.tar.gz</a></td>
2399 <td class="filesize">83774</td>
2400 <td class="filedate">04/06/2009 23:32:20 +0000</td>
2401 <td class="comment">MiniUPnP daemon source code</td>
2402 <td></td>
2403</tr>
2404<tr>
2405 <td class="filename"><a href='download.php?file=miniupnpc-20090605.tar.gz'>miniupnpc-20090605.tar.gz</a></td>
2406 <td class="filesize">40077</td>
2407 <td class="filedate">04/06/2009 23:32:16 +0000</td>
2408 <td class="comment">MiniUPnP client source code</td>
2409 <td></td>
2410</tr>
2411<tr>
2412 <td class="filename"><a href='download.php?file=libnatpmp-20090605.tar.gz'>libnatpmp-20090605.tar.gz</a></td>
2413 <td class="filesize">13817</td>
2414 <td class="filedate">04/06/2009 23:32:15 +0000</td>
2415 <td class="comment">libnatpmp source code</td>
2416 <td></td>
2417</tr>
2418<tr>
2419 <td class="filename"><a href='download.php?file=miniupnpd-20090516.tar.gz'>miniupnpd-20090516.tar.gz</a></td>
2420 <td class="filesize">83689</td>
2421 <td class="filedate">16/05/2009 08:47:31 +0000</td>
2422 <td class="comment">MiniUPnP daemon source code</td>
2423 <td></td>
2424</tr>
2425<tr>
2426 <td class="filename"><a href='download.php?file=miniupnpc-1.3.tar.gz'>miniupnpc-1.3.tar.gz</a></td>
2427 <td class="filesize">40058</td>
2428 <td class="filedate">17/04/2009 21:27:55 +0000</td>
2429 <td class="comment">MiniUPnP client release source code</td>
2430 <td></td>
2431</tr>
2432<tr>
2433 <td class="filename"><a href='download.php?file=miniupnpd-1.3.tar.gz'>miniupnpd-1.3.tar.gz</a></td>
2434 <td class="filesize">83464</td>
2435 <td class="filedate">17/04/2009 20:11:21 +0000</td>
2436 <td class="comment">MiniUPnP daemon release source code</td>
2437 <td></td>
2438</tr>
2439<tr>
2440 <td class="filename"><a href='download.php?file=libnatpmp-20090310.tar.gz'>libnatpmp-20090310.tar.gz</a></td>
2441 <td class="filesize">11847</td>
2442 <td class="filedate">10/03/2009 10:19:45 +0000</td>
2443 <td class="comment">libnatpmp source code</td>
2444 <td></td>
2445</tr>
2446<tr>
2447 <td class="filename"><a href='download.php?file=miniupnpd-20090214.tar.gz'>miniupnpd-20090214.tar.gz</a></td>
2448 <td class="filesize">82921</td>
2449 <td class="filedate">14/02/2009 11:27:03 +0000</td>
2450 <td class="comment">MiniUPnP daemon source code</td>
2451 <td></td>
2452</tr>
2453<tr>
2454 <td class="filename"><a href='download.php?file=miniupnpd-20090213.tar.gz'>miniupnpd-20090213.tar.gz</a></td>
2455 <td class="filesize">82594</td>
2456 <td class="filedate">13/02/2009 19:48:01 +0000</td>
2457 <td class="comment">MiniUPnP daemon source code</td>
2458 <td></td>
2459</tr>
2460<tr>
2461 <td class="filename"><a href='download.php?file=libnatpmp-20090129.tar.gz'>libnatpmp-20090129.tar.gz</a></td>
2462 <td class="filesize">11748</td>
2463 <td class="filedate">29/01/2009 21:50:31 +0000</td>
2464 <td class="comment">libnatpmp source code</td>
2465 <td></td>
2466</tr>
2467<tr>
2468 <td class="filename"><a href='download.php?file=miniupnpc-20090129.tar.gz'>miniupnpc-20090129.tar.gz</a></td>
2469 <td class="filesize">39976</td>
2470 <td class="filedate">29/01/2009 21:50:30 +0000</td>
2471 <td class="comment">MiniUPnP client source code</td>
2472 <td></td>
2473</tr>
2474<tr>
2475 <td class="filename"><a href='download.php?file=miniupnpd-20090129.tar.gz'>miniupnpd-20090129.tar.gz</a></td>
2476 <td class="filesize">82487</td>
2477 <td class="filedate">29/01/2009 21:50:27 +0000</td>
2478 <td class="comment">MiniUPnP daemon source code</td>
2479 <td></td>
2480</tr>
2481<tr>
2482 <td class="filename"><a href='download.php?file=miniupnpd-20081009.tar.gz'>miniupnpd-20081009.tar.gz</a></td>
2483 <td class="filesize">81732</td>
2484 <td class="filedate">09/10/2008 12:53:02 +0000</td>
2485 <td class="comment">MiniUPnP daemon source code</td>
2486 <td></td>
2487</tr>
2488<tr>
2489 <td class="filename"><a href='download.php?file=minissdpd-1.0.tar.gz'>minissdpd-1.0.tar.gz</a></td>
2490 <td class="filesize">12996</td>
2491 <td class="filedate">07/10/2008 14:03:49 +0000</td>
2492 <td class="comment">MiniSSDPd release source code</td>
2493 <td></td>
2494</tr>
2495<tr>
2496 <td class="filename"><a href='download.php?file=miniupnpc-1.2.tar.gz'>miniupnpc-1.2.tar.gz</a></td>
2497 <td class="filesize">38787</td>
2498 <td class="filedate">07/10/2008 14:03:47 +0000</td>
2499 <td class="comment">MiniUPnP client release source code</td>
2500 <td></td>
2501</tr>
2502<tr>
2503 <td class="filename"><a href='download.php?file=miniupnpd-1.2.tar.gz'>miniupnpd-1.2.tar.gz</a></td>
2504 <td class="filesize">81025</td>
2505 <td class="filedate">07/10/2008 14:03:45 +0000</td>
2506 <td class="comment">MiniUPnP daemon release source code</td>
2507 <td></td>
2508</tr>
2509<tr>
2510 <td class="filename"><a href='download.php?file=miniupnpd-20081006.tar.gz'>miniupnpd-20081006.tar.gz</a></td>
2511 <td class="filesize">80510</td>
2512 <td class="filedate">06/10/2008 15:50:34 +0000</td>
2513 <td class="comment">MiniUPnP daemon source code</td>
2514 <td></td>
2515</tr>
2516<tr>
2517 <td class="filename"><a href='download.php?file=minissdpd-20081006.tar.gz'>minissdpd-20081006.tar.gz</a></td>
2518 <td class="filesize">12230</td>
2519 <td class="filedate">06/10/2008 15:50:33 +0000</td>
2520 <td class="comment">MiniSSDPd source code</td>
2521 <td></td>
2522</tr>
2523<tr>
2524 <td class="filename"><a href='download.php?file=libnatpmp-20081006.tar.gz'>libnatpmp-20081006.tar.gz</a></td>
2525 <td class="filesize">11710</td>
2526 <td class="filedate">06/10/2008 15:50:31 +0000</td>
2527 <td class="comment">libnatpmp source code</td>
2528 <td></td>
2529</tr>
2530<tr>
2531 <td class="filename" colspan="2"><a href='download.php?file=mediatomb_minissdp-20081006.patch'>mediatomb_minissdp-20081006.patch</a></td>
2532 <td class="filedate">06/10/2008 15:48:18 +0000</td>
2533 <td class="comment"></td>
2534 <td></td>
2535</tr>
2536<tr>
2537 <td class="filename"><a href='download.php?file=miniupnpc-20081002.tar.gz'>miniupnpc-20081002.tar.gz</a></td>
2538 <td class="filesize">38291</td>
2539 <td class="filedate">02/10/2008 09:20:18 +0000</td>
2540 <td class="comment">MiniUPnP client source code</td>
2541 <td></td>
2542</tr>
2543<tr>
2544 <td class="filename"><a href='download.php?file=miniupnpd-20081001.tar.gz'>miniupnpd-20081001.tar.gz</a></td>
2545 <td class="filesize">79696</td>
2546 <td class="filedate">01/10/2008 13:11:20 +0000</td>
2547 <td class="comment">MiniUPnP daemon source code</td>
2548 <td></td>
2549</tr>
2550<tr>
2551 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20080925.zip'>upnpc-exe-win32-20080925.zip</a></td>
2552 <td class="filesize">36602</td>
2553 <td class="filedate">25/09/2008 06:59:33 +0000</td>
2554 <td class="comment">Windows executable</td>
2555 <td></td>
2556</tr>
2557<tr>
2558 <td class="filename"><a href='download.php?file=miniupnpd-20080710.tar.gz'>miniupnpd-20080710.tar.gz</a></td>
2559 <td class="filesize">78898</td>
2560 <td class="filedate">10/07/2008 09:38:41 +0000</td>
2561 <td class="comment">MiniUPnP daemon source code</td>
2562 <td></td>
2563</tr>
2564<tr>
2565 <td class="filename"><a href='download.php?file=libnatpmp-20080707.tar.gz'>libnatpmp-20080707.tar.gz</a></td>
2566 <td class="filesize">11679</td>
2567 <td class="filedate">06/07/2008 22:05:23 +0000</td>
2568 <td class="comment">libnatpmp source code</td>
2569 <td></td>
2570</tr>
2571<tr>
2572 <td class="filename"><a href='download.php?file=miniupnpc-1.1.tar.gz'>miniupnpc-1.1.tar.gz</a></td>
2573 <td class="filesize">38235</td>
2574 <td class="filedate">04/07/2008 16:45:24 +0000</td>
2575 <td class="comment">MiniUPnP client release source code</td>
2576 <td></td>
2577</tr>
2578<tr>
2579 <td class="filename"><a href='download.php?file=miniupnpc-20080703.tar.gz'>miniupnpc-20080703.tar.gz</a></td>
2580 <td class="filesize">38204</td>
2581 <td class="filedate">03/07/2008 15:47:37 +0000</td>
2582 <td class="comment">MiniUPnP client source code</td>
2583 <td></td>
2584</tr>
2585<tr>
2586 <td class="filename"><a href='download.php?file=libnatpmp-20080703.tar.gz'>libnatpmp-20080703.tar.gz</a></td>
2587 <td class="filesize">11570</td>
2588 <td class="filedate">03/07/2008 15:47:25 +0000</td>
2589 <td class="comment">libnatpmp source code</td>
2590 <td></td>
2591</tr>
2592<tr>
2593 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20080703.zip'>upnpc-exe-win32-20080703.zip</a></td>
2594 <td class="filesize">36137</td>
2595 <td class="filedate">02/07/2008 23:35:14 +0000</td>
2596 <td class="comment">Windows executable</td>
2597 <td></td>
2598</tr>
2599<tr>
2600 <td class="filename"><a href='download.php?file=libnatpmp-20080702.tar.gz'>libnatpmp-20080702.tar.gz</a></td>
2601 <td class="filesize">8873</td>
2602 <td class="filedate">02/07/2008 17:32:35 +0000</td>
2603 <td class="comment">libnatpmp source code</td>
2604 <td></td>
2605</tr>
2606<tr>
2607 <td class="filename"><a href='download.php?file=libnatpmp-20080630.tar.gz'>libnatpmp-20080630.tar.gz</a></td>
2608 <td class="filesize">8864</td>
2609 <td class="filedate">30/06/2008 14:20:16 +0000</td>
2610 <td class="comment">libnatpmp source code</td>
2611 <td></td>
2612</tr>
2613<tr>
2614 <td class="filename"><a href='download.php?file=libnatpmp-20080529.tar.gz'>libnatpmp-20080529.tar.gz</a></td>
2615 <td class="filesize">7397</td>
2616 <td class="filedate">29/05/2008 09:06:25 +0000</td>
2617 <td class="comment">libnatpmp source code</td>
2618 <td></td>
2619</tr>
2620<tr>
2621 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20080514.zip'>upnpc-exe-win32-20080514.zip</a></td>
2622 <td class="filesize">14227</td>
2623 <td class="filedate">14/05/2008 20:23:19 +0000</td>
2624 <td class="comment">Windows executable</td>
2625 <td></td>
2626</tr>
2627<tr>
2628 <td class="filename"><a href='download.php?file=libnatpmp-20080428.tar.gz'>libnatpmp-20080428.tar.gz</a></td>
2629 <td class="filesize">7295</td>
2630 <td class="filedate">28/04/2008 03:09:14 +0000</td>
2631 <td class="comment">libnatpmp source code</td>
2632 <td></td>
2633</tr>
2634<tr>
2635 <td class="filename"><a href='download.php?file=miniupnpd-20080427.tar.gz'>miniupnpd-20080427.tar.gz</a></td>
2636 <td class="filesize">78765</td>
2637 <td class="filedate">27/04/2008 18:16:36 +0000</td>
2638 <td class="comment">MiniUPnP daemon source code</td>
2639 <td></td>
2640</tr>
2641<tr>
2642 <td class="filename"><a href='download.php?file=miniupnpc-20080427.tar.gz'>miniupnpc-20080427.tar.gz</a></td>
2643 <td class="filesize">37610</td>
2644 <td class="filedate">27/04/2008 18:16:35 +0000</td>
2645 <td class="comment">MiniUPnP client source code</td>
2646 <td></td>
2647</tr>
2648<tr>
2649 <td class="filename"><a href='download.php?file=miniupnpd-1.1.tar.gz'>miniupnpd-1.1.tar.gz</a></td>
2650 <td class="filesize">78594</td>
2651 <td class="filedate">25/04/2008 17:38:05 +0000</td>
2652 <td class="comment">MiniUPnP daemon release source code</td>
2653 <td></td>
2654</tr>
2655<tr>
2656 <td class="filename"><a href='download.php?file=miniupnpc-20080423.tar.gz'>miniupnpc-20080423.tar.gz</a></td>
2657 <td class="filesize">36818</td>
2658 <td class="filedate">23/04/2008 11:57:36 +0000</td>
2659 <td class="comment">MiniUPnP client source code</td>
2660 <td></td>
2661</tr>
2662<tr>
2663 <td class="filename"><a href='download.php?file=miniupnpd-20080308.tar.gz'>miniupnpd-20080308.tar.gz</a></td>
2664 <td class="filesize">75679</td>
2665 <td class="filedate">08/03/2008 11:13:29 +0000</td>
2666 <td class="comment">MiniUPnP daemon source code</td>
2667 <td></td>
2668</tr>
2669<tr>
2670 <td class="filename"><a href='download.php?file=miniupnpd-20080303.tar.gz'>miniupnpd-20080303.tar.gz</a></td>
2671 <td class="filesize">74202</td>
2672 <td class="filedate">03/03/2008 01:43:16 +0000</td>
2673 <td class="comment">MiniUPnP daemon source code</td>
2674 <td></td>
2675</tr>
2676<tr>
2677 <td class="filename"><a href='download.php?file=miniupnpd-20080224.tar.gz'>miniupnpd-20080224.tar.gz</a></td>
2678 <td class="filesize">72773</td>
2679 <td class="filedate">24/02/2008 11:23:17 +0000</td>
2680 <td class="comment">MiniUPnP daemon source code</td>
2681 <td></td>
2682</tr>
2683<tr>
2684 <td class="filename"><a href='download.php?file=miniupnpc-1.0.tar.gz'>miniupnpc-1.0.tar.gz</a></td>
2685 <td class="filesize">36223</td>
2686 <td class="filedate">21/02/2008 13:26:46 +0000</td>
2687 <td class="comment">MiniUPnP client release source code</td>
2688 <td></td>
2689</tr>
2690<tr>
2691 <td class="filename"><a href='download.php?file=miniupnpd-20080221.tar.gz'>miniupnpd-20080221.tar.gz</a></td>
2692 <td class="filesize">70823</td>
2693 <td class="filedate">21/02/2008 10:23:46 +0000</td>
2694 <td class="comment">MiniUPnP daemon source code</td>
2695 <td></td>
2696</tr>
2697<tr>
2698 <td class="filename"><a href='download.php?file=miniupnpc-20080217.tar.gz'>miniupnpc-20080217.tar.gz</a></td>
2699 <td class="filesize">35243</td>
2700 <td class="filedate">16/02/2008 23:47:59 +0000</td>
2701 <td class="comment">MiniUPnP client source code</td>
2702 <td></td>
2703</tr>
2704<tr>
2705 <td class="filename"><a href='download.php?file=miniupnpd-20080207.tar.gz'>miniupnpd-20080207.tar.gz</a></td>
2706 <td class="filesize">70647</td>
2707 <td class="filedate">07/02/2008 21:21:00 +0000</td>
2708 <td class="comment">MiniUPnP daemon source code</td>
2709 <td></td>
2710</tr>
2711<tr>
2712 <td class="filename"><a href='download.php?file=miniupnpc-20080203.tar.gz'>miniupnpc-20080203.tar.gz</a></td>
2713 <td class="filesize">34921</td>
2714 <td class="filedate">03/02/2008 22:28:11 +0000</td>
2715 <td class="comment">MiniUPnP client source code</td>
2716 <td></td>
2717</tr>
2718<tr>
2719 <td class="filename"><a href='download.php?file=miniupnpd-1.0.tar.gz'>miniupnpd-1.0.tar.gz</a></td>
2720 <td class="filesize">69427</td>
2721 <td class="filedate">27/01/2008 22:41:25 +0000</td>
2722 <td class="comment">MiniUPnP daemon release source code</td>
2723 <td></td>
2724</tr>
2725<tr>
2726 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20080118.zip'>upnpc-exe-win32-20080118.zip</a></td>
2727 <td class="filesize">13582</td>
2728 <td class="filedate">18/01/2008 11:42:16 +0000</td>
2729 <td class="comment">Windows executable</td>
2730 <td></td>
2731</tr>
2732<tr>
2733 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC13.tar.gz'>miniupnpd-1.0-RC13.tar.gz</a></td>
2734 <td class="filesize">67892</td>
2735 <td class="filedate">03/01/2008 16:50:21 +0000</td>
2736 <td class="comment">MiniUPnP daemon release source code</td>
2737 <td></td>
2738</tr>
2739<tr>
2740 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC13.tar.gz'>miniupnpc-1.0-RC13.tar.gz</a></td>
2741 <td class="filesize">34820</td>
2742 <td class="filedate">03/01/2008 16:50:20 +0000</td>
2743 <td class="comment">MiniUPnP client release source code</td>
2744 <td></td>
2745</tr>
2746<tr>
2747 <td class="filename"><a href='download.php?file=miniupnpd-20071220.tar.gz'>miniupnpd-20071220.tar.gz</a></td>
2748 <td class="filesize">67211</td>
2749 <td class="filedate">20/12/2007 12:08:34 +0000</td>
2750 <td class="comment">MiniUPnP daemon source code</td>
2751 <td></td>
2752</tr>
2753<tr>
2754 <td class="filename"><a href='download.php?file=miniupnpc-20071219.tar.gz'>miniupnpc-20071219.tar.gz</a></td>
2755 <td class="filesize">34290</td>
2756 <td class="filedate">19/12/2007 18:31:47 +0000</td>
2757 <td class="comment">MiniUPnP client source code</td>
2758 <td></td>
2759</tr>
2760<tr>
2761 <td class="filename"><a href='download.php?file=minissdpd-1.0-RC12.tar.gz'>minissdpd-1.0-RC12.tar.gz</a></td>
2762 <td class="filesize">9956</td>
2763 <td class="filedate">19/12/2007 18:30:12 +0000</td>
2764 <td class="comment">MiniSSDPd release source code</td>
2765 <td></td>
2766</tr>
2767<tr>
2768 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC12.tar.gz'>miniupnpd-1.0-RC12.tar.gz</a></td>
2769 <td class="filesize">66911</td>
2770 <td class="filedate">14/12/2007 17:39:20 +0000</td>
2771 <td class="comment">MiniUPnP daemon release source code</td>
2772 <td></td>
2773</tr>
2774<tr>
2775 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC12.tar.gz'>miniupnpc-1.0-RC12.tar.gz</a></td>
2776 <td class="filesize">32543</td>
2777 <td class="filedate">14/12/2007 17:39:19 +0000</td>
2778 <td class="comment">MiniUPnP client release source code</td>
2779 <td></td>
2780</tr>
2781<tr>
2782 <td class="filename"><a href='download.php?file=miniupnpc-20071213.tar.gz'>miniupnpc-20071213.tar.gz</a></td>
2783 <td class="filesize">32541</td>
2784 <td class="filedate">13/12/2007 17:09:51 +0000</td>
2785 <td class="comment">MiniUPnP client source code</td>
2786 <td></td>
2787</tr>
2788<tr>
2789 <td class="filename"><a href='download.php?file=miniupnpd-20071213.tar.gz'>miniupnpd-20071213.tar.gz</a></td>
2790 <td class="filesize">66826</td>
2791 <td class="filedate">13/12/2007 16:42:50 +0000</td>
2792 <td class="comment">MiniUPnP daemon source code</td>
2793 <td></td>
2794</tr>
2795<tr>
2796 <td class="filename"><a href='download.php?file=libnatpmp-20071213.tar.gz'>libnatpmp-20071213.tar.gz</a></td>
2797 <td class="filesize">5997</td>
2798 <td class="filedate">13/12/2007 14:56:30 +0000</td>
2799 <td class="comment">libnatpmp source code</td>
2800 <td></td>
2801</tr>
2802<tr>
2803 <td class="filename"><a href='download.php?file=libnatpmp-20071202.tar.gz'>libnatpmp-20071202.tar.gz</a></td>
2804 <td class="filesize">5664</td>
2805 <td class="filedate">02/12/2007 00:15:28 +0000</td>
2806 <td class="comment">libnatpmp source code</td>
2807 <td></td>
2808</tr>
2809<tr>
2810 <td class="filename"><a href='download.php?file=miniupnpd-20071103.tar.gz'>miniupnpd-20071103.tar.gz</a></td>
2811 <td class="filesize">65740</td>
2812 <td class="filedate">02/11/2007 23:58:38 +0000</td>
2813 <td class="comment">MiniUPnP daemon source code</td>
2814 <td></td>
2815</tr>
2816<tr>
2817 <td class="filename"><a href='download.php?file=miniupnpd-20071102.tar.gz'>miniupnpd-20071102.tar.gz</a></td>
2818 <td class="filesize">65733</td>
2819 <td class="filedate">02/11/2007 23:05:44 +0000</td>
2820 <td class="comment">MiniUPnP daemon source code</td>
2821 <td></td>
2822</tr>
2823<tr>
2824 <td class="filename"><a href='download.php?file=miniupnpc-20071103.tar.gz'>miniupnpc-20071103.tar.gz</a></td>
2825 <td class="filesize">32239</td>
2826 <td class="filedate">02/11/2007 23:05:34 +0000</td>
2827 <td class="comment">MiniUPnP client source code</td>
2828 <td></td>
2829</tr>
2830<tr>
2831 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC11.tar.gz'>miniupnpd-1.0-RC11.tar.gz</a></td>
2832 <td class="filesize">64828</td>
2833 <td class="filedate">25/10/2007 13:27:18 +0000</td>
2834 <td class="comment">MiniUPnP daemon release source code</td>
2835 <td></td>
2836</tr>
2837<tr>
2838 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC11.tar.gz'>miniupnpc-1.0-RC11.tar.gz</a></td>
2839 <td class="filesize">32161</td>
2840 <td class="filedate">25/10/2007 13:27:17 +0000</td>
2841 <td class="comment">MiniUPnP client release source code</td>
2842 <td></td>
2843</tr>
2844<tr>
2845 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20071025.zip'>upnpc-exe-win32-20071025.zip</a></td>
2846 <td class="filesize">12809</td>
2847 <td class="filedate">24/10/2007 23:15:55 +0000</td>
2848 <td class="comment">Windows executable</td>
2849 <td></td>
2850</tr>
2851<tr>
2852 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC10.tar.gz'>miniupnpd-1.0-RC10.tar.gz</a></td>
2853 <td class="filesize">62674</td>
2854 <td class="filedate">12/10/2007 08:38:33 +0000</td>
2855 <td class="comment">MiniUPnP daemon release source code</td>
2856 <td></td>
2857</tr>
2858<tr>
2859 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC10.tar.gz'>miniupnpc-1.0-RC10.tar.gz</a></td>
2860 <td class="filesize">31962</td>
2861 <td class="filedate">12/10/2007 08:38:31 +0000</td>
2862 <td class="comment">MiniUPnP client release source code</td>
2863 <td></td>
2864</tr>
2865<tr>
2866 <td class="filename"><a href='download.php?file=minissdpd-1.0-RC10.tar.gz'>minissdpd-1.0-RC10.tar.gz</a></td>
2867 <td class="filesize">9517</td>
2868 <td class="filedate">12/10/2007 08:38:30 +0000</td>
2869 <td class="comment">MiniSSDPd release source code</td>
2870 <td></td>
2871</tr>
2872<tr>
2873 <td class="filename"><a href='download.php?file=miniupnpc-20071003.tar.gz'>miniupnpc-20071003.tar.gz</a></td>
2874 <td class="filesize">31199</td>
2875 <td class="filedate">03/10/2007 15:30:13 +0000</td>
2876 <td class="comment">MiniUPnP client source code</td>
2877 <td></td>
2878</tr>
2879<tr>
2880 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20071001.zip'>upnpc-exe-win32-20071001.zip</a></td>
2881 <td class="filesize">12604</td>
2882 <td class="filedate">01/10/2007 17:09:22 +0000</td>
2883 <td class="comment">Windows executable</td>
2884 <td></td>
2885</tr>
2886<tr>
2887 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC9.tar.gz'>miniupnpd-1.0-RC9.tar.gz</a></td>
2888 <td class="filesize">54778</td>
2889 <td class="filedate">27/09/2007 19:38:36 +0000</td>
2890 <td class="comment">MiniUPnP daemon release source code</td>
2891 <td></td>
2892</tr>
2893<tr>
2894 <td class="filename"><a href='download.php?file=minissdpd-1.0-RC9.tar.gz'>minissdpd-1.0-RC9.tar.gz</a></td>
2895 <td class="filesize">9163</td>
2896 <td class="filedate">27/09/2007 17:00:03 +0000</td>
2897 <td class="comment">MiniSSDPd release source code</td>
2898 <td></td>
2899</tr>
2900<tr>
2901 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC9.tar.gz'>miniupnpc-1.0-RC9.tar.gz</a></td>
2902 <td class="filesize">30538</td>
2903 <td class="filedate">27/09/2007 17:00:03 +0000</td>
2904 <td class="comment">MiniUPnP client release source code</td>
2905 <td></td>
2906</tr>
2907<tr>
2908 <td class="filename"><a href='download.php?file=miniupnpd-20070924.tar.gz'>miniupnpd-20070924.tar.gz</a></td>
2909 <td class="filesize">52338</td>
2910 <td class="filedate">24/09/2007 20:26:05 +0000</td>
2911 <td class="comment">MiniUPnP daemon source code</td>
2912 <td></td>
2913</tr>
2914<tr>
2915 <td class="filename"><a href='download.php?file=miniupnpd-20070923.tar.gz'>miniupnpd-20070923.tar.gz</a></td>
2916 <td class="filesize">51060</td>
2917 <td class="filedate">23/09/2007 21:13:34 +0000</td>
2918 <td class="comment">MiniUPnP daemon source code</td>
2919 <td></td>
2920</tr>
2921<tr>
2922 <td class="filename"><a href='download.php?file=miniupnpc-20070923.tar.gz'>miniupnpc-20070923.tar.gz</a></td>
2923 <td class="filesize">30246</td>
2924 <td class="filedate">23/09/2007 21:13:33 +0000</td>
2925 <td class="comment">MiniUPnP client source code</td>
2926 <td></td>
2927</tr>
2928<tr>
2929 <td class="filename"><a href='download.php?file=minissdpd-20070923.tar.gz'>minissdpd-20070923.tar.gz</a></td>
2930 <td class="filesize">8978</td>
2931 <td class="filedate">23/09/2007 21:13:32 +0000</td>
2932 <td class="comment">MiniSSDPd source code</td>
2933 <td></td>
2934</tr>
2935<tr>
2936 <td class="filename"><a href='download.php?file=miniupnpc-20070902.tar.gz'>miniupnpc-20070902.tar.gz</a></td>
2937 <td class="filesize">30205</td>
2938 <td class="filedate">01/09/2007 23:47:23 +0000</td>
2939 <td class="comment">MiniUPnP client source code</td>
2940 <td></td>
2941</tr>
2942<tr>
2943 <td class="filename"><a href='download.php?file=minissdpd-20070902.tar.gz'>minissdpd-20070902.tar.gz</a></td>
2944 <td class="filesize">6539</td>
2945 <td class="filedate">01/09/2007 23:47:20 +0000</td>
2946 <td class="comment">MiniSSDPd source code</td>
2947 <td></td>
2948</tr>
2949<tr>
2950 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC8.tar.gz'>miniupnpd-1.0-RC8.tar.gz</a></td>
2951 <td class="filesize">50952</td>
2952 <td class="filedate">29/08/2007 10:56:09 +0000</td>
2953 <td class="comment">MiniUPnP daemon release source code</td>
2954 <td></td>
2955</tr>
2956<tr>
2957 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC8.tar.gz'>miniupnpc-1.0-RC8.tar.gz</a></td>
2958 <td class="filesize">29312</td>
2959 <td class="filedate">29/08/2007 10:56:08 +0000</td>
2960 <td class="comment">MiniUPnP client release source code</td>
2961 <td></td>
2962</tr>
2963<tr>
2964 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC7.tar.gz'>miniupnpd-1.0-RC7.tar.gz</a></td>
2965 <td class="filesize">50613</td>
2966 <td class="filedate">20/07/2007 00:15:45 +0000</td>
2967 <td class="comment">MiniUPnP daemon release source code</td>
2968 <td></td>
2969</tr>
2970<tr>
2971 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC6.tar.gz'>miniupnpd-1.0-RC6.tar.gz</a></td>
2972 <td class="filesize">49986</td>
2973 <td class="filedate">12/06/2007 17:12:07 +0000</td>
2974 <td class="comment">MiniUPnP daemon release source code</td>
2975 <td></td>
2976</tr>
2977<tr>
2978 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC6.tar.gz'>miniupnpc-1.0-RC6.tar.gz</a></td>
2979 <td class="filesize">29032</td>
2980 <td class="filedate">12/06/2007 17:12:06 +0000</td>
2981 <td class="comment">MiniUPnP client release source code</td>
2982 <td></td>
2983</tr>
2984<tr>
2985 <td class="filename"><a href='download.php?file=miniupnpd-20070607.tar.gz'>miniupnpd-20070607.tar.gz</a></td>
2986 <td class="filesize">49768</td>
2987 <td class="filedate">06/06/2007 23:12:00 +0000</td>
2988 <td class="comment">MiniUPnP daemon source code</td>
2989 <td></td>
2990</tr>
2991<tr>
2992 <td class="filename"><a href='download.php?file=miniupnpd-20070605.tar.gz'>miniupnpd-20070605.tar.gz</a></td>
2993 <td class="filesize">49710</td>
2994 <td class="filedate">05/06/2007 21:01:53 +0000</td>
2995 <td class="comment">MiniUPnP daemon source code</td>
2996 <td></td>
2997</tr>
2998<tr>
2999 <td class="filename"><a href='download.php?file=miniupnpd-20070521.tar.gz'>miniupnpd-20070521.tar.gz</a></td>
3000 <td class="filesize">48374</td>
3001 <td class="filedate">21/05/2007 13:07:43 +0000</td>
3002 <td class="comment">MiniUPnP daemon source code</td>
3003 <td></td>
3004</tr>
3005<tr>
3006 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20070519.zip'>upnpc-exe-win32-20070519.zip</a></td>
3007 <td class="filesize">10836</td>
3008 <td class="filedate">19/05/2007 13:14:15 +0000</td>
3009 <td class="comment">Windows executable</td>
3010 <td></td>
3011</tr>
3012<tr>
3013 <td class="filename"><a href='download.php?file=miniupnpc-20070515.tar.gz'>miniupnpc-20070515.tar.gz</a></td>
3014 <td class="filesize">25802</td>
3015 <td class="filedate">15/05/2007 18:15:25 +0000</td>
3016 <td class="comment">MiniUPnP client source code</td>
3017 <td></td>
3018</tr>
3019<tr>
3020 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC5.tar.gz'>miniupnpd-1.0-RC5.tar.gz</a></td>
3021 <td class="filesize">48064</td>
3022 <td class="filedate">10/05/2007 20:22:48 +0000</td>
3023 <td class="comment">MiniUPnP daemon release source code</td>
3024 <td></td>
3025</tr>
3026<tr>
3027 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC5.tar.gz'>miniupnpc-1.0-RC5.tar.gz</a></td>
3028 <td class="filesize">25242</td>
3029 <td class="filedate">10/05/2007 20:22:46 +0000</td>
3030 <td class="comment">MiniUPnP client release source code</td>
3031 <td></td>
3032</tr>
3033<tr>
3034 <td class="filename"><a href='download.php?file=miniupnpd-20070412.tar.gz'>miniupnpd-20070412.tar.gz</a></td>
3035 <td class="filesize">47807</td>
3036 <td class="filedate">12/04/2007 20:21:48 +0000</td>
3037 <td class="comment">MiniUPnP daemon source code</td>
3038 <td></td>
3039</tr>
3040<tr>
3041 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC4.tar.gz'>miniupnpd-1.0-RC4.tar.gz</a></td>
3042 <td class="filesize">47687</td>
3043 <td class="filedate">17/03/2007 11:43:13 +0000</td>
3044 <td class="comment">MiniUPnP daemon release source code</td>
3045 <td></td>
3046</tr>
3047<tr>
3048 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC4.tar.gz'>miniupnpc-1.0-RC4.tar.gz</a></td>
3049 <td class="filesize">25085</td>
3050 <td class="filedate">17/03/2007 11:43:10 +0000</td>
3051 <td class="comment">MiniUPnP client release source code</td>
3052 <td></td>
3053</tr>
3054<tr>
3055 <td class="filename"><a href='download.php?file=miniupnpd-20070311.tar.gz'>miniupnpd-20070311.tar.gz</a></td>
3056 <td class="filesize">47599</td>
3057 <td class="filedate">11/03/2007 00:25:26 +0000</td>
3058 <td class="comment">MiniUPnP daemon source code</td>
3059 <td></td>
3060</tr>
3061<tr>
3062 <td class="filename"><a href='download.php?file=miniupnpd-20070208.tar.gz'>miniupnpd-20070208.tar.gz</a></td>
3063 <td class="filesize">45084</td>
3064 <td class="filedate">07/02/2007 23:04:06 +0000</td>
3065 <td class="comment">MiniUPnP daemon source code</td>
3066 <td></td>
3067</tr>
3068<tr>
3069 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC3.tar.gz'>miniupnpd-1.0-RC3.tar.gz</a></td>
3070 <td class="filesize">44683</td>
3071 <td class="filedate">30/01/2007 23:00:44 +0000</td>
3072 <td class="comment">MiniUPnP daemon release source code</td>
3073 <td></td>
3074</tr>
3075<tr>
3076 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC3.tar.gz'>miniupnpc-1.0-RC3.tar.gz</a></td>
3077 <td class="filesize">25055</td>
3078 <td class="filedate">30/01/2007 23:00:42 +0000</td>
3079 <td class="comment">MiniUPnP client release source code</td>
3080 <td></td>
3081</tr>
3082<tr>
3083 <td class="filename"><a href='download.php?file=miniupnpd-20070130.tar.gz'>miniupnpd-20070130.tar.gz</a></td>
3084 <td class="filesize">43735</td>
3085 <td class="filedate">29/01/2007 23:26:16 +0000</td>
3086 <td class="comment">MiniUPnP daemon source code</td>
3087 <td></td>
3088</tr>
3089<tr>
3090 <td class="filename"><a href='download.php?file=miniupnpc-20070130.tar.gz'>miniupnpc-20070130.tar.gz</a></td>
3091 <td class="filesize">24466</td>
3092 <td class="filedate">29/01/2007 23:26:13 +0000</td>
3093 <td class="comment">MiniUPnP client source code</td>
3094 <td></td>
3095</tr>
3096<tr>
3097 <td class="filename"><a href='download.php?file=miniupnpd-20070127.tar.gz'>miniupnpd-20070127.tar.gz</a></td>
3098 <td class="filesize">42643</td>
3099 <td class="filedate">27/01/2007 16:02:35 +0000</td>
3100 <td class="comment">MiniUPnP daemon source code</td>
3101 <td></td>
3102</tr>
3103<tr>
3104 <td class="filename"><a href='download.php?file=miniupnpc-20070127.tar.gz'>miniupnpc-20070127.tar.gz</a></td>
3105 <td class="filesize">24241</td>
3106 <td class="filedate">27/01/2007 16:02:33 +0000</td>
3107 <td class="comment">MiniUPnP client source code</td>
3108 <td></td>
3109</tr>
3110<tr>
3111 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC2.tar.gz'>miniupnpd-1.0-RC2.tar.gz</a></td>
3112 <td class="filesize">40424</td>
3113 <td class="filedate">17/01/2007 16:13:05 +0000</td>
3114 <td class="comment">MiniUPnP daemon release source code</td>
3115 <td></td>
3116</tr>
3117<tr>
3118 <td class="filename"><a href='download.php?file=miniupnpd-20070112.tar.gz'>miniupnpd-20070112.tar.gz</a></td>
3119 <td class="filesize">40708</td>
3120 <td class="filedate">12/01/2007 13:40:50 +0000</td>
3121 <td class="comment">MiniUPnP daemon source code</td>
3122 <td></td>
3123</tr>
3124<tr>
3125 <td class="filename"><a href='download.php?file=miniupnpd-20070111.tar.gz'>miniupnpd-20070111.tar.gz</a></td>
3126 <td class="filesize">40651</td>
3127 <td class="filedate">11/01/2007 18:50:21 +0000</td>
3128 <td class="comment">MiniUPnP daemon source code</td>
3129 <td></td>
3130</tr>
3131<tr>
3132 <td class="filename"><a href='download.php?file=miniupnpd-20070108.tar.gz'>miniupnpd-20070108.tar.gz</a></td>
3133 <td class="filesize">40025</td>
3134 <td class="filedate">08/01/2007 10:02:14 +0000</td>
3135 <td class="comment">MiniUPnP daemon source code</td>
3136 <td></td>
3137</tr>
3138<tr>
3139 <td class="filename"><a href='download.php?file=miniupnpd-20070103.tar.gz'>miniupnpd-20070103.tar.gz</a></td>
3140 <td class="filesize">40065</td>
3141 <td class="filedate">03/01/2007 14:39:11 +0000</td>
3142 <td class="comment">MiniUPnP daemon source code</td>
3143 <td></td>
3144</tr>
3145<tr>
3146 <td class="filename"><a href='download.php?file=miniupnpc-20061214.tar.gz'>miniupnpc-20061214.tar.gz</a></td>
3147 <td class="filesize">24106</td>
3148 <td class="filedate">14/12/2006 15:43:54 +0000</td>
3149 <td class="comment">MiniUPnP client source code</td>
3150 <td></td>
3151</tr>
3152<tr>
3153 <td class="filename"><a href='download.php?file=miniupnpd-20061214.tar.gz'>miniupnpd-20061214.tar.gz</a></td>
3154 <td class="filesize">39750</td>
3155 <td class="filedate">14/12/2006 13:44:51 +0000</td>
3156 <td class="comment">MiniUPnP daemon source code</td>
3157 <td></td>
3158</tr>
3159<tr>
3160 <td class="filename"><a href='download.php?file=miniupnpd-1.0-RC1.tar.gz'>miniupnpd-1.0-RC1.tar.gz</a></td>
3161 <td class="filesize">39572</td>
3162 <td class="filedate">07/12/2006 10:55:31 +0000</td>
3163 <td class="comment">MiniUPnP daemon release source code</td>
3164 <td></td>
3165</tr>
3166<tr>
3167 <td class="filename"><a href='download.php?file=miniupnpc-1.0-RC1.tar.gz'>miniupnpc-1.0-RC1.tar.gz</a></td>
3168 <td class="filesize">23582</td>
3169 <td class="filedate">07/12/2006 10:55:30 +0000</td>
3170 <td class="comment">MiniUPnP client release source code</td>
3171 <td></td>
3172</tr>
3173<tr>
3174 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20061201.zip'>upnpc-exe-win32-20061201.zip</a></td>
3175 <td class="filesize">10378</td>
3176 <td class="filedate">01/12/2006 00:33:08 +0000</td>
3177 <td class="comment">Windows executable</td>
3178 <td></td>
3179</tr>
3180<tr>
3181 <td class="filename"><a href='download.php?file=miniupnpd20061130.tar.gz'>miniupnpd20061130.tar.gz</a></td>
3182 <td class="filesize">37184</td>
3183 <td class="filedate">30/11/2006 12:25:25 +0000</td>
3184 <td class="comment">MiniUPnP daemon source code</td>
3185 <td></td>
3186</tr>
3187<tr>
3188 <td class="filename"><a href='download.php?file=miniupnpd20061129.tar.gz'>miniupnpd20061129.tar.gz</a></td>
3189 <td class="filesize">36045</td>
3190 <td class="filedate">29/11/2006 00:10:49 +0000</td>
3191 <td class="comment">MiniUPnP daemon source code</td>
3192 <td></td>
3193</tr>
3194<tr>
3195 <td class="filename"><a href='download.php?file=miniupnpd20061127.tar.gz'>miniupnpd20061127.tar.gz</a></td>
3196 <td class="filesize">34155</td>
3197 <td class="filedate">26/11/2006 23:15:28 +0000</td>
3198 <td class="comment">MiniUPnP daemon source code</td>
3199 <td></td>
3200</tr>
3201<tr>
3202 <td class="filename"><a href='download.php?file=miniupnpc20061123.tar.gz'>miniupnpc20061123.tar.gz</a></td>
3203 <td class="filesize">21004</td>
3204 <td class="filedate">23/11/2006 22:41:46 +0000</td>
3205 <td class="comment">MiniUPnP client source code</td>
3206 <td></td>
3207</tr>
3208<tr>
3209 <td class="filename" colspan="2"><a href='download.php?file=miniupnpd-bin-openwrt20061123.tar.gz'>miniupnpd-bin-openwrt20061123.tar.gz</a></td>
3210 <td class="filedate">23/11/2006 22:41:44 +0000</td>
3211 <td class="comment">Precompiled binaries for openwrt</td>
3212 <td></td>
3213</tr>
3214<tr>
3215 <td class="filename"><a href='download.php?file=miniupnpd20061123.tar.gz'>miniupnpd20061123.tar.gz</a></td>
3216 <td class="filesize">33809</td>
3217 <td class="filedate">23/11/2006 22:28:29 +0000</td>
3218 <td class="comment">MiniUPnP daemon source code</td>
3219 <td></td>
3220</tr>
3221<tr>
3222 <td class="filename"><a href='download.php?file=miniupnpc20061119.tar.gz'>miniupnpc20061119.tar.gz</a></td>
3223 <td class="filesize">20897</td>
3224 <td class="filedate">19/11/2006 22:50:37 +0000</td>
3225 <td class="comment">MiniUPnP client source code</td>
3226 <td></td>
3227</tr>
3228<tr>
3229 <td class="filename"><a href='download.php?file=miniupnpd20061119.tar.gz'>miniupnpd20061119.tar.gz</a></td>
3230 <td class="filesize">32580</td>
3231 <td class="filedate">19/11/2006 22:50:36 +0000</td>
3232 <td class="comment">MiniUPnP daemon source code</td>
3233 <td></td>
3234</tr>
3235<tr>
3236 <td class="filename"><a href='download.php?file=miniupnpd20061117.tar.gz'>miniupnpd20061117.tar.gz</a></td>
3237 <td class="filesize">32646</td>
3238 <td class="filedate">17/11/2006 13:29:33 +0000</td>
3239 <td class="comment">MiniUPnP daemon source code</td>
3240 <td></td>
3241</tr>
3242<tr>
3243 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20061112.zip'>upnpc-exe-win32-20061112.zip</a></td>
3244 <td class="filesize">10262</td>
3245 <td class="filedate">12/11/2006 22:41:25 +0000</td>
3246 <td class="comment">Windows executable</td>
3247 <td></td>
3248</tr>
3249<tr>
3250 <td class="filename"><a href='download.php?file=miniupnpd20061112.tar.gz'>miniupnpd20061112.tar.gz</a></td>
3251 <td class="filesize">32023</td>
3252 <td class="filedate">12/11/2006 21:30:32 +0000</td>
3253 <td class="comment">MiniUPnP daemon source code</td>
3254 <td></td>
3255</tr>
3256<tr>
3257 <td class="filename"><a href='download.php?file=miniupnpc20061112.tar.gz'>miniupnpc20061112.tar.gz</a></td>
3258 <td class="filesize">21047</td>
3259 <td class="filedate">12/11/2006 21:30:31 +0000</td>
3260 <td class="comment">MiniUPnP client source code</td>
3261 <td></td>
3262</tr>
3263<tr>
3264 <td class="filename"><a href='download.php?file=miniupnpd20061110.tar.gz'>miniupnpd20061110.tar.gz</a></td>
3265 <td class="filesize">27926</td>
3266 <td class="filedate">09/11/2006 23:35:02 +0000</td>
3267 <td class="comment">MiniUPnP daemon source code</td>
3268 <td></td>
3269</tr>
3270<tr>
3271 <td class="filename"><a href='download.php?file=miniupnpc20061110.tar.gz'>miniupnpc20061110.tar.gz</a></td>
3272 <td class="filesize">21009</td>
3273 <td class="filedate">09/11/2006 23:32:19 +0000</td>
3274 <td class="comment">MiniUPnP client source code</td>
3275 <td></td>
3276</tr>
3277<tr>
3278 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20061101.zip'>upnpc-exe-win32-20061101.zip</a></td>
3279 <td class="filesize">10089</td>
3280 <td class="filedate">08/11/2006 20:35:09 +0000</td>
3281 <td class="comment">Windows executable</td>
3282 <td></td>
3283</tr>
3284<tr>
3285 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20061020.zip'>upnpc-exe-win32-20061020.zip</a></td>
3286 <td class="filesize">9183</td>
3287 <td class="filedate">08/11/2006 20:35:08 +0000</td>
3288 <td class="comment">Windows executable</td>
3289 <td></td>
3290</tr>
3291<tr>
3292 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20060909.zip'>upnpc-exe-win32-20060909.zip</a></td>
3293 <td class="filesize">9994</td>
3294 <td class="filedate">08/11/2006 20:35:07 +0000</td>
3295 <td class="comment">Windows executable</td>
3296 <td></td>
3297</tr>
3298<tr>
3299 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20060801.zip'>upnpc-exe-win32-20060801.zip</a></td>
3300 <td class="filesize">10002</td>
3301 <td class="filedate">08/11/2006 20:35:06 +0000</td>
3302 <td class="comment">Windows executable</td>
3303 <td></td>
3304</tr>
3305<tr>
3306 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20060711.zip'>upnpc-exe-win32-20060711.zip</a></td>
3307 <td class="filesize">13733</td>
3308 <td class="filedate">08/11/2006 20:35:05 +0000</td>
3309 <td class="comment">Windows executable</td>
3310 <td></td>
3311</tr>
3312<tr>
3313 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20060709.zip'>upnpc-exe-win32-20060709.zip</a></td>
3314 <td class="filesize">13713</td>
3315 <td class="filedate">08/11/2006 20:35:04 +0000</td>
3316 <td class="comment">Windows executable</td>
3317 <td></td>
3318</tr>
3319<tr>
3320 <td class="filename"><a href='download.php?file=upnpc-exe-win32-20060704.zip'>upnpc-exe-win32-20060704.zip</a></td>
3321 <td class="filesize">13297</td>
3322 <td class="filedate">08/11/2006 20:35:03 +0000</td>
3323 <td class="comment">Windows executable</td>
3324 <td></td>
3325</tr>
3326<tr>
3327 <td class="filename"><a href='download.php?file=miniupnpc20061107.tar.gz'>miniupnpc20061107.tar.gz</a></td>
3328 <td class="filesize">20708</td>
3329 <td class="filedate">06/11/2006 23:36:57 +0000</td>
3330 <td class="comment">MiniUPnP client source code</td>
3331 <td></td>
3332</tr>
3333<tr>
3334 <td class="filename"><a href='download.php?file=miniupnpd20061107.tar.gz'>miniupnpd20061107.tar.gz</a></td>
3335 <td class="filesize">26992</td>
3336 <td class="filedate">06/11/2006 23:35:06 +0000</td>
3337 <td class="comment">MiniUPnP daemon source code</td>
3338 <td></td>
3339</tr>
3340<tr>
3341 <td class="filename"><a href='download.php?file=miniupnpc20061106.tar.gz'>miniupnpc20061106.tar.gz</a></td>
3342 <td class="filesize">20575</td>
3343 <td class="filedate">06/11/2006 17:02:15 +0000</td>
3344 <td class="comment">MiniUPnP client source code</td>
3345 <td></td>
3346</tr>
3347<tr>
3348 <td class="filename"><a href='download.php?file=miniupnpd20061106.tar.gz'>miniupnpd20061106.tar.gz</a></td>
3349 <td class="filesize">26597</td>
3350 <td class="filedate">06/11/2006 15:39:10 +0000</td>
3351 <td class="comment">MiniUPnP daemon source code</td>
3352 <td></td>
3353</tr>
3354<tr>
3355 <td class="filename"><a href='download.php?file=miniupnpc20061101.tar.gz'>miniupnpc20061101.tar.gz</a></td>
3356 <td class="filesize">20395</td>
3357 <td class="filedate">04/11/2006 18:16:15 +0000</td>
3358 <td class="comment">MiniUPnP client source code</td>
3359 <td></td>
3360</tr>
3361<tr>
3362 <td class="filename"><a href='download.php?file=miniupnpc20061031.tar.gz'>miniupnpc20061031.tar.gz</a></td>
3363 <td class="filesize">20396</td>
3364 <td class="filedate">04/11/2006 18:16:13 +0000</td>
3365 <td class="comment">MiniUPnP client source code</td>
3366 <td></td>
3367</tr>
3368<tr>
3369 <td class="filename"><a href='download.php?file=miniupnpc20061023.tar.gz'>miniupnpc20061023.tar.gz</a></td>
3370 <td class="filesize">20109</td>
3371 <td class="filedate">04/11/2006 18:16:12 +0000</td>
3372 <td class="comment">MiniUPnP client source code</td>
3373 <td></td>
3374</tr>
3375<tr>
3376 <td class="filename"><a href='download.php?file=miniupnpc20061020.tar.gz'>miniupnpc20061020.tar.gz</a></td>
3377 <td class="filesize">19739</td>
3378 <td class="filedate">04/11/2006 18:16:10 +0000</td>
3379 <td class="comment">MiniUPnP client source code</td>
3380 <td></td>
3381</tr>
3382<tr>
3383 <td class="filename"><a href='download.php?file=miniupnpc20060909.tar.gz'>miniupnpc20060909.tar.gz</a></td>
3384 <td class="filesize">19285</td>
3385 <td class="filedate">04/11/2006 18:16:09 +0000</td>
3386 <td class="comment">MiniUPnP client source code</td>
3387 <td></td>
3388</tr>
3389<tr>
3390 <td class="filename"><a href='download.php?file=miniupnpc20060731.tar.gz'>miniupnpc20060731.tar.gz</a></td>
3391 <td class="filesize">19032</td>
3392 <td class="filedate">04/11/2006 18:16:07 +0000</td>
3393 <td class="comment">MiniUPnP client source code</td>
3394 <td></td>
3395</tr>
3396<tr>
3397 <td class="filename"><a href='download.php?file=miniupnpc20060711.tar.gz'>miniupnpc20060711.tar.gz</a></td>
3398 <td class="filesize">19151</td>
3399 <td class="filedate">04/11/2006 18:16:06 +0000</td>
3400 <td class="comment">MiniUPnP client source code</td>
3401 <td></td>
3402</tr>
3403<tr>
3404 <td class="filename"><a href='download.php?file=miniupnpc20060709.tar.gz'>miniupnpc20060709.tar.gz</a></td>
3405 <td class="filesize">19080</td>
3406 <td class="filedate">04/11/2006 18:16:04 +0000</td>
3407 <td class="comment">MiniUPnP client source code</td>
3408 <td></td>
3409</tr>
3410<tr>
3411 <td class="filename"><a href='download.php?file=miniupnpc20060703.tar.gz'>miniupnpc20060703.tar.gz</a></td>
3412 <td class="filesize">17906</td>
3413 <td class="filedate">04/11/2006 18:16:03 +0000</td>
3414 <td class="comment">MiniUPnP client source code</td>
3415 <td></td>
3416</tr>
3417<tr>
3418 <td class="filename"><a href='download.php?file=miniupnpc-new20060630.tar.gz'>miniupnpc-new20060630.tar.gz</a></td>
3419 <td class="filesize">14840</td>
3420 <td class="filedate">04/11/2006 18:16:01 +0000</td>
3421 <td class="comment">Jo&atilde;o Paulo Barraca version of the upnp client</td>
3422 <td></td>
3423</tr>
3424<tr>
3425 <td class="filename"><a href='download.php?file=miniupnpd20061029.tar.gz'>miniupnpd20061029.tar.gz</a></td>
3426 <td class="filesize">24197</td>
3427 <td class="filedate">03/11/2006 13:40:30 +0000</td>
3428 <td class="comment">MiniUPnP daemon source code</td>
3429 <td></td>
3430</tr>
3431<tr>
3432 <td class="filename"><a href='download.php?file=miniupnpd20061027.tar.gz'>miniupnpd20061027.tar.gz</a></td>
3433 <td class="filesize">23904</td>
3434 <td class="filedate">03/11/2006 13:40:29 +0000</td>
3435 <td class="comment">MiniUPnP daemon source code</td>
3436 <td></td>
3437</tr>
3438<tr>
3439 <td class="filename"><a href='download.php?file=miniupnpd20061028.tar.gz'>miniupnpd20061028.tar.gz</a></td>
3440 <td class="filesize">24383</td>
3441 <td class="filedate">03/11/2006 13:40:29 +0000</td>
3442 <td class="comment">MiniUPnP daemon source code</td>
3443 <td></td>
3444</tr>
3445<tr>
3446 <td class="filename"><a href='download.php?file=miniupnpd20061018.tar.gz'>miniupnpd20061018.tar.gz</a></td>
3447 <td class="filesize">23051</td>
3448 <td class="filedate">03/11/2006 13:40:28 +0000</td>
3449 <td class="comment">MiniUPnP daemon source code</td>
3450 <td></td>
3451</tr>
3452<tr>
3453 <td class="filename"><a href='download.php?file=miniupnpd20061023.tar.gz'>miniupnpd20061023.tar.gz</a></td>
3454 <td class="filesize">23478</td>
3455 <td class="filedate">03/11/2006 13:40:28 +0000</td>
3456 <td class="comment">MiniUPnP daemon source code</td>
3457 <td></td>
3458</tr>
3459<tr>
3460 <td class="filename"><a href='download.php?file=miniupnpd20060930.tar.gz'>miniupnpd20060930.tar.gz</a></td>
3461 <td class="filesize">22832</td>
3462 <td class="filedate">03/11/2006 13:40:28 +0000</td>
3463 <td class="comment">MiniUPnP daemon source code</td>
3464 <td></td>
3465</tr>
3466<tr>
3467 <td class="filename"><a href='download.php?file=miniupnpd20060924.tar.gz'>miniupnpd20060924.tar.gz</a></td>
3468 <td class="filesize">22038</td>
3469 <td class="filedate">03/11/2006 13:40:27 +0000</td>
3470 <td class="comment">MiniUPnP daemon source code</td>
3471 <td></td>
3472</tr>
3473<tr>
3474 <td class="filename"><a href='download.php?file=miniupnpd20060919.tar.gz'>miniupnpd20060919.tar.gz</a></td>
3475 <td class="filesize">21566</td>
3476 <td class="filedate">03/11/2006 13:40:27 +0000</td>
3477 <td class="comment">MiniUPnP daemon source code</td>
3478 <td></td>
3479</tr>
3480<tr>
3481 <td class="filename"><a href='download.php?file=miniupnpd20060729.tar.gz'>miniupnpd20060729.tar.gz</a></td>
3482 <td class="filesize">19202</td>
3483 <td class="filedate">03/11/2006 13:40:26 +0000</td>
3484 <td class="comment">MiniUPnP daemon source code</td>
3485 <td></td>
3486</tr>
3487<tr>
3488 <td class="filename"><a href='download.php?file=miniupnpd20060909.tar.gz'>miniupnpd20060909.tar.gz</a></td>
3489 <td class="filesize">19952</td>
3490 <td class="filedate">03/11/2006 13:40:26 +0000</td>
3491 <td class="comment">MiniUPnP daemon source code</td>
3492 <td></td>
3493</tr>
3494</table>
3495
3496<p><a href="..">Home</a></p>
3497<p>Contact: miniupnp _AT_ free _DOT_ fr</p>
3498<p align="center">
3499<a href="https://validator.w3.org/check?uri=referer"><img src="https://www.w3.org/Icons/valid-xhtml10" alt="Valid XHTML 1.0 Transitional" height="31" width="88" /></a>
3500<a href="https://jigsaw.w3.org/css-validator/check/referer"><img style="border:0;width:88px;height:31px" src="https://jigsaw.w3.org/css-validator/images/vcss" alt="Valid CSS!" /></a>
3501<!--
3502 <a href="https://freshmeat.net/projects/miniupnp"><img src="https://s3.amazonaws.com/entp-tender-production/assets/bc5be96f147ec8db3c10fc017f1f53889904ef5b/fm_logo_white_150_normal.png" border="0" alt="freshmeat.net" /></a>
3503-->
3504<!-- https://futuresimple.github.com/images/github_logo.png -->
3505<!-- <a href="https://github.com/miniupnp/miniupnp"><img src="https://assets-cdn.github.com/images/modules/logos_page/GitHub-Logo.png" alt="github.com" height="31" /></a> -->
3506<a href="https://github.com/miniupnp/miniupnp"><img style="position: absolute; top: 0; left: 0; border: 0;" src="https://github.blog/wp-content/uploads/2008/12/forkme_left_green_007200.png" alt="Fork me on GitHub" /></a>
3507</p>
3508
3509<script type="text/javascript">
3510var gaJsHost = (("https:" == document.location.protocol) ? "https://ssl." : "http://www.");
3511document.write(unescape("%3Cscript src='" + gaJsHost + "google-analytics.com/ga.js' type='text/javascript'%3E%3C/script%3E"));
3512</script>
3513<script type="text/javascript">
3514try {
3515 var ua = 'UA-10295521';
3516 if(window.location.hostname == 'miniupnp.free.fr')
3517 ua += '-1';
3518 else if(window.location.hostname == 'miniupnp.tuxfamily.org')
3519 ua += '-2';
3520 else ua = '';
3521 if(ua != '') {
3522 var pageTracker = _gat._getTracker(ua);
3523 pageTracker._trackPageview();
3524 }
3525} catch(err) {}</script>
3526</body>
3527</html>
3528
diff --git a/bitbake/lib/bb/tests/fetch.py b/bitbake/lib/bb/tests/fetch.py
index 7b2dac7b86..077472b8b3 100644
--- a/bitbake/lib/bb/tests/fetch.py
+++ b/bitbake/lib/bb/tests/fetch.py
@@ -6,21 +6,59 @@
 # SPDX-License-Identifier: GPL-2.0-only
 #
 
+import contextlib
+import shutil
 import unittest
+import unittest.mock
+import urllib.parse
 import hashlib
 import tempfile
 import collections
 import os
+import signal
+import tarfile
 from bb.fetch2 import URI
 from bb.fetch2 import FetchMethod
 import bb
+import bb.utils
 from bb.tests.support.httpserver import HTTPService
 
 def skipIfNoNetwork():
     if os.environ.get("BB_SKIP_NETTESTS") == "yes":
-        return unittest.skip("Network tests being skipped")
+        return unittest.skip("network test")
     return lambda f: f
 
+
+@contextlib.contextmanager
+def hide_directory(directory):
+    """Hide the given directory and restore it after the context is left"""
+    temp_name = directory + ".bak"
+    os.rename(directory, temp_name)
+    try:
+        yield
+    finally:
+        os.rename(temp_name, directory)
+
+
+class TestTimeout(Exception):
+    # Indicate to pytest that this is not a test suite
+    __test__ = False
+
+class Timeout():
+
+    def __init__(self, seconds):
+        self.seconds = seconds
+
+    def handle_timeout(self, signum, frame):
+        raise TestTimeout("Test failed: timeout reached")
+
+    def __enter__(self):
+        signal.signal(signal.SIGALRM, self.handle_timeout)
+        signal.alarm(self.seconds)
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        signal.alarm(0)
+
 class URITest(unittest.TestCase):
     test_uris = {
         "http://www.google.com/index.html" : {
@@ -286,6 +324,36 @@ class URITest(unittest.TestCase):
             'params': {"someparam" : "1"},
             'query': {},
             'relative': True
+        },
+        "https://www.innodisk.com/Download_file?9BE0BF6657;downloadfilename=EGPL-T101.zip": {
+            'uri': 'https://www.innodisk.com/Download_file?9BE0BF6657;downloadfilename=EGPL-T101.zip',
+            'scheme': 'https',
+            'hostname': 'www.innodisk.com',
+            'port': None,
+            'hostport': 'www.innodisk.com',
+            'path': '/Download_file',
+            'userinfo': '',
+            'userinfo': '',
+            'username': '',
+            'password': '',
+            'params': {"downloadfilename" : "EGPL-T101.zip"},
+            'query': {"9BE0BF6657": None},
+            'relative': False
+        },
+        "file://example@.service": {
+            'uri': 'file:example%40.service',
+            'scheme': 'file',
+            'hostname': '',
+            'port': None,
+            'hostport': '',
+            'path': 'example@.service',
+            'userinfo': '',
+            'userinfo': '',
+            'username': '',
+            'password': '',
+            'params': {},
+            'query': {},
+            'relative': True
         }
 
     }
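The two new test_uris entries pin down how bb.fetch2.URI splits a URL whose query string carries a bare token (query values may be None) and how an @ in a file:// path is percent-encoded when the URI is re-serialized. A quick interactive sketch of what the first entry asserts (illustrative only, not part of the test):

    from bb.fetch2 import URI

    uri = URI("https://www.innodisk.com/Download_file?9BE0BF6657;downloadfilename=EGPL-T101.zip")
    print(uri.scheme)    # https
    print(uri.hostname)  # www.innodisk.com
    print(uri.path)      # /Download_file
    print(uri.params)    # {'downloadfilename': 'EGPL-T101.zip'}
    print(uri.query)     # {'9BE0BF6657': None}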
@@ -376,7 +444,7 @@ class FetcherTest(unittest.TestCase):
     def setUp(self):
         self.origdir = os.getcwd()
         self.d = bb.data.init()
-        self.tempdir = tempfile.mkdtemp()
+        self.tempdir = tempfile.mkdtemp(prefix="bitbake-fetch-")
         self.dldir = os.path.join(self.tempdir, "download")
         os.mkdir(self.dldir)
         self.d.setVar("DL_DIR", self.dldir)
@@ -390,63 +458,104 @@ class FetcherTest(unittest.TestCase):
         if os.environ.get("BB_TMPDIR_NOCLEAN") == "yes":
             print("Not cleaning up %s. Please remove manually." % self.tempdir)
         else:
+            bb.process.run('chmod u+rw -R %s' % self.tempdir)
             bb.utils.prunedir(self.tempdir)
 
+    def git(self, cmd, cwd=None):
+        if isinstance(cmd, str):
+            cmd = 'git -c safe.bareRepository=all ' + cmd
+        else:
+            cmd = ['git', '-c', 'safe.bareRepository=all'] + cmd
+        if cwd is None:
+            cwd = self.gitdir
+        return bb.process.run(cmd, cwd=cwd)[0]
+
+    def git_init(self, cwd=None):
+        self.git('init', cwd=cwd)
+        # Explicitly set the initial branch to master,
+        # since a common setup is to use a default
+        # branch other than master.
+        self.git(['checkout', '-b', 'master'], cwd=cwd)
+
+        try:
+            self.git(['config', 'user.email'], cwd=cwd)
+        except bb.process.ExecutionError:
+            self.git(['config', 'user.email', 'you@example.com'], cwd=cwd)
+
+        try:
+            self.git(['config', 'user.name'], cwd=cwd)
+        except bb.process.ExecutionError:
+            self.git(['config', 'user.name', 'Your Name'], cwd=cwd)
+
 class MirrorUriTest(FetcherTest):
 
     replaceuris = {
-        ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "http://somewhere.org/somedir/")
+        ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/.*", "http://somewhere.org/somedir/")
             : "http://somewhere.org/somedir/git2_git.invalid.infradead.org.mtd-utils.git.tar.gz",
-        ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http")
-            : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
-        ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http")
-            : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
-        ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/\\2;protocol=http")
-            : "git://somewhere.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
-        ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890", "git://someserver.org/bitbake", "git://git.openembedded.org/bitbake")
-            : "git://git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890",
+        ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http")
+            : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
+        ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/somedir/\\2;protocol=http")
+            : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
+        ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/([^/]+/)*([^/]*)", "git://somewhere.org/\\2;protocol=http")
+            : "git://somewhere.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
+        ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master", "git://someserver.org/bitbake", "git://git.openembedded.org/bitbake")
+            : "git://git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master",
         ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache")
             : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz",
         ("file://sstate-xyz.tgz", "file://.*", "file:///somewhere/1234/sstate-cache/")
             : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz",
         ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org/somedir3")
             : "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz",
         ("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz")
             : "http://somewhere2.org/somedir3/somefile_1.2.3.tar.gz",
         ("http://www.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", "http://www.apache.org/dist", "http://archive.apache.org/dist")
             : "http://archive.apache.org/dist/subversion/subversion-1.7.1.tar.bz2",
         ("http://www.apache.org/dist/subversion/subversion-1.7.1.tar.bz2", "http://.*/.*", "file:///somepath/downloads/")
             : "file:///somepath/downloads/subversion-1.7.1.tar.bz2",
-        ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http")
-            : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
-        ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http")
-            : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
-        ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890", "git://.*/.*", "git://somewhere.org/somedir/MIRRORNAME;protocol=http")
-            : "git://somewhere.org/somedir/git.invalid.infradead.org.foo.mtd-utils.git;tag=1234567890123456789012345678901234567890;protocol=http",
+        ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http")
+            : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
+        ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/.*", "git://somewhere.org/somedir/BASENAME;protocol=http")
+            : "git://somewhere.org/somedir/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
+        ("git://git.invalid.infradead.org/foo/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", "git://.*/.*", "git://somewhere.org/somedir/MIRRORNAME;protocol=http")
+            : "git://somewhere.org/somedir/git.invalid.infradead.org.foo.mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
         ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org")
             : "http://somewhere2.org/somefile_1.2.3.tar.gz",
         ("http://somewhere.org/somedir1/somedir2/somefile_1.2.3.tar.gz", "http://.*/.*", "http://somewhere2.org/")
             : "http://somewhere2.org/somefile_1.2.3.tar.gz",
         ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master", "git://someserver.org/bitbake;branch=master", "git://git.openembedded.org/bitbake;protocol=http")
             : "git://git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
+        ("git://user1@someserver.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master", "git://someserver.org/bitbake;branch=master", "git://user2@git.openembedded.org/bitbake;protocol=http")
+            : "git://user2@git.openembedded.org/bitbake;tag=1234567890123456789012345678901234567890;branch=master;protocol=http",
+        ("git://someserver.org/bitbake;tag=1234567890123456789012345678901234567890;protocol=git;branch=master", "git://someserver.org/bitbake", "git://someotherserver.org/bitbake;protocol=https")
+            : "git://someotherserver.org/bitbake;tag=1234567890123456789012345678901234567890;protocol=https;branch=master",
+        ("gitsm://git.qemu.org/git/seabios.git/;protocol=https;name=roms/seabios;subpath=roms/seabios;bareclone=1;nobranch=1;rev=1234567890123456789012345678901234567890", "gitsm://.*/.*", "http://petalinux.xilinx.com/sswreleases/rel-v${XILINX_VER_MAIN}/downloads") : "http://petalinux.xilinx.com/sswreleases/rel-v%24%7BXILINX_VER_MAIN%7D/downloads/git2_git.qemu.org.git.seabios.git..tar.gz",
+        ("https://somewhere.org/example/1.0.0/example;downloadfilename=some-example-1.0.0.tgz", "https://.*/.*", "file:///mirror/PATH")
+            : "file:///mirror/example/1.0.0/some-example-1.0.0.tgz;downloadfilename=some-example-1.0.0.tgz",
+        ("https://somewhere.org/example-1.0.0.tgz;downloadfilename=some-example-1.0.0.tgz", "https://.*/.*", "file:///mirror/some-example-1.0.0.tgz")
+            : "file:///mirror/some-example-1.0.0.tgz;downloadfilename=some-example-1.0.0.tgz",
+        ("git://git.invalid.infradead.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", r"git://(?!internal\.git\.server).*/.*", "http://somewhere.org/somedir/")
+            : "http://somewhere.org/somedir/git2_git.invalid.infradead.org.mtd-utils.git.tar.gz",
+        ("git://internal.git.server.org/mtd-utils.git;tag=1234567890123456789012345678901234567890;branch=master", r"git://(?!internal\.git\.server).*/.*", "http://somewhere.org/somedir/")
+            : None,
 
         #Renaming files doesn't work
         #("http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere.org/somedir1/somefile_1.2.3.tar.gz", "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz") : "http://somewhere2.org/somedir3/somefile_2.3.4.tar.gz"
         #("file://sstate-xyz.tgz", "file://.*/.*", "file:///somewhere/1234/sstate-cache") : "file:///somewhere/1234/sstate-cache/sstate-xyz.tgz",
     }
 
-    mirrorvar = "http://.*/.* file:///somepath/downloads/ \n" \
-                "git://someserver.org/bitbake git://git.openembedded.org/bitbake \n" \
-                "https://.*/.* file:///someotherpath/downloads/ \n" \
-                "http://.*/.* file:///someotherpath/downloads/ \n"
+    mirrorvar = "http://.*/.* file:///somepath/downloads/ " \
+                "git://someserver.org/bitbake git://git.openembedded.org/bitbake " \
+                "https?://.*/.* file:///someotherpath/downloads/ " \
+                "svn://svn.server1.com/ svn://svn.server2.com/"
 
     def test_urireplace(self):
+        self.d.setVar("FILESPATH", ".")
         for k, v in self.replaceuris.items():
             ud = bb.fetch.FetchData(k[0], self.d)
             ud.setup_localpath(self.d)
             mirrors = bb.fetch2.mirror_from_string("%s %s" % (k[1], k[2]))
             newuris, uds = bb.fetch2.build_mirroruris(ud, mirrors, self.d)
-            self.assertEqual([v], newuris)
+            self.assertEqual([v] if v else [], newuris)
 
     def test_urilist1(self):
         fetcher = bb.fetch.FetchData("http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d)
@@ -461,10 +570,17 @@ class MirrorUriTest(FetcherTest):
461 uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d) 570 uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
462 self.assertEqual(uris, ['file:///someotherpath/downloads/bitbake-1.0.tar.gz']) 571 self.assertEqual(uris, ['file:///someotherpath/downloads/bitbake-1.0.tar.gz'])
463 572
573 def test_urilistsvn(self):
574 # Regression test for the svn:// -> svn:// mirror mapping bug
575 fetcher = bb.fetch.FetchData("svn://svn.server1.com/isource/svnroot/reponame/tags/tagname;module=path_in_tagnamefolder;protocol=https;rev=2", self.d)
576 mirrors = bb.fetch2.mirror_from_string(self.mirrorvar)
577 uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
578 self.assertEqual(uris, ['svn://svn.server2.com/isource/svnroot/reponame/tags/tagname;module=path_in_tagnamefolder;protocol=https;rev=2'])
579
464 def test_mirror_of_mirror(self): 580 def test_mirror_of_mirror(self):
465 # Test if mirror of a mirror works 581 # Test if mirror of a mirror works
466 mirrorvar = self.mirrorvar + " http://.*/.* http://otherdownloads.yoctoproject.org/downloads/ \n" 582 mirrorvar = self.mirrorvar + " http://.*/.* http://otherdownloads.yoctoproject.org/downloads/"
467 mirrorvar = mirrorvar + " http://otherdownloads.yoctoproject.org/.* http://downloads2.yoctoproject.org/downloads/ \n" 583 mirrorvar = mirrorvar + " http://otherdownloads.yoctoproject.org/.* http://downloads2.yoctoproject.org/downloads/"
468 fetcher = bb.fetch.FetchData("http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d) 584 fetcher = bb.fetch.FetchData("http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d)
469 mirrors = bb.fetch2.mirror_from_string(mirrorvar) 585 mirrors = bb.fetch2.mirror_from_string(mirrorvar)
470 uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d) 586 uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
@@ -473,30 +589,30 @@ class MirrorUriTest(FetcherTest):
473 'http://otherdownloads.yoctoproject.org/downloads/bitbake-1.0.tar.gz', 589 'http://otherdownloads.yoctoproject.org/downloads/bitbake-1.0.tar.gz',
474 'http://downloads2.yoctoproject.org/downloads/bitbake-1.0.tar.gz']) 590 'http://downloads2.yoctoproject.org/downloads/bitbake-1.0.tar.gz'])
475 591
476 recmirrorvar = "https://.*/[^/]* http://AAAA/A/A/A/ \n" \ 592 recmirrorvar = "https://.*/[^/]* http://aaaa/A/A/A/ " \
477 "https://.*/[^/]* https://BBBB/B/B/B/ \n" 593 "https://.*/[^/]* https://bbbb/B/B/B/"
478 594
479 def test_recursive(self): 595 def test_recursive(self):
480 fetcher = bb.fetch.FetchData("https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d) 596 fetcher = bb.fetch.FetchData("https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", self.d)
481 mirrors = bb.fetch2.mirror_from_string(self.recmirrorvar) 597 mirrors = bb.fetch2.mirror_from_string(self.recmirrorvar)
482 uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d) 598 uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
483 self.assertEqual(uris, ['http://AAAA/A/A/A/bitbake/bitbake-1.0.tar.gz', 599 self.assertEqual(uris, ['http://aaaa/A/A/A/bitbake/bitbake-1.0.tar.gz',
484 'https://BBBB/B/B/B/bitbake/bitbake-1.0.tar.gz', 600 'https://bbbb/B/B/B/bitbake/bitbake-1.0.tar.gz',
485 'http://AAAA/A/A/A/B/B/bitbake/bitbake-1.0.tar.gz']) 601 'http://aaaa/A/A/A/B/B/bitbake/bitbake-1.0.tar.gz'])
486 602
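The mirror tests above all follow the same shape; as a standalone sketch (assuming a populated datastore d, as FetcherTest.setUp() provides elsewhere in this file):

# Sketch of the pattern the mirror tests use: build the ordered list of
# candidate URIs the fetcher would try for src_uri under the given mirror
# rules. Rules are also re-applied to generated candidates, which is what
# test_mirror_of_mirror() and test_recursive() exercise.
import bb.fetch2

def candidate_mirror_uris(d, src_uri, mirror_string):
    ud = bb.fetch2.FetchData(src_uri, d)
    mirrors = bb.fetch2.mirror_from_string(mirror_string)
    uris, uds = bb.fetch2.build_mirroruris(ud, mirrors, d)
    return uris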
487 603
488class GitDownloadDirectoryNamingTest(FetcherTest): 604class GitDownloadDirectoryNamingTest(FetcherTest):
489 def setUp(self): 605 def setUp(self):
490 super(GitDownloadDirectoryNamingTest, self).setUp() 606 super(GitDownloadDirectoryNamingTest, self).setUp()
491 self.recipe_url = "git://git.openembedded.org/bitbake" 607 self.recipe_url = "git://git.openembedded.org/bitbake;branch=master;protocol=https"
492 self.recipe_dir = "git.openembedded.org.bitbake" 608 self.recipe_dir = "git.openembedded.org.bitbake"
493 self.mirror_url = "git://github.com/openembedded/bitbake.git" 609 self.mirror_url = "git://github.com/openembedded/bitbake.git;protocol=https;branch=master"
494 self.mirror_dir = "github.com.openembedded.bitbake.git" 610 self.mirror_dir = "github.com.openembedded.bitbake.git"
495 611
496 self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40') 612 self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40')
497 613
498 def setup_mirror_rewrite(self): 614 def setup_mirror_rewrite(self):
499 self.d.setVar("PREMIRRORS", self.recipe_url + " " + self.mirror_url + " \n") 615 self.d.setVar("PREMIRRORS", self.recipe_url + " " + self.mirror_url)
500 616
501 @skipIfNoNetwork() 617 @skipIfNoNetwork()
502 def test_that_directory_is_named_after_recipe_url_when_no_mirroring_is_used(self): 618 def test_that_directory_is_named_after_recipe_url_when_no_mirroring_is_used(self):
@@ -536,16 +652,16 @@ class GitDownloadDirectoryNamingTest(FetcherTest):
536class TarballNamingTest(FetcherTest): 652class TarballNamingTest(FetcherTest):
537 def setUp(self): 653 def setUp(self):
538 super(TarballNamingTest, self).setUp() 654 super(TarballNamingTest, self).setUp()
539 self.recipe_url = "git://git.openembedded.org/bitbake" 655 self.recipe_url = "git://git.openembedded.org/bitbake;branch=master;protocol=https"
540 self.recipe_tarball = "git2_git.openembedded.org.bitbake.tar.gz" 656 self.recipe_tarball = "git2_git.openembedded.org.bitbake.tar.gz"
541 self.mirror_url = "git://github.com/openembedded/bitbake.git" 657 self.mirror_url = "git://github.com/openembedded/bitbake.git;protocol=https;branch=master"
542 self.mirror_tarball = "git2_github.com.openembedded.bitbake.git.tar.gz" 658 self.mirror_tarball = "git2_github.com.openembedded.bitbake.git.tar.gz"
543 659
544 self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '1') 660 self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '1')
545 self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40') 661 self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40')
546 662
547 def setup_mirror_rewrite(self): 663 def setup_mirror_rewrite(self):
548 self.d.setVar("PREMIRRORS", self.recipe_url + " " + self.mirror_url + " \n") 664 self.d.setVar("PREMIRRORS", self.recipe_url + " " + self.mirror_url)
549 665
550 @skipIfNoNetwork() 666 @skipIfNoNetwork()
551 def test_that_the_recipe_tarball_is_created_when_no_mirroring_is_used(self): 667 def test_that_the_recipe_tarball_is_created_when_no_mirroring_is_used(self):
@@ -570,9 +686,9 @@ class TarballNamingTest(FetcherTest):
570class GitShallowTarballNamingTest(FetcherTest): 686class GitShallowTarballNamingTest(FetcherTest):
571 def setUp(self): 687 def setUp(self):
572 super(GitShallowTarballNamingTest, self).setUp() 688 super(GitShallowTarballNamingTest, self).setUp()
573 self.recipe_url = "git://git.openembedded.org/bitbake" 689 self.recipe_url = "git://git.openembedded.org/bitbake;branch=master;protocol=https"
574 self.recipe_tarball = "gitshallow_git.openembedded.org.bitbake_82ea737-1_master.tar.gz" 690 self.recipe_tarball = "gitshallow_git.openembedded.org.bitbake_82ea737-1_master.tar.gz"
575 self.mirror_url = "git://github.com/openembedded/bitbake.git" 691 self.mirror_url = "git://github.com/openembedded/bitbake.git;protocol=https;branch=master"
576 self.mirror_tarball = "gitshallow_github.com.openembedded.bitbake.git_82ea737-1_master.tar.gz" 692 self.mirror_tarball = "gitshallow_github.com.openembedded.bitbake.git_82ea737-1_master.tar.gz"
577 693
578 self.d.setVar('BB_GIT_SHALLOW', '1') 694 self.d.setVar('BB_GIT_SHALLOW', '1')
@@ -580,7 +696,7 @@ class GitShallowTarballNamingTest(FetcherTest):
580 self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40') 696 self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40')
581 697
582 def setup_mirror_rewrite(self): 698 def setup_mirror_rewrite(self):
583 self.d.setVar("PREMIRRORS", self.recipe_url + " " + self.mirror_url + " \n") 699 self.d.setVar("PREMIRRORS", self.recipe_url + " " + self.mirror_url)
584 700
585 @skipIfNoNetwork() 701 @skipIfNoNetwork()
586 def test_that_the_tarball_is_named_after_recipe_url_when_no_mirroring_is_used(self): 702 def test_that_the_tarball_is_named_after_recipe_url_when_no_mirroring_is_used(self):
@@ -602,6 +718,39 @@ class GitShallowTarballNamingTest(FetcherTest):
602 self.assertIn(self.mirror_tarball, dir) 718 self.assertIn(self.mirror_tarball, dir)
603 719
604 720
721class CleanTarballTest(FetcherTest):
722 def setUp(self):
723 super(CleanTarballTest, self).setUp()
724 self.recipe_url = "git://git.openembedded.org/bitbake;protocol=https;branch=master"
725 self.recipe_tarball = "git2_git.openembedded.org.bitbake.tar.gz"
726
727 self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '1')
728 self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40')
729
730 @skipIfNoNetwork()
731 def test_that_the_tarball_contents_does_not_leak_info(self):
732 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
733
734 fetcher.download()
735
736 fetcher.unpack(self.unpackdir)
737 mtime = bb.process.run('git log --all -1 --format=%ct',
738 cwd=os.path.join(self.unpackdir, 'git'))
739 self.assertEqual(len(mtime), 2)
740 mtime = int(mtime[0])
741
742 archive = tarfile.open(os.path.join(self.dldir, self.recipe_tarball))
743 self.assertNotEqual(len(archive.members), 0)
744 for member in archive.members:
745 if member.name == ".":
746 continue
747 self.assertEqual(member.uname, 'oe', "user name for %s differs" % member.name)
748 self.assertEqual(member.uid, 0, "uid for %s differs" % member.name)
749 self.assertEqual(member.gname, 'oe', "group name for %s differs" % member.name)
750 self.assertEqual(member.gid, 0, "gid for %s differs" % member.name)
751 self.assertEqual(member.mtime, mtime, "mtime for %s differs" % member.name)
752
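The same reproducibility check as a standalone helper (a sketch; the 'oe'/uid 0 values are what the test above asserts, not a documented guarantee):

import tarfile

def assert_tarball_is_clean(path, expected_mtime):
    # Every member of a generated mirror tarball should carry neutral
    # ownership and a deterministic mtime, so the tarball does not leak
    # the build user or the time of the build.
    with tarfile.open(path) as archive:
        for member in archive.getmembers():
            if member.name == ".":
                continue
            assert member.uname == "oe" and member.uid == 0
            assert member.gname == "oe" and member.gid == 0
            assert member.mtime == expected_mtime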
753
605class FetcherLocalTest(FetcherTest): 754class FetcherLocalTest(FetcherTest):
606 def setUp(self): 755 def setUp(self):
607 def touch(fn): 756 def touch(fn):
@@ -613,12 +762,16 @@ class FetcherLocalTest(FetcherTest):
613 os.makedirs(self.localsrcdir) 762 os.makedirs(self.localsrcdir)
614 touch(os.path.join(self.localsrcdir, 'a')) 763 touch(os.path.join(self.localsrcdir, 'a'))
615 touch(os.path.join(self.localsrcdir, 'b')) 764 touch(os.path.join(self.localsrcdir, 'b'))
765 touch(os.path.join(self.localsrcdir, 'c@d'))
616 os.makedirs(os.path.join(self.localsrcdir, 'dir')) 766 os.makedirs(os.path.join(self.localsrcdir, 'dir'))
617 touch(os.path.join(self.localsrcdir, 'dir', 'c')) 767 touch(os.path.join(self.localsrcdir, 'dir', 'c'))
618 touch(os.path.join(self.localsrcdir, 'dir', 'd')) 768 touch(os.path.join(self.localsrcdir, 'dir', 'd'))
619 os.makedirs(os.path.join(self.localsrcdir, 'dir', 'subdir')) 769 os.makedirs(os.path.join(self.localsrcdir, 'dir', 'subdir'))
620 touch(os.path.join(self.localsrcdir, 'dir', 'subdir', 'e')) 770 touch(os.path.join(self.localsrcdir, 'dir', 'subdir', 'e'))
621 touch(os.path.join(self.localsrcdir, r'backslash\x2dsystemd-unit.device')) 771 touch(os.path.join(self.localsrcdir, r'backslash\x2dsystemd-unit.device'))
772 bb.process.run('tar cf archive.tar -C dir .', cwd=self.localsrcdir)
773 bb.process.run('tar czf archive.tar.gz -C dir .', cwd=self.localsrcdir)
774 bb.process.run('tar cjf archive.tar.bz2 -C dir .', cwd=self.localsrcdir)
622 self.d.setVar("FILESPATH", self.localsrcdir) 775 self.d.setVar("FILESPATH", self.localsrcdir)
623 776
624 def fetchUnpack(self, uris): 777 def fetchUnpack(self, uris):
@@ -632,10 +785,19 @@ class FetcherLocalTest(FetcherTest):
632 flst.sort() 785 flst.sort()
633 return flst 786 return flst
634 787
788 def test_local_checksum_fails_no_file(self):
789 self.d.setVar("SRC_URI", "file://404")
790 with self.assertRaises(bb.BBHandledException):
791 bb.fetch.get_checksum_file_list(self.d)
792
635 def test_local(self): 793 def test_local(self):
636 tree = self.fetchUnpack(['file://a', 'file://dir/c']) 794 tree = self.fetchUnpack(['file://a', 'file://dir/c'])
637 self.assertEqual(tree, ['a', 'dir/c']) 795 self.assertEqual(tree, ['a', 'dir/c'])
638 796
797 def test_local_at(self):
798 tree = self.fetchUnpack(['file://c@d'])
799 self.assertEqual(tree, ['c@d'])
800
639 def test_local_backslash(self): 801 def test_local_backslash(self):
640 tree = self.fetchUnpack([r'file://backslash\x2dsystemd-unit.device']) 802 tree = self.fetchUnpack([r'file://backslash\x2dsystemd-unit.device'])
641 self.assertEqual(tree, [r'backslash\x2dsystemd-unit.device']) 803 self.assertEqual(tree, [r'backslash\x2dsystemd-unit.device'])
@@ -673,57 +835,58 @@ class FetcherLocalTest(FetcherTest):
673 with self.assertRaises(bb.fetch2.UnpackError): 835 with self.assertRaises(bb.fetch2.UnpackError):
674 self.fetchUnpack(['file://a;subdir=/bin/sh']) 836 self.fetchUnpack(['file://a;subdir=/bin/sh'])
675 837
676 def test_local_gitfetch_usehead(self): 838 def test_local_striplevel(self):
839 tree = self.fetchUnpack(['file://archive.tar;subdir=bar;striplevel=1'])
840 self.assertEqual(tree, ['bar/c', 'bar/d', 'bar/subdir/e'])
841
842 def test_local_striplevel_gzip(self):
843 tree = self.fetchUnpack(['file://archive.tar.gz;subdir=bar;striplevel=1'])
844 self.assertEqual(tree, ['bar/c', 'bar/d', 'bar/subdir/e'])
845
846 def test_local_striplevel_bzip2(self):
847 tree = self.fetchUnpack(['file://archive.tar.bz2;subdir=bar;striplevel=1'])
848 self.assertEqual(tree, ['bar/c', 'bar/d', 'bar/subdir/e'])
849
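What striplevel=1 does to the member paths, roughly equivalent to tar --strip-components=1 (an illustrative sketch, not the fetcher's unpack code):

import os
import tarfile

def strip_members(archive, striplevel):
    # Drop the first `striplevel` path components of each member; members
    # that vanish entirely (e.g. ".") are skipped.
    for member in archive.getmembers():
        parts = member.name.split("/")[striplevel:]
        if not parts:
            continue
        member.name = os.path.join(*parts)
        yield member

# with tarfile.open("archive.tar") as tar:
#     tar.extractall(path="bar", members=strip_members(tar, 1))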
850 def dummyGitTest(self, suffix):
677 # Create dummy local Git repo 851 # Create dummy local Git repo
678 src_dir = tempfile.mkdtemp(dir=self.tempdir, 852 src_dir = tempfile.mkdtemp(dir=self.tempdir,
679 prefix='gitfetch_localusehead_') 853 prefix='gitfetch_localusehead_')
680 src_dir = os.path.abspath(src_dir) 854 self.gitdir = os.path.abspath(src_dir)
681 bb.process.run("git init", cwd=src_dir) 855 self.git_init()
682 bb.process.run("git commit --allow-empty -m'Dummy commit'", 856 self.git(['commit', '--allow-empty', '-m', 'Dummy commit'])
683 cwd=src_dir)
 684 # Use a branch other than master 857 # Use a branch other than master
685 bb.process.run("git checkout -b my-devel", cwd=src_dir) 858 self.git(['checkout', '-b', 'my-devel'])
686 bb.process.run("git commit --allow-empty -m'Dummy commit 2'", 859 self.git(['commit', '--allow-empty', '-m', 'Dummy commit 2'])
687 cwd=src_dir) 860 orig_rev = self.git(['rev-parse', 'HEAD']).strip()
688 stdout = bb.process.run("git rev-parse HEAD", cwd=src_dir)
689 orig_rev = stdout[0].strip()
690 861
691 # Fetch and check revision 862 # Fetch and check revision
692 self.d.setVar("SRCREV", "AUTOINC") 863 self.d.setVar("SRCREV", "AUTOINC")
693 url = "git://" + src_dir + ";protocol=file;usehead=1" 864 self.d.setVar("__BBSRCREV_SEEN", "1")
865 url = "git://" + self.gitdir + ";branch=master;protocol=file;" + suffix
694 fetcher = bb.fetch.Fetch([url], self.d) 866 fetcher = bb.fetch.Fetch([url], self.d)
695 fetcher.download() 867 fetcher.download()
696 fetcher.unpack(self.unpackdir) 868 fetcher.unpack(self.unpackdir)
697 stdout = bb.process.run("git rev-parse HEAD", 869 unpack_rev = self.git(['rev-parse', 'HEAD'],
698 cwd=os.path.join(self.unpackdir, 'git')) 870 cwd=os.path.join(self.unpackdir, 'git')).strip()
699 unpack_rev = stdout[0].strip()
700 self.assertEqual(orig_rev, unpack_rev) 871 self.assertEqual(orig_rev, unpack_rev)
701 872
873 def test_local_gitfetch_usehead(self):
874 self.dummyGitTest("usehead=1")
875
702 def test_local_gitfetch_usehead_withname(self): 876 def test_local_gitfetch_usehead_withname(self):
703 # Create dummy local Git repo 877 self.dummyGitTest("usehead=1;name=newName")
704 src_dir = tempfile.mkdtemp(dir=self.tempdir,
705 prefix='gitfetch_localusehead_')
706 src_dir = os.path.abspath(src_dir)
707 bb.process.run("git init", cwd=src_dir)
708 bb.process.run("git commit --allow-empty -m'Dummy commit'",
709 cwd=src_dir)
 710 # Use a branch other than master
711 bb.process.run("git checkout -b my-devel", cwd=src_dir)
712 bb.process.run("git commit --allow-empty -m'Dummy commit 2'",
713 cwd=src_dir)
714 stdout = bb.process.run("git rev-parse HEAD", cwd=src_dir)
715 orig_rev = stdout[0].strip()
716 878
717 # Fetch and check revision 879 def test_local_gitfetch_shared(self):
718 self.d.setVar("SRCREV", "AUTOINC") 880 self.dummyGitTest("usehead=1;name=sharedName")
719 url = "git://" + src_dir + ";protocol=file;usehead=1;name=newName" 881 alt = os.path.join(self.unpackdir, 'git/.git/objects/info/alternates')
720 fetcher = bb.fetch.Fetch([url], self.d) 882 self.assertTrue(os.path.exists(alt))
721 fetcher.download() 883
722 fetcher.unpack(self.unpackdir) 884 def test_local_gitfetch_noshared(self):
723 stdout = bb.process.run("git rev-parse HEAD", 885 self.d.setVar('BB_GIT_NOSHARED', '1')
724 cwd=os.path.join(self.unpackdir, 'git')) 886 self.unpackdir += '_noshared'
725 unpack_rev = stdout[0].strip() 887 self.dummyGitTest("usehead=1;name=noSharedName")
726 self.assertEqual(orig_rev, unpack_rev) 888 alt = os.path.join(self.unpackdir, 'git/.git/objects/info/alternates')
889 self.assertFalse(os.path.exists(alt))
727 890
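The alternates probe used by the two tests above, in isolation: a shared clone records its object donor in .git/objects/info/alternates, while BB_GIT_NOSHARED=1 yields a self-contained checkout with no such file. A minimal sketch:

import os

def is_shared_checkout(checkout_dir):
    # True if the checkout borrows objects from another repository via
    # the git alternates mechanism.
    alt = os.path.join(checkout_dir, ".git", "objects", "info", "alternates")
    return os.path.exists(alt)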
728class FetcherNoNetworkTest(FetcherTest): 891class FetcherNoNetworkTest(FetcherTest):
729 def setUp(self): 892 def setUp(self):
@@ -831,12 +994,12 @@ class FetcherNoNetworkTest(FetcherTest):
831class FetcherNetworkTest(FetcherTest): 994class FetcherNetworkTest(FetcherTest):
832 @skipIfNoNetwork() 995 @skipIfNoNetwork()
833 def test_fetch(self): 996 def test_fetch(self):
834 fetcher = bb.fetch.Fetch(["http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d) 997 fetcher = bb.fetch.Fetch(["https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d)
835 fetcher.download() 998 fetcher.download()
836 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749) 999 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
837 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.1.tar.gz"), 57892) 1000 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.1.tar.gz"), 57892)
838 self.d.setVar("BB_NO_NETWORK", "1") 1001 self.d.setVar("BB_NO_NETWORK", "1")
839 fetcher = bb.fetch.Fetch(["http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "http://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d) 1002 fetcher = bb.fetch.Fetch(["https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz", "https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz"], self.d)
840 fetcher.download() 1003 fetcher.download()
841 fetcher.unpack(self.unpackdir) 1004 fetcher.unpack(self.unpackdir)
842 self.assertEqual(len(os.listdir(self.unpackdir + "/bitbake-1.0/")), 9) 1005 self.assertEqual(len(os.listdir(self.unpackdir + "/bitbake-1.0/")), 9)
@@ -844,21 +1007,22 @@ class FetcherNetworkTest(FetcherTest):
844 1007
845 @skipIfNoNetwork() 1008 @skipIfNoNetwork()
846 def test_fetch_mirror(self): 1009 def test_fetch_mirror(self):
847 self.d.setVar("MIRRORS", "http://.*/.* http://downloads.yoctoproject.org/releases/bitbake") 1010 self.d.setVar("MIRRORS", "http://.*/.* https://downloads.yoctoproject.org/releases/bitbake")
848 fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d) 1011 fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
849 fetcher.download() 1012 fetcher.download()
850 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749) 1013 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
851 1014
852 @skipIfNoNetwork() 1015 @skipIfNoNetwork()
853 def test_fetch_mirror_of_mirror(self): 1016 def test_fetch_mirror_of_mirror(self):
854 self.d.setVar("MIRRORS", "http://.*/.* http://invalid2.yoctoproject.org/ \n http://invalid2.yoctoproject.org/.* http://downloads.yoctoproject.org/releases/bitbake") 1017 self.d.setVar("MIRRORS", "http://.*/.* http://invalid2.yoctoproject.org/ http://invalid2.yoctoproject.org/.* https://downloads.yoctoproject.org/releases/bitbake")
855 fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d) 1018 fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
856 fetcher.download() 1019 fetcher.download()
857 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749) 1020 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
858 1021
859 @skipIfNoNetwork() 1022 @skipIfNoNetwork()
860 def test_fetch_file_mirror_of_mirror(self): 1023 def test_fetch_file_mirror_of_mirror(self):
861 self.d.setVar("MIRRORS", "http://.*/.* file:///some1where/ \n file:///some1where/.* file://some2where/ \n file://some2where/.* http://downloads.yoctoproject.org/releases/bitbake") 1024 self.d.setVar("FILESPATH", ".")
1025 self.d.setVar("MIRRORS", "http://.*/.* file:///some1where/ file:///some1where/.* file://some2where/ file://some2where/.* https://downloads.yoctoproject.org/releases/bitbake")
862 fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d) 1026 fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
863 os.mkdir(self.dldir + "/some2where") 1027 os.mkdir(self.dldir + "/some2where")
864 fetcher.download() 1028 fetcher.download()
@@ -866,16 +1030,46 @@ class FetcherNetworkTest(FetcherTest):
866 1030
867 @skipIfNoNetwork() 1031 @skipIfNoNetwork()
868 def test_fetch_premirror(self): 1032 def test_fetch_premirror(self):
869 self.d.setVar("PREMIRRORS", "http://.*/.* http://downloads.yoctoproject.org/releases/bitbake") 1033 self.d.setVar("PREMIRRORS", "http://.*/.* https://downloads.yoctoproject.org/releases/bitbake")
870 fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d) 1034 fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz"], self.d)
871 fetcher.download() 1035 fetcher.download()
872 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749) 1036 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
873 1037
874 @skipIfNoNetwork() 1038 @skipIfNoNetwork()
1039 def test_fetch_specify_downloadfilename(self):
1040 fetcher = bb.fetch.Fetch(["https://downloads.yoctoproject.org/releases/bitbake/bitbake-1.0.tar.gz;downloadfilename=bitbake-v1.0.0.tar.gz"], self.d)
1041 fetcher.download()
1042 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-v1.0.0.tar.gz"), 57749)
1043
1044 @skipIfNoNetwork()
1045 def test_fetch_premirror_specify_downloadfilename_regex_uri(self):
1046 self.d.setVar("PREMIRRORS", "http://.*/.* https://downloads.yoctoproject.org/releases/bitbake/")
1047 fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/1.0.tar.gz;downloadfilename=bitbake-1.0.tar.gz"], self.d)
1048 fetcher.download()
1049 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
1050
 1051 # BZ13039
 1052 @skipIfNoNetwork()
1053 def test_fetch_premirror_specify_downloadfilename_specific_uri(self):
1054 self.d.setVar("PREMIRRORS", "http://invalid.yoctoproject.org/releases/bitbake https://downloads.yoctoproject.org/releases/bitbake")
1055 fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/1.0.tar.gz;downloadfilename=bitbake-1.0.tar.gz"], self.d)
1056 fetcher.download()
1057 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
1058
1059 @skipIfNoNetwork()
1060 def test_fetch_premirror_use_downloadfilename_to_fetch(self):
 1061 # Ensure downloadfilename is used when fetching from a premirror.
1062 self.d.setVar("PREMIRRORS", "http://.*/.* https://downloads.yoctoproject.org/releases/bitbake")
1063 fetcher = bb.fetch.Fetch(["http://invalid.yoctoproject.org/releases/bitbake/bitbake-1.1.tar.gz;downloadfilename=bitbake-1.0.tar.gz"], self.d)
1064 fetcher.download()
1065 self.assertEqual(os.path.getsize(self.dldir + "/bitbake-1.0.tar.gz"), 57749)
1066
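The behaviour these downloadfilename tests pin down, sketched: the ;downloadfilename= parameter, not the URL's basename, decides the name under DL_DIR (and mirror lookups use it too). A hypothetical helper for illustration, not the fetcher's code:

import os

def expected_localpath(dldir, url):
    path, _, params = url.partition(";")
    name = os.path.basename(path)
    for param in params.split(";"):
        if param.startswith("downloadfilename="):
            name = param.split("=", 1)[1]
    return os.path.join(dldir, name)

# expected_localpath("/dl", "https://example.org/1.0.tar.gz;downloadfilename=bitbake-1.0.tar.gz")
# -> "/dl/bitbake-1.0.tar.gz"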
1067 @skipIfNoNetwork()
875 def gitfetcher(self, url1, url2): 1068 def gitfetcher(self, url1, url2):
876 def checkrevision(self, fetcher): 1069 def checkrevision(self, fetcher):
877 fetcher.unpack(self.unpackdir) 1070 fetcher.unpack(self.unpackdir)
878 revision = bb.process.run("git rev-parse HEAD", shell=True, cwd=self.unpackdir + "/git")[0].strip() 1071 revision = self.git(['rev-parse', 'HEAD'],
1072 cwd=os.path.join(self.unpackdir, 'git')).strip()
879 self.assertEqual(revision, "270a05b0b4ba0959fe0624d2a4885d7b70426da5") 1073 self.assertEqual(revision, "270a05b0b4ba0959fe0624d2a4885d7b70426da5")
880 1074
881 self.d.setVar("BB_GENERATE_MIRROR_TARBALLS", "1") 1075 self.d.setVar("BB_GENERATE_MIRROR_TARBALLS", "1")
@@ -893,25 +1087,19 @@ class FetcherNetworkTest(FetcherTest):
893 1087
894 @skipIfNoNetwork() 1088 @skipIfNoNetwork()
895 def test_gitfetch(self): 1089 def test_gitfetch(self):
896 url1 = url2 = "git://git.openembedded.org/bitbake" 1090 url1 = url2 = "git://git.openembedded.org/bitbake;branch=master;protocol=https"
897 self.gitfetcher(url1, url2) 1091 self.gitfetcher(url1, url2)
898 1092
899 @skipIfNoNetwork() 1093 @skipIfNoNetwork()
900 def test_gitfetch_goodsrcrev(self): 1094 def test_gitfetch_goodsrcrev(self):
901 # SRCREV is set but matches rev= parameter 1095 # SRCREV is set but matches rev= parameter
902 url1 = url2 = "git://git.openembedded.org/bitbake;rev=270a05b0b4ba0959fe0624d2a4885d7b70426da5" 1096 url1 = url2 = "git://git.openembedded.org/bitbake;rev=270a05b0b4ba0959fe0624d2a4885d7b70426da5;branch=master;protocol=https"
903 self.gitfetcher(url1, url2) 1097 self.gitfetcher(url1, url2)
904 1098
905 @skipIfNoNetwork() 1099 @skipIfNoNetwork()
906 def test_gitfetch_badsrcrev(self): 1100 def test_gitfetch_badsrcrev(self):
907 # SRCREV is set but does not match rev= parameter 1101 # SRCREV is set but does not match rev= parameter
908 url1 = url2 = "git://git.openembedded.org/bitbake;rev=dead05b0b4ba0959fe0624d2a4885d7b70426da5" 1102 url1 = url2 = "git://git.openembedded.org/bitbake;rev=dead05b0b4ba0959fe0624d2a4885d7b70426da5;branch=master;protocol=https"
909 self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2)
910
911 @skipIfNoNetwork()
912 def test_gitfetch_tagandrev(self):
913 # SRCREV is set but does not match rev= parameter
914 url1 = url2 = "git://git.openembedded.org/bitbake;rev=270a05b0b4ba0959fe0624d2a4885d7b70426da5;tag=270a05b0b4ba0959fe0624d2a4885d7b70426da5"
915 self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2) 1103 self.assertRaises(bb.fetch.FetchError, self.gitfetcher, url1, url2)
916 1104
917 @skipIfNoNetwork() 1105 @skipIfNoNetwork()
@@ -920,7 +1108,7 @@ class FetcherNetworkTest(FetcherTest):
920 # `usehead=1' and instead fetch the specified SRCREV. See 1108 # `usehead=1' and instead fetch the specified SRCREV. See
921 # test_local_gitfetch_usehead() for a positive use of the usehead 1109 # test_local_gitfetch_usehead() for a positive use of the usehead
922 # feature. 1110 # feature.
923 url = "git://git.openembedded.org/bitbake;usehead=1" 1111 url = "git://git.openembedded.org/bitbake;usehead=1;branch=master;protocol=https"
924 self.assertRaises(bb.fetch.ParameterError, self.gitfetcher, url, url) 1112 self.assertRaises(bb.fetch.ParameterError, self.gitfetcher, url, url)
925 1113
926 @skipIfNoNetwork() 1114 @skipIfNoNetwork()
@@ -929,38 +1117,38 @@ class FetcherNetworkTest(FetcherTest):
929 # `usehead=1' and instead fetch the specified SRCREV. See 1117 # `usehead=1' and instead fetch the specified SRCREV. See
930 # test_local_gitfetch_usehead() for a positive use of the usehead 1118 # test_local_gitfetch_usehead() for a positive use of the usehead
931 # feature. 1119 # feature.
932 url = "git://git.openembedded.org/bitbake;usehead=1;name=newName" 1120 url = "git://git.openembedded.org/bitbake;usehead=1;name=newName;branch=master;protocol=https"
933 self.assertRaises(bb.fetch.ParameterError, self.gitfetcher, url, url) 1121 self.assertRaises(bb.fetch.ParameterError, self.gitfetcher, url, url)
934 1122
935 @skipIfNoNetwork() 1123 @skipIfNoNetwork()
936 def test_gitfetch_finds_local_tarball_for_mirrored_url_when_previous_downloaded_by_the_recipe_url(self): 1124 def test_gitfetch_finds_local_tarball_for_mirrored_url_when_previous_downloaded_by_the_recipe_url(self):
937 recipeurl = "git://git.openembedded.org/bitbake" 1125 recipeurl = "git://git.openembedded.org/bitbake;branch=master;protocol=https"
938 mirrorurl = "git://someserver.org/bitbake" 1126 mirrorurl = "git://someserver.org/bitbake;branch=master;protocol=https"
939 self.d.setVar("PREMIRRORS", "git://someserver.org/bitbake git://git.openembedded.org/bitbake \n") 1127 self.d.setVar("PREMIRRORS", "git://someserver.org/bitbake git://git.openembedded.org/bitbake")
940 self.gitfetcher(recipeurl, mirrorurl) 1128 self.gitfetcher(recipeurl, mirrorurl)
941 1129
942 @skipIfNoNetwork() 1130 @skipIfNoNetwork()
943 def test_gitfetch_finds_local_tarball_when_previous_downloaded_from_a_premirror(self): 1131 def test_gitfetch_finds_local_tarball_when_previous_downloaded_from_a_premirror(self):
944 recipeurl = "git://someserver.org/bitbake" 1132 recipeurl = "git://someserver.org/bitbake;branch=master;protocol=https"
945 self.d.setVar("PREMIRRORS", "git://someserver.org/bitbake git://git.openembedded.org/bitbake \n") 1133 self.d.setVar("PREMIRRORS", "git://someserver.org/bitbake git://git.openembedded.org/bitbake")
946 self.gitfetcher(recipeurl, recipeurl) 1134 self.gitfetcher(recipeurl, recipeurl)
947 1135
948 @skipIfNoNetwork() 1136 @skipIfNoNetwork()
949 def test_gitfetch_finds_local_repository_when_premirror_rewrites_the_recipe_url(self): 1137 def test_gitfetch_finds_local_repository_when_premirror_rewrites_the_recipe_url(self):
950 realurl = "git://git.openembedded.org/bitbake" 1138 realurl = "https://git.openembedded.org/bitbake"
951 recipeurl = "git://someserver.org/bitbake" 1139 recipeurl = "git://someserver.org/bitbake;protocol=https;branch=master"
952 self.sourcedir = self.unpackdir.replace("unpacked", "sourcemirror.git") 1140 self.sourcedir = self.unpackdir.replace("unpacked", "sourcemirror.git")
953 os.chdir(self.tempdir) 1141 os.chdir(self.tempdir)
954 bb.process.run("git clone %s %s 2> /dev/null" % (realurl, self.sourcedir), shell=True) 1142 self.git(['clone', realurl, self.sourcedir], cwd=self.tempdir)
955 self.d.setVar("PREMIRRORS", "%s git://%s;protocol=file \n" % (recipeurl, self.sourcedir)) 1143 self.d.setVar("PREMIRRORS", "%s git://%s;protocol=file" % (recipeurl, self.sourcedir))
956 self.gitfetcher(recipeurl, recipeurl) 1144 self.gitfetcher(recipeurl, recipeurl)
957 1145
958 @skipIfNoNetwork() 1146 @skipIfNoNetwork()
959 def test_git_submodule(self): 1147 def test_git_submodule(self):
960 # URL with ssh submodules 1148 # URL with ssh submodules
961 url = "gitsm://git.yoctoproject.org/git-submodule-test;branch=ssh-gitsm-tests;rev=049da4a6cb198d7c0302e9e8b243a1443cb809a7" 1149 url = "gitsm://git.yoctoproject.org/git-submodule-test;branch=ssh-gitsm-tests;rev=049da4a6cb198d7c0302e9e8b243a1443cb809a7;branch=master;protocol=https"
962 # Original URL (comment this if you have ssh access to git.yoctoproject.org) 1150 # Original URL (comment this if you have ssh access to git.yoctoproject.org)
963 url = "gitsm://git.yoctoproject.org/git-submodule-test;branch=master;rev=a2885dd7d25380d23627e7544b7bbb55014b16ee" 1151 url = "gitsm://git.yoctoproject.org/git-submodule-test;branch=master;rev=a2885dd7d25380d23627e7544b7bbb55014b16ee;branch=master;protocol=https"
964 fetcher = bb.fetch.Fetch([url], self.d) 1152 fetcher = bb.fetch.Fetch([url], self.d)
965 fetcher.download() 1153 fetcher.download()
966 # Previous cwd has been deleted 1154 # Previous cwd has been deleted
@@ -977,10 +1165,29 @@ class FetcherNetworkTest(FetcherTest):
977 self.assertTrue(os.path.exists(os.path.join(repo_path, 'bitbake-gitsm-test1', 'bitbake')), msg='submodule of submodule missing') 1165 self.assertTrue(os.path.exists(os.path.join(repo_path, 'bitbake-gitsm-test1', 'bitbake')), msg='submodule of submodule missing')
978 1166
979 @skipIfNoNetwork() 1167 @skipIfNoNetwork()
1168 def test_git_submodule_restricted_network_premirrors(self):
 1169 # this test ensures that premirrors are tried in a restricted network,
 1170 # i.e. one where BB_ALLOWED_NETWORKS does not contain the domain the URL uses
1171 url = "gitsm://github.com/grpc/grpc.git;protocol=https;name=grpc;branch=v1.60.x;rev=0ef13a7555dbaadd4633399242524129eef5e231"
 1172 # create a download directory to be used as a premirror later
1173 tempdir = tempfile.mkdtemp(prefix="bitbake-fetch-")
1174 dl_premirror = os.path.join(tempdir, "download-premirror")
1175 os.mkdir(dl_premirror)
1176 self.d.setVar("DL_DIR", dl_premirror)
1177 fetcher = bb.fetch.Fetch([url], self.d)
1178 fetcher.download()
 1179 # now use the premirror in a restricted network
1180 self.d.setVar("DL_DIR", self.dldir)
1181 self.d.setVar("PREMIRRORS", "gitsm://.*/.* gitsm://%s/git2/MIRRORNAME;protocol=file" % dl_premirror)
1182 self.d.setVar("BB_ALLOWED_NETWORKS", "*.some.domain")
1183 fetcher = bb.fetch.Fetch([url], self.d)
1184 fetcher.download()
1185
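What this restricted-network test relies on, sketched: premirror candidates are attempted before the upstream URL, so a file:// premirror can satisfy the fetch even though BB_ALLOWED_NETWORKS rejects the upstream host (the MIRRORNAME token above appears to be a fetcher placeholder for the mirror-encoded repository name). Hypothetical control flow only:

def fetch_with_premirrors(premirror_uris, upstream_uri, trusted, fetch_one):
    # Try each premirror first; only fall back to upstream when the
    # network policy allows it.
    for uri in premirror_uris:
        if fetch_one(uri):
            return uri
    if not trusted(upstream_uri):
        raise RuntimeError("untrusted network: %s" % upstream_uri)
    fetch_one(upstream_uri)
    return upstream_uri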
1186 @skipIfNoNetwork()
980 def test_git_submodule_dbus_broker(self): 1187 def test_git_submodule_dbus_broker(self):
 981 # The following external repositories have shown failures in fetch and unpack operations 1188 # The following external repositories have shown failures in fetch and unpack operations
982 # We want to avoid regressions! 1189 # We want to avoid regressions!
983 url = "gitsm://github.com/bus1/dbus-broker;protocol=git;rev=fc874afa0992d0c75ec25acb43d344679f0ee7d2;branch=main" 1190 url = "gitsm://github.com/bus1/dbus-broker;protocol=https;rev=fc874afa0992d0c75ec25acb43d344679f0ee7d2;branch=main"
984 fetcher = bb.fetch.Fetch([url], self.d) 1191 fetcher = bb.fetch.Fetch([url], self.d)
985 fetcher.download() 1192 fetcher.download()
986 # Previous cwd has been deleted 1193 # Previous cwd has been deleted
@@ -996,7 +1203,7 @@ class FetcherNetworkTest(FetcherTest):
996 1203
997 @skipIfNoNetwork() 1204 @skipIfNoNetwork()
998 def test_git_submodule_CLI11(self): 1205 def test_git_submodule_CLI11(self):
999 url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=bd4dc911847d0cde7a6b41dfa626a85aab213baf" 1206 url = "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=bd4dc911847d0cde7a6b41dfa626a85aab213baf;branch=main"
1000 fetcher = bb.fetch.Fetch([url], self.d) 1207 fetcher = bb.fetch.Fetch([url], self.d)
1001 fetcher.download() 1208 fetcher.download()
1002 # Previous cwd has been deleted 1209 # Previous cwd has been deleted
@@ -1011,12 +1218,12 @@ class FetcherNetworkTest(FetcherTest):
1011 @skipIfNoNetwork() 1218 @skipIfNoNetwork()
1012 def test_git_submodule_update_CLI11(self): 1219 def test_git_submodule_update_CLI11(self):
1013 """ Prevent regression on update detection not finding missing submodule, or modules without needed commits """ 1220 """ Prevent regression on update detection not finding missing submodule, or modules without needed commits """
1014 url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=cf6a99fa69aaefe477cc52e3ef4a7d2d7fa40714" 1221 url = "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=cf6a99fa69aaefe477cc52e3ef4a7d2d7fa40714;branch=main"
1015 fetcher = bb.fetch.Fetch([url], self.d) 1222 fetcher = bb.fetch.Fetch([url], self.d)
1016 fetcher.download() 1223 fetcher.download()
1017 1224
1018 # CLI11 that pulls in a newer nlohmann-json 1225 # CLI11 that pulls in a newer nlohmann-json
1019 url = "gitsm://github.com/CLIUtils/CLI11;protocol=git;rev=49ac989a9527ee9bb496de9ded7b4872c2e0e5ca" 1226 url = "gitsm://github.com/CLIUtils/CLI11;protocol=https;rev=49ac989a9527ee9bb496de9ded7b4872c2e0e5ca;branch=main"
1020 fetcher = bb.fetch.Fetch([url], self.d) 1227 fetcher = bb.fetch.Fetch([url], self.d)
1021 fetcher.download() 1228 fetcher.download()
1022 # Previous cwd has been deleted 1229 # Previous cwd has been deleted
@@ -1030,7 +1237,7 @@ class FetcherNetworkTest(FetcherTest):
1030 1237
1031 @skipIfNoNetwork() 1238 @skipIfNoNetwork()
1032 def test_git_submodule_aktualizr(self): 1239 def test_git_submodule_aktualizr(self):
1033 url = "gitsm://github.com/advancedtelematic/aktualizr;branch=master;protocol=git;rev=d00d1a04cc2366d1a5f143b84b9f507f8bd32c44" 1240 url = "gitsm://github.com/advancedtelematic/aktualizr;branch=master;protocol=https;rev=d00d1a04cc2366d1a5f143b84b9f507f8bd32c44"
1034 fetcher = bb.fetch.Fetch([url], self.d) 1241 fetcher = bb.fetch.Fetch([url], self.d)
1035 fetcher.download() 1242 fetcher.download()
1036 # Previous cwd has been deleted 1243 # Previous cwd has been deleted
@@ -1050,7 +1257,7 @@ class FetcherNetworkTest(FetcherTest):
1050 """ Prevent regression on deeply nested submodules not being checked out properly, even though they were fetched. """ 1257 """ Prevent regression on deeply nested submodules not being checked out properly, even though they were fetched. """
1051 1258
1052 # This repository also has submodules where the module (name), path and url do not align 1259 # This repository also has submodules where the module (name), path and url do not align
1053 url = "gitsm://github.com/azure/iotedge.git;protocol=git;rev=d76e0316c6f324345d77c48a83ce836d09392699" 1260 url = "gitsm://github.com/azure/iotedge.git;protocol=https;rev=d76e0316c6f324345d77c48a83ce836d09392699;branch=main"
1054 fetcher = bb.fetch.Fetch([url], self.d) 1261 fetcher = bb.fetch.Fetch([url], self.d)
1055 fetcher.download() 1262 fetcher.download()
1056 # Previous cwd has been deleted 1263 # Previous cwd has been deleted
@@ -1073,9 +1280,17 @@ class FetcherNetworkTest(FetcherTest):
1073 self.assertTrue(os.path.exists(os.path.join(repo_path, 'edgelet/hsm-sys/azure-iot-hsm-c/deps/utpm/deps/c-utility/testtools/umock-c/deps/ctest/README.md')), msg='Missing submodule checkout') 1280 self.assertTrue(os.path.exists(os.path.join(repo_path, 'edgelet/hsm-sys/azure-iot-hsm-c/deps/utpm/deps/c-utility/testtools/umock-c/deps/ctest/README.md')), msg='Missing submodule checkout')
1074 self.assertTrue(os.path.exists(os.path.join(repo_path, 'edgelet/hsm-sys/azure-iot-hsm-c/deps/utpm/deps/c-utility/testtools/umock-c/deps/testrunner/readme.md')), msg='Missing submodule checkout') 1281 self.assertTrue(os.path.exists(os.path.join(repo_path, 'edgelet/hsm-sys/azure-iot-hsm-c/deps/utpm/deps/c-utility/testtools/umock-c/deps/testrunner/readme.md')), msg='Missing submodule checkout')
1075 1282
1283 @skipIfNoNetwork()
1284 def test_git_submodule_reference_to_parent(self):
1285 self.recipe_url = "gitsm://github.com/gflags/gflags.git;protocol=https;branch=master"
1286 self.d.setVar("SRCREV", "14e1138441bbbb584160cb1c0a0426ec1bac35f1")
1287 with Timeout(60):
1288 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
1289 with self.assertRaises(bb.fetch2.FetchError):
1290 fetcher.download()
1291
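Timeout here is a helper defined earlier in this file; a minimal SIGALRM-based equivalent would look roughly like this (an assumption about its shape; Unix-only):

import signal

class Timeout:
    # Hypothetical sketch of a deadline context manager; the real helper
    # in this file may differ in detail.
    def __init__(self, seconds):
        self.seconds = seconds

    def handler(self, signum, frame):
        raise TimeoutError("operation timed out")

    def __enter__(self):
        signal.signal(signal.SIGALRM, self.handler)
        signal.alarm(self.seconds)

    def __exit__(self, exc_type, exc_value, traceback):
        signal.alarm(0)   # cancel any pending alarm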
1076class SVNTest(FetcherTest): 1292class SVNTest(FetcherTest):
1077 def skipIfNoSvn(): 1293 def skipIfNoSvn():
1078 import shutil
1079 if not shutil.which("svn"): 1294 if not shutil.which("svn"):
1080 return unittest.skip("svn not installed, tests being skipped") 1295 return unittest.skip("svn not installed, tests being skipped")
1081 1296
@@ -1107,8 +1322,9 @@ class SVNTest(FetcherTest):
1107 cwd=repo_dir) 1322 cwd=repo_dir)
1108 1323
1109 bb.process.run("svn co %s svnfetch_co" % self.repo_url, cwd=self.tempdir) 1324 bb.process.run("svn co %s svnfetch_co" % self.repo_url, cwd=self.tempdir)
 1110 # Github will emulate SVN. Use this to check if we're downloading... 1325 # Github won't emulate SVN anymore (see https://github.blog/2023-01-20-sunsetting-subversion-support/)
 1111 bb.process.run("svn propset svn:externals 'bitbake svn://vcs.pcre.org/pcre2/code' .", 1326 # Use a still-accessible svn repo (trunk only, to avoid longer downloads)
1327 bb.process.run("svn propset svn:externals 'bitbake https://svn.apache.org/repos/asf/serf/trunk' .",
1112 cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk')) 1328 cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk'))
1113 bb.process.run("svn commit --non-interactive -m 'Add external'", 1329 bb.process.run("svn commit --non-interactive -m 'Add external'",
1114 cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk')) 1330 cwd=os.path.join(self.tempdir, 'svnfetch_co', 'trunk'))
@@ -1136,8 +1352,8 @@ class SVNTest(FetcherTest):
1136 1352
1137 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk')), msg="Missing trunk") 1353 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk')), msg="Missing trunk")
1138 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk', 'README.md')), msg="Missing contents") 1354 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk', 'README.md')), msg="Missing contents")
1139 self.assertFalse(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/trunk')), msg="External dir should NOT exist") 1355 self.assertFalse(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/protocols')), msg="External dir should NOT exist")
 1140 self.assertFalse(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/trunk', 'README')), msg="External README should NOT exist") 1356 self.assertFalse(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/protocols', 'fcgi_buckets.h')), msg="External fcgi_buckets.h should NOT exist")
1141 1357
1142 @skipIfNoSvn() 1358 @skipIfNoSvn()
1143 def test_external_svn(self): 1359 def test_external_svn(self):
@@ -1150,66 +1366,71 @@ class SVNTest(FetcherTest):
1150 1366
1151 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk')), msg="Missing trunk") 1367 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk')), msg="Missing trunk")
1152 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk', 'README.md')), msg="Missing contents") 1368 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk', 'README.md')), msg="Missing contents")
1153 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/trunk')), msg="External dir should exist") 1369 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/protocols')), msg="External dir should exist")
 1154 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/trunk', 'README')), msg="External README should exist") 1370 self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'trunk/bitbake/protocols', 'fcgi_buckets.h')), msg="External fcgi_buckets.h should exist")
1155 1371
1156class TrustedNetworksTest(FetcherTest): 1372class TrustedNetworksTest(FetcherTest):
1157 def test_trusted_network(self): 1373 def test_trusted_network(self):
1158 # Ensure trusted_network returns False when the host IS in the list. 1374 # Ensure trusted_network returns False when the host IS in the list.
1159 url = "git://Someserver.org/foo;rev=1" 1375 url = "git://Someserver.org/foo;rev=1;branch=master"
1160 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org someserver.org server2.org server3.org") 1376 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org someserver.org server2.org server3.org")
1161 self.assertTrue(bb.fetch.trusted_network(self.d, url)) 1377 self.assertTrue(bb.fetch.trusted_network(self.d, url))
1162 1378
1163 def test_wild_trusted_network(self): 1379 def test_wild_trusted_network(self):
1164 # Ensure trusted_network returns true when the *.host IS in the list. 1380 # Ensure trusted_network returns true when the *.host IS in the list.
1165 url = "git://Someserver.org/foo;rev=1" 1381 url = "git://Someserver.org/foo;rev=1;branch=master"
1166 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org") 1382 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org")
1167 self.assertTrue(bb.fetch.trusted_network(self.d, url)) 1383 self.assertTrue(bb.fetch.trusted_network(self.d, url))
1168 1384
1169 def test_prefix_wild_trusted_network(self): 1385 def test_prefix_wild_trusted_network(self):
1170 # Ensure trusted_network returns true when the prefix matches *.host. 1386 # Ensure trusted_network returns true when the prefix matches *.host.
1171 url = "git://git.Someserver.org/foo;rev=1" 1387 url = "git://git.Someserver.org/foo;rev=1;branch=master"
1172 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org") 1388 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org")
1173 self.assertTrue(bb.fetch.trusted_network(self.d, url)) 1389 self.assertTrue(bb.fetch.trusted_network(self.d, url))
1174 1390
1175 def test_two_prefix_wild_trusted_network(self): 1391 def test_two_prefix_wild_trusted_network(self):
1176 # Ensure trusted_network returns true when the prefix matches *.host. 1392 # Ensure trusted_network returns true when the prefix matches *.host.
1177 url = "git://something.git.Someserver.org/foo;rev=1" 1393 url = "git://something.git.Someserver.org/foo;rev=1;branch=master"
1178 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org") 1394 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org *.someserver.org server2.org server3.org")
1179 self.assertTrue(bb.fetch.trusted_network(self.d, url)) 1395 self.assertTrue(bb.fetch.trusted_network(self.d, url))
1180 1396
1181 def test_port_trusted_network(self): 1397 def test_port_trusted_network(self):
1182 # Ensure trusted_network returns True, even if the url specifies a port. 1398 # Ensure trusted_network returns True, even if the url specifies a port.
1183 url = "git://someserver.org:8080/foo;rev=1" 1399 url = "git://someserver.org:8080/foo;rev=1;branch=master"
1184 self.d.setVar("BB_ALLOWED_NETWORKS", "someserver.org") 1400 self.d.setVar("BB_ALLOWED_NETWORKS", "someserver.org")
1185 self.assertTrue(bb.fetch.trusted_network(self.d, url)) 1401 self.assertTrue(bb.fetch.trusted_network(self.d, url))
1186 1402
1187 def test_untrusted_network(self): 1403 def test_untrusted_network(self):
1188 # Ensure trusted_network returns False when the host is NOT in the list. 1404 # Ensure trusted_network returns False when the host is NOT in the list.
1189 url = "git://someserver.org/foo;rev=1" 1405 url = "git://someserver.org/foo;rev=1;branch=master"
1190 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org server2.org server3.org") 1406 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org server2.org server3.org")
1191 self.assertFalse(bb.fetch.trusted_network(self.d, url)) 1407 self.assertFalse(bb.fetch.trusted_network(self.d, url))
1192 1408
1193 def test_wild_untrusted_network(self): 1409 def test_wild_untrusted_network(self):
1194 # Ensure trusted_network returns False when the host is NOT in the list. 1410 # Ensure trusted_network returns False when the host is NOT in the list.
1195 url = "git://*.someserver.org/foo;rev=1" 1411 url = "git://*.someserver.org/foo;rev=1;branch=master"
1196 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org server2.org server3.org") 1412 self.d.setVar("BB_ALLOWED_NETWORKS", "server1.org server2.org server3.org")
1197 self.assertFalse(bb.fetch.trusted_network(self.d, url)) 1413 self.assertFalse(bb.fetch.trusted_network(self.d, url))
1198 1414
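The matching rule these tests document, restated as a sketch (illustrative; the real check is bb.fetch.trusted_network()): a *.host entry admits host and any subdomain, matching is case-insensitive, and a port on the URL is ignored.

def host_allowed(host, allowed_networks):
    host = host.split(":")[0].lower()      # drop any port, normalise case
    for entry in allowed_networks.split():
        entry = entry.lower()
        if entry.startswith("*."):
            if host == entry[2:] or host.endswith("." + entry[2:]):
                return True
        elif host == entry:
            return True
    return False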
1199class URLHandle(unittest.TestCase): 1415class URLHandle(unittest.TestCase):
1200 1416 # Quote password as per RFC3986
1417 password = urllib.parse.quote(r"!#$%^&*()-_={}[]\|:?,.<>~`", r"!$&'/()*+,;=")
1201 datatable = { 1418 datatable = {
1202 "http://www.google.com/index.html" : ('http', 'www.google.com', '/index.html', '', '', {}), 1419 "http://www.google.com/index.html" : ('http', 'www.google.com', '/index.html', '', '', {}),
1203 "cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'}), 1420 "cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'}),
1204 "cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', collections.OrderedDict([('tag', 'V0-99-81'), ('module', 'familiar/dist/ipkg')])), 1421 "cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg" : ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', collections.OrderedDict([('tag', 'V0-99-81'), ('module', 'familiar/dist/ipkg')])),
1205 "git://git.openembedded.org/bitbake;branch=@foo" : ('git', 'git.openembedded.org', '/bitbake', '', '', {'branch': '@foo'}), 1422 "git://git.openembedded.org/bitbake;branch=@foo;protocol=https" : ('git', 'git.openembedded.org', '/bitbake', '', '', {'branch': '@foo', 'protocol' : 'https'}),
1206 "file://somelocation;someparam=1": ('file', '', 'somelocation', '', '', {'someparam': '1'}), 1423 "file://somelocation;someparam=1": ('file', '', 'somelocation', '', '', {'someparam': '1'}),
1424 "file://example@.service": ('file', '', 'example@.service', '', '', {}),
1425 "https://somesite.com/somerepo.git;user=anyUser:idtoken=1234" : ('https', 'somesite.com', '/somerepo.git', '', '', {'user': 'anyUser:idtoken=1234'}),
1426 'git://s.o-me_ONE:%s@git.openembedded.org/bitbake;branch=main;protocol=https' % password: ('git', 'git.openembedded.org', '/bitbake', 's.o-me_ONE', password, {'branch': 'main', 'protocol' : 'https'}),
1207 } 1427 }
1208 # we require a pathname to encodeurl but users can still pass such urls to 1428 # we require a pathname to encodeurl but users can still pass such urls to
1209 # decodeurl and we need to handle them 1429 # decodeurl and we need to handle them
1210 decodedata = datatable.copy() 1430 decodedata = datatable.copy()
1211 decodedata.update({ 1431 decodedata.update({
1212 "http://somesite.net;someparam=1": ('http', 'somesite.net', '/', '', '', {'someparam': '1'}), 1432 "http://somesite.net;someparam=1": ('http', 'somesite.net', '/', '', '', {'someparam': '1'}),
1433 "npmsw://some.registry.url;package=@pkg;version=latest": ('npmsw', 'some.registry.url', '/', '', '', {'package': '@pkg', 'version': 'latest'}),
1213 }) 1434 })
1214 1435
1215 def test_decodeurl(self): 1436 def test_decodeurl(self):
@@ -1220,138 +1441,179 @@ class URLHandle(unittest.TestCase):
1220 def test_encodeurl(self): 1441 def test_encodeurl(self):
1221 for k, v in self.datatable.items(): 1442 for k, v in self.datatable.items():
1222 result = bb.fetch.encodeurl(v) 1443 result = bb.fetch.encodeurl(v)
1444 if result.startswith("file:"):
1445 result = urllib.parse.unquote(result)
1223 self.assertEqual(result, k) 1446 self.assertEqual(result, k)
1224 1447
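A usage sketch of the round trip the two tables exercise, assuming the 6-tuple shape shown in datatable above:

import bb.fetch

url = "git://git.openembedded.org/bitbake;branch=@foo;protocol=https"
decoded = bb.fetch.decodeurl(url)   # (type, host, path, user, pswd, params)
assert decoded[5]["branch"] == "@foo"
# encodeurl is the inverse, so decode(encode(x)) should give x back
assert bb.fetch.decodeurl(bb.fetch.encodeurl(decoded)) == decoded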
1225class FetchLatestVersionTest(FetcherTest): 1448class FetchLatestVersionTest(FetcherTest):
1226 1449
1227 test_git_uris = { 1450 test_git_uris = {
1228 # version pattern "X.Y.Z" 1451 # version pattern "X.Y.Z"
1229 ("mx-1.0", "git://github.com/clutter-project/mx.git;branch=mx-1.4", "9b1db6b8060bd00b121a692f942404a24ae2960f", "") 1452 ("mx-1.0", "git://github.com/clutter-project/mx.git;branch=mx-1.4;protocol=https", "9b1db6b8060bd00b121a692f942404a24ae2960f", "", "")
1230 : "1.99.4", 1453 : "1.99.4",
1231 # version pattern "vX.Y" 1454 # version pattern "vX.Y"
1232 # mirror of git.infradead.org since network issues interfered with testing 1455 # mirror of git.infradead.org since network issues interfered with testing
1233 ("mtd-utils", "git://git.yoctoproject.org/mtd-utils.git", "ca39eb1d98e736109c64ff9c1aa2a6ecca222d8f", "") 1456 ("mtd-utils", "git://git.yoctoproject.org/mtd-utils.git;branch=master;protocol=https", "ca39eb1d98e736109c64ff9c1aa2a6ecca222d8f", "", "")
1234 : "1.5.0", 1457 : "1.5.0",
1235 # version pattern "pkg_name-X.Y" 1458 # version pattern "pkg_name-X.Y"
1236 # mirror of git://anongit.freedesktop.org/git/xorg/proto/presentproto since network issues interfered with testing 1459 # mirror of git://anongit.freedesktop.org/git/xorg/proto/presentproto since network issues interfered with testing
1237 ("presentproto", "git://git.yoctoproject.org/bbfetchtests-presentproto", "24f3a56e541b0a9e6c6ee76081f441221a120ef9", "") 1460 ("presentproto", "git://git.yoctoproject.org/bbfetchtests-presentproto;branch=master;protocol=https", "24f3a56e541b0a9e6c6ee76081f441221a120ef9", "", "")
1238 : "1.0", 1461 : "1.0",
1239 # version pattern "pkg_name-vX.Y.Z" 1462 # version pattern "pkg_name-vX.Y.Z"
1240 ("dtc", "git://git.qemu.org/dtc.git", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "") 1463 ("dtc", "git://git.yoctoproject.org/bbfetchtests-dtc.git;branch=master;protocol=https", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "", "")
1241 : "1.4.0", 1464 : "1.4.0",
1242 # combination version pattern 1465 # combination version pattern
1243 ("sysprof", "git://gitlab.gnome.org/GNOME/sysprof.git;protocol=https", "cd44ee6644c3641507fb53b8a2a69137f2971219", "") 1466 ("sysprof", "git://git.yoctoproject.org/sysprof.git;protocol=https;branch=master", "cd44ee6644c3641507fb53b8a2a69137f2971219", "", "")
1244 : "1.2.0", 1467 : "1.2.0",
1245 ("u-boot-mkimage", "git://git.denx.de/u-boot.git;branch=master;protocol=git", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "") 1468 ("u-boot-mkimage", "git://source.denx.de/u-boot/u-boot.git;branch=master;protocol=https", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "", "")
1246 : "2014.01", 1469 : "2014.01",
1247 # version pattern "yyyymmdd" 1470 # version pattern "yyyymmdd"
1248 ("mobile-broadband-provider-info", "git://gitlab.gnome.org/GNOME/mobile-broadband-provider-info.git;protocol=https", "4ed19e11c2975105b71b956440acdb25d46a347d", "") 1471 ("mobile-broadband-provider-info", "git://git.yoctoproject.org/mobile-broadband-provider-info.git;protocol=https;branch=master", "4ed19e11c2975105b71b956440acdb25d46a347d", "", "")
1249 : "20120614", 1472 : "20120614",
1250 # packages with a valid UPSTREAM_CHECK_GITTAGREGEX 1473 # packages with a valid UPSTREAM_CHECK_GITTAGREGEX
1251 # mirror of git://anongit.freedesktop.org/xorg/driver/xf86-video-omap since network issues interfered with testing 1474 # mirror of git://anongit.freedesktop.org/xorg/driver/xf86-video-omap since network issues interfered with testing
1252 ("xf86-video-omap", "git://git.yoctoproject.org/bbfetchtests-xf86-video-omap", "ae0394e687f1a77e966cf72f895da91840dffb8f", "(?P<pver>(\d+\.(\d\.?)*))") 1475 ("xf86-video-omap", "git://git.yoctoproject.org/bbfetchtests-xf86-video-omap;branch=master;protocol=https", "ae0394e687f1a77e966cf72f895da91840dffb8f", r"(?P<pver>(\d+\.(\d\.?)*))", "")
1253 : "0.4.3", 1476 : "0.4.3",
1254 ("build-appliance-image", "git://git.yoctoproject.org/poky", "b37dd451a52622d5b570183a81583cc34c2ff555", "(?P<pver>(([0-9][\.|_]?)+[0-9]))") 1477 ("build-appliance-image", "git://git.yoctoproject.org/poky;branch=master;protocol=https", "b37dd451a52622d5b570183a81583cc34c2ff555", r"(?P<pver>(([0-9][\.|_]?)+[0-9]))", "")
1255 : "11.0.0", 1478 : "11.0.0",
1256 ("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot", "cd437ecbd8986c894442f8fce1e0061e20f04dee", "chkconfig\-(?P<pver>((\d+[\.\-_]*)+))") 1479 ("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot;protocol=https", "cd437ecbd8986c894442f8fce1e0061e20f04dee", r"chkconfig\-(?P<pver>((\d+[\.\-_]*)+))", "")
1257 : "1.3.59", 1480 : "1.3.59",
1258 ("remake", "git://github.com/rocky/remake.git", "f05508e521987c8494c92d9c2871aec46307d51d", "(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))") 1481 ("remake", "git://github.com/rocky/remake.git;protocol=https;branch=master", "f05508e521987c8494c92d9c2871aec46307d51d", r"(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))", "")
1259 : "3.82+dbg0.9", 1482 : "3.82+dbg0.9",
1483 ("sysdig", "git://github.com/draios/sysdig.git;branch=dev;protocol=https", "4fb6288275f567f63515df0ff0a6518043ecfa9b", r"^(?P<pver>\d+(\.\d+)+)", "10.0.0")
1484 : "0.28.0",
1260 } 1485 }
1261 1486
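How an UPSTREAM_CHECK_GITTAGREGEX entry above is applied, sketched: the regex must expose a 'pver' named group, which is extracted from each candidate tag when determining the latest upstream version.

import re

def version_from_tag(tag, tagregex):
    # Returns the pver group if the tag matches, else None.
    match = re.search(tagregex, tag)
    return match.group("pver") if match else None

# version_from_tag("chkconfig-1.3.59", r"chkconfig\-(?P<pver>((\d+[\.\-_]*)+))")
# -> "1.3.59"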
1487 WgetTestData = collections.namedtuple("WgetTestData", ["pn", "path", "pv", "check_uri", "check_regex"], defaults=[None, None, None])
1262 test_wget_uris = { 1488 test_wget_uris = {
1263 # 1489 #
1264 # packages with versions inside directory name 1490 # packages with versions inside directory name
1265 # 1491 #
1266 # http://kernel.org/pub/linux/utils/util-linux/v2.23/util-linux-2.24.2.tar.bz2 1492 # http://kernel.org/pub/linux/utils/util-linux/v2.23/util-linux-2.24.2.tar.bz2
1267 ("util-linux", "/pub/linux/utils/util-linux/v2.23/util-linux-2.24.2.tar.bz2", "", "") 1493 WgetTestData("util-linux", "/pub/linux/utils/util-linux/v2.23/util-linux-2.24.2.tar.bz2")
1268 : "2.24.2", 1494 : "2.24.2",
1269 # http://www.abisource.com/downloads/enchant/1.6.0/enchant-1.6.0.tar.gz 1495 # http://www.abisource.com/downloads/enchant/1.6.0/enchant-1.6.0.tar.gz
1270 ("enchant", "/downloads/enchant/1.6.0/enchant-1.6.0.tar.gz", "", "") 1496 WgetTestData("enchant", "/downloads/enchant/1.6.0/enchant-1.6.0.tar.gz")
1271 : "1.6.0", 1497 : "1.6.0",
1272 # http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz 1498 # http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz
1273 ("cmake", "/files/v2.8/cmake-2.8.12.1.tar.gz", "", "") 1499 WgetTestData("cmake", "/files/v2.8/cmake-2.8.12.1.tar.gz")
1274 : "2.8.12.1", 1500 : "2.8.12.1",
1501 # https://download.gnome.org/sources/libxml2/2.9/libxml2-2.9.14.tar.xz
1502 WgetTestData("libxml2", "/software/libxml2/2.9/libxml2-2.9.14.tar.xz")
1503 : "2.10.3",
1275 # 1504 #
1276 # packages with versions only in current directory 1505 # packages with versions only in current directory
1277 # 1506 #
1278 # http://downloads.yoctoproject.org/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2 1507 # https://downloads.yoctoproject.org/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2
1279 ("eglic", "/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2", "", "") 1508 WgetTestData("eglic", "/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2")
1280 : "2.19", 1509 : "2.19",
1281 # http://downloads.yoctoproject.org/releases/gnu-config/gnu-config-20120814.tar.bz2 1510 # https://downloads.yoctoproject.org/releases/gnu-config/gnu-config-20120814.tar.bz2
1282 ("gnu-config", "/releases/gnu-config/gnu-config-20120814.tar.bz2", "", "") 1511 WgetTestData("gnu-config", "/releases/gnu-config/gnu-config-20120814.tar.bz2")
1283 : "20120814", 1512 : "20120814",
1284 # 1513 #
1285 # packages with "99" in the name of possible version 1514 # packages with "99" in the name of possible version
1286 # 1515 #
1287 # http://freedesktop.org/software/pulseaudio/releases/pulseaudio-4.0.tar.xz 1516 # http://freedesktop.org/software/pulseaudio/releases/pulseaudio-4.0.tar.xz
1288 ("pulseaudio", "/software/pulseaudio/releases/pulseaudio-4.0.tar.xz", "", "") 1517 WgetTestData("pulseaudio", "/software/pulseaudio/releases/pulseaudio-4.0.tar.xz")
1289 : "5.0", 1518 : "5.0",
1290 # http://xorg.freedesktop.org/releases/individual/xserver/xorg-server-1.15.1.tar.bz2 1519 # http://xorg.freedesktop.org/releases/individual/xserver/xorg-server-1.15.1.tar.bz2
1291 ("xserver-xorg", "/releases/individual/xserver/xorg-server-1.15.1.tar.bz2", "", "") 1520 WgetTestData("xserver-xorg", "/releases/individual/xserver/xorg-server-1.15.1.tar.bz2")
1292 : "1.15.1", 1521 : "1.15.1",
1293 # 1522 #
1294 # packages with valid UPSTREAM_CHECK_URI and UPSTREAM_CHECK_REGEX 1523 # packages with valid UPSTREAM_CHECK_URI and UPSTREAM_CHECK_REGEX
1295 # 1524 #
1296 # http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2 1525 # http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2
1297 # https://github.com/apple/cups/releases 1526 # https://github.com/apple/cups/releases
1298 ("cups", "/software/1.7.2/cups-1.7.2-source.tar.bz2", "/apple/cups/releases", "(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz") 1527 WgetTestData("cups", "/software/1.7.2/cups-1.7.2-source.tar.bz2", check_uri="/apple/cups/releases", check_regex=r"(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz")
1299 : "2.0.0", 1528 : "2.0.0",
1300 # http://download.oracle.com/berkeley-db/db-5.3.21.tar.gz 1529 # http://download.oracle.com/berkeley-db/db-5.3.21.tar.gz
1301 # http://ftp.debian.org/debian/pool/main/d/db5.3/ 1530 # http://ftp.debian.org/debian/pool/main/d/db5.3/
1302 ("db", "/berkeley-db/db-5.3.21.tar.gz", "/debian/pool/main/d/db5.3/", "(?P<name>db5\.3_)(?P<pver>\d+(\.\d+)+).+\.orig\.tar\.xz") 1531 WgetTestData("db", "/berkeley-db/db-5.3.21.tar.gz", check_uri="/debian/pool/main/d/db5.3/", check_regex=r"(?P<name>db5\.3_)(?P<pver>\d+(\.\d+)+).+\.orig\.tar\.xz")
1303 : "5.3.10", 1532 : "5.3.10",
1533 #
1534 # packages where the tarball compression changed in the new version
1535 #
1536 # http://ftp.debian.org/debian/pool/main/m/minicom/minicom_2.7.1.orig.tar.gz
1537 WgetTestData("minicom", "/debian/pool/main/m/minicom/minicom_2.7.1.orig.tar.gz")
1538 : "2.8",
1539
1540 #
1541 # packages where the path doesn't actually contain the filename, so downloadfilename should be respected
1542 #
1543 WgetTestData("miniupnpd", "/software/miniupnp/download.php?file=miniupnpd_2.1.20191006.tar.gz;downloadfilename=miniupnpd_2.1.20191006.tar.gz", pv="2.1.20191006", check_uri="/software/miniupnp/download.php", check_regex=r"miniupnpd-(?P<pver>\d+(\.\d+)+)\.tar")
1544 : "2.3.7",
1304 } 1545 }
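[Editor's note: the miniupnpd entry above is the one case where the URL path is a query string rather than a file name, so the downloadfilename parameter supplies the local name. A trivial illustration of the split, not fetcher code:

    uri = ("/software/miniupnp/download.php?file=miniupnpd_2.1.20191006.tar.gz"
           ";downloadfilename=miniupnpd_2.1.20191006.tar.gz")
    path, param = uri.split(";", 1)
    assert param.split("=", 1) == ["downloadfilename", "miniupnpd_2.1.20191006.tar.gz"]
]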
1305 1546
1547 test_crate_uris = {
1548 # basic example; version pattern "A.B.C+cargo-D.E.F"
1549 ("cargo-c", "crate://crates.io/cargo-c/0.9.18+cargo-0.69")
1550 : "0.9.29"
1551 }
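[Editor's note: the crate URI encodes both the crate name and the full version, including the "+cargo-D.E.F" build metadata, as its last two path components. A minimal parsing sketch, not the fetcher's actual code:

    uri = "crate://crates.io/cargo-c/0.9.18+cargo-0.69"
    name, version = uri.rsplit("/", 2)[-2:]
    assert (name, version) == ("cargo-c", "0.9.18+cargo-0.69")
]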
1552
1306 @skipIfNoNetwork() 1553 @skipIfNoNetwork()
1307 def test_git_latest_versionstring(self): 1554 def test_git_latest_versionstring(self):
1308 for k, v in self.test_git_uris.items(): 1555 for k, v in self.test_git_uris.items():
1309 self.d.setVar("PN", k[0]) 1556 with self.subTest(pn=k[0]):
1310 self.d.setVar("SRCREV", k[2]) 1557 self.d.setVar("PN", k[0])
1311 self.d.setVar("UPSTREAM_CHECK_GITTAGREGEX", k[3]) 1558 self.d.setVar("SRCREV", k[2])
1312 ud = bb.fetch2.FetchData(k[1], self.d) 1559 self.d.setVar("UPSTREAM_CHECK_GITTAGREGEX", k[3])
1313 pupver= ud.method.latest_versionstring(ud, self.d) 1560 ud = bb.fetch2.FetchData(k[1], self.d)
1314 verstring = pupver[0] 1561 pupver = ud.method.latest_versionstring(ud, self.d)
1315 self.assertTrue(verstring, msg="Could not find upstream version for %s" % k[0]) 1562 verstring = pupver[0]
1316 r = bb.utils.vercmp_string(v, verstring) 1563 self.assertTrue(verstring, msg="Could not find upstream version for %s" % k[0])
1317 self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring)) 1564 r = bb.utils.vercmp_string(v, verstring)
1565 self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))
1566 if k[4]:
1567 r = bb.utils.vercmp_string(verstring, k[4])
1568 self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], verstring, k[4]))
1318 1569
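[Editor's note: the assertions above hinge on bb.utils.vercmp_string() behaving like strcmp over version strings: negative when the first argument is older, zero when equal, positive when newer, which is why the tests accept r == -1 or r == 0 for "expected <= found". An illustrative sketch, assuming BitBake's lib directory is on sys.path:

    import bb.utils

    assert bb.utils.vercmp_string("1.2.0", "1.2.1") < 0
    assert bb.utils.vercmp_string("2014.01", "2014.01") == 0
    assert bb.utils.vercmp_string("0.28.0", "0.27.0") > 0
]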
1319 def test_wget_latest_versionstring(self): 1570 def test_wget_latest_versionstring(self):
1320 testdata = os.path.dirname(os.path.abspath(__file__)) + "/fetch-testdata" 1571 testdata = os.path.dirname(os.path.abspath(__file__)) + "/fetch-testdata"
1321 server = HTTPService(testdata) 1572 server = HTTPService(testdata, host="127.0.0.1")
1322 server.start() 1573 server.start()
1323 port = server.port 1574 port = server.port
1324 try: 1575 try:
1325 for k, v in self.test_wget_uris.items(): 1576 for data, v in self.test_wget_uris.items():
1577 with self.subTest(pn=data.pn):
1578 self.d.setVar("PN", data.pn)
1579 self.d.setVar("PV", data.pv)
1580 if data.check_uri:
1581 checkuri = "http://127.0.0.1:%s/%s" % (port, data.check_uri)
1582 self.d.setVar("UPSTREAM_CHECK_URI", checkuri)
1583 if data.check_regex:
1584 self.d.setVar("UPSTREAM_CHECK_REGEX", data.check_regex)
1585
1586 url = "http://127.0.0.1:%s/%s" % (port, data.path)
1587 ud = bb.fetch2.FetchData(url, self.d)
1588 pupver = ud.method.latest_versionstring(ud, self.d)
1589 verstring = pupver[0]
1590 self.assertTrue(verstring, msg="Could not find upstream version for %s" % data.pn)
1591 r = bb.utils.vercmp_string(v, verstring)
1592 self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (data.pn, v, verstring))
1593 finally:
1594 server.stop()
1595
1596 @skipIfNoNetwork()
1597 def test_crate_latest_versionstring(self):
1598 for k, v in self.test_crate_uris.items():
1599 with self.subTest(pn=k[0]):
1326 self.d.setVar("PN", k[0]) 1600 self.d.setVar("PN", k[0])
1327 checkuri = "" 1601 ud = bb.fetch2.FetchData(k[1], self.d)
1328 if k[2]:
1329 checkuri = "http://localhost:%s/" % port + k[2]
1330 self.d.setVar("UPSTREAM_CHECK_URI", checkuri)
1331 self.d.setVar("UPSTREAM_CHECK_REGEX", k[3])
1332 url = "http://localhost:%s/" % port + k[1]
1333 ud = bb.fetch2.FetchData(url, self.d)
1334 pupver = ud.method.latest_versionstring(ud, self.d) 1602 pupver = ud.method.latest_versionstring(ud, self.d)
1335 verstring = pupver[0] 1603 verstring = pupver[0]
1336 self.assertTrue(verstring, msg="Could not find upstream version for %s" % k[0]) 1604 self.assertTrue(verstring, msg="Could not find upstream version for %s" % k[0])
1337 r = bb.utils.vercmp_string(v, verstring) 1605 r = bb.utils.vercmp_string(v, verstring)
1338 self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring)) 1606 self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))
1339 finally:
1340 server.stop()
1341
1342 1607
1343class FetchCheckStatusTest(FetcherTest): 1608class FetchCheckStatusTest(FetcherTest):
1344 test_wget_uris = ["http://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz", 1609 test_wget_uris = ["https://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz",
1345 "http://downloads.yoctoproject.org/releases/sato/sato-engine-0.2.tar.gz", 1610 "https://downloads.yoctoproject.org/releases/sato/sato-engine-0.2.tar.gz",
1346 "http://downloads.yoctoproject.org/releases/sato/sato-engine-0.3.tar.gz", 1611 "https://downloads.yoctoproject.org/releases/sato/sato-engine-0.3.tar.gz",
1347 "https://yoctoproject.org/", 1612 "https://yoctoproject.org/",
1348 "https://yoctoproject.org/documentation", 1613 "https://docs.yoctoproject.org",
1349 "http://downloads.yoctoproject.org/releases/opkg/opkg-0.1.7.tar.gz", 1614 "https://downloads.yoctoproject.org/releases/opkg/opkg-0.1.7.tar.gz",
1350 "http://downloads.yoctoproject.org/releases/opkg/opkg-0.3.0.tar.gz", 1615 "https://downloads.yoctoproject.org/releases/opkg/opkg-0.3.0.tar.gz",
1351 "ftp://sourceware.org/pub/libffi/libffi-1.20.tar.gz", 1616 "ftp://sourceware.org/pub/libffi/libffi-1.20.tar.gz",
1352 "http://ftp.gnu.org/gnu/autoconf/autoconf-2.60.tar.gz",
1353 "https://ftp.gnu.org/gnu/chess/gnuchess-5.08.tar.gz",
1354 "https://ftp.gnu.org/gnu/gmp/gmp-4.0.tar.gz",
1355 # GitHub releases are hosted on Amazon S3, which doesn't support HEAD 1617 # GitHub releases are hosted on Amazon S3, which doesn't support HEAD
1356 "https://github.com/kergoth/tslib/releases/download/1.1/tslib-1.1.tar.xz" 1618 "https://github.com/kergoth/tslib/releases/download/1.1/tslib-1.1.tar.xz"
1357 ] 1619 ]
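[Editor's note: the S3 comment matters because a plain HEAD probe would mark those release URLs dead. A hypothetical fallback check, separate from the fetcher's real checkstatus() implementation, could look like:

    import urllib.error
    import urllib.request

    def reachable(url, timeout=30):
        # Try a cheap HEAD first; fall back to GET for servers such as S3
        # that reject HEAD, and report failure only if both are refused.
        for method in ("HEAD", "GET"):
            try:
                req = urllib.request.Request(url, method=method)
                with urllib.request.urlopen(req, timeout=timeout):
                    return True
            except urllib.error.HTTPError:
                continue
            except urllib.error.URLError:
                return False
        return False
]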
@@ -1389,7 +1651,7 @@ class GitMakeShallowTest(FetcherTest):
1389 FetcherTest.setUp(self) 1651 FetcherTest.setUp(self)
1390 self.gitdir = os.path.join(self.tempdir, 'gitshallow') 1652 self.gitdir = os.path.join(self.tempdir, 'gitshallow')
1391 bb.utils.mkdirhier(self.gitdir) 1653 bb.utils.mkdirhier(self.gitdir)
1392 bb.process.run('git init', cwd=self.gitdir) 1654 self.git_init()
1393 1655
1394 def assertRefs(self, expected_refs): 1656 def assertRefs(self, expected_refs):
1395 actual_refs = self.git(['for-each-ref', '--format=%(refname)']).splitlines() 1657 actual_refs = self.git(['for-each-ref', '--format=%(refname)']).splitlines()
@@ -1403,13 +1665,6 @@ class GitMakeShallowTest(FetcherTest):
1403 actual_count = len(revs.splitlines()) 1665 actual_count = len(revs.splitlines())
1404 self.assertEqual(expected_count, actual_count, msg='Object count `%d` is not the expected `%d`' % (actual_count, expected_count)) 1666 self.assertEqual(expected_count, actual_count, msg='Object count `%d` is not the expected `%d`' % (actual_count, expected_count))
1405 1667
1406 def git(self, cmd):
1407 if isinstance(cmd, str):
1408 cmd = 'git ' + cmd
1409 else:
1410 cmd = ['git'] + cmd
1411 return bb.process.run(cmd, cwd=self.gitdir)[0]
1412
1413 def make_shallow(self, args=None): 1668 def make_shallow(self, args=None):
1414 if args is None: 1669 if args is None:
1415 args = ['HEAD'] 1670 args = ['HEAD']
@@ -1512,13 +1767,13 @@ class GitShallowTest(FetcherTest):
1512 self.srcdir = os.path.join(self.tempdir, 'gitsource') 1767 self.srcdir = os.path.join(self.tempdir, 'gitsource')
1513 1768
1514 bb.utils.mkdirhier(self.srcdir) 1769 bb.utils.mkdirhier(self.srcdir)
1515 self.git('init', cwd=self.srcdir) 1770 self.git_init(cwd=self.srcdir)
1516 self.d.setVar('WORKDIR', self.tempdir) 1771 self.d.setVar('WORKDIR', self.tempdir)
1517 self.d.setVar('S', self.gitdir) 1772 self.d.setVar('S', self.gitdir)
1518 self.d.delVar('PREMIRRORS') 1773 self.d.delVar('PREMIRRORS')
1519 self.d.delVar('MIRRORS') 1774 self.d.delVar('MIRRORS')
1520 1775
1521 uri = 'git://%s;protocol=file;subdir=${S}' % self.srcdir 1776 uri = 'git://%s;protocol=file;subdir=${S};branch=master' % self.srcdir
1522 self.d.setVar('SRC_URI', uri) 1777 self.d.setVar('SRC_URI', uri)
1523 self.d.setVar('SRCREV', '${AUTOREV}') 1778 self.d.setVar('SRCREV', '${AUTOREV}')
1524 self.d.setVar('AUTOREV', '${@bb.fetch2.get_autorev(d)}') 1779 self.d.setVar('AUTOREV', '${@bb.fetch2.get_autorev(d)}')
@@ -1526,11 +1781,14 @@ class GitShallowTest(FetcherTest):
1526 self.d.setVar('BB_GIT_SHALLOW', '1') 1781 self.d.setVar('BB_GIT_SHALLOW', '1')
1527 self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '0') 1782 self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '0')
1528 self.d.setVar('BB_GENERATE_SHALLOW_TARBALLS', '1') 1783 self.d.setVar('BB_GENERATE_SHALLOW_TARBALLS', '1')
1784 self.d.setVar("__BBSRCREV_SEEN", "1")
1529 1785
1530 def assertRefs(self, expected_refs, cwd=None): 1786 def assertRefs(self, expected_refs, cwd=None):
1531 if cwd is None: 1787 if cwd is None:
1532 cwd = self.gitdir 1788 cwd = self.gitdir
1533 actual_refs = self.git(['for-each-ref', '--format=%(refname)'], cwd=cwd).splitlines() 1789 actual_refs = self.git(['for-each-ref', '--format=%(refname)'], cwd=cwd).splitlines()
1790 # Resolve references into the same format as the comparison (needed by git 2.48 onwards)
1791 actual_refs = self.git(['rev-parse', '--symbolic-full-name'] + actual_refs, cwd=cwd).splitlines()
1534 full_expected = self.git(['rev-parse', '--symbolic-full-name'] + expected_refs, cwd=cwd).splitlines() 1792 full_expected = self.git(['rev-parse', '--symbolic-full-name'] + expected_refs, cwd=cwd).splitlines()
1535 self.assertEqual(sorted(set(full_expected)), sorted(set(actual_refs))) 1793 self.assertEqual(sorted(set(full_expected)), sorted(set(actual_refs)))
1536 1794
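[Editor's note: the extra rev-parse pass exists because git 2.48 onwards may print refs from for-each-ref in a different form than the expected list uses; resolving both sides to symbolic full names makes the comparison stable. Roughly, the normalisation amounts to (a sketch, not the test helper):

    import subprocess

    def full_ref_names(repo, refs):
        # 'master' -> 'refs/heads/master', 'origin/master' -> 'refs/remotes/origin/master'
        out = subprocess.check_output(
            ["git", "rev-parse", "--symbolic-full-name"] + list(refs),
            cwd=repo, text=True)
        return sorted(set(out.splitlines()))
]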
@@ -1543,15 +1801,6 @@ class GitShallowTest(FetcherTest):
1543 actual_count = len(revs.splitlines()) 1801 actual_count = len(revs.splitlines())
1544 self.assertEqual(expected_count, actual_count, msg='Object count `%d` is not the expected `%d`' % (actual_count, expected_count)) 1802 self.assertEqual(expected_count, actual_count, msg='Object count `%d` is not the expected `%d`' % (actual_count, expected_count))
1545 1803
1546 def git(self, cmd, cwd=None):
1547 if isinstance(cmd, str):
1548 cmd = 'git ' + cmd
1549 else:
1550 cmd = ['git'] + cmd
1551 if cwd is None:
1552 cwd = self.gitdir
1553 return bb.process.run(cmd, cwd=cwd)[0]
1554
1555 def add_empty_file(self, path, cwd=None, msg=None): 1804 def add_empty_file(self, path, cwd=None, msg=None):
1556 if msg is None: 1805 if msg is None:
1557 msg = path 1806 msg = path
@@ -1586,7 +1835,6 @@ class GitShallowTest(FetcherTest):
1586 def fetch_shallow(self, uri=None, disabled=False, keepclone=False): 1835 def fetch_shallow(self, uri=None, disabled=False, keepclone=False):
1587 """Fetch a uri, generating a shallow tarball, then unpack using it""" 1836 """Fetch a uri, generating a shallow tarball, then unpack using it"""
1588 fetcher, ud = self.fetch_and_unpack(uri) 1837 fetcher, ud = self.fetch_and_unpack(uri)
1589 assert os.path.exists(ud.clonedir), 'Git clone in DLDIR (%s) does not exist for uri %s' % (ud.clonedir, uri)
1590 1838
1591 # Confirm that the unpacked repo is unshallow 1839 # Confirm that the unpacked repo is unshallow
1592 if not disabled: 1840 if not disabled:
@@ -1594,8 +1842,10 @@ class GitShallowTest(FetcherTest):
1594 1842
1595 # fetch and unpack, from the shallow tarball 1843 # fetch and unpack, from the shallow tarball
1596 bb.utils.remove(self.gitdir, recurse=True) 1844 bb.utils.remove(self.gitdir, recurse=True)
1597 bb.utils.remove(ud.clonedir, recurse=True) 1845 if os.path.exists(ud.clonedir):
1598 bb.utils.remove(ud.clonedir.replace('gitsource', 'gitsubmodule'), recurse=True) 1846 bb.process.run('chmod u+w -R "%s"' % ud.clonedir)
1847 bb.utils.remove(ud.clonedir, recurse=True)
1848 bb.utils.remove(ud.clonedir.replace('gitsource', 'gitsubmodule'), recurse=True)
1599 1849
1600 # confirm that the unpacked repo is used when no git clone or git 1850 # confirm that the unpacked repo is used when no git clone or git
1601 # mirror tarball is available 1851 # mirror tarball is available
@@ -1678,7 +1928,12 @@ class GitShallowTest(FetcherTest):
1678 self.add_empty_file('c') 1928 self.add_empty_file('c')
1679 self.assertRevCount(3, cwd=self.srcdir) 1929 self.assertRevCount(3, cwd=self.srcdir)
1680 1930
1931 # Clone without tarball
1932 self.d.setVar('BB_GIT_SHALLOW', '0')
1933 fetcher, ud = self.fetch()
1934
1681 # Clone and generate mirror tarball 1935 # Clone and generate mirror tarball
1936 self.d.setVar('BB_GIT_SHALLOW', '1')
1682 fetcher, ud = self.fetch() 1937 fetcher, ud = self.fetch()
1683 1938
1684 # Ensure we have a current mirror tarball, but an out of date clone 1939 # Ensure we have a current mirror tarball, but an out of date clone
@@ -1690,6 +1945,7 @@ class GitShallowTest(FetcherTest):
1690 fetcher, ud = self.fetch() 1945 fetcher, ud = self.fetch()
1691 fetcher.unpack(self.d.getVar('WORKDIR')) 1946 fetcher.unpack(self.d.getVar('WORKDIR'))
1692 self.assertRevCount(1) 1947 self.assertRevCount(1)
1948 assert os.path.exists(os.path.join(self.d.getVar('WORKDIR'), 'git', 'c'))
1693 1949
1694 def test_shallow_single_branch_no_merge(self): 1950 def test_shallow_single_branch_no_merge(self):
1695 self.add_empty_file('a') 1951 self.add_empty_file('a')
@@ -1745,7 +2001,7 @@ class GitShallowTest(FetcherTest):
1745 2001
1746 smdir = os.path.join(self.tempdir, 'gitsubmodule') 2002 smdir = os.path.join(self.tempdir, 'gitsubmodule')
1747 bb.utils.mkdirhier(smdir) 2003 bb.utils.mkdirhier(smdir)
1748 self.git('init', cwd=smdir) 2004 self.git_init(cwd=smdir)
1749 # Make this look like it was cloned from a remote... 2005 # Make this look like it was cloned from a remote...
1750 self.git('config --add remote.origin.url "%s"' % smdir, cwd=smdir) 2006 self.git('config --add remote.origin.url "%s"' % smdir, cwd=smdir)
1751 self.git('config --add remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"', cwd=smdir) 2007 self.git('config --add remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"', cwd=smdir)
@@ -1753,11 +2009,11 @@ class GitShallowTest(FetcherTest):
1753 self.add_empty_file('bsub', cwd=smdir) 2009 self.add_empty_file('bsub', cwd=smdir)
1754 2010
1755 self.git('submodule init', cwd=self.srcdir) 2011 self.git('submodule init', cwd=self.srcdir)
1756 self.git('submodule add file://%s' % smdir, cwd=self.srcdir) 2012 self.git('-c protocol.file.allow=always submodule add file://%s' % smdir, cwd=self.srcdir)
1757 self.git('submodule update', cwd=self.srcdir) 2013 self.git('submodule update', cwd=self.srcdir)
1758 self.git('commit -m submodule -a', cwd=self.srcdir) 2014 self.git('commit -m submodule -a', cwd=self.srcdir)
1759 2015
1760 uri = 'gitsm://%s;protocol=file;subdir=${S}' % self.srcdir 2016 uri = 'gitsm://%s;protocol=file;subdir=${S};branch=master' % self.srcdir
1761 fetcher, ud = self.fetch_shallow(uri) 2017 fetcher, ud = self.fetch_shallow(uri)
1762 2018
1763 # Verify the main repository is shallow 2019 # Verify the main repository is shallow
@@ -1775,7 +2031,7 @@ class GitShallowTest(FetcherTest):
1775 2031
1776 smdir = os.path.join(self.tempdir, 'gitsubmodule') 2032 smdir = os.path.join(self.tempdir, 'gitsubmodule')
1777 bb.utils.mkdirhier(smdir) 2033 bb.utils.mkdirhier(smdir)
1778 self.git('init', cwd=smdir) 2034 self.git_init(cwd=smdir)
1779 # Make this look like it was cloned from a remote... 2035 # Make this look like it was cloned from a remote...
1780 self.git('config --add remote.origin.url "%s"' % smdir, cwd=smdir) 2036 self.git('config --add remote.origin.url "%s"' % smdir, cwd=smdir)
1781 self.git('config --add remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"', cwd=smdir) 2037 self.git('config --add remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"', cwd=smdir)
@@ -1783,19 +2039,19 @@ class GitShallowTest(FetcherTest):
1783 self.add_empty_file('bsub', cwd=smdir) 2039 self.add_empty_file('bsub', cwd=smdir)
1784 2040
1785 self.git('submodule init', cwd=self.srcdir) 2041 self.git('submodule init', cwd=self.srcdir)
1786 self.git('submodule add file://%s' % smdir, cwd=self.srcdir) 2042 self.git('-c protocol.file.allow=always submodule add file://%s' % smdir, cwd=self.srcdir)
1787 self.git('submodule update', cwd=self.srcdir) 2043 self.git('submodule update', cwd=self.srcdir)
1788 self.git('commit -m submodule -a', cwd=self.srcdir) 2044 self.git('commit -m submodule -a', cwd=self.srcdir)
1789 2045
1790 uri = 'gitsm://%s;protocol=file;subdir=${S}' % self.srcdir 2046 uri = 'gitsm://%s;protocol=file;subdir=${S};branch=master' % self.srcdir
1791 2047
1792 # Fetch once to generate the shallow tarball 2048 # Fetch once to generate the shallow tarball
1793 fetcher, ud = self.fetch(uri) 2049 fetcher, ud = self.fetch(uri)
1794 2050
1795 # Set up the mirror 2051 # Set up the mirror
1796 mirrordir = os.path.join(self.tempdir, 'mirror') 2052 mirrordir = os.path.join(self.tempdir, 'mirror')
1797 os.rename(self.dldir, mirrordir) 2053 bb.utils.rename(self.dldir, mirrordir)
1798 self.d.setVar('PREMIRRORS', 'gitsm://.*/.* file://%s/\n' % mirrordir) 2054 self.d.setVar('PREMIRRORS', 'gitsm://.*/.* file://%s/' % mirrordir)
1799 2055
1800 # Fetch from the mirror 2056 # Fetch from the mirror
1801 bb.utils.remove(self.dldir, recurse=True) 2057 bb.utils.remove(self.dldir, recurse=True)
@@ -1818,80 +2074,27 @@ class GitShallowTest(FetcherTest):
1818 self.git('annex init', cwd=self.srcdir) 2074 self.git('annex init', cwd=self.srcdir)
1819 open(os.path.join(self.srcdir, 'c'), 'w').close() 2075 open(os.path.join(self.srcdir, 'c'), 'w').close()
1820 self.git('annex add c', cwd=self.srcdir) 2076 self.git('annex add c', cwd=self.srcdir)
1821 self.git('commit -m annex-c -a', cwd=self.srcdir) 2077 self.git('commit --author "Foo Bar <foo@bar>" -m annex-c -a', cwd=self.srcdir)
1822 bb.process.run('chmod u+w -R %s' % os.path.join(self.srcdir, '.git', 'annex')) 2078 bb.process.run('chmod u+w -R %s' % self.srcdir)
1823 2079
1824 uri = 'gitannex://%s;protocol=file;subdir=${S}' % self.srcdir 2080 uri = 'gitannex://%s;protocol=file;subdir=${S};branch=master' % self.srcdir
1825 fetcher, ud = self.fetch_shallow(uri) 2081 fetcher, ud = self.fetch_shallow(uri)
1826 2082
1827 self.assertRevCount(1) 2083 self.assertRevCount(1)
1828 assert './.git/annex/' in bb.process.run('tar -tzf %s' % os.path.join(self.dldir, ud.mirrortarballs[0]))[0] 2084 assert './.git/annex/' in bb.process.run('tar -tzf %s' % os.path.join(self.dldir, ud.mirrortarballs[0]))[0]
1829 assert os.path.exists(os.path.join(self.gitdir, 'c')) 2085 assert os.path.exists(os.path.join(self.gitdir, 'c'))
1830 2086
1831 def test_shallow_multi_one_uri(self):
1832 # Create initial git repo
1833 self.add_empty_file('a')
1834 self.add_empty_file('b')
1835 self.git('checkout -b a_branch', cwd=self.srcdir)
1836 self.add_empty_file('c')
1837 self.add_empty_file('d')
1838 self.git('checkout master', cwd=self.srcdir)
1839 self.git('tag v0.0 a_branch', cwd=self.srcdir)
1840 self.add_empty_file('e')
1841 self.git('merge --no-ff --no-edit a_branch', cwd=self.srcdir)
1842 self.add_empty_file('f')
1843 self.assertRevCount(7, cwd=self.srcdir)
1844
1845 uri = self.d.getVar('SRC_URI').split()[0]
1846 uri = '%s;branch=master,a_branch;name=master,a_branch' % uri
1847
1848 self.d.setVar('BB_GIT_SHALLOW_DEPTH', '0')
1849 self.d.setVar('BB_GIT_SHALLOW_REVS', 'v0.0')
1850 self.d.setVar('SRCREV_master', '${AUTOREV}')
1851 self.d.setVar('SRCREV_a_branch', '${AUTOREV}')
1852
1853 self.fetch_shallow(uri)
1854
1855 self.assertRevCount(5)
1856 self.assertRefs(['master', 'origin/master', 'origin/a_branch'])
1857
1858 def test_shallow_multi_one_uri_depths(self):
1859 # Create initial git repo
1860 self.add_empty_file('a')
1861 self.add_empty_file('b')
1862 self.git('checkout -b a_branch', cwd=self.srcdir)
1863 self.add_empty_file('c')
1864 self.add_empty_file('d')
1865 self.git('checkout master', cwd=self.srcdir)
1866 self.add_empty_file('e')
1867 self.git('merge --no-ff --no-edit a_branch', cwd=self.srcdir)
1868 self.add_empty_file('f')
1869 self.assertRevCount(7, cwd=self.srcdir)
1870
1871 uri = self.d.getVar('SRC_URI').split()[0]
1872 uri = '%s;branch=master,a_branch;name=master,a_branch' % uri
1873
1874 self.d.setVar('BB_GIT_SHALLOW_DEPTH', '0')
1875 self.d.setVar('BB_GIT_SHALLOW_DEPTH_master', '3')
1876 self.d.setVar('BB_GIT_SHALLOW_DEPTH_a_branch', '1')
1877 self.d.setVar('SRCREV_master', '${AUTOREV}')
1878 self.d.setVar('SRCREV_a_branch', '${AUTOREV}')
1879
1880 self.fetch_shallow(uri)
1881
1882 self.assertRevCount(4, ['--all'])
1883 self.assertRefs(['master', 'origin/master', 'origin/a_branch'])
1884
1885 def test_shallow_clone_preferred_over_shallow(self): 2087 def test_shallow_clone_preferred_over_shallow(self):
1886 self.add_empty_file('a') 2088 self.add_empty_file('a')
1887 self.add_empty_file('b') 2089 self.add_empty_file('b')
1888 2090
1889 # Fetch once to generate the shallow tarball 2091 # Fetch once to generate the shallow tarball
2092 self.d.setVar('BB_GIT_SHALLOW', '0')
1890 fetcher, ud = self.fetch() 2093 fetcher, ud = self.fetch()
1891 assert os.path.exists(os.path.join(self.dldir, ud.mirrortarballs[0]))
1892 2094
1893 # Fetch and unpack with both the clonedir and shallow tarball available 2095 # Fetch and unpack with both the clonedir and shallow tarball available
1894 bb.utils.remove(self.gitdir, recurse=True) 2096 bb.utils.remove(self.gitdir, recurse=True)
2097 self.d.setVar('BB_GIT_SHALLOW', '1')
1895 fetcher, ud = self.fetch_and_unpack() 2098 fetcher, ud = self.fetch_and_unpack()
1896 2099
1897 # The unpacked tree should *not* be shallow 2100 # The unpacked tree should *not* be shallow
@@ -1910,9 +2113,9 @@ class GitShallowTest(FetcherTest):
1910 # Set up the mirror 2113 # Set up the mirror
1911 mirrordir = os.path.join(self.tempdir, 'mirror') 2114 mirrordir = os.path.join(self.tempdir, 'mirror')
1912 bb.utils.mkdirhier(mirrordir) 2115 bb.utils.mkdirhier(mirrordir)
1913 self.d.setVar('PREMIRRORS', 'git://.*/.* file://%s/\n' % mirrordir) 2116 self.d.setVar('PREMIRRORS', 'git://.*/.* file://%s/' % mirrordir)
1914 2117
1915 os.rename(os.path.join(self.dldir, mirrortarball), 2118 bb.utils.rename(os.path.join(self.dldir, mirrortarball),
1916 os.path.join(mirrordir, mirrortarball)) 2119 os.path.join(mirrordir, mirrortarball))
1917 2120
1918 # Fetch from the mirror 2121 # Fetch from the mirror
@@ -1999,7 +2202,7 @@ class GitShallowTest(FetcherTest):
1999 2202
2000 self.fetch_shallow() 2203 self.fetch_shallow()
2001 2204
2002 self.assertRevCount(5) 2205 self.assertRevCount(2)
2003 2206
2004 def test_shallow_invalid_revs(self): 2207 def test_shallow_invalid_revs(self):
2005 self.add_empty_file('a') 2208 self.add_empty_file('a')
@@ -2018,7 +2221,10 @@ class GitShallowTest(FetcherTest):
2018 self.git('tag v0.0 master', cwd=self.srcdir) 2221 self.git('tag v0.0 master', cwd=self.srcdir)
2019 self.d.setVar('BB_GIT_SHALLOW_DEPTH', '0') 2222 self.d.setVar('BB_GIT_SHALLOW_DEPTH', '0')
2020 self.d.setVar('BB_GIT_SHALLOW_REVS', 'v0.0') 2223 self.d.setVar('BB_GIT_SHALLOW_REVS', 'v0.0')
2021 self.fetch_shallow() 2224
2225 with self.assertRaises(bb.fetch2.FetchError), self.assertLogs("BitBake.Fetcher", level="ERROR") as cm:
2226 self.fetch_shallow()
2227 self.assertIn("fatal: no commits selected for shallow requests", cm.output[0])
2022 2228
2023 def test_shallow_fetch_missing_revs_fails(self): 2229 def test_shallow_fetch_missing_revs_fails(self):
2024 self.add_empty_file('a') 2230 self.add_empty_file('a')
@@ -2032,8 +2238,35 @@ class GitShallowTest(FetcherTest):
2032 self.assertIn("Unable to find revision v0.0 even from upstream", cm.output[0]) 2238 self.assertIn("Unable to find revision v0.0 even from upstream", cm.output[0])
2033 2239
2034 @skipIfNoNetwork() 2240 @skipIfNoNetwork()
2241 def test_git_shallow_fetch_premirrors(self):
2242 url = "git://git.openembedded.org/bitbake;branch=master;protocol=https"
2243
2244 # Create a separate premirror directory within tempdir
2245 premirror = os.path.join(self.tempdir, "premirror")
2246 os.mkdir(premirror)
2247
2248 # Fetch a non-shallow clone into the premirror subdir
2249 self.d.setVar('BB_GIT_SHALLOW', '0')
2250 self.d.setVar("DL_DIR", premirror)
2251 fetcher, ud = self.fetch(url)
2252
2253 # Fetch a shallow clone from the premirror subdir with unpacking
2254 # using the original recipe URL and the premirror mapping
2255 self.d.setVar('BB_GIT_SHALLOW', '1')
2256 self.d.setVar("DL_DIR", self.dldir)
2257 self.d.setVar('BB_FETCH_PREMIRRORONLY', '1')
2258 self.d.setVar('BB_NO_NETWORK', '1')
2259 self.d.setVar('BB_GENERATE_MIRROR_TARBALLS', '0')
2260 self.d.setVar("PREMIRRORS", "git://.*/.* git://{0};protocol=file".format(premirror + "/git2/" + ud.host + ud.path.replace("/", ".")))
2261 fetcher = self.fetch_and_unpack(url)
2262
2263 # Verify that the unpacked sources are shallow clones
2264 self.assertRevCount(1)
2265 assert os.path.exists(os.path.join(self.gitdir, '.git', 'shallow'))
2266
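[Editor's note: the PREMIRRORS line in that test leans on the layout DL_DIR uses for bare clones, git2/<host><path with '/' mapped to '.'>. A small sketch of how the mapping string is assembled, with illustrative values:

    premirror = "/tmp/premirror"  # hypothetical premirror DL_DIR
    host, path = "git.openembedded.org", "/bitbake"
    clonedir = "%s/git2/%s%s" % (premirror, host, path.replace("/", "."))
    rule = "git://.*/.* git://%s;protocol=file" % clonedir
    assert clonedir.endswith("git2/git.openembedded.org.bitbake")
]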
2267 @skipIfNoNetwork()
2035 def test_bitbake(self): 2268 def test_bitbake(self):
2036 self.git('remote add --mirror=fetch origin git://github.com/openembedded/bitbake', cwd=self.srcdir) 2269 self.git('remote add --mirror=fetch origin https://github.com/openembedded/bitbake', cwd=self.srcdir)
2037 self.git('config core.bare true', cwd=self.srcdir) 2270 self.git('config core.bare true', cwd=self.srcdir)
2038 self.git('fetch', cwd=self.srcdir) 2271 self.git('fetch', cwd=self.srcdir)
2039 2272
@@ -2049,7 +2282,7 @@ class GitShallowTest(FetcherTest):
2049 revs = len(self.git('rev-list master').splitlines()) 2282 revs = len(self.git('rev-list master').splitlines())
2050 self.assertNotEqual(orig_revs, revs) 2283 self.assertNotEqual(orig_revs, revs)
2051 self.assertRefs(['master', 'origin/master']) 2284 self.assertRefs(['master', 'origin/master'])
2052 self.assertRevCount(orig_revs - 1758) 2285 self.assertRevCount(orig_revs - 1760)
2053 2286
2054 def test_that_unpack_throws_an_error_when_the_git_clone_nor_shallow_tarball_exist(self): 2287 def test_that_unpack_throws_an_error_when_the_git_clone_nor_shallow_tarball_exist(self):
2055 self.add_empty_file('a') 2288 self.add_empty_file('a')
@@ -2063,27 +2296,43 @@ class GitShallowTest(FetcherTest):
2063 self.assertIn("No up to date source found", context.exception.msg) 2296 self.assertIn("No up to date source found", context.exception.msg)
2064 self.assertIn("clone directory not available or not up to date", context.exception.msg) 2297 self.assertIn("clone directory not available or not up to date", context.exception.msg)
2065 2298
2066 @skipIfNoNetwork() 2299 def test_shallow_check_is_shallow(self):
2067 def test_that_unpack_does_work_when_using_git_shallow_tarball_but_tarball_is_not_available(self): 2300 self.add_empty_file('a')
2068 self.d.setVar('SRCREV', 'e5939ff608b95cdd4d0ab0e1935781ab9a276ac0') 2301 self.add_empty_file('b')
2069 self.d.setVar('BB_GIT_SHALLOW', '1')
2070 self.d.setVar('BB_GENERATE_SHALLOW_TARBALLS', '1')
2071 fetcher = bb.fetch.Fetch(["git://git.yoctoproject.org/fstests"], self.d)
2072 fetcher.download()
2073 2302
2074 bb.utils.remove(self.dldir + "/*.tar.gz") 2303 # Fetch and unpack without the clonedir and *only* shallow tarball available
2075 fetcher.unpack(self.unpackdir) 2304 bb.utils.remove(self.gitdir, recurse=True)
2305 fetcher, ud = self.fetch_and_unpack()
2076 2306
2077 dir = os.listdir(self.unpackdir + "/git/") 2307 # The unpacked tree *should* be shallow
2078 self.assertIn("fstests.doap", dir) 2308 self.assertRevCount(1)
2309 assert os.path.exists(os.path.join(self.gitdir, '.git', 'shallow'))
2310
2311 def test_shallow_succeeds_with_tag_containing_slash(self):
2312 self.add_empty_file('a')
2313 self.add_empty_file('b')
2314 self.git('tag t1/t2/t3', cwd=self.srcdir)
2315 self.assertRevCount(2, cwd=self.srcdir)
2316
2317 srcrev = self.git('rev-parse HEAD', cwd=self.srcdir).strip()
2318 self.d.setVar('SRCREV', srcrev)
2319 uri = self.d.getVar('SRC_URI').split()[0]
2320 uri = '%s;tag=t1/t2/t3' % uri
2321 self.fetch_shallow(uri)
2322 self.assertRevCount(1)
2079 2323
2080class GitLfsTest(FetcherTest): 2324class GitLfsTest(FetcherTest):
2325 def skipIfNoGitLFS():
2326 if not shutil.which('git-lfs'):
2327 return unittest.skip('git-lfs not installed')
2328 return lambda f: f
2329
2081 def setUp(self): 2330 def setUp(self):
2082 FetcherTest.setUp(self) 2331 FetcherTest.setUp(self)
2083 2332
2084 self.gitdir = os.path.join(self.tempdir, 'git') 2333 self.gitdir = os.path.join(self.tempdir, 'git')
2085 self.srcdir = os.path.join(self.tempdir, 'gitsource') 2334 self.srcdir = os.path.join(self.tempdir, 'gitsource')
2086 2335
2087 self.d.setVar('WORKDIR', self.tempdir) 2336 self.d.setVar('WORKDIR', self.tempdir)
2088 self.d.setVar('S', self.gitdir) 2337 self.d.setVar('S', self.gitdir)
2089 self.d.delVar('PREMIRRORS') 2338 self.d.delVar('PREMIRRORS')
@@ -2091,22 +2340,24 @@ class GitLfsTest(FetcherTest):
2091 2340
2092 self.d.setVar('SRCREV', '${AUTOREV}') 2341 self.d.setVar('SRCREV', '${AUTOREV}')
2093 self.d.setVar('AUTOREV', '${@bb.fetch2.get_autorev(d)}') 2342 self.d.setVar('AUTOREV', '${@bb.fetch2.get_autorev(d)}')
2343 self.d.setVar("__BBSRCREV_SEEN", "1")
2094 2344
2095 bb.utils.mkdirhier(self.srcdir) 2345 bb.utils.mkdirhier(self.srcdir)
2096 self.git('init', cwd=self.srcdir) 2346 self.git_init(cwd=self.srcdir)
2097 with open(os.path.join(self.srcdir, '.gitattributes'), 'wt') as attrs: 2347 self.commit_file('.gitattributes', '*.mp3 filter=lfs -text')
2098 attrs.write('*.mp3 filter=lfs -text')
2099 self.git(['add', '.gitattributes'], cwd=self.srcdir)
2100 self.git(['commit', '-m', "attributes", '.gitattributes'], cwd=self.srcdir)
2101 2348
2102 def git(self, cmd, cwd=None): 2349 def commit(self, *, cwd=None):
2103 if isinstance(cmd, str): 2350 cwd = cwd or self.srcdir
2104 cmd = 'git ' + cmd 2351 self.git(["commit", "-m", "Change"], cwd=cwd)
2105 else: 2352 return self.git(["rev-parse", "HEAD"], cwd=cwd).strip()
2106 cmd = ['git'] + cmd 2353
2107 if cwd is None: 2354 def commit_file(self, filename, content, *, cwd=None):
2108 cwd = self.gitdir 2355 cwd = cwd or self.srcdir
2109 return bb.process.run(cmd, cwd=cwd)[0] 2356
2357 with open(os.path.join(cwd, filename), "w") as f:
2358 f.write(content)
2359 self.git(["add", filename], cwd=cwd)
2360 return self.commit(cwd=cwd)
2110 2361
2111 def fetch(self, uri=None, download=True): 2362 def fetch(self, uri=None, download=True):
2112 uris = self.d.getVar('SRC_URI').split() 2363 uris = self.d.getVar('SRC_URI').split()
@@ -2119,65 +2370,259 @@ class GitLfsTest(FetcherTest):
2119 ud = fetcher.ud[uri] 2370 ud = fetcher.ud[uri]
2120 return fetcher, ud 2371 return fetcher, ud
2121 2372
2373 def get_real_git_lfs_file(self):
2374 self.d.setVar('PATH', os.environ.get('PATH'))
2375 fetcher, ud = self.fetch()
2376 fetcher.unpack(self.d.getVar('WORKDIR'))
2377 unpacked_lfs_file = os.path.join(self.d.getVar('WORKDIR'), 'git', "Cat_poster_1.jpg")
2378 return unpacked_lfs_file
2379
2380 @skipIfNoGitLFS()
2381 def test_gitsm_lfs(self):
2382 """Test that the gitsm fetcher caches objects stored via LFS"""
2383 self.git(["lfs", "install", "--local"], cwd=self.srcdir)
2384
2385 def fetch_and_verify(revision, filename, content):
2386 self.d.setVar('SRCREV', revision)
2387 fetcher, ud = self.fetch()
2388
2389 with hide_directory(submoduledir), hide_directory(self.srcdir):
2390 workdir = self.d.getVar('WORKDIR')
2391 fetcher.unpack(workdir)
2392
2393 with open(os.path.join(workdir, "git", filename)) as f:
2394 self.assertEqual(f.read(), content)
2395
2396 # Create the git repository that will later be used as a submodule
2397 submoduledir = self.tempdir + "/submodule"
2398 bb.utils.mkdirhier(submoduledir)
2399 self.git_init(submoduledir)
2400 self.git(["lfs", "install", "--local"], cwd=submoduledir)
2401 self.commit_file('.gitattributes', '*.mp3 filter=lfs -text', cwd=submoduledir)
2402
2403 submodule_commit_1 = self.commit_file("a.mp3", "submodule version 1", cwd=submoduledir)
2404 _ = self.commit_file("a.mp3", "submodule version 2", cwd=submoduledir)
2405
2406 # Add the submodule to the repository at its current HEAD revision
2407 self.git(["-c", "protocol.file.allow=always", "submodule", "add", submoduledir, "submodule"],
2408 cwd=self.srcdir)
2409 base_commit_1 = self.commit()
2410
2411 # Let the submodule point at a different revision
2412 self.git(["checkout", submodule_commit_1], self.srcdir + "/submodule")
2413 self.git(["add", "submodule"], cwd=self.srcdir)
2414 base_commit_2 = self.commit()
2415
2416 # Add a LFS file to the repository
2417 base_commit_3 = self.commit_file("a.mp3", "version 1")
2418 # Update the added LFS file
2419 base_commit_4 = self.commit_file("a.mp3", "version 2")
2420
2421 self.d.setVar('SRC_URI', "gitsm://%s;protocol=file;lfs=1;branch=master" % self.srcdir)
2422
2423 # Verify that LFS objects referenced from submodules are fetched and checked out
2424 fetch_and_verify(base_commit_1, "submodule/a.mp3", "submodule version 2")
2425 # Verify that the repository inside the download cache of a submodule is extended with any
2426 # additional LFS objects needed when checking out a different revision.
2427 fetch_and_verify(base_commit_2, "submodule/a.mp3", "submodule version 1")
2428 # Verify that LFS objects referenced from the base repository are fetched and checked out
2429 fetch_and_verify(base_commit_3, "a.mp3", "version 1")
2430 # Verify that the cached repository is extended with any additional LFS objects required
2431 # when checking out a different revision.
2432 fetch_and_verify(base_commit_4, "a.mp3", "version 2")
2433
2434 @skipIfNoGitLFS()
2435 def test_gitsm_lfs_disabled(self):
2436 """Test that the gitsm fetcher does not use LFS when explicitly disabled"""
2437 self.git(["lfs", "install", "--local"], cwd=self.srcdir)
2438
2439 def fetch_and_verify(revision, filename, content):
2440 self.d.setVar('SRCREV', revision)
2441 fetcher, ud = self.fetch()
2442
2443 with hide_directory(submoduledir), hide_directory(self.srcdir):
2444 workdir = self.d.getVar('WORKDIR')
2445 fetcher.unpack(workdir)
2446
2447 with open(os.path.join(workdir, "git", filename)) as f:
2448 # Assume that LFS did not perform smudging when the expected content is
2449 # missing.
2450 self.assertNotEqual(f.read(), content)
2451
2452 # Create the git repository that will later be used as a submodule
2453 submoduledir = self.tempdir + "/submodule"
2454 bb.utils.mkdirhier(submoduledir)
2455 self.git_init(submoduledir)
2456 self.git(["lfs", "install", "--local"], cwd=submoduledir)
2457 self.commit_file('.gitattributes', '*.mp3 filter=lfs -text', cwd=submoduledir)
2458
2459 submodule_commit_1 = self.commit_file("a.mp3", "submodule version 1", cwd=submoduledir)
2460
2461 # Add the submodule to the repository at its current HEAD revision
2462 self.git(["-c", "protocol.file.allow=always", "submodule", "add", submoduledir, "submodule"],
2463 cwd=self.srcdir)
2464 base_commit_1 = self.commit()
2465
2466 # Add a LFS file to the repository
2467 base_commit_2 = self.commit_file("a.mp3", "version 1")
2468
2469 self.d.setVar('SRC_URI', "gitsm://%s;protocol=file;lfs=0;branch=master" % self.srcdir)
2470
2471 # Verify that LFS objects referenced from submodules are not fetched nor checked out
2472 fetch_and_verify(base_commit_1, "submodule/a.mp3", "submodule version 1")
2473 # Verify that the LFS objects referenced from the base repository are not fetched nor
2474 # checked out
2475 fetch_and_verify(base_commit_2, "a.mp3", "version 1")
2476
2477 @skipIfNoGitLFS()
2478 def test_fetch_lfs_on_srcrev_change(self):
2479 """Test if fetch downloads missing LFS objects when a different revision within an existing repository is requested"""
2480 self.git(["lfs", "install", "--local"], cwd=self.srcdir)
2481
2482 def fetch_and_verify(revision, filename, content):
2483 self.d.setVar('SRCREV', revision)
2484 fetcher, ud = self.fetch()
2485
2486 with hide_directory(self.srcdir):
2487 workdir = self.d.getVar('WORKDIR')
2488 fetcher.unpack(workdir)
2489
2490 with open(os.path.join(workdir, "git", filename)) as f:
2491 self.assertEqual(f.read(), content)
2492
2493 commit_1 = self.commit_file("a.mp3", "version 1")
2494 commit_2 = self.commit_file("a.mp3", "version 2")
2495
2496 self.d.setVar('SRC_URI', "git://%s;protocol=file;lfs=1;branch=master" % self.srcdir)
2497
2498 # Seed the local download folder by fetching the latest commit and verifying that the LFS contents are
2499 # available even when the upstream repository disappears.
2500 fetch_and_verify(commit_2, "a.mp3", "version 2")
2501 # Verify that even when an older revision is fetched, the needed LFS objects are fetched into the download
2502 # folder.
2503 fetch_and_verify(commit_1, "a.mp3", "version 1")
2504
2505 @skipIfNoGitLFS()
2506 @skipIfNoNetwork()
2507 def test_real_git_lfs_repo_succeeds_without_lfs_param(self):
2508 self.d.setVar('SRC_URI', "git://gitlab.com/gitlab-examples/lfs.git;protocol=https;branch=master")
2509 f = self.get_real_git_lfs_file()
2510 self.assertTrue(os.path.exists(f))
2511 self.assertEqual("c0baab607a97839c9a328b4310713307", bb.utils.md5_file(f))
2512
2513 @skipIfNoGitLFS()
2514 @skipIfNoNetwork()
2515 def test_real_git_lfs_repo_succeeds(self):
2516 self.d.setVar('SRC_URI', "git://gitlab.com/gitlab-examples/lfs.git;protocol=https;branch=master;lfs=1")
2517 f = self.get_real_git_lfs_file()
2518 self.assertTrue(os.path.exists(f))
2519 self.assertEqual("c0baab607a97839c9a328b4310713307", bb.utils.md5_file(f))
2520
2521 @skipIfNoGitLFS()
2522 @skipIfNoNetwork()
2523 def test_real_git_lfs_repo_skips(self):
2524 self.d.setVar('SRC_URI', "git://gitlab.com/gitlab-examples/lfs.git;protocol=https;branch=master;lfs=0")
2525 f = self.get_real_git_lfs_file()
2526 # This is the actual non-smudged placeholder file on the repo if git-lfs does not run
2527 lfs_file = (
2528 'version https://git-lfs.github.com/spec/v1\n'
2529 'oid sha256:34be66b1a39a1955b46a12588df9d5f6fc1da790e05cf01f3c7422f4bbbdc26b\n'
2530 'size 11423554\n'
2531 )
2532
2533 with open(f) as fh:
2534 self.assertEqual(lfs_file, fh.read())
2535
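[Editor's note: the non-smudged placeholder is a tiny "key value" text file; a minimal sketch of pulling its fields apart, for illustration only:

    pointer = (
        'version https://git-lfs.github.com/spec/v1\n'
        'oid sha256:34be66b1a39a1955b46a12588df9d5f6fc1da790e05cf01f3c7422f4bbbdc26b\n'
        'size 11423554\n'
    )
    fields = dict(line.split(" ", 1) for line in pointer.splitlines())
    assert fields["oid"].startswith("sha256:")
    assert fields["size"] == "11423554"
]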
2536 @skipIfNoGitLFS()
2122 def test_lfs_enabled(self): 2537 def test_lfs_enabled(self):
2123 import shutil 2538 uri = 'git://%s;protocol=file;lfs=1;branch=master' % self.srcdir
2539 self.d.setVar('SRC_URI', uri)
2540
2541 # With git-lfs installed, test that we can fetch and unpack
2542 fetcher, ud = self.fetch()
2543 shutil.rmtree(self.gitdir, ignore_errors=True)
2544 fetcher.unpack(self.d.getVar('WORKDIR'))
2124 2545
2125 uri = 'git://%s;protocol=file;subdir=${S};lfs=1' % self.srcdir 2546 @skipIfNoGitLFS()
2547 def test_lfs_disabled(self):
2548 uri = 'git://%s;protocol=file;lfs=0;branch=master' % self.srcdir
2126 self.d.setVar('SRC_URI', uri) 2549 self.d.setVar('SRC_URI', uri)
2127 2550
2128 # Careful: suppress initial attempt at downloading until 2551 # Verify that the fetcher can survive even if the source
2129 # we know whether git-lfs is installed. 2552 # repository has Git LFS usage configured.
2130 fetcher, ud = self.fetch(uri=None, download=False) 2553 fetcher, ud = self.fetch()
2131 self.assertIsNotNone(ud.method._find_git_lfs) 2554 fetcher.unpack(self.d.getVar('WORKDIR'))
2132 2555
2133 # If git-lfs can be found, the unpack should be successful. Only 2556 @skipIfNoGitLFS()
2134 # attempt this with the real live copy of git-lfs installed. 2557 def test_lfs_enabled_not_installed_during_unpack(self):
2135 if ud.method._find_git_lfs(self.d): 2558 uri = 'git://%s;protocol=file;lfs=1;branch=master' % self.srcdir
2136 fetcher.download() 2559 self.d.setVar('SRC_URI', uri)
2137 shutil.rmtree(self.gitdir, ignore_errors=True)
2138 fetcher.unpack(self.d.getVar('WORKDIR'))
2139 2560
2561 # Careful: suppress initial attempt at downloading
2562 fetcher, ud = self.fetch(uri=None, download=False)
2563
2564 fetcher.download()
2140 # If git-lfs cannot be found, the unpack should throw an error 2565 # If git-lfs cannot be found, the unpack should throw an error
2141 with self.assertRaises(bb.fetch2.FetchError): 2566 with self.assertRaises(bb.fetch2.FetchError):
2567 with unittest.mock.patch("shutil.which", return_value=None):
2568 shutil.rmtree(self.gitdir, ignore_errors=True)
2569 fetcher.unpack(self.d.getVar('WORKDIR'))
2570
2571 def test_lfs_enabled_not_installed(self):
2572 uri = 'git://%s;protocol=file;lfs=1;branch=master' % self.srcdir
2573 self.d.setVar('SRC_URI', uri)
2574
2575 # Careful: suppress initial attempt at downloading
2576 fetcher, ud = self.fetch(uri=None, download=False)
2577
2578 # If git-lfs cannot be found, the download should throw an error
2579 with unittest.mock.patch("shutil.which", return_value=None):
2580 with self.assertRaises(bb.fetch2.FetchError):
2581 fetcher.download()
2582
2583 def test_lfs_disabled_not_installed(self):
2584 uri = 'git://%s;protocol=file;lfs=0;branch=master' % self.srcdir
2585 self.d.setVar('SRC_URI', uri)
2586
2587 # Careful: suppress initial attempt at downloading
2588 fetcher, ud = self.fetch(uri=None, download=False)
2589
2590 # Even if git-lfs cannot be found, the download / unpack should be successful
2591 with unittest.mock.patch("shutil.which", return_value=None):
2142 fetcher.download() 2592 fetcher.download()
2143 ud.method._find_git_lfs = lambda d: False
2144 shutil.rmtree(self.gitdir, ignore_errors=True) 2593 shutil.rmtree(self.gitdir, ignore_errors=True)
2145 fetcher.unpack(self.d.getVar('WORKDIR')) 2594 fetcher.unpack(self.d.getVar('WORKDIR'))
2146 2595
2147 def test_lfs_disabled(self): 2596 def test_lfs_enabled_not_installed_but_not_needed(self):
2148 import shutil 2597 srcdir = os.path.join(self.tempdir, "emptygit")
2598 bb.utils.mkdirhier(srcdir)
2599 self.git_init(srcdir)
2600 self.commit_file("test", "test content", cwd=srcdir)
2149 2601
2150 uri = 'git://%s;protocol=file;subdir=${S};lfs=0' % self.srcdir 2602 uri = 'git://%s;protocol=file;lfs=1;branch=master' % srcdir
2151 self.d.setVar('SRC_URI', uri) 2603 self.d.setVar('SRC_URI', uri)
2152 2604
2153 # In contrast to test_lfs_enabled(), allow the implicit download 2605 # Careful: suppress initial attempt at downloading
2154 # done by self.fetch() to occur here. The point of this test case 2606 fetcher, ud = self.fetch(uri=None, download=False)
2155 # is to verify that the fetcher can survive even if the source
2156 # repository has Git LFS usage configured.
2157 fetcher, ud = self.fetch()
2158 self.assertIsNotNone(ud.method._find_git_lfs)
2159
2160 # If git-lfs can be found, the unpack should be successful. A
2161 # live copy of git-lfs is not required for this case, so
2162 # unconditionally forge its presence.
2163 ud.method._find_git_lfs = lambda d: True
2164 shutil.rmtree(self.gitdir, ignore_errors=True)
2165 fetcher.unpack(self.d.getVar('WORKDIR'))
2166 2607
2167 # If git-lfs cannot be found, the unpack should be successful 2608 # It shouldn't matter that git-lfs cannot be found as the repository configuration does not
2168 ud.method._find_git_lfs = lambda d: False 2609 # specify any LFS filters.
2169 shutil.rmtree(self.gitdir, ignore_errors=True) 2610 with unittest.mock.patch("shutil.which", return_value=None):
2170 fetcher.unpack(self.d.getVar('WORKDIR')) 2611 fetcher.download()
2612 shutil.rmtree(self.gitdir, ignore_errors=True)
2613 fetcher.unpack(self.d.getVar('WORKDIR'))
2171 2614
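[Editor's note: several GitLfsTest cases simulate a missing git-lfs binary by patching shutil.which rather than altering PATH. The pattern in isolation, a sketch:

    import shutil
    import unittest.mock

    with unittest.mock.patch("shutil.which", return_value=None):
        # Inside the block, every lookup reports git-lfs (and anything else) as absent.
        assert shutil.which("git-lfs") is None
]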
2172class GitURLWithSpacesTest(FetcherTest): 2615class GitURLWithSpacesTest(FetcherTest):
2173 test_git_urls = { 2616 test_git_urls = {
2174 "git://tfs-example.org:22/tfs/example%20path/example.git" : { 2617 "git://tfs-example.org:22/tfs/example%20path/example.git;branch=master" : {
2175 'url': 'git://tfs-example.org:22/tfs/example%20path/example.git', 2618 'url': 'git://tfs-example.org:22/tfs/example%20path/example.git;branch=master',
2619 'repo_url': 'git://tfs-example.org:22/tfs/example%20path/example.git',
2176 'gitsrcname': 'tfs-example.org.22.tfs.example_path.example.git', 2620 'gitsrcname': 'tfs-example.org.22.tfs.example_path.example.git',
2177 'path': '/tfs/example path/example.git' 2621 'path': '/tfs/example path/example.git'
2178 }, 2622 },
2179 "git://tfs-example.org:22/tfs/example%20path/example%20repo.git" : { 2623 "git://tfs-example.org:22/tfs/example%20path/example%20repo.git;branch=master" : {
2180 'url': 'git://tfs-example.org:22/tfs/example%20path/example%20repo.git', 2624 'url': 'git://tfs-example.org:22/tfs/example%20path/example%20repo.git;branch=master',
2625 'repo_url': 'git://tfs-example.org:22/tfs/example%20path/example%20repo.git',
2181 'gitsrcname': 'tfs-example.org.22.tfs.example_path.example_repo.git', 2626 'gitsrcname': 'tfs-example.org.22.tfs.example_path.example_repo.git',
2182 'path': '/tfs/example path/example repo.git' 2627 'path': '/tfs/example path/example repo.git'
2183 } 2628 }
@@ -2200,19 +2645,137 @@ class GitURLWithSpacesTest(FetcherTest):
2200 self.assertEqual(ud.lockfile, os.path.join(self.dldir, "git2", ref['gitsrcname'] + '.lock')) 2645 self.assertEqual(ud.lockfile, os.path.join(self.dldir, "git2", ref['gitsrcname'] + '.lock'))
2201 self.assertEqual(ud.clonedir, os.path.join(self.dldir, "git2", ref['gitsrcname'])) 2646 self.assertEqual(ud.clonedir, os.path.join(self.dldir, "git2", ref['gitsrcname']))
2202 self.assertEqual(ud.fullmirror, os.path.join(self.dldir, "git2_" + ref['gitsrcname'] + '.tar.gz')) 2647 self.assertEqual(ud.fullmirror, os.path.join(self.dldir, "git2_" + ref['gitsrcname'] + '.tar.gz'))
2648 self.assertEqual(ud.method._get_repo_url(ud), ref['repo_url'])
2649
2650class CrateTest(FetcherTest):
2651 @skipIfNoNetwork()
2652 def test_crate_url(self):
2653
2654 uri = "crate://crates.io/glob/0.2.11"
2655 self.d.setVar('SRC_URI', uri)
2656
2657 uris = self.d.getVar('SRC_URI').split()
2658 d = self.d
2659
2660 fetcher = bb.fetch2.Fetch(uris, self.d)
2661 ud = fetcher.ud[fetcher.urls[0]]
2662
2663 self.assertIn("name", ud.parm)
2664 self.assertEqual(ud.parm["name"], "glob-0.2.11")
2665 self.assertIn("downloadfilename", ud.parm)
2666 self.assertEqual(ud.parm["downloadfilename"], "glob-0.2.11.crate")
2667
2668 fetcher.download()
2669 fetcher.unpack(self.tempdir)
2670 self.assertEqual(sorted(os.listdir(self.tempdir)), ['cargo_home', 'download' , 'unpacked'])
2671 self.assertEqual(sorted(os.listdir(self.tempdir + "/download")), ['glob-0.2.11.crate', 'glob-0.2.11.crate.done'])
2672 self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/glob-0.2.11/.cargo-checksum.json"))
2673 self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/glob-0.2.11/src/lib.rs"))
2674
2675 @skipIfNoNetwork()
2676 def test_crate_url_matching_recipe(self):
2677
2678 self.d.setVar('BP', 'glob-0.2.11')
2679
2680 uri = "crate://crates.io/glob/0.2.11"
2681 self.d.setVar('SRC_URI', uri)
2682
2683 uris = self.d.getVar('SRC_URI').split()
2684 d = self.d
2685
2686 fetcher = bb.fetch2.Fetch(uris, self.d)
2687 ud = fetcher.ud[fetcher.urls[0]]
2688
2689 self.assertIn("name", ud.parm)
2690 self.assertEqual(ud.parm["name"], "glob-0.2.11")
2691 self.assertIn("downloadfilename", ud.parm)
2692 self.assertEqual(ud.parm["downloadfilename"], "glob-0.2.11.crate")
2693
2694 fetcher.download()
2695 fetcher.unpack(self.tempdir)
2696 self.assertEqual(sorted(os.listdir(self.tempdir)), ['download', 'glob-0.2.11', 'unpacked'])
2697 self.assertEqual(sorted(os.listdir(self.tempdir + "/download")), ['glob-0.2.11.crate', 'glob-0.2.11.crate.done'])
2698 self.assertTrue(os.path.exists(self.tempdir + "/glob-0.2.11/src/lib.rs"))
2699
2700 @skipIfNoNetwork()
2701 def test_crate_url_params(self):
2702
2703 uri = "crate://crates.io/aho-corasick/0.7.20;name=aho-corasick-renamed"
2704 self.d.setVar('SRC_URI', uri)
2705
2706 uris = self.d.getVar('SRC_URI').split()
2707 d = self.d
2708
2709 fetcher = bb.fetch2.Fetch(uris, self.d)
2710 ud = fetcher.ud[fetcher.urls[0]]
2711
2712 self.assertIn("name", ud.parm)
2713 self.assertEqual(ud.parm["name"], "aho-corasick-renamed")
2714 self.assertIn("downloadfilename", ud.parm)
2715 self.assertEqual(ud.parm["downloadfilename"], "aho-corasick-0.7.20.crate")
2716
2717 fetcher.download()
2718 fetcher.unpack(self.tempdir)
2719 self.assertEqual(sorted(os.listdir(self.tempdir)), ['cargo_home', 'download' , 'unpacked'])
2720 self.assertEqual(sorted(os.listdir(self.tempdir + "/download")), ['aho-corasick-0.7.20.crate', 'aho-corasick-0.7.20.crate.done'])
2721 self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/aho-corasick-0.7.20/.cargo-checksum.json"))
2722 self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/aho-corasick-0.7.20/src/lib.rs"))
2723
2724 @skipIfNoNetwork()
2725 def test_crate_url_multi(self):
2726
2727 uri = "crate://crates.io/glob/0.2.11 crate://crates.io/time/0.1.35"
2728 self.d.setVar('SRC_URI', uri)
2729
2730 uris = self.d.getVar('SRC_URI').split()
2731 d = self.d
2732
2733 fetcher = bb.fetch2.Fetch(uris, self.d)
2734 ud = fetcher.ud[fetcher.urls[0]]
2735
2736 self.assertIn("name", ud.parm)
2737 self.assertEqual(ud.parm["name"], "glob-0.2.11")
2738 self.assertIn("downloadfilename", ud.parm)
2739 self.assertEqual(ud.parm["downloadfilename"], "glob-0.2.11.crate")
2740
2741 ud = fetcher.ud[fetcher.urls[1]]
2742 self.assertIn("name", ud.parm)
2743 self.assertEqual(ud.parm["name"], "time-0.1.35")
2744 self.assertIn("downloadfilename", ud.parm)
2745 self.assertEqual(ud.parm["downloadfilename"], "time-0.1.35.crate")
2746
2747 fetcher.download()
2748 fetcher.unpack(self.tempdir)
2749 self.assertEqual(sorted(os.listdir(self.tempdir)), ['cargo_home', 'download' , 'unpacked'])
2750 self.assertEqual(sorted(os.listdir(self.tempdir + "/download")), ['glob-0.2.11.crate', 'glob-0.2.11.crate.done', 'time-0.1.35.crate', 'time-0.1.35.crate.done'])
2751 self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/glob-0.2.11/.cargo-checksum.json"))
2752 self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/glob-0.2.11/src/lib.rs"))
2753 self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/time-0.1.35/.cargo-checksum.json"))
2754 self.assertTrue(os.path.exists(self.tempdir + "/cargo_home/bitbake/time-0.1.35/src/lib.rs"))
2755
2756 @skipIfNoNetwork()
2757 def test_crate_incorrect_cksum(self):
2758 uri = "crate://crates.io/aho-corasick/0.7.20"
2759 self.d.setVar('SRC_URI', uri)
2760 self.d.setVarFlag("SRC_URI", "aho-corasick-0.7.20.sha256sum", hashlib.sha256("Invalid".encode("utf-8")).hexdigest())
2761
2762 uris = self.d.getVar('SRC_URI').split()
2763
2764 fetcher = bb.fetch2.Fetch(uris, self.d)
2765 with self.assertRaisesRegex(bb.fetch2.FetchError, "Fetcher failure for URL"):
2766 fetcher.download()
2203 2767
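[Editor's note: the bad-checksum test relies on per-entry checksums living in SRC_URI varflags keyed as "<name>.sha256sum", where <name> is the crate's name parameter ("aho-corasick-0.7.20" here). Computing the deliberately wrong digest is plain hashlib; a sketch of the value the test sets:

    import hashlib

    bad_digest = hashlib.sha256("Invalid".encode("utf-8")).hexdigest()
    # The test then stores it as:
    #   d.setVarFlag("SRC_URI", "aho-corasick-0.7.20.sha256sum", bad_digest)
    assert len(bad_digest) == 64
]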
2204class NPMTest(FetcherTest): 2768class NPMTest(FetcherTest):
2205 def skipIfNoNpm(): 2769 def skipIfNoNpm():
2206 import shutil
2207 if not shutil.which('npm'): 2770 if not shutil.which('npm'):
2208 return unittest.skip('npm not installed, tests being skipped') 2771 return unittest.skip('npm not installed')
2209 return lambda f: f 2772 return lambda f: f
2210 2773
2211 @skipIfNoNpm() 2774 @skipIfNoNpm()
2212 @skipIfNoNetwork() 2775 @skipIfNoNetwork()
2213 def test_npm(self): 2776 def test_npm(self):
2214 url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0' 2777 urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0']
2215 fetcher = bb.fetch.Fetch([url], self.d) 2778 fetcher = bb.fetch.Fetch(urls, self.d)
2216 ud = fetcher.ud[fetcher.urls[0]] 2779 ud = fetcher.ud[fetcher.urls[0]]
2217 fetcher.download() 2780 fetcher.download()
2218 self.assertTrue(os.path.exists(ud.localpath)) 2781 self.assertTrue(os.path.exists(ud.localpath))
@@ -2225,9 +2788,9 @@ class NPMTest(FetcherTest):
    @skipIfNoNpm()
    @skipIfNoNetwork()
    def test_npm_bad_checksum(self):
-        url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0'
+        urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0']
        # Fetch once to get a tarball
-        fetcher = bb.fetch.Fetch([url], self.d)
+        fetcher = bb.fetch.Fetch(urls, self.d)
        ud = fetcher.ud[fetcher.urls[0]]
        fetcher.download()
        self.assertTrue(os.path.exists(ud.localpath))
@@ -2244,17 +2807,48 @@ class NPMTest(FetcherTest):
    @skipIfNoNpm()
    @skipIfNoNetwork()
    def test_npm_premirrors(self):
-        url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0'
+        urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0']
        # Fetch once to get a tarball
-        fetcher = bb.fetch.Fetch([url], self.d)
+        fetcher = bb.fetch.Fetch(urls, self.d)
+        ud = fetcher.ud[fetcher.urls[0]]
+        fetcher.download()
+        self.assertTrue(os.path.exists(ud.localpath))
+
+        # Setup the mirror by renaming the download directory
+        mirrordir = os.path.join(self.tempdir, 'mirror')
+        bb.utils.rename(self.dldir, mirrordir)
+        os.mkdir(self.dldir)
+
+        # Configure the premirror to be used
+        self.d.setVar('PREMIRRORS', 'https?$://.*/.* file://%s/npm2' % mirrordir)
+        self.d.setVar('BB_FETCH_PREMIRRORONLY', '1')
+
+        # Fetch again
+        self.assertFalse(os.path.exists(ud.localpath))
+        # The npm fetcher doesn't handle the .resolved file disappearing while
+        # the fetcher object exists, which happens when we rename the download
+        # directory to "mirror" above. Thus we need a new fetcher to go with
+        # the now-empty download directory.
+        fetcher = bb.fetch.Fetch(urls, self.d)
+        ud = fetcher.ud[fetcher.urls[0]]
+        fetcher.download()
+        self.assertTrue(os.path.exists(ud.localpath))
+
+    @skipIfNoNpm()
+    @skipIfNoNetwork()
+    def test_npm_premirrors_with_specified_filename(self):
+        urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0']
+        # Fetch once to get a tarball
+        fetcher = bb.fetch.Fetch(urls, self.d)
        ud = fetcher.ud[fetcher.urls[0]]
        fetcher.download()
        self.assertTrue(os.path.exists(ud.localpath))
        # Setup the mirror
        mirrordir = os.path.join(self.tempdir, 'mirror')
        bb.utils.mkdirhier(mirrordir)
-        os.replace(ud.localpath, os.path.join(mirrordir, os.path.basename(ud.localpath)))
-        self.d.setVar('PREMIRRORS', 'https?$://.*/.* file://%s/\n' % mirrordir)
+        mirrorfilename = os.path.join(mirrordir, os.path.basename(ud.localpath))
+        os.replace(ud.localpath, mirrorfilename)
+        self.d.setVar('PREMIRRORS', 'https?$://.*/.* file://%s' % mirrorfilename)
        self.d.setVar('BB_FETCH_PREMIRRORONLY', '1')
        # Fetch again
        self.assertFalse(os.path.exists(ud.localpath))
@@ -2265,8 +2859,8 @@ class NPMTest(FetcherTest):
    @skipIfNoNetwork()
    def test_npm_mirrors(self):
        # Fetch once to get a tarball
-        url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0'
-        fetcher = bb.fetch.Fetch([url], self.d)
+        urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0']
+        fetcher = bb.fetch.Fetch(urls, self.d)
        ud = fetcher.ud[fetcher.urls[0]]
        fetcher.download()
        self.assertTrue(os.path.exists(ud.localpath))
@@ -2274,7 +2868,7 @@ class NPMTest(FetcherTest):
        mirrordir = os.path.join(self.tempdir, 'mirror')
        bb.utils.mkdirhier(mirrordir)
        os.replace(ud.localpath, os.path.join(mirrordir, os.path.basename(ud.localpath)))
-        self.d.setVar('MIRRORS', 'https?$://.*/.* file://%s/\n' % mirrordir)
+        self.d.setVar('MIRRORS', 'https?$://.*/.* file://%s/' % mirrordir)
        # Update the resolved url to an invalid url
        with open(ud.resolvefile, 'r') as f:
            url = f.read()
@@ -2290,27 +2884,27 @@ class NPMTest(FetcherTest):
    @skipIfNoNpm()
    @skipIfNoNetwork()
    def test_npm_destsuffix_downloadfilename(self):
-        url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0;destsuffix=foo/bar;downloadfilename=foo-bar.tgz'
-        fetcher = bb.fetch.Fetch([url], self.d)
+        urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0;destsuffix=foo/bar;downloadfilename=foo-bar.tgz']
+        fetcher = bb.fetch.Fetch(urls, self.d)
        fetcher.download()
-        self.assertTrue(os.path.exists(os.path.join(self.dldir, 'foo-bar.tgz')))
+        self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'foo-bar.tgz')))
        fetcher.unpack(self.unpackdir)
        unpackdir = os.path.join(self.unpackdir, 'foo', 'bar')
        self.assertTrue(os.path.exists(os.path.join(unpackdir, 'package.json')))

    def test_npm_no_network_no_tarball(self):
-        url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0'
+        urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0']
        self.d.setVar('BB_NO_NETWORK', '1')
-        fetcher = bb.fetch.Fetch([url], self.d)
+        fetcher = bb.fetch.Fetch(urls, self.d)
        with self.assertRaises(bb.fetch2.NetworkAccess):
            fetcher.download()

    @skipIfNoNpm()
    @skipIfNoNetwork()
    def test_npm_no_network_with_tarball(self):
-        url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0'
+        urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=1.0.0']
        # Fetch once to get a tarball
-        fetcher = bb.fetch.Fetch([url], self.d)
+        fetcher = bb.fetch.Fetch(urls, self.d)
        fetcher.download()
        # Disable network access
        self.d.setVar('BB_NO_NETWORK', '1')
@@ -2323,8 +2917,8 @@ class NPMTest(FetcherTest):
    @skipIfNoNpm()
    @skipIfNoNetwork()
    def test_npm_registry_alternate(self):
-        url = 'npm://registry.freajs.org;package=@savoirfairelinux/node-server-example;version=1.0.0'
-        fetcher = bb.fetch.Fetch([url], self.d)
+        urls = ['npm://skimdb.npmjs.com;package=@savoirfairelinux/node-server-example;version=1.0.0']
+        fetcher = bb.fetch.Fetch(urls, self.d)
        fetcher.download()
        fetcher.unpack(self.unpackdir)
        unpackdir = os.path.join(self.unpackdir, 'npm')
@@ -2333,8 +2927,8 @@ class NPMTest(FetcherTest):
    @skipIfNoNpm()
    @skipIfNoNetwork()
    def test_npm_version_latest(self):
-        url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=latest'
-        fetcher = bb.fetch.Fetch([url], self.d)
+        urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=latest']
+        fetcher = bb.fetch.Fetch(urls, self.d)
        fetcher.download()
        fetcher.unpack(self.unpackdir)
        unpackdir = os.path.join(self.unpackdir, 'npm')
@@ -2343,46 +2937,46 @@ class NPMTest(FetcherTest):
    @skipIfNoNpm()
    @skipIfNoNetwork()
    def test_npm_registry_invalid(self):
-        url = 'npm://registry.invalid.org;package=@savoirfairelinux/node-server-example;version=1.0.0'
-        fetcher = bb.fetch.Fetch([url], self.d)
+        urls = ['npm://registry.invalid.org;package=@savoirfairelinux/node-server-example;version=1.0.0']
+        fetcher = bb.fetch.Fetch(urls, self.d)
        with self.assertRaises(bb.fetch2.FetchError):
            fetcher.download()

    @skipIfNoNpm()
    @skipIfNoNetwork()
    def test_npm_package_invalid(self):
-        url = 'npm://registry.npmjs.org;package=@savoirfairelinux/invalid;version=1.0.0'
-        fetcher = bb.fetch.Fetch([url], self.d)
+        urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/invalid;version=1.0.0']
+        fetcher = bb.fetch.Fetch(urls, self.d)
        with self.assertRaises(bb.fetch2.FetchError):
            fetcher.download()

    @skipIfNoNpm()
    @skipIfNoNetwork()
    def test_npm_version_invalid(self):
-        url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=invalid'
+        urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example;version=invalid']
        with self.assertRaises(bb.fetch2.ParameterError):
-            fetcher = bb.fetch.Fetch([url], self.d)
+            fetcher = bb.fetch.Fetch(urls, self.d)

    @skipIfNoNpm()
    @skipIfNoNetwork()
    def test_npm_registry_none(self):
-        url = 'npm://;package=@savoirfairelinux/node-server-example;version=1.0.0'
+        urls = ['npm://;package=@savoirfairelinux/node-server-example;version=1.0.0']
        with self.assertRaises(bb.fetch2.MalformedUrl):
-            fetcher = bb.fetch.Fetch([url], self.d)
+            fetcher = bb.fetch.Fetch(urls, self.d)

    @skipIfNoNpm()
    @skipIfNoNetwork()
    def test_npm_package_none(self):
-        url = 'npm://registry.npmjs.org;version=1.0.0'
+        urls = ['npm://registry.npmjs.org;version=1.0.0']
        with self.assertRaises(bb.fetch2.MissingParameterError):
-            fetcher = bb.fetch.Fetch([url], self.d)
+            fetcher = bb.fetch.Fetch(urls, self.d)

    @skipIfNoNpm()
    @skipIfNoNetwork()
    def test_npm_version_none(self):
-        url = 'npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example'
+        urls = ['npm://registry.npmjs.org;package=@savoirfairelinux/node-server-example']
        with self.assertRaises(bb.fetch2.MissingParameterError):
-            fetcher = bb.fetch.Fetch([url], self.d)
+            fetcher = bb.fetch.Fetch(urls, self.d)

    def create_shrinkwrap_file(self, data):
        import json
@@ -2391,32 +2985,30 @@ class NPMTest(FetcherTest):
        bb.utils.mkdirhier(datadir)
        with open(swfile, 'w') as f:
            json.dump(data, f)
-        # Also configure the S directory
-        self.sdir = os.path.join(self.unpackdir, 'S')
-        self.d.setVar('S', self.sdir)
        return swfile

-    @skipIfNoNpm()
    @skipIfNoNetwork()
    def test_npmsw(self):
        swfile = self.create_shrinkwrap_file({
-            'dependencies': {
-                'array-flatten': {
+            'packages': {
+                'node_modules/array-flatten': {
                    'version': '1.1.1',
                    'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
                    'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=',
                    'dependencies': {
-                        'content-type': {
-                            'version': 'https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz',
-                            'integrity': 'sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==',
-                            'dependencies': {
-                                'cookie': {
-                                    'version': 'git+https://github.com/jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09',
-                                    'from': 'git+https://github.com/jshttp/cookie.git'
-                                }
-                            }
-                        }
+                        'content-type': "1.0.4"
                    }
+                },
+                'node_modules/array-flatten/node_modules/content-type': {
+                    'version': '1.0.4',
+                    'resolved': 'https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz',
+                    'integrity': 'sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==',
+                    'dependencies': {
+                        'cookie': 'git+https://github.com/jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09'
+                    }
+                },
+                'node_modules/array-flatten/node_modules/content-type/node_modules/cookie': {
+                    'resolved': 'git+https://github.com/jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09'
                }
            }
        })
@@ -2426,22 +3018,34 @@ class NPMTest(FetcherTest):
        self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'content-type-1.0.4.tgz')))
        self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'github.com.jshttp.cookie.git')))
        fetcher.unpack(self.unpackdir)
-        self.assertTrue(os.path.exists(os.path.join(self.sdir, 'npm-shrinkwrap.json')))
-        self.assertTrue(os.path.exists(os.path.join(self.sdir, 'node_modules', 'array-flatten', 'package.json')))
-        self.assertTrue(os.path.exists(os.path.join(self.sdir, 'node_modules', 'array-flatten', 'node_modules', 'content-type', 'package.json')))
-        self.assertTrue(os.path.exists(os.path.join(self.sdir, 'node_modules', 'array-flatten', 'node_modules', 'content-type', 'node_modules', 'cookie', 'package.json')))
+        self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'npm-shrinkwrap.json')))
+        self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'node_modules', 'array-flatten', 'package.json')))
+        self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'node_modules', 'array-flatten', 'node_modules', 'content-type', 'package.json')))
+        self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'node_modules', 'array-flatten', 'node_modules', 'content-type', 'node_modules', 'cookie', 'package.json')))

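# The 'packages' table used above is the npm lockfileVersion 2/3 layout: keys
# are node_modules/ paths and nesting is expressed in the path itself, unlike
# the recursive 'dependencies' tree this change removes. An illustrative
# comparison (not taken from the tests):
#   old: {'dependencies': {'a': {'version': '1.0', 'dependencies': {'b': {...}}}}}
#   new: {'packages': {'node_modules/a': {...}, 'node_modules/a/node_modules/b': {...}}}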
+    @skipIfNoNetwork()
+    def test_npmsw_git(self):
+        swfile = self.create_shrinkwrap_file({
+            'packages': {
+                'node_modules/cookie': {
+                    'resolved': 'git+https://github.com/jshttp/cookie.git#aec1177c7da67e3b3273df96cf476824dbc9ae09'
+                }
+            }
+        })
+        fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d)
+        fetcher.download()
+        self.assertTrue(os.path.exists(os.path.join(self.dldir, 'git2', 'github.com.jshttp.cookie.git')))

-    @skipIfNoNpm()
    @skipIfNoNetwork()
    def test_npmsw_dev(self):
        swfile = self.create_shrinkwrap_file({
-            'dependencies': {
-                'array-flatten': {
+            'packages': {
+                'node_modules/array-flatten': {
                    'version': '1.1.1',
                    'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
                    'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
                },
-                'content-type': {
+                'node_modules/content-type': {
                    'version': '1.0.4',
                    'resolved': 'https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz',
                    'integrity': 'sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==',
@@ -2460,12 +3064,11 @@ class NPMTest(FetcherTest):
        self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'array-flatten-1.1.1.tgz')))
        self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'content-type-1.0.4.tgz')))

-    @skipIfNoNpm()
    @skipIfNoNetwork()
    def test_npmsw_destsuffix(self):
        swfile = self.create_shrinkwrap_file({
-            'dependencies': {
-                'array-flatten': {
+            'packages': {
+                'node_modules/array-flatten': {
                    'version': '1.1.1',
                    'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
                    'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
@@ -2479,8 +3082,8 @@ class NPMTest(FetcherTest):

    def test_npmsw_no_network_no_tarball(self):
        swfile = self.create_shrinkwrap_file({
-            'dependencies': {
-                'array-flatten': {
+            'packages': {
+                'node_modules/array-flatten': {
                    'version': '1.1.1',
                    'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
                    'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
@@ -2502,8 +3105,8 @@ class NPMTest(FetcherTest):
        self.d.setVar('BB_NO_NETWORK', '1')
        # Fetch again
        swfile = self.create_shrinkwrap_file({
-            'dependencies': {
-                'array-flatten': {
+            'packages': {
+                'node_modules/array-flatten': {
                    'version': '1.1.1',
                    'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
                    'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
@@ -2513,15 +3116,14 @@ class NPMTest(FetcherTest):
        fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d)
        fetcher.download()
        fetcher.unpack(self.unpackdir)
-        self.assertTrue(os.path.exists(os.path.join(self.sdir, 'node_modules', 'array-flatten', 'package.json')))
+        self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'node_modules', 'array-flatten', 'package.json')))

-    @skipIfNoNpm()
    @skipIfNoNetwork()
    def test_npmsw_npm_reusability(self):
        # Fetch once with npmsw
        swfile = self.create_shrinkwrap_file({
-            'dependencies': {
-                'array-flatten': {
+            'packages': {
+                'node_modules/array-flatten': {
                    'version': '1.1.1',
                    'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
                    'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
@@ -2538,13 +3140,12 @@ class NPMTest(FetcherTest):
        fetcher.unpack(self.unpackdir)
        self.assertTrue(os.path.exists(os.path.join(self.unpackdir, 'npm', 'package.json')))

-    @skipIfNoNpm()
    @skipIfNoNetwork()
    def test_npmsw_bad_checksum(self):
        # Try to fetch with bad checksum
        swfile = self.create_shrinkwrap_file({
-            'dependencies': {
-                'array-flatten': {
+            'packages': {
+                'node_modules/array-flatten': {
                    'version': '1.1.1',
                    'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
                    'integrity': 'sha1-gfNEp2hqgLTFKT6P3AsBYMgsBqg='
@@ -2556,8 +3157,8 @@ class NPMTest(FetcherTest):
            fetcher.download()
        # Fetch correctly to get a tarball
        swfile = self.create_shrinkwrap_file({
-            'dependencies': {
-                'array-flatten': {
+            'packages': {
+                'node_modules/array-flatten': {
                    'version': '1.1.1',
                    'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
                    'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
@@ -2590,13 +3191,13 @@ class NPMTest(FetcherTest):
        mirrordir = os.path.join(self.tempdir, 'mirror')
        bb.utils.mkdirhier(mirrordir)
        os.replace(ud.localpath, os.path.join(mirrordir, os.path.basename(ud.localpath)))
-        self.d.setVar('PREMIRRORS', 'https?$://.*/.* file://%s/\n' % mirrordir)
+        self.d.setVar('PREMIRRORS', 'https?$://.*/.* file://%s/' % mirrordir)
        self.d.setVar('BB_FETCH_PREMIRRORONLY', '1')
        # Fetch again
        self.assertFalse(os.path.exists(ud.localpath))
        swfile = self.create_shrinkwrap_file({
-            'dependencies': {
-                'array-flatten': {
+            'packages': {
+                'node_modules/array-flatten': {
                    'version': '1.1.1',
                    'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
                    'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
@@ -2619,12 +3220,12 @@ class NPMTest(FetcherTest):
        mirrordir = os.path.join(self.tempdir, 'mirror')
        bb.utils.mkdirhier(mirrordir)
        os.replace(ud.localpath, os.path.join(mirrordir, os.path.basename(ud.localpath)))
-        self.d.setVar('MIRRORS', 'https?$://.*/.* file://%s/\n' % mirrordir)
+        self.d.setVar('MIRRORS', 'https?$://.*/.* file://%s/' % mirrordir)
        # Fetch again with invalid url
        self.assertFalse(os.path.exists(ud.localpath))
        swfile = self.create_shrinkwrap_file({
-            'dependencies': {
-                'array-flatten': {
+            'packages': {
+                'node_modules/array-flatten': {
                    'version': '1.1.1',
                    'resolved': 'https://invalid',
                    'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
@@ -2634,3 +3235,521 @@ class NPMTest(FetcherTest):
        fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d)
        fetcher.download()
        self.assertTrue(os.path.exists(ud.localpath))
3238
3239 @skipIfNoNetwork()
3240 def test_npmsw_bundled(self):
3241 swfile = self.create_shrinkwrap_file({
3242 'packages': {
3243 'node_modules/array-flatten': {
3244 'version': '1.1.1',
3245 'resolved': 'https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz',
3246 'integrity': 'sha1-ml9pkFGx5wczKPKgCJaLZOopVdI='
3247 },
3248 'node_modules/content-type': {
3249 'version': '1.0.4',
3250 'resolved': 'https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz',
3251 'integrity': 'sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==',
3252 'inBundle': True
3253 }
3254 }
3255 })
3256 fetcher = bb.fetch.Fetch(['npmsw://' + swfile], self.d)
3257 fetcher.download()
3258 self.assertTrue(os.path.exists(os.path.join(self.dldir, 'npm2', 'array-flatten-1.1.1.tgz')))
3259 self.assertFalse(os.path.exists(os.path.join(self.dldir, 'npm2', 'content-type-1.0.4.tgz')))
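# Note on the assertions above: a package marked 'inBundle' ships inside its
# parent's tarball, so the npmsw fetcher has nothing to download for it; only
# the non-bundled array-flatten tarball is expected in DL_DIR.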
3260
3261class GitSharedTest(FetcherTest):
3262 def setUp(self):
3263 super(GitSharedTest, self).setUp()
3264 self.recipe_url = "git://git.openembedded.org/bitbake;branch=master;protocol=https"
3265 self.d.setVar('SRCREV', '82ea737a0b42a8b53e11c9cde141e9e9c0bd8c40')
3266 self.d.setVar("__BBSRCREV_SEEN", "1")
3267
3268 @skipIfNoNetwork()
3269 def test_shared_unpack(self):
3270 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
3271
3272 fetcher.download()
3273 fetcher.unpack(self.unpackdir)
3274 alt = os.path.join(self.unpackdir, 'git/.git/objects/info/alternates')
3275 self.assertTrue(os.path.exists(alt))
3276
3277 @skipIfNoNetwork()
3278 def test_noshared_unpack(self):
3279 self.d.setVar('BB_GIT_NOSHARED', '1')
3280 self.unpackdir += '_noshared'
3281 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
3282
3283 fetcher.download()
3284 fetcher.unpack(self.unpackdir)
3285 alt = os.path.join(self.unpackdir, 'git/.git/objects/info/alternates')
3286 self.assertFalse(os.path.exists(alt))
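# Background for the two tests above: by default BitBake unpacks git sources
# with an objects/info/alternates file pointing back at the clone in DL_DIR,
# so the work tree borrows objects instead of copying them; BB_GIT_NOSHARED=1
# forces a self-contained clone. A sketch of the property being asserted
# (hypothetical helper, assuming 'os' as imported by this module):
def is_shared_checkout(checkout_dir):
    # a present alternates file means the object store is borrowed
    return os.path.exists(os.path.join(checkout_dir, '.git/objects/info/alternates'))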
3287
3288class GitTagVerificationTests(FetcherTest):
3289
3290 @skipIfNoNetwork()
3291 def test_tag_rev_match(self):
3292 # Test a url with rev= and tag= set works
3293 fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.8;protocol=https;rev=aa0e540fc31a1c26839efd2c7785a751ce24ebfb;tag=2.8.7"], self.d)
3294 fetcher.download()
3295 fetcher.unpack(self.unpackdir)
3296
     @skipIfNoNetwork()
3297 def test_annotated_tag_rev_match(self):
3298 # Test a url with rev= and tag= set works
3299 # rev is the annotated tag revision in this case
3300 fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.8;protocol=https;rev=6d363159e4b7dc566fc40d069b2615e61774a7d8;tag=2.8.7"], self.d)
3301 fetcher.download()
3302 fetcher.unpack(self.unpackdir)
3303
3304 @skipIfNoNetwork()
3305 def test_tag_rev_match2(self):
3306 # Test a url with SRCREV and tag= set works
3307 self.d.setVar('SRCREV', 'aa0e540fc31a1c26839efd2c7785a751ce24ebfb')
3308 fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.8;protocol=https;tag=2.8.7"], self.d)
3309 fetcher.download()
3310 fetcher.unpack(self.unpackdir)
3311
3312 @skipIfNoNetwork()
3313 def test_tag_rev_match3(self):
3314 # Test a url with SRCREV, rev= and tag= set works
3315 self.d.setVar('SRCREV', 'aa0e540fc31a1c26839efd2c7785a751ce24ebfb')
3316 fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.8;protocol=https;rev=aa0e540fc31a1c26839efd2c7785a751ce24ebfb;tag=2.8.7"], self.d)
3317 fetcher.download()
3318 fetcher.unpack(self.unpackdir)
3319
3320 @skipIfNoNetwork()
3321 def test_tag_rev_match4(self):
3322 # Test a url with SRCREV and rev= mismatching errors
3323 self.d.setVar('SRCREV', 'bade540fc31a1c26839efd2c7785a751ce24ebfb')
3324 with self.assertRaises(bb.fetch2.FetchError):
3325 fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.8;protocol=https;rev=aa0e540fc31a1c26839efd2c7785a751ce24ebfb;tag=2.8.7"], self.d)
3326
3327 @skipIfNoNetwork()
3328 def test_tag_rev_match5(self):
3329 # Test a url with SRCREV, rev= and tag= set works when using shallow clones
3330 self.d.setVar('BB_GIT_SHALLOW', '1')
3331 self.d.setVar('SRCREV', 'aa0e540fc31a1c26839efd2c7785a751ce24ebfb')
3332 fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.8;protocol=https;rev=aa0e540fc31a1c26839efd2c7785a751ce24ebfb;tag=2.8.7"], self.d)
3333 fetcher.download()
3334 fetcher.unpack(self.unpackdir)
3335
3336 @skipIfNoNetwork()
3337 def test_tag_rev_match6(self):
3338 # Test a url with rev= and a mismatched tag= when using shallow clones
3339 self.d.setVar('BB_GIT_SHALLOW', '1')
3340 fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.8;protocol=https;rev=aa0e540fc31a1c26839efd2c7785a751ce24ebfb;tag=2.8.6"], self.d)
3341 fetcher.download()
3342 with self.assertRaises(bb.fetch2.FetchError):
3343 fetcher.unpack(self.unpackdir)
3344
3345 @skipIfNoNetwork()
3346 def test_tag_rev_match7(self):
3347 # Test a url with SRCREV, rev= and a mismatched tag=
3348 self.d.setVar('SRCREV', 'aa0e540fc31a1c26839efd2c7785a751ce24ebfb')
3349 fetcher = bb.fetch.Fetch(["git://git.openembedded.org/bitbake;branch=2.8;protocol=https;rev=aa0e540fc31a1c26839efd2c7785a751ce24ebfb;tag=2.8.6"], self.d)
3350 fetcher.download()
3351 with self.assertRaises(bb.fetch2.FetchError):
3352 fetcher.unpack(self.unpackdir)
3353
3354
3355class FetchPremirroronlyLocalTest(FetcherTest):
3356
3357 def setUp(self):
3358 super(FetchPremirroronlyLocalTest, self).setUp()
3359 self.mirrordir = os.path.join(self.tempdir, "mirrors")
3360 os.mkdir(self.mirrordir)
3361 self.reponame = "bitbake"
3362 self.gitdir = os.path.join(self.tempdir, "git", self.reponame)
3363 self.recipe_url = "git://git.fake.repo/bitbake;branch=master;protocol=https"
3364 self.d.setVar("BB_FETCH_PREMIRRORONLY", "1")
3365 self.d.setVar("BB_NO_NETWORK", "1")
3366 self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n")
3367 self.mirrorname = "git2_git.fake.repo.bitbake.tar.gz"
3368 self.mirrorfile = os.path.join(self.mirrordir, self.mirrorname)
3369 self.testfilename = "bitbake-fetch.test"
3370
3371 def make_git_repo(self):
3373 os.makedirs(self.gitdir)
3374 self.git_init(cwd=self.gitdir)
3377 bb.process.run('tar -czvf {} .'.format(os.path.join(self.mirrordir, self.mirrorname)), cwd = self.gitdir)
3378
3379 def git_new_commit(self):
3380 import random
3381 os.unlink(os.path.join(self.mirrordir, self.mirrorname))
3382 branch = self.git("branch --show-current", self.gitdir).split()
3383 with open(os.path.join(self.gitdir, self.testfilename), "w") as testfile:
3384 testfile.write("File {} from branch {}; Useless random data {}".format(self.testfilename, branch, random.random()))
3385 self.git("add {}".format(self.testfilename), self.gitdir)
3386 self.git("commit -a -m \"This random commit {} in branch {}. I'm useless.\"".format(random.random(), branch), self.gitdir)
3387 bb.process.run('tar -czvf {} .'.format(os.path.join(self.mirrordir, self.mirrorname)), cwd = self.gitdir)
3388 return self.git("rev-parse HEAD", self.gitdir).strip()
3389
3390 def git_new_branch(self, name):
3391 self.git_new_commit()
3392 head = self.git("rev-parse HEAD", self.gitdir).strip()
3393 self.git("checkout -b {}".format(name), self.gitdir)
3394 newrev = self.git_new_commit()
3395 self.git("checkout {}".format(head), self.gitdir)
3396 return newrev
3397
3398 def test_mirror_multiple_fetches(self):
3399 self.make_git_repo()
3400 self.d.setVar("SRCREV", self.git_new_commit())
3401 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
3402 fetcher.download()
3403 fetcher.unpack(self.unpackdir)
3404 ## New commit in premirror; it's not in the download_dir
3405 self.d.setVar("SRCREV", self.git_new_commit())
3406 fetcher2 = bb.fetch.Fetch([self.recipe_url], self.d)
3407 fetcher2.download()
3408 fetcher2.unpack(self.unpackdir)
3409 ## New commit in premirror; it's not in the download_dir
3410 self.d.setVar("SRCREV", self.git_new_commit())
3411 fetcher3 = bb.fetch.Fetch([self.recipe_url], self.d)
3412 fetcher3.download()
3413 fetcher3.unpack(self.unpackdir)
3414
3415
3416 def test_mirror_commit_nonexistent(self):
3417 self.make_git_repo()
3418 self.d.setVar("SRCREV", "0"*40)
3419 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
3420 with self.assertRaises(bb.fetch2.NetworkAccess):
3421 fetcher.download()
3422
3423 def test_mirror_commit_exists(self):
3424 self.make_git_repo()
3425 self.d.setVar("SRCREV", self.git_new_commit())
3426 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
3427 fetcher.download()
3428 fetcher.unpack(self.unpackdir)
3429
3430 def test_mirror_tarball_nonexistent(self):
3431 self.d.setVar("SRCREV", "0"*40)
3432 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
3433 with self.assertRaises(bb.fetch2.NetworkAccess):
3434 fetcher.download()
3435
3436
3437class FetchPremirroronlyNetworkTest(FetcherTest):
3438
3439 def setUp(self):
3440 super(FetchPremirroronlyNetworkTest, self).setUp()
3441 self.mirrordir = os.path.join(self.tempdir, "mirrors")
3442 os.mkdir(self.mirrordir)
3443 self.reponame = "fstests"
3444 self.clonedir = os.path.join(self.tempdir, "git")
3445 self.gitdir = os.path.join(self.tempdir, "git", "{}.git".format(self.reponame))
3446 self.recipe_url = "git://git.yoctoproject.org/fstests;protocol=https;branch=master"
3447 self.d.setVar("BB_FETCH_PREMIRRORONLY", "1")
3448 self.d.setVar("BB_NO_NETWORK", "0")
3449 self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n")
3450
3451 def make_git_repo(self):
3452 self.mirrorname = "git2_git.yoctoproject.org.fstests.tar.gz"
3453 os.makedirs(self.clonedir)
3454 self.git("clone --bare {}".format(self.recipe_url), self.clonedir)
3455 self.git("update-ref HEAD 15413486df1f5a5b5af699b6f3ba5f0984e52a9f", self.gitdir)
3456 bb.process.run('tar -czvf {} .'.format(os.path.join(self.mirrordir, self.mirrorname)), cwd = self.gitdir)
3457 shutil.rmtree(self.clonedir)
3458
3459 @skipIfNoNetwork()
3460 def test_mirror_tarball_updated(self):
3461 self.make_git_repo()
3462 ## Upstream commit is in the mirror
3463 self.d.setVar("SRCREV", "15413486df1f5a5b5af699b6f3ba5f0984e52a9f")
3464 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
3465 fetcher.download()
3466
3467 @skipIfNoNetwork()
3468 def test_mirror_tarball_outdated(self):
3469 self.make_git_repo()
3470 ## Upstream commit not in the mirror
3471 self.d.setVar("SRCREV", "49d65d53c2bf558ae6e9185af0f3af7b79d255ec")
3472 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
3473 with self.assertRaises(bb.fetch2.NetworkAccess):
3474 fetcher.download()
3475
3476class FetchPremirroronlyMercurialTest(FetcherTest):
3477 """ Test for premirrors with mercurial repos
3478 the test covers also basic hg:// clone (see fetch_and_create_tarball
3479 """
3480 def skipIfNoHg():
3481 if not shutil.which('hg'):
3482 return unittest.skip('Mercurial not installed')
3483 return lambda f: f
3484
3485 def setUp(self):
3486 super(FetchPremirroronlyMercurialTest, self).setUp()
3487 self.mirrordir = os.path.join(self.tempdir, "mirrors")
3488 os.mkdir(self.mirrordir)
3489 self.reponame = "libgnt"
3490 self.clonedir = os.path.join(self.tempdir, "hg")
3491 self.recipe_url = "hg://keep.imfreedom.org/libgnt;module=libgnt"
3492 self.d.setVar("SRCREV", "53e8b422faaf")
3493 self.mirrorname = "hg_libgnt_keep.imfreedom.org_.libgnt.tar.gz"
3494
3495 def fetch_and_create_tarball(self):
3496 """
3497 Ask bitbake to download the repo and prepare a mirror tarball for us
3498 """
3499 self.d.setVar("BB_GENERATE_MIRROR_TARBALLS", "1")
3500 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
3501 fetcher.download()
3502 mirrorfile = os.path.join(self.d.getVar("DL_DIR"), self.mirrorname)
3503 self.assertTrue(os.path.exists(mirrorfile), "Mirror tarball {} has not been created".format(mirrorfile))
3504 ## moving tarball to mirror directory
3505 os.rename(mirrorfile, os.path.join(self.mirrordir, self.mirrorname))
3506 self.d.setVar("BB_GENERATE_MIRROR_TARBALLS", "0")
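# BB_GENERATE_MIRROR_TARBALLS=1 asks the fetcher to write a mirror tarball of
# the VCS clone into DL_DIR; moving that tarball into the PREMIRRORS location
# is what lets the premirror-only fetch below succeed even with BB_NO_NETWORK=1.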
3507
3508
3509 @skipIfNoNetwork()
3510 @skipIfNoHg()
3511 def test_premirror_mercurial(self):
3512 self.fetch_and_create_tarball()
3513 self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n")
3514 self.d.setVar("BB_FETCH_PREMIRRORONLY", "1")
3515 self.d.setVar("BB_NO_NETWORK", "1")
3516 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
3517 fetcher.download()
3518
3519class FetchPremirroronlyBrokenTarball(FetcherTest):
3520
3521 def setUp(self):
3522 super(FetchPremirroronlyBrokenTarball, self).setUp()
3523 self.mirrordir = os.path.join(self.tempdir, "mirrors")
3524 os.mkdir(self.mirrordir)
3525 self.reponame = "bitbake"
3526 self.gitdir = os.path.join(self.tempdir, "git", self.reponame)
3527 self.recipe_url = "git://git.fake.repo/bitbake;protocol=https;branch=master"
3528 self.d.setVar("BB_FETCH_PREMIRRORONLY", "1")
3529 self.d.setVar("BB_NO_NETWORK", "1")
3530 self.d.setVar("PREMIRRORS", self.recipe_url + " " + "file://{}".format(self.mirrordir) + " \n")
3531 self.mirrorname = "git2_git.fake.repo.bitbake.tar.gz"
3532 with open(os.path.join(self.mirrordir, self.mirrorname), 'w') as targz:
3533 targz.write("This is not tar.gz file!")
3534
3535 def test_mirror_broken_download(self):
3536 self.d.setVar("SRCREV", "0"*40)
3537 fetcher = bb.fetch.Fetch([self.recipe_url], self.d)
3538 with self.assertRaises(bb.fetch2.FetchError), self.assertLogs() as logs:
3539 fetcher.download()
3540 output = "".join(logs.output)
3541 self.assertFalse(" not a git repository (or any parent up to mount point /)" in output)
3542
3543class GoModTest(FetcherTest):
3544
3545 @skipIfNoNetwork()
3546 def test_gomod_url(self):
3547 urls = ['gomod://github.com/Azure/azure-sdk-for-go/sdk/storage/azblob;version=v1.0.0;'
3548 'sha256sum=9bb69aea32f1d59711701f9562d66432c9c0374205e5009d1d1a62f03fb4fdad']
3549
3550 fetcher = bb.fetch2.Fetch(urls, self.d)
3551 ud = fetcher.ud[urls[0]]
3552 self.assertEqual(ud.url, 'https://proxy.golang.org/github.com/%21azure/azure-sdk-for-go/sdk/storage/azblob/%40v/v1.0.0.zip')
3553 self.assertEqual(ud.parm['downloadfilename'], 'github.com.Azure.azure-sdk-for-go.sdk.storage.azblob@v1.0.0.zip')
3554 self.assertEqual(ud.parm['name'], 'github.com/Azure/azure-sdk-for-go/sdk/storage/azblob@v1.0.0')
3555
3556 fetcher.download()
3557 fetcher.unpack(self.unpackdir)
3558 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3559 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.zip')))
3560 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.mod')))
3561 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.mod')),
3562 '7873b8544842329b4f385a3aa6cf82cc2bc8defb41a04fa5291c35fd5900e873')
3563
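# The '%21azure' and '!azure' forms asserted above come from the Go module
# proxy's case-encoding: every uppercase letter in a module path is escaped as
# '!' followed by its lowercase form. A minimal sketch (hypothetical helper,
# not part of the gomod fetcher):
def escape_go_module_path(path):
    return ''.join('!' + c.lower() if c.isupper() else c for c in path)

# escape_go_module_path('github.com/Azure/azure-sdk-for-go')
# -> 'github.com/!azure/azure-sdk-for-go'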
3564 @skipIfNoNetwork()
3565 def test_gomod_url_go_mod_only(self):
3566 urls = ['gomod://github.com/Azure/azure-sdk-for-go/sdk/storage/azblob;version=v1.0.0;mod=1;'
3567 'sha256sum=7873b8544842329b4f385a3aa6cf82cc2bc8defb41a04fa5291c35fd5900e873']
3568
3569 fetcher = bb.fetch2.Fetch(urls, self.d)
3570 ud = fetcher.ud[urls[0]]
3571 self.assertEqual(ud.url, 'https://proxy.golang.org/github.com/%21azure/azure-sdk-for-go/sdk/storage/azblob/%40v/v1.0.0.mod')
3572 self.assertEqual(ud.parm['downloadfilename'], 'github.com.Azure.azure-sdk-for-go.sdk.storage.azblob@v1.0.0.mod')
3573 self.assertEqual(ud.parm['name'], 'github.com/Azure/azure-sdk-for-go/sdk/storage/azblob@v1.0.0')
3574
3575 fetcher.download()
3576 fetcher.unpack(self.unpackdir)
3577 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3578 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.mod')))
3579
3580 @skipIfNoNetwork()
3581 def test_gomod_url_sha256sum_varflag(self):
3582 urls = ['gomod://gopkg.in/ini.v1;version=v1.67.0']
3583 self.d.setVarFlag('SRC_URI', 'gopkg.in/ini.v1@v1.67.0.sha256sum', 'bd845dfc762a87a56e5a32a07770dc83e86976db7705d7f89c5dbafdc60b06c6')
3584
3585 fetcher = bb.fetch2.Fetch(urls, self.d)
3586 ud = fetcher.ud[urls[0]]
3587 self.assertEqual(ud.url, 'https://proxy.golang.org/gopkg.in/ini.v1/%40v/v1.67.0.zip')
3588 self.assertEqual(ud.parm['downloadfilename'], 'gopkg.in.ini.v1@v1.67.0.zip')
3589 self.assertEqual(ud.parm['name'], 'gopkg.in/ini.v1@v1.67.0')
3590
3591 fetcher.download()
3592 fetcher.unpack(self.unpackdir)
3593 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3594 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.zip')))
3595 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')))
3596 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')),
3597 '13aedd85db8e555104108e0e613bb7e4d1242af7f27c15423dd9ab63b60b72a1')
3598
3599 @skipIfNoNetwork()
3600 def test_gomod_url_no_go_mod_in_module(self):
3601 urls = ['gomod://gopkg.in/ini.v1;version=v1.67.0;'
3602 'sha256sum=bd845dfc762a87a56e5a32a07770dc83e86976db7705d7f89c5dbafdc60b06c6']
3603
3604 fetcher = bb.fetch2.Fetch(urls, self.d)
3605 ud = fetcher.ud[urls[0]]
3606 self.assertEqual(ud.url, 'https://proxy.golang.org/gopkg.in/ini.v1/%40v/v1.67.0.zip')
3607 self.assertEqual(ud.parm['downloadfilename'], 'gopkg.in.ini.v1@v1.67.0.zip')
3608 self.assertEqual(ud.parm['name'], 'gopkg.in/ini.v1@v1.67.0')
3609
3610 fetcher.download()
3611 fetcher.unpack(self.unpackdir)
3612 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3613 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.zip')))
3614 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')))
3615 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')),
3616 '13aedd85db8e555104108e0e613bb7e4d1242af7f27c15423dd9ab63b60b72a1')
3617
3618 @skipIfNoNetwork()
3619 def test_gomod_url_host_only(self):
3620 urls = ['gomod://go.opencensus.io;version=v0.24.0;'
3621 'sha256sum=203a767d7f8e7c1ebe5588220ad168d1e15b14ae70a636de7ca9a4a88a7e0d0c']
3622
3623 fetcher = bb.fetch2.Fetch(urls, self.d)
3624 ud = fetcher.ud[urls[0]]
3625 self.assertEqual(ud.url, 'https://proxy.golang.org/go.opencensus.io/%40v/v0.24.0.zip')
3626 self.assertEqual(ud.parm['downloadfilename'], 'go.opencensus.io@v0.24.0.zip')
3627 self.assertEqual(ud.parm['name'], 'go.opencensus.io@v0.24.0')
3628
3629 fetcher.download()
3630 fetcher.unpack(self.unpackdir)
3631 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3632 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.zip')))
3633 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.mod')))
3634 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.mod')),
3635 '0dc9ccc660ad21cebaffd548f2cc6efa27891c68b4fbc1f8a3893b00f1acec96')
3636
3637class GoModGitTest(FetcherTest):
3638
3639 @skipIfNoNetwork()
3640 def test_gomodgit_url_repo(self):
3641 urls = ['gomodgit://golang.org/x/net;version=v0.9.0;'
3642 'repo=go.googlesource.com/net;'
3643 'srcrev=694cff8668bac64e0864b552bffc280cd27f21b1']
3644
3645 fetcher = bb.fetch2.Fetch(urls, self.d)
3646 ud = fetcher.ud[urls[0]]
3647 self.assertEqual(ud.host, 'go.googlesource.com')
3648 self.assertEqual(ud.path, '/net')
3649 self.assertEqual(ud.name, 'golang.org/x/net@v0.9.0')
3650 self.assertEqual(self.d.getVar('SRCREV_golang.org/x/net@v0.9.0'), '694cff8668bac64e0864b552bffc280cd27f21b1')
3651
3652 fetcher.download()
3653 self.assertTrue(os.path.exists(ud.localpath))
3654
3655 fetcher.unpack(self.unpackdir)
3656 vcsdir = os.path.join(self.unpackdir, 'pkg/mod/cache/vcs')
3657 self.assertTrue(os.path.exists(os.path.join(vcsdir, 'ed42bd05533fd84ae290a5d33ebd3695a0a2b06131beebd5450825bee8603aca')))
3658 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3659 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'golang.org/x/net/@v/v0.9.0.zip')))
3660 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'golang.org/x/net/@v/v0.9.0.mod')))
3661 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'golang.org/x/net/@v/v0.9.0.mod')),
3662 'c5d6851ede50ec1c001afb763040194b68961bf06997e2605e8bf06dcd2aeb2e')
3663
3664 @skipIfNoNetwork()
3665 def test_gomodgit_url_subdir(self):
3666 urls = ['gomodgit://github.com/Azure/azure-sdk-for-go/sdk/storage/azblob;version=v1.0.0;'
3667 'repo=github.com/Azure/azure-sdk-for-go;subdir=sdk/storage/azblob;'
3668 'srcrev=ec928e0ed34db682b3f783d3739d1c538142e0c3']
3669
3670 fetcher = bb.fetch2.Fetch(urls, self.d)
3671 ud = fetcher.ud[urls[0]]
3672 self.assertEqual(ud.host, 'github.com')
3673 self.assertEqual(ud.path, '/Azure/azure-sdk-for-go')
3674 self.assertEqual(ud.parm['subpath'], 'sdk/storage/azblob')
3675 self.assertEqual(ud.name, 'github.com/Azure/azure-sdk-for-go/sdk/storage/azblob@v1.0.0')
3676 self.assertEqual(self.d.getVar('SRCREV_github.com/Azure/azure-sdk-for-go/sdk/storage/azblob@v1.0.0'), 'ec928e0ed34db682b3f783d3739d1c538142e0c3')
3677
3678 fetcher.download()
3679 self.assertTrue(os.path.exists(ud.localpath))
3680
3681 fetcher.unpack(self.unpackdir)
3682 vcsdir = os.path.join(self.unpackdir, 'pkg/mod/cache/vcs')
3683 self.assertTrue(os.path.exists(os.path.join(vcsdir, 'd31d6145676ed3066ce573a8198f326dea5be45a43b3d8f41ce7787fd71d66b3')))
3684 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3685 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.zip')))
3686 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.mod')))
3687 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'github.com/!azure/azure-sdk-for-go/sdk/storage/azblob/@v/v1.0.0.mod')),
3688 '7873b8544842329b4f385a3aa6cf82cc2bc8defb41a04fa5291c35fd5900e873')
3689
3690 @skipIfNoNetwork()
3691 def test_gomodgit_url_srcrev_var(self):
3692 urls = ['gomodgit://gopkg.in/ini.v1;version=v1.67.0']
3693 self.d.setVar('SRCREV_gopkg.in/ini.v1@v1.67.0', 'b2f570e5b5b844226bbefe6fb521d891f529a951')
3694
3695 fetcher = bb.fetch2.Fetch(urls, self.d)
3696 ud = fetcher.ud[urls[0]]
3697 self.assertEqual(ud.host, 'gopkg.in')
3698 self.assertEqual(ud.path, '/ini.v1')
3699 self.assertEqual(ud.name, 'gopkg.in/ini.v1@v1.67.0')
3700 self.assertEqual(ud.parm['srcrev'], 'b2f570e5b5b844226bbefe6fb521d891f529a951')
3701
3702 fetcher.download()
3703 fetcher.unpack(self.unpackdir)
3704 vcsdir = os.path.join(self.unpackdir, 'pkg/mod/cache/vcs')
3705 self.assertTrue(os.path.exists(os.path.join(vcsdir, 'b7879a4be9ba8598851b8278b14c4f71a8316be64913298d1639cce6bde59bc3')))
3706 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3707 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.zip')))
3708 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')))
3709 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')),
3710 '13aedd85db8e555104108e0e613bb7e4d1242af7f27c15423dd9ab63b60b72a1')
3711
3712 @skipIfNoNetwork()
3713 def test_gomodgit_url_no_go_mod_in_module(self):
3714 urls = ['gomodgit://gopkg.in/ini.v1;version=v1.67.0;'
3715 'srcrev=b2f570e5b5b844226bbefe6fb521d891f529a951']
3716
3717 fetcher = bb.fetch2.Fetch(urls, self.d)
3718 ud = fetcher.ud[urls[0]]
3719 self.assertEqual(ud.host, 'gopkg.in')
3720 self.assertEqual(ud.path, '/ini.v1')
3721 self.assertEqual(ud.name, 'gopkg.in/ini.v1@v1.67.0')
3722 self.assertEqual(self.d.getVar('SRCREV_gopkg.in/ini.v1@v1.67.0'), 'b2f570e5b5b844226bbefe6fb521d891f529a951')
3723
3724 fetcher.download()
3725 fetcher.unpack(self.unpackdir)
3726 vcsdir = os.path.join(self.unpackdir, 'pkg/mod/cache/vcs')
3727 self.assertTrue(os.path.exists(os.path.join(vcsdir, 'b7879a4be9ba8598851b8278b14c4f71a8316be64913298d1639cce6bde59bc3')))
3728 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3729 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.zip')))
3730 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')))
3731 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'gopkg.in/ini.v1/@v/v1.67.0.mod')),
3732 '13aedd85db8e555104108e0e613bb7e4d1242af7f27c15423dd9ab63b60b72a1')
3733
3734 @skipIfNoNetwork()
3735 def test_gomodgit_url_host_only(self):
3736 urls = ['gomodgit://go.opencensus.io;version=v0.24.0;'
3737 'repo=github.com/census-instrumentation/opencensus-go;'
3738 'srcrev=b1a01ee95db0e690d91d7193d037447816fae4c5']
3739
3740 fetcher = bb.fetch2.Fetch(urls, self.d)
3741 ud = fetcher.ud[urls[0]]
3742 self.assertEqual(ud.host, 'github.com')
3743 self.assertEqual(ud.path, '/census-instrumentation/opencensus-go')
3744 self.assertEqual(ud.name, 'go.opencensus.io@v0.24.0')
3745 self.assertEqual(self.d.getVar('SRCREV_go.opencensus.io@v0.24.0'), 'b1a01ee95db0e690d91d7193d037447816fae4c5')
3746
3747 fetcher.download()
3748 fetcher.unpack(self.unpackdir)
3749 vcsdir = os.path.join(self.unpackdir, 'pkg/mod/cache/vcs')
3750 self.assertTrue(os.path.exists(os.path.join(vcsdir, 'aae3ac7b2122ed3345654e6327855e9682f4a5350d63e93dbcfc51c4419df0e1')))
3751 downloaddir = os.path.join(self.unpackdir, 'pkg/mod/cache/download')
3752 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.zip')))
3753 self.assertTrue(os.path.exists(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.mod')))
3754 self.assertEqual(bb.utils.sha256_file(os.path.join(downloaddir, 'go.opencensus.io/@v/v0.24.0.mod')),
3755 '0dc9ccc660ad21cebaffd548f2cc6efa27891c68b4fbc1f8a3893b00f1acec96')
diff --git a/bitbake/lib/bb/tests/parse.py b/bitbake/lib/bb/tests/parse.py
index 9e21e18425..e3cba67ad4 100644
--- a/bitbake/lib/bb/tests/parse.py
+++ b/bitbake/lib/bb/tests/parse.py
@@ -75,6 +75,59 @@ unset B[flag]
        self.assertEqual(d.getVarFlag("A","flag"), None)
        self.assertEqual(d.getVar("B"), "2")

78 defaulttest = """
79A = "set value"
80A ??= "default value"
81
82A[flag_set_vs_question] = "set flag"
83A[flag_set_vs_question] ?= "question flag"
84
85A[flag_set_vs_default] = "set flag"
86A[flag_set_vs_default] ??= "default flag"
87
88A[flag_question] ?= "question flag"
89
90A[flag_default] ??= "default flag"
91
92A[flag_question_vs_default] ?= "question flag"
93A[flag_question_vs_default] ??= "default flag"
94
95A[flag_default_vs_question] ??= "default flag"
96A[flag_default_vs_question] ?= "question flag"
97
98A[flag_set_question_default] = "set flag"
99A[flag_set_question_default] ?= "question flag"
100A[flag_set_question_default] ??= "default flag"
101
102A[flag_set_default_question] = "set flag"
103A[flag_set_default_question] ??= "default flag"
104A[flag_set_default_question] ?= "question flag"
105
106A[flag_set_twice] = "set flag first"
107A[flag_set_twice] = "set flag second"
108
109A[flag_question_twice] ?= "question flag first"
110A[flag_question_twice] ?= "question flag second"
111
112A[flag_default_twice] ??= "default flag first"
113A[flag_default_twice] ??= "default flag second"
114"""
115 def test_parse_defaulttest(self):
116 f = self.parsehelper(self.defaulttest)
117 d = bb.parse.handle(f.name, self.d)['']
118 self.assertEqual(d.getVar("A"), "set value")
119 self.assertEqual(d.getVarFlag("A","flag_set_vs_question"), "set flag")
120 self.assertEqual(d.getVarFlag("A","flag_set_vs_default"), "set flag")
121 self.assertEqual(d.getVarFlag("A","flag_question"), "question flag")
122 self.assertEqual(d.getVarFlag("A","flag_default"), "default flag")
123 self.assertEqual(d.getVarFlag("A","flag_question_vs_default"), "question flag")
124 self.assertEqual(d.getVarFlag("A","flag_default_vs_question"), "question flag")
125 self.assertEqual(d.getVarFlag("A","flag_set_question_default"), "set flag")
126 self.assertEqual(d.getVarFlag("A","flag_set_default_question"), "set flag")
127 self.assertEqual(d.getVarFlag("A","flag_set_twice"), "set flag second")
128 self.assertEqual(d.getVarFlag("A","flag_question_twice"), "question flag first")
129 self.assertEqual(d.getVarFlag("A","flag_default_twice"), "default flag second")
130
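# The assertions above pin down the three assignment strengths: '=' always
# wins; '?=' applies only if nothing is set yet, so the first '?=' sticks;
# '??=' is a lazy default applied last, so the last '??=' wins and any '?='
# beats it regardless of order.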
    exporttest = """
A = "a"
export B = "b"
@@ -98,8 +151,8 @@ exportD = "d"


    overridetest = """
-RRECOMMENDS_${PN} = "a"
-RRECOMMENDS_${PN}_libc = "b"
+RRECOMMENDS:${PN} = "a"
+RRECOMMENDS:${PN}:libc = "b"
OVERRIDES = "libc:${PN}"
PN = "gtk+"
"""
@@ -110,16 +163,16 @@ PN = "gtk+"
        self.assertEqual(d.getVar("RRECOMMENDS"), "b")
        bb.data.expandKeys(d)
        self.assertEqual(d.getVar("RRECOMMENDS"), "b")
-        d.setVar("RRECOMMENDS_gtk+", "c")
+        d.setVar("RRECOMMENDS:gtk+", "c")
        self.assertEqual(d.getVar("RRECOMMENDS"), "c")

    overridetest2 = """
EXTRA_OECONF = ""
-EXTRA_OECONF_class-target = "b"
-EXTRA_OECONF_append = " c"
+EXTRA_OECONF:class-target = "b"
+EXTRA_OECONF:append = " c"
"""

-    def test_parse_overrides(self):
+    def test_parse_overrides2(self):
        f = self.parsehelper(self.overridetest2)
        d = bb.parse.handle(f.name, self.d)['']
        d.appendVar("EXTRA_OECONF", " d")
@@ -128,7 +181,7 @@ EXTRA_OECONF_append = " c"

    overridetest3 = """
DESCRIPTION = "A"
-DESCRIPTION_${PN}-dev = "${DESCRIPTION} B"
+DESCRIPTION:${PN}-dev = "${DESCRIPTION} B"
PN = "bc"
"""

@@ -136,15 +189,15 @@ PN = "bc"
        f = self.parsehelper(self.overridetest3)
        d = bb.parse.handle(f.name, self.d)['']
        bb.data.expandKeys(d)
-        self.assertEqual(d.getVar("DESCRIPTION_bc-dev"), "A B")
+        self.assertEqual(d.getVar("DESCRIPTION:bc-dev"), "A B")
        d.setVar("DESCRIPTION", "E")
-        d.setVar("DESCRIPTION_bc-dev", "C D")
+        d.setVar("DESCRIPTION:bc-dev", "C D")
        d.setVar("OVERRIDES", "bc-dev")
        self.assertEqual(d.getVar("DESCRIPTION"), "C D")


    classextend = """
-VAR_var_override1 = "B"
+VAR_var:override1 = "B"
EXTRA = ":override1"
OVERRIDES = "nothing${EXTRA}"

@@ -164,6 +217,7 @@ python () {
164 # become unset/disappear. 217 # become unset/disappear.
165 # 218 #
166 def test_parse_classextend_contamination(self): 219 def test_parse_classextend_contamination(self):
220 self.d.setVar("__bbclasstype", "recipe")
167 cls = self.parsehelper(self.classextend_bbclass, suffix=".bbclass") 221 cls = self.parsehelper(self.classextend_bbclass, suffix=".bbclass")
168 #clsname = os.path.basename(cls.name).replace(".bbclass", "") 222 #clsname = os.path.basename(cls.name).replace(".bbclass", "")
169 self.classextend = self.classextend.replace("###CLASS###", cls.name) 223 self.classextend = self.classextend.replace("###CLASS###", cls.name)
@@ -176,7 +230,19 @@ python () {
176 230
177 addtask_deltask = """ 231 addtask_deltask = """
178addtask do_patch after do_foo after do_unpack before do_configure before do_compile 232addtask do_patch after do_foo after do_unpack before do_configure before do_compile
179addtask do_fetch do_patch 233addtask do_fetch2 do_patch2
234
235addtask do_myplaintask
236addtask do_myplaintask2
237deltask do_myplaintask2
238addtask do_mytask# comment
239addtask do_mytask2 # comment2
240addtask do_mytask3
241deltask do_mytask3# comment
242deltask do_mytask4 # comment2
243
244# Ensure a task name without the "do_" prefix works in an "after" clause
245addtask do_mytask5 after mytask
180 246
181MYVAR = "do_patch" 247MYVAR = "do_patch"
182EMPTYVAR = "" 248EMPTYVAR = ""
@@ -184,13 +250,216 @@ deltask do_fetch ${MYVAR} ${EMPTYVAR}
184deltask ${EMPTYVAR} 250deltask ${EMPTYVAR}
185""" 251"""
186 def test_parse_addtask_deltask(self): 252 def test_parse_addtask_deltask(self):
187 import sys 253
188 f = self.parsehelper(self.addtask_deltask) 254 f = self.parsehelper(self.addtask_deltask)
189 d = bb.parse.handle(f.name, self.d)[''] 255 d = bb.parse.handle(f.name, self.d)['']
190 256
191 stdout = sys.stdout.getvalue() 257 self.assertSequenceEqual(['do_fetch2', 'do_patch2', 'do_myplaintask', 'do_mytask', 'do_mytask2', 'do_mytask5'], bb.build.listtasks(d))
192 self.assertTrue("addtask contained multiple 'before' keywords" in stdout) 258 self.assertEqual(['do_mytask'], d.getVarFlag("do_mytask5", "deps"))
193 self.assertTrue("addtask contained multiple 'after' keywords" in stdout) 259
194 self.assertTrue('addtask ignored: " do_patch"' in stdout) 260 broken_multiline_comment = """
195 #self.assertTrue('dependent task do_foo for do_patch does not exist' in stdout) 261# First line of comment \\
262# Second line of comment \\
263
264"""
265 def test_parse_broken_multiline_comment(self):
266 f = self.parsehelper(self.broken_multiline_comment)
267 with self.assertRaises(bb.BBHandledException):
268 d = bb.parse.handle(f.name, self.d)['']
269
270
271 comment_in_var = """
272VAR = " \\
273 SOMEVAL \\
274# some comment \\
275 SOMEOTHERVAL \\
276"
277"""
278 def test_parse_comment_in_var(self):
279 f = self.parsehelper(self.comment_in_var)
280 with self.assertRaises(bb.BBHandledException):
281 d = bb.parse.handle(f.name, self.d)['']
282
283
284 at_sign_in_var_flag = """
285A[flag@.service] = "nonet"
286B[flag@.target] = "ntb"
287C[f] = "flag"
196 288
289unset A[flag@.service]
290"""
291 def test_parse_at_sign_in_var_flag(self):
292 f = self.parsehelper(self.at_sign_in_var_flag)
293 d = bb.parse.handle(f.name, self.d)['']
294 self.assertEqual(d.getVar("A"), None)
295 self.assertEqual(d.getVar("B"), None)
296 self.assertEqual(d.getVarFlag("A","flag@.service"), None)
297 self.assertEqual(d.getVarFlag("B","flag@.target"), "ntb")
298 self.assertEqual(d.getVarFlag("C","f"), "flag")
299
300 def test_parse_invalid_at_sign_in_var_flag(self):
301 invalid_at_sign = self.at_sign_in_var_flag.replace("B[f", "B[@f")
302 f = self.parsehelper(invalid_at_sign)
303 with self.assertRaises(bb.parse.ParseError):
304 d = bb.parse.handle(f.name, self.d)['']
305
306 export_function_recipe = """
307inherit someclass
308"""
309
310 export_function_recipe2 = """
311inherit someclass
312
313do_compile () {
314 false
315}
316
317python do_compilepython () {
318 bb.note("Something else")
319}
320
321"""
322 export_function_class = """
323someclass_do_compile() {
324 true
325}
326
327python someclass_do_compilepython () {
328 bb.note("Something")
329}
330
331EXPORT_FUNCTIONS do_compile do_compilepython
332"""
333
334 export_function_class2 = """
335secondclass_do_compile() {
336 true
337}
338
339python secondclass_do_compilepython () {
340 bb.note("Something")
341}
342
343EXPORT_FUNCTIONS do_compile do_compilepython
344"""
345
346 def test_parse_export_functions(self):
347 def check_function_flags(d):
348 self.assertEqual(d.getVarFlag("do_compile", "func"), 1)
349 self.assertEqual(d.getVarFlag("do_compilepython", "func"), 1)
350 self.assertEqual(d.getVarFlag("do_compile", "python"), None)
351 self.assertEqual(d.getVarFlag("do_compilepython", "python"), "1")
352
353 with tempfile.TemporaryDirectory() as tempdir:
354 self.d.setVar("__bbclasstype", "recipe")
355 recipename = tempdir + "/recipe.bb"
356 os.makedirs(tempdir + "/classes")
357 with open(tempdir + "/classes/someclass.bbclass", "w") as f:
358 f.write(self.export_function_class)
359 f.flush()
360 with open(tempdir + "/classes/secondclass.bbclass", "w") as f:
361 f.write(self.export_function_class2)
362 f.flush()
363
364 with open(recipename, "w") as f:
365 f.write(self.export_function_recipe)
366 f.flush()
367 os.chdir(tempdir)
368 d = bb.parse.handle(recipename, bb.data.createCopy(self.d))['']
369 self.assertIn("someclass_do_compile", d.getVar("do_compile"))
370 self.assertIn("someclass_do_compilepython", d.getVar("do_compilepython"))
371 check_function_flags(d)
372
373 recipename2 = tempdir + "/recipe2.bb"
374 with open(recipename2, "w") as f:
375 f.write(self.export_function_recipe2)
376 f.flush()
377
378 d = bb.parse.handle(recipename2, bb.data.createCopy(self.d))['']
379 self.assertNotIn("someclass_do_compile", d.getVar("do_compile"))
380 self.assertNotIn("someclass_do_compilepython", d.getVar("do_compilepython"))
381 self.assertIn("false", d.getVar("do_compile"))
382 self.assertIn("else", d.getVar("do_compilepython"))
383 check_function_flags(d)
384
385 with open(recipename, "a+") as f:
386 f.write("\ninherit secondclass\n")
387 f.flush()
388 with open(recipename2, "a+") as f:
389 f.write("\ninherit secondclass\n")
390 f.flush()
391
392 d = bb.parse.handle(recipename, bb.data.createCopy(self.d))['']
393 self.assertIn("secondclass_do_compile", d.getVar("do_compile"))
394 self.assertIn("secondclass_do_compilepython", d.getVar("do_compilepython"))
395 check_function_flags(d)
396
397 d = bb.parse.handle(recipename2, bb.data.createCopy(self.d))['']
398 self.assertNotIn("someclass_do_compile", d.getVar("do_compile"))
399 self.assertNotIn("someclass_do_compilepython", d.getVar("do_compilepython"))
400 self.assertIn("false", d.getVar("do_compile"))
401 self.assertIn("else", d.getVar("do_compilepython"))
402 check_function_flags(d)
403
404 export_function_unclosed_tab = """
405do_compile () {
406 bb.note("Something")
407\t}
408"""
409 export_function_unclosed_space = """
410do_compile () {
411 bb.note("Something")
412 }
413"""
414 export_function_residue = """
415do_compile () {
416 bb.note("Something")
417}
418
419include \\
420"""
421
422 def test_unclosed_functions(self):
423 def test_helper(content, expected_error):
424 with tempfile.TemporaryDirectory() as tempdir:
425 recipename = tempdir + "/recipe_unclosed.bb"
426 with open(recipename, "w") as f:
427 f.write(content)
428 f.flush()
429 os.chdir(tempdir)
430 with self.assertRaises(bb.parse.ParseError) as error:
431 bb.parse.handle(recipename, bb.data.createCopy(self.d))
432 self.assertIn(expected_error, str(error.exception))
433
434 with tempfile.TemporaryDirectory() as tempdir:
435 test_helper(self.export_function_unclosed_tab, "Unparsed lines from unclosed function")
436 test_helper(self.export_function_unclosed_space, "Unparsed lines from unclosed function")
437 test_helper(self.export_function_residue, "Unparsed lines")
438
439 recipename_closed = tempdir + "/recipe_closed.bb"
440 with open(recipename_closed, "w") as in_file:
441 lines = self.export_function_unclosed_tab.split("\n")
442 lines[3] = "}"
443 in_file.write("\n".join(lines))
444 in_file.flush()
445 bb.parse.handle(recipename_closed, bb.data.createCopy(self.d))
446
447 special_character_assignment = """
448A+="a"
449A+ = "b"
450+ = "c"
451"""
452 ambiguous_assignment = """
453+= "d"
454"""
455 def test_parse_special_character_assignment(self):
456 f = self.parsehelper(self.special_character_assignment)
457 d = bb.parse.handle(f.name, self.d)['']
458 self.assertEqual(d.getVar("A"), " a")
459 self.assertEqual(d.getVar("A+"), "b")
460 self.assertEqual(d.getVar("+"), "c")
461
462 f = self.parsehelper(self.ambiguous_assignment)
463 with self.assertRaises(bb.parse.ParseError) as error:
464 bb.parse.handle(f.name, self.d)
465 self.assertIn("Empty variable name in assignment", str(error.exception))
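These parse tests share one pattern: write a metadata snippet to a temporary file, hand it to bb.parse.handle(), and assert on the resulting datastore. A minimal standalone sketch of that pattern, assuming bitbake/lib is on sys.path; parse_snippet is an illustrative name, not part of the test suite:

    import tempfile
    import bb.data
    import bb.parse

    def parse_snippet(content, suffix=".bb"):
        # Write the snippet somewhere bb.parse.handle() can open it.
        f = tempfile.NamedTemporaryFile(mode="w", suffix=suffix, delete=False)
        f.write(content)
        f.flush()
        # handle() returns a dict of datastores; '' is the main recipe datastore.
        return bb.parse.handle(f.name, bb.data.init())['']

    d = parse_snippet('A = "1"\nA:append = "2"\n')
    assert d.getVar("A") == "12"  # :append concatenates with no separator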
diff --git a/bitbake/lib/bb/tests/persist_data.py b/bitbake/lib/bb/tests/persist_data.py
deleted file mode 100644
index f641b5acbc..0000000000
--- a/bitbake/lib/bb/tests/persist_data.py
+++ /dev/null
@@ -1,129 +0,0 @@
1#
2# BitBake Test for lib/bb/persist_data/
3#
4# Copyright (C) 2018 Garmin Ltd.
5#
6# SPDX-License-Identifier: GPL-2.0-only
7#
8
9import unittest
10import bb.data
11import bb.persist_data
12import tempfile
13import threading
14
15class PersistDataTest(unittest.TestCase):
16 def _create_data(self):
17 return bb.persist_data.persist('TEST_PERSIST_DATA', self.d)
18
19 def setUp(self):
20 self.d = bb.data.init()
21 self.tempdir = tempfile.TemporaryDirectory()
22 self.d['PERSISTENT_DIR'] = self.tempdir.name
23 self.data = self._create_data()
24 self.items = {
25 'A1': '1',
26 'B1': '2',
27 'C2': '3'
28 }
29 self.stress_count = 10000
30 self.thread_count = 5
31
32 for k,v in self.items.items():
33 self.data[k] = v
34
35 def tearDown(self):
36 self.tempdir.cleanup()
37
38 def _iter_helper(self, seen, iterator):
39 with iter(iterator):
40 for v in iterator:
41 self.assertTrue(v in seen)
42 seen.remove(v)
43 self.assertEqual(len(seen), 0, '%s not seen' % seen)
44
45 def test_get(self):
46 for k, v in self.items.items():
47 self.assertEqual(self.data[k], v)
48
49 self.assertIsNone(self.data.get('D'))
50 with self.assertRaises(KeyError):
51 self.data['D']
52
53 def test_set(self):
54 for k, v in self.items.items():
55 self.data[k] += '-foo'
56
57 for k, v in self.items.items():
58 self.assertEqual(self.data[k], v + '-foo')
59
60 def test_delete(self):
61 self.data['D'] = '4'
62 self.assertEqual(self.data['D'], '4')
63 del self.data['D']
64 self.assertIsNone(self.data.get('D'))
65 with self.assertRaises(KeyError):
66 self.data['D']
67
68 def test_contains(self):
69 for k in self.items:
70 self.assertTrue(k in self.data)
71 self.assertTrue(self.data.has_key(k))
72 self.assertFalse('NotFound' in self.data)
73 self.assertFalse(self.data.has_key('NotFound'))
74
75 def test_len(self):
76 self.assertEqual(len(self.data), len(self.items))
77
78 def test_iter(self):
79 self._iter_helper(set(self.items.keys()), self.data)
80
81 def test_itervalues(self):
82 self._iter_helper(set(self.items.values()), self.data.itervalues())
83
84 def test_iteritems(self):
85 self._iter_helper(set(self.items.items()), self.data.iteritems())
86
87 def test_get_by_pattern(self):
88 self._iter_helper({'1', '2'}, self.data.get_by_pattern('_1'))
89
90 def _stress_read(self, data):
91 for i in range(self.stress_count):
92 for k in self.items:
93 data[k]
94
95 def _stress_write(self, data):
96 for i in range(self.stress_count):
97 for k, v in self.items.items():
98 data[k] = v + str(i)
99
100 def _validate_stress(self):
101 for k, v in self.items.items():
102 self.assertEqual(self.data[k], v + str(self.stress_count - 1))
103
104 def test_stress(self):
105 self._stress_read(self.data)
106 self._stress_write(self.data)
107 self._validate_stress()
108
109 def test_stress_threads(self):
110 def read_thread():
111 data = self._create_data()
112 self._stress_read(data)
113
114 def write_thread():
115 data = self._create_data()
116 self._stress_write(data)
117
118 threads = []
119 for i in range(self.thread_count):
120 threads.append(threading.Thread(target=read_thread))
121 threads.append(threading.Thread(target=write_thread))
122
123 for t in threads:
124 t.start()
125 self._stress_read(self.data)
126 for t in threads:
127 t.join()
128 self._validate_stress()
129
diff --git a/bitbake/lib/bb/tests/runqueue-tests/classes/base.bbclass b/bitbake/lib/bb/tests/runqueue-tests/classes/base.bbclass
index b57650d591..80b003b2b5 100644
--- a/bitbake/lib/bb/tests/runqueue-tests/classes/base.bbclass
+++ b/bitbake/lib/bb/tests/runqueue-tests/classes/base.bbclass
@@ -9,7 +9,7 @@ def stamptask(d):
9 with open(stampname, "a+") as f: 9 with open(stampname, "a+") as f:
10 f.write(d.getVar("BB_UNIHASH") + "\n") 10 f.write(d.getVar("BB_UNIHASH") + "\n")
11 11
12 if d.getVar("BB_CURRENT_MC") != "default": 12 if d.getVar("BB_CURRENT_MC") != "":
13 thistask = d.expand("${BB_CURRENT_MC}:${PN}:${BB_CURRENTTASK}") 13 thistask = d.expand("${BB_CURRENT_MC}:${PN}:${BB_CURRENTTASK}")
14 if thistask in d.getVar("SLOWTASKS").split(): 14 if thistask in d.getVar("SLOWTASKS").split():
15 bb.note("Slowing task %s" % thistask) 15 bb.note("Slowing task %s" % thistask)
diff --git a/bitbake/lib/bb/tests/runqueue-tests/conf/bitbake.conf b/bitbake/lib/bb/tests/runqueue-tests/conf/bitbake.conf
index efebf001a9..05d7fd07dd 100644
--- a/bitbake/lib/bb/tests/runqueue-tests/conf/bitbake.conf
+++ b/bitbake/lib/bb/tests/runqueue-tests/conf/bitbake.conf
@@ -12,6 +12,6 @@ STAMP = "${TMPDIR}/stamps/${PN}"
12T = "${TMPDIR}/workdir/${PN}/temp" 12T = "${TMPDIR}/workdir/${PN}/temp"
13BB_NUMBER_THREADS = "4" 13BB_NUMBER_THREADS = "4"
14 14
15BB_HASHBASE_WHITELIST = "BB_CURRENT_MC BB_HASHSERVE TMPDIR TOPDIR SLOWTASKS SSTATEVALID FILE" 15BB_BASEHASH_IGNORE_VARS = "BB_CURRENT_MC BB_HASHSERVE TMPDIR TOPDIR SLOWTASKS SSTATEVALID FILE BB_CURRENTTASK"
16 16
17include conf/multiconfig/${BB_CURRENT_MC}.conf 17include conf/multiconfig/${BB_CURRENT_MC}.conf
diff --git a/bitbake/lib/bb/tests/runqueue-tests/recipes/g1.bb b/bitbake/lib/bb/tests/runqueue-tests/recipes/g1.bb
new file mode 100644
index 0000000000..3c7dca0257
--- /dev/null
+++ b/bitbake/lib/bb/tests/runqueue-tests/recipes/g1.bb
@@ -0,0 +1,2 @@
1do_build[mcdepends] = "mc::mc-1:h1:do_invalid"
2
diff --git a/bitbake/lib/bb/tests/runqueue-tests/recipes/h1.bb b/bitbake/lib/bb/tests/runqueue-tests/recipes/h1.bb
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/bitbake/lib/bb/tests/runqueue-tests/recipes/h1.bb
diff --git a/bitbake/lib/bb/tests/runqueue.py b/bitbake/lib/bb/tests/runqueue.py
index 3d51779d6c..74f5ded2e6 100644
--- a/bitbake/lib/bb/tests/runqueue.py
+++ b/bitbake/lib/bb/tests/runqueue.py
@@ -26,20 +26,23 @@ class RunQueueTests(unittest.TestCase):
26 a1_sstatevalid = "a1:do_package a1:do_package_qa a1:do_packagedata a1:do_package_write_ipk a1:do_package_write_rpm a1:do_populate_lic a1:do_populate_sysroot" 26 a1_sstatevalid = "a1:do_package a1:do_package_qa a1:do_packagedata a1:do_package_write_ipk a1:do_package_write_rpm a1:do_populate_lic a1:do_populate_sysroot"
27 b1_sstatevalid = "b1:do_package b1:do_package_qa b1:do_packagedata b1:do_package_write_ipk b1:do_package_write_rpm b1:do_populate_lic b1:do_populate_sysroot" 27 b1_sstatevalid = "b1:do_package b1:do_package_qa b1:do_packagedata b1:do_package_write_ipk b1:do_package_write_rpm b1:do_populate_lic b1:do_populate_sysroot"
28 28
29 def run_bitbakecmd(self, cmd, builddir, sstatevalid="", slowtasks="", extraenv=None, cleanup=False): 29 def run_bitbakecmd(self, cmd, builddir, sstatevalid="", slowtasks="", extraenv=None, cleanup=False, allowfailure=False):
30 env = os.environ.copy() 30 env = os.environ.copy()
31 env["BBPATH"] = os.path.realpath(os.path.join(os.path.dirname(__file__), "runqueue-tests")) 31 env["BBPATH"] = os.path.realpath(os.path.join(os.path.dirname(__file__), "runqueue-tests"))
32 env["BB_ENV_EXTRAWHITE"] = "SSTATEVALID SLOWTASKS" 32 env["BB_ENV_PASSTHROUGH_ADDITIONS"] = "SSTATEVALID SLOWTASKS TOPDIR"
33 env["SSTATEVALID"] = sstatevalid 33 env["SSTATEVALID"] = sstatevalid
34 env["SLOWTASKS"] = slowtasks 34 env["SLOWTASKS"] = slowtasks
35 env["TOPDIR"] = builddir
35 if extraenv: 36 if extraenv:
36 for k in extraenv: 37 for k in extraenv:
37 env[k] = extraenv[k] 38 env[k] = extraenv[k]
38 env["BB_ENV_EXTRAWHITE"] = env["BB_ENV_EXTRAWHITE"] + " " + k 39 env["BB_ENV_PASSTHROUGH_ADDITIONS"] = env["BB_ENV_PASSTHROUGH_ADDITIONS"] + " " + k
39 try: 40 try:
40 output = subprocess.check_output(cmd, env=env, stderr=subprocess.STDOUT,universal_newlines=True, cwd=builddir) 41 output = subprocess.check_output(cmd, env=env, stderr=subprocess.STDOUT,universal_newlines=True, cwd=builddir)
41 print(output) 42 print(output)
42 except subprocess.CalledProcessError as e: 43 except subprocess.CalledProcessError as e:
44 if allowfailure:
45 return e.output
43 self.fail("Command %s failed with %s" % (cmd, e.output)) 46 self.fail("Command %s failed with %s" % (cmd, e.output))
44 tasks = [] 47 tasks = []
45 tasklog = builddir + "/task.log" 48 tasklog = builddir + "/task.log"
@@ -58,6 +61,8 @@ class RunQueueTests(unittest.TestCase):
58 expected = ['a1:' + x for x in self.alltasks] 61 expected = ['a1:' + x for x in self.alltasks]
59 self.assertEqual(set(tasks), set(expected)) 62 self.assertEqual(set(tasks), set(expected))
60 63
64 self.shutdown(tempdir)
65
61 def test_single_setscenevalid(self): 66 def test_single_setscenevalid(self):
62 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 67 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
63 cmd = ["bitbake", "a1"] 68 cmd = ["bitbake", "a1"]
@@ -68,6 +73,8 @@ class RunQueueTests(unittest.TestCase):
68 'a1:populate_sysroot', 'a1:build'] 73 'a1:populate_sysroot', 'a1:build']
69 self.assertEqual(set(tasks), set(expected)) 74 self.assertEqual(set(tasks), set(expected))
70 75
76 self.shutdown(tempdir)
77
71 def test_intermediate_setscenevalid(self): 78 def test_intermediate_setscenevalid(self):
72 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 79 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
73 cmd = ["bitbake", "a1"] 80 cmd = ["bitbake", "a1"]
@@ -77,6 +84,8 @@ class RunQueueTests(unittest.TestCase):
77 'a1:populate_sysroot_setscene', 'a1:build'] 84 'a1:populate_sysroot_setscene', 'a1:build']
78 self.assertEqual(set(tasks), set(expected)) 85 self.assertEqual(set(tasks), set(expected))
79 86
87 self.shutdown(tempdir)
88
80 def test_intermediate_notcovered(self): 89 def test_intermediate_notcovered(self):
81 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 90 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
82 cmd = ["bitbake", "a1"] 91 cmd = ["bitbake", "a1"]
@@ -86,6 +95,8 @@ class RunQueueTests(unittest.TestCase):
86 'a1:package_qa_setscene', 'a1:build', 'a1:populate_sysroot_setscene'] 95 'a1:package_qa_setscene', 'a1:build', 'a1:populate_sysroot_setscene']
87 self.assertEqual(set(tasks), set(expected)) 96 self.assertEqual(set(tasks), set(expected))
88 97
98 self.shutdown(tempdir)
99
89 def test_all_setscenevalid(self): 100 def test_all_setscenevalid(self):
90 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 101 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
91 cmd = ["bitbake", "a1"] 102 cmd = ["bitbake", "a1"]
@@ -95,6 +106,8 @@ class RunQueueTests(unittest.TestCase):
95 'a1:package_qa_setscene', 'a1:build', 'a1:populate_sysroot_setscene'] 106 'a1:package_qa_setscene', 'a1:build', 'a1:populate_sysroot_setscene']
96 self.assertEqual(set(tasks), set(expected)) 107 self.assertEqual(set(tasks), set(expected))
97 108
109 self.shutdown(tempdir)
110
98 def test_no_settasks(self): 111 def test_no_settasks(self):
99 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 112 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
100 cmd = ["bitbake", "a1", "-c", "patch"] 113 cmd = ["bitbake", "a1", "-c", "patch"]
@@ -103,6 +116,8 @@ class RunQueueTests(unittest.TestCase):
103 expected = ['a1:fetch', 'a1:unpack', 'a1:patch'] 116 expected = ['a1:fetch', 'a1:unpack', 'a1:patch']
104 self.assertEqual(set(tasks), set(expected)) 117 self.assertEqual(set(tasks), set(expected))
105 118
119 self.shutdown(tempdir)
120
106 def test_mix_covered_notcovered(self): 121 def test_mix_covered_notcovered(self):
107 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 122 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
108 cmd = ["bitbake", "a1:do_patch", "a1:do_populate_sysroot"] 123 cmd = ["bitbake", "a1:do_patch", "a1:do_populate_sysroot"]
@@ -111,6 +126,7 @@ class RunQueueTests(unittest.TestCase):
111 expected = ['a1:fetch', 'a1:unpack', 'a1:patch', 'a1:populate_sysroot_setscene'] 126 expected = ['a1:fetch', 'a1:unpack', 'a1:patch', 'a1:populate_sysroot_setscene']
112 self.assertEqual(set(tasks), set(expected)) 127 self.assertEqual(set(tasks), set(expected))
113 128
129 self.shutdown(tempdir)
114 130
115 # Test targets with intermediate setscene tasks alongside a target with no intermediate setscene tasks 131 # Test targets with intermediate setscene tasks alongside a target with no intermediate setscene tasks
116 def test_mixed_direct_tasks_setscene_tasks(self): 132 def test_mixed_direct_tasks_setscene_tasks(self):
@@ -122,6 +138,8 @@ class RunQueueTests(unittest.TestCase):
122 'a1:package_qa_setscene', 'a1:build', 'a1:populate_sysroot_setscene'] 138 'a1:package_qa_setscene', 'a1:build', 'a1:populate_sysroot_setscene']
123 self.assertEqual(set(tasks), set(expected)) 139 self.assertEqual(set(tasks), set(expected))
124 140
141 self.shutdown(tempdir)
142
125 # This test slows down the execution of do_package_setscene until after other real tasks have 143 # This test slows down the execution of do_package_setscene until after other real tasks have
126 # started running which tests for a bug where tasks were being lost from the buildable list of real 144 # started running which tests for a bug where tasks were being lost from the buildable list of real
127 # tasks if they weren't in tasks_covered or tasks_notcovered 145 # tasks if they weren't in tasks_covered or tasks_notcovered
@@ -136,12 +154,14 @@ class RunQueueTests(unittest.TestCase):
136 'a1:populate_sysroot', 'a1:build'] 154 'a1:populate_sysroot', 'a1:build']
137 self.assertEqual(set(tasks), set(expected)) 155 self.assertEqual(set(tasks), set(expected))
138 156
139 def test_setscenewhitelist(self): 157 self.shutdown(tempdir)
158
159 def test_setscene_ignore_tasks(self):
140 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 160 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
141 cmd = ["bitbake", "a1"] 161 cmd = ["bitbake", "a1"]
142 extraenv = { 162 extraenv = {
143 "BB_SETSCENE_ENFORCE" : "1", 163 "BB_SETSCENE_ENFORCE" : "1",
144 "BB_SETSCENE_ENFORCE_WHITELIST" : "a1:do_package_write_rpm a1:do_build" 164 "BB_SETSCENE_ENFORCE_IGNORE_TASKS" : "a1:do_package_write_rpm a1:do_build"
145 } 165 }
146 sstatevalid = "a1:do_package a1:do_package_qa a1:do_packagedata a1:do_package_write_ipk a1:do_populate_lic a1:do_populate_sysroot" 166 sstatevalid = "a1:do_package a1:do_package_qa a1:do_packagedata a1:do_package_write_ipk a1:do_populate_lic a1:do_populate_sysroot"
147 tasks = self.run_bitbakecmd(cmd, tempdir, sstatevalid, extraenv=extraenv) 167 tasks = self.run_bitbakecmd(cmd, tempdir, sstatevalid, extraenv=extraenv)
@@ -149,6 +169,8 @@ class RunQueueTests(unittest.TestCase):
149 'a1:populate_sysroot_setscene', 'a1:package_setscene'] 169 'a1:populate_sysroot_setscene', 'a1:package_setscene']
150 self.assertEqual(set(tasks), set(expected)) 170 self.assertEqual(set(tasks), set(expected))
151 171
172 self.shutdown(tempdir)
173
152 # Tests for problems with dependencies between setscene tasks 174 # Tests for problems with dependencies between setscene tasks
153 def test_no_setscenevalid_harddeps(self): 175 def test_no_setscenevalid_harddeps(self):
154 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 176 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
@@ -162,6 +184,8 @@ class RunQueueTests(unittest.TestCase):
162 'd1:populate_sysroot', 'd1:build'] 184 'd1:populate_sysroot', 'd1:build']
163 self.assertEqual(set(tasks), set(expected)) 185 self.assertEqual(set(tasks), set(expected))
164 186
187 self.shutdown(tempdir)
188
165 def test_no_setscenevalid_withdeps(self): 189 def test_no_setscenevalid_withdeps(self):
166 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 190 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
167 cmd = ["bitbake", "b1"] 191 cmd = ["bitbake", "b1"]
@@ -172,6 +196,8 @@ class RunQueueTests(unittest.TestCase):
172 expected.remove('a1:package_qa') 196 expected.remove('a1:package_qa')
173 self.assertEqual(set(tasks), set(expected)) 197 self.assertEqual(set(tasks), set(expected))
174 198
199 self.shutdown(tempdir)
200
175 def test_single_a1_setscenevalid_withdeps(self): 201 def test_single_a1_setscenevalid_withdeps(self):
176 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 202 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
177 cmd = ["bitbake", "b1"] 203 cmd = ["bitbake", "b1"]
@@ -182,6 +208,8 @@ class RunQueueTests(unittest.TestCase):
182 'a1:populate_sysroot'] + ['b1:' + x for x in self.alltasks] 208 'a1:populate_sysroot'] + ['b1:' + x for x in self.alltasks]
183 self.assertEqual(set(tasks), set(expected)) 209 self.assertEqual(set(tasks), set(expected))
184 210
211 self.shutdown(tempdir)
212
185 def test_single_b1_setscenevalid_withdeps(self): 213 def test_single_b1_setscenevalid_withdeps(self):
186 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 214 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
187 cmd = ["bitbake", "b1"] 215 cmd = ["bitbake", "b1"]
@@ -193,6 +221,8 @@ class RunQueueTests(unittest.TestCase):
193 expected.remove('b1:package') 221 expected.remove('b1:package')
194 self.assertEqual(set(tasks), set(expected)) 222 self.assertEqual(set(tasks), set(expected))
195 223
224 self.shutdown(tempdir)
225
196 def test_intermediate_setscenevalid_withdeps(self): 226 def test_intermediate_setscenevalid_withdeps(self):
197 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 227 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
198 cmd = ["bitbake", "b1"] 228 cmd = ["bitbake", "b1"]
@@ -203,6 +233,8 @@ class RunQueueTests(unittest.TestCase):
203 expected.remove('b1:package') 233 expected.remove('b1:package')
204 self.assertEqual(set(tasks), set(expected)) 234 self.assertEqual(set(tasks), set(expected))
205 235
236 self.shutdown(tempdir)
237
206 def test_all_setscenevalid_withdeps(self): 238 def test_all_setscenevalid_withdeps(self):
207 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 239 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
208 cmd = ["bitbake", "b1"] 240 cmd = ["bitbake", "b1"]
@@ -213,6 +245,8 @@ class RunQueueTests(unittest.TestCase):
213 'b1:packagedata_setscene', 'b1:package_qa_setscene', 'b1:populate_sysroot_setscene'] 245 'b1:packagedata_setscene', 'b1:package_qa_setscene', 'b1:populate_sysroot_setscene']
214 self.assertEqual(set(tasks), set(expected)) 246 self.assertEqual(set(tasks), set(expected))
215 247
248 self.shutdown(tempdir)
249
216 def test_multiconfig_setscene_optimise(self): 250 def test_multiconfig_setscene_optimise(self):
217 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 251 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
218 extraenv = { 252 extraenv = {
@@ -232,6 +266,8 @@ class RunQueueTests(unittest.TestCase):
232 expected.remove(x) 266 expected.remove(x)
233 self.assertEqual(set(tasks), set(expected)) 267 self.assertEqual(set(tasks), set(expected))
234 268
269 self.shutdown(tempdir)
270
235 def test_multiconfig_bbmask(self): 271 def test_multiconfig_bbmask(self):
236 # This test validates that multiconfigs can independently mask off 272 # This test validates that multiconfigs can independently mask off
237 # recipes they do not want with BBMASK. It works by having recipes 273 # recipes they do not want with BBMASK. It works by having recipes
@@ -248,11 +284,13 @@ class RunQueueTests(unittest.TestCase):
248 cmd = ["bitbake", "mc:mc-1:fails-mc2", "mc:mc_2:fails-mc1"] 284 cmd = ["bitbake", "mc:mc-1:fails-mc2", "mc:mc_2:fails-mc1"]
249 self.run_bitbakecmd(cmd, tempdir, "", extraenv=extraenv) 285 self.run_bitbakecmd(cmd, tempdir, "", extraenv=extraenv)
250 286
287 self.shutdown(tempdir)
288
251 def test_multiconfig_mcdepends(self): 289 def test_multiconfig_mcdepends(self):
252 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 290 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
253 extraenv = { 291 extraenv = {
254 "BBMULTICONFIG" : "mc-1 mc_2", 292 "BBMULTICONFIG" : "mc-1 mc_2",
255 "BB_SIGNATURE_HANDLER" : "TestMulticonfigDepends", 293 "BB_SIGNATURE_HANDLER" : "basichash",
256 "EXTRA_BBFILES": "${COREBASE}/recipes/fails-mc/*.bb", 294 "EXTRA_BBFILES": "${COREBASE}/recipes/fails-mc/*.bb",
257 } 295 }
258 tasks = self.run_bitbakecmd(["bitbake", "mc:mc-1:f1"], tempdir, "", extraenv=extraenv, cleanup=True) 296 tasks = self.run_bitbakecmd(["bitbake", "mc:mc-1:f1"], tempdir, "", extraenv=extraenv, cleanup=True)
@@ -278,7 +316,15 @@ class RunQueueTests(unittest.TestCase):
278 ["mc_2:a1:%s" % t for t in rerun_tasks] 316 ["mc_2:a1:%s" % t for t in rerun_tasks]
279 self.assertEqual(set(tasks), set(expected)) 317 self.assertEqual(set(tasks), set(expected))
280 318
281 @unittest.skipIf(sys.version_info < (3, 5, 0), 'Python 3.5 or later required') 319 # Check that a multiconfig that doesn't exist raises a correct error message
320 error_output = self.run_bitbakecmd(["bitbake", "g1"], tempdir, "", extraenv=extraenv, cleanup=True, allowfailure=True)
321 self.assertIn("non-existent task", error_output)
322 # If the word 'Traceback' or 'KeyError' is in the output, we've regressed
323 self.assertNotIn("Traceback", error_output)
324 self.assertNotIn("KeyError", error_output)
325
326 self.shutdown(tempdir)
327
282 def test_hashserv_single(self): 328 def test_hashserv_single(self):
283 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 329 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
284 extraenv = { 330 extraenv = {
@@ -304,7 +350,6 @@ class RunQueueTests(unittest.TestCase):
304 350
305 self.shutdown(tempdir) 351 self.shutdown(tempdir)
306 352
307 @unittest.skipIf(sys.version_info < (3, 5, 0), 'Python 3.5 or later required')
308 def test_hashserv_double(self): 353 def test_hashserv_double(self):
309 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 354 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
310 extraenv = { 355 extraenv = {
@@ -329,7 +374,6 @@ class RunQueueTests(unittest.TestCase):
329 374
330 self.shutdown(tempdir) 375 self.shutdown(tempdir)
331 376
332 @unittest.skipIf(sys.version_info < (3, 5, 0), 'Python 3.5 or later required')
333 def test_hashserv_multiple_setscene(self): 377 def test_hashserv_multiple_setscene(self):
334 # Runs e1:do_package_setscene twice 378 # Runs e1:do_package_setscene twice
335 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir: 379 with tempfile.TemporaryDirectory(prefix="runqueuetest") as tempdir:
@@ -361,7 +405,6 @@ class RunQueueTests(unittest.TestCase):
361 405
362 def shutdown(self, tempdir): 406 def shutdown(self, tempdir):
363 # Wait for the hashserve socket to disappear else we'll see races with the tempdir cleanup 407 # Wait for the hashserve socket to disappear else we'll see races with the tempdir cleanup
364 while os.path.exists(tempdir + "/hashserve.sock"): 408 while (os.path.exists(tempdir + "/hashserve.sock") or os.path.exists(tempdir + "/cache/hashserv.db-wal") or os.path.exists(tempdir + "/bitbake.lock")):
365 time.sleep(0.5) 409 time.sleep(0.5)
366 410
367
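run_bitbakecmd() above drives a real bitbake process, so every variable the tests depend on must be named in BB_ENV_PASSTHROUGH_ADDITIONS (the successor to BB_ENV_EXTRAWHITE) to survive BitBake's environment filtering. A minimal sketch of that invocation pattern; the build directory path is an assumption, and the BBPATH setup (which the helper points at the runqueue-tests layer) is omitted:

    import os
    import subprocess

    env = os.environ.copy()
    # Only variables listed here pass through BitBake's environment filter.
    env["BB_ENV_PASSTHROUGH_ADDITIONS"] = "SSTATEVALID SLOWTASKS TOPDIR"
    env["SSTATEVALID"] = ""
    env["SLOWTASKS"] = ""
    env["TOPDIR"] = "/path/to/builddir"  # assumption: an initialised build directory

    output = subprocess.check_output(["bitbake", "a1"], env=env,
                                     stderr=subprocess.STDOUT,
                                     universal_newlines=True,
                                     cwd=env["TOPDIR"])
    print(output)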
diff --git a/bitbake/lib/bb/tests/siggen.py b/bitbake/lib/bb/tests/siggen.py
index c21ab4e4fb..0dc67e6cc2 100644
--- a/bitbake/lib/bb/tests/siggen.py
+++ b/bitbake/lib/bb/tests/siggen.py
@@ -17,75 +17,12 @@ import bb.siggen
17 17
18class SiggenTest(unittest.TestCase): 18class SiggenTest(unittest.TestCase):
19 19
20 def test_clean_basepath_simple_target_basepath(self): 20 def test_build_pnid(self):
21 basepath = '/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask' 21 tests = {
22 expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask' 22 ('', 'helloworld', 'do_sometask') : 'helloworld:do_sometask',
23 ('XX', 'helloworld', 'do_sometask') : 'mc:XX:helloworld:do_sometask',
24 }
23 25
24 actual_cleaned = bb.siggen.clean_basepath(basepath) 26 for t in tests:
27 self.assertEqual(bb.siggen.build_pnid(*t), tests[t])
25 28
26 self.assertEqual(actual_cleaned, expected_cleaned)
27
28 def test_clean_basepath_basic_virtual_basepath(self):
29 basepath = 'virtual:something:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask'
30 expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:virtual:something'
31
32 actual_cleaned = bb.siggen.clean_basepath(basepath)
33
34 self.assertEqual(actual_cleaned, expected_cleaned)
35
36 def test_clean_basepath_mc_basepath(self):
37 basepath = 'mc:somemachine:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask'
38 expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:mc:somemachine'
39
40 actual_cleaned = bb.siggen.clean_basepath(basepath)
41
42 self.assertEqual(actual_cleaned, expected_cleaned)
43
44 def test_clean_basepath_virtual_long_prefix_basepath(self):
45 basepath = 'virtual:something:A:B:C:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask'
46 expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:virtual:something:A:B:C'
47
48 actual_cleaned = bb.siggen.clean_basepath(basepath)
49
50 self.assertEqual(actual_cleaned, expected_cleaned)
51
52 def test_clean_basepath_mc_virtual_basepath(self):
53 basepath = 'mc:somemachine:virtual:something:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask'
54 expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:virtual:something:mc:somemachine'
55
56 actual_cleaned = bb.siggen.clean_basepath(basepath)
57
58 self.assertEqual(actual_cleaned, expected_cleaned)
59
60 def test_clean_basepath_mc_virtual_long_prefix_basepath(self):
61 basepath = 'mc:X:virtual:something:C:B:A:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask'
62 expected_cleaned = 'helloworld/helloworld_1.2.3.bb:do_sometask:virtual:something:C:B:A:mc:X'
63
64 actual_cleaned = bb.siggen.clean_basepath(basepath)
65
66 self.assertEqual(actual_cleaned, expected_cleaned)
67
68
69 # def test_clean_basepath_performance(self):
70 # input_basepaths = [
71 # 'mc:X:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
72 # 'mc:X:virtual:something:C:B:A:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
73 # 'virtual:something:C:B:A:/different/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
74 # 'virtual:something:A:/full/path/to/poky/meta/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
75 # '/this/is/most/common/input/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
76 # '/and/should/be/tested/with/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
77 # '/more/weight/recipes-whatever/helloworld/helloworld_1.2.3.bb:do_sometask',
78 # ]
79
80 # time_start = time.time()
81
82 # i = 2000000
83 # while i >= 0:
84 # for basepath in input_basepaths:
85 # bb.siggen.clean_basepath(basepath)
86 # i -= 1
87
88 # elapsed = time.time() - time_start
89 # print('{} ({}s)'.format(self.id(), round(elapsed, 3)))
90
91 # self.assertTrue(False)
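The replacement test pins down the output format of bb.siggen.build_pnid() rather than the path mangling of the removed clean_basepath(). A sketch consistent with the two expected values in the test table above; the real implementation may differ internally:

    def build_pnid(mc, pn, taskname):
        # Multiconfig IDs gain an "mc:<name>:" prefix; the default config does not.
        if mc:
            return "mc:%s:%s:%s" % (mc, pn, taskname)
        return "%s:%s" % (pn, taskname)

    assert build_pnid('', 'helloworld', 'do_sometask') == 'helloworld:do_sometask'
    assert build_pnid('XX', 'helloworld', 'do_sometask') == 'mc:XX:helloworld:do_sometask'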
diff --git a/bitbake/lib/bb/tests/utils.py b/bitbake/lib/bb/tests/utils.py
index a7ff33db52..52b7bf85bf 100644
--- a/bitbake/lib/bb/tests/utils.py
+++ b/bitbake/lib/bb/tests/utils.py
@@ -130,6 +130,14 @@ class Checksum(unittest.TestCase):
130 checksum = bb.utils.sha256_file(f.name) 130 checksum = bb.utils.sha256_file(f.name)
131 self.assertEqual(checksum, "fcfbae8bf6b721dbb9d2dc6a9334a58f2031a9a9b302999243f99da4d7f12d0f") 131 self.assertEqual(checksum, "fcfbae8bf6b721dbb9d2dc6a9334a58f2031a9a9b302999243f99da4d7f12d0f")
132 132
133 def test_goh1(self):
134 import hashlib
135 with tempfile.NamedTemporaryFile() as f:
136 f.write(self.filler)
137 f.flush()
138 checksum = bb.utils.goh1_file(f.name)
139 self.assertEqual(checksum, "81191f04d4abf413e5badd234814e4202d9efa73e6f9437e9ddd6b8165b569ef")
140
133class EditMetadataFile(unittest.TestCase): 141class EditMetadataFile(unittest.TestCase):
134 _origfile = """ 142 _origfile = """
135# A comment 143# A comment
@@ -418,7 +426,7 @@ MULTILINE = " stuff \\
418 ['MULTILINE'], 426 ['MULTILINE'],
419 handle_var) 427 handle_var)
420 428
421 testvalue = re.sub('\s+', ' ', value_in_callback.strip()) 429 testvalue = re.sub(r'\s+', ' ', value_in_callback.strip())
422 self.assertEqual(expected_value, testvalue) 430 self.assertEqual(expected_value, testvalue)
423 431
424class EditBbLayersConf(unittest.TestCase): 432class EditBbLayersConf(unittest.TestCase):
@@ -666,3 +674,32 @@ class GetReferencedVars(unittest.TestCase):
666 674
667 layers = [{"SRC_URI"}, {"QT_GIT", "QT_MODULE", "QT_MODULE_BRANCH_PARAM", "QT_GIT_PROTOCOL"}, {"QT_GIT_PROJECT", "QT_MODULE_BRANCH", "BPN"}, {"PN", "SPECIAL_PKGSUFFIX"}] 675 layers = [{"SRC_URI"}, {"QT_GIT", "QT_MODULE", "QT_MODULE_BRANCH_PARAM", "QT_GIT_PROTOCOL"}, {"QT_GIT_PROJECT", "QT_MODULE_BRANCH", "BPN"}, {"PN", "SPECIAL_PKGSUFFIX"}]
668 self.check_referenced("${SRC_URI}", layers) 676 self.check_referenced("${SRC_URI}", layers)
677
678
679class EnvironmentTests(unittest.TestCase):
680 def test_environment(self):
681 os.environ["A"] = "this is A"
682 self.assertIn("A", os.environ)
683 self.assertEqual(os.environ["A"], "this is A")
684 self.assertNotIn("B", os.environ)
685
686 with bb.utils.environment(B="this is B"):
687 self.assertIn("A", os.environ)
688 self.assertEqual(os.environ["A"], "this is A")
689 self.assertIn("B", os.environ)
690 self.assertEqual(os.environ["B"], "this is B")
691
692 self.assertIn("A", os.environ)
693 self.assertEqual(os.environ["A"], "this is A")
694 self.assertNotIn("B", os.environ)
695
696class FilemodeTests(unittest.TestCase):
697 def test_filemode_convert(self):
698 self.assertEqual(0o775, bb.utils.to_filemode("0o775"))
699 self.assertEqual(0o775, bb.utils.to_filemode(0o775))
700 self.assertEqual(0o775, bb.utils.to_filemode("775"))
701 with self.assertRaises(ValueError):
702 bb.utils.to_filemode("xyz")
703 with self.assertRaises(ValueError):
704 bb.utils.to_filemode("999")
705
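test_environment pins down the contract of bb.utils.environment(): keyword arguments become environment variables for the duration of the with-block, and the previous environment is restored on exit. A sketch matching that behaviour; the real implementation may differ:

    import os
    from contextlib import contextmanager

    @contextmanager
    def environment(**envvars):
        # Remember the prior value (or absence) of each variable.
        backup = {k: os.environ.get(k) for k in envvars}
        os.environ.update(envvars)
        try:
            yield
        finally:
            for k, old in backup.items():
                if old is None:
                    del os.environ[k]
                else:
                    os.environ[k] = old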
diff --git a/bitbake/lib/bb/tinfoil.py b/bitbake/lib/bb/tinfoil.py
index 763c329810..e7fbcbca0a 100644
--- a/bitbake/lib/bb/tinfoil.py
+++ b/bitbake/lib/bb/tinfoil.py
@@ -10,10 +10,12 @@
10import logging 10import logging
11import os 11import os
12import sys 12import sys
13import time
13import atexit 14import atexit
14import re 15import re
15from collections import OrderedDict, defaultdict 16from collections import OrderedDict, defaultdict
16from functools import partial 17from functools import partial, wraps
18from contextlib import contextmanager
17 19
18import bb.cache 20import bb.cache
19import bb.cooker 21import bb.cooker
@@ -25,6 +27,135 @@ import bb.remotedata
25from bb.main import setup_bitbake, BitBakeConfigParameters 27from bb.main import setup_bitbake, BitBakeConfigParameters
26import bb.fetch2 28import bb.fetch2
27 29
30def wait_for(f):
31 """
32 Wrap a function that makes an asynchronous tinfoil call using
33 self.run_command(), waiting for events indicating that the call has
34 succeeded or that an error has occurred.
35 """
36 @wraps(f)
37 def wrapper(self, *args, handle_events=True, extra_events=None, event_callback=None, **kwargs):
38 if handle_events:
39 # A reasonable set of default events matching up with those we handle below
40 eventmask = [
41 'bb.event.BuildStarted',
42 'bb.event.BuildCompleted',
43 'logging.LogRecord',
44 'bb.event.NoProvider',
45 'bb.command.CommandCompleted',
46 'bb.command.CommandFailed',
47 'bb.build.TaskStarted',
48 'bb.build.TaskFailed',
49 'bb.build.TaskSucceeded',
50 'bb.build.TaskFailedSilent',
51 'bb.build.TaskProgress',
52 'bb.runqueue.runQueueTaskStarted',
53 'bb.runqueue.sceneQueueTaskStarted',
54 'bb.event.ProcessStarted',
55 'bb.event.ProcessProgress',
56 'bb.event.ProcessFinished',
57 ]
58 if extra_events:
59 eventmask.extend(extra_events)
60 ret = self.set_event_mask(eventmask)
61
62 includelogs = self.config_data.getVar('BBINCLUDELOGS')
63 loglines = self.config_data.getVar('BBINCLUDELOGS_LINES')
64
65 # Call actual function
66 ret = f(self, *args, **kwargs)
67
68 if handle_events:
69 lastevent = time.time()
70 result = False
71 # Borrowed from knotty, instead somewhat hackily we use the helper
72 # as the object to store "shutdown" on
73 helper = bb.ui.uihelper.BBUIHelper()
74 helper.shutdown = 0
75 parseprogress = None
76 termfilter = bb.ui.knotty.TerminalFilter(helper, helper, self.logger.handlers, quiet=self.quiet)
77 try:
78 while True:
79 try:
80 event = self.wait_event(0.25)
81 if event:
82 lastevent = time.time()
83 if event_callback and event_callback(event):
84 continue
85 if helper.eventHandler(event):
86 if isinstance(event, bb.build.TaskFailedSilent):
87 self.logger.warning("Logfile for failed setscene task is %s" % event.logfile)
88 elif isinstance(event, bb.build.TaskFailed):
89 bb.ui.knotty.print_event_log(event, includelogs, loglines, termfilter)
90 continue
91 if isinstance(event, bb.event.ProcessStarted):
92 if self.quiet > 1:
93 continue
94 parseprogress = bb.ui.knotty.new_progress(event.processname, event.total)
95 parseprogress.start(False)
96 continue
97 if isinstance(event, bb.event.ProcessProgress):
98 if self.quiet > 1:
99 continue
100 if parseprogress:
101 parseprogress.update(event.progress)
102 else:
103 bb.warn("Got ProcessProgress event for something that never started?")
104 continue
105 if isinstance(event, bb.event.ProcessFinished):
106 if self.quiet > 1:
107 continue
108 if parseprogress:
109 parseprogress.finish()
110 parseprogress = None
111 continue
112 if isinstance(event, bb.command.CommandCompleted):
113 result = True
114 break
115 if isinstance(event, (bb.command.CommandFailed, bb.command.CommandExit)):
116 self.logger.error(str(event))
117 result = False
118 break
119 if isinstance(event, logging.LogRecord):
120 if event.taskpid == 0 or event.levelno > logging.INFO:
121 self.logger.handle(event)
122 continue
123 if isinstance(event, bb.event.NoProvider):
124 self.logger.error(str(event))
125 result = False
126 break
127 elif helper.shutdown > 1:
128 break
129 termfilter.updateFooter()
130 if time.time() > (lastevent + (3*60)):
131 if not self.run_command('ping', handle_events=False):
132 print("\nUnable to ping server and no events, closing down...\n")
133 return False
134 except KeyboardInterrupt:
135 termfilter.clearFooter()
136 if helper.shutdown == 1:
137 print("\nSecond Keyboard Interrupt, stopping...\n")
138 ret = self.run_command("stateForceShutdown")
139 if ret and ret[2]:
140 self.logger.error("Unable to cleanly stop: %s" % ret[2])
141 elif helper.shutdown == 0:
142 print("\nKeyboard Interrupt, closing down...\n")
143 interrupted = True
144 ret = self.run_command("stateShutdown")
145 if ret and ret[2]:
146 self.logger.error("Unable to cleanly shutdown: %s" % ret[2])
147 helper.shutdown = helper.shutdown + 1
148 termfilter.clearFooter()
149 finally:
150 termfilter.finish()
151 if helper.failed_tasks:
152 result = False
153 return result
154 else:
155 return ret
156
157 return wrapper
158
28 159
29# We need this in order to shut down the connection to the bitbake server, 160# We need this in order to shut down the connection to the bitbake server,
30# otherwise the process will never properly exit 161# otherwise the process will never properly exit
@@ -52,6 +183,10 @@ class TinfoilDataStoreConnectorVarHistory:
52 def remoteCommand(self, cmd, *args, **kwargs): 183 def remoteCommand(self, cmd, *args, **kwargs):
53 return self.tinfoil.run_command('dataStoreConnectorVarHistCmd', self.dsindex, cmd, args, kwargs) 184 return self.tinfoil.run_command('dataStoreConnectorVarHistCmd', self.dsindex, cmd, args, kwargs)
54 185
186 def emit(self, var, oval, val, o, d):
187 ret = self.tinfoil.run_command('dataStoreConnectorVarHistCmdEmit', self.dsindex, var, oval, val, d.dsindex)
188 o.write(ret)
189
55 def __getattr__(self, name): 190 def __getattr__(self, name):
56 if not hasattr(bb.data_smart.VariableHistory, name): 191 if not hasattr(bb.data_smart.VariableHistory, name):
57 raise AttributeError("VariableHistory has no such method %s" % name) 192 raise AttributeError("VariableHistory has no such method %s" % name)
@@ -183,11 +318,19 @@ class TinfoilCookerAdapter:
183 self._cache[name] = attrvalue 318 self._cache[name] = attrvalue
184 return attrvalue 319 return attrvalue
185 320
321 class TinfoilSkiplistByMcAdapter:
322 def __init__(self, tinfoil):
323 self.tinfoil = tinfoil
324
325 def __getitem__(self, mc):
326 return self.tinfoil.get_skipped_recipes(mc)
327
186 def __init__(self, tinfoil): 328 def __init__(self, tinfoil):
187 self.tinfoil = tinfoil 329 self.tinfoil = tinfoil
188 self.multiconfigs = [''] + (tinfoil.config_data.getVar('BBMULTICONFIG') or '').split() 330 self.multiconfigs = [''] + (tinfoil.config_data.getVar('BBMULTICONFIG') or '').split()
189 self.collections = {} 331 self.collections = {}
190 self.recipecaches = {} 332 self.recipecaches = {}
333 self.skiplist_by_mc = self.TinfoilSkiplistByMcAdapter(tinfoil)
191 for mc in self.multiconfigs: 334 for mc in self.multiconfigs:
192 self.collections[mc] = self.TinfoilCookerCollectionAdapter(tinfoil, mc) 335 self.collections[mc] = self.TinfoilCookerCollectionAdapter(tinfoil, mc)
193 self.recipecaches[mc] = self.TinfoilRecipeCacheAdapter(tinfoil, mc) 336 self.recipecaches[mc] = self.TinfoilRecipeCacheAdapter(tinfoil, mc)
@@ -196,8 +339,6 @@ class TinfoilCookerAdapter:
196 # Grab these only when they are requested since they aren't always used 339 # Grab these only when they are requested since they aren't always used
197 if name in self._cache: 340 if name in self._cache:
198 return self._cache[name] 341 return self._cache[name]
199 elif name == 'skiplist':
200 attrvalue = self.tinfoil.get_skipped_recipes()
201 elif name == 'bbfile_config_priorities': 342 elif name == 'bbfile_config_priorities':
202 ret = self.tinfoil.run_command('getLayerPriorities') 343 ret = self.tinfoil.run_command('getLayerPriorities')
203 bbfile_config_priorities = [] 344 bbfile_config_priorities = []
@@ -320,11 +461,11 @@ class Tinfoil:
320 self.recipes_parsed = False 461 self.recipes_parsed = False
321 self.quiet = 0 462 self.quiet = 0
322 self.oldhandlers = self.logger.handlers[:] 463 self.oldhandlers = self.logger.handlers[:]
464 self.localhandlers = []
323 if setup_logging: 465 if setup_logging:
324 # This is the *client-side* logger, nothing to do with 466 # This is the *client-side* logger, nothing to do with
325 # logging messages from the server 467 # logging messages from the server
326 bb.msg.logger_create('BitBake', output) 468 bb.msg.logger_create('BitBake', output)
327 self.localhandlers = []
328 for handler in self.logger.handlers: 469 for handler in self.logger.handlers:
329 if handler not in self.oldhandlers: 470 if handler not in self.oldhandlers:
330 self.localhandlers.append(handler) 471 self.localhandlers.append(handler)
@@ -440,11 +581,17 @@ class Tinfoil:
440 to initialise Tinfoil and use it with config_only=True first and 581 to initialise Tinfoil and use it with config_only=True first and
441 then conditionally call this function to parse recipes later. 582 then conditionally call this function to parse recipes later.
442 """ 583 """
443 config_params = TinfoilConfigParameters(config_only=False) 584 config_params = TinfoilConfigParameters(config_only=False, quiet=self.quiet)
444 self.run_actions(config_params) 585 self.run_actions(config_params)
445 self.recipes_parsed = True 586 self.recipes_parsed = True
446 587
447 def run_command(self, command, *params): 588 def modified_files(self):
589 """
590 Notify the server it needs to revalidate its caches since the client has modified files
591 """
592 self.run_command("revalidateCaches")
593
594 def run_command(self, command, *params, handle_events=True):
448 """ 595 """
449 Run a command on the server (as implemented in bb.command). 596 Run a command on the server (as implemented in bb.command).
450 Note that there are two types of command - synchronous and 597 Note that there are two types of command - synchronous and
@@ -464,7 +611,7 @@ class Tinfoil:
464 try: 611 try:
465 result = self.server_connection.connection.runCommand(commandline) 612 result = self.server_connection.connection.runCommand(commandline)
466 finally: 613 finally:
467 while True: 614 while handle_events:
468 event = self.wait_event() 615 event = self.wait_event()
469 if not event: 616 if not event:
470 break 617 break
@@ -489,7 +636,7 @@ class Tinfoil:
489 Wait for an event from the server for the specified time. 636 Wait for an event from the server for the specified time.
490 A timeout of 0 means don't wait if there are no events in the queue. 637 A timeout of 0 means don't wait if there are no events in the queue.
491 Returns the next event in the queue or None if the timeout was 638 Returns the next event in the queue or None if the timeout was
492 reached. Note that in order to recieve any events you will 639 reached. Note that in order to receive any events you will
493 first need to set the internal event mask using set_event_mask() 640 first need to set the internal event mask using set_event_mask()
494 (otherwise whatever event mask the UI set up will be in effect). 641 (otherwise whatever event mask the UI set up will be in effect).
495 """ 642 """
@@ -503,12 +650,12 @@ class Tinfoil:
503 """ 650 """
504 return defaultdict(list, self.run_command('getOverlayedRecipes', mc)) 651 return defaultdict(list, self.run_command('getOverlayedRecipes', mc))
505 652
506 def get_skipped_recipes(self): 653 def get_skipped_recipes(self, mc=''):
507 """ 654 """
508 Find recipes which were skipped (i.e. SkipRecipe was raised 655 Find recipes which were skipped (i.e. SkipRecipe was raised
509 during parsing). 656 during parsing).
510 """ 657 """
511 return OrderedDict(self.run_command('getSkippedRecipes')) 658 return OrderedDict(self.run_command('getSkippedRecipes', mc))
512 659
513 def get_all_providers(self, mc=''): 660 def get_all_providers(self, mc=''):
514 return defaultdict(list, self.run_command('allProviders', mc)) 661 return defaultdict(list, self.run_command('allProviders', mc))
@@ -522,6 +669,7 @@ class Tinfoil:
522 def get_runtime_providers(self, rdep): 669 def get_runtime_providers(self, rdep):
523 return self.run_command('getRuntimeProviders', rdep) 670 return self.run_command('getRuntimeProviders', rdep)
524 671
672 # TODO: teach this method about mc
525 def get_recipe_file(self, pn): 673 def get_recipe_file(self, pn):
526 """ 674 """
527 Get the file name for the specified recipe/target. Raises 675 Get the file name for the specified recipe/target. Raises
@@ -530,6 +678,7 @@ class Tinfoil:
530 """ 678 """
531 best = self.find_best_provider(pn) 679 best = self.find_best_provider(pn)
532 if not best or (len(best) > 3 and not best[3]): 680 if not best or (len(best) > 3 and not best[3]):
681 # TODO: pass down mc
533 skiplist = self.get_skipped_recipes() 682 skiplist = self.get_skipped_recipes()
534 taskdata = bb.taskdata.TaskData(None, skiplist=skiplist) 683 taskdata = bb.taskdata.TaskData(None, skiplist=skiplist)
535 skipreasons = taskdata.get_reasons(pn) 684 skipreasons = taskdata.get_reasons(pn)
@@ -622,6 +771,29 @@ class Tinfoil:
622 fn = self.get_recipe_file(pn) 771 fn = self.get_recipe_file(pn)
623 return self.parse_recipe_file(fn) 772 return self.parse_recipe_file(fn)
624 773
774 @contextmanager
775 def _data_tracked_if_enabled(self):
776 """
777 A context manager to enable data tracking for a code segment if data
778 tracking was enabled for this tinfoil instance.
779 """
780 if self.tracking:
781 # Enable history tracking just for the operation
782 self.run_command('enableDataTracking')
783
784 # Here goes the operation with the optional data tracking
785 yield
786
787 if self.tracking:
788 self.run_command('disableDataTracking')
789
790 def finalizeData(self):
791 """
792 Run anonymous functions and expand keys
793 """
794 with self._data_tracked_if_enabled():
795 return self._reconvert_type(self.run_command('finalizeData'), 'DataStoreConnectionHandle')
796
625 def parse_recipe_file(self, fn, appends=True, appendlist=None, config_data=None): 797 def parse_recipe_file(self, fn, appends=True, appendlist=None, config_data=None):
626 """ 798 """
627 Parse the specified recipe file (with or without bbappends) 799 Parse the specified recipe file (with or without bbappends)
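finalizeData() returns a handle to a datastore in which anonymous Python functions have been run and keys expanded, and _data_tracked_if_enabled() wraps it so the instance's tracking setting is honoured for just that operation. A hypothetical usage sketch; 'tinfoil' is assumed to be an initialised bb.tinfoil.Tinfoil with the configuration parsed:

    # Hypothetical usage of the new finalizeData() API.
    d = tinfoil.finalizeData()
    # The returned object behaves like a remote datastore connection.
    print(d.getVar("TMPDIR"))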
@@ -634,10 +806,7 @@ class Tinfoil:
634 appendlist: optional list of bbappend files to apply, if you 806 appendlist: optional list of bbappend files to apply, if you
635 want to filter them 807 want to filter them
636 """ 808 """
637 if self.tracking: 809 with self._data_tracked_if_enabled():
638 # Enable history tracking just for the parse operation
639 self.run_command('enableDataTracking')
640 try:
641 if appends and appendlist == []: 810 if appends and appendlist == []:
642 appends = False 811 appends = False
643 if config_data: 812 if config_data:
@@ -649,9 +818,6 @@ class Tinfoil:
649 return self._reconvert_type(dscon, 'DataStoreConnectionHandle') 818 return self._reconvert_type(dscon, 'DataStoreConnectionHandle')
650 else: 819 else:
651 return None 820 return None
652 finally:
653 if self.tracking:
654 self.run_command('disableDataTracking')
655 821
656 def build_file(self, buildfile, task, internal=True): 822 def build_file(self, buildfile, task, internal=True):
657 """ 823 """
@@ -663,6 +829,10 @@ class Tinfoil:
663 """ 829 """
664 return self.run_command('buildFile', buildfile, task, internal) 830 return self.run_command('buildFile', buildfile, task, internal)
665 831
832 @wait_for
833 def build_file_sync(self, *args):
834 self.build_file(*args)
835
666 def build_targets(self, targets, task=None, handle_events=True, extra_events=None, event_callback=None): 836 def build_targets(self, targets, task=None, handle_events=True, extra_events=None, event_callback=None):
667 """ 837 """
668 Builds the specified targets. This is equivalent to a normal invocation 838 Builds the specified targets. This is equivalent to a normal invocation
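build_file_sync() pairs build_file() with the wait_for decorator from the top of this diff: the asynchronous buildFile command is issued and the wrapper then consumes server events until CommandCompleted or CommandFailed arrives, returning True or False. A hypothetical usage sketch; the recipe path and task name are placeholders:

    import bb.tinfoil

    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare()  # parse configuration and recipes
        # Blocks until the build finishes; True on success, False on failure.
        ok = tinfoil.build_file_sync("/path/to/recipe.bb", "compile")
        print("build succeeded" if ok else "build failed")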
@@ -725,6 +895,7 @@ class Tinfoil:
725 895
726 ret = self.run_command('buildTargets', targets, task) 896 ret = self.run_command('buildTargets', targets, task)
727 if handle_events: 897 if handle_events:
898 lastevent = time.time()
728 result = False 899 result = False
729 # Borrowed from knotty, instead somewhat hackily we use the helper 900 # Borrowed from knotty, instead somewhat hackily we use the helper
730 # as the object to store "shutdown" on 901 # as the object to store "shutdown" on
@@ -737,6 +908,7 @@ class Tinfoil:
737 try: 908 try:
738 event = self.wait_event(0.25) 909 event = self.wait_event(0.25)
739 if event: 910 if event:
911 lastevent = time.time()
740 if event_callback and event_callback(event): 912 if event_callback and event_callback(event):
741 continue 913 continue
742 if helper.eventHandler(event): 914 if helper.eventHandler(event):
@@ -757,7 +929,7 @@ class Tinfoil:
757 if parseprogress: 929 if parseprogress:
758 parseprogress.update(event.progress) 930 parseprogress.update(event.progress)
759 else: 931 else:
760 bb.warn("Got ProcessProgress event for someting that never started?") 932 bb.warn("Got ProcessProgress event for something that never started?")
761 continue 933 continue
762 if isinstance(event, bb.event.ProcessFinished): 934 if isinstance(event, bb.event.ProcessFinished):
763 if self.quiet > 1: 935 if self.quiet > 1:
@@ -769,7 +941,7 @@ class Tinfoil:
769 if isinstance(event, bb.command.CommandCompleted): 941 if isinstance(event, bb.command.CommandCompleted):
770 result = True 942 result = True
771 break 943 break
772 if isinstance(event, bb.command.CommandFailed): 944 if isinstance(event, (bb.command.CommandFailed, bb.command.CommandExit)):
773 self.logger.error(str(event)) 945 self.logger.error(str(event))
774 result = False 946 result = False
775 break 947 break
@@ -781,10 +953,13 @@ class Tinfoil:
781 self.logger.error(str(event)) 953 self.logger.error(str(event))
782 result = False 954 result = False
783 break 955 break
784
785 elif helper.shutdown > 1: 956 elif helper.shutdown > 1:
786 break 957 break
787 termfilter.updateFooter() 958 termfilter.updateFooter()
959 if time.time() > (lastevent + (3*60)):
960 if not self.run_command('ping', handle_events=False):
961 print("\nUnable to ping server and no events, closing down...\n")
962 return False
788 except KeyboardInterrupt: 963 except KeyboardInterrupt:
789 termfilter.clearFooter() 964 termfilter.clearFooter()
790 if helper.shutdown == 1: 965 if helper.shutdown == 1:
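The hunk above also adds a liveness check to build_targets(): lastevent records when the last event arrived, and once the loop has been idle for three minutes it pings the server and gives up if there is no answer. A minimal sketch of the same pattern in isolation (get_event and server_ping are placeholder callables, not bitbake APIs):

    import time

    def event_loop(get_event, server_ping, idle_limit=3 * 60):
        # Poll for events; bail out if the server goes quiet and stops
        # answering pings.
        lastevent = time.time()
        while True:
            event = get_event(0.25)          # returns None on timeout
            if event is not None:
                lastevent = time.time()      # any event resets the idle clock
                # ... handle the event here ...
                continue
            if time.time() > lastevent + idle_limit:
                if not server_ping():
                    print("Unable to ping server and no events, closing down...")
                    return False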
diff --git a/bitbake/lib/bb/ui/buildinfohelper.py b/bitbake/lib/bb/ui/buildinfohelper.py
index 43aa592842..4ee45d67a2 100644
--- a/bitbake/lib/bb/ui/buildinfohelper.py
+++ b/bitbake/lib/bb/ui/buildinfohelper.py
@@ -45,7 +45,7 @@ from pprint import pformat
45import logging 45import logging
46from datetime import datetime, timedelta 46from datetime import datetime, timedelta
47 47
48from django.db import transaction, connection 48from django.db import transaction
49 49
50 50
51# pylint: disable=invalid-name 51# pylint: disable=invalid-name
@@ -227,6 +227,12 @@ class ORMWrapper(object):
227 build.completed_on = timezone.now() 227 build.completed_on = timezone.now()
228 build.outcome = outcome 228 build.outcome = outcome
229 build.save() 229 build.save()
230
231 # We force a sync point here to commit the outcome status immediately,
232 # which resolves a race condition with the build completion takedown
233 transaction.set_autocommit(True)
234 transaction.set_autocommit(False)
235
230 signal_runbuilds() 236 signal_runbuilds()
231 237
232 def update_target_set_license_manifest(self, target, license_manifest_path): 238 def update_target_set_license_manifest(self, target, license_manifest_path):
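The autocommit toggle added above is the standard Django idiom for forcing a synchronous commit while staying in manual-transaction mode: re-enabling autocommit closes out the pending transaction, and disabling it again resumes batched writes. The same idiom in isolation (illustrative only; it assumes a configured Django project):

    from django.db import transaction

    def force_commit_sync_point():
        # Commit whatever the manual-commit mode has accumulated so far...
        transaction.set_autocommit(True)
        # ...then drop back into manual mode for subsequent writes.
        transaction.set_autocommit(False)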
@@ -483,14 +489,14 @@ class ORMWrapper(object):
483 489
484 # we already created the root directory, so ignore any 490 # we already created the root directory, so ignore any
485 # entry for it 491 # entry for it
486 if len(path) == 0: 492 if not path:
487 continue 493 continue
488 494
489 parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1]) 495 parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
490 if len(parent_path) == 0: 496 if not parent_path:
491 parent_path = "/" 497 parent_path = "/"
492 parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY) 498 parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
493 tf_obj = Target_File.objects.create( 499 Target_File.objects.create(
494 target = target_obj, 500 target = target_obj,
495 path = path, 501 path = path,
496 size = size, 502 size = size,
@@ -553,9 +559,12 @@ class ORMWrapper(object):
553 # we might have an invalid link; no way to detect this. just set it to None 559 # we might have an invalid link; no way to detect this. just set it to None
554 filetarget_obj = None 560 filetarget_obj = None
555 561
556 parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY) 562 try:
563 parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
564 except Target_File.DoesNotExist:
565 parent_obj = None
557 566
558 tf_obj = Target_File.objects.create( 567 Target_File.objects.create(
559 target = target_obj, 568 target = target_obj,
560 path = path, 569 path = path,
561 size = size, 570 size = size,
@@ -571,7 +580,7 @@ class ORMWrapper(object):
571 assert isinstance(build_obj, Build) 580 assert isinstance(build_obj, Build)
572 assert isinstance(target_obj, Target) 581 assert isinstance(target_obj, Target)
573 582
574 errormsg = "" 583 errormsg = []
575 for p in packagedict: 584 for p in packagedict:
576 # Search name switches round the installed name vs package name 585 # Search name switches round the installed name vs package name
577 # by default installed name == package name 586 # by default installed name == package name
@@ -633,10 +642,10 @@ class ORMWrapper(object):
633 packagefile_objects.append(Package_File( package = packagedict[p]['object'], 642 packagefile_objects.append(Package_File( package = packagedict[p]['object'],
634 path = targetpath, 643 path = targetpath,
635 size = targetfilesize)) 644 size = targetfilesize))
636 if len(packagefile_objects): 645 if packagefile_objects:
637 Package_File.objects.bulk_create(packagefile_objects) 646 Package_File.objects.bulk_create(packagefile_objects)
638 except KeyError as e: 647 except KeyError as e:
639 errormsg += " stpi: Key error, package %s key %s \n" % ( p, e ) 648 errormsg.append(" stpi: Key error, package %s key %s \n" % (p, e))
640 649
641 # save disk installed size 650 # save disk installed size
642 packagedict[p]['object'].installed_size = packagedict[p]['size'] 651 packagedict[p]['object'].installed_size = packagedict[p]['size']
@@ -673,13 +682,13 @@ class ORMWrapper(object):
673 logger.warning("Could not add dependency to the package %s " 682 logger.warning("Could not add dependency to the package %s "
674 "because %s is an unknown package", p, px) 683 "because %s is an unknown package", p, px)
675 684
676 if len(packagedeps_objs) > 0: 685 if packagedeps_objs:
677 Package_Dependency.objects.bulk_create(packagedeps_objs) 686 Package_Dependency.objects.bulk_create(packagedeps_objs)
678 else: 687 else:
679 logger.info("No package dependencies created") 688 logger.info("No package dependencies created")
680 689
681 if len(errormsg) > 0: 690 if errormsg:
682 logger.warning("buildinfohelper: target_package_info could not identify recipes: \n%s", errormsg) 691 logger.warning("buildinfohelper: target_package_info could not identify recipes: \n%s", "".join(errormsg))
683 692
684 def save_target_image_file_information(self, target_obj, file_name, file_size): 693 def save_target_image_file_information(self, target_obj, file_name, file_size):
685 Target_Image_File.objects.create(target=target_obj, 694 Target_Image_File.objects.create(target=target_obj,
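Several hunks in this file swap string accumulation (errormsg += ...) for appending to a list that is joined once at reporting time; repeated += re-copies the whole string each iteration, while list appends are amortised constant time. A minimal before/after with made-up package names:

    # Before: quadratic-time string building
    errormsg = ""
    for p in ("pkg-a", "pkg-b"):
        errormsg += " stpi: Key error, package %s \n" % p

    # After: collect the parts, join once when reporting
    errormsg = []
    for p in ("pkg-a", "pkg-b"):
        errormsg.append(" stpi: Key error, package %s \n" % p)
    if errormsg:
        print("could not identify recipes:\n%s" % "".join(errormsg))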
@@ -767,7 +776,7 @@ class ORMWrapper(object):
767 packagefile_objects.append(Package_File( package = bp_object, 776 packagefile_objects.append(Package_File( package = bp_object,
768 path = path, 777 path = path,
769 size = package_info['FILES_INFO'][path] )) 778 size = package_info['FILES_INFO'][path] ))
770 if len(packagefile_objects): 779 if packagefile_objects:
771 Package_File.objects.bulk_create(packagefile_objects) 780 Package_File.objects.bulk_create(packagefile_objects)
772 781
773 def _po_byname(p): 782 def _po_byname(p):
@@ -809,7 +818,7 @@ class ORMWrapper(object):
809 packagedeps_objs.append(Package_Dependency( package = bp_object, 818 packagedeps_objs.append(Package_Dependency( package = bp_object,
810 depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS)) 819 depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS))
811 820
812 if len(packagedeps_objs) > 0: 821 if packagedeps_objs:
813 Package_Dependency.objects.bulk_create(packagedeps_objs) 822 Package_Dependency.objects.bulk_create(packagedeps_objs)
814 823
815 return bp_object 824 return bp_object
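A related cleanup running through this file replaces explicit length tests with direct truthiness checks: empty Python sequences are falsy, so if len(xs) > 0: and if len(xs): both reduce to if xs:. For instance:

    packagedeps_objs = []
    if packagedeps_objs:       # idiomatic; an empty list is falsy
        print("would call bulk_create() here")
    else:
        print("No package dependencies created")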
@@ -826,7 +835,7 @@ class ORMWrapper(object):
826 desc = vardump[root_var]['doc'] 835 desc = vardump[root_var]['doc']
827 if desc is None: 836 if desc is None:
828 desc = '' 837 desc = ''
829 if len(desc): 838 if desc:
830 HelpText.objects.get_or_create(build=build_obj, 839 HelpText.objects.get_or_create(build=build_obj,
831 area=HelpText.VARIABLE, 840 area=HelpText.VARIABLE,
832 key=k, text=desc) 841 key=k, text=desc)
@@ -846,7 +855,7 @@ class ORMWrapper(object):
846 file_name = vh['file'], 855 file_name = vh['file'],
847 line_number = vh['line'], 856 line_number = vh['line'],
848 operation = vh['op'])) 857 operation = vh['op']))
849 if len(varhist_objects): 858 if varhist_objects:
850 VariableHistory.objects.bulk_create(varhist_objects) 859 VariableHistory.objects.bulk_create(varhist_objects)
851 860
852 861
@@ -893,9 +902,6 @@ class BuildInfoHelper(object):
893 self.task_order = 0 902 self.task_order = 0
894 self.autocommit_step = 1 903 self.autocommit_step = 1
895 self.server = server 904 self.server = server
896 # we use manual transactions if the database doesn't autocommit on us
897 if not connection.features.autocommits_when_autocommit_is_off:
898 transaction.set_autocommit(False)
899 self.orm_wrapper = ORMWrapper() 905 self.orm_wrapper = ORMWrapper()
900 self.has_build_history = has_build_history 906 self.has_build_history = has_build_history
901 self.tmp_dir = self.server.runCommand(["getVariable", "TMPDIR"])[0] 907 self.tmp_dir = self.server.runCommand(["getVariable", "TMPDIR"])[0]
@@ -1059,27 +1065,6 @@ class BuildInfoHelper(object):
1059 1065
1060 return recipe_info 1066 return recipe_info
1061 1067
1062 def _get_path_information(self, task_object):
1063 self._ensure_build()
1064
1065 assert isinstance(task_object, Task)
1066 build_stats_format = "{tmpdir}/buildstats/{buildname}/{package}/"
1067 build_stats_path = []
1068
1069 for t in self.internal_state['targets']:
1070 buildname = self.internal_state['build'].build_name
1071 pe, pv = task_object.recipe.version.split(":",1)
1072 if len(pe) > 0:
1073 package = task_object.recipe.name + "-" + pe + "_" + pv
1074 else:
1075 package = task_object.recipe.name + "-" + pv
1076
1077 build_stats_path.append(build_stats_format.format(tmpdir=self.tmp_dir,
1078 buildname=buildname,
1079 package=package))
1080
1081 return build_stats_path
1082
1083 1068
1084 ################################ 1069 ################################
1085 ## external available methods to store information 1070 ## external available methods to store information
@@ -1313,12 +1298,11 @@ class BuildInfoHelper(object):
1313 task_information['outcome'] = Task.OUTCOME_FAILED 1298 task_information['outcome'] = Task.OUTCOME_FAILED
1314 del self.internal_state['taskdata'][identifier] 1299 del self.internal_state['taskdata'][identifier]
1315 1300
1316 if not connection.features.autocommits_when_autocommit_is_off: 1301 # we force a sync point here, to get the progress bar to show
1317 # we force a sync point here, to get the progress bar to show 1302 if self.autocommit_step % 3 == 0:
1318 if self.autocommit_step % 3 == 0: 1303 transaction.set_autocommit(True)
1319 transaction.set_autocommit(True) 1304 transaction.set_autocommit(False)
1320 transaction.set_autocommit(False) 1305 self.autocommit_step += 1
1321 self.autocommit_step += 1
1322 1306
1323 self.orm_wrapper.get_update_task_object(task_information, True) # must exist 1307 self.orm_wrapper.get_update_task_object(task_information, True) # must exist
1324 1308
@@ -1404,7 +1388,7 @@ class BuildInfoHelper(object):
1404 assert 'pn' in event._depgraph 1388 assert 'pn' in event._depgraph
1405 assert 'tdepends' in event._depgraph 1389 assert 'tdepends' in event._depgraph
1406 1390
1407 errormsg = "" 1391 errormsg = []
1408 1392
1409 # save layer version priorities 1393 # save layer version priorities
1410 if 'layer-priorities' in event._depgraph.keys(): 1394 if 'layer-priorities' in event._depgraph.keys():
@@ -1496,7 +1480,7 @@ class BuildInfoHelper(object):
1496 elif dep in self.internal_state['recipes']: 1480 elif dep in self.internal_state['recipes']:
1497 dependency = self.internal_state['recipes'][dep] 1481 dependency = self.internal_state['recipes'][dep]
1498 else: 1482 else:
1499 errormsg += " stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, dep) 1483 errormsg.append(" stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, dep))
1500 continue 1484 continue
1501 recipe_dep = Recipe_Dependency(recipe=target, 1485 recipe_dep = Recipe_Dependency(recipe=target,
1502 depends_on=dependency, 1486 depends_on=dependency,
@@ -1537,8 +1521,8 @@ class BuildInfoHelper(object):
1537 taskdeps_objects.append(Task_Dependency( task = target, depends_on = dep )) 1521 taskdeps_objects.append(Task_Dependency( task = target, depends_on = dep ))
1538 Task_Dependency.objects.bulk_create(taskdeps_objects) 1522 Task_Dependency.objects.bulk_create(taskdeps_objects)
1539 1523
1540 if len(errormsg) > 0: 1524 if errormsg:
1541 logger.warning("buildinfohelper: dependency info could not identify recipes: \n%s", errormsg) 1525 logger.warning("buildinfohelper: dependency info could not identify recipes: \n%s", "".join(errormsg))
1542 1526
1543 1527
1544 def store_build_package_information(self, event): 1528 def store_build_package_information(self, event):
@@ -1618,7 +1602,7 @@ class BuildInfoHelper(object):
1618 1602
1619 if 'backlog' in self.internal_state: 1603 if 'backlog' in self.internal_state:
1620 # if we have a backlog of events, do our best to save them here 1604 # if we have a backlog of events, do our best to save them here
1621 if len(self.internal_state['backlog']): 1605 if self.internal_state['backlog']:
1622 tempevent = self.internal_state['backlog'].pop() 1606 tempevent = self.internal_state['backlog'].pop()
1623 logger.debug("buildinfohelper: Saving stored event %s " 1607 logger.debug("buildinfohelper: Saving stored event %s "
1624 % tempevent) 1608 % tempevent)
@@ -1765,7 +1749,6 @@ class BuildInfoHelper(object):
1765 1749
1766 buildname = self.server.runCommand(['getVariable', 'BUILDNAME'])[0] 1750 buildname = self.server.runCommand(['getVariable', 'BUILDNAME'])[0]
1767 machine = self.server.runCommand(['getVariable', 'MACHINE'])[0] 1751 machine = self.server.runCommand(['getVariable', 'MACHINE'])[0]
1768 image_name = self.server.runCommand(['getVariable', 'IMAGE_NAME'])[0]
1769 1752
1770 # location of the manifest files for this build; 1753 # location of the manifest files for this build;
1771 # note that this file is only produced if an image is produced 1754 # note that this file is only produced if an image is produced
@@ -1786,6 +1769,18 @@ class BuildInfoHelper(object):
1786 # filter out anything which isn't an image target 1769 # filter out anything which isn't an image target
1787 image_targets = [target for target in targets if target.is_image] 1770 image_targets = [target for target in targets if target.is_image]
1788 1771
1772 if len(image_targets) > 0:
1773 # if there are image targets, retrieve IMAGE_NAME
1774 image_name = self.server.runCommand(['getVariable', 'IMAGE_NAME'])[0]
1775 if not image_name:
1776 # the build target is an image but IMAGE_NAME is not set as a variable, so recover it from the bitbake command line
1777 logger.info("IMAGE_NAME not found, extracting from bitbake command")
1778 cmd = self.server.runCommand(['getVariable', 'BB_CMDLINE'])[0]
1779 # filter out tokens that are command line options
1780 cmd = [token for token in cmd if not token.startswith('-')]
1781 image_name = cmd[1].split(':', 1)[0] # remove everything after : in the image name
1782 logger.info("IMAGE_NAME found as: %s" % image_name)
1783
1789 for image_target in image_targets: 1784 for image_target in image_targets:
1790 # this is set to True if we find at least one file relating to 1785 # this is set to True if we find at least one file relating to
1791 # this target; if this remains False after the scan, we copy the 1786 # this target; if this remains False after the scan, we copy the
@@ -1990,8 +1985,6 @@ class BuildInfoHelper(object):
1990 # Do not skip command line build events 1985 # Do not skip command line build events
1991 self.store_log_event(tempevent,False) 1986 self.store_log_event(tempevent,False)
1992 1987
1993 if not connection.features.autocommits_when_autocommit_is_off:
1994 transaction.set_autocommit(True)
1995 1988
1996 # unset the brbe; this is to prevent subsequent command-line builds 1989 # unset the brbe; this is to prevent subsequent command-line builds
1997 # being incorrectly attached to the previous Toaster-triggered build; 1990 # being incorrectly attached to the previous Toaster-triggered build;
diff --git a/bitbake/lib/bb/ui/eventreplay.py b/bitbake/lib/bb/ui/eventreplay.py
new file mode 100644
index 0000000000..d62ecbfa56
--- /dev/null
+++ b/bitbake/lib/bb/ui/eventreplay.py
@@ -0,0 +1,86 @@
1#!/usr/bin/env python3
2#
3# SPDX-License-Identifier: GPL-2.0-only
4#
5# This file re-uses code spread throughout other Bitbake source files.
6# As such, all other copyrights belong to their own right holders.
7#
8
9
10import os
11import sys
12import json
13import pickle
14import codecs
15
16
17class EventPlayer:
18 """Emulate a connection to a bitbake server."""
19
20 def __init__(self, eventfile, variables):
21 self.eventfile = eventfile
22 self.variables = variables
23 self.eventmask = []
24
25 def waitEvent(self, _timeout):
26 """Read event from the file."""
27 line = self.eventfile.readline().strip()
28 if not line:
29 return
30 try:
31 decodedline = json.loads(line)
32 if 'allvariables' in decodedline:
33 self.variables = decodedline['allvariables']
34 return
35 if 'vars' not in decodedline:
36 raise ValueError
37 event_str = decodedline['vars'].encode('utf-8')
38 event = pickle.loads(codecs.decode(event_str, 'base64'))
39 event_name = "%s.%s" % (event.__module__, event.__class__.__name__)
40 if event_name not in self.eventmask:
41 return
42 return event
43 except ValueError as err:
44 print("Failed loading ", line)
45 raise err
46
47 def runCommand(self, command_line):
48 """Emulate running a command on the server."""
49 name = command_line[0]
50
51 if name == "getVariable":
52 var_name = command_line[1]
53 variable = self.variables.get(var_name)
54 if variable:
55 return variable['v'], None
56 return None, "Missing variable %s" % var_name
57
58 elif name == "getAllKeysWithFlags":
59 dump = {}
60 flaglist = command_line[1]
61 for key, val in self.variables.items():
62 try:
63 if not key.startswith("__"):
64 dump[key] = {
65 'v': val['v'],
66 'history' : val['history'],
67 }
68 for flag in flaglist:
69 dump[key][flag] = val[flag]
70 except Exception as err:
71 print(err)
72 return (dump, None)
73
74 elif name == 'setEventMask':
75 self.eventmask = command_line[-1]
76 return True, None
77
78 else:
79 raise Exception("Command %s not implemented" % command_line[0])
80
81 def getEventHandle(self):
82 """
83 This method is called by toasterui.
84 The return value is passed to self.runCommand but not used there.
85 """
86 pass
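EventPlayer gives toasterui a stand-in for a live bitbake server, replaying variables and base64-pickled events from a recorded log. A hedged usage sketch; the variable-dict shape follows runCommand() above, but the values themselves are invented:

    from bb.ui.eventreplay import EventPlayer

    # No event file is needed for variable queries alone
    variables = {'MACHINE': {'v': 'qemux86-64', 'history': []}}
    player = EventPlayer(eventfile=None, variables=variables)

    value, error = player.runCommand(['getVariable', 'MACHINE'])
    print(value)    # -> qemux86-64

    # setEventMask reads its accept-list from the last argument;
    # waitEvent() then returns None for any replayed event not named in it.
    player.runCommand(['setEventMask', ['logging.LogRecord']])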
diff --git a/bitbake/lib/bb/ui/knotty.py b/bitbake/lib/bb/ui/knotty.py
index 0efa614dfc..9a589a5c8e 100644
--- a/bitbake/lib/bb/ui/knotty.py
+++ b/bitbake/lib/bb/ui/knotty.py
@@ -21,10 +21,17 @@ import fcntl
21import struct 21import struct
22import copy 22import copy
23import atexit 23import atexit
24from itertools import groupby
24 25
25from bb.ui import uihelper 26from bb.ui import uihelper
27import bb.build
28import bb.command
29import bb.cooker
30import bb.event
31import bb.runqueue
32import bb.utils
26 33
27featureSet = [bb.cooker.CookerFeatures.SEND_SANITYEVENTS] 34featureSet = [bb.cooker.CookerFeatures.SEND_SANITYEVENTS, bb.cooker.CookerFeatures.BASEDATASTORE_TRACKING]
28 35
29logger = logging.getLogger("BitBake") 36logger = logging.getLogger("BitBake")
30interactive = sys.stdout.isatty() 37interactive = sys.stdout.isatty()
@@ -102,7 +109,7 @@ def new_progress(msg, maxval):
102 return NonInteractiveProgress(msg, maxval) 109 return NonInteractiveProgress(msg, maxval)
103 110
104def pluralise(singular, plural, qty): 111def pluralise(singular, plural, qty):
105 if(qty == 1): 112 if qty == 1:
106 return singular % qty 113 return singular % qty
107 else: 114 else:
108 return plural % qty 115 return plural % qty
@@ -111,6 +118,7 @@ def pluralise(singular, plural, qty):
111class InteractConsoleLogFilter(logging.Filter): 118class InteractConsoleLogFilter(logging.Filter):
112 def __init__(self, tf): 119 def __init__(self, tf):
113 self.tf = tf 120 self.tf = tf
121 super().__init__()
114 122
115 def filter(self, record): 123 def filter(self, record):
116 if record.levelno == bb.msg.BBLogFormatter.NOTE and (record.msg.startswith("Running") or record.msg.startswith("recipe ")): 124 if record.levelno == bb.msg.BBLogFormatter.NOTE and (record.msg.startswith("Running") or record.msg.startswith("recipe ")):
@@ -178,7 +186,7 @@ class TerminalFilter(object):
178 new[3] = new[3] & ~termios.ECHO 186 new[3] = new[3] & ~termios.ECHO
179 termios.tcsetattr(fd, termios.TCSADRAIN, new) 187 termios.tcsetattr(fd, termios.TCSADRAIN, new)
180 curses.setupterm() 188 curses.setupterm()
181 if curses.tigetnum("colors") > 2: 189 if curses.tigetnum("colors") > 2 and os.environ.get('NO_COLOR', '') == '':
182 for h in handlers: 190 for h in handlers:
183 try: 191 try:
184 h.formatter.enable_color() 192 h.formatter.enable_color()
@@ -227,7 +235,9 @@ class TerminalFilter(object):
227 235
228 def keepAlive(self, t): 236 def keepAlive(self, t):
229 if not self.cuu: 237 if not self.cuu:
230 print("Bitbake still alive (%ds)" % t) 238 print("Bitbake still alive (no events for %ds). Active tasks:" % t)
239 for task in self.helper.running_tasks:
240 print(task)
231 sys.stdout.flush() 241 sys.stdout.flush()
232 242
233 def updateFooter(self): 243 def updateFooter(self):
@@ -249,58 +259,68 @@ class TerminalFilter(object):
249 return 259 return
250 tasks = [] 260 tasks = []
251 for t in runningpids: 261 for t in runningpids:
262 start_time = activetasks[t].get("starttime", None)
263 if start_time:
264 msg = "%s - %s (pid %s)" % (activetasks[t]["title"], self.elapsed(currenttime - start_time), activetasks[t]["pid"])
265 else:
266 msg = "%s (pid %s)" % (activetasks[t]["title"], activetasks[t]["pid"])
252 progress = activetasks[t].get("progress", None) 267 progress = activetasks[t].get("progress", None)
253 if progress is not None: 268 if progress is not None:
254 pbar = activetasks[t].get("progressbar", None) 269 pbar = activetasks[t].get("progressbar", None)
255 rate = activetasks[t].get("rate", None) 270 rate = activetasks[t].get("rate", None)
256 start_time = activetasks[t].get("starttime", None)
257 if not pbar or pbar.bouncing != (progress < 0): 271 if not pbar or pbar.bouncing != (progress < 0):
258 if progress < 0: 272 if progress < 0:
259 pbar = BBProgress("0: %s (pid %s)" % (activetasks[t]["title"], activetasks[t]["pid"]), 100, widgets=[' ', progressbar.BouncingSlider(), ''], extrapos=3, resize_handler=self.sigwinch_handle) 273 pbar = BBProgress("0: %s" % msg, 100, widgets=[' ', progressbar.BouncingSlider(), ''], extrapos=3, resize_handler=self.sigwinch_handle)
260 pbar.bouncing = True 274 pbar.bouncing = True
261 else: 275 else:
262 pbar = BBProgress("0: %s (pid %s)" % (activetasks[t]["title"], activetasks[t]["pid"]), 100, widgets=[' ', progressbar.Percentage(), ' ', progressbar.Bar(), ''], extrapos=5, resize_handler=self.sigwinch_handle) 276 pbar = BBProgress("0: %s" % msg, 100, widgets=[' ', progressbar.Percentage(), ' ', progressbar.Bar(), ''], extrapos=5, resize_handler=self.sigwinch_handle)
263 pbar.bouncing = False 277 pbar.bouncing = False
264 activetasks[t]["progressbar"] = pbar 278 activetasks[t]["progressbar"] = pbar
265 tasks.append((pbar, progress, rate, start_time)) 279 tasks.append((pbar, msg, progress, rate, start_time))
266 else: 280 else:
267 start_time = activetasks[t].get("starttime", None) 281 tasks.append(msg)
268 if start_time:
269 tasks.append("%s - %s (pid %s)" % (activetasks[t]["title"], self.elapsed(currenttime - start_time), activetasks[t]["pid"]))
270 else:
271 tasks.append("%s (pid %s)" % (activetasks[t]["title"], activetasks[t]["pid"]))
272 282
273 if self.main.shutdown: 283 if self.main.shutdown:
274 content = "Waiting for %s running tasks to finish:" % len(activetasks) 284 content = pluralise("Waiting for %s running task to finish",
285 "Waiting for %s running tasks to finish", len(activetasks))
286 if not self.quiet:
287 content += ':'
275 print(content) 288 print(content)
276 else: 289 else:
290 scene_tasks = "%s of %s" % (self.helper.setscene_current, self.helper.setscene_total)
291 cur_tasks = "%s of %s" % (self.helper.tasknumber_current, self.helper.tasknumber_total)
292
293 content = ''
294 if not self.quiet:
295 msg = "Setscene tasks: %s" % scene_tasks
296 content += msg + "\n"
297 print(msg)
298
277 if self.quiet: 299 if self.quiet:
278 content = "Running tasks (%s of %s)" % (self.helper.tasknumber_current, self.helper.tasknumber_total) 300 msg = "Running tasks (%s, %s)" % (scene_tasks, cur_tasks)
279 elif not len(activetasks): 301 elif not len(activetasks):
280 content = "No currently running tasks (%s of %s)" % (self.helper.tasknumber_current, self.helper.tasknumber_total) 302 msg = "No currently running tasks (%s)" % cur_tasks
281 else: 303 else:
282 content = "Currently %2s running tasks (%s of %s)" % (len(activetasks), self.helper.tasknumber_current, self.helper.tasknumber_total) 304 msg = "Currently %2s running tasks (%s)" % (len(activetasks), cur_tasks)
283 maxtask = self.helper.tasknumber_total 305 maxtask = self.helper.tasknumber_total
284 if not self.main_progress or self.main_progress.maxval != maxtask: 306 if not self.main_progress or self.main_progress.maxval != maxtask:
285 widgets = [' ', progressbar.Percentage(), ' ', progressbar.Bar()] 307 widgets = [' ', progressbar.Percentage(), ' ', progressbar.Bar()]
286 self.main_progress = BBProgress("Running tasks", maxtask, widgets=widgets, resize_handler=self.sigwinch_handle) 308 self.main_progress = BBProgress("Running tasks", maxtask, widgets=widgets, resize_handler=self.sigwinch_handle)
287 self.main_progress.start(False) 309 self.main_progress.start(False)
288 self.main_progress.setmessage(content) 310 self.main_progress.setmessage(msg)
289 progress = self.helper.tasknumber_current - 1 311 progress = max(0, self.helper.tasknumber_current - 1)
290 if progress < 0: 312 content += self.main_progress.update(progress)
291 progress = 0
292 content = self.main_progress.update(progress)
293 print('') 313 print('')
294 lines = 1 + int(len(content) / (self.columns + 1)) 314 lines = self.getlines(content)
295 if self.quiet == 0: 315 if not self.quiet:
296 for tasknum, task in enumerate(tasks[:(self.rows - 2)]): 316 for tasknum, task in enumerate(tasks[:(self.rows - 1 - lines)]):
297 if isinstance(task, tuple): 317 if isinstance(task, tuple):
298 pbar, progress, rate, start_time = task 318 pbar, msg, progress, rate, start_time = task
299 if not pbar.start_time: 319 if not pbar.start_time:
300 pbar.start(False) 320 pbar.start(False)
301 if start_time: 321 if start_time:
302 pbar.start_time = start_time 322 pbar.start_time = start_time
303 pbar.setmessage('%s:%s' % (tasknum, pbar.msg.split(':', 1)[1])) 323 pbar.setmessage('%s: %s' % (tasknum, msg))
304 pbar.setextra(rate) 324 pbar.setextra(rate)
305 if progress > -1: 325 if progress > -1:
306 content = pbar.update(progress) 326 content = pbar.update(progress)
@@ -310,11 +330,17 @@ class TerminalFilter(object):
310 else: 330 else:
311 content = "%s: %s" % (tasknum, task) 331 content = "%s: %s" % (tasknum, task)
312 print(content) 332 print(content)
313 lines = lines + 1 + int(len(content) / (self.columns + 1)) 333 lines = lines + self.getlines(content)
314 self.footer_present = lines 334 self.footer_present = lines
315 self.lastpids = runningpids[:] 335 self.lastpids = runningpids[:]
316 self.lastcount = self.helper.tasknumber_current 336 self.lastcount = self.helper.tasknumber_current
317 337
338 def getlines(self, content):
339 lines = 0
340 for line in content.split("\n"):
341 lines = lines + 1 + int(len(line) / (self.columns + 1))
342 return lines
343
318 def finish(self): 344 def finish(self):
319 if self.stdinbackup: 345 if self.stdinbackup:
320 fd = sys.stdin.fileno() 346 fd = sys.stdin.fileno()
@@ -327,7 +353,7 @@ def print_event_log(event, includelogs, loglines, termfilter):
327 termfilter.clearFooter() 353 termfilter.clearFooter()
328 bb.error("Logfile of failure stored in: %s" % logfile) 354 bb.error("Logfile of failure stored in: %s" % logfile)
329 if includelogs and not event.errprinted: 355 if includelogs and not event.errprinted:
330 print("Log data follows:") 356 bb.plain("Log data follows:")
331 f = open(logfile, "r") 357 f = open(logfile, "r")
332 lines = [] 358 lines = []
333 while True: 359 while True:
@@ -340,11 +366,11 @@ def print_event_log(event, includelogs, loglines, termfilter):
340 if len(lines) > int(loglines): 366 if len(lines) > int(loglines):
341 lines.pop(0) 367 lines.pop(0)
342 else: 368 else:
343 print('| %s' % l) 369 bb.plain('| %s' % l)
344 f.close() 370 f.close()
345 if lines: 371 if lines:
346 for line in lines: 372 for line in lines:
347 print(line) 373 bb.plain(line)
348 374
349def _log_settings_from_server(server, observe_only): 375def _log_settings_from_server(server, observe_only):
350 # Get values of variables which control our output 376 # Get values of variables which control our output
@@ -401,6 +427,11 @@ def main(server, eventHandler, params, tf = TerminalFilter):
401 except bb.BBHandledException: 427 except bb.BBHandledException:
402 drain_events_errorhandling(eventHandler) 428 drain_events_errorhandling(eventHandler)
403 return 1 429 return 1
430 except Exception as e:
431 # bitbake-server comms failure
432 early_logger = bb.msg.logger_create('bitbake', sys.stdout)
433 early_logger.fatal("Attempting to set server environment: %s", e)
434 return 1
404 435
405 if params.options.quiet == 0: 436 if params.options.quiet == 0:
406 console_loglevel = loglevel 437 console_loglevel = loglevel
@@ -531,13 +562,30 @@ def main(server, eventHandler, params, tf = TerminalFilter):
531 } 562 }
532 }) 563 })
533 564
534 bb.utils.mkdirhier(os.path.dirname(consolelogfile)) 565 consolelogdirname = os.path.dirname(consolelogfile)
535 loglink = os.path.join(os.path.dirname(consolelogfile), 'console-latest.log') 566 # `bb.utils.mkdirhier` has this check, but it reports failure using bb.fatal, which logs
567 # to the very logger we are trying to set up.
568 if '${' in str(consolelogdirname):
569 print(
570 "FATAL: Directory name {} contains unexpanded bitbake variable. This may cause build failures and WORKDIR pollution.".format(
571 consolelogdirname))
572 if '${MACHINE}' in consolelogdirname:
573 print("HINT: It looks like you forgot to set MACHINE in local.conf.")
574
575 bb.utils.mkdirhier(consolelogdirname)
576 loglink = os.path.join(consolelogdirname, 'console-latest.log')
536 bb.utils.remove(loglink) 577 bb.utils.remove(loglink)
537 try: 578 try:
538 os.symlink(os.path.basename(consolelogfile), loglink) 579 os.symlink(os.path.basename(consolelogfile), loglink)
539 except OSError: 580 except OSError:
540 pass 581 pass
582
583 # Add the logging domains specified by the user on the command line
584 for (domainarg, iterator) in groupby(params.debug_domains):
585 dlevel = len(tuple(iterator))
586 l = logconfig["loggers"].setdefault("BitBake.%s" % domainarg, {})
587 l["level"] = logging.DEBUG - dlevel + 1
588 l.setdefault("handlers", []).extend(["BitBake.verbconsole"])
541 589
542 conf = bb.msg.setLoggingConfig(logconfig, logconfigfile) 590 conf = bb.msg.setLoggingConfig(logconfig, logconfigfile)
543 591
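The groupby loop above turns repeated log-domain options into per-domain verbosity: each extra occurrence of a domain lowers its threshold one step below logging.DEBUG. Note that itertools.groupby only merges consecutive equal items, which suffices here because the parsed option list presents repeats adjacently. A standalone illustration with hypothetical domain names:

    from itertools import groupby
    import logging

    debug_domains = ['Fetcher', 'Fetcher', 'RunQueue']

    for domainarg, iterator in groupby(debug_domains):
        dlevel = len(tuple(iterator))          # occurrences of this domain
        level = logging.DEBUG - dlevel + 1     # DEBUG is 10; two repeats -> 9
        print('BitBake.%s -> level %d' % (domainarg, level))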
@@ -546,6 +594,8 @@ def main(server, eventHandler, params, tf = TerminalFilter):
546 else: 594 else:
547 log_exec_tty = False 595 log_exec_tty = False
548 596
597 should_print_hyperlinks = sys.stdout.isatty() and os.environ.get('NO_COLOR', '') == ''
598
549 helper = uihelper.BBUIHelper() 599 helper = uihelper.BBUIHelper()
550 600
551 # Look for the specially designated handlers which need to be passed to the 601 # Look for the specially designated handlers which need to be passed to the
@@ -559,7 +609,12 @@ def main(server, eventHandler, params, tf = TerminalFilter):
559 return 609 return
560 610
561 llevel, debug_domains = bb.msg.constructLogOptions() 611 llevel, debug_domains = bb.msg.constructLogOptions()
562 server.runCommand(["setEventMask", server.getEventHandle(), llevel, debug_domains, _evt_list]) 612 try:
613 server.runCommand(["setEventMask", server.getEventHandle(), llevel, debug_domains, _evt_list])
614 except (BrokenPipeError, EOFError) as e:
615 # bitbake-server comms failure
616 logger.fatal("Attempting to set event mask: %s", e)
617 return 1
563 618
564 # The logging_tree module is *extremely* helpful in debugging logging 619 # The logging_tree module is *extremely* helpful in debugging logging
565 # domains. Uncomment here to dump the logging tree when bitbake starts 620 # domains. Uncomment here to dump the logging tree when bitbake starts
@@ -568,7 +623,11 @@ def main(server, eventHandler, params, tf = TerminalFilter):
568 623
569 universe = False 624 universe = False
570 if not params.observe_only: 625 if not params.observe_only:
571 params.updateFromServer(server) 626 try:
627 params.updateFromServer(server)
628 except Exception as e:
629 logger.fatal("Fetching command line: %s", e)
630 return 1
572 cmdline = params.parseActions() 631 cmdline = params.parseActions()
573 if not cmdline: 632 if not cmdline:
574 print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.") 633 print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
@@ -579,7 +638,12 @@ def main(server, eventHandler, params, tf = TerminalFilter):
579 if cmdline['action'][0] == "buildTargets" and "universe" in cmdline['action'][1]: 638 if cmdline['action'][0] == "buildTargets" and "universe" in cmdline['action'][1]:
580 universe = True 639 universe = True
581 640
582 ret, error = server.runCommand(cmdline['action']) 641 try:
642 ret, error = server.runCommand(cmdline['action'])
643 except (BrokenPipeError, EOFError) as e:
644 # bitbake-server comms failure
645 logger.fatal("Command '{}' failed: %s".format(cmdline), e)
646 return 1
583 if error: 647 if error:
584 logger.error("Command '%s' failed: %s" % (cmdline, error)) 648 logger.error("Command '%s' failed: %s" % (cmdline, error))
585 return 1 649 return 1
@@ -595,28 +659,42 @@ def main(server, eventHandler, params, tf = TerminalFilter):
595 return_value = 0 659 return_value = 0
596 errors = 0 660 errors = 0
597 warnings = 0 661 warnings = 0
598 taskfailures = [] 662 taskfailures = {}
599 663
600 printinterval = 5000 664 printintervaldelta = 10 * 60 # 10 minutes
601 lastprint = time.time() 665 printinterval = printintervaldelta
666 pinginterval = 1 * 60 # 1 minute
667 lastevent = lastprint = time.time()
602 668
603 termfilter = tf(main, helper, console_handlers, params.options.quiet) 669 termfilter = tf(main, helper, console_handlers, params.options.quiet)
604 atexit.register(termfilter.finish) 670 atexit.register(termfilter.finish)
605 671
606 while True: 672 # shutdown levels
673 # 0 - normal operation
674 # 1 - no new task execution, let current running tasks finish
675 # 2 - interrupting currently executing tasks
676 # 3 - we're done, exit
677 while main.shutdown < 3:
607 try: 678 try:
608 if (lastprint + printinterval) <= time.time(): 679 if (lastprint + printinterval) <= time.time():
609 termfilter.keepAlive(printinterval) 680 termfilter.keepAlive(printinterval)
610 printinterval += 5000 681 printinterval += printintervaldelta
611 event = eventHandler.waitEvent(0) 682 event = eventHandler.waitEvent(0)
612 if event is None: 683 if event is None:
613 if main.shutdown > 1: 684 if (lastevent + pinginterval) <= time.time():
614 break 685 ret, error = server.runCommand(["ping"])
686 if error or not ret:
687 termfilter.clearFooter()
688 print("No reply after pinging server (%s, %s), exiting." % (str(error), str(ret)))
689 return_value = 3
690 main.shutdown = 3
691 lastevent = time.time()
615 if not parseprogress: 692 if not parseprogress:
616 termfilter.updateFooter() 693 termfilter.updateFooter()
617 event = eventHandler.waitEvent(0.25) 694 event = eventHandler.waitEvent(0.25)
618 if event is None: 695 if event is None:
619 continue 696 continue
697 lastevent = time.time()
620 helper.eventHandler(event) 698 helper.eventHandler(event)
621 if isinstance(event, bb.runqueue.runQueueExitWait): 699 if isinstance(event, bb.runqueue.runQueueExitWait):
622 if not main.shutdown: 700 if not main.shutdown:
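The shutdown-levels comment above documents the ladder the reworked loop walks: it keeps iterating while main.shutdown < 3, and the ping-failure and CommandExit paths now jump straight to level 3. Restated as an enum purely for readability (knotty itself keeps plain integers):

    from enum import IntEnum

    class Shutdown(IntEnum):
        RUNNING   = 0   # normal operation
        DRAIN     = 1   # no new task execution; running tasks may finish
        INTERRUPT = 2   # interrupting currently executing tasks
        EXIT      = 3   # we're done, leave the event loop

    shutdown = Shutdown.RUNNING
    while shutdown < Shutdown.EXIT:
        shutdown = Shutdown(shutdown + 1)   # stand-in for real event handling
    print('loop exited at level', int(shutdown))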
@@ -638,8 +716,8 @@ def main(server, eventHandler, params, tf = TerminalFilter):
638 716
639 if isinstance(event, logging.LogRecord): 717 if isinstance(event, logging.LogRecord):
640 lastprint = time.time() 718 lastprint = time.time()
641 printinterval = 5000 719 printinterval = printintervaldelta
642 if event.levelno >= bb.msg.BBLogFormatter.ERROR: 720 if event.levelno >= bb.msg.BBLogFormatter.ERRORONCE:
643 errors = errors + 1 721 errors = errors + 1
644 return_value = 1 722 return_value = 1
645 elif event.levelno == bb.msg.BBLogFormatter.WARNING: 723 elif event.levelno == bb.msg.BBLogFormatter.WARNING:
@@ -653,10 +731,10 @@ def main(server, eventHandler, params, tf = TerminalFilter):
653 continue 731 continue
654 732
655 # Prefix task messages with recipe/task 733 # Prefix task messages with recipe/task
656 if event.taskpid in helper.pidmap and event.levelno != bb.msg.BBLogFormatter.PLAIN: 734 if event.taskpid in helper.pidmap and event.levelno not in [bb.msg.BBLogFormatter.PLAIN, bb.msg.BBLogFormatter.WARNONCE, bb.msg.BBLogFormatter.ERRORONCE]:
657 taskinfo = helper.running_tasks[helper.pidmap[event.taskpid]] 735 taskinfo = helper.running_tasks[helper.pidmap[event.taskpid]]
658 event.msg = taskinfo['title'] + ': ' + event.msg 736 event.msg = taskinfo['title'] + ': ' + event.msg
659 if hasattr(event, 'fn'): 737 if hasattr(event, 'fn') and event.levelno not in [bb.msg.BBLogFormatter.WARNONCE, bb.msg.BBLogFormatter.ERRORONCE]:
660 event.msg = event.fn + ': ' + event.msg 738 event.msg = event.fn + ': ' + event.msg
661 logging.getLogger(event.name).handle(event) 739 logging.getLogger(event.name).handle(event)
662 continue 740 continue
@@ -667,6 +745,8 @@ def main(server, eventHandler, params, tf = TerminalFilter):
667 if isinstance(event, bb.build.TaskFailed): 745 if isinstance(event, bb.build.TaskFailed):
668 return_value = 1 746 return_value = 1
669 print_event_log(event, includelogs, loglines, termfilter) 747 print_event_log(event, includelogs, loglines, termfilter)
748 k = "{}:{}".format(event._fn, event._task)
749 taskfailures[k] = event.logfile
670 if isinstance(event, bb.build.TaskBase): 750 if isinstance(event, bb.build.TaskBase):
671 logger.info(event._message) 751 logger.info(event._message)
672 continue 752 continue
@@ -721,15 +801,15 @@ def main(server, eventHandler, params, tf = TerminalFilter):
721 if event.error: 801 if event.error:
722 errors = errors + 1 802 errors = errors + 1
723 logger.error(str(event)) 803 logger.error(str(event))
724 main.shutdown = 2 804 main.shutdown = 3
725 continue 805 continue
726 if isinstance(event, bb.command.CommandExit): 806 if isinstance(event, bb.command.CommandExit):
727 if not return_value: 807 if not return_value:
728 return_value = event.exitcode 808 return_value = event.exitcode
729 main.shutdown = 2 809 main.shutdown = 3
730 continue 810 continue
731 if isinstance(event, (bb.command.CommandCompleted, bb.cooker.CookerExit)): 811 if isinstance(event, (bb.command.CommandCompleted, bb.cooker.CookerExit)):
732 main.shutdown = 2 812 main.shutdown = 3
733 continue 813 continue
734 if isinstance(event, bb.event.MultipleProviders): 814 if isinstance(event, bb.event.MultipleProviders):
735 logger.info(str(event)) 815 logger.info(str(event))
@@ -745,7 +825,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
745 continue 825 continue
746 826
747 if isinstance(event, bb.runqueue.sceneQueueTaskStarted): 827 if isinstance(event, bb.runqueue.sceneQueueTaskStarted):
748 logger.info("Running setscene task %d of %d (%s)" % (event.stats.completed + event.stats.active + event.stats.failed + 1, event.stats.total, event.taskstring)) 828 logger.info("Running setscene task %d of %d (%s)" % (event.stats.setscene_covered + event.stats.setscene_active + event.stats.setscene_notcovered + 1, event.stats.setscene_total, event.taskstring))
749 continue 829 continue
750 830
751 if isinstance(event, bb.runqueue.runQueueTaskStarted): 831 if isinstance(event, bb.runqueue.runQueueTaskStarted):
@@ -762,7 +842,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
762 842
763 if isinstance(event, bb.runqueue.runQueueTaskFailed): 843 if isinstance(event, bb.runqueue.runQueueTaskFailed):
764 return_value = 1 844 return_value = 1
765 taskfailures.append(event.taskstring) 845 taskfailures.setdefault(event.taskstring)
766 logger.error(str(event)) 846 logger.error(str(event))
767 continue 847 continue
768 848
@@ -814,15 +894,26 @@ def main(server, eventHandler, params, tf = TerminalFilter):
814 894
815 logger.error("Unknown event: %s", event) 895 logger.error("Unknown event: %s", event)
816 896
897 except (BrokenPipeError, EOFError) as e:
898 # bitbake-server comms failure, don't attempt further comms and exit
899 logger.fatal("Executing event: %s", e)
900 return_value = 1
901 errors = errors + 1
902 main.shutdown = 3
817 except EnvironmentError as ioerror: 903 except EnvironmentError as ioerror:
818 termfilter.clearFooter() 904 termfilter.clearFooter()
819 # ignore interrupted io 905 # ignore interrupted io
820 if ioerror.args[0] == 4: 906 if ioerror.args[0] == 4:
821 continue 907 continue
822 sys.stderr.write(str(ioerror)) 908 sys.stderr.write(str(ioerror))
823 if not params.observe_only:
824 _, error = server.runCommand(["stateForceShutdown"])
825 main.shutdown = 2 909 main.shutdown = 2
910 if not params.observe_only:
911 try:
912 _, error = server.runCommand(["stateForceShutdown"])
913 except (BrokenPipeError, EOFError) as e:
914 # bitbake-server comms failure, don't attempt further comms and exit
915 logger.fatal("Unable to force shutdown: %s", e)
916 main.shutdown = 3
826 except KeyboardInterrupt: 917 except KeyboardInterrupt:
827 termfilter.clearFooter() 918 termfilter.clearFooter()
828 if params.observe_only: 919 if params.observe_only:
@@ -831,9 +922,13 @@ def main(server, eventHandler, params, tf = TerminalFilter):
831 922
832 def state_force_shutdown(): 923 def state_force_shutdown():
833 print("\nSecond Keyboard Interrupt, stopping...\n") 924 print("\nSecond Keyboard Interrupt, stopping...\n")
834 _, error = server.runCommand(["stateForceShutdown"]) 925 try:
835 if error: 926 _, error = server.runCommand(["stateForceShutdown"])
836 logger.error("Unable to cleanly stop: %s" % error) 927 if error:
928 logger.error("Unable to cleanly stop: %s" % error)
929 except (BrokenPipeError, EOFError) as e:
930 # bitbake-server comms failure
931 logger.fatal("Unable to cleanly stop: %s", e)
837 932
838 if not params.observe_only and main.shutdown == 1: 933 if not params.observe_only and main.shutdown == 1:
839 state_force_shutdown() 934 state_force_shutdown()
@@ -846,32 +941,49 @@ def main(server, eventHandler, params, tf = TerminalFilter):
846 _, error = server.runCommand(["stateShutdown"]) 941 _, error = server.runCommand(["stateShutdown"])
847 if error: 942 if error:
848 logger.error("Unable to cleanly shutdown: %s" % error) 943 logger.error("Unable to cleanly shutdown: %s" % error)
944 except (BrokenPipeError, EOFError) as e:
945 # bitbake-server comms failure
946 logger.fatal("Unable to cleanly shutdown: %s", e)
849 except KeyboardInterrupt: 947 except KeyboardInterrupt:
850 state_force_shutdown() 948 state_force_shutdown()
851 949
852 main.shutdown = main.shutdown + 1 950 main.shutdown = main.shutdown + 1
853 pass
854 except Exception as e: 951 except Exception as e:
855 import traceback 952 import traceback
856 sys.stderr.write(traceback.format_exc()) 953 sys.stderr.write(traceback.format_exc())
857 if not params.observe_only:
858 _, error = server.runCommand(["stateForceShutdown"])
859 main.shutdown = 2 954 main.shutdown = 2
955 if not params.observe_only:
956 try:
957 _, error = server.runCommand(["stateForceShutdown"])
958 except (BrokenPipeError, EOFError) as e:
959 # bitbake-server comms failure, don't attempt further comms and exit
960 logger.fatal("Unable to force shutdown: %s", e)
961 main.shutdown = 3
860 return_value = 1 962 return_value = 1
861 try: 963 try:
862 termfilter.clearFooter() 964 termfilter.clearFooter()
863 summary = "" 965 summary = ""
966 def format_hyperlink(url, link_text):
967 if should_print_hyperlinks:
968 start = f'\033]8;;{url}\033\\'
969 end = '\033]8;;\033\\'
970 return f'{start}{link_text}{end}'
971 return link_text
972
864 if taskfailures: 973 if taskfailures:
865 summary += pluralise("\nSummary: %s task failed:", 974 summary += pluralise("\nSummary: %s task failed:",
866 "\nSummary: %s tasks failed:", len(taskfailures)) 975 "\nSummary: %s tasks failed:", len(taskfailures))
867 for failure in taskfailures: 976 for (failure, log_file) in taskfailures.items():
868 summary += "\n %s" % failure 977 summary += "\n %s" % failure
978 if log_file:
979 hyperlink = format_hyperlink(f"file://{log_file}", log_file)
980 summary += "\n log: {}".format(hyperlink)
869 if warnings: 981 if warnings:
870 summary += pluralise("\nSummary: There was %s WARNING message shown.", 982 summary += pluralise("\nSummary: There was %s WARNING message.",
871 "\nSummary: There were %s WARNING messages shown.", warnings) 983 "\nSummary: There were %s WARNING messages.", warnings)
872 if return_value and errors: 984 if return_value and errors:
873 summary += pluralise("\nSummary: There was %s ERROR message shown, returning a non-zero exit code.", 985 summary += pluralise("\nSummary: There was %s ERROR message, returning a non-zero exit code.",
874 "\nSummary: There were %s ERROR messages shown, returning a non-zero exit code.", errors) 986 "\nSummary: There were %s ERROR messages, returning a non-zero exit code.", errors)
875 if summary and params.options.quiet == 0: 987 if summary and params.options.quiet == 0:
876 print(summary) 988 print(summary)
877 989
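format_hyperlink() above wraps the failure-log path in OSC 8 escape sequences, which capable terminal emulators render as a clickable link; when stdout is not a TTY or NO_COLOR is set, the plain text is returned unchanged. The same helper in isolation (the log path is made up):

    import os
    import sys

    def format_hyperlink(url, link_text):
        # OSC 8 ; ; URI ST <text> OSC 8 ; ; ST -- the empty URI closes the link
        if sys.stdout.isatty() and os.environ.get('NO_COLOR', '') == '':
            return f'\033]8;;{url}\033\\{link_text}\033]8;;\033\\'
        return link_text

    print(format_hyperlink('file:///tmp/log.do_compile.1234', 'log.do_compile.1234'))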
diff --git a/bitbake/lib/bb/ui/ncurses.py b/bitbake/lib/bb/ui/ncurses.py
index cf1c876a51..18a706547a 100644
--- a/bitbake/lib/bb/ui/ncurses.py
+++ b/bitbake/lib/bb/ui/ncurses.py
@@ -227,6 +227,9 @@ class NCursesUI:
227 shutdown = 0 227 shutdown = 0
228 228
229 try: 229 try:
230 if not params.observe_only:
231 params.updateToServer(server, os.environ.copy())
232
230 params.updateFromServer(server) 233 params.updateFromServer(server)
231 cmdline = params.parseActions() 234 cmdline = params.parseActions()
232 if not cmdline: 235 if not cmdline:
diff --git a/bitbake/lib/bb/ui/taskexp.py b/bitbake/lib/bb/ui/taskexp.py
index 2b246710ca..bedfd69b09 100644
--- a/bitbake/lib/bb/ui/taskexp.py
+++ b/bitbake/lib/bb/ui/taskexp.py
@@ -8,6 +8,7 @@
8# 8#
9 9
10import sys 10import sys
11import traceback
11 12
12try: 13try:
13 import gi 14 import gi
@@ -176,7 +177,7 @@ class gtkthread(threading.Thread):
176 quit = threading.Event() 177 quit = threading.Event()
177 def __init__(self, shutdown): 178 def __init__(self, shutdown):
178 threading.Thread.__init__(self) 179 threading.Thread.__init__(self)
179 self.setDaemon(True) 180 self.daemon = True
180 self.shutdown = shutdown 181 self.shutdown = shutdown
181 if not Gtk.init_check()[0]: 182 if not Gtk.init_check()[0]:
182 sys.stderr.write("Gtk+ init failed. Make sure DISPLAY variable is set.\n") 183 sys.stderr.write("Gtk+ init failed. Make sure DISPLAY variable is set.\n")
@@ -196,6 +197,7 @@ def main(server, eventHandler, params):
196 gtkgui.start() 197 gtkgui.start()
197 198
198 try: 199 try:
200 params.updateToServer(server, os.environ.copy())
199 params.updateFromServer(server) 201 params.updateFromServer(server)
200 cmdline = params.parseActions() 202 cmdline = params.parseActions()
201 if not cmdline: 203 if not cmdline:
@@ -218,6 +220,9 @@ def main(server, eventHandler, params):
218 except client.Fault as x: 220 except client.Fault as x:
219 print("XMLRPC Fault getting commandline:\n %s" % x) 221 print("XMLRPC Fault getting commandline:\n %s" % x)
220 return 222 return
223 except Exception as e:
224 print("Exception in startup:\n %s" % traceback.format_exc())
225 return
221 226
222 if gtkthread.quit.isSet(): 227 if gtkthread.quit.isSet():
223 return 228 return
diff --git a/bitbake/lib/bb/ui/taskexp_ncurses.py b/bitbake/lib/bb/ui/taskexp_ncurses.py
new file mode 100755
index 0000000000..ea94a4987f
--- /dev/null
+++ b/bitbake/lib/bb/ui/taskexp_ncurses.py
@@ -0,0 +1,1511 @@
1#
2# BitBake Graphical ncurses-based Dependency Explorer
3# * Based on the GTK implementation
4# * Intended to run on any Linux host
5#
6# Copyright (C) 2007 Ross Burton
7# Copyright (C) 2007 - 2008 Richard Purdie
8# Copyright (C) 2022 - 2024 David Reyna
9#
10# SPDX-License-Identifier: GPL-2.0-only
11#
12
13#
14# Execution example:
15# $ bitbake -g -u taskexp_ncurses zlib acl
16#
17# Self-test example (executes a script of GUI actions):
18# $ TASK_EXP_UNIT_TEST=1 bitbake -g -u taskexp_ncurses zlib acl
19# ...
20# $ echo $?
21# 0
22# $ TASK_EXP_UNIT_TEST=1 bitbake -g -u taskexp_ncurses zlib acl foo
23# ERROR: Nothing PROVIDES 'foo'. Close matches:
24# ofono
25# $ echo $?
26# 1
27#
28# Self-test with no terminal example (only tests dependency fetch from bitbake):
29# $ TASK_EXP_UNIT_TEST_NOTERM=1 bitbake -g -u taskexp_ncurses quilt
30# $ echo $?
31# 0
32#
33# Features:
34# * Ncurses is used for the presentation layer. Only the 'curses'
35# library is used (none of the extension libraries), plus only
36# one main screen is used (no sub-windows)
37# * Uses the 'generateDepTreeEvent' bitbake event to fetch the
38# dynamic dependency data based on passed recipes
39# * Computes and provides reverse dependencies
40# * Supports task sorting on:
41# (a) Task dependency order within each recipe
42# (b) Pure alphabetical order
43# (c) Provisions for third sort order (bitbake order?)
44# * The 'Filter' does a "*string*" wildcard filter on tasks in the
45# main window, dynamically re-ordering and re-centering the content
46# * A 'Print' function exports the selected task or its whole recipe
47# task set to the default file "taskdep.txt"
48# * Supports a progress bar for bitbake loads and file printing
49# * Line art for box drawing supported, ASCII art as an alternative
50# * No horizontal scrolling support. Selected task's full name
51# shown in bottom bar
52# * Dynamically catches terminals that are (or become) too small
53# * Exception handling to ensure return to a normal terminal on errors
54# * Debugging support, self test option
55#
56import os
57import sys
58import traceback
59import curses
60import re
61import time
62
63# Bitbake server support
64import threading
65from xmlrpc import client
66import bb
67import bb.event
68
69# Dependency indexes (depends_model)
70(TYPE_DEP, TYPE_RDEP) = (0, 1)
71DEPENDS_TYPE = 0
72DEPENDS_TASK = 1
73DEPENDS_DEPS = 2
74# Task indexes (task_list)
75TASK_NAME = 0
76TASK_PRIMARY = 1
77TASK_SORT_ALPHA = 2
78TASK_SORT_DEPS = 3
79TASK_SORT_BITBAKE = 4
80# Sort options (default is SORT_DEPS)
81SORT_ALPHA = 0
82SORT_DEPS = 1
83SORT_BITBAKE_ENABLE = False # NOTE: future sort
84SORT_BITBAKE = 2
85sort_model = SORT_DEPS
86# Print options
87PRINT_MODEL_1 = 0
88PRINT_MODEL_2 = 1
89print_model = PRINT_MODEL_2
90print_file_name = "taskdep_print.log"
91print_file_backup_name = "taskdep_print_backup.log"
92is_printed = False
93is_filter = False
94
95# Standard (and backup) key mappings
96CHAR_NUL = 0 # Used as self-test nop char
97CHAR_BS_H = 8 # Alternate backspace key
98CHAR_TAB = 9
99CHAR_RETURN = 10
100CHAR_ESCAPE = 27
101CHAR_UP = ord('{') # Used as self-test ASCII char
102CHAR_DOWN = ord('}') # Used as self-test ASCII char
103
104# Color_pair IDs
105CURSES_NORMAL = 0
106CURSES_HIGHLIGHT = 1
107CURSES_WARNING = 2
108
109
110#################################################
111### Debugging support
112###
113
114verbose = False
115
116# Debug: message display slow-step through display update issues
117def alert(msg, screen):
118 if msg:
119 screen.addstr(0, 10, '[%-4s]' % msg)
120 screen.refresh()
121 curses.napms(2000)
122 else:
123 if do_line_art:
124 for i in range(10, 24):
125 screen.addch(0, i, curses.ACS_HLINE)
126 else:
127 screen.addstr(0, 10, '-' * 14)
128 screen.refresh()
129
130# Debug: display edge conditions on frame movements
131def debug_frame(nbox_obj):
132 if verbose:
133 nbox_obj.screen.addstr(0, 50, '[I=%2d,O=%2d,S=%3s,H=%2d,M=%4d]' % (
134 nbox_obj.cursor_index,
135 nbox_obj.cursor_offset,
136 nbox_obj.scroll_offset,
137 nbox_obj.inside_height,
138 len(nbox_obj.task_list),
139 ))
140 nbox_obj.screen.refresh()
141
142#
143# Unit test (assumes that 'quilt-native' is always present)
144#
145
146unit_test = os.environ.get('TASK_EXP_UNIT_TEST')
147unit_test_cmnds=[
148 '# Default selected task in primary box',
149 'tst_selected=<TASK>.do_recipe_qa',
150 '# Default selected task in deps',
151 'tst_entry=<TAB>',
152 'tst_selected=',
153 '# Default selected task in rdeps',
154 'tst_entry=<TAB>',
155 'tst_selected=<TASK>.do_fetch',
156 "# Test 'select' back to primary box",
157 'tst_entry=<CR>',
158 '#tst_entry=<DOWN>', # optional injected error
159 'tst_selected=<TASK>.do_fetch',
160 '# Check filter',
161 'tst_entry=/uilt-nativ/',
162 'tst_selected=quilt-native.do_recipe_qa',
163 '# Check print',
164 'tst_entry=p',
165 'tst_printed=quilt-native.do_fetch',
166 '#tst_printed=quilt-foo.do_nothing', # optional injected error
167 '# Done!',
168 'tst_entry=q',
169]
170unit_test_idx=0
171unit_test_command_chars=''
172unit_test_results=[]
173def unit_test_action(active_package):
174 global unit_test_idx
175 global unit_test_command_chars
176 global unit_test_results
177 ret = CHAR_NUL
178 if unit_test_command_chars:
179 ch = unit_test_command_chars[0]
180 unit_test_command_chars = unit_test_command_chars[1:]
181 time.sleep(0.5)
182 ret = ord(ch)
183 else:
184 line = unit_test_cmnds[unit_test_idx]
185 unit_test_idx += 1
186 line = re.sub('#.*', '', line).strip()
187 line = line.replace('<TASK>',active_package.primary[0])
188 line = line.replace('<TAB>','\t').replace('<CR>','\n')
189 line = line.replace('<UP>','{').replace('<DOWN>','}')
190 if not line: line = 'nop=nop'
191 cmnd,value = line.split('=')
192 if cmnd == 'tst_entry':
193 unit_test_command_chars = value
194 elif cmnd == 'tst_selected':
195 active_selected = active_package.get_selected()
196 if active_selected != value:
197 unit_test_results.append("ERROR:SELFTEST:expected '%s' but got '%s' (NOTE:bitbake may have changed)" % (value,active_selected))
198 ret = ord('Q')
199 else:
200 unit_test_results.append("Pass:SELFTEST:found '%s'" % (value))
201 elif cmnd == 'tst_printed':
202 result = os.system('grep %s %s' % (value,print_file_name))
203 if result:
204 unit_test_results.append("ERROR:PRINTTEST:expected '%s' in '%s'" % (value,print_file_name))
205 ret = ord('Q')
206 else:
207 unit_test_results.append("Pass:PRINTTEST:found '%s'" % (value))
208 # Return the action (CHAR_NUL for no action til next round)
209 return(ret)
210
211# Unit test without an interactive terminal (e.g. ptest)
212unit_test_noterm = os.environ.get('TASK_EXP_UNIT_TEST_NOTERM')
213
214
215#################################################
216### Window frame rendering
217###
218### By default, use the normal line art. Since
219### these extended characters are not ASCII, one
220### must use the ncurses API to render them.
221### The alternate ASCII line art set is optionally
222### available via the 'do_line_art' flag
223
224# By default, render frames using line art
225do_line_art = True
226
227# ASCII render set option
228CHAR_HBAR = '-'
229CHAR_VBAR = '|'
230CHAR_UL_CORNER = '/'
231CHAR_UR_CORNER = '\\'
232CHAR_LL_CORNER = '\\'
233CHAR_LR_CORNER = '/'
234
235# Box frame drawing with line-art
236def line_art_frame(box):
237 x = box.base_x
238 y = box.base_y
239 w = box.width
240 h = box.height + 1
241
242 if do_line_art:
243 for i in range(1, w - 1):
244 box.screen.addch(y, x + i, curses.ACS_HLINE, box.color)
245 box.screen.addch(y + h - 1, x + i, curses.ACS_HLINE, box.color)
246 body_line = "%s" % (' ' * (w - 2))
247 for i in range(1, h - 1):
248 box.screen.addch(y + i, x, curses.ACS_VLINE, box.color)
249 box.screen.addstr(y + i, x + 1, body_line, box.color)
250 box.screen.addch(y + i, x + w - 1, curses.ACS_VLINE, box.color)
251 box.screen.addch(y, x, curses.ACS_ULCORNER, box.color)
252 box.screen.addch(y, x + w - 1, curses.ACS_URCORNER, box.color)
253 box.screen.addch(y + h - 1, x, curses.ACS_LLCORNER, box.color)
254 box.screen.addch(y + h - 1, x + w - 1, curses.ACS_LRCORNER, box.color)
255 else:
256 top_line = "%s%s%s" % (CHAR_UL_CORNER,CHAR_HBAR * (w - 2),CHAR_UR_CORNER)
257 body_line = "%s%s%s" % (CHAR_VBAR,' ' * (w - 2),CHAR_VBAR)
258 bot_line = "%s%s%s" % (CHAR_UR_CORNER,CHAR_HBAR * (w - 2),CHAR_UL_CORNER)
259 tag_line = "%s%s%s" % ('[',CHAR_HBAR * (w - 2),']')
260 # Top bar
261 box.screen.addstr(y, x, top_line)
262 # Middle frame
263 for i in range(1, (h - 1)):
264 box.screen.addstr(y+i, x, body_line)
265 # Bottom bar
266 box.screen.addstr(y + (h - 1), x, bot_line)
267
268# Connect the separate boxes
269def line_art_fixup(box):
270 if do_line_art:
271 box.screen.addch(box.base_y+2, box.base_x, curses.ACS_LTEE, box.color)
272 box.screen.addch(box.base_y+2, box.base_x+box.width-1, curses.ACS_RTEE, box.color)
273
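# Illustrative sketch (standalone, not part of this file's flow): minimal
# use of the curses ACS line-art characters relied on by line_art_frame().
#
#   import curses
#   def _demo(stdscr):
#       stdscr.addch(0, 0, curses.ACS_ULCORNER)
#       for i in range(1, 9):
#           stdscr.addch(0, i, curses.ACS_HLINE)
#       stdscr.addch(0, 9, curses.ACS_URCORNER)
#       stdscr.getch()
#   curses.wrapper(_demo)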
274
275#################################################
276### Ncurses box object : box frame object to display
277### and manage a sub-window's display elements
278### using basic ncurses
279###
280### Supports:
281### * Frame drawing, content (re)drawing
282### * Content scrolling via ArrowUp, ArrowDn, PgUp, PgDN,
283### * Highlighting for active selected item
284### * Content sorting based on selected sort model
285###
286
287class NBox():
288 def __init__(self, screen, label, primary, base_x, base_y, width, height):
289 # Box description
290 self.screen = screen
291 self.label = label
292 self.primary = primary
293 self.color = curses.color_pair(CURSES_NORMAL) if screen else None
294 # Box boundaries
295 self.base_x = base_x
296 self.base_y = base_y
297 self.width = width
298 self.height = height
299 # Cursor/scroll management
300 self.cursor_enable = False
301 self.cursor_index = 0 # Absolute offset
302 self.cursor_offset = 0 # Frame centric offset
303 self.scroll_offset = 0 # Frame centric offset
304 # Box specific content
305        # Format of each entry is [task_name,is_primary,alpha_sort_key,deps_sort_key(,bitbake_sort_key)]
306 self.task_list = []
307
308 @property
309 def inside_width(self):
310 return(self.width-2)
311
312 @property
313 def inside_height(self):
314 return(self.height-2)
315
316    # Populate the box's content, including the sort mappings and is_primary flag
317 def task_list_append(self,task_name,dep):
318 task_sort_alpha = task_name
319 task_sort_deps = dep.get_dep_sort(task_name)
320 is_primary = False
321 for primary in self.primary:
322 if task_name.startswith(primary+'.'):
323 is_primary = True
324 if SORT_BITBAKE_ENABLE:
325 task_sort_bitbake = dep.get_bb_sort(task_name)
326 self.task_list.append([task_name,is_primary,task_sort_alpha,task_sort_deps,task_sort_bitbake])
327 else:
328 self.task_list.append([task_name,is_primary,task_sort_alpha,task_sort_deps])
329
330 def reset(self):
331 self.task_list = []
332 self.cursor_index = 0 # Absolute offset
333 self.cursor_offset = 0 # Frame centric offset
334 self.scroll_offset = 0 # Frame centric offset
335
336 # Sort the box's content based on the current sort model
337 def sort(self):
338 if SORT_ALPHA == sort_model:
339 self.task_list.sort(key = lambda x: x[TASK_SORT_ALPHA])
340 elif SORT_DEPS == sort_model:
341 self.task_list.sort(key = lambda x: x[TASK_SORT_DEPS])
342 elif SORT_BITBAKE == sort_model:
343 self.task_list.sort(key = lambda x: x[TASK_SORT_BITBAKE])
344
345    # The target package list (to highlight), from the command line
346 def set_primary(self,primary):
347 self.primary = primary
348
349 # Draw the box's outside frame
350 def draw_frame(self):
351 line_art_frame(self)
352 # Title
353 self.screen.addstr(self.base_y,
354 (self.base_x + (self.width//2))-((len(self.label)+2)//2),
355 '['+self.label+']')
356 self.screen.refresh()
357
358 # Draw the box's inside text content
359 def redraw(self):
360 task_list_len = len(self.task_list)
361 # Middle frame
362 body_line = "%s" % (' ' * (self.inside_width-1) )
363 for i in range(0,self.inside_height+1):
364 if i < (task_list_len + self.scroll_offset):
365 str_ctl = "%%-%ss" % (self.width-3)
366 # Safety assert
367 if (i + self.scroll_offset) >= task_list_len:
368 alert("REDRAW:%2d,%4d,%4d" % (i,self.scroll_offset,task_list_len),self.screen)
369 break
370
371 task_obj = self.task_list[i + self.scroll_offset]
372 task = task_obj[TASK_NAME][:self.inside_width-1]
373 task_primary = task_obj[TASK_PRIMARY]
374
375 if task_primary:
376 line = str_ctl % task[:self.inside_width-1]
377 self.screen.addstr(self.base_y+1+i, self.base_x+2, line, curses.A_BOLD)
378 else:
379 line = str_ctl % task[:self.inside_width-1]
380 self.screen.addstr(self.base_y+1+i, self.base_x+2, line)
381 else:
382 line = "%s" % (' ' * (self.inside_width-1) )
383 self.screen.addstr(self.base_y+1+i, self.base_x+2, line)
384 self.screen.refresh()
385
386    # Show the currently selected task over the bottom of the frame
387 def show_selected(self,selected_task):
388 if not selected_task:
389 selected_task = self.get_selected()
390 tag_line = "%s%s%s" % ('[',CHAR_HBAR * (self.width-2),']')
391 self.screen.addstr(self.base_y + self.height, self.base_x, tag_line)
392 self.screen.addstr(self.base_y + self.height,
393 (self.base_x + (self.width//2))-((len(selected_task)+2)//2),
394 '['+selected_task+']')
395 self.screen.refresh()
396
397 # Load box with new table of content
398 def update_content(self,task_list):
399 self.task_list = task_list
400 if self.cursor_enable:
401 cursor_update(turn_on=False)
402 self.cursor_index = 0
403 self.cursor_offset = 0
404 self.scroll_offset = 0
405 self.redraw()
406 if self.cursor_enable:
407 cursor_update(turn_on=True)
408
409 # Manage the box's highlighted task and blinking cursor character
410 def cursor_on(self,is_on):
411 self.cursor_enable = is_on
412 self.cursor_update(is_on)
413
414    # Highlight the currently pointed-to package; restore normal text when released
415 def cursor_update(self,turn_on=True):
416 str_ctl = "%%-%ss" % (self.inside_width-1)
417 try:
418 if len(self.task_list):
419 task_obj = self.task_list[self.cursor_index]
420 task = task_obj[TASK_NAME][:self.inside_width-1]
421 task_primary = task_obj[TASK_PRIMARY]
422 task_font = curses.A_BOLD if task_primary else 0
423 else:
424 task = ''
425 task_font = 0
426 except Exception as e:
427 alert("CURSOR_UPDATE:%s" % (e),self.screen)
428 return
429 if turn_on:
430 self.screen.addstr(self.base_y+1+self.cursor_offset,self.base_x+1,">", curses.color_pair(CURSES_HIGHLIGHT) | curses.A_BLINK)
431 self.screen.addstr(self.base_y+1+self.cursor_offset,self.base_x+2,str_ctl % task, curses.color_pair(CURSES_HIGHLIGHT) | task_font)
432 else:
433 self.screen.addstr(self.base_y+1+self.cursor_offset,self.base_x+1," ")
434 self.screen.addstr(self.base_y+1+self.cursor_offset,self.base_x+2,str_ctl % task, task_font)
435
436 # Down arrow
437 def line_down(self):
438 if len(self.task_list) <= (self.cursor_index+1):
439 return
440 self.cursor_update(turn_on=False)
441 self.cursor_index += 1
442 self.cursor_offset += 1
443 if self.cursor_offset > (self.inside_height):
444 self.cursor_offset -= 1
445 self.scroll_offset += 1
446 self.redraw()
447 self.cursor_update(turn_on=True)
448 debug_frame(self)
449
450 # Up arrow
451 def line_up(self):
452 if 0 > (self.cursor_index-1):
453 return
454 self.cursor_update(turn_on=False)
455 self.cursor_index -= 1
456 self.cursor_offset -= 1
457 if self.cursor_offset < 0:
458 self.cursor_offset += 1
459 self.scroll_offset -= 1
460 self.redraw()
461 self.cursor_update(turn_on=True)
462 debug_frame(self)
463
464 # Page down
465 def page_down(self):
466 max_task = len(self.task_list)-1
467 if max_task < self.inside_height:
468 return
469 self.cursor_update(turn_on=False)
470 self.cursor_index += 10
471 self.cursor_index = min(self.cursor_index,max_task)
472 self.cursor_offset = min(self.inside_height,self.cursor_index)
473 self.scroll_offset = self.cursor_index - self.cursor_offset
474 self.redraw()
475 self.cursor_update(turn_on=True)
476 debug_frame(self)
477
478 # Page up
479 def page_up(self):
480 max_task = len(self.task_list)-1
481 if max_task < self.inside_height:
482 return
483 self.cursor_update(turn_on=False)
484 self.cursor_index -= 10
485 self.cursor_index = max(self.cursor_index,0)
486 self.cursor_offset = max(0, self.inside_height - (max_task - self.cursor_index))
487 self.scroll_offset = self.cursor_index - self.cursor_offset
488 self.redraw()
489 self.cursor_update(turn_on=True)
490 debug_frame(self)
491
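# Note on the scroll model above: the invariant is
#   cursor_index == scroll_offset + cursor_offset
# with cursor_offset clamped to [0, inside_height]. For example, with
# inside_height == 18, stepping line_down() to absolute item 25 leaves
# cursor_offset == 18 and scroll_offset == 7.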
492 # Return the currently selected task name for this box
493 def get_selected(self):
494 if self.task_list:
495 return(self.task_list[self.cursor_index][TASK_NAME])
496 else:
497 return('')
498
499#################################################
500### The helper sub-windows
501###
502
503# Show persistent help at the top of the screen
504class HelpBarView(NBox):
505 def __init__(self, screen, label, primary, base_x, base_y, width, height):
506 super(HelpBarView, self).__init__(screen, label, primary, base_x, base_y, width, height)
507
508 def show_help(self,show):
509 self.screen.addstr(self.base_y,self.base_x, "%s" % (' ' * self.inside_width))
510 if show:
511 help = "Help='?' Filter='/' NextBox=<Tab> Select=<Enter> Print='p','P' Quit='q'"
512 bar_size = self.inside_width - 5 - len(help)
513 self.screen.addstr(self.base_y,self.base_x+((self.inside_width-len(help))//2), help)
514 self.screen.refresh()
515
516# Pop up a detailed Help box
517class HelpBoxView(NBox):
518 def __init__(self, screen, label, primary, base_x, base_y, width, height, dep):
519 super(HelpBoxView, self).__init__(screen, label, primary, base_x, base_y, width, height)
520 self.x_pos = 0
521 self.y_pos = 0
522 self.dep = dep
523
524    # Instantiate the pop-up help box
525 def show_help(self,show):
526 self.x_pos = self.base_x + 4
527 self.y_pos = self.base_y + 2
528
529 def add_line(line):
530 if line:
531 self.screen.addstr(self.y_pos,self.x_pos,line)
532 self.y_pos += 1
533
534        # Gather some statistics
535 dep_count = 0
536 rdep_count = 0
537 for task_obj in self.dep.depends_model:
538 if TYPE_DEP == task_obj[DEPENDS_TYPE]:
539 dep_count += 1
540 elif TYPE_RDEP == task_obj[DEPENDS_TYPE]:
541 rdep_count += 1
542
543 self.draw_frame()
544 line_art_fixup(self.dep)
545 add_line("Quit : 'q' ")
546 add_line("Filter task names : '/'")
547 add_line("Tab to next box : <Tab>")
548 add_line("Select a task : <Enter>")
549 add_line("Print task's deps : 'p'")
550 add_line("Print recipe's deps : 'P'")
551 add_line(" -> '%s'" % print_file_name)
552 add_line("Sort toggle : 's'")
553 add_line(" %s Recipe inner-depends order" % ('->' if (SORT_DEPS == sort_model) else '- '))
554 add_line(" %s Alpha-numeric order" % ('->' if (SORT_ALPHA == sort_model) else '- '))
555 if SORT_BITBAKE_ENABLE:
556            add_line("  %s Bitbake order" % ('->' if (SORT_BITBAKE == sort_model) else '- '))
557 add_line("Alternate backspace : <CTRL-H>")
558 add_line("")
559 add_line("Primary recipes = %s" % ','.join(self.primary))
560 add_line("Task count = %4d" % len(self.dep.pkg_model))
561 add_line("Deps count = %4d" % dep_count)
562 add_line("RDeps count = %4d" % rdep_count)
563 add_line("")
564 self.screen.addstr(self.y_pos,self.x_pos+7,"<Press any key>", curses.color_pair(CURSES_HIGHLIGHT))
565 self.screen.refresh()
566 c = self.screen.getch()
567
568# Show a progress bar
569class ProgressView(NBox):
570 def __init__(self, screen, label, primary, base_x, base_y, width, height):
571 super(ProgressView, self).__init__(screen, label, primary, base_x, base_y, width, height)
572
573 def progress(self,title,current,max):
574 if title:
575 self.label = title
576 else:
577 title = self.label
578        if max <= 0: max = 10
579 bar_size = self.width - 7 - len(title)
580 bar_done = int( (float(current)/float(max)) * float(bar_size) )
581 self.screen.addstr(self.base_y,self.base_x, " %s:[%s%s]" % (title,'*' * bar_done,' ' * (bar_size-bar_done)))
582 self.screen.refresh()
583 return(current+1)
584
585 def clear(self):
586 self.screen.addstr(self.base_y,self.base_x, "%s" % (' ' * self.width))
587 self.screen.refresh()
588
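# Worked example of the bar math above (illustrative): width == 76 and
# title == "DepSort" give bar_size == 76 - 7 - 7 == 62, so at current == 2
# of max == 4 the bar draws int((2/4) * 62) == 31 '*' cells.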
589# Implement a task filter bar
590class FilterView(NBox):
591 SEARCH_NOP = 0
592 SEARCH_GO = 1
593 SEARCH_CANCEL = 2
594
595 def __init__(self, screen, label, primary, base_x, base_y, width, height):
596 super(FilterView, self).__init__(screen, label, primary, base_x, base_y, width, height)
597 self.do_show = False
598 self.filter_str = ""
599
600 def clear(self,enable_show=True):
601 self.filter_str = ""
602
603 def show(self,enable_show=True):
604 self.do_show = enable_show
605 if self.do_show:
606 self.screen.addstr(self.base_y,self.base_x, "[ Filter: %-25s ] '/'=cancel, format='abc' " % self.filter_str[0:25])
607 else:
608 self.screen.addstr(self.base_y,self.base_x, "%s" % (' ' * self.width))
609 self.screen.refresh()
610
611 def show_prompt(self):
612 self.screen.addstr(self.base_y,self.base_x + 10 + len(self.filter_str), " ")
613 self.screen.addstr(self.base_y,self.base_x + 10 + len(self.filter_str), "")
614
615 # Keys specific to the filter box (start/stop filter keys are in the main loop)
616 def input(self,c,ch):
617 ret = self.SEARCH_GO
618 if c in (curses.KEY_BACKSPACE,CHAR_BS_H):
619 # Backspace
620 if self.filter_str:
621 self.filter_str = self.filter_str[0:-1]
622 self.show()
623 elif ((ch >= 'a') and (ch <= 'z')) or ((ch >= 'A') and (ch <= 'Z')) or ((ch >= '0') and (ch <= '9')) or (ch in (' ','_','.','-')):
624            # The isalnum() method acts strangely with keypad(True), so use explicit bounds
625 self.filter_str += ch
626 self.show()
627 else:
628 ret = self.SEARCH_NOP
629 return(ret)
630
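# Illustrative note: the explicit bounds above accept exactly
# [a-zA-Z0-9 _.-], so input(ord('-'),'-') grows the filter string and
# returns SEARCH_GO, while an arrow key falls through to SEARCH_NOP.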
631
632#################################################
633### The primary dependency windows
634###
635
636# The main list of package tasks
637class PackageView(NBox):
638 def __init__(self, screen, label, primary, base_x, base_y, width, height):
639 super(PackageView, self).__init__(screen, label, primary, base_x, base_y, width, height)
640
641    # Find and vertically center a selected task (from the filter or from a dependent box)
642 # The 'task_filter_str' can be a full or a partial (filter) task name
643 def find(self,task_filter_str):
644 found = False
645 max = self.height-2
646 if not task_filter_str:
647 return(found)
648 for i,task_obj in enumerate(self.task_list):
649 task = task_obj[TASK_NAME]
650 if task.startswith(task_filter_str):
651 self.cursor_on(False)
652 self.cursor_index = i
653
654 # Position selected at vertical center
655 vcenter = self.inside_height // 2
656 if self.cursor_index <= vcenter:
657 self.scroll_offset = 0
658 self.cursor_offset = self.cursor_index
659 elif self.cursor_index >= (len(self.task_list) - vcenter - 1):
660 self.cursor_offset = self.inside_height-1
661 self.scroll_offset = self.cursor_index - self.cursor_offset
662 else:
663 self.cursor_offset = vcenter
664 self.scroll_offset = self.cursor_index - self.cursor_offset
665
666 self.redraw()
667 self.cursor_on(True)
668 found = True
669 break
670 return(found)
671
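# Worked example of the centering above (illustrative): with inside_height
# == 18 (vcenter == 9) and a match at index 50 of 200 tasks, the middle
# branch applies: cursor_offset == 9, scroll_offset == 41, and the match
# lands on the box's center row.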
672# The view of dependent packages
673class PackageDepView(NBox):
674 def __init__(self, screen, label, primary, base_x, base_y, width, height):
675 super(PackageDepView, self).__init__(screen, label, primary, base_x, base_y, width, height)
676
677# The view of reverse-dependent packages
678class PackageReverseDepView(NBox):
679 def __init__(self, screen, label, primary, base_x, base_y, width, height):
680 super(PackageReverseDepView, self).__init__(screen, label, primary, base_x, base_y, width, height)
681
682
683#################################################
684### DepExplorer : The parent frame and object
685###
686
687class DepExplorer(NBox):
688 def __init__(self,screen):
689        title = "Task Dependency Explorer"
690        super(DepExplorer, self).__init__(screen, title,'',0,0,80,23)
691
692 self.screen = screen
693 self.pkg_model = []
694 self.depends_model = []
695 self.dep_sort_map = {}
696 self.bb_sort_map = {}
697 self.filter_str = ''
698 self.filter_prev = 'deadbeef'
699
700 if self.screen:
701 self.help_bar_view = HelpBarView(screen, "Help",'',1,1,79,1)
702 self.help_box_view = HelpBoxView(screen, "Help",'',0,2,40,20,self)
703 self.progress_view = ProgressView(screen, "Progress",'',2,1,76,1)
704 self.filter_view = FilterView(screen, "Filter",'',2,1,76,1)
705 self.package_view = PackageView(screen, "Package",'alpha', 0,2,40,20)
706 self.dep_view = PackageDepView(screen, "Dependencies",'beta',40,2,40,10)
707 self.reverse_view = PackageReverseDepView(screen, "Dependent Tasks",'gamma',40,13,40,9)
708 self.draw_frames()
709
710 # Draw this main window's frame and all sub-windows
711 def draw_frames(self):
712 self.draw_frame()
713 self.package_view.draw_frame()
714 self.dep_view.draw_frame()
715 self.reverse_view.draw_frame()
716 if is_filter:
717 self.filter_view.show(True)
718 self.filter_view.show_prompt()
719 else:
720 self.help_bar_view.show_help(True)
721 self.package_view.redraw()
722 self.dep_view.redraw()
723 self.reverse_view.redraw()
724 self.show_selected(self.package_view.get_selected())
725 line_art_fixup(self)
726
727 # Parse the bitbake dependency event object
728 def parse(self, depgraph):
729 for task in depgraph["tdepends"]:
730 self.pkg_model.insert(0, task)
731 for depend in depgraph["tdepends"][task]:
732 self.depends_model.insert (0, (TYPE_DEP, task, depend))
733 self.depends_model.insert (0, (TYPE_RDEP, depend, task))
734 if self.screen:
735 self.dep_sort_prep()
736
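# Illustrative sketch (hypothetical tasks, not bitbake output): the shape of
# the depgraph consumed by parse() above.
#
#   depgraph = {"tdepends": {
#       "acl.do_compile": ["acl.do_configure"],
#       "acl.do_configure": ["acl.do_patch", "gcc.do_populate_sysroot"],
#   }}
#
# Each task maps to its direct dependencies; parse() records a TYPE_DEP edge
# per pair plus the mirrored TYPE_RDEP edge for reverse lookups.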
737 # Prepare the dependency sort order keys
738    # This method creates sort keys per recipe task in
739    # the order of each recipe's internal dependencies
740    # Method:
741    #   Filter the tasks in dep order into dep_sort_map = {}
742    #   (a) Find a task that has no dependencies
743    #       Ignore non-recipe-specific tasks
744    #   (b) Add it to the sort mapping dict with
745    #       a key of "<task_group>_<order>"
746    #   (c) Remove it as a dependency from the other tasks
747    #   (d) Repeat until all tasks are mapped
748    # Use placeholders to ensure each sub-dict is instantiated
749 def dep_sort_prep(self):
750 self.progress_view.progress('DepSort',0,4)
751 # Init the task base entries
752 self.progress_view.progress('DepSort',1,4)
753 dep_table = {}
754 bb_index = 0
755 for task in self.pkg_model:
756 # First define the incoming bitbake sort order
757 self.bb_sort_map[task] = "%04d" % (bb_index)
758 bb_index += 1
759 task_group = task[0:task.find('.')]
760 if task_group not in dep_table:
761 dep_table[task_group] = {}
762 dep_table[task_group]['-'] = {} # Placeholder
763 if task not in dep_table[task_group]:
764 dep_table[task_group][task] = {}
765 dep_table[task_group][task]['-'] = {} # Placeholder
766        # Add the task dependency entries
767 self.progress_view.progress('DepSort',2,4)
768 for task_obj in self.depends_model:
769 if task_obj[DEPENDS_TYPE] != TYPE_DEP:
770 continue
771 task = task_obj[DEPENDS_TASK]
772 task_dep = task_obj[DEPENDS_DEPS]
773 task_group = task[0:task.find('.')]
774 # Only track depends within same group
775 if task_dep.startswith(task_group+'.'):
776 dep_table[task_group][task][task_dep] = 1
777 self.progress_view.progress('DepSort',3,4)
778 for task_group in dep_table:
779 dep_index = 0
780 # Whittle down the tasks of each group
781 this_pass = 1
782 do_loop = True
783 while (len(dep_table[task_group]) > 1) and do_loop:
784 this_pass += 1
785 is_change = False
786 delete_list = []
787 for task in dep_table[task_group]:
788 if '-' == task:
789 continue
790 if 1 == len(dep_table[task_group][task]):
791 is_change = True
792 # No more deps, so collect this task...
793 self.dep_sort_map[task] = "%s_%04d" % (task_group,dep_index)
794 dep_index += 1
795 # ... remove it from other lists as resolved ...
796 for dep_task in dep_table[task_group]:
797 if task in dep_table[task_group][dep_task]:
798 del dep_table[task_group][dep_task][task]
799                        # ... and remove it from the task group
800 delete_list.append(task)
801 for task in delete_list:
802 del dep_table[task_group][task]
803 if not is_change:
804 alert("ERROR:DEP_SIEVE_NO_CHANGE:%s" % task_group,self.screen)
805 do_loop = False
806 continue
807 self.progress_view.progress('',4,4)
808 self.progress_view.clear()
809 self.help_bar_view.show_help(True)
810 if len(self.dep_sort_map) != len(self.pkg_model):
811 alert("ErrorDepSort:%d/%d" % (len(self.dep_sort_map),len(self.pkg_model)),self.screen)
812
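# Illustrative sketch of the sieve above on hypothetical data: tasks with no
# remaining intra-group dependencies get the next "<group>_<order>" key and
# are removed from the other tasks' dependency sets.
#
#   deps = {"a.do_fetch": set(), "a.do_patch": {"a.do_fetch"}}
#   order = 0
#   while deps:
#       for t in [t for t, d in deps.items() if not d]:
#           print("%s -> a_%04d" % (t, order))
#           order += 1
#           deps.pop(t)
#           for d in deps.values():
#               d.discard(t)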
813 # Look up a dep sort order key
814 def get_dep_sort(self,key):
815 if key in self.dep_sort_map:
816 return(self.dep_sort_map[key])
817 else:
818 return(key)
819
820 # Look up a bitbake sort order key
821 def get_bb_sort(self,key):
822 if key in self.bb_sort_map:
823 return(self.bb_sort_map[key])
824 else:
825 return(key)
826
827    # Find the selected package in the main frame, update the dependency frames' content accordingly
828 def select(self, package_name, only_update_dependents=False):
829 if not package_name:
830 package_name = self.package_view.get_selected()
831 # alert("SELECT:%s:" % package_name,self.screen)
832
833 if self.filter_str != self.filter_prev:
834 self.package_view.cursor_on(False)
835            # Fill the main package task list using the new filter
836 self.package_view.task_list = []
837 for package in self.pkg_model:
838 if self.filter_str:
839 if self.filter_str in package:
840 self.package_view.task_list_append(package,self)
841 else:
842 self.package_view.task_list_append(package,self)
843 self.package_view.sort()
844 self.filter_prev = self.filter_str
845
846            # Old position is lost; re-establish the position of the previous task (if still filtered in)
847 self.package_view.cursor_index = 0
848 self.package_view.cursor_offset = 0
849 self.package_view.scroll_offset = 0
850 self.package_view.redraw()
851 self.package_view.cursor_on(True)
852
853 # Make sure the selected package is in view, with implicit redraw()
854 if (not only_update_dependents):
855 self.package_view.find(package_name)
856            # In case the selected name changed (i.e. the filter removed the previous one)
857 package_name = self.package_view.get_selected()
858
859 # Filter the package's dependent list to the dependent view
860 self.dep_view.reset()
861 for package_def in self.depends_model:
862 if (package_def[DEPENDS_TYPE] == TYPE_DEP) and (package_def[DEPENDS_TASK] == package_name):
863 self.dep_view.task_list_append(package_def[DEPENDS_DEPS],self)
864 self.dep_view.sort()
865 self.dep_view.redraw()
866 # Filter the package's dependent list to the reverse dependent view
867 self.reverse_view.reset()
868 for package_def in self.depends_model:
869 if (package_def[DEPENDS_TYPE] == TYPE_RDEP) and (package_def[DEPENDS_TASK] == package_name):
870 self.reverse_view.task_list_append(package_def[DEPENDS_DEPS],self)
871 self.reverse_view.sort()
872 self.reverse_view.redraw()
873 self.show_selected(package_name)
874 self.screen.refresh()
875
876 # The print-to-file method
877 def print_deps(self,whole_group=False):
878 global is_printed
879 # Print the selected deptree(s) to a file
880 if not is_printed:
881 try:
882                # Move any existing file to backup before the first write
883 if os.path.isfile(print_file_name):
884 os.system('mv -f %s %s' % (print_file_name,print_file_backup_name))
885 except Exception as e:
886 alert(e,self.screen)
887 alert('',self.screen)
888 print_list = []
889 selected_task = self.package_view.get_selected()
890 if not selected_task:
891 return
892 if not whole_group:
893 print_list.append(selected_task)
894 else:
895 # Use the presorted task_group order from 'package_view'
896 task_group = selected_task[0:selected_task.find('.')+1]
897 for task_obj in self.package_view.task_list:
898 task = task_obj[TASK_NAME]
899 if task.startswith(task_group):
900 print_list.append(task)
901 with open(print_file_name, "a") as fd:
902 print_max = len(print_list)
903 print_count = 1
904 self.progress_view.progress('Write "%s"' % print_file_name,0,print_max)
905 for task in print_list:
906 print_count = self.progress_view.progress('',print_count,print_max)
907 self.select(task)
908                self.screen.refresh()
909 # Utilize the current print output model
910 if print_model == PRINT_MODEL_1:
911                    print("=== Dependency Snapshot ===",file=fd)
912 print(" = Package =",file=fd)
913 print(' '+task,file=fd)
914 # Fill in the matching dependencies
915 print(" = Dependencies =",file=fd)
916 for task_obj in self.dep_view.task_list:
917 print(' '+ task_obj[TASK_NAME],file=fd)
918 print(" = Dependent Tasks =",file=fd)
919 for task_obj in self.reverse_view.task_list:
920 print(' '+ task_obj[TASK_NAME],file=fd)
921 if print_model == PRINT_MODEL_2:
922                    print("=== Dependency Snapshot ===",file=fd)
923 dep_count = len(self.dep_view.task_list) - 1
924 for i,task_obj in enumerate(self.dep_view.task_list):
925 print('%s%s' % ("Dep =" if (i==dep_count) else " ",task_obj[TASK_NAME]),file=fd)
926 if not self.dep_view.task_list:
927 print('Dep =',file=fd)
928 print("Package=%s" % task,file=fd)
929 for i,task_obj in enumerate(self.reverse_view.task_list):
930 print('%s%s' % ("RDep =" if (i==0) else " ",task_obj[TASK_NAME]),file=fd)
931 if not self.reverse_view.task_list:
932 print('RDep =',file=fd)
933 curses.napms(2000)
934 self.progress_view.clear()
935 self.help_bar_view.show_help(True)
936 print('',file=fd)
937 # Restore display to original selected task
938 self.select(selected_task)
939 is_printed = True
940
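# Illustrative output sketch for the two print models above (hypothetical
# task names). PRINT_MODEL_1 writes one labeled section per task:
#
#   === Dependency Snapshot ===
#    = Package =
#     acl.do_compile
#    = Dependencies =
#     acl.do_configure
#    = Dependent Tasks =
#     acl.do_install
#
# PRINT_MODEL_2 instead writes a compact Dep/Package/RDep triple per task.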
941#################################################
942### Load bitbake data
943###
944
945def bitbake_load(server, eventHandler, params, dep, curses_off, screen):
946 global bar_len_old
947 bar_len_old = 0
948
949    # Support running with no screen (e.g. unit test mode)
950 def progress(msg,count,max):
951 global bar_len_old
952 if screen:
953 dep.progress_view.progress(msg,count,max)
954 else:
955 if msg:
956 if bar_len_old:
957 bar_len_old = 0
958 print("\n")
959 print(f"{msg}: ({count} of {max})")
960 else:
961 bar_len = int((count*40)/max)
962 if bar_len_old != bar_len:
963 print(f"{'*' * (bar_len-bar_len_old)}",end='',flush=True)
964 bar_len_old = bar_len
965 def clear():
966 if screen:
967 dep.progress_view.clear()
968 def clear_curses(screen):
969 if screen:
970 curses_off(screen)
971
972 #
973 # Trigger bitbake "generateDepTreeEvent"
974 #
975
976 cmdline = ''
977 try:
978 params.updateToServer(server, os.environ.copy())
979 params.updateFromServer(server)
980 cmdline = params.parseActions()
981 if not cmdline:
982 clear_curses(screen)
983 print("ERROR: nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
984 return 1,cmdline
985 if 'msg' in cmdline and cmdline['msg']:
986 clear_curses(screen)
987 print('ERROR: ' + cmdline['msg'])
988 return 1,cmdline
989 cmdline = cmdline['action']
990 if not cmdline or cmdline[0] != "generateDotGraph":
991 clear_curses(screen)
992 print("ERROR: This UI requires the -g option")
993 return 1,cmdline
994 ret, error = server.runCommand(["generateDepTreeEvent", cmdline[1], cmdline[2]])
995 if error:
996 clear_curses(screen)
997 print("ERROR: running command '%s': %s" % (cmdline, error))
998 return 1,cmdline
999 elif not ret:
1000 clear_curses(screen)
1001 print("ERROR: running command '%s': returned %s" % (cmdline, ret))
1002 return 1,cmdline
1003 except client.Fault as x:
1004 clear_curses(screen)
1005 print("ERROR: XMLRPC Fault getting commandline:\n %s" % x)
1006 return 1,cmdline
1007 except Exception as e:
1008 clear_curses(screen)
1009 print("ERROR: in startup:\n %s" % traceback.format_exc())
1010 return 1,cmdline
1011
1012 #
1013 # Receive data from bitbake
1014 #
1015
1016 progress_total = 0
1017 load_bitbake = True
1018    quit = False
    shutdown = 0  # guard for the KeyboardInterrupt handler below
1019 try:
1020 while load_bitbake:
1021 try:
1022 event = eventHandler.waitEvent(0.25)
1023 if quit:
1024 _, error = server.runCommand(["stateForceShutdown"])
1025 clear_curses(screen)
1026 if error:
1027 print('Unable to cleanly stop: %s' % error)
1028 break
1029
1030 if event is None:
1031 continue
1032
1033 if isinstance(event, bb.event.CacheLoadStarted):
1034 progress_total = event.total
1035 progress('Loading Cache',0,progress_total)
1036 continue
1037
1038 if isinstance(event, bb.event.CacheLoadProgress):
1039 x = event.current
1040 progress('',x,progress_total)
1041 continue
1042
1043 if isinstance(event, bb.event.CacheLoadCompleted):
1044 clear()
1045 progress('Bitbake... ',1,2)
1046 continue
1047
1048 if isinstance(event, bb.event.ParseStarted):
1049 progress_total = event.total
1050 progress('Processing recipes',0,progress_total)
1051 if progress_total == 0:
1052 continue
1053
1054 if isinstance(event, bb.event.ParseProgress):
1055 x = event.current
1056 progress('',x,progress_total)
1057 continue
1058
1059 if isinstance(event, bb.event.ParseCompleted):
1060 progress('Generating dependency tree',0,3)
1061 continue
1062
1063 if isinstance(event, bb.event.DepTreeGenerated):
1064 progress('Generating dependency tree',1,3)
1065 dep.parse(event._depgraph)
1066 progress('Generating dependency tree',2,3)
1067
1068 if isinstance(event, bb.command.CommandCompleted):
1069 load_bitbake = False
1070 progress('Generating dependency tree',3,3)
1071 clear()
1072 if screen:
1073 dep.help_bar_view.show_help(True)
1074 continue
1075
1076 if isinstance(event, bb.event.NoProvider):
1077 clear_curses(screen)
1078 print('ERROR: %s' % event)
1079
1080 _, error = server.runCommand(["stateShutdown"])
1081 if error:
1082 print('ERROR: Unable to cleanly shutdown: %s' % error)
1083 return 1,cmdline
1084
1085 if isinstance(event, bb.command.CommandFailed):
1086 clear_curses(screen)
1087 print('ERROR: ' + str(event))
1088 return event.exitcode,cmdline
1089
1090 if isinstance(event, bb.command.CommandExit):
1091 clear_curses(screen)
1092 return event.exitcode,cmdline
1093
1094 if isinstance(event, bb.cooker.CookerExit):
1095 break
1096
1097 continue
1098 except EnvironmentError as ioerror:
1099 # ignore interrupted io
1100 if ioerror.args[0] == 4:
1101 pass
1102 except KeyboardInterrupt:
1103 if shutdown == 2:
1104 clear_curses(screen)
1105 print("\nThird Keyboard Interrupt, exit.\n")
1106 break
1107 if shutdown == 1:
1108 clear_curses(screen)
1109 print("\nSecond Keyboard Interrupt, stopping...\n")
1110 _, error = server.runCommand(["stateForceShutdown"])
1111 if error:
1112 print('Unable to cleanly stop: %s' % error)
1113 if shutdown == 0:
1114 clear_curses(screen)
1115 print("\nKeyboard Interrupt, closing down...\n")
1116 _, error = server.runCommand(["stateShutdown"])
1117 if error:
1118 print('Unable to cleanly shutdown: %s' % error)
1119 shutdown = shutdown + 1
1120 pass
1121 except Exception as e:
1122 # Safe exit on error
1123 clear_curses(screen)
1124 print("Exception : %s" % e)
1125        print("Exception in event loop:\n %s" % traceback.format_exc())
1126
1127 return 0,cmdline
1128
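# Illustrative sketch (condensed from the loop above, not a replacement for
# it): the polling protocol bitbake_load() follows.
#
#   while True:
#       event = eventHandler.waitEvent(0.25)   # None on timeout
#       if event is None:
#           continue
#       if isinstance(event, bb.event.DepTreeGenerated):
#           dep.parse(event._depgraph)
#       if isinstance(event, bb.command.CommandCompleted):
#           break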
1129#################################################
1130### main
1131###
1132
1133SCREEN_COL_MIN = 83
1134SCREEN_ROW_MIN = 26
1135
1136def main(server, eventHandler, params):
1137 global verbose
1138 global sort_model
1139 global print_model
1140 global is_printed
1141 global is_filter
1142 global screen_too_small
1143
1144 shutdown = 0
1145 screen_too_small = False
1146 quit = False
1147
1148 # Unit test with no terminal?
1149 if unit_test_noterm:
1150 # Load bitbake, test that there is valid dependency data, then exit
1151 screen = None
1152 print("* UNIT TEST:START")
1153 dep = DepExplorer(screen)
1154 print("* UNIT TEST:BITBAKE FETCH")
1155 ret,cmdline = bitbake_load(server, eventHandler, params, dep, None, screen)
1156 if ret:
1157 print("* UNIT TEST: BITBAKE FAILED")
1158 return ret
1159 # Test the acquired dependency data
1160 quilt_native_deps = 0
1161 quilt_native_rdeps = 0
1162 quilt_deps = 0
1163 quilt_rdeps = 0
1164        for task_obj in dep.depends_model:
1165 if TYPE_DEP == task_obj[0]:
1166 task = task_obj[1]
1167 if task.startswith('quilt-native'):
1168 quilt_native_deps += 1
1169 elif task.startswith('quilt'):
1170 quilt_deps += 1
1171 elif TYPE_RDEP == task_obj[0]:
1172 task = task_obj[1]
1173 if task.startswith('quilt-native'):
1174 quilt_native_rdeps += 1
1175 elif task.startswith('quilt'):
1176 quilt_rdeps += 1
1177 # Print results
1178 failed = False
1179 if 0 < len(dep.depends_model):
1180 print(f"Pass:Bitbake dependency count = {len(dep.depends_model)}")
1181 else:
1182 failed = True
1183            print("FAIL:Bitbake dependency count = 0")
1184 if quilt_native_deps:
1185 print(f"Pass:Quilt-native depends count = {quilt_native_deps}")
1186 else:
1187 failed = True
1188            print("FAIL:Quilt-native depends count = 0")
1189 if quilt_native_rdeps:
1190 print(f"Pass:Quilt-native rdepends count = {quilt_native_rdeps}")
1191 else:
1192 failed = True
1193            print("FAIL:Quilt-native rdepends count = 0")
1194 if quilt_deps:
1195 print(f"Pass:Quilt depends count = {quilt_deps}")
1196 else:
1197 failed = True
1198            print("FAIL:Quilt depends count = 0")
1199 if quilt_rdeps:
1200 print(f"Pass:Quilt rdepends count = {quilt_rdeps}")
1201 else:
1202 failed = True
1203            print("FAIL:Quilt rdepends count = 0")
1204 print("* UNIT TEST:STOP")
1205 return failed
1206
1207    # Helper method to dynamically test whether the parent window is too small
1208 def check_screen_size(dep, active_package):
1209 global screen_too_small
1210 rows, cols = screen.getmaxyx()
1211 if (rows >= SCREEN_ROW_MIN) and (cols >= SCREEN_COL_MIN):
1212 if screen_too_small:
1213 # Now big enough, remove error message and redraw screen
1214 dep.draw_frames()
1215 active_package.cursor_on(True)
1216 screen_too_small = False
1217 return True
1218 # Test on App init
1219 if not dep:
1220 # Do not start this app if screen not big enough
1221 curses.endwin()
1222 print("")
1223            print("ERROR(Taskexp_cli): Minimal screen size is %dx%d" % (SCREEN_COL_MIN,SCREEN_ROW_MIN))
1224            print("Current screen is Cols=%d,Rows=%d" % (cols,rows))
1225 return False
1226 # First time window too small
1227 if not screen_too_small:
1228 active_package.cursor_on(False)
1229 dep.screen.addstr(0,2,'[BIGGER WINDOW PLEASE]', curses.color_pair(CURSES_WARNING) | curses.A_BLINK)
1230 screen_too_small = True
1231 return False
1232
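# Illustrative note: screen.getmaxyx() returns (rows, cols), so the check
# above demands at least a 26-row by 83-column terminal (SCREEN_ROW_MIN x
# SCREEN_COL_MIN) before the 40/80-column frames can be drawn.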
1233 # Helper method to turn off curses mode
1234 def curses_off(screen):
1235 if not screen: return
1236 # Safe error exit
1237 screen.keypad(False)
1238 curses.echo()
1239 curses.curs_set(1)
1240 curses.endwin()
1241
1242 if unit_test_results:
1243 print('\nUnit Test Results:')
1244 for line in unit_test_results:
1245 print(" %s" % line)
1246
1247 #
1248 # Initialize the ncurse environment
1249 #
1250
1251 screen = curses.initscr()
1252 try:
1253 if not check_screen_size(None, None):
1254 exit(1)
1255 try:
1256 curses.start_color()
1257            curses.use_default_colors()
1258            curses.init_pair(0xFF, curses.COLOR_BLACK, curses.COLOR_WHITE)
1259 curses.init_pair(CURSES_NORMAL, curses.COLOR_WHITE, curses.COLOR_BLACK)
1260 curses.init_pair(CURSES_HIGHLIGHT, curses.COLOR_WHITE, curses.COLOR_BLUE)
1261 curses.init_pair(CURSES_WARNING, curses.COLOR_WHITE, curses.COLOR_RED)
1262 except:
1263 curses.endwin()
1264 print("")
1265 print("ERROR(Taskexp_cli): Requires 256 colors. Please use this or the equivalent:")
1266 print(" $ export TERM='xterm-256color'")
1267 exit(1)
1268
1269 screen.keypad(True)
1270 curses.noecho()
1271 curses.curs_set(0)
1272        screen.refresh()
1273 except Exception as e:
1274 # Safe error exit
1275 curses_off(screen)
1276 print("Exception : %s" % e)
1277 print("Exception in startup:\n %s" % traceback.format_exc())
1278 exit(1)
1279
1280 try:
1281 #
1282 # Instantiate the presentation layers
1283 #
1284
1285 dep = DepExplorer(screen)
1286
1287 #
1288 # Prepare bitbake
1289 #
1290
1291        # Fetch the bitbake dependency data
1292 ret,cmdline = bitbake_load(server, eventHandler, params, dep, curses_off, screen)
1293 if ret: return ret
1294
1295 #
1296 # Preset the views
1297 #
1298
1299 # Cmdline example = ['generateDotGraph', ['acl', 'zlib'], 'build']
1300 primary_packages = cmdline[1]
1301 dep.package_view.set_primary(primary_packages)
1302 dep.dep_view.set_primary(primary_packages)
1303 dep.reverse_view.set_primary(primary_packages)
1304 dep.help_box_view.set_primary(primary_packages)
1305 dep.help_bar_view.show_help(True)
1306 active_package = dep.package_view
1307 active_package.cursor_on(True)
1308 dep.select(primary_packages[0]+'.')
1309 if unit_test:
1310 alert('UNIT_TEST',screen)
1311
1312        # Helper method to start/stop the filter feature
1313 def filter_mode(new_filter_status):
1314 global is_filter
1315 if is_filter == new_filter_status:
1316 # Ignore no changes
1317 return
1318 if not new_filter_status:
1319 # Turn off
1320 curses.curs_set(0)
1321 #active_package.cursor_on(False)
1322 active_package = dep.package_view
1323 active_package.cursor_on(True)
1324 is_filter = False
1325 dep.help_bar_view.show_help(True)
1326 dep.filter_str = ''
1327 dep.select('')
1328 else:
1329 # Turn on
1330 curses.curs_set(1)
1331 dep.help_bar_view.show_help(False)
1332 dep.filter_view.clear()
1333 dep.filter_view.show(True)
1334 dep.filter_view.show_prompt()
1335 is_filter = True
1336
1337 #
1338 # Main user loop
1339 #
1340
1341 while not quit:
1342 if is_filter:
1343 dep.filter_view.show_prompt()
1344 if unit_test:
1345 c = unit_test_action(active_package)
1346 else:
1347 c = screen.getch()
1348 ch = chr(c)
1349
1350            # Do not draw if the window is now too small
1351 if not check_screen_size(dep,active_package):
1352 continue
1353
1354 if verbose:
1355 if c == CHAR_RETURN:
1356 screen.addstr(0, 4, "|%3d,CR |" % (c))
1357 else:
1358 screen.addstr(0, 4, "|%3d,%3s|" % (c,chr(c)))
1359
1360 # pre-map alternate filter close keys
1361 if is_filter and (c == CHAR_ESCAPE):
1362 # Alternate exit from filter
1363 ch = '/'
1364 c = ord(ch)
1365
1366 # Filter and non-filter mode command keys
1367 # https://docs.python.org/3/library/curses.html
1368 if c in (curses.KEY_UP,CHAR_UP):
1369 active_package.line_up()
1370 if active_package == dep.package_view:
1371 dep.select('',only_update_dependents=True)
1372 elif c in (curses.KEY_DOWN,CHAR_DOWN):
1373 active_package.line_down()
1374 if active_package == dep.package_view:
1375 dep.select('',only_update_dependents=True)
1376 elif curses.KEY_PPAGE == c:
1377 active_package.page_up()
1378 if active_package == dep.package_view:
1379 dep.select('',only_update_dependents=True)
1380 elif curses.KEY_NPAGE == c:
1381 active_package.page_down()
1382 if active_package == dep.package_view:
1383 dep.select('',only_update_dependents=True)
1384 elif CHAR_TAB == c:
1385 # Tab between boxes
1386 active_package.cursor_on(False)
1387 if active_package == dep.package_view:
1388 active_package = dep.dep_view
1389 elif active_package == dep.dep_view:
1390 active_package = dep.reverse_view
1391 else:
1392 active_package = dep.package_view
1393 active_package.cursor_on(True)
1394 elif curses.KEY_BTAB == c:
1395 # Shift-Tab reverse between boxes
1396 active_package.cursor_on(False)
1397 if active_package == dep.package_view:
1398 active_package = dep.reverse_view
1399 elif active_package == dep.reverse_view:
1400 active_package = dep.dep_view
1401 else:
1402 active_package = dep.package_view
1403 active_package.cursor_on(True)
1404 elif (CHAR_RETURN == c):
1405 # CR to select
1406 selected = active_package.get_selected()
1407 if selected:
1408 active_package.cursor_on(False)
1409 active_package = dep.package_view
1410 filter_mode(False)
1411 dep.select(selected)
1412 else:
1413 filter_mode(False)
1414 dep.select(primary_packages[0]+'.')
1415
1416 elif '/' == ch: # Enter/exit dep.filter_view
1417 if is_filter:
1418 filter_mode(False)
1419 else:
1420 filter_mode(True)
1421 elif is_filter:
1422 # If in filter mode, re-direct all these other keys to the filter box
1423 result = dep.filter_view.input(c,ch)
1424 dep.filter_str = dep.filter_view.filter_str
1425 dep.select('')
1426
1427 # Non-filter mode command keys
1428 elif 'p' == ch:
1429 dep.print_deps(whole_group=False)
1430 elif 'P' == ch:
1431 dep.print_deps(whole_group=True)
1432 elif 'w' == ch:
1433 # Toggle the print model
1434 if print_model == PRINT_MODEL_1:
1435 print_model = PRINT_MODEL_2
1436 else:
1437 print_model = PRINT_MODEL_1
1438 elif 's' == ch:
1439 # Toggle the sort model
1440 if sort_model == SORT_DEPS:
1441 sort_model = SORT_ALPHA
1442 elif sort_model == SORT_ALPHA:
1443 if SORT_BITBAKE_ENABLE:
1444                        sort_model = SORT_BITBAKE
1445 else:
1446 sort_model = SORT_DEPS
1447 else:
1448 sort_model = SORT_DEPS
1449 active_package.cursor_on(False)
1450 current_task = active_package.get_selected()
1451 dep.package_view.sort()
1452 dep.dep_view.sort()
1453 dep.reverse_view.sort()
1454 active_package = dep.package_view
1455 active_package.cursor_on(True)
1456 dep.select(current_task)
1457 # Announce the new sort model
1458                alert("SORT=%s" % ("ALPHA" if (sort_model == SORT_ALPHA) else ("BITBAKE" if (sort_model == SORT_BITBAKE) else "DEPS")),screen)
1459 alert('',screen)
1460
1461 elif 'q' == ch:
1462 quit = True
1463 elif ch in ('h','?'):
1464 dep.help_box_view.show_help(True)
1465 dep.select(active_package.get_selected())
1466
1467 #
1468 # Debugging commands
1469 #
1470
1471 elif 'V' == ch:
1472 verbose = not verbose
1473 alert('Verbose=%s' % str(verbose),screen)
1474 alert('',screen)
1475 elif 'R' == ch:
1476 screen.refresh()
1477 elif 'B' == ch:
1478 # Progress bar unit test
1479 dep.progress_view.progress('Test',0,40)
1480 curses.napms(1000)
1481 dep.progress_view.progress('',10,40)
1482 curses.napms(1000)
1483 dep.progress_view.progress('',20,40)
1484 curses.napms(1000)
1485 dep.progress_view.progress('',30,40)
1486 curses.napms(1000)
1487 dep.progress_view.progress('',40,40)
1488 curses.napms(1000)
1489 dep.progress_view.clear()
1490 dep.help_bar_view.show_help(True)
1491 elif 'Q' == ch:
1492 # Simulated error
1493 curses_off(screen)
1494 print('ERROR: simulated error exit')
1495 return 1
1496
1497 # Safe exit
1498 curses_off(screen)
1499 except Exception as e:
1500 # Safe exit on error
1501 curses_off(screen)
1502 print("Exception : %s" % e)
1503        print("Exception in main loop:\n %s" % traceback.format_exc())
1504
1505 # Reminder to pick up your printed results
1506 if is_printed:
1507 print("")
1508 print("You have output ready!")
1509 print(" * Your printed dependency file is: %s" % print_file_name)
1510        print("  * Your previous results are saved in: %s" % print_file_backup_name)
1511 print("")
diff --git a/bitbake/lib/bb/ui/teamcity.py b/bitbake/lib/bb/ui/teamcity.py
index fca46c2874..7eeaab8d63 100644
--- a/bitbake/lib/bb/ui/teamcity.py
+++ b/bitbake/lib/bb/ui/teamcity.py
@@ -30,7 +30,6 @@ import bb.build
 import bb.command
 import bb.cooker
 import bb.event
-import bb.exceptions
 import bb.runqueue
 from bb.ui import uihelper
 
@@ -102,10 +101,6 @@ class TeamcityLogFormatter(logging.Formatter):
         details = ""
         if hasattr(record, 'bb_exc_formatted'):
             details = ''.join(record.bb_exc_formatted)
-        elif hasattr(record, 'bb_exc_info'):
-            etype, value, tb = record.bb_exc_info
-            formatted = bb.exceptions.format_exception(etype, value, tb, limit=5)
-            details = ''.join(formatted)
 
         if record.levelno in [bb.msg.BBLogFormatter.ERROR, bb.msg.BBLogFormatter.CRITICAL]:
             # ERROR gets a separate errorDetails field
diff --git a/bitbake/lib/bb/ui/toasterui.py b/bitbake/lib/bb/ui/toasterui.py
index ec5bd4f105..6bd21f1844 100644
--- a/bitbake/lib/bb/ui/toasterui.py
+++ b/bitbake/lib/bb/ui/toasterui.py
@@ -385,7 +385,7 @@ def main(server, eventHandler, params):
             main.shutdown = 1
 
             logger.info("ToasterUI build done, brbe: %s", brbe)
-            continue
+            break
 
         if isinstance(event, (bb.command.CommandCompleted,
                               bb.command.CommandFailed,
diff --git a/bitbake/lib/bb/ui/uievent.py b/bitbake/lib/bb/ui/uievent.py
index 8607d0523b..c2f830d530 100644
--- a/bitbake/lib/bb/ui/uievent.py
+++ b/bitbake/lib/bb/ui/uievent.py
@@ -44,7 +44,7 @@ class BBUIEventQueue:
         for count_tries in range(5):
             ret = self.BBServer.registerEventHandler(self.host, self.port)
 
-            if isinstance(ret, collections.Iterable):
+            if isinstance(ret, collections.abc.Iterable):
                 self.EventHandle, error = ret
             else:
                 self.EventHandle = ret
@@ -65,35 +65,27 @@ class BBUIEventQueue:
         self.server = server
 
         self.t = threading.Thread()
-        self.t.setDaemon(True)
+        self.t.daemon = True
         self.t.run = self.startCallbackHandler
         self.t.start()
 
     def getEvent(self):
-
-        self.eventQueueLock.acquire()
-
-        if len(self.eventQueue) == 0:
-            self.eventQueueLock.release()
-            return None
-
-        item = self.eventQueue.pop(0)
-
-        if len(self.eventQueue) == 0:
-            self.eventQueueNotify.clear()
-
-        self.eventQueueLock.release()
-        return item
+        with bb.utils.lock_timeout(self.eventQueueLock):
+            if not self.eventQueue:
+                return None
+            item = self.eventQueue.pop(0)
+            if not self.eventQueue:
+                self.eventQueueNotify.clear()
+            return item
 
     def waitEvent(self, delay):
         self.eventQueueNotify.wait(delay)
         return self.getEvent()
 
     def queue_event(self, event):
-        self.eventQueueLock.acquire()
-        self.eventQueue.append(event)
-        self.eventQueueNotify.set()
-        self.eventQueueLock.release()
+        with bb.utils.lock_timeout(self.eventQueueLock):
+            self.eventQueue.append(event)
+            self.eventQueueNotify.set()
 
     def send_event(self, event):
         self.queue_event(pickle.loads(event))
diff --git a/bitbake/lib/bb/ui/uihelper.py b/bitbake/lib/bb/ui/uihelper.py
index 48d808ae28..e6983bd559 100644
--- a/bitbake/lib/bb/ui/uihelper.py
+++ b/bitbake/lib/bb/ui/uihelper.py
@@ -31,7 +31,7 @@ class BBUIHelper:
 
         if isinstance(event, bb.build.TaskStarted):
             tid = event._fn + ":" + event._task
-            if event._mc != "default":
+            if event._mc != "":
                 self.running_tasks[tid] = { 'title' : "mc:%s:%s %s" % (event._mc, event._package, event._task), 'starttime' : time.time(), 'pid' : event.pid }
             else:
                 self.running_tasks[tid] = { 'title' : "%s %s" % (event._package, event._task), 'starttime' : time.time(), 'pid' : event.pid }
@@ -49,9 +49,11 @@ class BBUIHelper:
             tid = event._fn + ":" + event._task
             removetid(event.pid, tid)
             self.failed_tasks.append( { 'title' : "%s %s" % (event._package, event._task)})
-        elif isinstance(event, bb.runqueue.runQueueTaskStarted):
-            self.tasknumber_current = event.stats.completed + event.stats.active + event.stats.failed + 1
+        elif isinstance(event, bb.runqueue.runQueueTaskStarted) or isinstance(event, bb.runqueue.sceneQueueTaskStarted):
+            self.tasknumber_current = event.stats.completed + event.stats.active + event.stats.failed
             self.tasknumber_total = event.stats.total
+            self.setscene_current = event.stats.setscene_active + event.stats.setscene_covered + event.stats.setscene_notcovered
+            self.setscene_total = event.stats.setscene_total
             self.needUpdate = True
         elif isinstance(event, bb.build.TaskProgress):
             if event.pid > 0 and event.pid in self.pidmap:
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py
index b282d09abf..1cc74ed546 100644
--- a/bitbake/lib/bb/utils.py
+++ b/bitbake/lib/bb/utils.py
@@ -11,24 +11,29 @@ import re, fcntl, os, string, stat, shutil, time
 import sys
 import errno
 import logging
-import bb
-import bb.msg
+import locale
 import multiprocessing
-import fcntl
 import importlib
-from importlib import machinery
+import importlib.machinery
+import importlib.util
 import itertools
 import subprocess
 import glob
 import fnmatch
 import traceback
-import errno
 import signal
 import collections
 import copy
+import ctypes
+import random
+import socket
+import struct
+import tempfile
 from subprocess import getstatusoutput
 from contextlib import contextmanager
 from ctypes import cdll
+import bb
+import bb.msg
 
 logger = logging.getLogger("BitBake.Util")
 python_extensions = importlib.machinery.all_suffixes()
@@ -43,7 +48,7 @@ def clean_context():
 
 def get_context():
     return _context
-    
+
 
 def set_context(ctx):
     _context = ctx
@@ -77,7 +82,16 @@ def explode_version(s):
     return r
 
 def split_version(s):
-    """Split a version string into its constituent parts (PE, PV, PR)"""
+    """Split a version string into its constituent parts (PE, PV, PR).
+
+    Arguments:
+
+    - ``s``: version string. The format of the input string should be::
+
+        ${PE}:${PV}-${PR}
+
+    Returns a tuple ``(pe, pv, pr)``.
+    """
     s = s.strip(" <>=")
     e = 0
     if s.count(':'):
@@ -129,16 +143,30 @@ def vercmp(ta, tb):
     return r
 
 def vercmp_string(a, b):
-    """ Split version strings and compare them """
+    """ Split version strings using ``bb.utils.split_version()`` and compare
+    them with ``bb.utils.vercmp()``.
+
+    Arguments:
+
+    - ``a``: left version string operand.
+    - ``b``: right version string operand.
+
+    Returns what ``bb.utils.vercmp()`` returns."""
     ta = split_version(a)
     tb = split_version(b)
     return vercmp(ta, tb)
 
 def vercmp_string_op(a, b, op):
     """
-    Compare two versions and check if the specified comparison operator matches the result of the comparison.
-    This function is fairly liberal about what operators it will accept since there are a variety of styles
-    depending on the context.
+    Takes the return value of ``bb.utils.vercmp()`` and returns the operation
+    defined by ``op`` between the return value and 0.
+
+    Arguments:
+
+    - ``a``: left version string operand.
+    - ``b``: right version string operand.
+    - ``op``: operator string. Can be one of ``=``, ``==``, ``<=``, ``>=``,
+      ``>``, ``>>``, ``<``, ``<<`` or ``!=``.
     """
     res = vercmp_string(a, b)
     if op in ('=', '=='):
@@ -158,9 +186,16 @@ def vercmp_string_op(a, b, op):
 
 def explode_deps(s):
     """
-    Take an RDEPENDS style string of format:
-    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
-    and return a list of dependencies.
+    Takes an RDEPENDS style string of format::
+
+        DEPEND1 (optional version) DEPEND2 (optional version) ...
+
+    Arguments:
+
+    - ``s``: input RDEPENDS style string
+
+    Returns a list of dependencies.
+
     Version information is ignored.
     """
     r = []
@@ -182,9 +217,17 @@ def explode_deps(s):
 
 def explode_dep_versions2(s, *, sort=True):
     """
-    Take an RDEPENDS style string of format:
-    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
-    and return a dictionary of dependencies and versions.
+    Takes an RDEPENDS style string of format::
+
+        DEPEND1 (optional version) DEPEND2 (optional version) ...
+
+    Arguments:
+
+    - ``s``: input RDEPENDS style string
+    - ``*``: *Unused*.
+    - ``sort``: whether to sort the output or not.
+
+    Returns a dictionary of dependencies and versions.
     """
     r = collections.OrderedDict()
     l = s.replace(",", "").split()
@@ -205,8 +248,8 @@ def explode_dep_versions2(s, *, sort=True):
             inversion = True
         # This list is based on behavior and supported comparisons from deb, opkg and rpm.
         #
-        # Even though =<, <<, ==, !=, =>, and >> may not be supported, 
-        # we list each possibly valid item. 
+        # Even though =<, <<, ==, !=, =>, and >> may not be supported,
+        # we list each possibly valid item.
         # The build system is responsible for validation of what it supports.
         if i.startswith(('<=', '=<', '<<', '==', '!=', '>=', '=>', '>>')):
             lastcmp = i[0:2]
@@ -249,10 +292,17 @@ def explode_dep_versions2(s, *, sort=True):
 
 def explode_dep_versions(s):
     """
-    Take an RDEPENDS style string of format:
-    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
-    skip null value and items appeared in dependancy string multiple times
-    and return a dictionary of dependencies and versions.
+    Take an RDEPENDS style string of format::
+
+        DEPEND1 (optional version) DEPEND2 (optional version) ...
+
+    Skips null values and items appearing in the dependency string multiple times.
+
+    Arguments:
+
+    - ``s``: input RDEPENDS style string
+
+    Returns a dictionary of dependencies and versions.
     """
     r = explode_dep_versions2(s)
     for d in r:
@@ -266,7 +316,17 @@ def explode_dep_versions(s):
 
 def join_deps(deps, commasep=True):
     """
-    Take the result from explode_dep_versions and generate a dependency string
+    Take a result from ``bb.utils.explode_dep_versions()`` and generate a
+    dependency string.
+
+    Arguments:
+
+    - ``deps``: dictionary of dependencies and versions.
+    - ``commasep``: makes the return value separated by commas if ``True``,
+      separated by spaces otherwise.
+
+    Returns a comma-separated (space-separated if ``commasep`` is ``False``)
+    string of dependencies and versions.
     """
     result = []
     for dep in deps:
@@ -340,7 +400,7 @@ def _print_exception(t, value, tb, realfile, text, context):
         exception = traceback.format_exception_only(t, value)
         error.append('Error executing a python function in %s:\n' % realfile)
 
-        # Strip 'us' from the stack (better_exec call) unless that was where the 
+        # Strip 'us' from the stack (better_exec call) unless that was where the
         # error came from
         if tb.tb_next is not None:
             tb = tb.tb_next
@@ -379,7 +439,7 @@ def _print_exception(t, value, tb, realfile, text, context):
 
         error.append("Exception: %s" % ''.join(exception))
 
-        # If the exception is from spwaning a task, let's be helpful and display
+        # If the exception is from spawning a task, let's be helpful and display
         # the output (which hopefully includes stderr).
         if isinstance(value, subprocess.CalledProcessError) and value.output:
             error.append("Subprocess output:")
@@ -400,7 +460,7 @@ def better_exec(code, context, text = None, realfile = "<code>", pythonexception
     code = better_compile(code, realfile, realfile)
     try:
         exec(code, get_context(), context)
-    except (bb.BBHandledException, bb.parse.SkipRecipe, bb.data_smart.ExpansionError):
+    except (bb.BBHandledException, bb.parse.SkipRecipe, bb.data_smart.ExpansionError, bb.process.ExecutionError):
         # Error already shown so passthrough, no need for traceback
         raise
     except Exception as e:
@@ -427,33 +487,56 @@ def better_eval(source, locals, extraglobals = None):
427 return eval(source, ctx, locals) 487 return eval(source, ctx, locals)
428 488
429@contextmanager 489@contextmanager
430def fileslocked(files): 490def fileslocked(files, *args, **kwargs):
431 """Context manager for locking and unlocking file locks.""" 491 """Context manager for locking and unlocking file locks. Uses
492 ``bb.utils.lockfile()`` and ``bb.utils.unlockfile()`` to lock and unlock
493 files.
494
495 No return value."""
432 locks = [] 496 locks = []
433 if files: 497 if files:
434 for lockfile in files: 498 for lockfile in files:
435 locks.append(bb.utils.lockfile(lockfile)) 499 l = bb.utils.lockfile(lockfile, *args, **kwargs)
500 if l is not None:
501 locks.append(l)
436 502
437 try: 503 try:
438 yield 504 yield
439 finally: 505 finally:
506 locks.reverse()
440 for lock in locks: 507 for lock in locks:
441 bb.utils.unlockfile(lock) 508 bb.utils.unlockfile(lock)
442 509
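A minimal usage sketch for the context manager (the lock paths are hypothetical)::

    with bb.utils.fileslocked(["/tmp/a.lock", "/tmp/b.lock"]):
        pass  # both locks held here; released in reverse order on exit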
443def lockfile(name, shared=False, retry=True, block=False): 510def lockfile(name, shared=False, retry=True, block=False):
444 """ 511 """
445 Use the specified file as a lock file, return when the lock has 512 Use the specified file (with filename ``name``) as a lock file, return when
446 been acquired. Returns a variable to pass to unlockfile(). 513 the lock has been acquired. Returns a variable to pass to ``bb.utils.unlockfile()``.
447 Parameters: 514
448 retry: True to re-try locking if it fails, False otherwise 515 Arguments:
449 block: True to block until the lock succeeds, False otherwise 516
517 - ``shared``: sets the lock as a shared lock instead of an
518 exclusive lock.
519 - ``retry``: ``True`` to re-try locking if it fails, ``False``
520 otherwise.
521 - ``block``: ``True`` to block until the lock succeeds,
522 ``False`` otherwise.
523
450 The retry and block parameters are kind of equivalent unless you 524 The ``retry`` and ``block`` parameters are largely equivalent unless you
451 consider the possibility of sending a signal to the process to break 525 consider the possibility of sending a signal to the process to break
452 out - at which point you want block=True rather than retry=True. 526 out - at which point you want ``block=True`` rather than ``retry=True``.
527
528 Returns the locked file descriptor in case of success, ``None`` otherwise.
453 """ 529 """
530 basename = os.path.basename(name)
531 if len(basename) > 255:
532 root, ext = os.path.splitext(basename)
533 basename = root[:255 - len(ext)] + ext
534
454 dirname = os.path.dirname(name) 535 dirname = os.path.dirname(name)
455 mkdirhier(dirname) 536 mkdirhier(dirname)
456 537
538 name = os.path.join(dirname, basename)
539
457 if not os.access(dirname, os.W_OK): 540 if not os.access(dirname, os.W_OK):
458 logger.error("Unable to acquire lock '%s', directory is not writable", 541 logger.error("Unable to acquire lock '%s', directory is not writable",
459 name) 542 name)
@@ -487,7 +570,7 @@ def lockfile(name, shared=False, retry=True, block=False):
487 return lf 570 return lf
488 lf.close() 571 lf.close()
489 except OSError as e: 572 except OSError as e:
490 if e.errno == errno.EACCES: 573 if e.errno == errno.EACCES or e.errno == errno.ENAMETOOLONG:
491 logger.error("Unable to acquire lock '%s', %s", 574 logger.error("Unable to acquire lock '%s', %s",
492 e.strerror, name) 575 e.strerror, name)
493 sys.exit(1) 576 sys.exit(1)
@@ -501,7 +584,13 @@ def lockfile(name, shared=False, retry=True, block=False):
501 584
502def unlockfile(lf): 585def unlockfile(lf):
503 """ 586 """
504 Unlock a file locked using lockfile() 587 Unlock a file locked using ``bb.utils.lockfile()``.
588
589 Arguments:
590
591 - ``lf``: the locked file descriptor.
592
593 No return value.
505 """ 594 """
506 try: 595 try:
507 # If we had a shared lock, we need to promote to exclusive before 596 # If we had a shared lock, we need to promote to exclusive before
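A typical acquire/release pairing for the two functions above might look like this (the lock path is hypothetical)::

    lf = bb.utils.lockfile("/tmp/example.lock", shared=False, retry=True)
    try:
        pass  # critical section
    finally:
        bb.utils.unlockfile(lf)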
@@ -529,43 +618,97 @@ def _hasher(method, filename):
529 618
530def md5_file(filename): 619def md5_file(filename):
531 """ 620 """
532 Return the hex string representation of the MD5 checksum of filename. 621 Arguments:
622
623 - ``filename``: path to the input file.
624
625 Returns the hexadecimal string representation of the MD5 checksum of filename.
533 """ 626 """
534 import hashlib 627 import hashlib
535 return _hasher(hashlib.md5(), filename) 628 try:
629 sig = hashlib.new('MD5', usedforsecurity=False)
630 except TypeError:
631 # Some configurations don't appear to support two arguments
632 sig = hashlib.new('MD5')
633 return _hasher(sig, filename)
536 634
537def sha256_file(filename): 635def sha256_file(filename):
538 """ 636 """
539 Return the hex string representation of the 256-bit SHA checksum of 637 Returns the hexadecimal representation of the 256-bit SHA checksum of
540 filename. 638 filename.
639
640 Arguments:
641
642 - ``filename``: path to the file.
541 """ 643 """
542 import hashlib 644 import hashlib
543 return _hasher(hashlib.sha256(), filename) 645 return _hasher(hashlib.sha256(), filename)
544 646
545def sha1_file(filename): 647def sha1_file(filename):
546 """ 648 """
547 Return the hex string representation of the SHA1 checksum of the filename 649 Returns the hexadecimal representation of the SHA1 checksum of the filename
650
651 Arguments:
652
653 - ``filename``: path to the file.
548 """ 654 """
549 import hashlib 655 import hashlib
550 return _hasher(hashlib.sha1(), filename) 656 return _hasher(hashlib.sha1(), filename)
551 657
552def sha384_file(filename): 658def sha384_file(filename):
553 """ 659 """
554 Return the hex string representation of the SHA384 checksum of the filename 660 Returns the hexadecimal representation of the SHA384 checksum of the filename
661
662 Arguments:
663
664 - ``filename``: path to the file.
555 """ 665 """
556 import hashlib 666 import hashlib
557 return _hasher(hashlib.sha384(), filename) 667 return _hasher(hashlib.sha384(), filename)
558 668
559def sha512_file(filename): 669def sha512_file(filename):
560 """ 670 """
561 Return the hex string representation of the SHA512 checksum of the filename 671 Returns the hexadecimal representation of the SHA512 checksum of the filename
672
673 Arguments:
674
675 - ``filename``: path to the file.
562 """ 676 """
563 import hashlib 677 import hashlib
564 return _hasher(hashlib.sha512(), filename) 678 return _hasher(hashlib.sha512(), filename)
565 679
680def goh1_file(filename):
681 """
682 Returns the hexadecimal string representation of the Go mod h1 checksum of the
683 filename. The Go mod h1 checksum uses the Go dirhash package. The package
684 defines hashes over directory trees and is used by go mod for mod files and
685 zip archives.
686
687 Arguments:
688
689 - ``filename``: path to the file.
690 """
691 import hashlib
692 import zipfile
693
694 lines = []
695 if zipfile.is_zipfile(filename):
696 with zipfile.ZipFile(filename) as archive:
697 for fn in sorted(archive.namelist()):
698 method = hashlib.sha256()
699 method.update(archive.read(fn))
700 hash = method.hexdigest()
701 lines.append("%s %s\n" % (hash, fn))
702 else:
703 hash = _hasher(hashlib.sha256(), filename)
704 lines.append("%s go.mod\n" % hash)
705 method = hashlib.sha256()
706 method.update("".join(lines).encode('utf-8'))
707 return method.hexdigest()
708
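A short sketch of how the hex digest returned by ``goh1_file()`` relates to the ``h1:`` entries in a ``go.sum`` file, assuming (per the dirhash scheme) that go.sum stores the base64 encoding of the same SHA-256 digest; the path is hypothetical::

    import base64
    hexdigest = bb.utils.goh1_file("example-module.zip")
    h1 = "h1:" + base64.b64encode(bytes.fromhex(hexdigest)).decode()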
566def preserved_envvars_exported(): 709def preserved_envvars_exported():
567 """Variables which are taken from the environment and placed in and exported 710 """Returns the list of variables which are taken from the environment and
568 from the metadata""" 711 placed in and exported from the metadata."""
569 return [ 712 return [
570 'BB_TASKHASH', 713 'BB_TASKHASH',
571 'HOME', 714 'HOME',
@@ -579,19 +722,42 @@ def preserved_envvars_exported():
579 ] 722 ]
580 723
581def preserved_envvars(): 724def preserved_envvars():
582 """Variables which are taken from the environment and placed in the metadata""" 725 """Returns the list of variables which are taken from the environment and
726 placed in the metadata."""
583 v = [ 727 v = [
584 'BBPATH', 728 'BBPATH',
585 'BB_PRESERVE_ENV', 729 'BB_PRESERVE_ENV',
586 'BB_ENV_WHITELIST', 730 'BB_ENV_PASSTHROUGH_ADDITIONS',
587 'BB_ENV_EXTRAWHITE',
588 ] 731 ]
589 return v + preserved_envvars_exported() 732 return v + preserved_envvars_exported()
590 733
734def check_system_locale():
735 """Make sure the required system locale are available and configured.
736
737 No return value."""
738 default_locale = locale.getlocale(locale.LC_CTYPE)
739
740 try:
741 locale.setlocale(locale.LC_CTYPE, ("en_US", "UTF-8"))
742 except:
743 sys.exit("Please make sure locale 'en_US.UTF-8' is available on your system")
744 else:
745 locale.setlocale(locale.LC_CTYPE, default_locale)
746
747 if sys.getfilesystemencoding() != "utf-8":
748 sys.exit("Please use a locale setting which supports UTF-8 (such as LANG=en_US.UTF-8).\n"
749 "Python can't change the filesystem locale after loading so we need a UTF-8 when Python starts or things won't work.")
750
591def filter_environment(good_vars): 751def filter_environment(good_vars):
592 """ 752 """
593 Create a pristine environment for bitbake. This will remove variables that 753 Create a pristine environment for bitbake. This will remove variables that
594 are not known and may influence the build in a negative way. 754 are not known and may influence the build in a negative way.
755
756 Arguments:
757
758 - ``good_vars``: list of variables to exclude from the filtering.
759
760 No return value.
595 """ 761 """
596 762
597 removed_vars = {} 763 removed_vars = {}
@@ -615,27 +781,29 @@ def filter_environment(good_vars):
615 781
616def approved_variables(): 782def approved_variables():
617 """ 783 """
618 Determine and return the list of whitelisted variables which are approved 784 Determine and return the list of variables which are approved
619 to remain in the environment. 785 to remain in the environment.
620 """ 786 """
621 if 'BB_PRESERVE_ENV' in os.environ: 787 if 'BB_PRESERVE_ENV' in os.environ:
622 return os.environ.keys() 788 return os.environ.keys()
623 approved = [] 789 approved = []
624 if 'BB_ENV_WHITELIST' in os.environ: 790 if 'BB_ENV_PASSTHROUGH' in os.environ:
625 approved = os.environ['BB_ENV_WHITELIST'].split() 791 approved = os.environ['BB_ENV_PASSTHROUGH'].split()
626 approved.extend(['BB_ENV_WHITELIST']) 792 approved.extend(['BB_ENV_PASSTHROUGH'])
627 else: 793 else:
628 approved = preserved_envvars() 794 approved = preserved_envvars()
629 if 'BB_ENV_EXTRAWHITE' in os.environ: 795 if 'BB_ENV_PASSTHROUGH_ADDITIONS' in os.environ:
630 approved.extend(os.environ['BB_ENV_EXTRAWHITE'].split()) 796 approved.extend(os.environ['BB_ENV_PASSTHROUGH_ADDITIONS'].split())
631 if 'BB_ENV_EXTRAWHITE' not in approved: 797 if 'BB_ENV_PASSTHROUGH_ADDITIONS' not in approved:
632 approved.extend(['BB_ENV_EXTRAWHITE']) 798 approved.extend(['BB_ENV_PASSTHROUGH_ADDITIONS'])
633 return approved 799 return approved
634 800
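A minimal sketch of how these environment helpers combine (the variable name is made up)::

    os.environ["BB_ENV_PASSTHROUGH_ADDITIONS"] = "MY_EXTRA_VAR"
    good_vars = bb.utils.approved_variables()  # now includes MY_EXTRA_VAR
    bb.utils.filter_environment(good_vars)     # everything unapproved is removed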
635def clean_environment(): 801def clean_environment():
636 """ 802 """
637 Clean up any spurious environment variables. This will remove any 803 Clean up any spurious environment variables. This will remove any
638 variables the user hasn't chosen to preserve. 804 variables the user hasn't chosen to preserve.
805
806 No return value.
639 """ 807 """
640 if 'BB_PRESERVE_ENV' not in os.environ: 808 if 'BB_PRESERVE_ENV' not in os.environ:
641 good_vars = approved_variables() 809 good_vars = approved_variables()
@@ -646,6 +814,8 @@ def clean_environment():
646def empty_environment(): 814def empty_environment():
647 """ 815 """
648 Remove all variables from the environment. 816 Remove all variables from the environment.
817
818 No return value.
649 """ 819 """
650 for s in list(os.environ.keys()): 820 for s in list(os.environ.keys()):
651 os.unsetenv(s) 821 os.unsetenv(s)
@@ -654,6 +824,12 @@ def empty_environment():
654def build_environment(d): 824def build_environment(d):
655 """ 825 """
656 Build an environment from all exported variables. 826 Build an environment from all exported variables.
827
828 Arguments:
829
830 - ``d``: the data store.
831
832 No return value.
657 """ 833 """
658 import bb.data 834 import bb.data
659 for var in bb.data.keys(d): 835 for var in bb.data.keys(d):
@@ -678,13 +854,23 @@ def _check_unsafe_delete_path(path):
678 return False 854 return False
679 855
680def remove(path, recurse=False, ionice=False): 856def remove(path, recurse=False, ionice=False):
681 """Equivalent to rm -f or rm -rf""" 857 """Equivalent to rm -f or rm -rf.
858
859 Arguments:
860
861 - ``path``: path to file/directory to remove.
862 - ``recurse``: deletes recursively if ``True``.
863 - ``ionice``: prepends ``ionice -c 3`` to the ``rm`` command. See ``man
864 ionice``.
865
866 No return value.
867 """
682 if not path: 868 if not path:
683 return 869 return
684 if recurse: 870 if recurse:
685 for name in glob.glob(path): 871 for name in glob.glob(path):
686 if _check_unsafe_delete_path(path): 872 if _check_unsafe_delete_path(name):
687 raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % path) 873 raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % name)
688 # shutil.rmtree(name) would be ideal but its too slow 874 # shutil.rmtree(name) would be ideal but its too slow
689 cmd = [] 875 cmd = []
690 if ionice: 876 if ionice:
@@ -699,7 +885,17 @@ def remove(path, recurse=False, ionice=False):
699 raise 885 raise
700 886
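For example (the path is hypothetical)::

    bb.utils.remove("/tmp/scratch-build", recurse=True)  # behaves like rm -rf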
701def prunedir(topdir, ionice=False): 887def prunedir(topdir, ionice=False):
702 """ Delete everything reachable from the directory named in 'topdir'. """ 888 """
889 Delete everything reachable from the directory named in ``topdir``.
890
891 Arguments:
892
893 - ``topdir``: directory path.
894 - ``ionice``: prepends ``ionice -c 3`` to the ``rm`` command. See ``man
895 ionice``.
896
897 No return value.
898 """
703 # CAUTION: This is dangerous! 899 # CAUTION: This is dangerous!
704 if _check_unsafe_delete_path(topdir): 900 if _check_unsafe_delete_path(topdir):
705 raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir) 901 raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir)
@@ -710,9 +906,16 @@ def prunedir(topdir, ionice=False):
710# but thats possibly insane and suffixes is probably going to be small 906# but thats possibly insane and suffixes is probably going to be small
711# 907#
712def prune_suffix(var, suffixes, d): 908def prune_suffix(var, suffixes, d):
713 """ 909 """
714 See if var ends with any of the suffixes listed and 910 Check if ``var`` ends with any of the suffixes listed in ``suffixes`` and
715 remove it if found 911 remove it if found.
912
913 Arguments:
914
915 - ``var``: string to check for suffixes.
916 - ``suffixes``: list of strings representing suffixes to check for.
917
918 Returns the string ``var`` without the suffix.
716 """ 919 """
717 for suffix in suffixes: 920 for suffix in suffixes:
718 if suffix and var.endswith(suffix): 921 if suffix and var.endswith(suffix):
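For example (``d`` is the usual data store; the suffix list is illustrative)::

    bb.utils.prune_suffix("glibc-native", ["-native", "-cross"], d)  # -> "glibc"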
@@ -721,9 +924,16 @@ def prune_suffix(var, suffixes, d):
721 924
722def mkdirhier(directory): 925def mkdirhier(directory):
723 """Create a directory like 'mkdir -p', but does not complain if 926 """Create a directory like 'mkdir -p', but does not complain if
724 directory already exists like os.makedirs 927 directory already exists like ``os.makedirs()``.
725 """
726 928
929 Arguments:
930
931 - ``directory``: path to the directory.
932
933 No return value.
934 """
935 if '${' in str(directory):
936 bb.fatal("Directory name {} contains unexpanded bitbake variable. This may cause build failures and WORKDIR polution.".format(directory))
727 try: 937 try:
728 os.makedirs(directory) 938 os.makedirs(directory)
729 except OSError as e: 939 except OSError as e:
@@ -731,10 +941,24 @@ def mkdirhier(directory):
731 raise e 941 raise e
732 942
733def movefile(src, dest, newmtime = None, sstat = None): 943def movefile(src, dest, newmtime = None, sstat = None):
734 """Moves a file from src to dest, preserving all permissions and 944 """Moves a file from ``src`` to ``dest``, preserving all permissions and
735 attributes; mtime will be preserved even when moving across 945 attributes; mtime will be preserved even when moving across
736 filesystems. Returns true on success and false on failure. Move is 946 filesystems. Returns a true value on success and ``None`` on failure. The
737 atomic. 947 move is atomic.
948
949 Arguments:
950
951 - ``src``: source file.
952 - ``dest``: destination file.
953 - ``newmtime``: new mtime to be passed as float seconds since the epoch.
954 - ``sstat``: os.stat_result to use for the destination file.
955
956 Returns an ``os.stat_result`` of the destination file if the
957 source file is a symbolic link or the ``sstat`` argument represents a
958 symbolic link - in which case the destination file will also be created as
959 a symbolic link.
960
961 Otherwise, returns ``newmtime`` on success and ``None`` on failure.
738 """ 962 """
739 963
740 #print "movefile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")" 964 #print "movefile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
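A hedged usage sketch (paths hypothetical; a false return indicates failure as described above)::

    ret = bb.utils.movefile("/tmp/src.txt", "/tmp/dst.txt")
    if not ret:
        bb.warn("move failed")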
@@ -742,7 +966,7 @@ def movefile(src, dest, newmtime = None, sstat = None):
742 if not sstat: 966 if not sstat:
743 sstat = os.lstat(src) 967 sstat = os.lstat(src)
744 except Exception as e: 968 except Exception as e:
745 print("movefile: Stating source file failed...", e) 969 logger.warning("movefile: Stating source file failed...", e)
746 return None 970 return None
747 971
748 destexists = 1 972 destexists = 1
@@ -770,7 +994,7 @@ def movefile(src, dest, newmtime = None, sstat = None):
770 os.unlink(src) 994 os.unlink(src)
771 return os.lstat(dest) 995 return os.lstat(dest)
772 except Exception as e: 996 except Exception as e:
773 print("movefile: failed to properly create symlink:", dest, "->", target, e) 997 logger.warning("movefile: failed to properly create symlink:", dest, "->", target, e)
774 return None 998 return None
775 999
776 renamefailed = 1 1000 renamefailed = 1
@@ -782,12 +1006,12 @@ def movefile(src, dest, newmtime = None, sstat = None):
782 1006
783 if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]: 1007 if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]:
784 try: 1008 try:
785 os.rename(src, destpath) 1009 bb.utils.rename(src, destpath)
786 renamefailed = 0 1010 renamefailed = 0
787 except Exception as e: 1011 except Exception as e:
788 if e.errno != errno.EXDEV: 1012 if e.errno != errno.EXDEV:
789 # Some random error. 1013 # Some random error.
790 print("movefile: Failed to move", src, "to", dest, e) 1014 logger.warning("movefile: Failed to move", src, "to", dest, e)
791 return None 1015 return None
792 # Invalid cross-device-link 'bind' mounted or actually Cross-Device 1016 # Invalid cross-device-link 'bind' mounted or actually Cross-Device
793 1017
@@ -796,16 +1020,16 @@ def movefile(src, dest, newmtime = None, sstat = None):
796 if stat.S_ISREG(sstat[stat.ST_MODE]): 1020 if stat.S_ISREG(sstat[stat.ST_MODE]):
797 try: # For safety copy then move it over. 1021 try: # For safety copy then move it over.
798 shutil.copyfile(src, destpath + "#new") 1022 shutil.copyfile(src, destpath + "#new")
799 os.rename(destpath + "#new", destpath) 1023 bb.utils.rename(destpath + "#new", destpath)
800 didcopy = 1 1024 didcopy = 1
801 except Exception as e: 1025 except Exception as e:
802 print('movefile: copy', src, '->', dest, 'failed.', e) 1026 logger.warning('movefile: copy %s -> %s failed (%s)' % (src, dest, e))
803 return None 1027 return None
804 else: 1028 else:
805 #we don't yet handle special, so we need to fall back to /bin/mv 1029 #we don't yet handle special, so we need to fall back to /bin/mv
806 a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'") 1030 a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'")
807 if a[0] != 0: 1031 if a[0] != 0:
808 print("movefile: Failed to move special file:" + src + "' to '" + dest + "'", a) 1032 logger.warning("movefile: Failed to move special file:" + src + "' to '" + dest + "'", a)
809 return None # failure 1033 return None # failure
810 try: 1034 try:
811 if didcopy: 1035 if didcopy:
@@ -813,7 +1037,7 @@ def movefile(src, dest, newmtime = None, sstat = None):
813 os.chmod(destpath, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown 1037 os.chmod(destpath, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
814 os.unlink(src) 1038 os.unlink(src)
815 except Exception as e: 1039 except Exception as e:
816 print("movefile: Failed to chown/chmod/unlink", dest, e) 1040 logger.warning("movefile: Failed to chown/chmod/unlink", dest, e)
817 return None 1041 return None
818 1042
819 if newmtime: 1043 if newmtime:
@@ -825,9 +1049,24 @@ def movefile(src, dest, newmtime = None, sstat = None):
825 1049
826def copyfile(src, dest, newmtime = None, sstat = None): 1050def copyfile(src, dest, newmtime = None, sstat = None):
827 """ 1051 """
828 Copies a file from src to dest, preserving all permissions and 1052 Copies a file from ``src`` to ``dest``, preserving all permissions and
829 attributes; mtime will be preserved even when moving across 1053 attributes; mtime will be preserved even when copying across
830 filesystems. Returns true on success and false on failure. 1054 filesystems.
1055
1056 Arguments:
1057
1058 - ``src``: Source file.
1059 - ``dest``: Destination file.
1060 - ``newmtime``: new mtime to be passed as float seconds since the epoch.
1061 - ``sstat``: os.stat_result to use for the destination file.
1062
1063 Returns an ``os.stat_result`` of the destination file if the
1064 source file is a symbolic link or the ``sstat`` argument represents a
1065 symbolic link - in which case the destination file will also be created as
1066 a symbolic link.
1067
1068 Otherwise, returns ``newmtime`` on success and ``False`` on failure.
1069
831 """ 1070 """
832 #print "copyfile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")" 1071 #print "copyfile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
833 try: 1072 try:
@@ -874,7 +1113,7 @@ def copyfile(src, dest, newmtime = None, sstat = None):
874 1113
875 # For safety copy then move it over. 1114 # For safety copy then move it over.
876 shutil.copyfile(src, dest + "#new") 1115 shutil.copyfile(src, dest + "#new")
877 os.rename(dest + "#new", dest) 1116 bb.utils.rename(dest + "#new", dest)
878 except Exception as e: 1117 except Exception as e:
879 logger.warning("copyfile: copy %s to %s failed (%s)" % (src, dest, e)) 1118 logger.warning("copyfile: copy %s to %s failed (%s)" % (src, dest, e))
880 return False 1119 return False
@@ -905,10 +1144,16 @@ def copyfile(src, dest, newmtime = None, sstat = None):
905 1144
906def break_hardlinks(src, sstat = None): 1145def break_hardlinks(src, sstat = None):
907 """ 1146 """
908 Ensures src is the only hardlink to this file. Other hardlinks, 1147 Ensures ``src`` is the only hardlink to this file. Other hardlinks,
909 if any, are not affected (other than in their st_nlink value, of 1148 if any, are not affected (other than in their st_nlink value, of
910 course). Returns true on success and false on failure. 1149 course).
1150
1151 Arguments:
1152
1153 - ``src``: source file path.
1154 - ``sstat``: os.stat_result to use when checking if the file is a link.
911 1155
1156 Returns ``True`` on success and ``False`` on failure.
912 """ 1157 """
913 try: 1158 try:
914 if not sstat: 1159 if not sstat:
@@ -922,11 +1167,24 @@ def break_hardlinks(src, sstat = None):
922 1167
923def which(path, item, direction = 0, history = False, executable=False): 1168def which(path, item, direction = 0, history = False, executable=False):
924 """ 1169 """
925 Locate `item` in the list of paths `path` (colon separated string like $PATH). 1170 Locate ``item`` in the list of paths ``path`` (colon separated string like
926 If `direction` is non-zero then the list is reversed. 1171 ``$PATH``).
927 If `history` is True then the list of candidates also returned as result,history. 1172
928 If `executable` is True then the candidate has to be an executable file, 1173 Arguments:
929 otherwise the candidate simply has to exist. 1174
1175 - ``path``: list of colon-separated paths.
1176 - ``item``: string to search for.
1177 - ``direction``: if non-zero then the list is reversed.
1178 - ``history``: if ``True`` then the list of candidates is also returned
1179 as ``result,history`` where ``history`` is the list of paths
1180 previously checked.
1181 - ``executable``: if ``True`` then the candidate has to be an
1182 executable file, otherwise if ``False`` the candidate simply
1183 has to exist.
1184
1185 Returns the item if found in the list of paths, otherwise an empty string.
1186 If ``history`` is ``True``, returns the list of paths previously checked
1187 in a tuple with the found (or not found) item as ``(item, history)``.
930 """ 1188 """
931 1189
932 if executable: 1190 if executable:
@@ -953,10 +1211,29 @@ def which(path, item, direction = 0, history = False, executable=False):
953 return "", hist 1211 return "", hist
954 return "" 1212 return ""
955 1213
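For example, resolving a tool on ``$PATH`` (a sketch)::

    path = os.environ.get("PATH", "")
    found = bb.utils.which(path, "gcc", executable=True)
    found, hist = bb.utils.which(path, "gcc", executable=True, history=True)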
1214def to_filemode(input):
1215 """
1216 Take the contents of a bitbake variable defining a file mode and
1217 return the proper Python representation of the number.
1218
1219 Arguments:
1220
1221 - ``input``: a string or number to convert, e.g. a bitbake variable
1222 string, assumed to be an octal representation
1223
1224 Returns the Python file mode as a number.
1225 """
1226 # umask might come in as a number or text string..
1227 if type(input) is int:
1228 return input
1229 return int(input, 8)
1230
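For example::

    bb.utils.to_filemode("0755")  # -> 493 (0o755), string parsed as octal
    bb.utils.to_filemode(0o644)   # -> 420, numbers are returned unchanged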
956@contextmanager 1231@contextmanager
957def umask(new_mask): 1232def umask(new_mask):
958 """ 1233 """
959 Context manager to set the umask to a specific mask, and restore it afterwards. 1234 Context manager to set the umask to a specific mask, and restore it afterwards.
1235
1236 No return value.
960 """ 1237 """
961 current_mask = os.umask(new_mask) 1238 current_mask = os.umask(new_mask)
962 try: 1239 try:
@@ -965,13 +1242,26 @@ def umask(new_mask):
965 os.umask(current_mask) 1242 os.umask(current_mask)
966 1243
967def to_boolean(string, default=None): 1244def to_boolean(string, default=None):
968 """ 1245 """
969 Check input string and return boolean value True/False/None 1246 Check input string and return boolean value True/False/None
970 depending upon the checks 1247 depending upon the checks.
1248
1249 Arguments:
1250
1251 - ``string``: input string.
1252 - ``default``: default return value if the input ``string`` is ``None``,
1253 ``0``, ``False`` or an empty string.
1254
1255 Returns ``True`` if the string is one of "y", "yes", "1", "true", ``False``
1256 if the string is one of "n", "no", "0", or "false". Returns ``default`` if
1257 the input ``string`` is ``None``, ``0``, ``False`` or an empty string.
971 """ 1258 """
972 if not string: 1259 if not string:
973 return default 1260 return default
974 1261
1262 if isinstance(string, int):
1263 return string != 0
1264
975 normalized = string.lower() 1265 normalized = string.lower()
976 if normalized in ("y", "yes", "1", "true"): 1266 if normalized in ("y", "yes", "1", "true"):
977 return True 1267 return True
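For example::

    bb.utils.to_boolean("Yes")        # -> True
    bb.utils.to_boolean("0")          # -> False
    bb.utils.to_boolean(None, False)  # -> False (the default kicks in)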
@@ -985,18 +1275,17 @@ def contains(variable, checkvalues, truevalue, falsevalue, d):
985 1275
986 Arguments: 1276 Arguments:
987 1277
988 variable -- the variable name. This will be fetched and expanded (using 1278 - ``variable``: the variable name. This will be fetched and expanded (using
989 d.getVar(variable)) and then split into a set(). 1279 d.getVar(variable)) and then split into a set().
990 1280 - ``checkvalues``: if this is a string it is split on whitespace into a set(),
991 checkvalues -- if this is a string it is split on whitespace into a set(), 1281 otherwise coerced directly into a set().
992 otherwise coerced directly into a set(). 1282 - ``truevalue``: the value to return if checkvalues is a subset of variable.
1283 - ``falsevalue``: the value to return if variable is empty or if checkvalues is
1284 not a subset of variable.
1285 - ``d``: the data store.
993 1286
994 truevalue -- the value to return if checkvalues is a subset of variable. 1287 Returns ``True`` if the variable contains the values specified, ``False``
995 1288 otherwise.
996 falsevalue -- the value to return if variable is empty or if checkvalues is
997 not a subset of variable.
998
999 d -- the data store.
1000 """ 1289 """
1001 1290
1002 val = d.getVar(variable) 1291 val = d.getVar(variable)
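A typical call as used in recipe metadata (variable and feature names are illustrative)::

    pkgs = bb.utils.contains("DISTRO_FEATURES", "systemd", "systemd-extras", "", d)
    # "systemd-extras" if DISTRO_FEATURES contains systemd, "" otherwise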
@@ -1016,18 +1305,17 @@ def contains_any(variable, checkvalues, truevalue, falsevalue, d):
1016 1305
1017 Arguments: 1306 Arguments:
1018 1307
1019 variable -- the variable name. This will be fetched and expanded (using 1308 - ``variable``: the variable name. This will be fetched and expanded (using
1020 d.getVar(variable)) and then split into a set(). 1309 d.getVar(variable)) and then split into a set().
1021 1310 - ``checkvalues``: if this is a string it is split on whitespace into a set(),
1022 checkvalues -- if this is a string it is split on whitespace into a set(), 1311 otherwise coerced directly into a set().
1023 otherwise coerced directly into a set(). 1312 - ``truevalue``: the value to return if checkvalues is a subset of variable.
1024 1313 - ``falsevalue``: the value to return if variable is empty or if checkvalues is
1025 truevalue -- the value to return if checkvalues is a subset of variable. 1314 not a subset of variable.
1315 - ``d``: the data store.
1026 1316
1027 falsevalue -- the value to return if variable is empty or if checkvalues is 1317 Returns ``True`` if the variable contains any of the values specified,
1028 not a subset of variable. 1318 ``False`` otherwise.
1029
1030 d -- the data store.
1031 """ 1319 """
1032 val = d.getVar(variable) 1320 val = d.getVar(variable)
1033 if not val: 1321 if not val:
@@ -1042,17 +1330,17 @@ def contains_any(variable, checkvalues, truevalue, falsevalue, d):
1042 return falsevalue 1330 return falsevalue
1043 1331
1044def filter(variable, checkvalues, d): 1332def filter(variable, checkvalues, d):
1045 """Return all words in the variable that are present in the checkvalues. 1333 """Return all words in the variable that are present in the ``checkvalues``.
1046 1334
1047 Arguments: 1335 Arguments:
1048 1336
1049 variable -- the variable name. This will be fetched and expanded (using 1337 - ``variable``: the variable name. This will be fetched and expanded (using
1050 d.getVar(variable)) and then split into a set(). 1338 d.getVar(variable)) and then split into a set().
1051 1339 - ``checkvalues``: if this is a string it is split on whitespace into a set(),
1052 checkvalues -- if this is a string it is split on whitespace into a set(), 1340 otherwise coerced directly into a set().
1053 otherwise coerced directly into a set(). 1341 - ``d``: the data store.
1054 1342
1055 d -- the data store. 1343 Returns a space-separated string of the matching words.
1056 """ 1344 """
1057 1345
1058 val = d.getVar(variable) 1346 val = d.getVar(variable)
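For example (values illustrative; assuming DISTRO_FEATURES = "acl ipv6 systemd")::

    bb.utils.filter("DISTRO_FEATURES", "acl pam systemd", d)  # -> "acl systemd"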
@@ -1068,8 +1356,27 @@ def filter(variable, checkvalues, d):
1068 1356
1069def get_referenced_vars(start_expr, d): 1357def get_referenced_vars(start_expr, d):
1070 """ 1358 """
1071 :return: names of vars referenced in start_expr (recursively), in quasi-BFS order (variables within the same level 1359 Get the names of the variables referenced in a given expression.
1072 are ordered arbitrarily) 1360
1361 Arguments:
1362
1363 - ``start_expr``: the expression where to look for variables references.
1364
1365 For example::
1366
1367 ${VAR_A} string ${VAR_B}
1368
1369 Or::
1370
1371 ${@d.getVar('VAR')}
1372
1373 If a variables makes references to other variables, the latter are also
1374 returned recursively.
1375
1376 - ``d``: the data store.
1377
1378 Returns the names of vars referenced in ``start_expr`` (recursively), in
1379 quasi-BFS order (variables within the same level are ordered arbitrarily).
1073 """ 1380 """
1074 1381
1075 seen = set() 1382 seen = set()
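A small sketch of the recursion (variable names made up)::

    d.setVar("A", "${B} ${C}")
    d.setVar("B", "${D}")
    refs = bb.utils.get_referenced_vars("${A}", d)
    # quasi-BFS: A first, then B and C in arbitrary order, then D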
@@ -1103,7 +1410,10 @@ def get_referenced_vars(start_expr, d):
1103 1410
1104 1411
1105def cpu_count(): 1412def cpu_count():
1106 return multiprocessing.cpu_count() 1413 try:
1414 return len(os.sched_getaffinity(0))
1415 except OSError:
1416 return multiprocessing.cpu_count()
1107 1417
1108def nonblockingfd(fd): 1418def nonblockingfd(fd):
1109 fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK) 1419 fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)
@@ -1146,7 +1456,9 @@ def multiprocessingpool(*args, **kwargs):
1146 return multiprocessing.Pool(*args, **kwargs) 1456 return multiprocessing.Pool(*args, **kwargs)
1147 1457
1148def exec_flat_python_func(func, *args, **kwargs): 1458def exec_flat_python_func(func, *args, **kwargs):
1149 """Execute a flat python function (defined with def funcname(args):...)""" 1459 """Execute a flat python function (defined with ``def funcname(args): ...``)
1460
1461 Returns the return value of the function."""
1150 # Prepare a small piece of python code which calls the requested function 1462 # Prepare a small piece of python code which calls the requested function
1151 # To do this we need to prepare two things - a set of variables we can use to pass 1463 # To do this we need to prepare two things - a set of variables we can use to pass
1152 # the values of arguments into the calling function, and the list of arguments for 1464 # the values of arguments into the calling function, and the list of arguments for
@@ -1172,48 +1484,57 @@ def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
1172 """Edit lines from a recipe or config file and modify one or more 1484 """Edit lines from a recipe or config file and modify one or more
1173 specified variable values set in the file using a specified callback 1485 specified variable values set in the file using a specified callback
1174 function. Lines are expected to have trailing newlines. 1486 function. Lines are expected to have trailing newlines.
1175 Parameters: 1487
1176 meta_lines: lines from the file; can be a list or an iterable 1488 Arguments:
1177 (e.g. file pointer) 1489
1178 variables: a list of variable names to look for. Functions 1490 - ``meta_lines``: lines from the file; can be a list or an iterable
1179 may also be specified, but must be specified with '()' at 1491 (e.g. file pointer)
1180 the end of the name. Note that the function doesn't have 1492 - ``variables``: a list of variable names to look for. Functions
1181 any intrinsic understanding of _append, _prepend, _remove, 1493 may also be specified, but must be specified with ``()`` at
1182 or overrides, so these are considered as part of the name. 1494 the end of the name. Note that the function doesn't have
1183 These values go into a regular expression, so regular 1495 any intrinsic understanding of ``:append``, ``:prepend``, ``:remove``,
1184 expression syntax is allowed. 1496 or overrides, so these are considered as part of the name.
1185 varfunc: callback function called for every variable matching 1497 These values go into a regular expression, so regular
1186 one of the entries in the variables parameter. The function 1498 expression syntax is allowed.
1187 should take four arguments: 1499 - ``varfunc``: callback function called for every variable matching
1188 varname: name of variable matched 1500 one of the entries in the variables parameter.
1189 origvalue: current value in file 1501
1190 op: the operator (e.g. '+=') 1502 The function should take four arguments:
1191 newlines: list of lines up to this point. You can use 1503
1192 this to prepend lines before this variable setting 1504 - ``varname``: name of variable matched
1193 if you wish. 1505 - ``origvalue``: current value in file
1194 and should return a four-element tuple: 1506 - ``op``: the operator (e.g. ``+=``)
1195 newvalue: new value to substitute in, or None to drop 1507 - ``newlines``: list of lines up to this point. You can use
1196 the variable setting entirely. (If the removal 1508 this to prepend lines before this variable setting
1197 results in two consecutive blank lines, one of the 1509 if you wish.
1198 blank lines will also be dropped). 1510
1199 newop: the operator to use - if you specify None here, 1511 And should return a four-element tuple:
1200 the original operation will be used. 1512
1201 indent: number of spaces to indent multi-line entries, 1513 - ``newvalue``: new value to substitute in, or ``None`` to drop
1202 or -1 to indent up to the level of the assignment 1514 the variable setting entirely. (If the removal
1203 and opening quote, or a string to use as the indent. 1515 results in two consecutive blank lines, one of the
1204 minbreak: True to allow the first element of a 1516 blank lines will also be dropped).
1205 multi-line value to continue on the same line as 1517 - ``newop``: the operator to use - if you specify ``None`` here,
1206 the assignment, False to indent before the first 1518 the original operation will be used.
1207 element. 1519 - ``indent``: number of spaces to indent multi-line entries,
1208 To clarify, if you wish not to change the value, then you 1520 or ``-1`` to indent up to the level of the assignment
1209 would return like this: return origvalue, None, 0, True 1521 and opening quote, or a string to use as the indent.
1210 match_overrides: True to match items with _overrides on the end, 1522 - ``minbreak``: ``True`` to allow the first element of a
1211 False otherwise 1523 multi-line value to continue on the same line as
1524 the assignment, ``False`` to indent before the first
1525 element.
1526
1527 To clarify, if you wish not to change the value, then you
1528 would return like this::
1529
1530 return origvalue, None, 0, True
1531 - ``match_overrides``: True to match items with _overrides on the end,
1532 False otherwise
1533
1212 Returns a tuple: 1534 Returns a tuple:
1213 updated: 1535
1214 True if changes were made, False otherwise. 1536 - ``updated``: ``True`` if changes were made, ``False`` otherwise.
1215 newlines: 1537 - ``newlines``: Lines after processing.
1216 Lines after processing
1217 """ 1538 """
1218 1539
1219 var_res = {} 1540 var_res = {}
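A minimal callback sketch following the description above (``lines`` is assumed to be a list of lines read from a recipe)::

    def bump_pr(varname, origvalue, op, newlines):
        if varname == "PR":
            return "r1", None, 0, True   # new value, keep operator
        return origvalue, None, 0, True  # leave anything else unchanged

    updated, newlines = bb.utils.edit_metadata(lines, ["PR"], bump_pr)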
@@ -1357,12 +1678,13 @@ def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
1357 1678
1358 1679
1359def edit_metadata_file(meta_file, variables, varfunc): 1680def edit_metadata_file(meta_file, variables, varfunc):
1360 """Edit a recipe or config file and modify one or more specified 1681 """Edit a recipe or configuration file and modify one or more specified
1361 variable values set in the file using a specified callback function. 1682 variable values set in the file using a specified callback function.
1362 The file is only written to if the value(s) actually change. 1683 The file is only written to if the value(s) actually change.
1363 This is basically the file version of edit_metadata(), see that 1684 This is basically the file version of ``bb.utils.edit_metadata()``, see that
1364 function's description for parameter/usage information. 1685 function's description for parameter/usage information.
1365 Returns True if the file was written to, False otherwise. 1686
1687 Returns ``True`` if the file was written to, ``False`` otherwise.
1366 """ 1688 """
1367 with open(meta_file, 'r') as f: 1689 with open(meta_file, 'r') as f:
1368 (updated, newlines) = edit_metadata(f, variables, varfunc) 1690 (updated, newlines) = edit_metadata(f, variables, varfunc)
@@ -1373,23 +1695,25 @@ def edit_metadata_file(meta_file, variables, varfunc):
1373 1695
1374 1696
1375def edit_bblayers_conf(bblayers_conf, add, remove, edit_cb=None): 1697def edit_bblayers_conf(bblayers_conf, add, remove, edit_cb=None):
1376 """Edit bblayers.conf, adding and/or removing layers 1698 """Edit ``bblayers.conf``, adding and/or removing layers.
1377 Parameters: 1699
1378 bblayers_conf: path to bblayers.conf file to edit 1700 Arguments:
1379 add: layer path (or list of layer paths) to add; None or empty 1701
1380 list to add nothing 1702 - ``bblayers_conf``: path to ``bblayers.conf`` file to edit
1381 remove: layer path (or list of layer paths) to remove; None or 1703 - ``add``: layer path (or list of layer paths) to add; ``None`` or empty
1382 empty list to remove nothing 1704 list to add nothing
1383 edit_cb: optional callback function that will be called after 1705 - ``remove``: layer path (or list of layer paths) to remove; ``None`` or
1384 processing adds/removes once per existing entry. 1706 empty list to remove nothing
1707 - ``edit_cb``: optional callback function that will be called
1708 after processing adds/removes once per existing entry.
1709
1385 Returns a tuple: 1710 Returns a tuple:
1386 notadded: list of layers specified to be added but weren't
1387 (because they were already in the list)
1388 notremoved: list of layers that were specified to be removed
1389 but weren't (because they weren't in the list)
1390 """
1391 1711
1392 import fnmatch 1712 - ``notadded``: list of layers specified to be added but weren't
1713 (because they were already in the list)
1714 - ``notremoved``: list of layers that were specified to be removed
1715 but weren't (because they weren't in the list)
1716 """
1393 1717
1394 def remove_trailing_sep(pth): 1718 def remove_trailing_sep(pth):
1395 if pth and pth[-1] == os.sep: 1719 if pth and pth[-1] == os.sep:
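For instance (paths hypothetical)::

    notadded, notremoved = bb.utils.edit_bblayers_conf(
        "conf/bblayers.conf", "/srv/layers/meta-custom", None)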
@@ -1508,7 +1832,22 @@ def get_collection_res(d):
1508 1832
1509 1833
1510def get_file_layer(filename, d, collection_res={}): 1834def get_file_layer(filename, d, collection_res={}):
1511 """Determine the collection (as defined by a layer's layer.conf file) containing the specified file""" 1835 """Determine the collection (or layer name, as defined by a layer's
1836 ``layer.conf`` file) containing the specified file.
1837
1838 Arguments:
1839
1840 - ``filename``: the filename to look for.
1841 - ``d``: the data store.
1842 - ``collection_res``: dictionary with the layer names as keys and file
1843 patterns to match as defined with the BBFILE_COLLECTIONS and
1844 BBFILE_PATTERN variables respectively. The return value of
1845 ``bb.utils.get_collection_res()`` is the default if this variable is
1846 not specified.
1847
1848 Returns the layer name containing the file. If multiple layers contain the
1849 file, the last matching layer name from collection_res is returned.
1850 """
1512 if not collection_res: 1851 if not collection_res:
1513 collection_res = get_collection_res(d) 1852 collection_res = get_collection_res(d)
1514 1853
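For example, from a python task or event handler (a sketch; ``FILE`` is the current recipe path)::

    layer = bb.utils.get_file_layer(d.getVar("FILE"), d)  # e.g. "core"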
@@ -1546,7 +1885,13 @@ class PrCtlError(Exception):
1546 1885
1547def signal_on_parent_exit(signame): 1886def signal_on_parent_exit(signame):
1548 """ 1887 """
1549 Trigger signame to be sent when the parent process dies 1888 Trigger ``signame`` to be sent when the parent process dies.
1889
1890 Arguments:
1891
1892 - ``signame``: name of the signal. See ``man signal``.
1893
1894 No return value.
1550 """ 1895 """
1551 signum = getattr(signal, signame) 1896 signum = getattr(signal, signame)
1552 # http://linux.die.net/man/2/prctl 1897 # http://linux.die.net/man/2/prctl
@@ -1581,7 +1926,7 @@ def ioprio_set(who, cls, value):
1581 bb.warn("Unable to set IO Prio for arch %s" % _unamearch) 1926 bb.warn("Unable to set IO Prio for arch %s" % _unamearch)
1582 1927
1583def set_process_name(name): 1928def set_process_name(name):
1584 from ctypes import cdll, byref, create_string_buffer 1929 from ctypes import byref, create_string_buffer
1585 # This is nice to have for debugging, not essential 1930 # This is nice to have for debugging, not essential
1586 try: 1931 try:
1587 libc = cdll.LoadLibrary('libc.so.6') 1932 libc = cdll.LoadLibrary('libc.so.6')
@@ -1590,33 +1935,96 @@ def set_process_name(name):
1590 except: 1935 except:
1591 pass 1936 pass
1592 1937
1593def export_proxies(d): 1938def enable_loopback_networking():
1594 """ export common proxies variables from datastore to environment """ 1939 # From bits/ioctls.h
1595 import os 1940 SIOCGIFFLAGS = 0x8913
1941 SIOCSIFFLAGS = 0x8914
1942 SIOCSIFADDR = 0x8916
1943 SIOCSIFNETMASK = 0x891C
1596 1944
1597 variables = ['http_proxy', 'HTTP_PROXY', 'https_proxy', 'HTTPS_PROXY', 1945 # if.h
1598 'ftp_proxy', 'FTP_PROXY', 'no_proxy', 'NO_PROXY', 1946 IFF_UP = 0x1
1599 'GIT_PROXY_COMMAND'] 1947 IFF_RUNNING = 0x40
1600 exported = False
1601 1948
1602 for v in variables: 1949 # bits/socket.h
1603 if v in os.environ.keys(): 1950 AF_INET = 2
1604 exported = True 1951
1605 else: 1952 # char ifr_name[IFNAMSIZ=16]
1606 v_proxy = d.getVar(v) 1953 ifr_name = struct.pack("@16s", b"lo")
1607 if v_proxy is not None: 1954 def netdev_req(fd, req, data = b""):
1608 os.environ[v] = v_proxy 1955 # Pad and add interface name
1609 exported = True 1956 data = ifr_name + data + (b'\x00' * (16 - len(data)))
1957 # Return all data after interface name
1958 return fcntl.ioctl(fd, req, data)[16:]
1959
1960 with socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_IP) as sock:
1961 fd = sock.fileno()
1962
1963 # struct sockaddr_in ifr_addr { unsigned short family; uint16_t sin_port ; uint32_t in_addr; }
1964 req = struct.pack("@H", AF_INET) + struct.pack("=H4B", 0, 127, 0, 0, 1)
1965 netdev_req(fd, SIOCSIFADDR, req)
1610 1966
1611 return exported 1967 # short ifr_flags
1968 flags = struct.unpack_from('@h', netdev_req(fd, SIOCGIFFLAGS))[0]
1969 flags |= IFF_UP | IFF_RUNNING
1970 netdev_req(fd, SIOCSIFFLAGS, struct.pack('@h', flags))
1612 1971
1972 # struct sockaddr_in ifr_netmask
1973 req = struct.pack("@H", AF_INET) + struct.pack("=H4B", 0, 255, 0, 0, 0)
1974 netdev_req(fd, SIOCSIFNETMASK, req)
1975
1976def disable_network(uid=None, gid=None):
1977 """
1978 Disable networking in the current process if the kernel supports it, else
1979 just return after logging to debug. To do this we need to create a new user
1980 namespace, then map back to the original uid/gid.
1981
1982 Arguments:
1983
1984 - ``uid``: original user id.
1985 - ``gid``: original user group id.
1986
1987 No return value.
1988 """
1989 libc = ctypes.CDLL('libc.so.6')
1990
1991 # From sched.h
1992 # New user namespace
1993 CLONE_NEWUSER = 0x10000000
1994 # New network namespace
1995 CLONE_NEWNET = 0x40000000
1996
1997 if uid is None:
1998 uid = os.getuid()
1999 if gid is None:
2000 gid = os.getgid()
2001
2002 ret = libc.unshare(CLONE_NEWNET | CLONE_NEWUSER)
2003 if ret != 0:
2004 logger.debug("System doesn't support disabling network without admin privs")
2005 return
2006 with open("/proc/self/uid_map", "w") as f:
2007 f.write("%s %s 1" % (uid, uid))
2008 with open("/proc/self/setgroups", "w") as f:
2009 f.write("deny")
2010 with open("/proc/self/gid_map", "w") as f:
2011 f.write("%s %s 1" % (gid, gid))
2012
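A hedged sketch of using this from a forked child so the parent keeps its network::

    pid = os.fork()
    if pid == 0:
        bb.utils.disable_network()
        # network is now isolated here, if the kernel allows unprivileged namespaces
        os._exit(0)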
2013def export_proxies(d):
2014 """Export common proxy variables from the datastore to the environment."""
2015 from bb.fetch2 import get_fetcher_environment
2016 newenv = get_fetcher_environment(d)
2017 for v in newenv:
2018 os.environ[v] = newenv[v]
1613 2019
1614def load_plugins(logger, plugins, pluginpath): 2020def load_plugins(logger, plugins, pluginpath):
1615 def load_plugin(name): 2021 def load_plugin(name):
1616 logger.debug('Loading plugin %s' % name) 2022 logger.debug('Loading plugin %s' % name)
1617 spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] ) 2023 spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] )
1618 if spec: 2024 if spec:
1619 return spec.loader.load_module() 2025 mod = importlib.util.module_from_spec(spec)
2026 spec.loader.exec_module(mod)
2027 return mod
1620 2028
1621 logger.debug('Loading plugins from %s...' % pluginpath) 2029 logger.debug('Loading plugins from %s...' % pluginpath)
1622 2030
@@ -1646,9 +2054,14 @@ class LogCatcher(logging.Handler):
1646 2054
1647def is_semver(version): 2055def is_semver(version):
1648 """ 2056 """
1649 Is the version string following the semver semantic? 2057 Arguments:
2058
2059 - ``version``: the version string.
2060
2061 Returns ``True`` if the version string follow semantic versioning, ``False``
2062 otherwise.
1650 2063
1651 https://semver.org/spec/v2.0.0.html 2064 See https://semver.org/spec/v2.0.0.html.
1652 """ 2065 """
1653 regex = re.compile( 2066 regex = re.compile(
1654 r""" 2067 r"""
@@ -1669,3 +2082,150 @@ def is_semver(version):
1669 return False 2082 return False
1670 2083
1671 return True 2084 return True
2085
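For example::

    bb.utils.is_semver("1.2.3")       # -> True
    bb.utils.is_semver("1.2.3-rc.1")  # -> True
    bb.utils.is_semver("v1.2.3")      # -> False, a leading "v" is not semver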
2086# Wrapper around os.rename which can handle cross device problems
2087# e.g. from container filesystems
2088def rename(src, dst):
2089 try:
2090 os.rename(src, dst)
2091 except OSError as err:
2092 if err.errno == errno.EXDEV:
2093 # Invalid cross-device link error
2094 shutil.move(src, dst)
2095 else:
2096 raise err
2097
2098@contextmanager
2099def environment(**envvars):
2100 """
2101 Context manager to selectively update the environment with the specified mapping.
2102
2103 No return value.
2104 """
2105 backup = dict(os.environ)
2106 try:
2107 os.environ.update(envvars)
2108 yield
2109 finally:
2110 for var in envvars:
2111 if var in backup:
2112 os.environ[var] = backup[var]
2113 elif var in os.environ:
2114 del os.environ[var]
2115
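For example (the variable and command are illustrative)::

    import subprocess
    with bb.utils.environment(LC_ALL="C"):
        subprocess.run(["sort", "input.txt"])  # runs with LC_ALL=C
    # the previous LC_ALL value (or its absence) is restored here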
2116def is_local_uid(uid=''):
2117 """
2118 Check whether uid is a local one or not.
2119 Can't use pwd module since it gets all UIDs, not local ones only.
2120
2121 Arguments:
2122
2123 - ``uid``: user id. If not specified the user id is determined from
2124 ``os.getuid()``.
2125
2126 Returns ``True`` if the user id is local, ``False`` otherwise.
2127 """
2128 if not uid:
2129 uid = os.getuid()
2130 with open('/etc/passwd', 'r') as f:
2131 for line in f:
2132 line_split = line.split(':')
2133 if len(line_split) < 3:
2134 continue
2135 if str(uid) == line_split[2]:
2136 return True
2137 return False
2138
2139def mkstemp(suffix=None, prefix=None, dir=None, text=False):
2140 """
2141 Generates a unique temporary file, independent of time.
2142
2143 mkstemp() in glibc (at least) generates unique file names based on the
2144 current system time. When combined with highly parallel builds, and
2145 operating over NFS (e.g. shared sstate/downloads) this can result in
2146 conflicts and race conditions.
2147
2148 This function adds additional entropy to the file name so that a collision
2149 is independent of time and thus extremely unlikely.
2150
2151 Arguments:
2152
2153 - ``suffix``: filename suffix.
2154 - ``prefix``: filename prefix.
2155 - ``dir``: directory where the file will be created.
2156 - ``text``: if ``True``, the file is opened in text mode.
2157
2158 Returns a tuple containing:
2159
2160 - the file descriptor for the created file
2161 - the name of the file.
2162 """
2163 entropy = "".join(random.choices("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890", k=20))
2164 if prefix:
2165 prefix = prefix + entropy
2166 else:
2167 prefix = tempfile.gettempprefix() + entropy
2168 return tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=dir, text=text)
2169
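For example (directory hypothetical)::

    fd, name = bb.utils.mkstemp(suffix=".lock", dir="/srv/shared-sstate")
    os.close(fd)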
2170def path_is_descendant(descendant, ancestor):
2171 """
2172 Returns ``True`` if the path ``descendant`` is a descendant of ``ancestor``
2173 (including being equivalent to ``ancestor`` itself). Otherwise returns
2174 ``False``.
2175
2176 Correctly accounts for symlinks, bind mounts, etc. by using
2177 ``os.path.samestat()`` to compare paths.
2178
2179 May raise any exception that ``os.stat()`` raises.
2180
2181 Arguments:
2182
2183 - ``descendant``: path to check for being a descendant of ``ancestor``.
2184 - ``ancestor``: path to the ancestor ``descendant`` will be checked
2185 against.
2186 """
2187
2188 ancestor_stat = os.stat(ancestor)
2189
2190 # Recurse up each directory component of the descendant to see if it is
2191 # equivalent to the ancestor
2192 check_dir = os.path.abspath(descendant).rstrip("/")
2193 while check_dir:
2194 check_stat = os.stat(check_dir)
2195 if os.path.samestat(check_stat, ancestor_stat):
2196 return True
2197 check_dir = os.path.dirname(check_dir).rstrip("/")
2198
2199 return False
2200
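For example (paths hypothetical; both must exist since ``os.stat()`` is used)::

    bb.utils.path_is_descendant("/srv/build/tmp/work", "/srv/build")  # True
    bb.utils.path_is_descendant("/srv/build", "/srv/build")           # True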
2201# If we don't have a timeout of some kind and a process/thread exits badly (for example
2202# OOM killed) and held a lock, we'd just hang in the lock futex forever. It is better
2203# we exit at some point than hang. 5 minutes with no progress means we're probably deadlocked.
2204# This function can still deadlock python since it can't signal the other threads to exit
2205# (signals are handled in the main thread) and even os._exit() will wait on non-daemon threads
2206# to exit.
2207@contextmanager
2208def lock_timeout(lock):
2209 try:
2210 s = signal.pthread_sigmask(signal.SIG_BLOCK, signal.valid_signals())
2211 held = lock.acquire(timeout=5*60)
2212 if not held:
2213 bb.server.process.serverlog("Couldn't get the lock for 5 mins, timed out, exiting.\n%s" % traceback.format_stack())
2214 os._exit(1)
2215 yield held
2216 finally:
2217 lock.release()
2218 signal.pthread_sigmask(signal.SIG_SETMASK, s)
2219
2220# A version of lock_timeout without the check that the lock was locked and a shorter timeout
2221@contextmanager
2222def lock_timeout_nocheck(lock):
2223 l = False
2224 try:
2225 s = signal.pthread_sigmask(signal.SIG_BLOCK, signal.valid_signals())
2226 l = lock.acquire(timeout=10)
2227 yield l
2228 finally:
2229 if l:
2230 lock.release()
2231 signal.pthread_sigmask(signal.SIG_SETMASK, s)
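A minimal sketch of the intended use of these lock wrappers::

    import threading

    lk = threading.Lock()
    with bb.utils.lock_timeout(lk):
        pass  # lock held, signals blocked; process exits if acquisition stalls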
diff --git a/bitbake/lib/bb/xattr.py b/bitbake/lib/bb/xattr.py
new file mode 100755
index 0000000000..7b634944a4
--- /dev/null
+++ b/bitbake/lib/bb/xattr.py
@@ -0,0 +1,126 @@
1#! /usr/bin/env python3
2#
3# Copyright 2023 by Garmin Ltd. or its subsidiaries
4#
5# SPDX-License-Identifier: MIT
6
7import sys
8import ctypes
9import os
10import errno
11
12libc = ctypes.CDLL("libc.so.6", use_errno=True)
13fsencoding = sys.getfilesystemencoding()
14
15
16libc.listxattr.argtypes = [ctypes.c_char_p, ctypes.c_char_p, ctypes.c_size_t]
17libc.llistxattr.argtypes = [ctypes.c_char_p, ctypes.c_char_p, ctypes.c_size_t]
18
19
20def listxattr(path, follow=True):
21 func = libc.listxattr if follow else libc.llistxattr
22
23 os_path = os.fsencode(path)
24
25 while True:
26 length = func(os_path, None, 0)
27
28 if length < 0:
29 err = ctypes.get_errno()
30 raise OSError(err, os.strerror(err), str(path))
31
32 if length == 0:
33 return []
34
35 arr = ctypes.create_string_buffer(length)
36
37 read_length = func(os_path, arr, length)
38 if read_length != length:
39 # Race!
40 continue
41
42 return [a.decode(fsencoding) for a in arr.raw.split(b"\x00") if a]
43
44
45libc.getxattr.argtypes = [
46 ctypes.c_char_p,
47 ctypes.c_char_p,
48 ctypes.c_char_p,
49 ctypes.c_size_t,
50]
51libc.lgetxattr.argtypes = [
52 ctypes.c_char_p,
53 ctypes.c_char_p,
54 ctypes.c_char_p,
55 ctypes.c_size_t,
56]
57
58
59def getxattr(path, name, follow=True):
60 func = libc.getxattr if follow else libc.lgetxattr
61
62 os_path = os.fsencode(path)
63 os_name = os.fsencode(name)
64
65 while True:
66 length = func(os_path, os_name, None, 0)
67
68 if length < 0:
69 err = ctypes.get_errno()
70 if err == errno.ENODATA:
71 return None
72 raise OSError(err, os.strerror(err), str(path))
73
74 if length == 0:
75 return ""
76
77 arr = ctypes.create_string_buffer(length)
78
79 read_length = func(os_path, os_name, arr, length)
80 if read_length != length:
81 # Race!
82 continue
83
84 return arr.raw
85
86
87def get_all_xattr(path, follow=True):
88 attrs = {}
89
90 names = listxattr(path, follow)
91
92 for name in names:
93 value = getxattr(path, name, follow)
94 if value is None:
95 # This can happen if a value is erased after listxattr is called,
96 # so ignore it
97 continue
98 attrs[name] = value
99
100 return attrs
101
102
103def main():
104 import argparse
105 from pathlib import Path
106
107 parser = argparse.ArgumentParser()
108 parser.add_argument("path", help="File Path", type=Path)
109
110 args = parser.parse_args()
111
112 attrs = get_all_xattr(args.path)
113
114 for name, value in attrs.items():
115 try:
116 value = value.decode(fsencoding)
117 except UnicodeDecodeError:
118 pass
119
120 print(f"{name} = {value}")
121
122 return 0
123
124
125if __name__ == "__main__":
126 sys.exit(main())