summaryrefslogtreecommitdiffstats
path: root/meta/lib/oe
diff options
context:
space:
mode:
Diffstat (limited to 'meta/lib/oe')
-rw-r--r--meta/lib/oe/__init__.py0
-rw-r--r--meta/lib/oe/buildhistory_analysis.py453
-rw-r--r--meta/lib/oe/cachedpath.py233
-rw-r--r--meta/lib/oe/classextend.py104
-rw-r--r--meta/lib/oe/classutils.py43
-rw-r--r--meta/lib/oe/data.py17
-rw-r--r--meta/lib/oe/distro_check.py383
-rw-r--r--meta/lib/oe/license.py116
-rw-r--r--meta/lib/oe/lsb.py81
-rw-r--r--meta/lib/oe/maketype.py99
-rw-r--r--meta/lib/oe/package.py96
-rw-r--r--meta/lib/oe/packagedata.py94
-rw-r--r--meta/lib/oe/packagegroup.py29
-rw-r--r--meta/lib/oe/patch.py441
-rw-r--r--meta/lib/oe/path.py261
-rw-r--r--meta/lib/oe/prservice.py126
-rw-r--r--meta/lib/oe/qa.py111
-rw-r--r--meta/lib/oe/sstatesig.py161
-rw-r--r--meta/lib/oe/terminal.py218
-rw-r--r--meta/lib/oe/tests/__init__.py0
-rw-r--r--meta/lib/oe/tests/test_license.py68
-rw-r--r--meta/lib/oe/tests/test_path.py89
-rw-r--r--meta/lib/oe/tests/test_types.py62
-rw-r--r--meta/lib/oe/tests/test_utils.py51
-rw-r--r--meta/lib/oe/types.py153
-rw-r--r--meta/lib/oe/utils.py152
26 files changed, 3641 insertions, 0 deletions
diff --git a/meta/lib/oe/__init__.py b/meta/lib/oe/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/meta/lib/oe/__init__.py
diff --git a/meta/lib/oe/buildhistory_analysis.py b/meta/lib/oe/buildhistory_analysis.py
new file mode 100644
index 0000000000..86b5a12347
--- /dev/null
+++ b/meta/lib/oe/buildhistory_analysis.py
@@ -0,0 +1,453 @@
1# Report significant differences in the buildhistory repository since a specific revision
2#
3# Copyright (C) 2012 Intel Corporation
4# Author: Paul Eggleton <paul.eggleton@linux.intel.com>
5#
6# Note: requires GitPython 0.3.1+
7#
8# You can use this from the command line by running scripts/buildhistory-diff
9#
10
11import sys
12import os.path
13import difflib
14import git
15import re
16import bb.utils
17
18
# Fields whose values are whitespace-separated lists (added/removed items
# are reported individually)
list_fields = ['DEPENDS', 'RPROVIDES', 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RREPLACES', 'RCONFLICTS', 'FILES', 'FILELIST', 'USER_CLASSES', 'IMAGE_CLASSES', 'IMAGE_FEATURES', 'IMAGE_LINGUAS', 'IMAGE_INSTALL', 'BAD_RECOMMENDATIONS']
# List fields where only a change of ordering is worth mentioning
list_order_fields = ['PACKAGES']
# Fields that fall back to a computed default value when unset
defaultval_fields = ['PKG', 'PKGE', 'PKGV', 'PKGR']
# Fields compared numerically, reported as a percentage change
numeric_fields = ['PKGSIZE', 'IMAGESIZE']
# Fields to monitor
monitor_fields = ['RPROVIDES', 'RDEPENDS', 'RRECOMMENDS', 'RREPLACES', 'RCONFLICTS', 'PACKAGES', 'FILELIST', 'PKGSIZE', 'IMAGESIZE', 'PKG', 'PKGE', 'PKGV', 'PKGR']
# Percentage change to alert for numeric fields
monitor_numeric_threshold = 10
# Image files to monitor (note that image-info.txt is handled separately)
img_monitor_files = ['installed-package-names.txt', 'files-in-image.txt']
# Related context fields for reporting (note: PE, PV & PR are always
# reported for monitored package fields)
related_fields = {
    'RDEPENDS': ['DEPENDS'],
    'RRECOMMENDS': ['DEPENDS'],
    'FILELIST': ['FILES'],
    'PKGSIZE': ['FILELIST'],
    'files-in-image.txt': ['installed-package-names.txt', 'USER_CLASSES',
                           'IMAGE_CLASSES', 'ROOTFS_POSTPROCESS_COMMAND',
                           'IMAGE_POSTPROCESS_COMMAND'],
    'installed-package-names.txt': ['IMAGE_FEATURES', 'IMAGE_LINGUAS',
                                    'IMAGE_INSTALL', 'BAD_RECOMMENDATIONS'],
}
38
39
class ChangeRecord:
    """One significant difference between two buildhistory revisions.

    path        - repo-relative directory the change was found in
    fieldname   - variable name (e.g. 'RDEPENDS') or file name that changed
    oldvalue    - value at the older revision
    newvalue    - value at the newer revision
    monitored   - True if this field is one of monitor_fields

    self.related collects ChangeRecords linked via related_fields (rendered
    indented below this record); self.filechanges, when set, holds a list of
    FileChange objects for whole-file-list comparisons instead of values."""

    def __init__(self, path, fieldname, oldvalue, newvalue, monitored):
        self.path = path
        self.fieldname = fieldname
        self.oldvalue = oldvalue
        self.newvalue = newvalue
        self.monitored = monitored
        self.related = []
        self.filechanges = None

    def __str__(self):
        return self._str_internal(True)

    def _str_internal(self, outer):
        # outer=True: top-level record, gets a 'path: ' prefix;
        # outer=False: rendered as a nested related record, no prefix.
        if outer:
            if '/image-files/' in self.path:
                prefix = '%s: ' % self.path.split('/image-files/')[0]
            else:
                prefix = '%s: ' % self.path
        else:
            prefix = ''

        # Turn a {package: version-constraints} dict into displayable items.
        # NOTE(review): iteritems() is Python-2-only.
        def pkglist_combine(depver):
            pkglist = []
            for k,v in depver.iteritems():
                if v:
                    pkglist.append("%s (%s)" % (k,v))
                else:
                    pkglist.append(k)
            return pkglist

        if self.fieldname in list_fields or self.fieldname in list_order_fields:
            # List-valued field: report removed/added items, or a pure
            # ordering change if the item sets are equal
            if self.fieldname in ['RPROVIDES', 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RREPLACES', 'RCONFLICTS']:
                (depvera, depverb) = compare_pkg_lists(self.oldvalue, self.newvalue)
                aitems = pkglist_combine(depvera)
                bitems = pkglist_combine(depverb)
            else:
                aitems = self.oldvalue.split()
                bitems = self.newvalue.split()
            removed = list(set(aitems) - set(bitems))
            added = list(set(bitems) - set(aitems))

            if removed or added:
                if removed and not bitems:
                    out = '%s: removed all items "%s"' % (self.fieldname, ' '.join(removed))
                else:
                    out = '%s:%s%s' % (self.fieldname, ' removed "%s"' % ' '.join(removed) if removed else '', ' added "%s"' % ' '.join(added) if added else '')
            else:
                out = '%s changed order' % self.fieldname
        elif self.fieldname in numeric_fields:
            # Numeric field: report old/new values plus percentage change
            aval = int(self.oldvalue or 0)
            bval = int(self.newvalue or 0)
            if aval != 0:
                percentchg = ((bval - aval) / float(aval)) * 100
            else:
                percentchg = 100
            out = '%s changed from %s to %s (%s%d%%)' % (self.fieldname, self.oldvalue or "''", self.newvalue or "''", '+' if percentchg > 0 else '', percentchg)
        elif self.fieldname in defaultval_fields:
            out = '%s changed from %s to %s' % (self.fieldname, self.oldvalue, self.newvalue)
            if self.fieldname == 'PKG' and '[default]' in self.newvalue:
                out += ' - may indicate debian renaming failure'
        elif self.fieldname in ['pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm']:
            # Packaging script: show a unified diff of the script body
            if self.oldvalue and self.newvalue:
                out = '%s changed:\n ' % self.fieldname
            elif self.newvalue:
                out = '%s added:\n ' % self.fieldname
            elif self.oldvalue:
                out = '%s cleared:\n ' % self.fieldname
            alines = self.oldvalue.splitlines()
            blines = self.newvalue.splitlines()
            diff = difflib.unified_diff(alines, blines, self.fieldname, self.fieldname, lineterm='')
            # [2:] drops the '---'/'+++' header lines of the unified diff
            out += '\n '.join(list(diff)[2:])
            out += '\n --'
        elif self.fieldname in img_monitor_files or '/image-files/' in self.path:
            # Monitored image file: show FileChange records if we have them,
            # otherwise a unified diff of the file content
            fieldname = self.fieldname
            if '/image-files/' in self.path:
                fieldname = os.path.join('/' + self.path.split('/image-files/')[1], self.fieldname)
                out = 'Changes to %s:\n ' % fieldname
            else:
                if outer:
                    prefix = 'Changes to %s ' % self.path
                out = '(%s):\n ' % self.fieldname
            if self.filechanges:
                out += '\n '.join(['%s' % i for i in self.filechanges])
            else:
                alines = self.oldvalue.splitlines()
                blines = self.newvalue.splitlines()
                diff = difflib.unified_diff(alines, blines, fieldname, fieldname, lineterm='')
                out += '\n '.join(list(diff))
                out += '\n --'
        else:
            # Plain scalar field
            out = '%s changed from "%s" to "%s"' % (self.fieldname, self.oldvalue, self.newvalue)

        # Append related changes indented below; PE/PV/PR context is only
        # shown for top-level records
        if self.related:
            for chg in self.related:
                if not outer and chg.fieldname in ['PE', 'PV', 'PR']:
                    continue
                for line in chg._str_internal(False).splitlines():
                    out += '\n * %s' % line

        return '%s%s' % (prefix, out)
141
class FileChange:
    """A single difference between two file listings (one file's addition,
    removal, or a change of type/permissions/ownership/symlink target)."""

    changetype_add = 'A'
    changetype_remove = 'R'
    changetype_type = 'T'
    changetype_perms = 'P'
    changetype_ownergroup = 'O'
    changetype_link = 'L'

    # Map `ls`-style type characters to human-readable names
    _ftype_names = {
        '-': 'file',
        'd': 'directory',
        'l': 'symlink',
        'c': 'char device',
        'b': 'block device',
        'p': 'fifo',
        's': 'socket',
    }

    def __init__(self, path, changetype, oldvalue = None, newvalue = None):
        self.path = path
        self.changetype = changetype
        self.oldvalue = oldvalue
        self.newvalue = newvalue

    def _ftype_str(self, ftype):
        """Return a readable name for an `ls` type character."""
        return self._ftype_names.get(ftype, 'unknown (%s)' % ftype)

    def __str__(self):
        ctype = self.changetype
        if ctype == self.changetype_add:
            return '%s was added' % self.path
        if ctype == self.changetype_remove:
            return '%s was removed' % self.path
        if ctype == self.changetype_type:
            return '%s changed type from %s to %s' % (
                self.path, self._ftype_str(self.oldvalue), self._ftype_str(self.newvalue))
        if ctype == self.changetype_perms:
            return '%s changed permissions from %s to %s' % (
                self.path, self.oldvalue, self.newvalue)
        if ctype == self.changetype_ownergroup:
            return '%s changed owner/group from %s to %s' % (
                self.path, self.oldvalue, self.newvalue)
        if ctype == self.changetype_link:
            return '%s changed symlink target from %s to %s' % (
                self.path, self.oldvalue, self.newvalue)
        return '%s changed (unknown)' % self.path
189
190
def blob_to_dict(blob):
    """Parse a git blob of 'KEY = value' lines into a {key: value} dict.

    Lines without an '=' are ignored; only the first '=' on a line is
    significant, and both sides are stripped of surrounding whitespace."""
    contents = blob.data_stream.read()
    adict = {}
    for line in contents.splitlines():
        key, sep, value = line.partition('=')
        if sep:
            adict[key.strip()] = value.strip()
    return adict
199
200
def file_list_to_dict(lines):
    """Turn `ls -lR`-style listing lines into a dict keyed by path.

    Each value is [perms, owner, group] plus, for symlinks, the link
    target as a fourth element.  Only the first four whitespace-separated
    fields are split off, so file names containing spaces stay intact.
    The leading '.' of each recorded path is dropped."""
    adict = {}
    for line in lines:
        fields = line.split(None, 4)
        path = fields[4][1:].strip()
        if ' -> ' in path:
            # Symlink entry: 'path -> target'
            pieces = path.split(' -> ')
            adict[pieces[0]] = fields[0:3] + [pieces[1]]
        else:
            adict[path] = fields[0:3]
    return adict
216
217
def compare_file_lists(alines, blines):
    """Compare two files-in-image.txt style listings.

    alines/blines - lists of `ls -lR`-style lines (see file_list_to_dict)

    Returns a list of FileChange objects covering removed and added files
    and, for files present in both, changes of type, permissions,
    owner/group and symlink target."""
    adict = file_list_to_dict(alines)
    bdict = file_list_to_dict(blines)
    filechanges = []
    # items() rather than the Python-2-only iteritems() so this also runs
    # under Python 3; behaviour on Python 2 is unchanged.
    for path, splitv in adict.items():
        # pop() so that whatever remains in bdict afterwards must be new
        newsplitv = bdict.pop(path, None)
        if newsplitv:
            # Check type (first character of the permission string)
            oldvalue = splitv[0][0]
            newvalue = newsplitv[0][0]
            if oldvalue != newvalue:
                filechanges.append(FileChange(path, FileChange.changetype_type, oldvalue, newvalue))
            # Check permissions
            oldvalue = splitv[0][1:]
            newvalue = newsplitv[0][1:]
            if oldvalue != newvalue:
                filechanges.append(FileChange(path, FileChange.changetype_perms, oldvalue, newvalue))
            # Check owner/group
            oldvalue = '%s/%s' % (splitv[1], splitv[2])
            newvalue = '%s/%s' % (newsplitv[1], newsplitv[2])
            if oldvalue != newvalue:
                filechanges.append(FileChange(path, FileChange.changetype_ownergroup, oldvalue, newvalue))
            # Check symlink target
            if newsplitv[0][0] == 'l':
                if len(splitv) > 3:
                    oldvalue = splitv[3]
                else:
                    # Previously not a symlink, so there was no old target
                    oldvalue = None
                newvalue = newsplitv[3]
                if oldvalue != newvalue:
                    filechanges.append(FileChange(path, FileChange.changetype_link, oldvalue, newvalue))
        else:
            filechanges.append(FileChange(path, FileChange.changetype_remove))

    # Whatever is left over has been added
    for path in bdict:
        filechanges.append(FileChange(path, FileChange.changetype_add))

    return filechanges
257
258
def compare_lists(alines, blines):
    """Return FileChange records for lines that disappeared from alines
    and lines newly present in blines (removals listed first)."""
    old = set(alines)
    new = set(blines)

    changes = [FileChange(item, FileChange.changetype_remove)
               for item in old - new]
    changes.extend(FileChange(item, FileChange.changetype_add)
                   for item in new - old)
    return changes
270
271
def compare_pkg_lists(astr, bstr):
    """Compare two dependency-list strings (as used in RDEPENDS etc.).

    Returns a pair of dicts (old, new) mapping package name to its list of
    version constraints, with entries that are identical - or that differ
    only by a version increase on a matching '>=' / '=' constraint -
    removed from both sides.  If the two returned dicts compare equal
    there is nothing worth reporting."""
    depvera = bb.utils.explode_dep_versions2(astr)
    depverb = bb.utils.explode_dep_versions2(bstr)

    # Strip out changes where the version has increased
    remove = []
    for k in depvera:
        if k in depverb:
            dva = depvera[k]
            dvb = depverb[k]
            if dva and dvb and len(dva) == len(dvb):
                # Since length is the same, sort so that prefixes (e.g. >=) will line up
                dva.sort()
                dvb.sort()
                removeit = True
                for dvai, dvbi in zip(dva, dvb):
                    if dvai != dvbi:
                        aiprefix = dvai.split(' ')[0]
                        biprefix = dvbi.split(' ')[0]
                        if aiprefix == biprefix and aiprefix in ['>=', '=']:
                            # Same comparison operator: keep (i.e. report) the
                            # entry only if the old version is higher than the
                            # new one
                            if bb.utils.vercmp(bb.utils.split_version(dvai), bb.utils.split_version(dvbi)) > 0:
                                removeit = False
                                break
                        else:
                            # Different operator or a non->=/= constraint
                            # changed - always report
                            removeit = False
                            break
                if removeit:
                    remove.append(k)

    # Drop the uninteresting entries from both sides
    for k in remove:
        depvera.pop(k)
        depverb.pop(k)

    return (depvera, depverb)
306
307
def compare_dict_blobs(path, ablob, bblob, report_all):
    """Compare two 'latest' / 'image-info.txt' style blobs of KEY = value
    lines and return a list of ChangeRecord objects for differing fields.

    path       - repository path the blobs came from (basename is used as
                 the package name)
    ablob      - blob at the older revision
    bblob      - blob at the newer revision
    report_all - if False, filter out insignificant changes (see below)
    """
    adict = blob_to_dict(ablob)
    bdict = blob_to_dict(bblob)

    pkgname = os.path.basename(path)
    # Substituted for empty PKG/PKGE/PKGV/PKGR values so the report shows
    # what the effective default was
    defaultvals = {}
    defaultvals['PKG'] = pkgname
    defaultvals['PKGE'] = adict.get('PE', '0')
    defaultvals['PKGV'] = adict.get('PV', '')
    defaultvals['PKGR'] = adict.get('PR', '')
    for key in defaultvals:
        defaultvals[key] = '%s [default]' % defaultvals[key]

    changes = []
    keys = list(set(adict.keys()) | set(bdict.keys()))
    for key in keys:
        astr = adict.get(key, '')
        bstr = bdict.get(key, '')
        if astr != bstr:
            if (not report_all) and key in numeric_fields:
                # Skip numeric changes below the percentage threshold
                aval = int(astr or 0)
                bval = int(bstr or 0)
                if aval != 0:
                    percentchg = ((bval - aval) / float(aval)) * 100
                else:
                    percentchg = 100
                if abs(percentchg) < monitor_numeric_threshold:
                    continue
            elif (not report_all) and key in list_fields:
                # Skip FILELIST changes for -dbg packages unless the new
                # list is empty
                if key == "FILELIST" and path.endswith("-dbg") and bstr.strip() != '':
                    continue
                if key in ['RPROVIDES', 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RREPLACES', 'RCONFLICTS']:
                    # Skip if the only difference is a version increase
                    (depvera, depverb) = compare_pkg_lists(astr, bstr)
                    if depvera == depverb:
                        continue
                alist = astr.split()
                alist.sort()
                blist = bstr.split()
                blist.sort()
                # We don't care about the removal of self-dependencies
                if pkgname in alist and not pkgname in blist:
                    alist.remove(pkgname)
                if ' '.join(alist) == ' '.join(blist):
                    continue

            if key in defaultval_fields:
                # Show the effective default for an empty side
                if not astr:
                    astr = defaultvals[key]
                elif not bstr:
                    bstr = defaultvals[key]

            chg = ChangeRecord(path, key, astr, bstr, key in monitor_fields)
            changes.append(chg)
    return changes
362
363
def process_changes(repopath, revision1, revision2 = 'HEAD', report_all = False):
    """Compare two revisions of a buildhistory git repository.

    repopath   - path to the (non-bare) buildhistory repository
    revision1  - older revision (any git commit-ish)
    revision2  - newer revision, defaults to HEAD
    report_all - if True, return all changes rather than only monitored ones

    Returns a list of ChangeRecord objects, with related records linked
    via their 'related' attribute."""
    repo = git.Repo(repopath)
    assert repo.bare == False
    commit = repo.commit(revision1)
    diff = commit.diff(revision2)

    changes = []
    # Modified files: compare the two blob versions
    for d in diff.iter_change_type('M'):
        path = os.path.dirname(d.a_blob.path)
        if path.startswith('packages/'):
            filename = os.path.basename(d.a_blob.path)
            if filename == 'latest':
                changes.extend(compare_dict_blobs(path, d.a_blob, d.b_blob, report_all))
            elif filename.startswith('latest.'):
                # latest.pkg_postinst etc. - report the raw content change
                chg = ChangeRecord(path, filename, d.a_blob.data_stream.read(), d.b_blob.data_stream.read(), True)
                changes.append(chg)
        elif path.startswith('images/'):
            filename = os.path.basename(d.a_blob.path)
            if filename in img_monitor_files:
                if filename == 'files-in-image.txt':
                    alines = d.a_blob.data_stream.read().splitlines()
                    blines = d.b_blob.data_stream.read().splitlines()
                    filechanges = compare_file_lists(alines,blines)
                    if filechanges:
                        chg = ChangeRecord(path, filename, None, None, True)
                        chg.filechanges = filechanges
                        changes.append(chg)
                elif filename == 'installed-package-names.txt':
                    alines = d.a_blob.data_stream.read().splitlines()
                    blines = d.b_blob.data_stream.read().splitlines()
                    filechanges = compare_lists(alines,blines)
                    if filechanges:
                        chg = ChangeRecord(path, filename, None, None, True)
                        chg.filechanges = filechanges
                        changes.append(chg)
                else:
                    chg = ChangeRecord(path, filename, d.a_blob.data_stream.read(), d.b_blob.data_stream.read(), True)
                    changes.append(chg)
            elif filename == 'image-info.txt':
                changes.extend(compare_dict_blobs(path, d.a_blob, d.b_blob, report_all))
            elif '/image-files/' in path:
                # A file shipped inside the image itself changed
                chg = ChangeRecord(path, filename, d.a_blob.data_stream.read(), d.b_blob.data_stream.read(), True)
                changes.append(chg)

    # Look for added preinst/postinst/prerm/postrm
    # (without reporting newly added recipes)
    addedpkgs = []
    addedchanges = []
    for d in diff.iter_change_type('A'):
        path = os.path.dirname(d.b_blob.path)
        if path.startswith('packages/'):
            filename = os.path.basename(d.b_blob.path)
            if filename == 'latest':
                addedpkgs.append(path)
            elif filename.startswith('latest.'):
                # filename[7:] strips the 'latest.' prefix, leaving the
                # script name (e.g. 'pkg_postinst')
                chg = ChangeRecord(path, filename[7:], '', d.b_blob.data_stream.read(), True)
                addedchanges.append(chg)
    # Only report script additions for packages that existed before
    for chg in addedchanges:
        found = False
        for pkg in addedpkgs:
            if chg.path.startswith(pkg):
                found = True
                break
        if not found:
            changes.append(chg)

    # Look for cleared preinst/postinst/prerm/postrm
    for d in diff.iter_change_type('D'):
        path = os.path.dirname(d.a_blob.path)
        if path.startswith('packages/'):
            filename = os.path.basename(d.a_blob.path)
            if filename != 'latest' and filename.startswith('latest.'):
                chg = ChangeRecord(path, filename[7:], d.a_blob.data_stream.read(), '', True)
                changes.append(chg)

    # Link related changes
    for chg in changes:
        if chg.monitored:
            for chg2 in changes:
                # (Check dirname in the case of fields from recipe info files)
                if chg.path == chg2.path or os.path.dirname(chg.path) == chg2.path:
                    if chg2.fieldname in related_fields.get(chg.fieldname, []):
                        chg.related.append(chg2)
                    elif chg.path == chg2.path and chg.path.startswith('packages/') and chg2.fieldname in ['PE', 'PV', 'PR']:
                        chg.related.append(chg2)

    if report_all:
        return changes
    else:
        return [chg for chg in changes if chg.monitored]
diff --git a/meta/lib/oe/cachedpath.py b/meta/lib/oe/cachedpath.py
new file mode 100644
index 0000000000..0840cc4c3f
--- /dev/null
+++ b/meta/lib/oe/cachedpath.py
@@ -0,0 +1,233 @@
1#
2# Based on standard python library functions but avoid
3# repeated stat calls. It's assumed the files will not change from under us
4# so we can cache stat calls.
5#
6
7import os
8import errno
9import stat as statmod
10
class CachedPath(object):
    """Drop-in replacements for os / os.path queries that cache stat(),
    lstat() and normpath() results.  Assumes the filesystem does not
    change underneath the cache; call updatecache() to invalidate entries
    for a path that has been modified."""

    def __init__(self):
        # path -> os.stat_result, or False if the path was inaccessible
        self.statcache = {}
        # path -> os.lstat_result, or False if the path was inaccessible
        self.lstatcache = {}
        # raw path -> os.path.normpath(path)
        self.normpathcache = {}
        return

    def updatecache(self, x):
        """Invalidate cached stat/lstat data for path x."""
        x = self.normpath(x)
        if x in self.statcache:
            del self.statcache[x]
        if x in self.lstatcache:
            del self.lstatcache[x]

    def normpath(self, path):
        """Cached os.path.normpath()."""
        if path in self.normpathcache:
            return self.normpathcache[path]
        newpath = os.path.normpath(path)
        self.normpathcache[path] = newpath
        return newpath

    def _callstat(self, path):
        # Cached os.stat(); failures are cached as False so repeated
        # misses are also cheap.  'path' must already be normalized.
        if path in self.statcache:
            return self.statcache[path]
        try:
            st = os.stat(path)
            self.statcache[path] = st
            return st
        except os.error:
            self.statcache[path] = False
            return False

    # We might as well call lstat and then only
    # call stat as well in the symbolic link case
    # since this turns out to be much more optimal
    # in real world usage of this cache
    def callstat(self, path):
        """Cached os.stat() (follows symlinks); returns False on failure
        instead of raising."""
        path = self.normpath(path)
        self.calllstat(path)
        return self.statcache[path]

    def calllstat(self, path):
        """Cached os.lstat() (does not follow symlinks); returns False on
        failure.  For non-symlinks the lstat result doubles as the stat
        cache entry."""
        path = self.normpath(path)
        if path in self.lstatcache:
            return self.lstatcache[path]
        #bb.error("LStatpath:" + path)
        try:
            lst = os.lstat(path)
            self.lstatcache[path] = lst
            if not statmod.S_ISLNK(lst.st_mode):
                self.statcache[path] = lst
            else:
                self._callstat(path)
            return lst
        except (os.error, AttributeError):
            self.lstatcache[path] = False
            self.statcache[path] = False
            return False

    # This follows symbolic links, so both islink() and isfile() can be true
    # for the same path on systems that support symlinks
    def isfile(self, path):
        """Test whether a path is a regular file"""
        st = self.callstat(path)
        if not st:
            return False
        return statmod.S_ISREG(st.st_mode)

    # Is a path a directory?
    # This follows symbolic links, so both islink() and isdir()
    # can be true for the same path on systems that support symlinks
    def isdir(self, s):
        """Return true if the pathname refers to an existing directory."""
        st = self.callstat(s)
        if not st:
            return False
        return statmod.S_ISDIR(st.st_mode)

    def islink(self, path):
        """Test whether a path is a symbolic link"""
        st = self.calllstat(path)
        if not st:
            return False
        return statmod.S_ISLNK(st.st_mode)

    # Does a path exist?
    # This is false for dangling symbolic links on systems that support them.
    def exists(self, path):
        """Test whether a path exists. Returns False for broken symbolic links"""
        if self.callstat(path):
            return True
        return False

    def lexists(self, path):
        """Test whether a path exists. Returns True for broken symbolic links"""
        if self.calllstat(path):
            return True
        return False

    def stat(self, path):
        """Cached os.stat(); returns False instead of raising on error."""
        return self.callstat(path)

    def lstat(self, path):
        """Cached os.lstat(); returns False instead of raising on error."""
        return self.calllstat(path)

    def walk(self, top, topdown=True, onerror=None, followlinks=False):
        """Directory tree generator mirroring os.walk(), but using the
        cached isdir()/islink() checks."""
        # Matches os.walk, not os.path.walk()

        # We may not have read permission for top, in which case we can't
        # get a list of the files the directory contains. os.path.walk
        # always suppressed the exception then, rather than blow up for a
        # minor reason when (say) a thousand readable directories are still
        # left to visit. That logic is copied here.
        try:
            names = os.listdir(top)
        except os.error as err:
            if onerror is not None:
                onerror(err)
            return

        dirs, nondirs = [], []
        for name in names:
            if self.isdir(os.path.join(top, name)):
                dirs.append(name)
            else:
                nondirs.append(name)

        if topdown:
            yield top, dirs, nondirs
        for name in dirs:
            new_path = os.path.join(top, name)
            if followlinks or not self.islink(new_path):
                for x in self.walk(new_path, topdown, onerror, followlinks):
                    yield x
        if not topdown:
            yield top, dirs, nondirs

    ## realpath() related functions
    def __is_path_below(self, file, root):
        # 'root' is expected to already end with os.path.sep (see realpath())
        return (file + os.path.sep).startswith(root)

    def __realpath_rel(self, start, rel_path, root, loop_cnt, assume_dir):
        """Calculates real path of symlink 'start' + 'rel_path' below
        'root'; no part of 'start' below 'root' must contain symlinks. """
        have_dir = True

        for d in rel_path.split(os.path.sep):
            if not have_dir and not assume_dir:
                raise OSError(errno.ENOENT, "no such directory %s" % start)

            if d == os.path.pardir: # '..'
                if len(start) >= len(root):
                    # do not follow '..' before root
                    start = os.path.dirname(start)
                else:
                    # emit warning?
                    pass
            else:
                (start, have_dir) = self.__realpath(os.path.join(start, d),
                                                    root, loop_cnt, assume_dir)

        assert(self.__is_path_below(start, root))

        return start

    def __realpath(self, file, root, loop_cnt, assume_dir):
        # Resolve 'file' (below 'root') following symlinks; returns
        # (resolved path, is-a-directory).  loop_cnt bounds the number of
        # symlink hops so that link loops are detected.
        while self.islink(file) and len(file) >= len(root):
            if loop_cnt == 0:
                raise OSError(errno.ELOOP, file)

            loop_cnt -= 1
            target = os.path.normpath(os.readlink(file))

            if not os.path.isabs(target):
                tdir = os.path.dirname(file)
                assert(self.__is_path_below(tdir, root))
            else:
                # Absolute link targets are interpreted relative to 'root'
                tdir = root

            file = self.__realpath_rel(tdir, target, root, loop_cnt, assume_dir)

        try:
            is_dir = self.isdir(file)
        except:
            is_dir = False

        return (file, is_dir)

    def realpath(self, file, root, use_physdir = True, loop_cnt = 100, assume_dir = False):
        """ Returns the canonical path of 'file' with assuming a
        toplevel 'root' directory. When 'use_physdir' is set, all
        preceding path components of 'file' will be resolved first;
        this flag should be set unless it is guaranteed that there is
        no symlink in the path. When 'assume_dir' is not set, missing
        path components will raise an ENOENT error"""

        root = os.path.normpath(root)
        file = os.path.normpath(file)

        if not root.endswith(os.path.sep):
            # letting root end with '/' makes some things easier
            root = root + os.path.sep

        if not self.__is_path_below(file, root):
            raise OSError(errno.EINVAL, "file '%s' is not below root" % file)

        try:
            if use_physdir:
                file = self.__realpath_rel(root, file[(len(root) - 1):], root, loop_cnt, assume_dir)
            else:
                file = self.__realpath(file, root, loop_cnt, assume_dir)[0]
        except OSError as e:
            if e.errno == errno.ELOOP:
                # make ELOOP more readable; without catching it, there will
                # be printed a backtrace with 100s of OSError exceptions
                # else
                raise OSError(errno.ELOOP,
                              "too much recursions while resolving '%s'; loop in '%s'" %
                              (file, e.strerror))

            raise

        return file
diff --git a/meta/lib/oe/classextend.py b/meta/lib/oe/classextend.py
new file mode 100644
index 0000000000..e2ae7e9f94
--- /dev/null
+++ b/meta/lib/oe/classextend.py
@@ -0,0 +1,104 @@
class ClassExtender(object):
    """Remap recipe, package and dependency names into an extended
    namespace (e.g. prefixing everything with 'nativesdk-'), updating the
    relevant variables in the datastore 'd' as it goes."""

    def __init__(self, extname, d):
        self.extname = extname
        self.d = d
        # Filled in by rename_packages(): [oldname, newname] pairs
        self.pkgs_mapping = []

    def extend_name(self, name):
        """Return 'name' with the extension prefix applied (kernel and
        rtld entries are left untouched; virtual/ providers keep their
        'virtual/' prefix in front of the extension)."""
        if name == "virtual/kernel" or name.startswith(("kernel-", "rtld")):
            return name
        suffix = "-" + self.extname
        if name.endswith(suffix):
            # Note: replace() removes every occurrence of the suffix
            # string, not just the trailing one (historical behaviour)
            name = name.replace(suffix, "")
        if name.startswith("virtual/"):
            subs = name.split("/", 1)[1]
            if subs.startswith(self.extname):
                return name
            return "virtual/" + self.extname + "-" + subs
        if name.startswith(self.extname):
            return name
        return self.extname + "-" + name

    def map_variable(self, varname, setvar = True):
        """Apply extend_name() to every item of a space-separated variable;
        optionally write the result back to the datastore."""
        value = self.d.getVar(varname, True)
        if not value:
            return ""
        remapped = " ".join(self.extend_name(item) for item in value.split())
        if setvar:
            self.d.setVar(varname, remapped)
        return remapped

    def map_regexp_variable(self, varname, setvar = True):
        """Like map_variable(), but items may be anchored regexps: a
        leading '^' is preserved and the prefix inserted after it."""
        value = self.d.getVar(varname, True)
        if not value:
            return ""

        def remap(entry):
            if entry.startswith("^" + self.extname):
                return entry
            if entry.startswith("^"):
                return "^" + self.extname + "-" + entry[1:]
            return self.extend_name(entry)

        remapped = " ".join(remap(entry) for entry in value.split())
        if setvar:
            self.d.setVar(varname, remapped)
        return remapped

    def map_depends(self, dep):
        """Remap a single dependency name, leaving host/cross tooling
        dependencies untouched."""
        if dep.endswith(("-native", "-native-runtime", "-crosssdk")):
            return dep
        if 'nativesdk-' in dep or 'cross-canadian' in dep:
            return dep
        return self.extend_name(dep)

    def map_depends_variable(self, varname, suffix = ""):
        """Remap every dependency in a versioned dependency variable
        (optionally the per-package variant varname_suffix) in place."""
        if suffix:
            varname = "%s_%s" % (varname, suffix)
        deps = self.d.getVar(varname, True)
        if not deps:
            return
        depdict = bb.utils.explode_dep_versions2(deps)
        remapped = {}
        for dep, constraint in depdict.items():
            remapped[self.map_depends(dep)] = constraint
        self.d.setVar(varname, bb.utils.join_deps(remapped, False))

    def map_packagevars(self):
        """Remap the per-package dependency variables for every package
        (plus the unsuffixed globals, via the trailing empty string)."""
        for pkg in (self.d.getVar("PACKAGES", True).split() + [""]):
            for varname in ("RDEPENDS", "RRECOMMENDS", "RSUGGESTS",
                            "RPROVIDES", "RREPLACES", "RCONFLICTS", "PKG"):
                self.map_depends_variable(varname, pkg)

    def rename_packages(self):
        """Rewrite PACKAGES with extended names, recording the old->new
        mapping in self.pkgs_mapping for rename_package_variables()."""
        for pkg in (self.d.getVar("PACKAGES", True) or "").split():
            if pkg.startswith(self.extname):
                # Already extended: map the unprefixed name to it
                self.pkgs_mapping.append([pkg.split(self.extname + "-")[1], pkg])
            else:
                self.pkgs_mapping.append([pkg, self.extend_name(pkg)])

        self.d.setVar("PACKAGES", " ".join(mapping[1] for mapping in self.pkgs_mapping))

    def rename_package_variables(self, variables):
        """Rename each per-package variable (VAR_oldpkg -> VAR_newpkg) for
        every entry recorded by rename_packages()."""
        for oldpkg, newpkg in self.pkgs_mapping:
            for varname in variables:
                self.d.renameVar("%s_%s" % (varname, oldpkg), "%s_%s" % (varname, newpkg))
96
class NativesdkClassExtender(ClassExtender):
    """ClassExtender variant for nativesdk: cross recipes also pass
    through unchanged, and toolchain recipes map to -crosssdk variants."""

    def map_depends(self, dep):
        # Host/cross tooling keeps its name
        if dep.endswith(("-native", "-native-runtime", "-cross", "-crosssdk")):
            return dep
        if 'nativesdk-' in dep:
            return dep
        # Compiler recipes get a dedicated -crosssdk variant
        if dep.endswith(("-gcc-intermediate", "-gcc-initial", "-gcc", "-g++")):
            return dep + "-crosssdk"
        return self.extend_name(dep)
diff --git a/meta/lib/oe/classutils.py b/meta/lib/oe/classutils.py
new file mode 100644
index 0000000000..58188fdd6e
--- /dev/null
+++ b/meta/lib/oe/classutils.py
@@ -0,0 +1,43 @@
class ClassRegistry(type):
    """Maintain a registry of classes, indexed by name.

Note that this implementation requires that the names be unique, as it uses
a dictionary to hold the classes by name.

The name in the registry can be overridden via the 'name' attribute of the
class, and the 'priority' attribute controls priority. The prioritized()
method returns the registered classes in priority order.

Subclasses of ClassRegistry may define an 'implemented' property to exert
control over whether the class will be added to the registry (e.g. to keep
abstract base classes out of the registry)."""
    priority = 0
    # NOTE(review): a nested __metaclass__ is a Python-2-only mechanism (a
    # metaclass for this metaclass); under Python 3 this inner class would
    # be inert, so each registry subclass would no longer get its own
    # 'registry' dict automatically.
    class __metaclass__(type):
        """Give each ClassRegistry their own registry"""
        def __init__(cls, name, bases, attrs):
            cls.registry = {}
            type.__init__(cls, name, bases, attrs)

    def __init__(cls, name, bases, attrs):
        # Runs for every class using a ClassRegistry subclass as its
        # metaclass: register the new class unless it opts out.
        super(ClassRegistry, cls).__init__(name, bases, attrs)
        try:
            # Classes may define 'implemented' (e.g. as a property) to keep
            # abstract bases out of the registry
            if not cls.implemented:
                return
        except AttributeError:
            pass

        try:
            cls.name
        except AttributeError:
            # Default the registry key to the class's own name
            cls.name = name
        cls.registry[cls.name] = cls

    @classmethod
    def prioritized(tcls):
        """Return registered classes sorted by descending 'priority'."""
        return sorted(tcls.registry.values(),
                      key=lambda v: v.priority, reverse=True)

    def unregister(cls):
        # Remove this class from the registry.
        # NOTE(review): relies on Python 2's keys() returning a list copy;
        # under Python 3, deleting while iterating the keys() view would
        # raise RuntimeError.
        for key in cls.registry.keys():
            if cls.registry[key] is cls:
                del cls.registry[key]
diff --git a/meta/lib/oe/data.py b/meta/lib/oe/data.py
new file mode 100644
index 0000000000..4cc0e02968
--- /dev/null
+++ b/meta/lib/oe/data.py
@@ -0,0 +1,17 @@
1import oe.maketype
2
def typed_value(key, d):
    """Construct a value for the specified metadata variable, using its flags
    to determine the type and parameters for construction.

    key - variable name in the datastore
    d   - the bitbake datastore

    Aborts via bb.msg.fatal() if the flags describe an invalid type or the
    value cannot be coerced to it."""
    var_type = d.getVarFlag(key, 'type')
    flags = d.getVarFlags(key)
    if flags is not None:
        # items() rather than the Python-2-only iteritems(), and
        # 'except ... as' below instead of the Python-2-only comma form;
        # both work on Python 2.6+ and match the syntax already used
        # elsewhere in meta/lib/oe (e.g. cachedpath.py).
        flags = dict((flag, d.expand(value))
                     for flag, value in flags.items())
    else:
        flags = {}

    try:
        return oe.maketype.create(d.getVar(key, True) or '', var_type, **flags)
    except (TypeError, ValueError) as exc:
        bb.msg.fatal("Data", "%s: %s" % (key, str(exc)))
diff --git a/meta/lib/oe/distro_check.py b/meta/lib/oe/distro_check.py
new file mode 100644
index 0000000000..8ed5b0ec80
--- /dev/null
+++ b/meta/lib/oe/distro_check.py
@@ -0,0 +1,383 @@
def get_links_from_url(url):
    "Return all the href links found on the web location"

    import urllib, sgmllib

    class LinksParser(sgmllib.SGMLParser):
        """Collects the href target of every <a> tag seen."""

        def __init__(self, verbose=0):
            "Initialise an object passing 'verbose' to the superclass."
            sgmllib.SGMLParser.__init__(self, verbose)
            self.hyperlinks = []

        def parse(self, s):
            "Parse the given string 's'."
            self.feed(s)
            self.close()

        def start_a(self, attributes):
            "Process a hyperlink and its 'attributes'."
            for name, value in attributes:
                if name == "href":
                    # Directory links come back with a trailing '/'.
                    self.hyperlinks.append(value.strip('/'))

        def get_hyperlinks(self):
            "Return the list of hyperlinks."
            return self.hyperlinks

    sock = urllib.urlopen(url)
    page = sock.read()
    sock.close()

    parser = LinksParser()
    parser.parse(page)
    return parser.get_hyperlinks()
34
def find_latest_numeric_release(url):
    """Find the latest listed numeric release on the given url.

    Returns the link text of the highest float-parseable link, or ""
    if none parse."""
    best = 0
    beststr = ""
    for link in get_links_from_url(url):
        try:
            release = float(link)
        except ValueError:
            # Non-numeric links (parent dir, README, ...) are ignored.
            # (The original used a bare 'except' and shadowed builtin 'max'.)
            release = 0
        if release > best:
            best = release
            beststr = link
    return beststr
48
def is_src_rpm(name):
    """Return True when *name* points to a src.rpm file."""
    return name.endswith(".src.rpm")
55
def package_name_from_srpm(srpm):
    """Strip the version/release parts out of a src.rpm filename, returning
    the bare package name.

    The final '-' component (release.arch.src.rpm) is always dropped; every
    other middle component that does not start with a digit is kept as part
    of the name (so "perl-Foo-1.0-1.src.rpm" -> "perl-Foo")."""
    parts = srpm.split('-')
    package_name = parts[0]
    # Note: each middle component is tested independently, exactly as the
    # original did -- not "stop at the first digit".  (The original also
    # shadowed the builtin 'str'; renamed here.)
    for part in parts[1:-1]:
        if not part[0].isdigit():
            package_name += '-' + part
    return package_name
65
def clean_package_list(package_list):
    """Remove duplicate entries from the package list and return it sorted."""
    # The original built a plain dict (shadowing the 'set' builtin), which
    # de-duplicated but -- despite its docstring -- returned the keys
    # unsorted.  sorted(set(...)) delivers what the docstring promised.
    return sorted(set(package_list))
71
72
def get_latest_released_meego_source_package_list():
    "Returns the names of all source packages in the latest meego distro"

    # NOTE(review): reads a pre-generated Meego-1.1 list but reports release
    # "1.0" -- looks inconsistent, preserved as-is from the original.
    package_names = []
    try:
        with open("/tmp/Meego-1.1", "r") as f:
            for line in f:
                # Strip the trailing '\n' and tag with the section name.
                package_names.append(line[:-1] + ":" + "main")
    except IOError:
        pass
    return "1.0", clean_package_list(package_names)

def get_source_package_list_from_url(url, section):
    "Return a sectioned list of package names from a URL list"

    bb.note("Reading %s: %s" % (url, section))
    return [package_name_from_srpm(link) + ":" + section
            for link in get_links_from_url(url)
            if is_src_rpm(link)]
98
def get_latest_released_fedora_source_package_list():
    "Returns the names of all source packages in the latest fedora distro"
    latest = find_latest_numeric_release("http://archive.fedoraproject.org/pub/fedora/linux/releases/")

    pkgs = get_source_package_list_from_url("http://archive.fedoraproject.org/pub/fedora/linux/releases/%s/Fedora/source/SRPMS/" % latest, "main")
    pkgs += get_source_package_list_from_url("http://archive.fedoraproject.org/pub/fedora/linux/updates/%s/SRPMS/" % latest, "updates")

    return latest, clean_package_list(pkgs)

def get_latest_released_opensuse_source_package_list():
    "Returns the names of all source packages in the latest opensuse distro"
    latest = find_latest_numeric_release("http://download.opensuse.org/source/distribution/")

    pkgs = get_source_package_list_from_url("http://download.opensuse.org/source/distribution/%s/repo/oss/suse/src/" % latest, "main")
    pkgs += get_source_package_list_from_url("http://download.opensuse.org/update/%s/rpm/src/" % latest, "updates")

    return latest, clean_package_list(pkgs)

def get_latest_released_mandriva_source_package_list():
    "Returns the names of all source packages in the latest mandriva distro"
    latest = find_latest_numeric_release("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/")

    pkgs = get_source_package_list_from_url("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/%s/SRPMS/main/release/" % latest, "main")
    pkgs += get_source_package_list_from_url("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/%s/SRPMS/main/updates/" % latest, "updates")

    return latest, clean_package_list(pkgs)
131
def find_latest_debian_release(url):
    """Find the latest listed debian release on the given url.

    Returns the release portion (text after "Debian") of the
    lexicographically-highest matching link, or "_NotFound_"."""
    releases = [link for link in get_links_from_url(url)
                if link.startswith("Debian") and ';' not in link]
    if not releases:
        # Replaces the original's bare 'except' around pop() on empty.
        return "_NotFound_"
    # max() is equivalent to the original sort-then-pop.
    return max(releases)[6:]
145
def get_debian_style_source_package_list(url, section):
    """Return the list of package-names stored in the debian style Sources.gz
    file, each tagged with ":<section>"."""
    import urllib
    import tempfile
    import gzip

    # Download to a named temp file so gzip can seek in it.
    sock = urllib.urlopen(url)
    tmpfile = tempfile.NamedTemporaryFile(mode='wb', prefix='oecore.', suffix='.tmp', delete=False)
    tmpfilename = tmpfile.name
    tmpfile.write(sock.read())
    sock.close()
    tmpfile.close()

    bb.note("Reading %s: %s" % (url, section))
    package_names = []
    try:
        f = gzip.open(tmpfilename)
        try:
            for line in f:
                if line[:9] == "Package: ":
                    package_names.append(line[9:-1] + ":" + section) # Also strip the '\n' at the end
        finally:
            # The original leaked this gzip handle.
            f.close()
    finally:
        # Always remove the temp copy, even if the gzip read fails
        # (the original leaked it on error).
        os.unlink(tmpfilename)

    return package_names
167
def get_latest_released_debian_source_package_list():
    "Returns the names of all source packages in the latest debian distro"
    latest = find_latest_debian_release("http://ftp.debian.org/debian/dists/")
    package_names = get_debian_style_source_package_list(
        "http://ftp.debian.org/debian/dists/stable/main/source/Sources.gz", "main")
    package_names += get_debian_style_source_package_list(
        "http://ftp.debian.org/debian/dists/stable-proposed-updates/main/source/Sources.gz", "updates")
    return latest, clean_package_list(package_names)

def find_latest_ubuntu_release(url):
    "Find the latest listed ubuntu release on the given url"
    url += "?C=M;O=D" # Descending Sort by Last Modified
    for link in get_links_from_url(url):
        if link.endswith("-updates"):
            return link[:-8]
    return "_NotFound_"

def get_latest_released_ubuntu_source_package_list():
    "Returns the names of all source packages in the latest ubuntu distro"
    latest = find_latest_ubuntu_release("http://archive.ubuntu.com/ubuntu/dists/")
    package_names = get_debian_style_source_package_list(
        "http://archive.ubuntu.com/ubuntu/dists/%s/main/source/Sources.gz" % latest, "main")
    package_names += get_debian_style_source_package_list(
        "http://archive.ubuntu.com/ubuntu/dists/%s-updates/main/source/Sources.gz" % latest, "updates")
    return latest, clean_package_list(package_names)
201
def create_distro_packages_list(distro_check_dir):
    """Regenerate the per-distro package-name list files under
    <distro_check_dir>/package_lists, one file per distro-release."""
    pkglst_dir = os.path.join(distro_check_dir, "package_lists")
    if not os.path.isdir(pkglst_dir):
        os.makedirs(pkglst_dir)
    # first clear old stuff (the original shadowed the 'file' builtin here)
    for entry in os.listdir(pkglst_dir):
        os.unlink(os.path.join(pkglst_dir, entry))

    per_distro_functions = [
        ["Debian", get_latest_released_debian_source_package_list],
        ["Ubuntu", get_latest_released_ubuntu_source_package_list],
        ["Fedora", get_latest_released_fedora_source_package_list],
        ["OpenSuSE", get_latest_released_opensuse_source_package_list],
        ["Mandriva", get_latest_released_mandriva_source_package_list],
        ["Meego", get_latest_released_meego_source_package_list]
        ]

    from datetime import datetime
    begin = datetime.now()
    for name, fetcher in per_distro_functions:
        release, package_list = fetcher()
        bb.note("Distro: %s, Latest Release: %s, # src packages: %d" % (name, release, len(package_list)))
        package_list_file = os.path.join(pkglst_dir, name + "-" + release)
        f = open(package_list_file, "w+b")
        try:
            for pkg in package_list:
                f.write(pkg + "\n")
        finally:
            # Ensure the file is closed even if a write fails.
            f.close()
    end = datetime.now()
    delta = end - begin
    # (typo "generatiosn" fixed in the log message)
    bb.note("package_list generation took this much time: %d seconds" % delta.seconds)
233
def update_distro_data(distro_check_dir, datetime):
    """
    If distro packages list data is old then rebuild it.
    The operation has to be protected by a lock so that
    only one thread performs it at a time.

    'datetime' is the current build datetime string; its first 8
    characters (the date) are compared against the saved stamp.
    """
    if not os.path.isdir (distro_check_dir):
        try:
            bb.note ("Making new directory: %s" % distro_check_dir)
            os.makedirs (distro_check_dir)
        except OSError:
            raise Exception('Unable to create directory %s' % (distro_check_dir))


    datetime_file = os.path.join(distro_check_dir, "build_datetime")
    saved_datetime = "_invalid_"
    import fcntl
    f = None
    try:
        if not os.path.exists(datetime_file):
            open(datetime_file, 'w+b').close() # touch the file so that the next open won't fail

        f = open(datetime_file, "r+b")
        fcntl.lockf(f, fcntl.LOCK_EX)
        saved_datetime = f.read()
        if saved_datetime[0:8] != datetime[0:8]:
            bb.note("The build datetime did not match: saved:%s current:%s" % (saved_datetime, datetime))
            bb.note("Regenerating distro package lists")
            create_distro_packages_list(distro_check_dir)
            f.seek(0)
            f.write(datetime)

    # Python 2's open() raises IOError (not OSError), which the original
    # 'except OSError' missed.
    except (IOError, OSError):
        raise Exception('Unable to read/write this file: %s' % (datetime_file))
    finally:
        # Guard against 'f' never having been opened -- the original raised
        # NameError here if the open above failed.
        if f is not None:
            fcntl.lockf(f, fcntl.LOCK_UN)
            f.close()
270
def compare_in_distro_packages_list(distro_check_dir, d):
    """Return the list of "<Distro>-<section>" entries (plus DISTRO_PN_ALIAS
    tokens) for which this recipe's name appears in the cached per-distro
    package lists under distro_check_dir/package_lists."""
    if not os.path.isdir(distro_check_dir):
        raise Exception("compare_in_distro_packages_list: invalid distro_check_dir passed")

    localdata = bb.data.createCopy(d)
    pkglst_dir = os.path.join(distro_check_dir, "package_lists")
    matching_distros = []
    pn = d.getVar('PN', True)
    recipe_name = d.getVar('PN', True)
    bb.note("Checking: %s" % pn)

    # NOTE(review): trim_dict is never used below -- looks like dead code.
    trim_dict = dict({"-native":"-native", "-cross":"-cross", "-initial":"-initial"})

    # Strip recipe-variant suffixes/prefixes so the base recipe name is
    # compared, and re-point OVERRIDES at the base recipe in localdata.
    if pn.find("-native") != -1:
        pnstripped = pn.split("-native")
        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
        bb.data.update_data(localdata)
        recipe_name = pnstripped[0]

    if pn.startswith("nativesdk-"):
        pnstripped = pn.split("nativesdk-")
        localdata.setVar('OVERRIDES', "pn-" + pnstripped[1] + ":" + d.getVar('OVERRIDES', True))
        bb.data.update_data(localdata)
        recipe_name = pnstripped[1]

    if pn.find("-cross") != -1:
        pnstripped = pn.split("-cross")
        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
        bb.data.update_data(localdata)
        recipe_name = pnstripped[0]

    if pn.find("-initial") != -1:
        pnstripped = pn.split("-initial")
        localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True))
        bb.data.update_data(localdata)
        recipe_name = pnstripped[0]

    bb.note("Recipe: %s" % recipe_name)
    # DISTRO_PN_ALIAS: space-separated tokens, either bare distro names or
    # "distro=alias" pairs.
    tmp = localdata.getVar('DISTRO_PN_ALIAS', True)

    distro_exceptions = dict({"OE-Core":'OE-Core', "OpenedHand":'OpenedHand', "Intel":'Intel', "Upstream":'Upstream', "Windriver":'Windriver', "OSPDT":'OSPDT Approved', "Poky":'poky'})

    # Bare tokens that name a known "exception" distro match directly.
    # NOTE(review): shadows builtins 'list' and 'str'; also a bare token not
    # present in distro_exceptions would raise KeyError here -- confirm that
    # DISTRO_PN_ALIAS only ever carries known names or "x=y" pairs.
    if tmp:
        list = tmp.split(' ')
        for str in list:
            if str and str.find("=") == -1 and distro_exceptions[str]:
                matching_distros.append(str)

    # "distro=alias" pairs map a lowercased distro name to the package name
    # it uses for this recipe.
    distro_pn_aliases = {}
    if tmp:
        list = tmp.split(' ')
        for str in list:
            if str.find("=") != -1:
                (dist, pn_alias) = str.split('=')
                distro_pn_aliases[dist.strip().lower()] = pn_alias.strip()

    # Scan each cached "<Distro>-<release>" list for the (possibly aliased)
    # package name.  NOTE(review): shadows the 'file' builtin, and on a match
    # f.close() runs twice (once before break, once after the loop) --
    # harmless but worth confirming the intent.
    for file in os.listdir(pkglst_dir):
        (distro, distro_release) = file.split("-")
        f = open(os.path.join(pkglst_dir, file), "rb")
        for line in f:
            (pkg, section) = line.split(":")
            if distro.lower() in distro_pn_aliases:
                pn = distro_pn_aliases[distro.lower()]
            else:
                pn = recipe_name
            if pn == pkg:
                matching_distros.append(distro + "-" + section[:-1]) # strip the \n at the end
                f.close()
                break
        f.close()


    # NOTE(review): this re-appends every raw alias token (including
    # "distro=alias" pairs) to the result -- presumably intentional so the
    # report shows the aliases, but confirm.
    if tmp != None:
        list = tmp.split(' ')
        for item in list:
            matching_distros.append(item)
    bb.note("Matching: %s" % matching_distros)
    return matching_distros
349
def create_log_file(d, logname):
    """Create an empty, DATETIME-stamped log file in LOG_DIR, point the
    un-stamped name at it via a symlink, set LOG_FILE and return the path."""
    logpath = d.getVar('LOG_DIR', True)
    bb.utils.mkdirhier(logpath)
    logfn, logsuffix = os.path.splitext(logname)
    logfile = os.path.join(logpath, "%s.%s%s" % (logfn, d.getVar('DATETIME', True), logsuffix))
    if not os.path.exists(logfile):
        slogfile = os.path.join(logpath, logname)
        if os.path.exists(slogfile):
            os.remove(slogfile)
        # Create the empty file natively instead of shelling out to
        # 'touch' via shell=True; this also behaves correctly for paths
        # containing spaces or shell metacharacters.
        open(logfile, 'a').close()
        os.symlink(logfile, slogfile)
    d.setVar('LOG_FILE', logfile)
    return logfile
364
365
def save_distro_check_result(result, datetime, result_file, d):
    """Append "<PN>,<result items...>" as one line to result_file, holding
    an exclusive lock around the write."""
    pn = d.getVar('PN', True)
    logdir = d.getVar('LOG_DIR', True)
    if not logdir:
        bb.error("LOG_DIR variable is not defined, can't write the distro_check results")
        return
    if not os.path.isdir(logdir):
        os.makedirs(logdir)

    line = ",".join([pn] + [item for item in result])

    import fcntl
    f = open(result_file, "a")
    fcntl.lockf(f, fcntl.LOCK_EX)
    f.seek(0, os.SEEK_END) # seek to the end of file
    f.write(line + "\n")
    fcntl.lockf(f, fcntl.LOCK_UN)
    f.close()
diff --git a/meta/lib/oe/license.py b/meta/lib/oe/license.py
new file mode 100644
index 0000000000..340da61102
--- /dev/null
+++ b/meta/lib/oe/license.py
@@ -0,0 +1,116 @@
1# vi:sts=4:sw=4:et
2"""Code for parsing OpenEmbedded license strings"""
3
4import ast
5import re
6from fnmatch import fnmatchcase as fnmatch
7
class LicenseError(Exception):
    """Base class for all license-string errors raised by this module."""
    pass

class LicenseSyntaxError(LicenseError):
    """Raised when a license string fails to parse as an expression."""
    def __init__(self, licensestr, exc):
        # Keep the offending string and the underlying SyntaxError for display.
        self.licensestr = licensestr
        self.exc = exc
        LicenseError.__init__(self)

    def __str__(self):
        return "error in '%s': %s" % (self.licensestr, self.exc)

class InvalidLicense(LicenseError):
    """Raised for a token that is neither a license name nor an operator."""
    def __init__(self, license):
        self.license = license
        LicenseError.__init__(self)

    def __str__(self):
        return "invalid characters in license '%s'" % self.license
27
# Splits a license string on operator/grouping characters (&, |, parens, space),
# keeping the separators because of the capturing group.
license_operator = re.compile('([&|() ])')
# One valid license name: alphanumerics plus '.', '+', '_', '-' (full match).
license_pattern = re.compile('[a-zA-Z0-9.+_\-]+$')
30
class LicenseVisitor(ast.NodeVisitor):
    """Syntax tree visitor which can accept OpenEmbedded license strings"""
    def visit_string(self, licensestr):
        """Parse *licensestr* into a Python AST and visit it.

        Adjacent license names with no operator between them get an
        implicit '&'.  Raises InvalidLicense for tokens that are neither
        a license name nor an operator."""
        new_elements = []
        # Materialize the split as a list: it is indexed below, and on
        # Python 3 a bare filter() object is not subscriptable (identical
        # behavior on Python 2, where filter() returned a list anyway).
        elements = [x for x in license_operator.split(licensestr) if x.strip()]
        for pos, element in enumerate(elements):
            if license_pattern.match(element):
                if pos > 0 and license_pattern.match(elements[pos-1]):
                    new_elements.append('&')
                # Quote the name so it parses as a string literal.
                element = '"' + element + '"'
            elif not license_operator.match(element):
                raise InvalidLicense(element)
            new_elements.append(element)

        self.visit(ast.parse(' '.join(new_elements)))
46
class FlattenVisitor(LicenseVisitor):
    """Flatten a license tree (parsed from a string) by selecting one of each
    set of OR options, in the way the user specifies"""
    def __init__(self, choose_licenses):
        LicenseVisitor.__init__(self)
        self.choose_licenses = choose_licenses
        self.licenses = []

    def visit_Str(self, node):
        # Leaf: a quoted license name.
        self.licenses.append(node.s)

    def visit_BinOp(self, node):
        # Only '|' gets special treatment; everything else is walked as-is.
        if not isinstance(node.op, ast.BitOr):
            self.generic_visit(node)
            return

        left = FlattenVisitor(self.choose_licenses)
        left.visit(node.left)
        right = FlattenVisitor(self.choose_licenses)
        right.visit(node.right)

        # Let the caller-supplied policy pick which side of the OR to keep.
        self.licenses.extend(self.choose_licenses(left.licenses, right.licenses))
70
def flattened_licenses(licensestr, choose_licenses):
    """Given a license string and choose_licenses function, return a flat list of licenses"""
    flatten = FlattenVisitor(choose_licenses)
    try:
        flatten.visit_string(licensestr)
    except SyntaxError as exc:
        # Re-raise parse failures with the offending license string attached.
        raise LicenseSyntaxError(licensestr, exc)
    return flatten.licenses
79
def is_included(licensestr, whitelist=None, blacklist=None):
    """Given a license string and whitelist and blacklist, determine if the
    license string matches the whitelist and does not match the blacklist.

    Returns a tuple holding the boolean state and a list of the applicable
    licenses which were excluded (or, when the state is True, the licenses
    which were included).
    """

    def include_license(license):
        return any(fnmatch(license, pattern) for pattern in whitelist)

    def exclude_license(license):
        return any(fnmatch(license, pattern) for pattern in blacklist)

    def choose_licenses(alpha, beta):
        """Select the option in an OR which is the 'best' (has the most
        included licenses)."""
        # Count matches directly; len(filter(...)) only worked on Python 2
        # where filter() returned a list.
        alpha_weight = sum(1 for lic in alpha if include_license(lic))
        beta_weight = sum(1 for lic in beta if include_license(lic))
        if alpha_weight > beta_weight:
            return alpha
        else:
            return beta

    if not whitelist:
        whitelist = ['*']

    if not blacklist:
        blacklist = []

    licenses = flattened_licenses(licensestr, choose_licenses)
    # List comprehensions keep the truth-test below meaningful on Python 3
    # as well (a filter object is always truthy) -- identical results on
    # Python 2, where filter() returned a list.
    excluded = [lic for lic in licenses if exclude_license(lic)]
    included = [lic for lic in licenses if include_license(lic)]
    if excluded:
        return False, excluded
    else:
        return True, included
diff --git a/meta/lib/oe/lsb.py b/meta/lib/oe/lsb.py
new file mode 100644
index 0000000000..b53f361035
--- /dev/null
+++ b/meta/lib/oe/lsb.py
@@ -0,0 +1,81 @@
def release_dict():
    """Return the output of lsb_release -ir as a dictionary"""
    from subprocess import PIPE

    try:
        output, err = bb.process.run(['lsb_release', '-ir'], stderr=PIPE)
    except bb.process.CmdError:
        # lsb_release missing or failed; caller falls back to file parsing.
        return None

    data = {}
    for line in output.splitlines():
        try:
            key, value = line.split(":\t", 1)
        except ValueError:
            # Skip lines that are not "Key:\tValue".
            continue
        data[key] = value
    return data
19
def release_dict_file():
    """ Try to gather LSB release information manually when lsb_release tool is unavailable """
    import re

    data = None
    try:
        if os.path.exists('/etc/lsb-release'):
            data = {}
            with open('/etc/lsb-release') as f:
                for line in f:
                    # Skip blank or malformed lines; the original let the
                    # unpack raise an uncaught ValueError on them.
                    if "=" not in line:
                        continue
                    key, value = line.split("=", 1)
                    data[key] = value.strip()
        elif os.path.exists('/etc/redhat-release'):
            data = {}
            with open('/etc/redhat-release') as f:
                distro = f.readline().strip()
            # e.g. "Fedora release 17 (Beefy Miracle)"
            match = re.match(r'(.*) release (.*) \((.*)\)', distro)
            if match:
                data['DISTRIB_ID'] = match.group(1)
                data['DISTRIB_RELEASE'] = match.group(2)
        elif os.path.exists('/etc/SuSE-release'):
            data = {}
            data['DISTRIB_ID'] = 'SUSE LINUX'
            with open('/etc/SuSE-release') as f:
                for line in f:
                    if line.startswith('VERSION = '):
                        data['DISTRIB_RELEASE'] = line[10:].rstrip()
                        break
        elif os.path.exists('/etc/os-release'):
            data = {}
            with open('/etc/os-release') as f:
                for line in f:
                    if line.startswith('NAME='):
                        data['DISTRIB_ID'] = line[5:].rstrip().strip('"')
                    if line.startswith('VERSION_ID='):
                        data['DISTRIB_RELEASE'] = line[11:].rstrip().strip('"')
    except IOError:
        return None
    return data
58
def distro_identifier(adjust_hook=None):
    """Return a distro identifier string based upon lsb_release -ri,
    with optional adjustment via a hook"""

    distro_id, release = None, None
    lsb_data = release_dict()
    if lsb_data:
        distro_id, release = lsb_data['Distributor ID'], lsb_data['Release']
    else:
        # Fall back to parsing /etc release files directly.
        fallback = release_dict_file()
        if fallback:
            distro_id = fallback['DISTRIB_ID']
            release = fallback.get('DISTRIB_RELEASE', None)

    if adjust_hook:
        distro_id, release = adjust_hook(distro_id, release)
    if not distro_id:
        return "Unknown"

    id_str = '{0}-{1}'.format(distro_id, release) if release else distro_id
    return id_str.replace(' ','-').replace('/','-')
diff --git a/meta/lib/oe/maketype.py b/meta/lib/oe/maketype.py
new file mode 100644
index 0000000000..139f333691
--- /dev/null
+++ b/meta/lib/oe/maketype.py
@@ -0,0 +1,99 @@
1"""OpenEmbedded variable typing support
2
3Types are defined in the metadata by name, using the 'type' flag on a
4variable. Other flags may be utilized in the construction of the types. See
5the arguments of the type's factory for details.
6"""
7
8import inspect
9import types
10
# Registry mapping type name -> factory callable, populated via register().
available_types = {}
12
class MissingFlag(TypeError):
    """Raised when constructing a type whose required flag was not
    provided."""
    def __init__(self, flag, type):
        self.type = type
        self.flag = flag
        super(MissingFlag, self).__init__()

    def __str__(self):
        return "Type '%s' requires flag '%s'" % (self.type, self.flag)
23
def factory(var_type):
    """Return the factory for a specified type.

    Raises TypeError when var_type is None or not registered."""
    if var_type is None:
        raise TypeError("No type specified. Valid types: %s" %
                        ', '.join(available_types))
    if var_type not in available_types:
        raise TypeError("Invalid type '%s':\n Valid types: %s" %
                        (var_type, ', '.join(available_types)))
    return available_types[var_type]

def create(value, var_type, **flags):
    """Create an object of the specified type, given the specified flags and
    string value."""
    obj = factory(var_type)
    known = {}
    # Keep only the flags the factory declares; a missing mandatory flag
    # is an error.
    for flag in obj.flags:
        if flag in flags:
            known[flag] = flags[flag]
        elif flag not in obj.optflags:
            raise MissingFlag(flag, var_type)
    return obj(value, **known)
48
def get_callable_args(obj):
    """Grab all but the first argument of the specified callable, returning
    the list, as well as a set of which of the arguments have default
    values."""
    if type(obj) is type:
        obj = obj.__init__

    # inspect.getargspec() was removed in Python 3.11; prefer
    # getfullargspec when available (both expose .args and .defaults).
    try:
        spec = inspect.getfullargspec(obj)
    except AttributeError:
        spec = inspect.getargspec(obj)
    args, defaults = spec.args, spec.defaults

    flaglist = []
    if args:
        if len(args) > 1 and args[0] == 'self':
            args = args[1:]
        flaglist.extend(args)

    optional = set()
    if defaults:
        # The trailing N args are the ones carrying defaults.
        optional |= set(flaglist[-len(defaults):])
    return flaglist, optional
67
def factory_setup(name, obj):
    """Prepare a factory for use: attach flag metadata and a name."""
    args, optional = get_callable_args(obj)
    extra_args = args[1:]
    if extra_args:
        obj.flags = extra_args
        obj.optflags = set(optional)
    else:
        obj.flags = obj.optflags = ()

    # Only set a name when the factory does not already declare one.
    if not hasattr(obj, 'name'):
        obj.name = name
80
def register(name, factory):
    """Register a type, given its name and a factory callable.

    Determines the required and optional flags from the factory's
    arguments."""
    # NOTE(review): the 'factory' parameter shadows the module-level
    # factory() function; harmless here since that function is not called
    # in this scope.
    factory_setup(name, factory)
    available_types[factory.name] = factory
88
89
# Register every public callable in the stdlib 'types' module as a type.
for name in dir(types):
    if not name.startswith('_'):
        obj = getattr(types, name)
        if callable(obj):
            register(name, obj)
diff --git a/meta/lib/oe/package.py b/meta/lib/oe/package.py
new file mode 100644
index 0000000000..9a0ddb8536
--- /dev/null
+++ b/meta/lib/oe/package.py
@@ -0,0 +1,96 @@
def runstrip(arg):
    # Function to strip a single file, called from split_and_strip_files below
    # A working 'file' (one which works on the target architecture)
    #
    # The elftype is a bit pattern (explained in split_and_strip_files) to tell
    # us what type of file we're processing...
    # 4 - executable
    # 8 - shared library
    # 16 - kernel module

    # (the original also imported the unused Python-2-only 'commands' module)
    import stat, subprocess

    (file, elftype, strip) = arg

    # Temporarily grant read+write when either is missing; the original
    # condition ("not W_OK or R_OK") was true for nearly every readable
    # file, chmod'ing files that needed no change.
    newmode = None
    if not os.access(file, os.W_OK) or not os.access(file, os.R_OK):
        origmode = os.stat(file)[stat.ST_MODE]
        newmode = origmode | stat.S_IWRITE | stat.S_IREAD
        os.chmod(file, newmode)

    extraflags = ""

    # kernel module
    if elftype & 16:
        extraflags = "--strip-debug --remove-section=.comment --remove-section=.note --preserve-dates"
    # .so and shared library
    elif ".so" in file and elftype & 8:
        extraflags = "--remove-section=.comment --remove-section=.note --strip-unneeded"
    # shared or executable:
    elif elftype & 8 or elftype & 4:
        extraflags = "--remove-section=.comment --remove-section=.note"

    stripcmd = "'%s' %s '%s'" % (strip, extraflags, file)
    bb.debug(1, "runstrip: %s" % stripcmd)

    ret = subprocess.call(stripcmd, shell=True)

    # Restore the original permissions if we changed them.
    if newmode:
        os.chmod(file, origmode)

    if ret:
        bb.error("runstrip: '%s' strip command failed" % stripcmd)

    return
45
46
def file_translate(file):
    """Encode characters that are special in rpm dependency file names."""
    # Order matters: '@' must be escaped first so the '@...@' markers
    # inserted by the later replacements are not re-escaped.
    ft = file
    for char, marker in (("@", "@at@"),
                         (" ", "@space@"),
                         ("\t", "@tab@"),
                         ("[", "@openbrace@"),
                         ("]", "@closebrace@"),
                         ("_", "@underscore@")):
        ft = ft.replace(char, marker)
    return ft
55
def filedeprunner(arg):
    """Collect per-file Provides/Requires for a package by running the
    rpmdeps helper over its files.

    arg is a (pkg, pkgfiles, rpmdeps, pkgdest) tuple; returns
    (pkg, provides, requires), the latter two mapping each translated file
    name to a list of dependency strings."""
    import re

    (pkg, pkgfiles, rpmdeps, pkgdest) = arg
    provides = {}
    requires = {}

    # Matches version constraints like ">= 1.2" so they can be wrapped
    # in parentheses below.
    r = re.compile(r'[<>=]+ +[^ ]*')

    def process_deps(pipe, pkg, pkgdest, provides, requires):
        for line in pipe:
            f = line.split(" ", 1)[0].strip()
            line = line.split(" ", 1)[1].strip()

            if line.startswith("Requires:"):
                i = requires
            elif line.startswith("Provides:"):
                i = provides
            else:
                continue

            # Strip the package staging prefix off the file path.
            file = f.replace(pkgdest + "/" + pkg, "")
            file = file_translate(file)
            value = line.split(":", 1)[1].strip()
            value = r.sub(r'(\g<0>)', value)

            # Skip rpmlib-internal and bare python dependencies.
            if value.startswith("rpmlib("):
                continue
            if value == "python":
                continue
            if file not in i:
                i[file] = []
            i[file].append(value)

        return provides, requires

    dep_pipe = os.popen(rpmdeps + " " + " ".join(pkgfiles))
    try:
        provides, requires = process_deps(dep_pipe, pkg, pkgdest, provides, requires)
    finally:
        # The original leaked this popen handle; always close it.
        dep_pipe.close()

    return (pkg, provides, requires)
diff --git a/meta/lib/oe/packagedata.py b/meta/lib/oe/packagedata.py
new file mode 100644
index 0000000000..cd5f0445f5
--- /dev/null
+++ b/meta/lib/oe/packagedata.py
@@ -0,0 +1,94 @@
1import codecs
2
def packaged(pkg, d):
    """Return True when a readable '.packaged' stamp exists for *pkg*."""
    return os.access(get_subpkgedata_fn(pkg, d) + '.packaged', os.R_OK)
5
def read_pkgdatafile(fn):
    """Parse a pkgdata file into a dict of variable name -> decoded value.

    Returns an empty dict when the file is missing or unreadable."""
    pkgdata = {}

    def decode(s):
        # Values are stored string-escaped; undo that here.
        c = codecs.getdecoder("string_escape")
        return c(s)[0]

    if os.access(fn, os.R_OK):
        import re
        pattern = re.compile("([^:]+):\s*(.*)")
        with open(fn, 'r') as f:
            for line in f:
                match = pattern.match(line)
                if match:
                    pkgdata[match.group(1)] = decode(match.group(2))

    return pkgdata
25
def get_subpkgedata_fn(pkg, d):
    """Return the per-package pkgdata path: PKGDATA_DIR/runtime/<pkg>."""
    return d.expand('${PKGDATA_DIR}/runtime/%s' % pkg)

def has_subpkgdata(pkg, d):
    """Return True when the per-package pkgdata file exists and is readable."""
    return os.access(get_subpkgedata_fn(pkg, d), os.R_OK)

def read_subpkgdata(pkg, d):
    """Parse the per-package pkgdata file into a dict (empty if missing)."""
    return read_pkgdatafile(get_subpkgedata_fn(pkg, d))

def has_pkgdata(pn, d):
    """Return True when the per-recipe pkgdata file for *pn* is readable."""
    fn = d.expand('${PKGDATA_DIR}/%s' % pn)
    return os.access(fn, os.R_OK)

def read_pkgdata(pn, d):
    """Parse the per-recipe pkgdata file for *pn* into a dict."""
    fn = d.expand('${PKGDATA_DIR}/%s' % pn)
    return read_pkgdatafile(fn)
42
#
# Collapse FOO_pkg variables into FOO
#
def read_subpkgdata_dict(pkg, d):
    """Read per-package pkgdata, stripping the "_<pkg>" suffix off keys."""
    subd = read_pkgdatafile(get_subpkgedata_fn(pkg, d))
    ret = {}
    for var, value in subd.items():
        newvar = var.replace("_" + pkg, "")
        # When both FOO and FOO_<pkg> exist, the suffixed one wins; skip
        # the un-suffixed duplicate.
        if newvar == var and var + "_" + pkg in subd:
            continue
        ret[newvar] = value
    return ret
55
def _pkgmap(d):
    """Return a dictionary mapping package to recipe name."""

    pkgdatadir = d.getVar("PKGDATA_DIR", True)

    pkgmap = {}
    try:
        files = os.listdir(pkgdatadir)
    except OSError:
        bb.warn("No files in %s?" % pkgdatadir)
        files = []

    for pn in files:
        # Only the flat per-recipe files matter; skip subdirectories.
        if os.path.isdir(os.path.join(pkgdatadir, pn)):
            continue
        try:
            pkgdata = read_pkgdatafile(os.path.join(pkgdatadir, pn))
        except OSError:
            continue

        for pkg in (pkgdata.get("PACKAGES") or "").split():
            pkgmap[pkg] = pn

    return pkgmap

def pkgmap(d):
    """Return a dictionary mapping package to recipe name.
    Cache the mapping in the metadata"""

    cached = d.getVar("__pkgmap_data", False)
    if cached is None:
        cached = _pkgmap(d)
        d.setVar("__pkgmap_data", cached)
    return cached
90
def recipename(pkg, d):
    """Return the recipe name for the given binary package name.

    Returns None when the package is unknown."""
    return pkgmap(d).get(pkg)
diff --git a/meta/lib/oe/packagegroup.py b/meta/lib/oe/packagegroup.py
new file mode 100644
index 0000000000..b04c45a1af
--- /dev/null
+++ b/meta/lib/oe/packagegroup.py
@@ -0,0 +1,29 @@
1import itertools
2
def is_optional(group, d):
    """Return True when PACKAGE_GROUP_<group> carries the 'optional' flag."""
    return bool(d.getVarFlag("PACKAGE_GROUP_%s" % group, "optional"))
5
def packages(groups, d):
    """Yield every package listed in PACKAGE_GROUP_<group> for each group."""
    for group in groups:
        value = d.getVar("PACKAGE_GROUP_%s" % group, True) or ""
        for pkg in value.split():
            yield pkg
10
def required_packages(groups, d):
    """Packages of every group that is not marked optional."""
    mandatory = [grp for grp in groups if not is_optional(grp, d)]
    return packages(mandatory, d)

def optional_packages(groups, d):
    """Packages of every group that is marked optional."""
    opt = [grp for grp in groups if is_optional(grp, d)]
    return packages(opt, d)

def active_packages(features, d):
    """All packages for the given groups: required first, then optional."""
    return itertools.chain(required_packages(features, d),
                           optional_packages(features, d))
22
def active_recipes(features, d):
    """Yield the recipe name of each active package that maps to one."""
    import oe.packagedata

    for pkg in active_packages(features, d):
        name = oe.packagedata.recipename(pkg, d)
        if name:
            yield name
diff --git a/meta/lib/oe/patch.py b/meta/lib/oe/patch.py
new file mode 100644
index 0000000000..59abd0af19
--- /dev/null
+++ b/meta/lib/oe/patch.py
@@ -0,0 +1,441 @@
1import oe.path
2
class NotFoundError(bb.BBHandledException):
    """Raised when a path expected on disk does not exist."""

    def __init__(self, path):
        self.path = path

    def __str__(self):
        message = "Error: %s not found." % self.path
        return message
9
class CmdError(bb.BBHandledException):
    """Raised when an external command exits with a non-zero status."""

    def __init__(self, exitstatus, output):
        self.status = exitstatus
        self.output = output

    def __str__(self):
        message = "Command Error: exit status: %d Output:\n%s" % (
            self.status, self.output)
        return message
17
18
def runcmd(args, dir = None):
    """Shell-quote *args*, run them as one command and return its output.

    If *dir* is given, the command runs with that directory as cwd (the
    previous cwd is restored afterwards, even on error).  Raises
    NotFoundError if *dir* does not exist and CmdError on a non-zero
    exit status.
    """
    import pipes

    if dir:
        olddir = os.path.abspath(os.curdir)
        if not os.path.exists(dir):
            raise NotFoundError(dir)
        os.chdir(dir)
        # print("cwd: %s -> %s" % (olddir, dir))

    try:
        # Quote each argument so the joined string is safe for the shell
        args = [ pipes.quote(str(arg)) for arg in args ]
        cmd = " ".join(args)
        # print("cmd: %s" % cmd)
        (exitstatus, output) = oe.utils.getstatusoutput(cmd)
        if exitstatus != 0:
            # getstatusoutput() reports the raw wait() status; shift by 8
            # to recover the conventional exit code
            raise CmdError(exitstatus >> 8, output)
        return output

    finally:
        if dir:
            os.chdir(olddir)
41
class PatchError(Exception):
    """Generic error raised while importing or manipulating patch sets."""

    def __init__(self, msg):
        self.msg = msg

    def __str__(self):
        text = "Patch Error: %s" % self.msg
        return text
48
class PatchSet(object):
    """Abstract base for an ordered collection of patches rooted at a
    directory.  Subclasses provide the actual apply/unapply mechanics;
    each patch is represented as a dict ('file', 'remote', 'strippath',
    'filemd5', ...)."""

    # Values filled into each imported patch dict when the caller did
    # not supply them
    defaults = {
        "strippath": 1
    }

    def __init__(self, dir, d):
        self.dir = dir
        self.d = d
        self.patches = []        # ordered list of patch dicts
        self._current = None     # index of the last applied patch, or None

    def current(self):
        # Index of the most recently applied patch (None when none applied)
        return self._current

    def Clean(self):
        """
        Clean out the patch set. Generally includes unapplying all
        patches and wiping out all associated metadata.
        """
        raise NotImplementedError()

    def Import(self, patch, force):
        """Normalise the *patch* dict in place: resolve its local file
        (fetching 'remote' URLs via bb.fetch2), fill in defaults and
        record the file's md5 so later modification can be detected."""
        if not patch.get("file"):
            if not patch.get("remote"):
                raise PatchError("Patch file must be specified in patch import.")
            else:
                patch["file"] = bb.fetch2.localpath(patch["remote"], self.d)

        for param in PatchSet.defaults:
            if not patch.get(param):
                patch[param] = PatchSet.defaults[param]

        # NOTE(review): when 'remote' is set this re-resolves the local
        # path (already done above if 'file' was unset), this time also
        # expanding metadata variables in the result -- confirm intended
        if patch.get("remote"):
            patch["file"] = bb.data.expand(bb.fetch2.localpath(patch["remote"], self.d), self.d)

        patch["filemd5"] = bb.utils.md5_file(patch["file"])

    def Push(self, force):
        raise NotImplementedError()

    def Pop(self, force):
        raise NotImplementedError()

    def Refresh(self, remote = None, all = None):
        raise NotImplementedError()
94
95
class PatchTree(PatchSet):
    """PatchSet implementation that applies patches with patch(1) and
    tracks applied patches in a quilt-style <dir>/patches/series file
    ("<name>,<strippath>" per line)."""

    def __init__(self, dir, d):
        PatchSet.__init__(self, dir, d)
        self.patchdir = os.path.join(self.dir, 'patches')
        self.seriespath = os.path.join(self.dir, 'patches', 'series')
        bb.utils.mkdirhier(self.patchdir)

    def _appendPatchFile(self, patch, strippath):
        """Record *patch* in the series file and keep a copy of it in the
        patches directory so it can be unapplied later."""
        with open(self.seriespath, 'a') as f:
            # fix: strippath defaults to the int 1 (PatchSet.defaults);
            # concatenating it to a str raised TypeError
            f.write(os.path.basename(patch) + "," + str(strippath) + "\n")
        shellcmd = ["cat", patch, ">" , self.patchdir + "/" + os.path.basename(patch)]
        runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)

    def _removePatch(self, p):
        # Reverse-apply the patch described by a series line "<file>,<strip>"
        patch = {}
        patch['file'] = p.split(",")[0]
        patch['strippath'] = p.split(",")[1]
        self._applypatch(patch, False, True)

    def _removePatchFile(self, all = False):
        """Unapply the topmost patch (or all of them) and rewrite the
        series file accordingly."""
        if not os.path.exists(self.seriespath):
            return
        # fix: the original leaked the file handle via a bare readlines()
        with open(self.seriespath, 'r') as f:
            patches = f.readlines()
        if all:
            for p in reversed(patches):
                self._removePatch(os.path.join(self.patchdir, p.strip()))
            patches = []
        else:
            self._removePatch(os.path.join(self.patchdir, patches[-1].strip()))
            patches.pop()
        with open(self.seriespath, 'w') as f:
            for p in patches:
                f.write(p)

    def Import(self, patch, force = None):
        """Import *patch* and insert it just after the current position."""
        PatchSet.Import(self, patch, force)

        if self._current is not None:
            i = self._current + 1
        else:
            i = 0
        self.patches.insert(i, patch)

    def _applypatch(self, patch, force = False, reverse = False, run = True):
        """Apply (or with reverse=True unapply) *patch* via patch(1).

        run=False returns the argv that would be executed instead of
        running it.  Without force, a --dry-run is attempted first.
        """
        # fix: stringify strippath (may be the int 1) before joining
        shellcmd = ["cat", patch['file'], "|", "patch", "-p", str(patch['strippath'])]
        if reverse:
            shellcmd.append('-R')

        if not run:
            # fix: previously returned the string concatenation
            # "sh" + "-c" + ..., i.e. an unusable "sh-ccat ..." value;
            # return a proper argv list matching QuiltTree._runcmd
            return ["sh", "-c", " ".join(shellcmd)]

        if not force:
            shellcmd.append('--dry-run')

        output = runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)

        if force:
            return

        # Dry run succeeded: drop --dry-run and apply for real
        shellcmd.pop()
        output = runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)

        if not reverse:
            self._appendPatchFile(patch['file'], patch['strippath'])

        return output

    def Push(self, force = False, all = False, run = True):
        """Apply the next patch (or with all=True, every patch)."""
        bb.note("self._current is %s" % self._current)
        bb.note("patches is %s" % self.patches)
        if all:
            for i in self.patches:
                bb.note("applying patch %s" % i)
                self._applypatch(i, force)
                # NOTE(review): stores the patch dict itself, not an
                # index, unlike the else-branch below -- confirm intended
                self._current = i
        else:
            if self._current is not None:
                next = self._current + 1
            else:
                next = 0

            bb.note("applying patch %s" % self.patches[next])
            ret = self._applypatch(self.patches[next], force)

            self._current = next
            return ret

    def Pop(self, force = None, all = None):
        """Unapply the topmost patch, or every patch with all=True."""
        if all:
            self._removePatchFile(True)
            self._current = None
        else:
            self._removePatchFile(False)

        if self._current == 0:
            self._current = None

        if self._current is not None:
            self._current = self._current - 1

    def Clean(self):
        """Unapply and forget every patch in the set."""
        self.Pop(all=True)
200
class GitApplyTree(PatchTree):
    """PatchTree variant that applies patches with 'git apply'."""

    def __init__(self, dir, d):
        PatchTree.__init__(self, dir, d)

    def _applypatch(self, patch, force = False, reverse = False, run = True):
        """Apply (or with reverse=True unapply) *patch* via git apply.

        run=False returns the argv that would be executed instead of
        running it.
        """
        shellcmd = ["git", "--git-dir=.", "apply", "-p%s" % patch['strippath']]

        if reverse:
            shellcmd.append('-R')

        shellcmd.append(patch['file'])

        if not run:
            # fix: previously returned "sh" + "-c" + ..., i.e. the
            # unusable string "sh-cgit ..."; return a proper argv list
            return ["sh", "-c", " ".join(shellcmd)]

        return runcmd(["sh", "-c", " ".join(shellcmd)], self.dir)
217
218
class QuiltTree(PatchSet):
    """PatchSet implementation driven by the quilt(1) tool."""

    def _runcmd(self, args, run = True):
        # Always pass the distro-provided quiltrc so behaviour does not
        # depend on the user's own quilt configuration
        quiltrc = self.d.getVar('QUILTRCFILE', True)
        if not run:
            return ["quilt"] + ["--quiltrc"] + [quiltrc] + args
        runcmd(["quilt"] + ["--quiltrc"] + [quiltrc] + args, self.dir)

    def _quiltpatchpath(self, file):
        # Location of the patch file inside the quilt patches directory
        return os.path.join(self.dir, "patches", os.path.basename(file))


    def __init__(self, dir, d):
        PatchSet.__init__(self, dir, d)
        self.initialized = False
        p = os.path.join(self.dir, 'patches')
        if not os.path.exists(p):
            os.makedirs(p)

    def Clean(self):
        """Force-pop everything and remove the series file (best effort)."""
        try:
            self._runcmd(["pop", "-a", "-f"])
            oe.path.remove(os.path.join(self.dir, "patches","series"))
        except Exception:
            # best effort: nothing applied / no series file is fine
            pass
        self.initialized = True

    def InitFromDir(self):
        """Populate self.patches and self._current from an existing quilt
        directory (series file plus 'quilt applied')."""
        # read series -> self.patches
        seriespath = os.path.join(self.dir, 'patches', 'series')
        if not os.path.exists(self.dir):
            raise NotFoundError(self.dir)
        if os.path.exists(seriespath):
            # fix: use open() in a with block instead of the python 2-only
            # file() builtin (which also leaked the file handle)
            with open(seriespath, 'r') as series:
                for line in series.readlines():
                    patch = {}
                    parts = line.strip().split()
                    patch["quiltfile"] = self._quiltpatchpath(parts[0])
                    patch["quiltfilemd5"] = bb.utils.md5_file(patch["quiltfile"])
                    if len(parts) > 1:
                        # series lines look like "name.patch -pN"
                        patch["strippath"] = parts[1][2:]
                    self.patches.append(patch)

        # determine which patches are applied -> self._current
        try:
            output = runcmd(["quilt", "applied"], self.dir)
        except CmdError as exc:
            # fix: sys.exc_value is python 2-only (removed in python 3);
            # use the caught exception object directly
            if exc.output.strip() == "No patches applied":
                return
            else:
                raise
        output = [val for val in output.split('\n') if not val.startswith('#')]
        for patch in self.patches:
            if os.path.basename(patch["quiltfile"]) == output[-1]:
                self._current = self.patches.index(patch)
        self.initialized = True

    def Import(self, patch, force = None):
        """Import *patch*: symlink it into patches/, append it to the
        series file and record its md5."""
        if not self.initialized:
            self.InitFromDir()
        PatchSet.Import(self, patch, force)
        oe.path.symlink(patch["file"], self._quiltpatchpath(patch["file"]), force=True)
        # fix: close the series file deterministically, and stringify
        # strippath (defaults to the int 1, which raised TypeError when
        # concatenated to a str)
        with open(os.path.join(self.dir, "patches","series"), "a") as f:
            f.write(os.path.basename(patch["file"]) + " -p" + str(patch["strippath"]) + "\n")
        patch["quiltfile"] = self._quiltpatchpath(patch["file"])
        patch["quiltfilemd5"] = bb.utils.md5_file(patch["quiltfile"])

        # TODO: determine if the file being imported:
        #      1) is already imported, and is the same
        #      2) is already imported, but differs

        self.patches.insert(self._current or 0, patch)


    def Push(self, force = False, all = False, run = True):
        # quilt push [-f]

        args = ["push"]
        if force:
            args.append("-f")
        if all:
            args.append("-a")
        if not run:
            return self._runcmd(args, run)

        self._runcmd(args)

        if self._current is not None:
            self._current = self._current + 1
        else:
            self._current = 0

    def Pop(self, force = None, all = None):
        # quilt pop [-f]
        args = ["pop"]
        if force:
            args.append("-f")
        if all:
            args.append("-a")

        self._runcmd(args)

        if self._current == 0:
            self._current = None

        if self._current is not None:
            self._current = self._current - 1

    def Refresh(self, **kwargs):
        """Refresh a patch.  With remote=True, copy the refreshed quilt
        patch back over its original local file (file:// URLs only);
        otherwise run 'quilt refresh'."""
        if kwargs.get("remote"):
            patch = self.patches[kwargs["patch"]]
            if not patch:
                raise PatchError("No patch found at index %s in patchset." % kwargs["patch"])
            (type, host, path, user, pswd, parm) = bb.fetch.decodeurl(patch["remote"])
            if type == "file":
                import shutil
                if not patch.get("file") and patch.get("remote"):
                    patch["file"] = bb.fetch2.localpath(patch["remote"], self.d)

                shutil.copyfile(patch["quiltfile"], patch["file"])
            else:
                raise PatchError("Unable to do a remote refresh of %s, unsupported remote url scheme %s." % (os.path.basename(patch["quiltfile"]), type))
        else:
            # quilt refresh
            args = ["refresh"]
            if kwargs.get("quiltfile"):
                args.append(os.path.basename(kwargs["quiltfile"]))
            elif kwargs.get("patch"):
                args.append(os.path.basename(self.patches[kwargs["patch"]]["quiltfile"]))
            self._runcmd(args)
351
class Resolver(object):
    """Interface for strategies that deal with patches failing to apply."""

    def __init__(self, patchset, terminal):
        raise NotImplementedError()

    def Resolve(self):
        raise NotImplementedError()

    def Revert(self):
        raise NotImplementedError()

    def Finalize(self):
        raise NotImplementedError()
364
class NOOPResolver(Resolver):
    """Resolver that makes no attempt to recover: push the patch set and
    propagate any failure straight to the caller."""

    def __init__(self, patchset, terminal):
        self.patchset = patchset
        self.terminal = terminal

    def Resolve(self):
        """Apply the patch set from within its own directory.

        NOTE(review): the previous working directory is restored only
        when Push() fails; on success the process remains chdir'd into
        the patchset directory -- confirm callers expect this.
        """
        olddir = os.path.abspath(os.curdir)
        os.chdir(self.patchset.dir)
        try:
            self.patchset.Push()
        except Exception:
            # (dead "import sys" removed) restore cwd before propagating
            os.chdir(olddir)
            raise
379
380# Patch resolver which relies on the user doing all the work involved in the
381# resolution, with the exception of refreshing the remote copy of the patch
382# files (the urls).
class UserResolver(Resolver):
    """Resolver that drops the user into an interactive shell to fix
    rejected hunks by hand, then refreshes the modified patches."""

    def __init__(self, patchset, terminal):
        self.patchset = patchset
        self.terminal = terminal

    # Force a push in the patchset, then drop to a shell for the user to
    # resolve any rejected hunks
    def Resolve(self):
        olddir = os.path.abspath(os.curdir)
        os.chdir(self.patchset.dir)
        try:
            self.patchset.Push(False)
        except CmdError as v:
            # Patch application failed
            patchcmd = self.patchset.Push(True, False, False)

            t = self.patchset.d.getVar('T', True)
            if not t:
                bb.msg.fatal("Build", "T not set")
            bb.utils.mkdirhier(t)
            import random
            rcfile = "%s/bashrc.%s.%s" % (t, str(os.getpid()), random.random())
            # fix: close the rcfile deterministically before chmod/use
            with open(rcfile, "w") as f:
                f.write("echo '*** Manual patch resolution mode ***'\n")
                f.write("echo 'Dropping to a shell, so patch rejects can be fixed manually.'\n")
                f.write("echo 'Run \"quilt refresh\" when patch is corrected, press CTRL+D to exit.'\n")
                f.write("echo ''\n")
                f.write(" ".join(patchcmd) + "\n")
            # fix: the old "0775" literal is invalid syntax in python 3;
            # 0o775 is accepted by python 2.6+ as well
            os.chmod(rcfile, 0o775)

            self.terminal("bash --rcfile " + rcfile, 'Patch Rejects: Please fix patch rejects manually', self.patchset.d)

            # Construct a new PatchSet after the user's changes, compare the
            # sets, checking patches for modifications, and doing a remote
            # refresh on each.
            oldpatchset = self.patchset
            self.patchset = oldpatchset.__class__(self.patchset.dir, self.patchset.d)

            for patch in self.patchset.patches:
                oldpatch = None
                for opatch in oldpatchset.patches:
                    if opatch["quiltfile"] == patch["quiltfile"]:
                        oldpatch = opatch

                if oldpatch:
                    patch["remote"] = oldpatch["remote"]
                    if patch["quiltfile"] == oldpatch["quiltfile"]:
                        if patch["quiltfilemd5"] != oldpatch["quiltfilemd5"]:
                            bb.note("Patch %s has changed, updating remote url %s" % (os.path.basename(patch["quiltfile"]), patch["remote"]))
                            # user change? remote refresh
                            self.patchset.Refresh(remote=True, patch=self.patchset.patches.index(patch))
                        else:
                            # User did not fix the problem. Abort.
                            raise PatchError("Patch application failed, and user did not fix and refresh the patch.")
        except Exception:
            os.chdir(olddir)
            raise
        os.chdir(olddir)
diff --git a/meta/lib/oe/path.py b/meta/lib/oe/path.py
new file mode 100644
index 0000000000..1310e38fe1
--- /dev/null
+++ b/meta/lib/oe/path.py
@@ -0,0 +1,261 @@
1import errno
2import glob
3import shutil
4import subprocess
5import os.path
6
def join(*paths):
    """Like os.path.join but doesn't treat absolute RHS specially"""
    combined = "/".join(paths)
    return os.path.normpath(combined)
10
def relative(src, dest):
    """Return a relative path leading from *src* to *dest*.

    >>> relative("/usr/bin", "/tmp/foo/bar")
    ../../tmp/foo/bar

    >>> relative("/usr/bin", "/usr/lib")
    ../lib

    >>> relative("/tmp", "/tmp/foo/bar")
    foo/bar
    """

    if hasattr(os.path, "relpath"):
        return os.path.relpath(dest, src)

    # Manual fallback for old pythons without os.path.relpath
    dest_parts = os.path.normpath(dest).split(os.path.sep)
    src_parts = os.path.normpath(src).split(os.path.sep)

    # Length of the leading portion shared by both paths
    shared = len(os.path.commonprefix([dest_parts, src_parts]))

    # Climb from src up to the common ancestor...
    hops = [os.path.pardir] * (len(src_parts) - shared)
    # ...then descend to dest
    if shared < len(dest_parts):
        hops += dest_parts[shared:]

    return os.path.sep.join(hops)
41
def make_relative_symlink(path):
    """ Convert an absolute symlink to a relative one """
    if not os.path.islink(path):
        return
    target = os.readlink(path)
    if not os.path.isabs(target):
        return

    # Strip components off the link path until what remains is a string
    # prefix of the target; count how many levels we climbed.
    ancestor = path
    depth = 0
    while ancestor and not target.startswith(ancestor):
        ancestor = ancestor.rpartition('/')[0]
        depth += 1

    if not ancestor:
        print("make_relative_symlink() Error: unable to find the common ancestor of %s and its target" % path)
        return

    # Path from the shared ancestor down to the target, prefixed with
    # one ".." per directory climbed (excluding the link's own name)
    relpath = target.partition(ancestor)[2].strip('/')
    while depth > 1:
        relpath = "../" + relpath
        depth -= 1

    os.remove(path)
    os.symlink(relpath, path)
68
def format_display(path, metadata):
    """Return *path* shortened relative to TOPDIR when that is shorter."""
    rel = relative(metadata.getVar("TOPDIR", True), path)
    return rel if len(rel) <= len(path) else path
76
def copytree(src, dst):
    """Recursively copy the tree at *src* into *dst* via a tar pipeline."""
    # We could use something like shutil.copytree here but it turns out
    # to be slow. It takes twice as long copying to an empty directory.
    # If dst already has contents performance can be 15 times slower.
    # This way we also preserve hardlinks between files in the tree.

    bb.utils.mkdirhier(dst)
    cmd = 'tar -cf - -C %s -p . | tar -xf - -C %s' % (src, dst)
    check_output(cmd, shell=True, stderr=subprocess.STDOUT)
86
def copyhardlinktree(src, dst):
    """ Make the hard link when possible, otherwise copy. """
    bb.utils.mkdirhier(dst)
    # Nothing to do for an empty source directory
    if os.path.isdir(src) and not len(os.listdir(src)):
        return

    # Hardlinks only work within a single filesystem
    if (os.stat(src).st_dev == os.stat(dst).st_dev):
        # Need to copy directories only with tar first since cp will error if two
        # writers try and create a directory at the same time
        cmd = 'cd %s; find . -type d -print | tar -cf - -C %s -p --files-from - | tar -xf - -C %s' % (src, src, dst)
        check_output(cmd, shell=True, stderr=subprocess.STDOUT)
        if os.path.isdir(src):
            # Hardlink the directory contents, not the directory itself
            src = src + "/*"
        cmd = 'cp -afl %s %s' % (src, dst)
        check_output(cmd, shell=True, stderr=subprocess.STDOUT)
    else:
        # Cross-device: fall back to a plain tree copy
        copytree(src, dst)
104
def remove(path, recurse=True):
    """Equivalent to rm -f or rm -rf.

    *path* may contain glob wildcards; every match is unlinked, and
    directories are removed recursively when *recurse* is set.
    Nonexistent paths are silently ignored.
    """
    for name in glob.glob(path):
        try:
            os.unlink(name)
        except OSError as exc:
            # Linux reports EISDIR for unlink() of a directory; some
            # other POSIX systems report EPERM instead, so accept both
            # (but only when the name really is a directory)
            if recurse and (exc.errno == errno.EISDIR or
                            (exc.errno == errno.EPERM and os.path.isdir(name))):
                shutil.rmtree(name)
            elif exc.errno != errno.ENOENT:
                raise
115
def symlink(source, destination, force=False):
    """Create a symlink at *destination* pointing at *source*.

    With force=True any existing file at *destination* is removed first.
    Recreating an identical symlink is silently accepted.
    """
    try:
        if force:
            remove(destination)
        os.symlink(source, destination)
    except OSError as exc:
        same_link = (exc.errno == errno.EEXIST
                     and os.readlink(destination) == source)
        if not same_link:
            raise
125
class CalledProcessError(Exception):
    """Raised by check_output() when the command exits non-zero."""

    def __init__(self, retcode, cmd, output = None):
        self.retcode = retcode
        self.cmd = cmd
        self.output = output

    def __str__(self):
        template = "Command '%s' returned non-zero exit status %d with output %s"
        return template % (self.cmd, self.retcode, self.output)
133
# Backport of subprocess.check_output (not needed once on python 2.7+)
def check_output(*popenargs, **kwargs):
    r"""Run command with arguments and return its output as a byte string.

    If the exit code was non-zero it raises a CalledProcessError. The
    CalledProcessError object will have the return code in the retcode
    attribute and output in the output attribute.

    The arguments are the same as for the Popen constructor. Example:

    >>> check_output(["ls", "-l", "/dev/null"])
    'crw-rw-rw- 1 root root 1, 3 Oct 18  2007 /dev/null\n'

    The stdout argument is not allowed as it is used internally.
    To capture standard error in the result, use stderr=STDOUT.
    """
    if 'stdout' in kwargs:
        raise ValueError('stdout argument not allowed, it will be overridden.')
    proc = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
    out, _ = proc.communicate()
    status = proc.poll()
    if status:
        cmd = kwargs.get("args")
        if cmd is None:
            cmd = popenargs[0]
        raise CalledProcessError(status, cmd, output=out)
    return out
166
def find(dir, **walkoptions):
    """ Given a directory, recurses into that directory,
    returning all files as absolute paths. """
    for root, _, filenames in os.walk(dir, **walkoptions):
        for name in filenames:
            yield os.path.join(root, name)
174
175
## realpath() related functions
def __is_path_below(file, root):
    # root is expected to end with os.path.sep (see realpath() below);
    # appending a separator to file makes "/a/b" compare below "/a/b/"
    candidate = file + os.path.sep
    return candidate.startswith(root)
179
def __realpath_rel(start, rel_path, root, loop_cnt, assume_dir):
    """Calculates real path of symlink 'start' + 'rel_path' below
    'root'; no part of 'start' below 'root' must contain symlinks. """
    # Tracks whether the path resolved so far refers to a directory;
    # when it does not (and assume_dir is unset) descending further is
    # an error
    have_dir = True

    for d in rel_path.split(os.path.sep):
        if not have_dir and not assume_dir:
            raise OSError(errno.ENOENT, "no such directory %s" % start)

        if d == os.path.pardir: # '..'
            if len(start) >= len(root):
                # do not follow '..' before root
                start = os.path.dirname(start)
            else:
                # emit warning?
                pass
        else:
            # Resolve this component (which may itself be a symlink)
            (start, have_dir) = __realpath(os.path.join(start, d),
                                           root, loop_cnt, assume_dir)

        # Resolution must never escape the confining root
        assert(__is_path_below(start, root))

    return start
203
def __realpath(file, root, loop_cnt, assume_dir):
    """Resolve symlinks in *file*, confined below *root*.

    Returns a (resolved_path, is_dir) tuple.  *loop_cnt* bounds the
    length of the symlink chain; exceeding it raises OSError(ELOOP).
    """
    while os.path.islink(file) and len(file) >= len(root):
        if loop_cnt == 0:
            raise OSError(errno.ELOOP, file)

        loop_cnt -= 1
        target = os.path.normpath(os.readlink(file))

        if not os.path.isabs(target):
            tdir = os.path.dirname(file)
            assert(__is_path_below(tdir, root))
        else:
            # Absolute targets are re-rooted at the confining root
            tdir = root

        file = __realpath_rel(tdir, target, root, loop_cnt, assume_dir)

    try:
        is_dir = os.path.isdir(file)
    except OSError:
        # fix: was "is_dir = false" -- a NameError; also narrow the bare
        # except to OSError so real bugs are not swallowed
        is_dir = False

    return (file, is_dir)
226
def realpath(file, root, use_physdir = True, loop_cnt = 100, assume_dir = False):
    """ Returns the canonical path of 'file' with assuming a
    toplevel 'root' directory. When 'use_physdir' is set, all
    preceding path components of 'file' will be resolved first;
    this flag should be set unless it is guaranteed that there is
    no symlink in the path. When 'assume_dir' is not set, missing
    path components will raise an ENOENT error"""

    root = os.path.normpath(root)
    file = os.path.normpath(file)

    if not root.endswith(os.path.sep):
        # letting root end with '/' makes some things easier
        root = root + os.path.sep

    if not __is_path_below(file, root):
        raise OSError(errno.EINVAL, "file '%s' is not below root" % file)

    try:
        if use_physdir:
            # Resolve every component of the path relative to root
            file = __realpath_rel(root, file[(len(root) - 1):], root, loop_cnt, assume_dir)
        else:
            # Only resolve the final component
            file = __realpath(file, root, loop_cnt, assume_dir)[0]
    except OSError as e:
        if e.errno == errno.ELOOP:
            # make ELOOP more readable; without catching it, there will
            # be printed a backtrace with 100s of OSError exceptions
            # else
            raise OSError(errno.ELOOP,
                          "too much recursions while resolving '%s'; loop in '%s'" %
                          (file, e.strerror))

        # Anything else propagates unchanged
        raise

    return file
diff --git a/meta/lib/oe/prservice.py b/meta/lib/oe/prservice.py
new file mode 100644
index 0000000000..b0cbcb1fbc
--- /dev/null
+++ b/meta/lib/oe/prservice.py
@@ -0,0 +1,126 @@
1
def prserv_make_conn(d, check = False):
    """Open a connection to the PR service named by PRSERV_HOST
    ("host:port") and cache it in the datastore as __PRSERV_CONN.

    With check=True the server is pinged first.  Any failure is fatal.
    """
    import prserv.serv
    # list() so the result is subscriptable under python 3 (where
    # filter() returns a lazy iterator)
    host_params = list(filter(None, (d.getVar("PRSERV_HOST", True) or '').split(':')))
    try:
        conn = None
        conn = prserv.serv.PRServerConnection(host_params[0], int(host_params[1]))
        if check:
            if not conn.ping():
                raise Exception('service not available')
        d.setVar("__PRSERV_CONN",conn)
    except Exception as exc:
        # fix: "except Exception, exc" is python 2-only syntax
        bb.fatal("Connecting to PR service %s:%s failed: %s" % (host_params[0], host_params[1], str(exc)))

    return conn
16
def prserv_dump_db(d):
    """Export the remote PR service database, honouring the
    PRSERV_DUMPOPT_* filter variables.  Returns None on error."""
    if not d.getVar('PRSERV_HOST', True):
        bb.error("Not using network based PR service")
        return None

    conn = d.getVar("__PRSERV_CONN", True)
    if conn is None:
        conn = prserv_make_conn(d)
        if conn is None:
            bb.error("Making connection failed to remote PR service")
            return None

    # Collect the dump filter options, then request the export
    version = d.getVar('PRSERV_DUMPOPT_VERSION', True)
    pkgarch = d.getVar('PRSERV_DUMPOPT_PKGARCH', True)
    checksum = d.getVar('PRSERV_DUMPOPT_CHECKSUM', True)
    colinfo = d.getVar('PRSERV_DUMPOPT_COL', True) == "1"
    return conn.export(version, pkgarch, checksum, colinfo)
35
def prserv_import_db(d, filter_version=None, filter_pkgarch=None, filter_checksum=None):
    """Push locally stored PRAUTO$version$pkgarch$checksum values into
    the remote PR service.

    The optional filter_* arguments restrict which entries are sent.
    Returns the list of (version, pkgarch, checksum, value) tuples that
    were actually imported, or None on connection failure.
    """
    if not d.getVar('PRSERV_HOST', True):
        bb.error("Not using network based PR service")
        return None

    conn = d.getVar("__PRSERV_CONN", True)
    if conn is None:
        conn = prserv_make_conn(d)
        if conn is None:
            bb.error("Making connection failed to remote PR service")
            return None
    #get the entry values
    imported = []
    prefix = "PRAUTO$"
    for v in d.keys():
        if v.startswith(prefix):
            # Split "PRAUTO$version$pkgarch$checksum" from the right so
            # '$' characters inside the version do not confuse parsing
            (remain, sep, checksum) = v.rpartition('$')
            (remain, sep, pkgarch) = remain.rpartition('$')
            (remain, sep, version) = remain.rpartition('$')
            if (remain + '$' != prefix) or \
               (filter_version and filter_version != version) or \
               (filter_pkgarch and filter_pkgarch != pkgarch) or \
               (filter_checksum and filter_checksum != checksum):
               continue
            try:
                value = int(d.getVar(remain + '$' + version + '$' + pkgarch + '$' + checksum, True))
            except BaseException as exc:
                bb.debug("Not valid value of %s:%s" % (v,str(exc)))
                continue
            # importone() returns the value the server now holds; a
            # mismatch means the server already had a larger number
            ret = conn.importone(version,pkgarch,checksum,value)
            if ret != value:
                bb.error("importing(%s,%s,%s,%d) failed. DB may have larger value %d" % (version,pkgarch,checksum,value,ret))
            else:
               imported.append((version,pkgarch,checksum,value))
    return imported
71
def prserv_export_tofile(d, metainfo, datainfo, lockdown, nomax=False):
    """Append a PR database dump to PRSERV_DUMPFILE in bitbake-parsable
    form.

    metainfo  -- optional table/column description, written as comments
    datainfo  -- list of row dicts ('version', 'pkgarch', 'checksum',
                 'value') written as PRAUTO$... assignments
    lockdown  -- when true, also emit PRSERV_LOCKDOWN = "1"
    nomax     -- when false, additionally emit a PRAUTO_<ver>_<arch>
                 line holding the maximum value seen per pkgarch
    """
    import bb.utils
    #initilize the output file
    bb.utils.mkdirhier(d.getVar('PRSERV_DUMPDIR', True))
    df = d.getVar('PRSERV_DUMPFILE', True)
    #write data
    lf = bb.utils.lockfile("%s.lock" % df)
    f = open(df, "a")
    if metainfo:
        #dump column info
        f.write("#PR_core_ver = \"%s\"\n\n" % metainfo['core_ver']);
        f.write("#Table: %s\n" % metainfo['tbl_name'])
        f.write("#Columns:\n")
        f.write("#name      \t type    \t notn    \t dflt    \t pk\n")
        f.write("#----------\t --------\t --------\t --------\t ----\n")
        for i in range(len(metainfo['col_info'])):
            f.write("#%10s\t %8s\t %8s\t %8s\t %4s\n" %
                    (metainfo['col_info'][i]['name'],
                     metainfo['col_info'][i]['type'],
                     metainfo['col_info'][i]['notnull'],
                     metainfo['col_info'][i]['dflt_value'],
                     metainfo['col_info'][i]['pk']))
        f.write("\n")

    if lockdown:
        f.write("PRSERV_LOCKDOWN = \"1\"\n\n")

    if datainfo:
        # idx maps pkgarch -> index of the row with the highest value,
        # used below to emit the per-arch maximum
        idx = {}
        for i in range(len(datainfo)):
            pkgarch = datainfo[i]['pkgarch']
            value = datainfo[i]['value']
            if pkgarch not in idx:
                idx[pkgarch] = i
            elif value > datainfo[idx[pkgarch]]['value']:
                idx[pkgarch] = i
            f.write("PRAUTO$%s$%s$%s = \"%s\"\n" %
                (str(datainfo[i]['version']), pkgarch, str(datainfo[i]['checksum']), str(value)))
        if not nomax:
            for i in idx:
                f.write("PRAUTO_%s_%s = \"%s\"\n" % (str(datainfo[idx[i]]['version']),str(datainfo[idx[i]]['pkgarch']),str(datainfo[idx[i]]['value'])))
    f.close()
    bb.utils.unlockfile(lf)
115
def prserv_check_avail(d):
    """Validate PRSERV_HOST ("host:port") and ping the PR service.

    Calls bb.fatal() when the value is malformed; otherwise opens (and
    pings) a connection via prserv_make_conn().
    """
    # list() so len() works under python 3, where filter() is lazy
    host_params = list(filter(None, (d.getVar("PRSERV_HOST", True) or '').split(':')))
    try:
        if len(host_params) != 2:
            raise TypeError
        else:
            int(host_params[1])
    except (TypeError, ValueError):
        # fix: int() of a non-numeric port raises ValueError, which the
        # original "except TypeError" did not catch (traceback instead
        # of a friendly fatal)
        bb.fatal('Undefined/incorrect PRSERV_HOST value. Format: "host:port"')
    else:
        prserv_make_conn(d, True)
diff --git a/meta/lib/oe/qa.py b/meta/lib/oe/qa.py
new file mode 100644
index 0000000000..d5cdaa0fcd
--- /dev/null
+++ b/meta/lib/oe/qa.py
@@ -0,0 +1,111 @@
class ELFFile:
    """Minimal ELF header reader used by the QA checks.

    Parses just the e_ident block plus e_machine -- enough to report
    word size, endianness, OS ABI and target machine, and to run
    objdump against the file.
    NOTE(review): indexes self.data with single-character strings and
    uses the file() builtin, i.e. this assumes python 2 str-based
    binary reads -- it will not work unchanged on python 3.
    """
    EI_NIDENT = 16

    # byte offsets within e_ident
    EI_CLASS = 4
    EI_DATA = 5
    EI_VERSION = 6
    EI_OSABI = 7
    EI_ABIVERSION = 8

    # possible values for EI_CLASS
    ELFCLASSNONE = 0
    ELFCLASS32 = 1
    ELFCLASS64 = 2

    # possible value for EI_VERSION
    EV_CURRENT = 1

    # possible values for EI_DATA
    ELFDATANONE = 0
    ELFDATA2LSB = 1
    ELFDATA2MSB = 2

    def my_assert(self, expectation, result):
        # Raise when a parsed header value differs from what a valid
        # ELF file requires
        if not expectation == result:
            #print "'%x','%x' %s" % (ord(expectation), ord(result), self.name)
            raise Exception("This does not work as expected")

    def __init__(self, name, bits = 0):
        # bits == 0 means: autodetect the word size from EI_CLASS
        self.name = name
        self.bits = bits
        self.objdump_output = {}  # cache of run_objdump() results, keyed by cmd

    def open(self):
        """Read and validate the header; sets self.bits and self.sex
        (the struct byte-order prefix, '<' or '>')."""
        # Read e_ident (16 bytes) plus 4 more; machine() below unpacks
        # bytes 18:20 (e_machine) from this buffer
        self.file = file(self.name, "r")
        self.data = self.file.read(ELFFile.EI_NIDENT+4)

        # Magic number check: \x7f 'E' 'L' 'F'
        self.my_assert(len(self.data), ELFFile.EI_NIDENT+4)
        self.my_assert(self.data[0], chr(0x7f) )
        self.my_assert(self.data[1], 'E')
        self.my_assert(self.data[2], 'L')
        self.my_assert(self.data[3], 'F')
        if self.bits == 0:
            if self.data[ELFFile.EI_CLASS] == chr(ELFFile.ELFCLASS32):
                self.bits = 32
            elif self.data[ELFFile.EI_CLASS] == chr(ELFFile.ELFCLASS64):
                self.bits = 64
            else:
                # Not 32-bit or 64.. lets assert
                raise Exception("ELF but not 32 or 64 bit.")
        elif self.bits == 32:
            self.my_assert(self.data[ELFFile.EI_CLASS], chr(ELFFile.ELFCLASS32))
        elif self.bits == 64:
            self.my_assert(self.data[ELFFile.EI_CLASS], chr(ELFFile.ELFCLASS64))
        else:
            raise Exception("Must specify unknown, 32 or 64 bit size.")
        self.my_assert(self.data[ELFFile.EI_VERSION], chr(ELFFile.EV_CURRENT) )

        # EI_DATA byte -> struct byte-order prefix
        self.sex = self.data[ELFFile.EI_DATA]
        if self.sex == chr(ELFFile.ELFDATANONE):
            raise Exception("self.sex == ELFDATANONE")
        elif self.sex == chr(ELFFile.ELFDATA2LSB):
            self.sex = "<"
        elif self.sex == chr(ELFFile.ELFDATA2MSB):
            self.sex = ">"
        else:
            raise Exception("Unknown self.sex")

    def osAbi(self):
        # EI_OSABI byte of e_ident
        return ord(self.data[ELFFile.EI_OSABI])

    def abiVersion(self):
        # EI_ABIVERSION byte of e_ident
        return ord(self.data[ELFFile.EI_ABIVERSION])

    def abiSize(self):
        # Word size in bits (32 or 64), as set by open()
        return self.bits

    def isLittleEndian(self):
        return self.sex == "<"

    def isBigEngian(self):
        # (sic: method name typo is part of the public interface)
        return self.sex == ">"

    def machine(self):
        """
        We know the sex stored in self.sex and we
        know the position
        """
        # e_machine is the 16-bit field at offset 18 of the buffer read
        # by open(), decoded with the detected endianness
        import struct
        (a,) = struct.unpack(self.sex+"H", self.data[18:20])
        return a

    def run_objdump(self, cmd, d):
        """Run OBJDUMP with *cmd* against this file, caching the output
        per cmd.  Returns "" on failure."""
        import bb.process
        import sys

        if cmd in self.objdump_output:
            return self.objdump_output[cmd]

        objdump = d.getVar('OBJDUMP', True)

        # Force a stable locale so objdump output is parseable
        env = os.environ.copy()
        env["LC_ALL"] = "C"
        env["PATH"] = d.getVar('PATH', True)

        try:
            bb.note("%s %s %s" % (objdump, cmd, self.name))
            self.objdump_output[cmd] = bb.process.run([objdump, cmd, self.name], env=env, shell=False)[0]
            return self.objdump_output[cmd]
        except Exception as e:
            bb.note("%s %s %s failed: %s" % (objdump, cmd, self.name, e))
            return ""
diff --git a/meta/lib/oe/sstatesig.py b/meta/lib/oe/sstatesig.py
new file mode 100644
index 0000000000..852fb7e64a
--- /dev/null
+++ b/meta/lib/oe/sstatesig.py
@@ -0,0 +1,161 @@
1import bb.siggen
2
def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCache):
    """Decide whether a runtime dependency should affect task signatures.

    Returns True to keep the dependency in the signature calculation,
    False to drop it.
    """
    def is_native(name):
        return name.endswith("-native")

    def is_cross(name):
        return name.endswith(("-cross", "-cross-initial", "-cross-intermediate"))

    def is_nativesdk(name):
        return name.startswith("nativesdk-")

    def inherits_kernel(filename):
        classes = " ".join(dataCache.inherits[filename])
        return ("module-base.bbclass" in classes
                or "linux-kernel-base.bbclass" in classes)

    # A recipe's own inter-task dependencies always count.
    if recipename == depname:
        return True

    # Changes to patch/fetch tooling aren't likely to affect anything built.
    if depname in ('quilt-native', 'subversion-native', 'git-native'):
        return False

    # Leave native/cross/nativesdk recipe dependencies untouched.
    if is_native(recipename) or is_cross(recipename) or is_nativesdk(recipename):
        return True

    # Only target recipes from here on.

    # Target recipes should not re-sign when native/cross/nativesdk helpers change.
    if is_native(depname) or is_cross(depname) or is_nativesdk(depname):
        return False

    # Well-defined machine specific configurations which don't change ABI.
    if depname in siggen.abisaferecipes:
        return False

    # Explicitly declared safe recipe->dependency pairs.
    if "%s->%s" % (recipename, depname) in siggen.saferecipedeps:
        return False

    # Kernel modules are well namespaced. An RRECOMMENDS on kernel-module-*
    # from a non-kernel recipe must not pull in the kernel's (machine
    # specific) checksum.
    depfn = dep.rsplit(".", 1)[0]
    if dataCache and inherits_kernel(depfn) and not inherits_kernel(fn):
        for pkg in dataCache.runrecs[fn]:
            if "kernel-module-" in " ".join(dataCache.runrecs[fn][pkg]):
                return False

    # Default to keeping the dependency.
    return True
55
class SignatureGeneratorOEBasic(bb.siggen.SignatureGeneratorBasic):
    """OE variant of the basic (non-hashed) signature generator.

    Adds OE-specific filtering of runtime dependencies via
    sstate_rundepfilter().
    """
    name = "OEBasic"

    def init_rundepcheck(self, data):
        # Recipes whose machine specific configuration does not change ABI.
        self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE", True) or "").split()
        # Explicit "recipe->dep" pairs declared safe to ignore.
        self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", True) or "").split()
        # (stray dead 'pass' statement removed)

    def rundep_check(self, fn, recipename, task, dep, depname, dataCache = None):
        return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCache)
64
class SignatureGeneratorOEBasicHash(bb.siggen.SignatureGeneratorBasicHash):
    """OE variant of the hashed signature generator.

    Identical dependency filtering to SignatureGeneratorOEBasic, layered on
    the hash-including base class.
    """
    name = "OEBasicHash"

    def init_rundepcheck(self, data):
        # Recipes whose machine specific configuration does not change ABI.
        self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE", True) or "").split()
        # Explicit "recipe->dep" pairs declared safe to ignore.
        self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", True) or "").split()
        # (stray dead 'pass' statement removed)

    def rundep_check(self, fn, recipename, task, dep, depname, dataCache = None):
        return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCache)
73
# Insert these classes into siggen's namespace so it can see and select them
# (presumably chosen through bitbake's signature handler configuration --
# verify against bb.siggen).
bb.siggen.SignatureGeneratorOEBasic = SignatureGeneratorOEBasic
bb.siggen.SignatureGeneratorOEBasicHash = SignatureGeneratorOEBasicHash
77
78
def find_siginfo(pn, taskname, taskhashlist, d):
    """Find signature data files for comparison purposes.

    When taskhashlist is supplied, returns a dict mapping task hash ->
    sigdata/siginfo file path; otherwise returns a dict mapping file path
    -> mtime for candidate files.  Searches the stamps directory first and
    falls back to the sstate cache.
    """

    import fnmatch
    import glob
    # This module does not import 'os' at module level; import it here so
    # the os.path/os.stat/os.walk calls below resolve.
    import os

    if taskhashlist:
        hashfiles = {}

    if not taskname:
        # We have to derive pn and taskname from a "<file>.bb.<task>" key.
        key = pn
        splitit = key.split('.bb.')
        taskname = splitit[1]
        pn = os.path.basename(splitit[0]).split('_')[0]
        if key.startswith('virtual:native:'):
            pn = pn + '-native'

    filedates = {}

    # First search in stamps dir, widening machine/version fields to globs.
    localdata = d.createCopy()
    localdata.setVar('MULTIMACH_TARGET_SYS', '*')
    localdata.setVar('PN', pn)
    localdata.setVar('PV', '*')
    localdata.setVar('PR', '*')
    localdata.setVar('EXTENDPE', '')
    stamp = localdata.getVar('STAMP', True)
    filespec = '%s.%s.sigdata.*' % (stamp, taskname)
    foundall = False
    # (duplicate 'import glob' and unused 'match' local removed)
    for fullpath in glob.glob(filespec):
        if taskhashlist:
            for taskhash in taskhashlist:
                if fullpath.endswith('.%s' % taskhash):
                    hashfiles[taskhash] = fullpath
                    if len(hashfiles) == len(taskhashlist):
                        foundall = True
                        break
        else:
            filedates[fullpath] = os.stat(fullpath).st_mtime

    if len(filedates) < 2 and not foundall:
        # That didn't work, look in sstate-cache
        hashes = taskhashlist or ['*']
        localdata = bb.data.createCopy(d)
        for hashval in hashes:
            localdata.setVar('PACKAGE_ARCH', '*')
            localdata.setVar('TARGET_VENDOR', '*')
            localdata.setVar('TARGET_OS', '*')
            localdata.setVar('PN', pn)
            localdata.setVar('PV', '*')
            localdata.setVar('PR', '*')
            localdata.setVar('BB_TASKHASH', hashval)
            if pn.endswith('-native') or pn.endswith('-crosssdk') or pn.endswith('-cross'):
                localdata.setVar('SSTATE_EXTRAPATH', "${NATIVELSBSTRING}/")
            sstatename = d.getVarFlag(taskname, "sstate-name")
            if not sstatename:
                sstatename = taskname
            filespec = '%s_%s.*.siginfo' % (localdata.getVar('SSTATE_PKG', True), sstatename)

            # With a concrete hash we can narrow to the two-character subdir.
            if hashval != '*':
                sstatedir = "%s/%s" % (d.getVar('SSTATE_DIR', True), hashval[:2])
            else:
                sstatedir = d.getVar('SSTATE_DIR', True)

            filedates = {}
            for root, dirs, files in os.walk(sstatedir):
                for fn in files:
                    fullpath = os.path.join(root, fn)
                    if fnmatch.fnmatch(fullpath, filespec):
                        if taskhashlist:
                            hashfiles[hashval] = fullpath
                        else:
                            filedates[fullpath] = os.stat(fullpath).st_mtime

    if taskhashlist:
        return hashfiles
    else:
        return filedates
160
# Expose the lookup to bb.siggen so bitbake tooling can call it.
bb.siggen.find_siginfo = find_siginfo
diff --git a/meta/lib/oe/terminal.py b/meta/lib/oe/terminal.py
new file mode 100644
index 0000000000..be2a26bedd
--- /dev/null
+++ b/meta/lib/oe/terminal.py
@@ -0,0 +1,218 @@
import logging
import os
import shlex

import bb.event
import bb.utils
import oe.classutils
from bb.process import Popen, ExecutionError
5
6logger = logging.getLogger('BitBake.OE.Terminal')
7
8
class UnsupportedTerminal(Exception):
    """Raised when a terminal class cannot be used on this host."""
    pass
11
class NoSupportedTerminals(Exception):
    """Raised when no registered terminal could be spawned at all."""
    pass
14
15
class Registry(oe.classutils.ClassRegistry):
    """Metaclass that registers each Terminal subclass under its
    lower-cased class name; used by spawn()/prioritized() below."""
    # Command template for the terminal; subclasses must override this.
    command = None

    def __init__(cls, name, bases, attrs):
        super(Registry, cls).__init__(name.lower(), bases, attrs)

    @property
    def implemented(cls):
        # A terminal class is only usable once it defines a command.
        return bool(cls.command)
25
26
class Terminal(Popen):
    """Base class for terminal emulators able to run an interactive shell.

    Instantiating a subclass spawns the terminal process immediately
    (via bb.process.Popen).
    """
    # Python 2 metaclass hook: auto-registers subclasses in Registry.
    __metaclass__ = Registry

    def __init__(self, sh_cmd, title=None, env=None, d=None):
        fmt_sh_cmd = self.format_command(sh_cmd, title)
        try:
            Popen.__init__(self, fmt_sh_cmd, env=env)
        except OSError as exc:
            import errno
            # A missing terminal binary means this terminal type is
            # unavailable on the host, not a fatal error.
            if exc.errno == errno.ENOENT:
                raise UnsupportedTerminal(self.name)
            else:
                raise

    def format_command(self, sh_cmd, title):
        """Substitute {title} and {command} into this terminal's template."""
        fmt = {'title': title or 'Terminal', 'command': sh_cmd}
        # 'command' may be a single template string or a pre-split list
        # (basestring: this module targets Python 2).
        if isinstance(self.command, basestring):
            return shlex.split(self.command.format(**fmt))
        else:
            return [element.format(**fmt) for element in self.command]
47
class XTerminal(Terminal):
    """Base class for X11 terminals; requires a DISPLAY to be usable."""

    def __init__(self, sh_cmd, title=None, env=None, d=None):
        # Bug fix: check for a usable X display *before* spawning.  The
        # original called Terminal.__init__ first and only then raised,
        # leaving a stray terminal process behind when DISPLAY was unset.
        if not os.environ.get('DISPLAY'):
            raise UnsupportedTerminal(self.name)
        Terminal.__init__(self, sh_cmd, title, env, d)
53
class Gnome(XTerminal):
    """GNOME Terminal."""
    command = 'gnome-terminal --disable-factory -t "{title}" -x {command}'
    priority = 2
57
class Mate(XTerminal):
    """MATE Terminal."""
    command = 'mate-terminal --disable-factory -t "{title}" -x {command}'
    priority = 2
61
class Xfce(XTerminal):
    """Xfce Terminal."""
    command = 'Terminal -T "{title}" -e "{command}"'
    priority = 2

    def __init__(self, sh_cmd, title=None, env=None, d=None):
        # Upstream binary name is Terminal but Debian/Ubuntu use
        # xfce4-terminal to avoid possible(?) conflicts.
        # Bug fix: the original passed the xfce4-terminal command *template*
        # as the shell command to run (first argument of XTerminal.__init__)
        # instead of overriding self.command, so the Debian/Ubuntu path
        # launched the wrong thing and discarded the caller's sh_cmd.
        distro = distro_name()
        if distro == 'ubuntu' or distro == 'debian':
            self.command = 'xfce4-terminal -T "{title}" -e "{command}"'
        XTerminal.__init__(self, sh_cmd, title, env, d)
75
class Konsole(XTerminal):
    """KDE Konsole."""
    command = 'konsole -T "{title}" -e {command}'
    priority = 2

    def __init__(self, sh_cmd, title=None, env=None, d=None):
        # Check version: Konsole from KDE 4.x (version 2.x) will not work
        # as a devshell, so refuse it up front.
        vernum = check_konsole_version("konsole")
        if vernum:
            if vernum.split('.')[0] == "2":
                logger.debug(1, 'Konsole from KDE 4.x will not work as devshell, skipping')
                raise UnsupportedTerminal(self.name)
        XTerminal.__init__(self, sh_cmd, title, env, d)
88
class XTerm(XTerminal):
    """Plain xterm; low priority fallback among X terminals."""
    command = 'xterm -T "{title}" -e {command}'
    priority = 1
92
class Rxvt(XTerminal):
    """rxvt; low priority fallback among X terminals."""
    command = 'rxvt -T "{title}" -e {command}'
    priority = 1
96
class Screen(Terminal):
    """GNU screen: starts a detached session the user attaches to manually."""
    command = 'screen -D -m -t "{title}" -S devshell {command}'

    def __init__(self, sh_cmd, title=None, env=None, d=None):
        # Use a per-process session name so concurrent devshells don't clash.
        s_id = "devshell_%i" % os.getpid()
        self.command = "screen -D -m -t \"{title}\" -S %s {command}" % s_id
        Terminal.__init__(self, sh_cmd, title, env, d)
        msg = 'Screen started. Please connect in another terminal with ' \
            '"screen -r %s"' % s_id
        if (d):
            # With a datastore available, fire a UI event so frontends can
            # show how to attach; otherwise just log the instructions.
            bb.event.fire(bb.event.LogExecTTY(msg, "screen -r %s" % s_id,
                0.5, 10), d)
        else:
            logger.warn(msg)
111
class TmuxRunning(Terminal):
    """Open a new pane in the current running tmux window"""
    name = 'tmux-running'
    command = 'tmux split-window "{command}"'
    priority = 2.75

    def __init__(self, sh_cmd, title=None, env=None, d=None):
        # Requires the tmux binary on PATH...
        if not bb.utils.which(os.getenv('PATH'), 'tmux'):
            raise UnsupportedTerminal('tmux is not installed')

        # ...and that we are already inside a tmux session ($TMUX is set).
        if not os.getenv('TMUX'):
            raise UnsupportedTerminal('tmux is not running')

        Terminal.__init__(self, sh_cmd, title, env, d)
126
class Tmux(Terminal):
    """Start a new tmux session and window"""
    command = 'tmux new -d -s devshell -n devshell "{command}"'
    priority = 0.75

    def __init__(self, sh_cmd, title=None, env=None, d=None):
        if not bb.utils.which(os.getenv('PATH'), 'tmux'):
            raise UnsupportedTerminal('tmux is not installed')

        # TODO: consider using a 'devshell' session shared amongst all
        # devshells, if it's already there, add a new window to it.
        window_name = 'devshell-%i' % os.getpid()

        # Per-process session/window name avoids collisions between builds.
        self.command = 'tmux new -d -s {0} -n {0} "{{command}}"'.format(window_name)
        Terminal.__init__(self, sh_cmd, title, env, d)

        attach_cmd = 'tmux att -t {0}'.format(window_name)
        msg = 'Tmux started. Please connect in another terminal with `tmux att -t {0}`'.format(window_name)
        if d:
            # With a datastore available, fire a UI event so frontends can
            # show how to attach; otherwise just log the instructions.
            bb.event.fire(bb.event.LogExecTTY(msg, attach_cmd, 0.5, 10), d)
        else:
            logger.warn(msg)
149
class Custom(Terminal):
    """User-defined terminal taken from OE_TERMINAL_CUSTOMCMD.

    '{command}' is appended to the template if the user did not include it.
    """
    command = 'false' # This is a placeholder
    priority = 3

    def __init__(self, sh_cmd, title=None, env=None, d=None):
        # Only usable when a datastore is supplied and the variable is set.
        self.command = d and d.getVar('OE_TERMINAL_CUSTOMCMD', True)
        if self.command:
            if not '{command}' in self.command:
                self.command += ' {command}'
            Terminal.__init__(self, sh_cmd, title, env, d)
            logger.warn('Custom terminal was started.')
        else:
            logger.debug(1, 'No custom terminal (OE_TERMINAL_CUSTOMCMD) set')
            raise UnsupportedTerminal('OE_TERMINAL_CUSTOMCMD not set')
164
165
def prioritized():
    """Return the registered terminal classes in priority order
    (delegates to oe.classutils.ClassRegistry)."""
    return Registry.prioritized()
168
def spawn_preferred(sh_cmd, title=None, env=None, d=None):
    """Spawn the first supported terminal, by priority."""
    spawned = False
    for terminal_cls in prioritized():
        try:
            spawn(terminal_cls.name, sh_cmd, title, env, d)
        except UnsupportedTerminal:
            # Try the next candidate; only give up when all have failed.
            continue
        spawned = True
        break
    if not spawned:
        raise NoSupportedTerminals()
179
def spawn(name, sh_cmd, title=None, env=None, d=None):
    """Spawn the specified terminal, by name."""
    logger.debug(1, 'Attempting to spawn terminal "%s"', name)
    if name not in Registry.registry:
        raise UnsupportedTerminal(name)
    terminal_cls = Registry.registry[name]

    # Instantiating the class launches the terminal process.
    pipe = terminal_cls(sh_cmd, title, env, d)
    output = pipe.communicate()[0]
    if pipe.returncode != 0:
        raise ExecutionError(sh_cmd, pipe.returncode, output)
192
def check_konsole_version(konsole):
    """Return the konsole version string, or None when the binary is missing."""
    import subprocess as sub
    try:
        proc = sub.Popen(['sh', '-c', '%s --version' % konsole],
                         stdout=sub.PIPE, stderr=sub.PIPE)
        stdout, _ = proc.communicate()
        lines = stdout.rstrip().split('\n')
    except OSError as exc:
        import errno
        if exc.errno != errno.ENOENT:
            raise
        return None
    # Take the version number from the last "Konsole ..." line, if any.
    version = None
    for line in lines:
        if line.startswith('Konsole'):
            version = line.split(' ')[-1]
    return version
210
def distro_name():
    """Return the lower-cased LSB distributor ID, or "unknown" on failure."""
    try:
        proc = Popen(['lsb_release', '-i'])
        stdout, _ = proc.communicate()
        # Output looks like "Distributor ID:\tUbuntu".
        return stdout.split(':')[1].strip().lower()
    except:
        # Deliberately broad: any failure simply means we cannot tell.
        return "unknown"
diff --git a/meta/lib/oe/tests/__init__.py b/meta/lib/oe/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/meta/lib/oe/tests/__init__.py
diff --git a/meta/lib/oe/tests/test_license.py b/meta/lib/oe/tests/test_license.py
new file mode 100644
index 0000000000..c388886184
--- /dev/null
+++ b/meta/lib/oe/tests/test_license.py
@@ -0,0 +1,68 @@
1import unittest
2import oe.license
3
class SeenVisitor(oe.license.LicenseVisitor):
    """License AST visitor that records every license name it encounters."""
    def __init__(self):
        # License identifiers collected in visit order.
        self.seen = []
        oe.license.LicenseVisitor.__init__(self)

    def visit_Str(self, node):
        self.seen.append(node.s)
11
class TestSingleLicense(unittest.TestCase):
    """Parsing of single license identifiers."""
    # Valid single-license strings, including +-suffixed and underscored.
    licenses = [
        "GPLv2",
        "LGPL-2.0",
        "Artistic",
        "MIT",
        "GPLv3+",
        "FOO_BAR",
    ]
    # '/' is not a valid character in a license expression.
    invalid_licenses = ["GPL/BSD"]

    @staticmethod
    def parse(licensestr):
        """Return the license names seen while visiting licensestr."""
        visitor = SeenVisitor()
        visitor.visit_string(licensestr)
        return visitor.seen

    def test_single_licenses(self):
        for license in self.licenses:
            licenses = self.parse(license)
            self.assertListEqual(licenses, [license])

    def test_invalid_licenses(self):
        # The offending license string is carried on the exception.
        for license in self.invalid_licenses:
            with self.assertRaises(oe.license.InvalidLicense) as cm:
                self.parse(license)
            self.assertEqual(cm.exception.license, license)
39
class TestSimpleCombinations(unittest.TestCase):
    """Flattening of simple AND/OR license expressions."""
    # Map of license expression -> expected flattened license list.
    tests = {
        "FOO&BAR": ["FOO", "BAR"],
        "BAZ & MOO": ["BAZ", "MOO"],
        "ALPHA|BETA": ["ALPHA"],
        "BAZ&MOO|FOO": ["FOO"],
        "FOO&BAR|BAZ": ["FOO", "BAR"],
    }
    # Licenses the chooser prefers when resolving '|' alternatives.
    preferred = ["ALPHA", "FOO", "BAR"]

    def test_tests(self):
        def choose(a, b):
            # Pick alternative 'b' only when all its licenses are preferred.
            if all(lic in self.preferred for lic in b):
                return b
            else:
                return a

        for license, expected in self.tests.items():
            licenses = oe.license.flattened_licenses(license, choose)
            self.assertListEqual(licenses, expected)
60
class TestComplexCombinations(TestSimpleCombinations):
    """Nested/parenthesized expressions; reuses test_tests from the base."""
    tests = {
        "FOO & (BAR | BAZ)&MOO": ["FOO", "BAR", "MOO"],
        "(ALPHA|(BETA&THETA)|OMEGA)&DELTA": ["OMEGA", "DELTA"],
        "((ALPHA|BETA)&FOO)|BAZ": ["BETA", "FOO"],
        "(GPL-2.0|Proprietary)&BSD-4-clause&MIT": ["GPL-2.0", "BSD-4-clause", "MIT"],
    }
    preferred = ["BAR", "OMEGA", "BETA", "GPL-2.0"]
diff --git a/meta/lib/oe/tests/test_path.py b/meta/lib/oe/tests/test_path.py
new file mode 100644
index 0000000000..3d41ce157a
--- /dev/null
+++ b/meta/lib/oe/tests/test_path.py
@@ -0,0 +1,89 @@
1import unittest
2import oe, oe.path
3import tempfile
4import os
5import errno
6import shutil
7
class TestRealPath(unittest.TestCase):
    """Tests for oe.path.realpath symlink resolution within a fake root."""

    # Directory tree created beneath a fresh temp root for every test.
    DIRS = [ "a", "b", "etc", "sbin", "usr", "usr/bin", "usr/binX", "usr/sbin", "usr/include", "usr/include/gdbm" ]
    FILES = [ "etc/passwd", "b/file" ]
    # (link name, link target, expected resolution; None = error expected,
    #  False = logical/physical results legitimately differ)
    LINKS = [
        ( "bin",             "/usr/bin",             "/usr/bin" ),
        ( "binX",            "usr/binX",             "/usr/binX" ),
        ( "c",               "broken",               "/broken" ),
        ( "etc/passwd-1",    "passwd",               "/etc/passwd" ),
        ( "etc/passwd-2",    "passwd-1",             "/etc/passwd" ),
        ( "etc/passwd-3",    "/etc/passwd-1",        "/etc/passwd" ),
        ( "etc/shadow-1",    "/etc/shadow",          "/etc/shadow" ),
        ( "etc/shadow-2",    "/etc/shadow-1",        "/etc/shadow" ),
        ( "prog-A",          "bin/prog-A",           "/usr/bin/prog-A" ),
        ( "prog-B",          "/bin/prog-B",          "/usr/bin/prog-B" ),
        ( "usr/bin/prog-C",  "../../sbin/prog-C",    "/sbin/prog-C" ),
        ( "usr/bin/prog-D",  "/sbin/prog-D",         "/sbin/prog-D" ),
        ( "usr/binX/prog-E", "../sbin/prog-E",       None ),
        ( "usr/bin/prog-F",  "../../../sbin/prog-F", "/sbin/prog-F" ),
        ( "loop",            "a/loop",               None ),
        ( "a/loop",          "../loop",              None ),
        ( "b/test",          "file/foo",             "/b/file/foo" ),
    ]

    LINKS_PHYS = [
        ( "./",              "/",                    "" ),
        ( "binX/prog-E",     "/usr/sbin/prog-E",     "/sbin/prog-E" ),
    ]

    # (path, expected OSError errno)
    EXCEPTIONS = [
        ( "loop",   errno.ELOOP ),
        ( "b/test", errno.ENOENT ),
    ]

    def setUp(self):
        self.tmpdir = tempfile.mkdtemp(prefix = "oe-test_path")
        self.root = os.path.join(self.tmpdir, "R")

        # The root itself is a symlink, to exercise physdir resolution.
        os.mkdir(os.path.join(self.tmpdir, "_real"))
        os.symlink("_real", self.root)

        for d in self.DIRS:
            os.mkdir(os.path.join(self.root, d))
        for f in self.FILES:
            # open() instead of the Python-2-only file() builtin; close the
            # handle instead of leaking it.
            open(os.path.join(self.root, f), "w").close()
        for l in self.LINKS:
            os.symlink(l[1], os.path.join(self.root, l[0]))

    def tearDown(self):
        # The original used __del__ for cleanup, which only runs at GC time
        # and could leave temp trees behind; tearDown runs after every test.
        try:
            #os.system("tree -F %s" % self.tmpdir)
            shutil.rmtree(self.tmpdir)
        except:
            pass

    def __realpath(self, file, use_physdir, assume_dir = True):
        return oe.path.realpath(os.path.join(self.root, file), self.root,
                                use_physdir, assume_dir = assume_dir)

    def test_norm(self):
        for l in self.LINKS:
            if l[2] == None:
                continue

            target_p = self.__realpath(l[0], True)
            target_l = self.__realpath(l[0], False)

            if l[2] != False:
                self.assertEqual(target_p, target_l)
                self.assertEqual(l[2], target_p[len(self.root):])

    def test_phys(self):
        for l in self.LINKS_PHYS:
            target_p = self.__realpath(l[0], True)
            target_l = self.__realpath(l[0], False)

            self.assertEqual(l[1], target_p[len(self.root):])
            self.assertEqual(l[2], target_l[len(self.root):])

    def test_loop(self):
        for e in self.EXCEPTIONS:
            self.assertRaisesRegexp(OSError, r'\[Errno %u\]' % e[1],
                                    self.__realpath, e[0], False, False)
diff --git a/meta/lib/oe/tests/test_types.py b/meta/lib/oe/tests/test_types.py
new file mode 100644
index 0000000000..367cc30e45
--- /dev/null
+++ b/meta/lib/oe/tests/test_types.py
@@ -0,0 +1,62 @@
1import unittest
2from oe.maketype import create, factory
3
class TestTypes(unittest.TestCase):
    """Shared helpers for the oe.maketype tests.

    Provides fallbacks for assertions missing from older unittest versions.
    """
    def assertIsInstance(self, obj, cls):
        return self.assertTrue(isinstance(obj, cls))

    def assertIsNot(self, obj, other):
        return self.assertFalse(obj is other)

    def assertFactoryCreated(self, value, type, **flags):
        # Check factory() knows 'type' and create() returns that class.
        cls = factory(type)
        self.assertIsNot(cls, None)
        self.assertIsInstance(create(value, type, **flags), cls)
15
class TestBooleanType(TestTypes):
    """Tests for the 'boolean' variable type."""

    def test_invalid(self):
        # Unrecognized strings raise ValueError; non-strings raise TypeError.
        self.assertRaises(ValueError, create, '', 'boolean')
        self.assertRaises(ValueError, create, 'foo', 'boolean')
        self.assertRaises(TypeError, create, object(), 'boolean')

    def test_true(self):
        # Matching is case-insensitive.
        for value in ('y', 'yes', '1', 't', 'true', 'TRUE', 'truE'):
            self.assertTrue(create(value, 'boolean'))

    def test_false(self):
        for value in ('n', 'no', '0', 'f', 'false', 'FALSE', 'faLse'):
            self.assertFalse(create(value, 'boolean'))

    def test_bool_equality(self):
        # Results compare equal to the actual bool singletons.
        self.assertEqual(create('n', 'boolean'), False)
        self.assertNotEqual(create('n', 'boolean'), True)
        self.assertEqual(create('y', 'boolean'), True)
        self.assertNotEqual(create('y', 'boolean'), False)
45
class TestList(TestTypes):
    """Tests for the 'list' variable type (OEList)."""
    # NOTE(review): this deliberately shadows unittest's assertListEqual
    # with different semantics (value is a string to be parsed).
    def assertListEqual(self, value, valid, sep=None):
        obj = create(value, 'list', separator=sep)
        self.assertEqual(obj, valid)
        if sep is not None:
            self.assertEqual(obj.separator, sep)
        # str() must round-trip through the separator.
        self.assertEqual(str(obj), obj.separator.join(obj))

    def test_list_nosep(self):
        testlist = ['alpha', 'beta', 'theta']
        self.assertListEqual('alpha beta theta', testlist)
        self.assertListEqual('alpha beta\ttheta', testlist)
        self.assertListEqual('alpha', ['alpha'])

    def test_list_usersep(self):
        self.assertListEqual('foo:bar', ['foo', 'bar'], ':')
        self.assertListEqual('foo:bar:baz', ['foo', 'bar', 'baz'], ':')
diff --git a/meta/lib/oe/tests/test_utils.py b/meta/lib/oe/tests/test_utils.py
new file mode 100644
index 0000000000..5d9ac52e7d
--- /dev/null
+++ b/meta/lib/oe/tests/test_utils.py
@@ -0,0 +1,51 @@
1import unittest
2from oe.utils import packages_filter_out_system
3
class TestPackagesFilterOutSystem(unittest.TestCase):
    def test_filter(self):
        """
        Test that oe.utils.packages_filter_out_system works.
        """
        try:
            import bb
        except ImportError:
            self.skipTest("Cannot import bb")

        d = bb.data_smart.DataSmart()
        d.setVar("PN", "foo")

        # PN itself plus the standard -dbg/-dev/-doc names are dropped.
        d.setVar("PACKAGES", "foo foo-doc foo-dev")
        pkgs = packages_filter_out_system(d)
        self.assertEqual(pkgs, [])

        d.setVar("PACKAGES", "foo foo-doc foo-data foo-dev")
        pkgs = packages_filter_out_system(d)
        self.assertEqual(pkgs, ["foo-data"])

        # Locale packages (PN-locale-*) are also filtered.
        d.setVar("PACKAGES", "foo foo-locale-en-gb")
        pkgs = packages_filter_out_system(d)
        self.assertEqual(pkgs, [])

        d.setVar("PACKAGES", "foo foo-data foo-locale-en-gb")
        pkgs = packages_filter_out_system(d)
        self.assertEqual(pkgs, ["foo-data"])
31 self.assertEqual(pkgs, ["foo-data"])
32
33
class TestTrimVersion(unittest.TestCase):
    """Tests for oe.utils.trim_version.

    Bug fix: this module only imported packages_filter_out_system, so every
    reference to trim_version raised NameError; import it locally instead.
    """
    def test_version_exception(self):
        from oe.utils import trim_version
        with self.assertRaises(TypeError):
            trim_version(None, 2)
        with self.assertRaises(TypeError):
            trim_version((1, 2, 3), 2)

    def test_num_exception(self):
        from oe.utils import trim_version
        with self.assertRaises(ValueError):
            trim_version("1.2.3", 0)
        with self.assertRaises(ValueError):
            trim_version("1.2.3", -1)

    def test_valid(self):
        from oe.utils import trim_version
        self.assertEqual(trim_version("1.2.3", 1), "1")
        self.assertEqual(trim_version("1.2.3", 2), "1.2")
        self.assertEqual(trim_version("1.2.3", 3), "1.2.3")
        # Asking for more parts than exist returns the whole version.
        self.assertEqual(trim_version("1.2.3", 4), "1.2.3")
diff --git a/meta/lib/oe/types.py b/meta/lib/oe/types.py
new file mode 100644
index 0000000000..7f47c17d0e
--- /dev/null
+++ b/meta/lib/oe/types.py
@@ -0,0 +1,153 @@
1import errno
2import re
3import os
4
5
class OEList(list):
    """OpenEmbedded 'list' type

    Behaves like an ordinary list, but is built from a string value plus an
    optional separator, and str() re-joins the elements with that separator.
    Set a variable's type flag to 'list' to use this type; the 'separator'
    flag defaults to whitespace."""

    name = "list"

    def __init__(self, value, separator = None):
        items = value.split(separator) if value is not None else []
        list.__init__(self, items)
        # A None separator means "split on any whitespace"; remember a
        # single space for re-joining in that case.
        self.separator = " " if separator is None else separator

    def __str__(self):
        return self.separator.join(self)
29
def choice(value, choices):
    """OpenEmbedded 'choice' type

    Acts as a multiple choice for the user. To use this, set the variable
    type flag to 'choice', and set the 'choices' flag to a space separated
    list of valid values.  Matching is case-insensitive; the lower-cased
    value is returned.  Raises TypeError for non-string input, ValueError
    for a value not in the allowed set."""
    # 'basestring' only exists on Python 2; fall back to str so this module
    # also works on Python 3 (oe.utils already carries a similar shim).
    try:
        stringtype = basestring
    except NameError:
        stringtype = str
    if not isinstance(value, stringtype):
        raise TypeError("choice accepts a string, not '%s'" % type(value))

    value = value.lower()
    choices = choices.lower()
    if value not in choices.split():
        raise ValueError("Invalid choice '%s'. Valid choices: %s" %
                         (value, choices))
    return value
45
class NoMatch(object):
    """Stub python regex pattern object which never matches anything.

    Mirrors the pattern-object API of the 're' module so callers can use
    the result of regex() uniformly; every method simply returns None.
    """
    def findall(self, string, flags=0):
        return None

    def finditer(self, string, flags=0):
        return None

    def match(self, string, flags=0):
        # Bug fix: the original signature was match(self, flags=0) and
        # omitted the string argument expected of a pattern object.
        return None

    def search(self, string, flags=0):
        return None

    def split(self, string, maxsplit=0):
        return None

    def sub(self, repl, string, count=0):
        # Bug fix: the original spelled this sub(pattern, repl, string, ...)
        # with no 'self'; the instance was silently bound to 'pattern'.
        return None

    def subn(self, repl, string, count=0):
        return None

# Replace the class with a singleton instance; regex() below returns it.
NoMatch = NoMatch()
70
def regex(value, regexflags=None):
    """OpenEmbedded 'regex' type

    Acts as a regular expression, returning the pre-compiled regular
    expression pattern object. To use this type, set the variable type flag
    to 'regex', and optionally, set the 'regexflags' type to a space separated
    list of the flags to control the regular expression matching (e.g.
    FOO[regexflags] += 'ignorecase'). See the python documentation on the
    're' module for a list of valid flags."""

    combined = 0
    for flagname in (regexflags.split() if regexflags else []):
        try:
            # Flag names map onto the re module's upper-case constants.
            combined |= getattr(re, flagname.upper())
        except AttributeError:
            raise ValueError("Invalid regex flag '%s'" % flagname.upper())

    if not value:
        # An undefined or empty variable matches nothing by default; use
        # '.*' explicitly to match anything.
        return NoMatch

    try:
        return re.compile(value, combined)
    except re.error as exc:
        raise ValueError("Invalid regex value '%s': %s" %
                         (value, exc.args[0]))
101
def boolean(value):
    """OpenEmbedded 'boolean' type

    Valid values for true: 'yes', 'y', 'true', 't', '1'
    Valid values for false: 'no', 'n', 'false', 'f', '0'

    Matching is case-insensitive.  Raises TypeError for non-string input
    and ValueError for an unrecognized string."""
    # 'basestring' only exists on Python 2; fall back to str so this module
    # also works on Python 3 (oe.utils already carries a similar shim).
    try:
        stringtype = basestring
    except NameError:
        stringtype = str
    if not isinstance(value, stringtype):
        raise TypeError("boolean accepts a string, not '%s'" % type(value))

    value = value.lower()
    if value in ('yes', 'y', 'true', 't', '1'):
        return True
    elif value in ('no', 'n', 'false', 'f', '0'):
        return False
    raise ValueError("Invalid boolean value '%s'" % value)
118
def integer(value, numberbase=10):
    """OpenEmbedded 'integer' type

    Defaults to base 10, but this can be specified using the optional
    'numberbase' flag."""
    # The base itself may arrive as a string from variable flags.
    base = int(numberbase)
    return int(value, base)
126
# Keep a handle on the builtin, which the definition below shadows.
_float = float
def float(value, fromhex='false'):
    """OpenEmbedded floating point type

    To use this type, set the type flag to 'float', and optionally set the
    'fromhex' flag to a true value (obeying the same rules as for the
    'boolean' type) if the value is in base 16 rather than base 10."""
    if boolean(fromhex):
        return _float.fromhex(value)
    return _float(value)
139
def path(value, relativeto='', normalize='true', mustexist='false'):
    """OpenEmbedded 'path' type

    Joins the value onto 'relativeto', optionally normalizes it, and
    optionally requires that the resulting path exist, raising ValueError
    when it does not."""
    value = os.path.join(relativeto, value)

    if boolean(normalize):
        value = os.path.normpath(value)

    if boolean(mustexist):
        try:
            # Probe with a context manager; the original leaked the handle.
            with open(value, 'r'):
                pass
        except IOError as exc:
            # Only a missing file is an error here; other IOErrors (e.g.
            # permissions) deliberately fall through, as before.
            if exc.errno == errno.ENOENT:
                raise ValueError("{0}: {1}".format(value, os.strerror(errno.ENOENT)))

    return value
diff --git a/meta/lib/oe/utils.py b/meta/lib/oe/utils.py
new file mode 100644
index 0000000000..82987e80d0
--- /dev/null
+++ b/meta/lib/oe/utils.py
@@ -0,0 +1,152 @@
1try:
2 # Python 2
3 import commands as cmdstatus
4except ImportError:
5 # Python 3
6 import subprocess as cmdstatus
7
def read_file(filename):
    """Return the stripped contents of 'filename', or "" if it cannot be
    opened.

    WARNING: can't raise an error on failure because of the new RDEPENDS
    handling. This is a bit ugly. :M:
    """
    try:
        # Context manager guarantees the handle is closed; the original
        # also carried an unreachable 'return None' after the else-return.
        with open(filename, "r") as f:
            return f.read().strip()
    except IOError:
        return ""
18
def ifelse(condition, iftrue = True, iffalse = False):
    """Return 'iftrue' when 'condition' is truthy, else 'iffalse'."""
    return iftrue if condition else iffalse
24
def conditional(variable, checkvalue, truevalue, falsevalue, d):
    """Return truevalue when d.getVar(variable) equals checkvalue,
    otherwise falsevalue."""
    value = d.getVar(variable,1)
    return truevalue if value == checkvalue else falsevalue
30
def less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
    """Numeric comparison: truevalue when float(d.getVar(variable)) is
    less than or equal to float(checkvalue), otherwise falsevalue."""
    current = float(d.getVar(variable,1))
    return truevalue if current <= float(checkvalue) else falsevalue
36
def version_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
    """Version-string comparison via bb.utils.vercmp_string: truevalue when
    d.getVar(variable) <= checkvalue, otherwise falsevalue."""
    # NOTE(review): relies on 'bb' being resolvable; this module has no
    # explicit 'import bb' - verify against how oe.utils is loaded.
    result = bb.utils.vercmp_string(d.getVar(variable,True), checkvalue)
    if result <= 0:
        return truevalue
    else:
        return falsevalue
43
def contains(variable, checkvalues, truevalue, falsevalue, d):
    """Return truevalue when every entry of 'checkvalues' occurs in the
    whitespace-split value of d.getVar(variable), otherwise falsevalue.

    checkvalues may be a string (split on whitespace) or any iterable of
    strings.
    """
    val = d.getVar(variable, True)
    if not val:
        return falsevalue
    val = set(val.split())
    # 'basestring' only exists on Python 2; fall back to str so this module
    # also works on Python 3 (mirroring the cmdstatus shim above).
    try:
        stringtype = basestring
    except NameError:
        stringtype = str
    if isinstance(checkvalues, stringtype):
        checkvalues = set(checkvalues.split())
    else:
        checkvalues = set(checkvalues)
    if checkvalues.issubset(val):
        return truevalue
    return falsevalue
56
def both_contain(variable1, variable2, checkvalue, d):
    """Return checkvalue when it appears as a substring in the values of
    both variables, otherwise the empty string."""
    first = d.getVar(variable1,1)
    second = d.getVar(variable2,1)
    if first.find(checkvalue) != -1 and second.find(checkvalue) != -1:
        return checkvalue
    return ""
62
def prune_suffix(var, suffixes, d):
    """Strip any listed suffix from the end of 'var' and the MLPREFIX from
    its start, returning the pruned name.

    Bug fix: the original used str.replace(), which removed the suffix and
    prefix text *anywhere* in the string, not just at the ends, and would
    empty the string entirely for an empty suffix.
    """
    for suffix in suffixes:
        if suffix and var.endswith(suffix):
            var = var[:-len(suffix)]

    prefix = d.getVar("MLPREFIX", True)
    if prefix and var.startswith(prefix):
        var = var[len(prefix):]

    return var
75
def str_filter(f, str, d):
    """Keep only the whitespace-separated words of 'str' whose start
    matches regex 'f'.  ('d' is unused but kept for signature compat.)"""
    from re import match
    kept = [word for word in str.split() if match(f, word, 0)]
    return " ".join(kept)
79
def str_filter_out(f, str, d):
    """Drop the whitespace-separated words of 'str' whose start matches
    regex 'f'.  ('d' is unused but kept for signature compat.)"""
    from re import match
    kept = [word for word in str.split() if not match(f, word, 0)]
    return " ".join(kept)
83
def param_bool(cfg, field, dflt = None):
    """Lookup <field> in <cfg> map and convert it to a boolean; take
    <dflt> when this <field> does not exist"""
    value = cfg.get(field, dflt)
    lowered = str(value).lower()
    if lowered in ('yes', 'y', 'true', 't', '1'):
        return True
    if lowered in ('no', 'n', 'false', 'f', '0'):
        return False
    raise ValueError("invalid value for boolean parameter '%s': '%s'" % (field, value))
94
def inherits(d, *classes):
    """Return True if the metadata inherits any of the specified classes"""
    # NOTE(review): relies on 'bb' being resolvable; this module has no
    # explicit 'import bb' - verify against how oe.utils is loaded.
    return any(bb.data.inherits_class(cls, d) for cls in classes)
98
def features_backfill(var,d):
    # This construct allows the addition of new features to variable specified
    # as var
    # Example for var = "DISTRO_FEATURES"
    # This construct allows the addition of new features to DISTRO_FEATURES
    # that if not present would disable existing functionality, without
    # disturbing distributions that have already set DISTRO_FEATURES.
    # Distributions wanting to elide a value in DISTRO_FEATURES_BACKFILL should
    # add the feature to DISTRO_FEATURES_BACKFILL_CONSIDERED
    features = (d.getVar(var, True) or "").split()
    backfill = (d.getVar(var+"_BACKFILL", True) or "").split()
    considered = (d.getVar(var+"_BACKFILL_CONSIDERED", True) or "").split()

    # Only features that are neither already present nor explicitly
    # excluded via _BACKFILL_CONSIDERED get added.
    addfeatures = []
    for feature in backfill:
        if feature not in features and feature not in considered:
            addfeatures.append(feature)

    if addfeatures:
        # Append (not set) so any distro-configured value is preserved.
        d.appendVar(var, " " + " ".join(addfeatures))
119
120
def packages_filter_out_system(d):
    """
    Return a list of packages from PACKAGES with the "system" packages such as
    PN-dbg PN-doc PN-locale-en-gb removed.
    """
    pn = d.getVar('PN', True)
    # Materialize as a tuple: under Python 3, map() returns a one-shot
    # iterator which the original exhausted on the first membership test,
    # letting later system packages slip through.
    blacklist = tuple(pn + suffix for suffix in ('', '-dbg', '-dev', '-doc', '-locale', '-staticdev'))
    localepkg = pn + "-locale-"

    # Keep everything that is neither a blacklisted name nor a locale package.
    return [pkg for pkg in d.getVar('PACKAGES', True).split()
            if pkg not in blacklist and localepkg not in pkg]
135
def getstatusoutput(cmd):
    """Run 'cmd' via commands.getstatusoutput (Python 2) or
    subprocess.getstatusoutput (Python 3), per the import shim at the
    top of this module; returns (status, output)."""
    return cmdstatus.getstatusoutput(cmd)
138
139
def trim_version(version, num_parts=2):
    """
    Return just the first <num_parts> of <version>, split by periods. For
    example, trim_version("1.2.3", 2) will return "1.2".
    """
    # Exact type check kept deliberately (str subclasses/None rejected).
    if type(version) is not str:
        raise TypeError("Version should be a string")
    if num_parts < 1:
        raise ValueError("Cannot split to parts < 1")

    return ".".join(version.split(".")[:num_parts])