summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--bitbake-dev/AUTHORS10
-rw-r--r--bitbake-dev/COPYING339
-rw-r--r--bitbake-dev/ChangeLog317
-rwxr-xr-xbitbake-dev/bin/bitbake195
-rwxr-xr-xbitbake-dev/bin/bitdoc534
-rw-r--r--bitbake-dev/lib/bb/COW.py318
-rw-r--r--bitbake-dev/lib/bb/__init__.py1134
-rw-r--r--bitbake-dev/lib/bb/build.py394
-rw-r--r--bitbake-dev/lib/bb/cache.py533
-rw-r--r--bitbake-dev/lib/bb/cooker.py978
-rw-r--r--bitbake-dev/lib/bb/data.py562
-rw-r--r--bitbake-dev/lib/bb/data_smart.py289
-rw-r--r--bitbake-dev/lib/bb/event.py275
-rw-r--r--bitbake-dev/lib/bb/fetch/__init__.py640
-rw-r--r--bitbake-dev/lib/bb/fetch/bzr.py153
-rw-r--r--bitbake-dev/lib/bb/fetch/cvs.py182
-rw-r--r--bitbake-dev/lib/bb/fetch/git.py216
-rw-r--r--bitbake-dev/lib/bb/fetch/hg.py178
-rw-r--r--bitbake-dev/lib/bb/fetch/local.py72
-rw-r--r--bitbake-dev/lib/bb/fetch/osc.py155
-rw-r--r--bitbake-dev/lib/bb/fetch/perforce.py214
-rw-r--r--bitbake-dev/lib/bb/fetch/ssh.py118
-rw-r--r--bitbake-dev/lib/bb/fetch/svk.py109
-rw-r--r--bitbake-dev/lib/bb/fetch/svn.py206
-rw-r--r--bitbake-dev/lib/bb/fetch/wget.py130
-rw-r--r--bitbake-dev/lib/bb/manifest.py144
-rw-r--r--bitbake-dev/lib/bb/methodpool.py84
-rw-r--r--bitbake-dev/lib/bb/msg.py125
-rw-r--r--bitbake-dev/lib/bb/parse/__init__.py84
-rw-r--r--bitbake-dev/lib/bb/parse/parse_py/BBHandler.py410
-rw-r--r--bitbake-dev/lib/bb/parse/parse_py/ConfHandler.py241
-rw-r--r--bitbake-dev/lib/bb/parse/parse_py/__init__.py33
-rw-r--r--bitbake-dev/lib/bb/persist_data.py121
-rw-r--r--bitbake-dev/lib/bb/providers.py327
-rw-r--r--bitbake-dev/lib/bb/runqueue.py1174
-rw-r--r--bitbake-dev/lib/bb/shell.py824
-rw-r--r--bitbake-dev/lib/bb/taskdata.py610
-rw-r--r--bitbake-dev/lib/bb/utils.py431
-rw-r--r--bitbake/AUTHORS2
-rw-r--r--bitbake/ChangeLog217
-rw-r--r--bitbake/MANIFEST53
-rwxr-xr-xbitbake/bin/bitbake97
-rwxr-xr-xbitbake/bin/bitdoc2
-rw-r--r--bitbake/contrib/vim/syntax/bitbake.vim35
-rw-r--r--bitbake/doc/bitbake.14
-rw-r--r--bitbake/doc/manual/usermanual.xml28
-rw-r--r--bitbake/lib/bb/__init__.py3
-rw-r--r--bitbake/lib/bb/build.py226
-rw-r--r--bitbake/lib/bb/cache.py89
-rw-r--r--bitbake/lib/bb/command.py (renamed from bitbake-dev/lib/bb/command.py)0
-rw-r--r--bitbake/lib/bb/cooker.py761
-rw-r--r--bitbake/lib/bb/daemonize.py (renamed from bitbake-dev/lib/bb/daemonize.py)0
-rw-r--r--bitbake/lib/bb/data.py2
-rw-r--r--bitbake/lib/bb/event.py211
-rw-r--r--bitbake/lib/bb/fetch/__init__.py42
-rw-r--r--bitbake/lib/bb/fetch/cvs.py2
-rw-r--r--bitbake/lib/bb/fetch/git.py73
-rw-r--r--bitbake/lib/bb/fetch/local.py4
-rw-r--r--bitbake/lib/bb/fetch/svk.py2
-rw-r--r--bitbake/lib/bb/fetch/wget.py2
-rw-r--r--bitbake/lib/bb/msg.py26
-rw-r--r--bitbake/lib/bb/parse/parse_py/BBHandler.py32
-rw-r--r--bitbake/lib/bb/parse/parse_py/ConfHandler.py13
-rw-r--r--bitbake/lib/bb/providers.py4
-rw-r--r--bitbake/lib/bb/runqueue.py341
-rw-r--r--bitbake/lib/bb/server/__init__.py (renamed from bitbake-dev/lib/bb/server/__init__.py)0
-rw-r--r--bitbake/lib/bb/server/none.py (renamed from bitbake-dev/lib/bb/server/none.py)0
-rw-r--r--bitbake/lib/bb/server/xmlrpc.py (renamed from bitbake-dev/lib/bb/server/xmlrpc.py)0
-rw-r--r--bitbake/lib/bb/shell.py19
-rw-r--r--bitbake/lib/bb/taskdata.py38
-rw-r--r--bitbake/lib/bb/ui/__init__.py (renamed from bitbake-dev/lib/bb/ui/__init__.py)0
-rw-r--r--bitbake/lib/bb/ui/crumbs/__init__.py (renamed from bitbake-dev/lib/bb/ui/crumbs/__init__.py)0
-rw-r--r--bitbake/lib/bb/ui/crumbs/buildmanager.py (renamed from bitbake-dev/lib/bb/ui/crumbs/buildmanager.py)0
-rw-r--r--bitbake/lib/bb/ui/crumbs/puccho.glade (renamed from bitbake-dev/lib/bb/ui/crumbs/puccho.glade)0
-rw-r--r--bitbake/lib/bb/ui/crumbs/runningbuild.py (renamed from bitbake-dev/lib/bb/ui/crumbs/runningbuild.py)0
-rw-r--r--bitbake/lib/bb/ui/depexp.py (renamed from bitbake-dev/lib/bb/ui/depexp.py)0
-rw-r--r--bitbake/lib/bb/ui/goggle.py (renamed from bitbake-dev/lib/bb/ui/goggle.py)0
-rw-r--r--bitbake/lib/bb/ui/knotty.py (renamed from bitbake-dev/lib/bb/ui/knotty.py)0
-rw-r--r--bitbake/lib/bb/ui/ncurses.py (renamed from bitbake-dev/lib/bb/ui/ncurses.py)0
-rw-r--r--bitbake/lib/bb/ui/puccho.py (renamed from bitbake-dev/lib/bb/ui/puccho.py)0
-rw-r--r--bitbake/lib/bb/ui/uievent.py (renamed from bitbake-dev/lib/bb/ui/uievent.py)0
-rw-r--r--bitbake/lib/bb/ui/uihelper.py (renamed from bitbake-dev/lib/bb/ui/uihelper.py)0
-rw-r--r--bitbake/lib/bb/utils.py20
83 files changed, 1392 insertions, 13815 deletions
diff --git a/bitbake-dev/AUTHORS b/bitbake-dev/AUTHORS
deleted file mode 100644
index a4014b1e39..0000000000
--- a/bitbake-dev/AUTHORS
+++ /dev/null
@@ -1,10 +0,0 @@
1Tim Ansell <mithro@mithis.net>
2Phil Blundell <pb@handhelds.org>
3Seb Frankengul <seb@frankengul.org>
4Holger Freyther <zecke@handhelds.org>
5Marcin Juszkiewicz <marcin@juszkiewicz.com.pl>
6Chris Larson <kergoth@handhelds.org>
7Ulrich Luckas <luckas@musoft.de>
8Mickey Lauer <mickey@Vanille.de>
9Richard Purdie <rpurdie@rpsys.net>
10Holger Schurig <holgerschurig@gmx.de>
diff --git a/bitbake-dev/COPYING b/bitbake-dev/COPYING
deleted file mode 100644
index d511905c16..0000000000
--- a/bitbake-dev/COPYING
+++ /dev/null
@@ -1,339 +0,0 @@
1 GNU GENERAL PUBLIC LICENSE
2 Version 2, June 1991
3
4 Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
5 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
6 Everyone is permitted to copy and distribute verbatim copies
7 of this license document, but changing it is not allowed.
8
9 Preamble
10
11 The licenses for most software are designed to take away your
12freedom to share and change it. By contrast, the GNU General Public
13License is intended to guarantee your freedom to share and change free
14software--to make sure the software is free for all its users. This
15General Public License applies to most of the Free Software
16Foundation's software and to any other program whose authors commit to
17using it. (Some other Free Software Foundation software is covered by
18the GNU Lesser General Public License instead.) You can apply it to
19your programs, too.
20
21 When we speak of free software, we are referring to freedom, not
22price. Our General Public Licenses are designed to make sure that you
23have the freedom to distribute copies of free software (and charge for
24this service if you wish), that you receive source code or can get it
25if you want it, that you can change the software or use pieces of it
26in new free programs; and that you know you can do these things.
27
28 To protect your rights, we need to make restrictions that forbid
29anyone to deny you these rights or to ask you to surrender the rights.
30These restrictions translate to certain responsibilities for you if you
31distribute copies of the software, or if you modify it.
32
33 For example, if you distribute copies of such a program, whether
34gratis or for a fee, you must give the recipients all the rights that
35you have. You must make sure that they, too, receive or can get the
36source code. And you must show them these terms so they know their
37rights.
38
39 We protect your rights with two steps: (1) copyright the software, and
40(2) offer you this license which gives you legal permission to copy,
41distribute and/or modify the software.
42
43 Also, for each author's protection and ours, we want to make certain
44that everyone understands that there is no warranty for this free
45software. If the software is modified by someone else and passed on, we
46want its recipients to know that what they have is not the original, so
47that any problems introduced by others will not reflect on the original
48authors' reputations.
49
50 Finally, any free program is threatened constantly by software
51patents. We wish to avoid the danger that redistributors of a free
52program will individually obtain patent licenses, in effect making the
53program proprietary. To prevent this, we have made it clear that any
54patent must be licensed for everyone's free use or not licensed at all.
55
56 The precise terms and conditions for copying, distribution and
57modification follow.
58
59 GNU GENERAL PUBLIC LICENSE
60 TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
61
62 0. This License applies to any program or other work which contains
63a notice placed by the copyright holder saying it may be distributed
64under the terms of this General Public License. The "Program", below,
65refers to any such program or work, and a "work based on the Program"
66means either the Program or any derivative work under copyright law:
67that is to say, a work containing the Program or a portion of it,
68either verbatim or with modifications and/or translated into another
69language. (Hereinafter, translation is included without limitation in
70the term "modification".) Each licensee is addressed as "you".
71
72Activities other than copying, distribution and modification are not
73covered by this License; they are outside its scope. The act of
74running the Program is not restricted, and the output from the Program
75is covered only if its contents constitute a work based on the
76Program (independent of having been made by running the Program).
77Whether that is true depends on what the Program does.
78
79 1. You may copy and distribute verbatim copies of the Program's
80source code as you receive it, in any medium, provided that you
81conspicuously and appropriately publish on each copy an appropriate
82copyright notice and disclaimer of warranty; keep intact all the
83notices that refer to this License and to the absence of any warranty;
84and give any other recipients of the Program a copy of this License
85along with the Program.
86
87You may charge a fee for the physical act of transferring a copy, and
88you may at your option offer warranty protection in exchange for a fee.
89
90 2. You may modify your copy or copies of the Program or any portion
91of it, thus forming a work based on the Program, and copy and
92distribute such modifications or work under the terms of Section 1
93above, provided that you also meet all of these conditions:
94
95 a) You must cause the modified files to carry prominent notices
96 stating that you changed the files and the date of any change.
97
98 b) You must cause any work that you distribute or publish, that in
99 whole or in part contains or is derived from the Program or any
100 part thereof, to be licensed as a whole at no charge to all third
101 parties under the terms of this License.
102
103 c) If the modified program normally reads commands interactively
104 when run, you must cause it, when started running for such
105 interactive use in the most ordinary way, to print or display an
106 announcement including an appropriate copyright notice and a
107 notice that there is no warranty (or else, saying that you provide
108 a warranty) and that users may redistribute the program under
109 these conditions, and telling the user how to view a copy of this
110 License. (Exception: if the Program itself is interactive but
111 does not normally print such an announcement, your work based on
112 the Program is not required to print an announcement.)
113
114These requirements apply to the modified work as a whole. If
115identifiable sections of that work are not derived from the Program,
116and can be reasonably considered independent and separate works in
117themselves, then this License, and its terms, do not apply to those
118sections when you distribute them as separate works. But when you
119distribute the same sections as part of a whole which is a work based
120on the Program, the distribution of the whole must be on the terms of
121this License, whose permissions for other licensees extend to the
122entire whole, and thus to each and every part regardless of who wrote it.
123
124Thus, it is not the intent of this section to claim rights or contest
125your rights to work written entirely by you; rather, the intent is to
126exercise the right to control the distribution of derivative or
127collective works based on the Program.
128
129In addition, mere aggregation of another work not based on the Program
130with the Program (or with a work based on the Program) on a volume of
131a storage or distribution medium does not bring the other work under
132the scope of this License.
133
134 3. You may copy and distribute the Program (or a work based on it,
135under Section 2) in object code or executable form under the terms of
136Sections 1 and 2 above provided that you also do one of the following:
137
138 a) Accompany it with the complete corresponding machine-readable
139 source code, which must be distributed under the terms of Sections
140 1 and 2 above on a medium customarily used for software interchange; or,
141
142 b) Accompany it with a written offer, valid for at least three
143 years, to give any third party, for a charge no more than your
144 cost of physically performing source distribution, a complete
145 machine-readable copy of the corresponding source code, to be
146 distributed under the terms of Sections 1 and 2 above on a medium
147 customarily used for software interchange; or,
148
149 c) Accompany it with the information you received as to the offer
150 to distribute corresponding source code. (This alternative is
151 allowed only for noncommercial distribution and only if you
152 received the program in object code or executable form with such
153 an offer, in accord with Subsection b above.)
154
155The source code for a work means the preferred form of the work for
156making modifications to it. For an executable work, complete source
157code means all the source code for all modules it contains, plus any
158associated interface definition files, plus the scripts used to
159control compilation and installation of the executable. However, as a
160special exception, the source code distributed need not include
161anything that is normally distributed (in either source or binary
162form) with the major components (compiler, kernel, and so on) of the
163operating system on which the executable runs, unless that component
164itself accompanies the executable.
165
166If distribution of executable or object code is made by offering
167access to copy from a designated place, then offering equivalent
168access to copy the source code from the same place counts as
169distribution of the source code, even though third parties are not
170compelled to copy the source along with the object code.
171
172 4. You may not copy, modify, sublicense, or distribute the Program
173except as expressly provided under this License. Any attempt
174otherwise to copy, modify, sublicense or distribute the Program is
175void, and will automatically terminate your rights under this License.
176However, parties who have received copies, or rights, from you under
177this License will not have their licenses terminated so long as such
178parties remain in full compliance.
179
180 5. You are not required to accept this License, since you have not
181signed it. However, nothing else grants you permission to modify or
182distribute the Program or its derivative works. These actions are
183prohibited by law if you do not accept this License. Therefore, by
184modifying or distributing the Program (or any work based on the
185Program), you indicate your acceptance of this License to do so, and
186all its terms and conditions for copying, distributing or modifying
187the Program or works based on it.
188
189 6. Each time you redistribute the Program (or any work based on the
190Program), the recipient automatically receives a license from the
191original licensor to copy, distribute or modify the Program subject to
192these terms and conditions. You may not impose any further
193restrictions on the recipients' exercise of the rights granted herein.
194You are not responsible for enforcing compliance by third parties to
195this License.
196
197 7. If, as a consequence of a court judgment or allegation of patent
198infringement or for any other reason (not limited to patent issues),
199conditions are imposed on you (whether by court order, agreement or
200otherwise) that contradict the conditions of this License, they do not
201excuse you from the conditions of this License. If you cannot
202distribute so as to satisfy simultaneously your obligations under this
203License and any other pertinent obligations, then as a consequence you
204may not distribute the Program at all. For example, if a patent
205license would not permit royalty-free redistribution of the Program by
206all those who receive copies directly or indirectly through you, then
207the only way you could satisfy both it and this License would be to
208refrain entirely from distribution of the Program.
209
210If any portion of this section is held invalid or unenforceable under
211any particular circumstance, the balance of the section is intended to
212apply and the section as a whole is intended to apply in other
213circumstances.
214
215It is not the purpose of this section to induce you to infringe any
216patents or other property right claims or to contest validity of any
217such claims; this section has the sole purpose of protecting the
218integrity of the free software distribution system, which is
219implemented by public license practices. Many people have made
220generous contributions to the wide range of software distributed
221through that system in reliance on consistent application of that
222system; it is up to the author/donor to decide if he or she is willing
223to distribute software through any other system and a licensee cannot
224impose that choice.
225
226This section is intended to make thoroughly clear what is believed to
227be a consequence of the rest of this License.
228
229 8. If the distribution and/or use of the Program is restricted in
230certain countries either by patents or by copyrighted interfaces, the
231original copyright holder who places the Program under this License
232may add an explicit geographical distribution limitation excluding
233those countries, so that distribution is permitted only in or among
234countries not thus excluded. In such case, this License incorporates
235the limitation as if written in the body of this License.
236
237 9. The Free Software Foundation may publish revised and/or new versions
238of the General Public License from time to time. Such new versions will
239be similar in spirit to the present version, but may differ in detail to
240address new problems or concerns.
241
242Each version is given a distinguishing version number. If the Program
243specifies a version number of this License which applies to it and "any
244later version", you have the option of following the terms and conditions
245either of that version or of any later version published by the Free
246Software Foundation. If the Program does not specify a version number of
247this License, you may choose any version ever published by the Free Software
248Foundation.
249
250 10. If you wish to incorporate parts of the Program into other free
251programs whose distribution conditions are different, write to the author
252to ask for permission. For software which is copyrighted by the Free
253Software Foundation, write to the Free Software Foundation; we sometimes
254make exceptions for this. Our decision will be guided by the two goals
255of preserving the free status of all derivatives of our free software and
256of promoting the sharing and reuse of software generally.
257
258 NO WARRANTY
259
260 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
261FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
262OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
263PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
264OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
265MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
266TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
267PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
268REPAIR OR CORRECTION.
269
270 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
271WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
272REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
273INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
274OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
275TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
276YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
277PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
278POSSIBILITY OF SUCH DAMAGES.
279
280 END OF TERMS AND CONDITIONS
281
282 How to Apply These Terms to Your New Programs
283
284 If you develop a new program, and you want it to be of the greatest
285possible use to the public, the best way to achieve this is to make it
286free software which everyone can redistribute and change under these terms.
287
288 To do so, attach the following notices to the program. It is safest
289to attach them to the start of each source file to most effectively
290convey the exclusion of warranty; and each file should have at least
291the "copyright" line and a pointer to where the full notice is found.
292
293 <one line to give the program's name and a brief idea of what it does.>
294 Copyright (C) <year> <name of author>
295
296 This program is free software; you can redistribute it and/or modify
297 it under the terms of the GNU General Public License as published by
298 the Free Software Foundation; either version 2 of the License, or
299 (at your option) any later version.
300
301 This program is distributed in the hope that it will be useful,
302 but WITHOUT ANY WARRANTY; without even the implied warranty of
303 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
304 GNU General Public License for more details.
305
306 You should have received a copy of the GNU General Public License along
307 with this program; if not, write to the Free Software Foundation, Inc.,
308 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
309
310Also add information on how to contact you by electronic and paper mail.
311
312If the program is interactive, make it output a short notice like this
313when it starts in an interactive mode:
314
315 Gnomovision version 69, Copyright (C) year name of author
316 Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
317 This is free software, and you are welcome to redistribute it
318 under certain conditions; type `show c' for details.
319
320The hypothetical commands `show w' and `show c' should show the appropriate
321parts of the General Public License. Of course, the commands you use may
322be called something other than `show w' and `show c'; they could even be
323mouse-clicks or menu items--whatever suits your program.
324
325You should also get your employer (if you work as a programmer) or your
326school, if any, to sign a "copyright disclaimer" for the program, if
327necessary. Here is a sample; alter the names:
328
329 Yoyodyne, Inc., hereby disclaims all copyright interest in the program
330 `Gnomovision' (which makes passes at compilers) written by James Hacker.
331
332 <signature of Ty Coon>, 1 April 1989
333 Ty Coon, President of Vice
334
335This General Public License does not permit incorporating your program into
336proprietary programs. If your program is a subroutine library, you may
337consider it more useful to permit linking proprietary applications with the
338library. If this is what you want to do, use the GNU Lesser General
339Public License instead of this License.
diff --git a/bitbake-dev/ChangeLog b/bitbake-dev/ChangeLog
deleted file mode 100644
index 22124cb7ea..0000000000
--- a/bitbake-dev/ChangeLog
+++ /dev/null
@@ -1,317 +0,0 @@
1Changes in Bitbake 1.9.x:
2 - Add PE (Package Epoch) support from Philipp Zabel (pH5)
3 - Treat python functions the same as shell functions for logging
4 - Use TMPDIR/anonfunc as a __anonfunc temp directory (T)
5 - Catch truncated cache file errors
6 - Allow operations other than assignment on flag variables
7 - Add code to handle inter-task dependencies
8 - Fix cache errors when generation dotGraphs
9 - Make sure __inherit_cache is updated before calling include() (from Michael Krelin)
10 - Fix bug when target was in ASSUME_PROVIDED (#2236)
11 - Raise ParseError for filenames with multiple underscores instead of infinitely looping (#2062)
12 - Fix invalid regexp in BBMASK error handling (missing import) (#1124)
13 - Promote certain warnings from debug to note 2 level
14 - Update manual
15 - Correctly redirect stdin when forking
16 - If parsing errors are found, exit, too many users miss the errors
17 - Remove spurious PREFERRED_PROVIDER warnings
18 - svn fetcher: Add _buildsvncommand function
19 - Improve certain error messages
20 - Rewrite svn fetcher to make adding extra operations easier
21 as part of future SRCDATE="now" fixes
22 (requires new FETCHCMD_svn definition in bitbake.conf)
23 - Change SVNDIR layout to be more unique (fixes #2644 and #2624)
24 - Add ConfigParsed Event after configuration parsing is complete
25 - Add SRCREV support for svn fetcher
26 - data.emit_var() - only call getVar if we need the variable
27 - Stop generating the A variable (seems to be legacy code)
28 - Make sure intertask depends get processed correcting in recursive depends
29 - Add pn-PN to overrides when evaluating PREFERRED_VERSION
30 - Improve the progress indicator by skipping tasks that have
31 already run before starting the build rather than during it
32 - Add profiling option (-P)
33 - Add BB_SRCREV_POLICY variable (clear or cache) to control SRCREV cache
34 - Add SRCREV_FORMAT support
35 - Fix local fetcher's localpath return values
36 - Apply OVERRIDES before performing immediate expansions
37 - Allow the -b -e option combination to take regular expressions
38 - Fix handling of variables with expansion in the name using _append/_prepend
39 e.g. RRECOMMENDS_${PN}_append_xyz = "abc"
40 - Add plain message function to bb.msg
41 - Sort the list of providers before processing so dependency problems are
42 reproducible rather than effectively random
43 - Fix/improve bitbake -s output
44 - Add locking for fetchers so only one tries to fetch a given file at a given time
45 - Fix int(0)/None confusion in runqueue.py which causes random gaps in dependency chains
46 - Expand data in addtasks
47 - Print the list of missing DEPENDS,RDEPENDS for the "No buildable providers available for required...."
48 error message.
49 - Rework add_task to be more efficient (6% speedup, 7% number of function calls reduction)
50 - Sort digraph output to make builds more reproducible
51 - Split expandKeys into two for loops to benefit from the expand_cache (12% speedup)
52 - runqueue.py: Fix idepends handling to avoid dependency errors
53 - Clear the terminal TOSTOP flag if set (and warn the user)
54 - Fix regression from r653 and make SRCDATE/CVSDATE work for packages again
55 - Fix a bug in bb.decodeurl where http://some.where.com/somefile.tgz decoded to host="" (#1530)
56 - Warn about malformed PREFERRED_PROVIDERS (#1072)
57 - Add support for BB_NICE_LEVEL option (#1627)
58 - Psyco is used only on x86 as there is no support for other architectures.
59 - Sort initial providers list by default preference (#1145, #2024)
60 - Improve provider sorting so preferred versions have preference over latest versions (#768)
61 - Detect builds of tasks with overlapping providers and warn (will become a fatal error) (#1359)
62 - Add MULTI_PROVIDER_WHITELIST variable to allow known safe multiple providers to be listed
63 - Handle paths in svn fetcher module parameter
64 - Support the syntax "export VARIABLE"
65 - Add bzr fetcher
66 - Add support for cleaning directories before a task in the form:
67 do_taskname[cleandirs] = "dir"
68 - bzr fetcher tweaks from Robert Schuster (#2913)
69 - Add mercurial (hg) fetcher from Robert Schuster (#2913)
70 - Don't add duplicates to BBPATH
71 - Fix preferred_version return values (providers.py)
72 - Fix 'depends' flag splitting
73 - Fix unexport handling (#3135)
74 - Add bb.copyfile function similar to bb.movefile (and improve movefile error reporting)
75 - Allow multiple options for deptask flag
76 - Use git-fetch instead of git-pull removing any need for merges when
77 fetching (we don't care about the index). Fixes fetch errors.
78 - Add BB_GENERATE_MIRROR_TARBALLS option, set to 0 to make git fetches
79 faster at the expense of not creating mirror tarballs.
80 - SRCREV handling updates, improvements and fixes from Poky
81 - Add bb.utils.lockfile() and bb.utils.unlockfile() from Poky
82 - Add support for task selfstamp and lockfiles flags
83 - Disable task number acceleration since it can allow the tasks to run
84 out of sequence
85 - Improve runqueue code comments
86 - Add task scheduler abstraction and some example schedulers
87 - Improve circular dependency chain debugging code and user feedback
88 - Don't give a stacktrace for invalid tasks, have a user friendly message (#3431)
89 - Add support for "-e target" (#3432)
90 - Fix shell showdata command (#3259)
91 - Fix shell data updating problems (#1880)
92 - Properly raise errors for invalid source URI protocols
93 - Change the wget fetcher failure handling to avoid lockfile problems
94 - Add support for branches in git fetcher (Otavio Salvador, Michael Lauer)
95 - Make taskdata and runqueue errors more user friendly
96 - Add norecurse and fullpath options to cvs fetcher
97 - Fix exit code for build failures in --continue mode
98 - Fix git branch tags fetching
99 - Change parseConfigurationFile so it works on real data, not a copy
100 - Handle 'base' inherit and all other INHERITs from parseConfigurationFile
101 instead of BBHandler
102 - Fix getVarFlags bug in data_smart
103 - Optimise cache handling by more quickly detecting an invalid cache, only
104 saving the cache when its changed, moving the cache validity check into
105 the parsing loop and factoring some getVar calls outside a for loop
106 - Cooker: Remove a debug message from the parsing loop to lower overhead
107 - Convert build.py exec_task to use getVarFlags
108 - Update shell to use cooker.buildFile
109 - Add StampUpdate event
110 - Convert -b option to use taskdata/runqueue
111 - Remove digraph and switch to new stamp checking code. exec_task no longer
112 honours dependencies
113 - Make fetcher timestamp updating non-fatal when permissions don't allow
114 updates
115 - Add BB_SCHEDULER variable/option ("completion" or "speed") controlling
116 the way bitbake schedules tasks
117 - Add BB_STAMP_POLICY variable/option ("perfile" or "full") controlling
118 how extensively stamps are looked at for validity
119 - When handling build target failures make sure idepends are checked and
120 failed where needed. Fixes --continue mode crashes.
121 - Fix -f (force) in conjunction with -b
122 - Fix problems with recrdeptask handling where some idepends weren't handled
123 correctly.
124 - Handle exit codes correctly (from pH5)
125 - Work around refs/HEAD issues with git over http (#3410)
126 - Add proxy support to the CVS fetcher (from Cyril Chemparathy)
127 - Improve runfetchcmd so errors are seen and various GIT variables are exported
128 - Add ability to fetchers to check URL validity without downloading
129 - Improve runtime PREFERRED_PROVIDERS warning message
130 - Add BB_STAMP_WHITELIST option which contains a list of stamps to ignore when
131 checking stamp dependencies and using a BB_STAMP_POLICY of "whitelist"
132 - No longer weight providers on the basis of a package being "already staged". This
133 leads to builds being non-deterministic.
134 - Flush stdout/stderr before forking to fix duplicate console output
135 - Make sure recrdeps tasks include all inter-task dependencies of a given fn
136 - Add bb.runqueue.check_stamp_fn() for use by packaged-staging
137 - Add PERSISTENT_DIR to store the PersistData in a persistent
138 directory != the cache dir.
139 - Add md5 and sha256 checksum generation functions to utils.py
140 - Correctly handle '-' characters in class names (#2958)
141 - Make sure expandKeys has been called on the data dictionary before running tasks
142 - Correctly add a task override in the form task-TASKNAME.
143 - Revert the '-' character fix in class names since it breaks things
144 - When a regexp fails to compile for PACKAGES_DYNAMIC, print a more useful error (#4444)
145 - Allow to checkout CVS by Date and Time. Just add HHmm to the SRCDATE.
146 - Move prunedir function to utils.py and add explode_dep_versions function
147 - Raise an exception if SRCREV == 'INVALID'
148 - Fix hg fetcher username/password handling and fix crash
149 - Fix PACKAGES_DYNAMIC handling of packages with '++' in the name
150 - Rename __depends to __base_depends after configuration parsing so we don't
151 recheck the validity of the config files time after time
152 - Add better environmental variable handling. By default it will now only pass certain
153 whitelisted variables into the data store. If BB_PRESERVE_ENV is set bitbake will use
154 all variable from the environment. If BB_ENV_WHITELIST is set, that whitelist will be
155 used instead of the internal bitbake one. Alternatively, BB_ENV_EXTRAWHITE can be used
156 to extend the internal whitelist.
157 - Perforce fetcher fix to use commandline options instead of being overriden by the environment
158 - bb.utils.prunedir can cope with symlinks to directoriees without exceptions
159 - use @rev when doing a svn checkout
160 - Add osc fetcher (from Joshua Lock in Poky)
161 - When SRCREV autorevisioning for a recipe is in use, don't cache the recipe
162 - Add tryaltconfigs option to control whether bitbake trys using alternative providers
163 to fulfil failed dependencies. It defaults to off, changing the default since this
164 behaviour confuses many users and isn't often useful.
165 - Improve lock file function error handling
166 - Add username handling to the git fetcher (Robert Bragg)
167 - Add support for HTTP_PROXY and HTTP_PROXY_IGNORE variables to the wget fetcher
168 - Export more variables to the fetcher commands to allow ssh checkouts and checkouts through
169 proxies to work better. (from Poky)
170 - Also allow user and pswd options in SRC_URIs globally (from Poky)
171 - Improve proxy handling when using mirrors (from Poky)
172 - Add bb.utils.prune_suffix function
173 - Fix hg checkouts of specific revisions (from Poky)
174 - Fix wget fetching of urls with parameters specified (from Poky)
175 - Add username handling to git fetcher (from Poky)
176 - Set HOME environmental variable when running fetcher commands (from Poky)
177 - Make sure allowed variables inherited from the environment are exported again (from Poky)
178 - When running a stage task in bbshell, run populate_staging, not the stage task (from Poky)
179 - Fix + character escaping from PACKAGES_DYNAMIC (thanks Otavio Salvador)
180 - Addition of BBCLASSEXTEND support for allowing one recipe to provide multiple targets (from Poky)
181
182Changes in Bitbake 1.8.0:
183 - Release 1.7.x as a stable series
184
185Changes in BitBake 1.7.x:
186 - Major updates of the dependency handling and execution
187 of tasks. Code from bin/bitbake replaced with runqueue.py
188 and taskdata.py
189 - New task execution code supports multithreading with a simplistic
190 threading algorithm controlled by BB_NUMBER_THREADS
191 - Change of the SVN Fetcher to keep the checkout around
192 courtsey of Paul Sokolovsky (#1367)
193 - PATH fix to bbimage (#1108)
194 - Allow debug domains to be specified on the commandline (-l)
195 - Allow 'interactive' tasks
196 - Logging message improvements
197 - Drop now uneeded BUILD_ALL_DEPS variable
198 - Add support for wildcards to -b option
199 - Major overhaul of the fetchers making a large amount of code common
200 including mirroring code
201 - Fetchers now touch md5 stamps upon access (to show activity)
202 - Fix -f force option when used without -b (long standing bug)
203 - Add expand_cache to data_cache.py, caching expanded data (speedup)
204 - Allow version field in DEPENDS (ignored for now)
205 - Add abort flag support to the shell
206 - Make inherit fail if the class doesn't exist (#1478)
207 - Fix data.emit_env() to expand keynames as well as values
208 - Add ssh fetcher
209 - Add perforce fetcher
210 - Make PREFERRED_PROVIDER_foobar defaults to foobar if available
211 - Share the parser's mtime_cache, reducing the number of stat syscalls
212 - Compile all anonfuncs at once!
213 *** Anonfuncs must now use common spacing format ***
214 - Memorise the list of handlers in __BBHANDLERS and tasks in __BBTASKS
215 This removes 2 million function calls resulting in a 5-10% speedup
216 - Add manpage
217 - Update generateDotGraph to use taskData/runQueue improving accuracy
218 and also adding a task dependency graph
219 - Fix/standardise on GPLv2 licence
220 - Move most functionality from bin/bitbake to cooker.py and split into
221 separate functions
222 - CVS fetcher: Added support for non-default port
223 - Add BBINCLUDELOGS_LINES, the number of lines to read from any logfile
224 - Drop shebangs from lib/bb scripts
225
226Changes in Bitbake 1.6.0:
227 - Better msg handling
228 - COW dict implementation from Tim Ansell (mithro) leading
229 to better performance
230 - Speed up of -s
231
232Changes in Bitbake 1.4.4:
233 - SRCDATE now handling courtsey Justin Patrin
234 - #1017 fix to work with rm_work
235
236Changes in BitBake 1.4.2:
237 - Send logs to oe.pastebin.com instead of pastebin.com
238 fixes #856
239 - Copy the internal bitbake data before building the
240 dependency graph. This fixes nano not having a
241 virtual/libc dependency
242 - Allow multiple TARBALL_STASH entries
243 - Cache, check if the directory exists before changing
244 into it
245 - git speedup cloning by not doing a checkout
246 - allow to have spaces in filenames (.conf, .bb, .bbclass)
247
248Changes in BitBake 1.4.0:
249 - Fix to check both RDEPENDS and RDEPENDS_${PN}
250 - Fix a RDEPENDS parsing bug in utils:explode_deps()
251 - Update git fetcher behaviour to match git changes
252 - ASSUME_PROVIDED allowed to include runtime packages
253 - git fetcher cleanup and efficiency improvements
254 - Change the format of the cache
255 - Update usermanual to document the Fetchers
256 - Major changes to caching with a new strategy
257 giving a major performance increase when reparsing
258 with few data changes
259
260Changes in BitBake 1.3.3:
261 - Create a new Fetcher module to ease the
262 development of new Fetchers.
263 Issue #438 fixed by rpurdie@openedhand.com
264 - Make the Subversion fetcher honor the SRC Date
265 (CVSDATE).
266 Issue #555 fixed by chris@openedhand.com
267 - Expand PREFERRED_PROVIDER properly
268 Issue #436 fixed by rprudie@openedhand.com
269 - Typo fix for Issue #531 by Philipp Zabel for the
270 BitBake Shell
271 - Introduce a new special variable SRCDATE as
272 a generic naming to replace CVSDATE.
273 - Introduce a new keyword 'required'. In contrast
274 to 'include' parsing will fail if a to be included
275 file can not be found.
276 - Remove hardcoding of the STAMP directory. Patch
277 courtsey pHilipp Zabel
278 - Track the RDEPENDS of each package (rpurdie@openedhand.com)
279 - Introduce BUILD_ALL_DEPS to build all RDEPENDS. E.g
280 this is used by the OpenEmbedded Meta Packages.
281 (rpurdie@openedhand.com).
282
283Changes in BitBake 1.3.2:
284 - reintegration of make.py into BitBake
285 - bbread is gone, use bitbake -e
286 - lots of shell updates and bugfixes
287 - Introduction of the .= and =. operator
288 - Sort variables, keys and groups in bitdoc
289 - Fix regression in the handling of BBCOLLECTIONS
290 - Update the bitbake usermanual
291
292Changes in BitBake 1.3.0:
293 - add bitbake interactive shell (bitbake -i)
294 - refactor bitbake utility in OO style
295 - kill default arguments in methods in the bb.data module
296 - kill default arguments in methods in the bb.fetch module
297 - the http/https/ftp fetcher will fail if the to be
298 downloaded file was not found in DL_DIR (this is needed
299 to avoid unpacking the sourceforge mirror page)
300 - Switch to a cow like data instance for persistent and non
301 persisting mode (called data_smart.py)
302 - Changed the callback of bb.make.collect_bbfiles to carry
303 additional parameters
304 - Drastically reduced the amount of needed RAM by not holding
305 each data instance in memory when using a cache/persistent
306 storage
307
308Changes in BitBake 1.2.1:
309 The 1.2.1 release is meant as a intermediate release to lay the
310 ground for more radical changes. The most notable changes are:
311
312 - Do not hardcode {}, use bb.data.init() instead if you want to
313 get a instance of a data class
314 - bb.data.init() is a factory and the old bb.data methods are delegates
315 - Do not use deepcopy use bb.data.createCopy() instead.
316 - Removed default arguments in bb.fetch
317
diff --git a/bitbake-dev/bin/bitbake b/bitbake-dev/bin/bitbake
deleted file mode 100755
index 23c9d73ee4..0000000000
--- a/bitbake-dev/bin/bitbake
+++ /dev/null
@@ -1,195 +0,0 @@
1#!/usr/bin/env python
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4#
5# Copyright (C) 2003, 2004 Chris Larson
6# Copyright (C) 2003, 2004 Phil Blundell
7# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
8# Copyright (C) 2005 Holger Hans Peter Freyther
9# Copyright (C) 2005 ROAD GmbH
10# Copyright (C) 2006 Richard Purdie
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24
25import sys, os, getopt, re, time, optparse, xmlrpclib
26sys.path.insert(0,os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
27import bb
28from bb import cooker
29from bb import ui
30
31
32__version__ = "1.9.0"
33
34if sys.hexversion < 0x020500F0:
35 print "Sorry, python 2.5 or later is required for this version of bitbake"
36 sys.exit(1)
37
38#============================================================================#
39# BBOptions
40#============================================================================#
class BBConfiguration( object ):
    """
    Manages build options and configurations for one run.

    Every attribute of the parsed optparse options object is mirrored
    onto this instance, so configuration.<option> works directly.
    """
    def __init__( self, options ):
        # Bulk-copy all parsed option attributes onto this object; this is
        # equivalent to setattr-ing each (key, value) pair individually.
        self.__dict__.update( options.__dict__ )
49
def print_exception(exc, value, tb):
    """
    Print the exception to stderr, only showing the traceback if bitbake
    debugging is enabled (i.e. the default msg debug level is non-zero).
    """
    # Suppress the traceback entirely when debugging is off; the default
    # excepthook then prints just the exception type and message.
    shown_tb = tb if bb.msg.debug_level['default'] else None
    sys.__excepthook__(exc, value, shown_tb)
59
60
61#============================================================================#
62# main
63#============================================================================#
64
def main():
    """
    bitbake entry point: parse command line options, start the cooker
    server, then hand control to the selected user interface and return
    its exit value.
    """
    return_value = 0
    pythonver = sys.version_info
    # Runtime guard in addition to the module-level sys.hexversion check.
    if pythonver[0] < 2 or (pythonver[0] == 2 and pythonver[1] < 5):
        print "Sorry, bitbake needs python 2.5 or later."
        sys.exit(1)

    parser = optparse.OptionParser( version = "BitBake Build Tool Core version %s, %%prog version %s" % ( bb.__version__, __version__ ),
        usage = """%prog [options] [package ...]

Executes the specified task (default is 'build') for a given set of BitBake files.
It expects that BBFILES is defined, which is a space separated list of files to
be executed. BBFILES does support wildcards.
Default BBFILES are the .bb files in the current directory.""" )

    parser.add_option( "-b", "--buildfile", help = "execute the task against this .bb file, rather than a package from BBFILES.",
        action = "store", dest = "buildfile", default = None )

    parser.add_option( "-k", "--continue", help = "continue as much as possible after an error. While the target that failed, and those that depend on it, cannot be remade, the other dependencies of these targets can be processed all the same.",
        action = "store_false", dest = "abort", default = True )

    parser.add_option( "-a", "--tryaltconfigs", help = "continue with builds by trying to use alternative providers where possible.",
        action = "store_true", dest = "tryaltconfigs", default = False )

    parser.add_option( "-f", "--force", help = "force run of specified cmd, regardless of stamp status",
        action = "store_true", dest = "force", default = False )

    parser.add_option( "-i", "--interactive", help = "drop into the interactive mode also called the BitBake shell.",
        action = "store_true", dest = "interactive", default = False )

    parser.add_option( "-c", "--cmd", help = "Specify task to execute. Note that this only executes the specified task for the providee and the packages it depends on, i.e. 'compile' does not implicitly call stage for the dependencies (IOW: use only if you know what you are doing). Depending on the base.bbclass a listtasks tasks is defined and will show available tasks",
        action = "store", dest = "cmd" )

    parser.add_option( "-r", "--read", help = "read the specified file before bitbake.conf",
        action = "append", dest = "file", default = [] )

    parser.add_option( "-v", "--verbose", help = "output more chit-chat to the terminal",
        action = "store_true", dest = "verbose", default = False )

    parser.add_option( "-D", "--debug", help = "Increase the debug level. You can specify this more than once.",
        action = "count", dest="debug", default = 0)

    parser.add_option( "-n", "--dry-run", help = "don't execute, just go through the motions",
        action = "store_true", dest = "dry_run", default = False )

    parser.add_option( "-p", "--parse-only", help = "quit after parsing the BB files (developers only)",
        action = "store_true", dest = "parse_only", default = False )

    parser.add_option( "-d", "--disable-psyco", help = "disable using the psyco just-in-time compiler (not recommended)",
        action = "store_true", dest = "disable_psyco", default = False )

    parser.add_option( "-s", "--show-versions", help = "show current and preferred versions of all packages",
        action = "store_true", dest = "show_versions", default = False )

    parser.add_option( "-e", "--environment", help = "show the global or per-package environment (this is what used to be bbread)",
        action = "store_true", dest = "show_environment", default = False )

    parser.add_option( "-g", "--graphviz", help = "emit the dependency trees of the specified packages in the dot syntax",
        action = "store_true", dest = "dot_graph", default = False )

    parser.add_option( "-I", "--ignore-deps", help = """Assume these dependencies don't exist and are already provided (equivalent to ASSUME_PROVIDED). Useful to make dependency graphs more appealing""",
        action = "append", dest = "extra_assume_provided", default = [] )

    parser.add_option( "-l", "--log-domains", help = """Show debug logging for the specified logging domains""",
        action = "append", dest = "debug_domains", default = [] )

    parser.add_option( "-P", "--profile", help = "profile the command and print a report",
        action = "store_true", dest = "profile", default = False )

    parser.add_option( "-u", "--ui", help = "userinterface to use",
        action = "store", dest = "ui")

    parser.add_option( "", "--revisions-changed", help = "Set the exit code depending on whether upstream floating revisions have changed or not",
        action = "store_true", dest = "revisions_changed", default = False )

    options, args = parser.parse_args(sys.argv)

    # Everything left on the command line after the options is treated as a
    # list of build targets.
    configuration = BBConfiguration(options)
    configuration.pkgs_to_build = []
    configuration.pkgs_to_build.extend(args[1:])

    #server = bb.server.xmlrpc
    server = bb.server.none

    # Save a logfile for cooker into the current working directory. When the
    # server is daemonized this logfile will be truncated.
    cooker_logfile = os.path.join (os.getcwd(), "cooker.log")

    cooker = bb.cooker.BBCooker(configuration, server)

    # Clear away any spurious environment variables. But don't wipe the
    # environment totally. This is necessary to ensure the correct operation
    # of the UIs (e.g. for DISPLAY, etc.)
    bb.utils.clean_environment()

    cooker.parseCommandLine()

    serverinfo = server.BitbakeServerInfo(cooker.server)

    # Fork the cooker into the background server process; this process keeps
    # only the connection information and becomes the UI frontend.
    server.BitBakeServerFork(serverinfo, cooker.serve, cooker_logfile)
    del cooker

    sys.excepthook = print_exception

    # Setup a connection to the server (cooker)
    serverConnection = server.BitBakeServerConnection(serverinfo)

    # Launch the UI
    if configuration.ui:
        ui = configuration.ui
    else:
        ui = "knotty"

    try:
        # Dynamically load the UI based on the ui name. Although we
        # suggest a fixed set this allows you to have flexibility in which
        # ones are available.
        # NOTE(review): Python 2 exec statements; the second one relies on
        # exec being able to rebind the local return_value.
        exec "from bb.ui import " + ui
        exec "return_value = " + ui + ".init(serverConnection.connection, serverConnection.events)"
    except ImportError:
        print "FATAL: Invalid user interface '%s' specified. " % ui
        print "Valid interfaces are 'ncurses', 'depexp' or the default, 'knotty'."
    except Exception, e:
        print "FATAL: Unable to start to '%s' UI: %s." % (configuration.ui, e.message)
    finally:
        # Always shut down the server connection, whether the UI ran or not.
        serverConnection.terminate()
    return return_value
192
if __name__ == "__main__":
    # Propagate the UI's return value as the process exit code.
    ret = main()
    sys.exit(ret)
diff --git a/bitbake-dev/bin/bitdoc b/bitbake-dev/bin/bitdoc
deleted file mode 100755
index 4940f660a6..0000000000
--- a/bitbake-dev/bin/bitdoc
+++ /dev/null
@@ -1,534 +0,0 @@
1#!/usr/bin/env python
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4#
5# Copyright (C) 2005 Holger Hans Peter Freyther
6#
7# This program is free software; you can redistribute it and/or modify
8# it under the terms of the GNU General Public License version 2 as
9# published by the Free Software Foundation.
10#
11# This program is distributed in the hope that it will be useful,
12# but WITHOUT ANY WARRANTY; without even the implied warranty of
13# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14# GNU General Public License for more details.
15#
16# You should have received a copy of the GNU General Public License along
17# with this program; if not, write to the Free Software Foundation, Inc.,
18# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
19
20import optparse, os, sys
21
22# bitbake
23sys.path.append(os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
24import bb
25import bb.parse
26from string import split, join
27
28__version__ = "0.0.2"
29
30class HTMLFormatter:
31 """
32 Simple class to help to generate some sort of HTML files. It is
33 quite inferior solution compared to docbook, gtkdoc, doxygen but it
34 should work for now.
35 We've a global introduction site (index.html) and then one site for
36 the list of keys (alphabetical sorted) and one for the list of groups,
37 one site for each key with links to the relations and groups.
38
39 index.html
40 all_keys.html
41 all_groups.html
42 groupNAME.html
43 keyNAME.html
44 """
45
46 def replace(self, text, *pairs):
47 """
48 From pydoc... almost identical at least
49 """
50 while pairs:
51 (a,b) = pairs[0]
52 text = join(split(text, a), b)
53 pairs = pairs[1:]
54 return text
55 def escape(self, text):
56 """
57 Escape string to be conform HTML
58 """
59 return self.replace(text,
60 ('&', '&amp;'),
61 ('<', '&lt;' ),
62 ('>', '&gt;' ) )
    def createNavigator(self):
        """
        Create the navigator: the shared navigation header table
        (Home / Groups / Keys links) embedded at the top of every
        generated page.
        """
        return """<table class="navigation" width="100%" summary="Navigation header" cellpadding="2" cellspacing="2">
<tr valign="middle">
<td><a accesskey="g" href="index.html">Home</a></td>
<td><a accesskey="n" href="all_groups.html">Groups</a></td>
<td><a accesskey="u" href="all_keys.html">Keys</a></td>
</tr></table>
"""
74
75 def relatedKeys(self, item):
76 """
77 Create HTML to link to foreign keys
78 """
79
80 if len(item.related()) == 0:
81 return ""
82
83 txt = "<p><b>See also:</b><br>"
84 txts = []
85 for it in item.related():
86 txts.append("""<a href="key%(it)s.html">%(it)s</a>""" % vars() )
87
88 return txt + ",".join(txts)
89
90 def groups(self,item):
91 """
92 Create HTML to link to related groups
93 """
94
95 if len(item.groups()) == 0:
96 return ""
97
98
99 txt = "<p><b>See also:</b><br>"
100 txts = []
101 for group in item.groups():
102 txts.append( """<a href="group%s.html">%s</a> """ % (group,group) )
103
104 return txt + ",".join(txts)
105
106
    def createKeySite(self,item):
        """
        Create a site for a key. It contains the header/navigator, a heading,
        the description, links to related keys and to the groups.
        """

        # Template slots, in order: key name (title), navigator HTML, key
        # name (heading), HTML-escaped description, related-keys HTML,
        # groups HTML.
        return """<!doctype html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<html><head><title>Key %s</title></head>
<link rel="stylesheet" href="style.css" type="text/css">
<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
%s
<h2><span class="refentrytitle">%s</span></h2>

<div class="refsynopsisdiv">
<h2>Synopsis</h2>
<p>
%s
</p>
</div>

<div class="refsynopsisdiv">
<h2>Related Keys</h2>
<p>
%s
</p>
</div>

<div class="refsynopsisdiv">
<h2>Groups</h2>
<p>
%s
</p>
</div>


</body>
""" % (item.name(), self.createNavigator(), item.name(),
       self.escape(item.description()), self.relatedKeys(item), self.groups(item))
145
146 def createGroupsSite(self, doc):
147 """
148 Create the Group Overview site
149 """
150
151 groups = ""
152 sorted_groups = doc.groups()
153 sorted_groups.sort()
154 for group in sorted_groups:
155 groups += """<a href="group%s.html">%s</a><br>""" % (group, group)
156
157 return """<!doctype html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
158<html><head><title>Group overview</title></head>
159<link rel="stylesheet" href="style.css" type="text/css">
160<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
161%s
162<h2>Available Groups</h2>
163%s
164</body>
165""" % (self.createNavigator(), groups)
166
    def createIndex(self):
        """
        Create the index file (index.html): the documentation entrance
        page linking to the group and key overview pages.
        """

        return """<!doctype html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<html><head><title>Bitbake Documentation</title></head>
<link rel="stylesheet" href="style.css" type="text/css">
<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
%s
<h2>Documentation Entrance</h2>
<a href="all_groups.html">All available groups</a><br>
<a href="all_keys.html">All available keys</a><br>
</body>
""" % self.createNavigator()
182
183 def createKeysSite(self, doc):
184 """
185 Create Overview of all avilable keys
186 """
187 keys = ""
188 sorted_keys = doc.doc_keys()
189 sorted_keys.sort()
190 for key in sorted_keys:
191 keys += """<a href="key%s.html">%s</a><br>""" % (key, key)
192
193 return """<!doctype html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
194<html><head><title>Key overview</title></head>
195<link rel="stylesheet" href="style.css" type="text/css">
196<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
197%s
198<h2>Available Keys</h2>
199%s
200</body>
201""" % (self.createNavigator(), keys)
202
203 def createGroupSite(self, gr, items, _description = None):
204 """
205 Create a site for a group:
206 Group the name of the group, items contain the name of the keys
207 inside this group
208 """
209 groups = ""
210 description = ""
211
212 # create a section with the group descriptions
213 if _description:
214 description += "<h2 Description of Grozp %s</h2>" % gr
215 description += _description
216
217 items.sort(lambda x,y:cmp(x.name(),y.name()))
218 for group in items:
219 groups += """<a href="key%s.html">%s</a><br>""" % (group.name(), group.name())
220
221 return """<!doctype html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
222<html><head><title>Group %s</title></head>
223<link rel="stylesheet" href="style.css" type="text/css">
224<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
225%s
226%s
227<div class="refsynopsisdiv">
228<h2>Keys in Group %s</h2>
229<pre class="synopsis">
230%s
231</pre>
232</div>
233</body>
234""" % (gr, self.createNavigator(), description, gr, groups)
235
236
237
    def createCSS(self):
        """
        Create the CSS file
        """
        # Static stylesheet shared by every generated page; main() writes
        # this out once as style.css in the output directory.
        return """.synopsis, .classsynopsis
{
  background: #eeeeee;
  border: solid 1px #aaaaaa;
  padding: 0.5em;
}
.programlisting
{
  background: #eeeeff;
  border: solid 1px #aaaaff;
  padding: 0.5em;
}
.variablelist
{
  padding: 4px;
  margin-left: 3em;
}
.variablelist td:first-child
{
  vertical-align: top;
}
table.navigation
{
  background: #ffeeee;
  border: solid 1px #ffaaaa;
  margin-top: 0.5em;
  margin-bottom: 0.5em;
}
.navigation a
{
  color: #770000;
}
.navigation a:visited
{
  color: #550000;
}
.navigation .title
{
  font-size: 200%;
}
div.refnamediv
{
  margin-top: 2em;
}
div.gallery-float
{
  float: left;
  padding: 10px;
}
div.gallery-float img
{
  border-style: none;
}
div.gallery-spacer
{
  clear: both;
}
a
{
  text-decoration: none;
}
a:hover
{
  text-decoration: underline;
  color: #FF0000;
}
"""
309
310
311
class DocumentationItem:
    """
    One documented configuration key.

    Stores the key name, its free-form description, the names of
    related keys, and the groups the key is filed under.  All fields
    start empty and are filled in through the setter/adder methods.
    """

    def __init__(self):
        self._name = ""
        self._desc = ""
        self._groups = []
        self._related = []

    # -- read accessors --------------------------------------------------

    def name(self):
        return self._name

    def description(self):
        return self._desc

    def groups(self):
        return self._groups

    def related(self):
        return self._related

    # -- write accessors -------------------------------------------------

    def setName(self, name):
        self._name = name

    def setDescription(self, desc):
        self._desc = desc

    def addGroup(self, group):
        self._groups.append(group)

    def addRelation(self, relation):
        self._related.append(relation)

    def sort(self):
        """Sort the relation and group lists in place."""
        for entries in (self._related, self._groups):
            entries.sort()
352
353
class Documentation:
    """
    Registry of DocumentationItems, indexed both by key name and by
    group name.
    """

    def __init__(self):
        self.__keys = {}
        self.__groups = {}

    def insert_doc_item(self, item):
        """
        Register item under its key name and under each group it
        belongs to, keeping the per-group lists sorted.
        """
        item.sort()
        self.__keys[item.name()] = item

        for group in item.groups():
            members = self.__groups.setdefault(group, [])
            members.append(item)
            members.sort()

    def doc_item(self, key):
        """
        Return the DocumentationItem registered for key, or None.
        """
        return self.__keys.get(key)

    def doc_keys(self):
        """
        Return the documented key names.
        """
        return self.__keys.keys()

    def groups(self):
        """
        Return the names of the known groups.
        """
        return self.__groups.keys()

    def group_content(self, group_name):
        """
        Return the list of items filed under group_name, or the empty
        list for an unknown group.
        """
        return self.__groups.get(group_name, [])
408
409
def parse_cmdline(args):
    """
    Parse the command line in *args* and return the 2-tuple
    (config_file, output_dir).
    """

    usage = """%prog [options]

Create a set of html pages (documentation) for a bitbake.conf....
"""

    # BUGFIX: the usage string used to be built after the parser and was
    # never handed to OptionParser, so --help showed the default usage.
    parser = optparse.OptionParser( usage = usage,
        version = "Bitbake Documentation Tool Core version %s, %%prog version %s" % (bb.__version__, __version__) )

    # Add the needed options
    parser.add_option( "-c", "--config", help = "Use the specified configuration file as source",
                       action = "store", dest = "config", default = os.path.join("conf", "documentation.conf") )

    parser.add_option( "-o", "--output", help = "Output directory for html files",
                       action = "store", dest = "output", default = "html/" )

    parser.add_option( "-D", "--debug", help = "Increase the debug level",
                       action = "count", dest = "debug", default = 0 )

    parser.add_option( "-v", "--verbose", help = "output more chit-chat to the terminal",
                       action = "store_true", dest = "verbose", default = False )

    # BUGFIX: the args parameter was ignored and sys.argv was re-read
    # directly; the only caller passes sys.argv, so behaviour is unchanged.
    options, args = parser.parse_args( args )

    if options.debug:
        bb.msg.set_debug_level(options.debug)

    return options.config, options.output
440
def main():
    """
    Entry point: parse the command line, load the documentation
    configuration file, build the in-memory documentation tree and
    render it as a set of static HTML pages in the output directory.
    """

    (config_file,output_dir) = parse_cmdline( sys.argv )

    # right to let us load the file now
    try:
        documentation = bb.parse.handle( config_file, bb.data.init() )
    except IOError:
        bb.fatal( "Unable to open %s" % config_file )
    except bb.parse.ParseError:
        bb.fatal( "Unable to parse %s" % config_file )

    # bb.parse.handle may return a dict of datastores; "" is the entry
    # for the file itself.
    if isinstance(documentation, dict):
        documentation = documentation[""]

    # Assuming we've the file loaded now, we will initialize the 'tree'
    doc = Documentation()

    # States of the little doc-string tokenizer below: plain words feed
    # the description, "@see" switches to collecting related keys and
    # "@group" to collecting group names.
    state_begin = 0
    state_see = 1
    state_group = 2

    for key in bb.data.keys(documentation):
        data = bb.data.getVarFlag(key, "doc", documentation)
        if not data:
            continue

        # The Documentation now starts
        doc_ins = DocumentationItem()
        doc_ins.setName(key)


        tokens = data.split(' ')
        state = state_begin
        string= ""
        for token in tokens:
            token = token.strip(',')

            if not state == state_see and token == "@see":
                state = state_see
                continue
            elif not state == state_group and token == "@group":
                state = state_group
                continue

            if state == state_begin:
                string += " %s" % token
            elif state == state_see:
                doc_ins.addRelation(token)
            elif state == state_group:
                doc_ins.addGroup(token)

        # set the description
        doc_ins.setDescription(string)
        doc.insert_doc_item(doc_ins)

    # let us create the HTML now
    bb.mkdirhier(output_dir)
    os.chdir(output_dir)

    # Let us create the sites now. We do it in the following order
    # Start with the index.html. It will point to sites explaining all
    # keys and groups
    # NOTE(review): none of the file handles below are closed or flushed
    # explicitly -- this relies on interpreter exit to flush them.
    html_slave = HTMLFormatter()

    f = file('style.css', 'w')
    print >> f, html_slave.createCSS()

    f = file('index.html', 'w')
    print >> f, html_slave.createIndex()

    f = file('all_groups.html', 'w')
    print >> f, html_slave.createGroupsSite(doc)

    f = file('all_keys.html', 'w')
    print >> f, html_slave.createKeysSite(doc)

    # now for each group create the site
    for group in doc.groups():
        f = file('group%s.html' % group, 'w')
        print >> f, html_slave.createGroupSite(group, doc.group_content(group))

    # now for the keys
    for key in doc.doc_keys():
        f = file('key%s.html' % doc.doc_item(key).name(), 'w')
        print >> f, html_slave.createKeySite(doc.doc_item(key))


if __name__ == "__main__":
    main()
diff --git a/bitbake-dev/lib/bb/COW.py b/bitbake-dev/lib/bb/COW.py
deleted file mode 100644
index ca206cf4b4..0000000000
--- a/bitbake-dev/lib/bb/COW.py
+++ /dev/null
@@ -1,318 +0,0 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# This is a copy on write dictionary and set which abuses classes to try and be nice and fast.
5#
6# Copyright (C) 2006 Tim Amsell
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20#
21#Please Note:
22# Be careful when using mutable types (ie Dict and Lists) - operations involving these are SLOW.
23# Assign a file to __warn__ to get warnings about slow operations.
24#
25
import copy
import types
# Monkey-patch a tuple of every type that can safely be shared between
# COW levels without copying (Python 2 type names).  Anything NOT in
# this tuple is treated as mutable by COWDictMeta.__setitem__.
types.ImmutableTypes = tuple([ \
    types.BooleanType, \
    types.ComplexType, \
    types.FloatType, \
    types.IntType, \
    types.LongType, \
    types.NoneType, \
    types.TupleType, \
    frozenset] + \
    list(types.StringTypes))

# Suffix appended to the attribute name of mutable values so reads know
# the value must be copied before it may be modified.
MUTABLE = "__mutable__"
40
class COWMeta(type):
    # Common base so COW-managed objects can be recognised with
    # isinstance(value, COWMeta) (see COWDictMeta.__setitem__).
    pass
43
class COWDictMeta(COWMeta):
    """
    Metaclass implementing a copy-on-write dictionary.

    The "dict" is the class itself: entries are class attributes, a
    copy is just a new subclass (see cow()), and attribute lookup
    through the subclass chain provides the fall-through to older
    levels for free.  Mutable values are stored under key + MUTABLE
    and copied the first time they are fetched for writing.
    """
    __warn__ = False        # assign a file-like object to log slow-path warnings
    __hasmutable__ = False
    __marker__ = tuple()    # sentinel stored in place of deleted keys

    def __str__(cls):
        # FIXME: I have magic numbers!
        return "<COWDict Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) - 3)
    __repr__ = __str__

    def cow(cls):
        # "Copying" is just subclassing: new writes land on the subclass,
        # reads fall through to the parent levels via attribute lookup.
        class C(cls):
            __count__ = cls.__count__ + 1
        return C
    copy = cow
    __call__ = cow

    def __setitem__(cls, key, value):
        if not isinstance(value, types.ImmutableTypes):
            if not isinstance(value, COWMeta):
                cls.__hasmutable__ = True
            # Rename mutable entries so reads know to copy before writing.
            key += MUTABLE
        setattr(cls, key, value)

    def __getmutable__(cls, key, readonly=False):
        nkey = key + MUTABLE
        try:
            # Already copied into this level -- hand it straight back.
            return cls.__dict__[nkey]
        except KeyError:
            pass

        value = getattr(cls, nkey)
        if readonly:
            return value

        if not cls.__warn__ is False and not isinstance(value, COWMeta):
            print >> cls.__warn__, "Warning: Doing a copy because %s is a mutable type." % key
        try:
            value = value.copy()
        except AttributeError, e:
            value = copy.copy(value)
        # Cache the copy on this level so subsequent writes stay local.
        setattr(cls, nkey, value)
        return value

    __getmarker__ = []      # unique default sentinel for __getitem__
    def __getreadonly__(cls, key, default=__getmarker__):
        """\
        Get a value (even if mutable) which you promise not to change.
        """
        return cls.__getitem__(key, default, True)

    def __getitem__(cls, key, default=__getmarker__, readonly=False):
        try:
            try:
                value = getattr(cls, key)
            except AttributeError:
                value = cls.__getmutable__(key, readonly)

            # This is for values which have been deleted
            if value is cls.__marker__:
                raise AttributeError("key %s does not exist." % key)

            return value
        except AttributeError, e:
            if not default is cls.__getmarker__:
                return default

            raise KeyError(str(e))

    def __delitem__(cls, key):
        # Deletion writes the marker at this level; lower levels are
        # untouched (see __revertitem__ to undo).
        cls.__setitem__(key, cls.__marker__)

    def __revertitem__(cls, key):
        # Drop this level's override so the parent level's value shows again.
        if not cls.__dict__.has_key(key):
            key += MUTABLE
        delattr(cls, key)

    def has_key(cls, key):
        value = cls.__getreadonly__(key, cls.__marker__)
        if value is cls.__marker__:
            return False
        return True

    def iter(cls, type, readonly=False):
        # 'type' selects what to yield: "keys", "values" or "items".
        for key in dir(cls):
            if key.startswith("__"):
                continue

            if key.endswith(MUTABLE):
                key = key[:-len(MUTABLE)]

            if type == "keys":
                yield key

            try:
                if readonly:
                    value = cls.__getreadonly__(key)
                else:
                    value = cls[key]
            except KeyError:
                # Key deleted at this level -- skip it.
                continue

            if type == "values":
                yield value
            if type == "items":
                yield (key, value)
        raise StopIteration()

    def iterkeys(cls):
        return cls.iter("keys")
    def itervalues(cls, readonly=False):
        if not cls.__warn__ is False and cls.__hasmutable__ and readonly is False:
            print >> cls.__warn__, "Warning: If you arn't going to change any of the values call with True."
        return cls.iter("values", readonly)
    def iteritems(cls, readonly=False):
        if not cls.__warn__ is False and cls.__hasmutable__ and readonly is False:
            print >> cls.__warn__, "Warning: If you arn't going to change any of the values call with True."
        return cls.iter("items", readonly)
162
class COWSetMeta(COWDictMeta):
    """
    Copy-on-write set built on COWDictMeta: members are stored as dict
    entries keyed by the repr() of their hash.
    """
    def __str__(cls):
        # FIXME: I have magic numbers!
        return "<COWSet Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) -3)
    __repr__ = __str__

    def cow(cls):
        class C(cls):
            __count__ = cls.__count__ + 1
        return C

    def add(cls, value):
        COWDictMeta.__setitem__(cls, repr(hash(value)), value)

    def remove(cls, value):
        COWDictMeta.__delitem__(cls, repr(hash(value)))

    def __in__(cls, value):
        # BUGFIX: has_key is called through the class, so cls must be
        # passed explicitly; it was previously omitted, making every
        # membership test blow up instead of returning a bool.
        return COWDictMeta.has_key(cls, repr(hash(value)))

    def iterkeys(cls):
        raise TypeError("sets don't have keys")

    def iteritems(cls):
        raise TypeError("sets don't have 'items'")
188
# These are the actual classes you use!
class COWDictBase(object):
    # Python 2 metaclass hook; all COW behaviour lives in COWDictMeta.
    __metaclass__ = COWDictMeta
    __count__ = 0     # copy/nesting level, incremented by cow()
193
class COWSetBase(object):
    # Python 2 metaclass hook; all COW behaviour lives in COWSetMeta.
    __metaclass__ = COWSetMeta
    __count__ = 0     # copy/nesting level, incremented by cow()
197
# Self-test / demonstration of the COW dict and set behaviour; prints a
# running commentary of how copies diverge from their parents.
if __name__ == "__main__":
    import sys
    COWDictBase.__warn__ = sys.stderr
    a = COWDictBase()
    print "a", a

    a['a'] = 'a'
    a['b'] = 'b'
    a['dict'] = {}

    # b is a COW copy of a: reads fall through, writes stay local to b.
    b = a.copy()
    print "b", b
    b['c'] = 'b'

    print

    print "a", a
    for x in a.iteritems():
        print x
    print "--"
    print "b", b
    for x in b.iteritems():
        print x
    print

    # Writing through b must not disturb a.
    b['dict']['a'] = 'b'
    b['a'] = 'c'

    print "a", a
    for x in a.iteritems():
        print x
    print "--"
    print "b", b
    for x in b.iteritems():
        print x
    print

    # Missing keys raise KeyError, as for a normal dict.
    try:
        b['dict2']
    except KeyError, e:
        print "Okay!"

    # Nested COW set inside the COW dict.
    a['set'] = COWSetBase()
    a['set'].add("o1")
    a['set'].add("o1")
    a['set'].add("o2")

    print "a", a
    for x in a['set'].itervalues():
        print x
    print "--"
    print "b", b
    for x in b['set'].itervalues():
        print x
    print

    b['set'].add('o3')

    print "a", a
    for x in a['set'].itervalues():
        print x
    print "--"
    print "b", b
    for x in b['set'].itervalues():
        print x
    print

    # A plain (non-COW) set is treated as an ordinary mutable value.
    a['set2'] = set()
    a['set2'].add("o1")
    a['set2'].add("o1")
    a['set2'].add("o2")

    print "a", a
    for x in a.iteritems():
        print x
    print "--"
    print "b", b
    for x in b.iteritems(readonly=True):
        print x
    print

    # Deletion on b hides the key without touching a.
    del b['b']
    try:
        print b['b']
    except KeyError:
        print "Yay! deleted key raises error"

    if b.has_key('b'):
        print "Boo!"
    else:
        print "Yay - has_key with delete works!"

    print "a", a
    for x in a.iteritems():
        print x
    print "--"
    print "b", b
    for x in b.iteritems(readonly=True):
        print x
    print

    # __revertitem__ drops b's local override so a's value shows again.
    b.__revertitem__('b')

    print "a", a
    for x in a.iteritems():
        print x
    print "--"
    print "b", b
    for x in b.iteritems(readonly=True):
        print x
    print

    b.__revertitem__('dict')
    print "a", a
    for x in a.iteritems():
        print x
    print "--"
    print "b", b
    for x in b.iteritems(readonly=True):
        print x
    print
diff --git a/bitbake-dev/lib/bb/__init__.py b/bitbake-dev/lib/bb/__init__.py
deleted file mode 100644
index f2f8f656d8..0000000000
--- a/bitbake-dev/lib/bb/__init__.py
+++ /dev/null
@@ -1,1134 +0,0 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# BitBake Build System Python Library
5#
6# Copyright (C) 2003 Holger Schurig
7# Copyright (C) 2003, 2004 Chris Larson
8#
9# Based on Gentoo's portage.py.
10#
11# This program is free software; you can redistribute it and/or modify
12# it under the terms of the GNU General Public License version 2 as
13# published by the Free Software Foundation.
14#
15# This program is distributed in the hope that it will be useful,
16# but WITHOUT ANY WARRANTY; without even the implied warranty of
17# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18# GNU General Public License for more details.
19#
20# You should have received a copy of the GNU General Public License along
21# with this program; if not, write to the Free Software Foundation, Inc.,
22# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
23
__version__ = "1.9.0"

# Names exported via "from bb import *".
__all__ = [

    "debug",
    "note",
    "error",
    "fatal",

    "mkdirhier",
    "movefile",

    "tokenize",
    "evaluate",
    "flatten",
    "relparse",
    "ververify",
    "isjustname",
    "isspecific",
    "pkgsplit",
    "catpkgsplit",
    "vercmp",
    "pkgcmp",
    "dep_parenreduce",
    "dep_opconvert",

# fetch
    "decodeurl",
    "encodeurl",

# modules
    "parse",
    "data",
    "command",
    "event",
    "build",
    "fetch",
    "manifest",
    "methodpool",
    "cache",
    "runqueue",
    "taskdata",
    "providers",
    ]

# Character classes used by the tokenizer further down in this module.
whitespace = '\t\n\x0b\x0c\r '
lowercase = 'abcdefghijklmnopqrstuvwxyz'

import sys, os, types, re, string, bb
from bb import msg

#projectdir = os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0])))
projectdir = os.getcwd()

# A BBDEBUG environment variable seeds the initial debug level.
if "BBDEBUG" in os.environ:
    level = int(os.environ["BBDEBUG"])
    if level:
        bb.msg.set_debug_level(level)
82
class VarExpandError(Exception):
    """Exception raised when variable expansion fails."""
    pass
85
class MalformedUrl(Exception):
    """Exception raised when encountering an invalid url"""
88
89
90#######################################################################
91#######################################################################
92#
93# SECTION: Debug
94#
95# PURPOSE: little functions to make yourself known
96#
97#######################################################################
98#######################################################################
99
def plain(*args):
    # NOTE(review): passes a single joined string to bb.msg.warn, while
    # warn() below passes (level, domain, message) -- confirm which
    # signature bb.msg.warn actually takes; one of the two looks wrong.
    bb.msg.warn(''.join(args))
102
def debug(lvl, *args):
    """Log the concatenated *args* at debug level *lvl* via bb.msg."""
    bb.msg.debug(lvl, None, ''.join(args))
105
def note(*args):
    """Log the concatenated *args* as a level-1 note via bb.msg."""
    bb.msg.note(1, None, ''.join(args))
108
def warn(*args):
    """Log the concatenated *args* as a warning via bb.msg."""
    bb.msg.warn(1, None, ''.join(args))
111
def error(*args):
    """Log the concatenated *args* as an error via bb.msg."""
    bb.msg.error(None, ''.join(args))
114
def fatal(*args):
    """Report a fatal error via bb.msg (bb.msg.fatal decides how to abort)."""
    bb.msg.fatal(None, ''.join(args))
117
118
119#######################################################################
120#######################################################################
121#
122# SECTION: File
123#
124# PURPOSE: Basic file and directory tree related functions
125#
126#######################################################################
127#######################################################################
128
def mkdirhier(dir):
    """Create a directory like 'mkdir -p', but does not complain if
    directory already exists like os.makedirs
    """

    debug(3, "mkdirhier(%s)" % dir)
    try:
        os.makedirs(dir)
        debug(2, "created " + dir)
    except OSError, e:
        # errno 17 == EEXIST: the directory is already there, which is fine.
        if e.errno != 17: raise e
140
141
142#######################################################################
143
144import stat
145
146def movefile(src,dest,newmtime=None,sstat=None):
147 """Moves a file from src to dest, preserving all permissions and
148 attributes; mtime will be preserved even when moving across
149 filesystems. Returns true on success and false on failure. Move is
150 atomic.
151 """
152
153 #print "movefile("+src+","+dest+","+str(newmtime)+","+str(sstat)+")"
154 try:
155 if not sstat:
156 sstat=os.lstat(src)
157 except Exception, e:
158 print "movefile: Stating source file failed...", e
159 return None
160
161 destexists=1
162 try:
163 dstat=os.lstat(dest)
164 except:
165 dstat=os.lstat(os.path.dirname(dest))
166 destexists=0
167
168 if destexists:
169 if stat.S_ISLNK(dstat[stat.ST_MODE]):
170 try:
171 os.unlink(dest)
172 destexists=0
173 except Exception, e:
174 pass
175
176 if stat.S_ISLNK(sstat[stat.ST_MODE]):
177 try:
178 target=os.readlink(src)
179 if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
180 os.unlink(dest)
181 os.symlink(target,dest)
182 #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
183 os.unlink(src)
184 return os.lstat(dest)
185 except Exception, e:
186 print "movefile: failed to properly create symlink:", dest, "->", target, e
187 return None
188
189 renamefailed=1
190 if sstat[stat.ST_DEV]==dstat[stat.ST_DEV]:
191 try:
192 ret=os.rename(src,dest)
193 renamefailed=0
194 except Exception, e:
195 import errno
196 if e[0]!=errno.EXDEV:
197 # Some random error.
198 print "movefile: Failed to move", src, "to", dest, e
199 return None
200 # Invalid cross-device-link 'bind' mounted or actually Cross-Device
201
202 if renamefailed:
203 didcopy=0
204 if stat.S_ISREG(sstat[stat.ST_MODE]):
205 try: # For safety copy then move it over.
206 shutil.copyfile(src,dest+"#new")
207 os.rename(dest+"#new",dest)
208 didcopy=1
209 except Exception, e:
210 print 'movefile: copy', src, '->', dest, 'failed.', e
211 return None
212 else:
213 #we don't yet handle special, so we need to fall back to /bin/mv
214 a=getstatusoutput("/bin/mv -f "+"'"+src+"' '"+dest+"'")
215 if a[0]!=0:
216 print "movefile: Failed to move special file:" + src + "' to '" + dest + "'", a
217 return None # failure
218 try:
219 if didcopy:
220 missingos.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
221 os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
222 os.unlink(src)
223 except Exception, e:
224 print "movefile: Failed to chown/chmod/unlink", dest, e
225 return None
226
227 if newmtime:
228 os.utime(dest,(newmtime,newmtime))
229 else:
230 os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
231 newmtime=sstat[stat.ST_MTIME]
232 return newmtime
233
234def copyfile(src,dest,newmtime=None,sstat=None):
235 """
236 Copies a file from src to dest, preserving all permissions and
237 attributes; mtime will be preserved even when moving across
238 filesystems. Returns true on success and false on failure.
239 """
240 import os, stat, shutil
241
242 #print "copyfile("+src+","+dest+","+str(newmtime)+","+str(sstat)+")"
243 try:
244 if not sstat:
245 sstat=os.lstat(src)
246 except Exception, e:
247 print "copyfile: Stating source file failed...", e
248 return False
249
250 destexists=1
251 try:
252 dstat=os.lstat(dest)
253 except:
254 dstat=os.lstat(os.path.dirname(dest))
255 destexists=0
256
257 if destexists:
258 if stat.S_ISLNK(dstat[stat.ST_MODE]):
259 try:
260 os.unlink(dest)
261 destexists=0
262 except Exception, e:
263 pass
264
265 if stat.S_ISLNK(sstat[stat.ST_MODE]):
266 try:
267 target=os.readlink(src)
268 if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
269 os.unlink(dest)
270 os.symlink(target,dest)
271 #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
272 return os.lstat(dest)
273 except Exception, e:
274 print "copyfile: failed to properly create symlink:", dest, "->", target, e
275 return False
276
277 if stat.S_ISREG(sstat[stat.ST_MODE]):
278 try: # For safety copy then move it over.
279 shutil.copyfile(src,dest+"#new")
280 os.rename(dest+"#new",dest)
281 except Exception, e:
282 print 'copyfile: copy', src, '->', dest, 'failed.', e
283 return False
284 else:
285 #we don't yet handle special, so we need to fall back to /bin/mv
286 a=getstatusoutput("/bin/cp -f "+"'"+src+"' '"+dest+"'")
287 if a[0]!=0:
288 print "copyfile: Failed to copy special file:" + src + "' to '" + dest + "'", a
289 return False # failure
290 try:
291 os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
292 os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
293 except Exception, e:
294 print "copyfile: Failed to chown/chmod/unlink", dest, e
295 return False
296
297 if newmtime:
298 os.utime(dest,(newmtime,newmtime))
299 else:
300 os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
301 newmtime=sstat[stat.ST_MTIME]
302 return newmtime
303
304#######################################################################
305#######################################################################
306#
307# SECTION: Download
308#
309# PURPOSE: Download via HTTP, FTP, CVS, BITKEEPER, handling of MD5-signatures
310# and mirrors
311#
312#######################################################################
313#######################################################################
314
def decodeurl(url):
    """Decodes an URL into the tokens (scheme, network location, path,
    user, password, parameters).

    >>> decodeurl("http://www.google.com/index.html")
    ('http', 'www.google.com', '/index.html', '', '', {})

    CVS url with username, host and cvsroot. The cvs module to check out is in the
    parameters:

    >>> decodeurl("cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg")
    ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'})

    Dito, but this time the username has a password part. And we also request a special tag
    to check out.

    >>> decodeurl("cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;module=familiar/dist/ipkg;tag=V0-99-81")
    ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', {'tag': 'V0-99-81', 'module': 'familiar/dist/ipkg'})
    """

    m = re.compile('(?P<type>[^:]*)://((?P<user>.+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
    if not m:
        raise MalformedUrl(url)

    type = m.group('type')
    location = m.group('location')
    if not location:
        raise MalformedUrl(url)
    user = m.group('user')
    parm = m.group('parm')

    # Split the location into host and path at the first '/'.
    locidx = location.find('/')
    if locidx != -1:
        host = location[:locidx]
        path = location[locidx:]
    else:
        host = ""
        path = location
    # Split an optional ":password" off the user part.
    if user:
        m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
        if m:
            user = m.group('user')
            pswd = m.group('pswd')
    else:
        user = ''
        pswd = ''

    p = {}
    if parm:
        for s in parm.split(';'):
            # BUGFIX/generalisation: split only on the first '=' so that
            # parameter values may themselves contain '=' (previously a
            # ValueError).
            s1, s2 = s.split('=', 1)
            p[s1] = s2

    return (type, host, path, user, pswd, p)
369
370#######################################################################
371
def encodeurl(decoded):
    """Encodes a URL from tokens (scheme, network location, path,
    user, password, parameters).

    >>> encodeurl(['http', 'www.google.com', '/index.html', '', '', {}])
    'http://www.google.com/index.html'

    CVS with username, host and cvsroot. The cvs module to check out is in the
    parameters:

    >>> encodeurl(['cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'}])
    'cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg'

    Dito, but this time the username has a password part. And we also request a special tag
    to check out.

    >>> encodeurl(['cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', {'tag': 'V0-99-81', 'module': 'familiar/dist/ipkg'}])
    'cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg'
    """

    (type, host, path, user, pswd, p) = decoded

    if not type or not path:
        fatal("invalid or missing parameters for url encoding")

    # Assemble the pieces and join them once at the end.
    pieces = ['%s://' % type]
    if user:
        pieces.append('%s' % user)
        if pswd:
            pieces.append(':%s' % pswd)
        pieces.append('@')
    if host:
        pieces.append('%s' % host)
    pieces.append('%s' % path)
    if p:
        for parm in p.keys():
            pieces.append(';%s=%s' % (parm, p[parm]))

    return ''.join(pieces)
410
411#######################################################################
412
def which(path, item, direction = 0):
    """
    Locate *item* in the colon-separated *path*.

    Returns the first match walking left-to-right, or the last match
    (the path is searched in reverse) when *direction* is non-zero.
    Returns "" when the item is not found.
    """

    paths = (path or "").split(':')
    if direction != 0:
        paths.reverse()

    # BUGFIX: the loop previously re-split the original path string,
    # ignoring the reversed list, so direction had no effect.
    for p in paths:
        candidate = os.path.join(p, item)
        if os.path.exists(candidate):
            return candidate

    return ""
428
429#######################################################################
430
431
432
433
434#######################################################################
435#######################################################################
436#
437# SECTION: Dependency
438#
439# PURPOSE: Compare build & run dependencies
440#
441#######################################################################
442#######################################################################
443
def tokenize(mystring):
    """Breaks a string like 'foo? (bar) oni? (blah (blah))' into (possibly embedded) lists:

    >>> tokenize("x")
    ['x']
    >>> tokenize("x y")
    ['x', 'y']
    >>> tokenize("(x y)")
    [['x', 'y']]
    >>> tokenize("(x y) b c")
    [['x', 'y'], 'b', 'c']
    >>> tokenize("foo? (bar) oni? (blah (blah))")
    ['foo?', ['bar'], 'oni?', ['blah', ['blah']]]
    >>> tokenize("sys-apps/linux-headers nls? (sys-devel/gettext)")
    ['sys-apps/linux-headers', 'nls?', ['sys-devel/gettext']]

    Returns None (after a diagnostic on stderr) for unbalanced parentheses.
    """

    newtokens = []
    curlist = newtokens
    prevlists = []    # stack of enclosing lists
    level = 0         # current parenthesis nesting depth
    accum = ""        # token accumulated so far
    for x in mystring:
        if x == "(":
            if accum:
                curlist.append(accum)
                accum = ""
            prevlists.append(curlist)
            curlist = []
            level = level + 1
        elif x == ")":
            if accum:
                curlist.append(accum)
                accum = ""
            if level == 0:
                # IMPROVED: diagnostics now go to stderr instead of stdout.
                sys.stderr.write("!!! tokenizer: Unmatched left parenthesis in:\n'" + mystring + "'\n")
                return None
            newlist = curlist
            curlist = prevlists.pop()
            curlist.append(newlist)
            level = level - 1
        elif x in string.whitespace:
            # IMPROVED: use the identical stdlib constant instead of the
            # hand-rolled module-level 'whitespace' string.
            if accum:
                curlist.append(accum)
                accum = ""
        else:
            accum = accum + x
    if accum:
        curlist.append(accum)
    if level != 0:
        sys.stderr.write("!!! tokenizer: Exiting with unterminated parenthesis in:\n'" + mystring + "'\n")
        return None
    return newtokens
497
498
499#######################################################################
500
def evaluate(tokens,mydefines,allon=0):
    """Removes tokens based on whether conditional definitions exist or not.
    Recognizes !

    >>> evaluate(['sys-apps/linux-headers', 'nls?', ['sys-devel/gettext']], {})
    ['sys-apps/linux-headers']

    Negate the flag:

    >>> evaluate(['sys-apps/linux-headers', '!nls?', ['sys-devel/gettext']], {})
    ['sys-apps/linux-headers', ['sys-devel/gettext']]

    Define 'nls':

    >>> evaluate(['sys-apps/linux-headers', 'nls?', ['sys-devel/gettext']], {"nls":1})
    ['sys-apps/linux-headers', ['sys-devel/gettext']]

    Turn allon on:

    >>> evaluate(['sys-apps/linux-headers', 'nls?', ['sys-devel/gettext']], {}, True)
    ['sys-apps/linux-headers', ['sys-devel/gettext']]
    """

    if tokens is None:
        return None
    mytokens = tokens + []        # shallow copy; the caller's list is untouched
    pos = 0
    while pos < len(mytokens):
        # IMPROVED: isinstance() instead of type(...) == types.ListType.
        if isinstance(mytokens[pos], list):
            evaluate(mytokens[pos], mydefines)
            if not len(mytokens[pos]):
                # Drop sub-lists emptied by the recursive evaluation.
                del mytokens[pos]
                continue
        elif mytokens[pos][-1] == "?":
            # A conditional flag: it is always consumed, and the token
            # that follows it is dropped when the condition fails.
            cur = mytokens[pos][:-1]
            del mytokens[pos]
            if allon:
                if cur[0] == "!":
                    del mytokens[pos]
            else:
                if cur[0] == "!":
                    if (cur[1:] in mydefines) and (pos < len(mytokens)):
                        del mytokens[pos]
                        continue
                elif (cur not in mydefines) and (pos < len(mytokens)):
                    del mytokens[pos]
                    continue
        pos = pos + 1
    return mytokens
550
551
552#######################################################################
553
def flatten(mytokens):
    """Converts nested arrays into a flat arrays:

    >>> flatten([1,[2,3]])
    [1, 2, 3]
    >>> flatten(['sys-apps/linux-headers', ['sys-devel/gettext']])
    ['sys-apps/linux-headers', 'sys-devel/gettext']
    """

    newlist = []
    for x in mytokens:
        # IMPROVED: isinstance() instead of type(x) == types.ListType.
        if isinstance(x, list):
            newlist.extend(flatten(x))
        else:
            newlist.append(x)
    return newlist
570
571
572#######################################################################
573
_package_weights_ = {"pre":-2,"p":0,"alpha":-4,"beta":-3,"rc":-1} # dicts are unordered
_package_ends_ = ["pre", "p", "alpha", "beta", "rc", "cvs", "bk", "HEAD" ] # so we need ordered list

def relparse(myver):
    """Parses the last elements of a version number into a triplet, that can
    later be compared:

    >>> relparse('1.2_pre3')
    [1.2, -2, 3.0]
    >>> relparse('1.2b')
    [1.2, 98, 0]
    >>> relparse('1.2')
    [1.2, 0, 0]
    """

    def _split_trailing_letter(ver):
        # "1.2b" -> (1.2, ord('b')); plain "1.2" -> (1.2, 0).
        # (This logic was duplicated verbatim in both branches below.)
        divider = len(ver) - 1
        if ver[divider:] not in "1234567890":
            # letter at end
            return float(ver[0:divider]), ord(ver[divider:])
        return float(ver), 0

    number = 0
    p1 = 0
    p2 = 0
    mynewver = myver.split('_')
    if len(mynewver) == 2:
        # a suffix from _package_ends_, e.g. "1.2_pre3"
        number = float(mynewver[0])
        match = 0
        for x in _package_ends_:
            elen = len(x)
            if mynewver[1][:elen] == x:
                match = 1
                # NOTE(review): "cvs", "bk" and "HEAD" appear in
                # _package_ends_ but have no entry in _package_weights_;
                # the original indexed the dict directly and raised
                # KeyError for them.  Default to 0 -- confirm the intended
                # relative weight for these suffixes.
                p1 = _package_weights_.get(x, 0)
                try:
                    p2 = float(mynewver[1][elen:])
                except ValueError:
                    p2 = 0
                break
        if not match:
            # normal number or number with letter at end
            number, p1 = _split_trailing_letter(myver)
    else:
        # normal number or number with letter at end
        number, p1 = _split_trailing_letter(myver)
    return [number, p1, p2]
626
627
628#######################################################################
629
__ververify_cache__ = {}

def ververify(myorigval, silent=1):
    """Returns 1 if given a valid version string, else 0. Valid versions are in the format

    <v1>.<v2>...<vx>[a-z,_{_package_weights_}[vy]]

    >>> ververify('2.4.20')
    1
    >>> ververify('2.4..20') # two dots
    0
    >>> ververify('2.x.20') # 'x' is not numeric
    0
    >>> ververify('2.4.20a')
    1
    >>> ververify('2.4.20cvs') # only one trailing letter
    0
    >>> ververify('1a')
    1
    >>> ververify('test_a') # no version at all
    0
    >>> ververify('2.4.20_beta1')
    1
    >>> ververify('2.4.20_beta')
    1
    >>> ververify('2.4.20_wrongext') # _wrongext is no valid trailer
    0
    """

    # Lookup the cache first
    try:
        return __ververify_cache__[myorigval]
    except KeyError:
        pass

    def _fail(msg):
        # remember the negative result; report only when not silent
        if not silent:
            error(msg)
        __ververify_cache__[myorigval] = 0
        return 0

    if len(myorigval) == 0:
        return _fail("package version is empty")
    myval = myorigval.split('.')
    if len(myval) == 0:
        return _fail("package name has empty version string")
    # all but the last component must be plain numbers
    for x in myval[:-1]:
        if not len(x):
            return _fail("package version has two points in a row")
        try:
            int(x)
        except ValueError:
            return _fail("package version contains non-numeric '" + x + "'")
    if not len(myval[-1]):
        return _fail("package version has trailing dot")
    try:
        int(myval[-1])
        __ververify_cache__[myorigval] = 1
        return 1
    except ValueError:
        pass

    # ok, our last component is not a plain number or blank, let's continue
    if myval[-1][-1] in "abcdefghijklmnopqrstuvwxyz":
        try:
            int(myval[-1][:-1])
            # 1a, 2.0b, etc.  Cache BEFORE returning -- the original put
            # the cache store after 'return', so it never executed.
            __ververify_cache__[myorigval] = 1
            return 1
        except ValueError:
            pass
    # ok, maybe we have a 1_alpha or 1_beta2; let's see
    ep = myval[-1].split("_")
    if len(ep) != 2:
        return _fail("package version has more than one letter at the end")
    try:
        int(ep[0])
    except ValueError:
        # this needs to be numeric, i.e. the "1" in "1_alpha"
        return _fail("package version must have numeric part before the '_'")

    for mye in _package_ends_:
        if ep[1][0:len(mye)] == mye:
            if len(mye) == len(ep[1]):
                # no trailing numeric is ok
                __ververify_cache__[myorigval] = 1
                return 1
            else:
                try:
                    int(ep[1][len(mye):])
                    __ververify_cache__[myorigval] = 1
                    return 1
                except ValueError:
                    # if no _package_ends_ entry works, *then* we return 0
                    pass
    return _fail("package version extension after '_' is invalid")
745
746
def isjustname(mypkg):
    """Returns 1 if mypkg contains no version part, else 0.

    Any dash-separated component that ververify() accepts as a version
    makes the name "specific".
    """
    for x in mypkg.split('-'):
        if ververify(x):
            return 0
    return 1
753
754
# NOTE: this was declared as `_isspecific_cache_` but every use referred to
# `__isspecific_cache__`, so the cache store raised NameError on each call
# (the bare except hid the failing lookup).  Use one name consistently.
__isspecific_cache__ = {}

def isspecific(mypkg):
    """now supports packages with no category

    Returns 1 if the last path component of mypkg carries a version part
    (i.e. names a specific package), else 0.  Results are memoized.
    """
    try:
        return __isspecific_cache__[mypkg]
    except KeyError:
        pass

    mysplit = mypkg.split("/")
    if not isjustname(mysplit[-1]):
        __isspecific_cache__[mypkg] = 1
        return 1
    __isspecific_cache__[mypkg] = 0
    return 0
770
771
772#######################################################################
773
__pkgsplit_cache__ = {}

def pkgsplit(mypkg, silent=1):

    """This function can be used as a package verification function. If
    it is a valid name, pkgsplit will return a list containing:
    [pkgname, pkgversion(norev), pkgrev ].

    >>> pkgsplit('')
    >>> pkgsplit('x')
    >>> pkgsplit('x-')
    >>> pkgsplit('-1')
    >>> pkgsplit('glibc-1.2-8.9-r7')
    >>> pkgsplit('glibc-2.2.5-r7')
    ['glibc', '2.2.5', 'r7']
    >>> pkgsplit('foo-1.2-1')
    >>> pkgsplit('Mesa-3.0')
    ['Mesa', '3.0', 'r0']
    """

    try:
        return __pkgsplit_cache__[mypkg]
    except KeyError:
        pass

    def _reject(msg=None):
        # cache the negative result; diagnose only when not silent and a
        # message was supplied
        if msg and not silent:
            error(msg)
        __pkgsplit_cache__[mypkg] = None
        return None

    myparts = mypkg.split('-')
    if len(myparts) < 2:
        return _reject("package name without name or version part")
    for x in myparts:
        if len(x) == 0:
            return _reject("package name with empty name or version part")

    # check whether the last component is a revision ("rN")
    myrev = myparts[-1]
    ververify(myrev, silent)  # return value unused; kept for its error output
    revok = 0
    if len(myrev) and myrev[0] == "r":
        try:
            int(myrev[1:])
            revok = 1
        except ValueError:
            pass

    if revok:
        # name-version-rN form
        if not ververify(myparts[-2]):
            return _reject()
        if len(myparts) == 2:
            # only "version-rN": there is no name part
            return _reject()
        for x in myparts[:-2]:
            # names can't have versiony looking parts
            if ververify(x):
                return _reject()
        myval = ["-".join(myparts[:-2]), myparts[-2], myparts[-1]]
        __pkgsplit_cache__[mypkg] = myval
        return myval

    if ververify(myparts[-1], silent):
        # name-version form; default the revision to "r0"
        if len(myparts) == 1:
            # (was a bare py2 print; use error() like the other diagnostics)
            return _reject("package name is missing the name part: " + mypkg)
        for x in myparts[:-1]:
            if ververify(x):
                return _reject("package name has multiple version parts")
        myval = ["-".join(myparts[:-1]), myparts[-1], "r0"]
        __pkgsplit_cache__[mypkg] = myval
        return myval

    return _reject()
857
858
859#######################################################################
860
__catpkgsplit_cache__ = {}

def catpkgsplit(mydata, silent=1):
    """returns [cat, pkgname, version, rev ]

    >>> catpkgsplit('sys-libs/glibc-1.2-r7')
    ['sys-libs', 'glibc', '1.2', 'r7']
    >>> catpkgsplit('glibc-1.2-r7')
    [None, 'glibc', '1.2', 'r7']
    """

    try:
        return __catpkgsplit_cache__[mydata]
    except KeyError:
        pass

    # Keep the caller's key: the original rebuilt `mydata` below and then
    # cached under the rebuilt string, so lookups with the original argument
    # could never hit the cache.
    cachekey = mydata

    # reduce a path like .../category/pn-pv.bb to "category/pn-pv"
    cat = os.path.basename(os.path.dirname(mydata))
    mydata = os.path.join(cat, os.path.basename(mydata))
    if mydata[-3:] == '.bb':
        mydata = mydata[:-3]

    mysplit = mydata.split("/")
    p_split = None
    splitlen = len(mysplit)
    if splitlen == 1:
        # no category component
        retval = [None]
        p_split = pkgsplit(mydata, silent)
    else:
        retval = [mysplit[splitlen - 2]]
        p_split = pkgsplit(mysplit[splitlen - 1], silent)
    if not p_split:
        __catpkgsplit_cache__[cachekey] = None
        return None
    retval.extend(p_split)
    __catpkgsplit_cache__[cachekey] = retval
    return retval
897
898
899#######################################################################
900
__vercmp_cache__ = {}

def vercmp(val1, val2):
    """This takes two version strings and returns an integer to tell you whether
    the versions are the same, val1>val2 or val2>val1.

    >>> vercmp('1', '2')
    -1.0
    >>> vercmp('2', '1')
    1.0
    >>> vercmp('1', '1.0')
    0
    >>> vercmp('1', '1.1')
    -1.0
    >>> vercmp('1.1', '1_p2')
    1.0
    """

    # quick short-circuit
    if val1 == val2:
        return 0
    valkey = val1 + " " + val2

    # cache lookup; also try the reversed key and negate the result.
    # (The original nested the reversed lookup after an unconditional
    # `return`, so it was unreachable dead code.)
    try:
        return __vercmp_cache__[valkey]
    except KeyError:
        pass
    try:
        return - __vercmp_cache__[val2 + " " + val1]
    except KeyError:
        pass

    # consider 1_p2 vc 1.1
    # after expansion will become (1_p2,0) vc (1,1)
    # then 1_p2 is compared with 1 before 0 is compared with 1
    # to solve the bug we need to convert it to (1,0_p2)
    # by splitting _prepart part and adding it back _after_expansion

    val1_prepart = val2_prepart = ''
    if val1.count('_'):
        val1, val1_prepart = val1.split('_', 1)
    if val2.count('_'):
        val2, val2_prepart = val2.split('_', 1)

    # replace '-' by '.'
    # FIXME: Is it needed? can val1/2 contain '-'?
    val1 = val1.split('-')
    if len(val1) == 2:
        val1[0] = val1[0] + "." + val1[1]
    val2 = val2.split('-')
    if len(val2) == 2:
        val2[0] = val2[0] + "." + val2[1]

    val1 = val1[0].split('.')
    val2 = val2[0].split('.')

    # add back decimal point so that .03 does not become "3" !
    for x in range(1, len(val1)):
        if val1[x][0] == '0':
            val1[x] = '.' + val1[x]
    for x in range(1, len(val2)):
        if val2[x][0] == '0':
            val2[x] = '.' + val2[x]

    # pad the shorter version with "0" components so both have equal length
    if len(val2) < len(val1):
        val2.extend(["0"] * (len(val1) - len(val2)))
    elif len(val1) < len(val2):
        val1.extend(["0"] * (len(val2) - len(val1)))

    # add back _prepart tails
    if val1_prepart:
        val1[-1] += '_' + val1_prepart
    if val2_prepart:
        val2[-1] += '_' + val2_prepart
    # compare component by component via relparse() triplets
    for x in range(0, len(val1)):
        cmp1 = relparse(val1[x])
        cmp2 = relparse(val2[x])
        for y in range(0, 3):
            myret = cmp1[y] - cmp2[y]
            if myret != 0:
                __vercmp_cache__[valkey] = myret
                return myret
    __vercmp_cache__[valkey] = 0
    return 0
990
991
992#######################################################################
993
def pkgcmp(pkg1, pkg2):
    """ Compares two packages, which should have been split via
    pkgsplit(). if the return value val is less than zero, then pkg2 is
    newer than pkg1, zero if equal and positive if older.

    >>> pkgcmp(['glibc', '2.2.5', 'r7'], ['glibc', '2.2.5', 'r7'])
    0
    >>> pkgcmp(['glibc', '2.2.5', 'r4'], ['glibc', '2.2.5', 'r7'])
    -1
    >>> pkgcmp(['glibc', '2.2.5', 'r7'], ['glibc', '2.2.5', 'r2'])
    1
    """

    mycmp = vercmp(pkg1[1], pkg2[1])
    if mycmp > 0:
        return 1
    if mycmp < 0:
        return -1
    # versions are equal: fall back to the numeric part of the "rN" revision
    r1 = int(pkg1[2][1:])
    r2 = int(pkg2[2][1:])
    if r1 > r2:
        return 1
    if r2 > r1:
        return -1
    return 0
1019
1020
1021#######################################################################
1022
def dep_parenreduce(mysplit, mypos=0):
    """Collapse '(' ... ')' runs in a token list into nested sub-lists.

    The list is rewritten in place from position `mypos` onward and the
    resulting list is returned.

    >>> dep_parenreduce([''])
    ['']
    >>> dep_parenreduce(['1', '2', '3'])
    ['1', '2', '3']
    >>> dep_parenreduce(['1', '(', '2', '3', ')', '4'])
    ['1', ['2', '3'], '4']
    """

    while mypos < len(mysplit):
        if mysplit[mypos] == "(":
            openpos = mypos
            mypos = mypos + 1
            # scan ahead for the matching close paren, folding any nested
            # groups as they are encountered
            while mypos < len(mysplit):
                token = mysplit[mypos]
                if token == ")":
                    # replace the whole parenthesised span with one sub-list
                    mysplit[openpos:mypos + 1] = [mysplit[openpos + 1:mypos]]
                    mypos = openpos
                    break
                if token == "(":
                    mysplit = dep_parenreduce(mysplit, mypos)
                mypos = mypos + 1
        mypos = mypos + 1
    return mysplit
1049
1050
def dep_opconvert(mysplit, myuse):
    """Does dependency operator conversion.

    Walks a (possibly nested) token list produced by dep_parenreduce() and
    resolves the operators:
      - "|| ( a b )"          -> ["||", a, b]
      - "flag? ( a )"         -> a, kept only if 'flag' is in myuse
                                 (a leading "!" negates the test; a
                                 trailing ":" selects an if/else pair)
    Returns the converted list, or None on a mismatched ")" or a
    malformed "||" clause.
    """

    mypos = 0
    newsplit = []
    while mypos < len(mysplit):
        if type(mysplit[mypos]) == types.ListType:
            # already-parenthesised group: convert it recursively
            newsplit.append(dep_opconvert(mysplit[mypos],myuse))
            mypos += 1
        elif mysplit[mypos] == ")":
            # mismatched paren, error
            return None
        elif mysplit[mypos]=="||":
            if ((mypos+1)>=len(mysplit)) or (type(mysplit[mypos+1])!=types.ListType):
                # || must be followed by paren'd list
                return None
            try:
                mynew = dep_opconvert(mysplit[mypos+1],myuse)
            except Exception, e:
                error("unable to satisfy OR dependancy: " + string.join(mysplit," || "))
                raise e
            # prepend the "||" marker so consumers recognise the OR group
            mynew[0:0] = ["||"]
            newsplit.append(mynew)
            mypos += 2
        elif mysplit[mypos][-1] == "?":
            # use clause, i.e "gnome? ( foo bar )"
            # this is a quick and dirty hack so that repoman can enable all USE vars:
            if (len(myuse) == 1) and (myuse[0] == "*"):
                # enable it even if it's ! (for repoman) but kill it if it's
                # an arch variable that isn't for this arch. XXX Sparc64?
                # NOTE(review): `settings` is a module-level name not visible
                # in this file chunk -- presumably a portage-style config
                # object with a .usemask and ["ARCH"]; confirm.
                if (mysplit[mypos][:-1] not in settings.usemask) or \
                        (mysplit[mypos][:-1]==settings["ARCH"]):
                    enabled=1
                else:
                    enabled=0
            else:
                if mysplit[mypos][0] == "!":
                    # "!flag?": enabled when the flag is NOT set
                    myusevar = mysplit[mypos][1:-1]
                    enabled = not myusevar in myuse
                else:
                    myusevar=mysplit[mypos][:-1]
                    enabled = myusevar in myuse
            if (mypos +2 < len(mysplit)) and (mysplit[mypos+2] == ":"):
                # colon mode: "flag? ( a ) : ( b )" -- if/else selection
                if enabled:
                    # choose the first option
                    if type(mysplit[mypos+1]) == types.ListType:
                        newsplit.append(dep_opconvert(mysplit[mypos+1],myuse))
                    else:
                        newsplit.append(mysplit[mypos+1])
                else:
                    # choose the alternate option
                    # NOTE(review): the type test below inspects
                    # mysplit[mypos+1] but the value used is mysplit[mypos+3]
                    # -- looks like a copy/paste slip; confirm before fixing.
                    if type(mysplit[mypos+1]) == types.ListType:
                        newsplit.append(dep_opconvert(mysplit[mypos+3],myuse))
                    else:
                        newsplit.append(mysplit[mypos+3])
                mypos += 4
            else:
                # normal use mode: keep the guarded item only when enabled
                if enabled:
                    if type(mysplit[mypos+1]) == types.ListType:
                        newsplit.append(dep_opconvert(mysplit[mypos+1],myuse))
                    else:
                        newsplit.append(mysplit[mypos+1])
                # otherwise, continue
                mypos += 2
        else:
            # normal item
            newsplit.append(mysplit[mypos])
            mypos += 1
    return newsplit
1130
if __name__ == "__main__":
    # Run the doctests embedded in the docstrings above, with bb's debug
    # output silenced so it does not pollute the doctest comparison.
    import doctest, bb
    bb.msg.set_debug_level(0)
    doctest.testmod(bb)
diff --git a/bitbake-dev/lib/bb/build.py b/bitbake-dev/lib/bb/build.py
deleted file mode 100644
index 6d80b4b549..0000000000
--- a/bitbake-dev/lib/bb/build.py
+++ /dev/null
@@ -1,394 +0,0 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# BitBake 'Build' implementation
5#
6# Core code for function execution and task handling in the
7# BitBake build tools.
8#
9# Copyright (C) 2003, 2004 Chris Larson
10#
11# Based on Gentoo's portage.py.
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26#Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
28from bb import data, event, mkdirhier, utils
29import bb, os, sys
30
# When we execute a python function we'd like certain things
# in all namespaces, hence we add them to __builtins__
# If we do not do this and use the exec globals, they will
# not be available to subfunctions.
# NOTE(review): mutating __builtins__ affects every module in the process.
__builtins__['bb'] = bb
__builtins__['os'] = os
37
38# events
class FuncFailed(Exception):
    """
    Executed function failed.
    First parameter is a message.
    Second parameter is a logfile (optional).
    """
45
class EventException(Exception):
    """Exception which is associated with an Event.

    Stores (msg, event) in .args so handlers can recover both pieces.
    """

    def __init__(self, msg, event):
        self.args = (msg, event)
51
class TaskBase(event.Event):
    """Base class for task events

    Carries the task name and the owning package (PF) plus a preformatted
    human-readable message.
    """

    def __init__(self, t, d ):
        self._task = t
        # fetch PF once and reuse it in the message (the original queried
        # the datastore twice for the same value)
        self._package = bb.data.getVar("PF", d, 1)
        event.Event.__init__(self)
        self._message = "package %s: task %s: %s" % (self._package, t, bb.event.getName(self)[4:])

    def getTask(self):
        return self._task

    def setTask(self, task):
        self._task = task

    task = property(getTask, setTask, None, "task property")
68
class TaskStarted(TaskBase):
    """Task execution started"""

class TaskSucceeded(TaskBase):
    """Task execution completed"""

class TaskFailed(TaskBase):
    """Task execution failed"""
    def __init__(self, msg, logfile, t, d ):
        # keep the log path and message so UIs can report why the task failed
        self.logfile = logfile
        self.msg = msg
        TaskBase.__init__(self, t, d)

class InvalidTask(TaskBase):
    """Invalid Task"""
84
85# functions
86
def exec_func(func, d, dirs = None):
    """Execute an BB 'function'.

    Looks up the function body in the datastore `d`, prepares its working
    directories, redirects stdin/stdout/stderr into per-run log files,
    takes any declared lockfiles, then dispatches to exec_func_python()
    or exec_func_shell() depending on the 'python' varflag.

    func -- name of the function/variable in the datastore
    d    -- the data dictionary
    dirs -- optional override for the 'dirs' varflag (last entry becomes
            the working directory)
    """

    body = data.getVar(func, d)
    if not body:
        return

    # normalise the varflags we consult so later lookups cannot KeyError
    flags = data.getVarFlags(func, d)
    for item in ['deps', 'check', 'interactive', 'python', 'cleandirs', 'dirs', 'lockfiles', 'fakeroot']:
        if not item in flags:
            flags[item] = None

    ispython = flags['python']

    # directories listed in 'cleandirs' are removed wholesale before the run
    cleandirs = (data.expand(flags['cleandirs'], d) or "").split()
    for cdir in cleandirs:
        os.system("rm -rf %s" % cdir)

    if dirs:
        dirs = data.expand(dirs, d)
    else:
        dirs = (data.expand(flags['dirs'], d) or "").split()
    for adir in dirs:
        mkdirhier(adir)

    # the last 'dirs' entry (or B) becomes the working directory
    if len(dirs) > 0:
        adir = dirs[-1]
    else:
        adir = data.getVar('B', d, 1)

    # Save current directory (cwd may already have been deleted)
    try:
        prevdir = os.getcwd()
    except OSError:
        prevdir = data.getVar('TOPDIR', d, True)

    # Setup logfiles
    t = data.getVar('T', d, 1)
    if not t:
        bb.msg.fatal(bb.msg.domain.Build, "T not set")
    mkdirhier(t)
    # Gross hack, FIXME
    import random
    logfile = "%s/log.%s.%s.%s" % (t, func, str(os.getpid()),random.random())
    runfile = "%s/run.%s.%s" % (t, func, str(os.getpid()))

    # Change to correct directory (if specified)
    if adir and os.access(adir, os.F_OK):
        os.chdir(adir)

    # Handle logfiles
    si = file('/dev/null', 'r')
    try:
        # in debug mode (or for python functions) tee output to the console
        if bb.msg.debug_level['default'] > 0 or ispython:
            so = os.popen("tee \"%s\"" % logfile, "w")
        else:
            so = file(logfile, 'w')
    except OSError, e:
        bb.msg.error(bb.msg.domain.Build, "opening log file: %s" % e)
        # NOTE(review): if the open failed, 'so' is unbound and the
        # 'se = so' below raises NameError -- confirm intended behaviour.
        pass

    se = so

    # Dup the existing fds so we dont lose them
    osi = [os.dup(sys.stdin.fileno()), sys.stdin.fileno()]
    oso = [os.dup(sys.stdout.fileno()), sys.stdout.fileno()]
    ose = [os.dup(sys.stderr.fileno()), sys.stderr.fileno()]

    # Replace those fds with our own
    os.dup2(si.fileno(), osi[1])
    os.dup2(so.fileno(), oso[1])
    os.dup2(se.fileno(), ose[1])

    locks = []
    lockfiles = (data.expand(flags['lockfiles'], d) or "").split()
    for lock in lockfiles:
        locks.append(bb.utils.lockfile(lock))

    try:
        # Run the function
        if ispython:
            exec_func_python(func, d, runfile, logfile)
        else:
            exec_func_shell(func, d, runfile, logfile, flags)

        # Restore original directory
        try:
            os.chdir(prevdir)
        except:
            pass

    finally:

        # Unlock any lockfiles
        for lock in locks:
            bb.utils.unlockfile(lock)

        # Restore the backup fds
        os.dup2(osi[0], osi[1])
        os.dup2(oso[0], oso[1])
        os.dup2(ose[0], ose[1])

        # Close our logs
        si.close()
        so.close()
        se.close()

        if os.path.exists(logfile) and os.path.getsize(logfile) == 0:
            bb.msg.debug(2, bb.msg.domain.Build, "Zero size logfile %s, removing" % logfile)
            os.remove(logfile)

        # Close the backup fds
        os.close(osi[0])
        os.close(oso[0])
        os.close(ose[0])
202
def exec_func_python(func, d, runfile, logfile):
    """Execute a python BB 'function'.

    Wraps the function body from the datastore in a `def func(): ...` plus a
    call, writes that code to `runfile` for postmortem debugging, compiles
    it and executes it with the datastore exposed as global 'd'.

    Raises FuncFailed (carrying `logfile`) on any failure other than
    SkipPackage/FuncFailed, which are re-raised unchanged.
    """
    bbfile = bb.data.getVar('FILE', d, 1)
    tmp = "def " + func + "():\n%s" % data.getVar(func, d)
    tmp += '\n' + func + '()'

    # keep a copy of the generated code; close it promptly (the original
    # leaked this file handle)
    f = open(runfile, "w")
    f.write(tmp)
    f.close()
    comp = utils.better_compile(tmp, func, bbfile)
    g = {} # globals
    g['d'] = d
    try:
        utils.better_exec(comp, g, tmp, bbfile)
    except:
        (t, value, tb) = sys.exc_info()

        # pass control-flow exceptions through untouched
        if t in [bb.parse.SkipPackage, bb.build.FuncFailed]:
            raise
        bb.msg.error(bb.msg.domain.Build, "Function %s failed" % func)
        raise FuncFailed("function %s failed" % func, logfile)
225
def exec_func_shell(func, d, runfile, logfile, flags):
    """Execute a shell BB 'function'. Returns true if execution was successful.

    For this, it creates a shell script in the tmp directory, writes the local
    data into it and finally executes. The output of the shell will end in a log file and stdout.

    Note on directory behavior. The 'dirs' varflag should contain a list
    of the directories you need created prior to execution. The last
    item in the list is where we will chdir/cd to.

    Raises FuncFailed (carrying `logfile`) when the script exits non-zero
    or no function name was given.
    """

    deps = flags['deps']
    check = flags['check']
    # optional guard: if 'check' names a function in this module's globals
    # and it returns true, skip execution entirely
    if check in globals():
        if globals()[check](func, deps):
            return

    # emit the environment plus a call to the function into the run script
    f = open(runfile, "w")
    f.write("#!/bin/sh -e\n")
    if bb.msg.debug_level['default'] > 0: f.write("set -x\n")
    data.emit_env(f, d)

    f.write("cd %s\n" % os.getcwd())
    if func: f.write("%s\n" % func)
    f.close()
    os.chmod(runfile, 0775)
    if not func:
        bb.msg.error(bb.msg.domain.Build, "Function not specified")
        raise FuncFailed("Function not specified for exec_func_shell")

    # execute function
    if flags['fakeroot']:
        maybe_fakeroot = "PATH=\"%s\" fakeroot " % bb.data.getVar("PATH", d, 1)
    else:
        maybe_fakeroot = ''
    # force a predictable locale so tool output is stable
    lang_environment = "LC_ALL=C "
    ret = os.system('%s%ssh -e %s' % (lang_environment, maybe_fakeroot, runfile))

    if ret == 0:
        return

    bb.msg.error(bb.msg.domain.Build, "Function %s failed" % func)
    raise FuncFailed("function %s failed" % func, logfile)
269
270
def exec_task(task, d):
    """Execute an BB 'task'

    The primary difference between executing a task versus executing
    a function is that a task exists in the task digraph, and therefore
    has dependencies amongst other tasks."""

    # Check whether this is a valid task
    if not data.getVarFlag(task, 'task', d):
        raise EventException("No such task", InvalidTask(task, d))

    try:
        bb.msg.debug(1, bb.msg.domain.Build, "Executing task %s" % task)
        old_overrides = data.getVar('OVERRIDES', d, 0)
        # run against a copy of the datastore with a task-specific OVERRIDES
        # entry prepended (task[3:] strips a 3-char prefix -- presumably
        # "do_"; confirm)
        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', 'task-%s:%s' % (task[3:], old_overrides), localdata)
        data.update_data(localdata)
        data.expandKeys(localdata)
        event.fire(TaskStarted(task, localdata), localdata)
        exec_func(task, localdata)
        event.fire(TaskSucceeded(task, localdata), localdata)
    except FuncFailed, message:
        # Try to extract the optional logfile
        try:
            (msg, logfile) = message
        except:
            logfile = None
            msg = message
        bb.msg.note(1, bb.msg.domain.Build, "Task failed: %s" % message )
        failedevent = TaskFailed(msg, logfile, task, d)
        event.fire(failedevent, d)
        raise EventException("Function failed in task: %s" % message, failedevent)

    # make stamp, or cause event and raise exception
    if not data.getVarFlag(task, 'nostamp', d) and not data.getVarFlag(task, 'selfstamp', d):
        make_stamp(task, d)
307
def extract_stamp(d, fn):
    """Return the stamp base path.

    When `fn` is set, `d` is a dataCache and the stamp comes from its
    per-file stamp table; otherwise `d` is a data dictionary and the
    STAMP variable is used.
    """
    if not fn:
        return data.getVar('STAMP', d, 1)
    return d.stamp[fn]
316
def stamp_internal(task, d, file_name):
    """
    Internal stamp helper function
    Removes any stamp for the given task
    Makes sure the stamp directory exists
    Returns the stamp path+filename (or None when no stamp base is set)
    """
    stamp = extract_stamp(d, file_name)
    if not stamp:
        return
    # stamps are named "<base>.<task>"
    stamp = "%s.%s" % (stamp, task)
    mkdirhier(os.path.dirname(stamp))
    # Remove the file and recreate to force timestamp
    # change on broken NFS filesystems
    if os.access(stamp, os.F_OK):
        os.remove(stamp)
    return stamp
334
def make_stamp(task, d, file_name = None):
    """
    Create or refresh the stamp file for a given task.
    (d can be a data dict, or a dataCache when file_name is given.)
    """
    stamp = stamp_internal(task, d, file_name)
    if not stamp:
        return
    # touch the (freshly removed) stamp file
    open(stamp, "w").close()
344
def del_stamp(task, d, file_name = None):
    """
    Remove the stamp for a given task; stamp_internal() performs the
    actual deletion. (d can be a data dict, or a dataCache when
    file_name is given.)
    """
    stamp_internal(task, d, file_name)
351
def add_tasks(tasklist, d):
    """Register each task in tasklist with the datastore.

    Marks each variable with the 'task' varflag and records it in the
    '_task_deps' structure along with its depends/deptask/rdeptask/
    recrdeptask/nostamp flags and its 'deps' parents.
    """
    task_deps = data.getVar('_task_deps', d)
    if not task_deps:
        task_deps = {}
    if not 'tasks' in task_deps:
        task_deps['tasks'] = []
    if not 'parents' in task_deps:
        task_deps['parents'] = {}

    for task in tasklist:
        task = data.expand(task, d)
        data.setVarFlag(task, 'task', 1, d)

        if not task in task_deps['tasks']:
            task_deps['tasks'].append(task)

        flags = data.getVarFlags(task, d)
        def getTask(name):
            # copy the named varflag (expanded) into task_deps[name][task]
            if not name in task_deps:
                task_deps[name] = {}
            if name in flags:
                deptask = data.expand(flags[name], d)
                task_deps[name][task] = deptask
        getTask('depends')
        getTask('deptask')
        getTask('rdeptask')
        getTask('recrdeptask')
        getTask('nostamp')
        task_deps['parents'][task] = []
        # NOTE(review): flags['deps'] is indexed unconditionally -- a task
        # without a 'deps' varflag would raise KeyError here; confirm that
        # callers always set it.
        for dep in flags['deps']:
            dep = data.expand(dep, d)
            task_deps['parents'][task].append(dep)

    # don't assume holding a reference
    data.setVar('_task_deps', task_deps, d)
387
def remove_task(task, kill, d):
    """Remove an BB 'task'.

    If kill is 1, also remove tasks that depend on this task."""

    # NOTE(review): only the 'task' varflag is cleared; the 'kill' parameter
    # is currently unused, so dependent tasks are NOT removed despite the
    # docstring -- confirm whether that is intentional.
    data.delVarFlag(task, 'task', d)
394
diff --git a/bitbake-dev/lib/bb/cache.py b/bitbake-dev/lib/bb/cache.py
deleted file mode 100644
index 2f1b8fa601..0000000000
--- a/bitbake-dev/lib/bb/cache.py
+++ /dev/null
@@ -1,533 +0,0 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# BitBake 'Event' implementation
5#
6# Caching of bitbake variables before task execution
7
8# Copyright (C) 2006 Richard Purdie
9
10# but small sections based on code from bin/bitbake:
11# Copyright (C) 2003, 2004 Chris Larson
12# Copyright (C) 2003, 2004 Phil Blundell
13# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
14# Copyright (C) 2005 Holger Hans Peter Freyther
15# Copyright (C) 2005 ROAD GmbH
16#
17# This program is free software; you can redistribute it and/or modify
18# it under the terms of the GNU General Public License version 2 as
19# published by the Free Software Foundation.
20#
21# This program is distributed in the hope that it will be useful,
22# but WITHOUT ANY WARRANTY; without even the implied warranty of
23# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
24# GNU General Public License for more details.
25#
26# You should have received a copy of the GNU General Public License along
27# with this program; if not, write to the Free Software Foundation, Inc.,
28# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
29
30
31import os, re
32import bb.data
33import bb.utils
34
35try:
36 import cPickle as pickle
37except ImportError:
38 import pickle
39 bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.")
40
41__cache_version__ = "130"
42
class Cache:
    """
    BitBake Cache implementation

    Caches the recipe variables accessed during parsing so that later runs
    can serve them from disk (bb_cache.dat, pickled) instead of re-parsing
    unchanged .bb files.
    """
    def __init__(self, cooker):
        # On-disk cache location comes from the CACHE variable; an unset or
        # empty value disables persistent caching entirely.
        self.cachedir = bb.data.getVar("CACHE", cooker.configuration.data, True)
        self.clean = {}          # fns whose cached data is known usable
        self.checked = {}        # fns already examined by cacheValidUpdate
        self.depends_cache = {}  # fn -> {variable name: value}
        self.data = None         # live datastore being learned from (setData)
        self.data_fn = None      # fn the live datastore belongs to
        self.cacheclean = True   # no new data cached yet; sync() can skip saving

        if self.cachedir in [None, '']:
            self.has_cache = False
            bb.msg.note(1, bb.msg.domain.Cache, "Not using a cache. Set CACHE = <directory> to enable.")
            return

        self.has_cache = True
        self.cachefile = os.path.join(self.cachedir,"bb_cache.dat")

        bb.msg.debug(1, bb.msg.domain.Cache, "Using cache in '%s'" % self.cachedir)
        try:
            os.stat( self.cachedir )
        except OSError:
            bb.mkdirhier( self.cachedir )

        # If any of configuration.data's dependencies are newer than the
        # cache there isn't even any point in loading it...
        newest_mtime = 0
        deps = bb.data.getVar("__depends", cooker.configuration.data, True)
        for f,old_mtime in deps:
            if old_mtime > newest_mtime:
                newest_mtime = old_mtime

        if bb.parse.cached_mtime_noerror(self.cachefile) >= newest_mtime:
            try:
                # The pickle holds a [depends_cache, version_data] pair;
                # any version mismatch forces a rebuild.
                p = pickle.Unpickler(file(self.cachefile, "rb"))
                self.depends_cache, version_data = p.load()
                if version_data['CACHE_VER'] != __cache_version__:
                    raise ValueError, 'Cache Version Mismatch'
                if version_data['BITBAKE_VER'] != bb.__version__:
                    raise ValueError, 'Bitbake Version Mismatch'
            except EOFError:
                bb.msg.note(1, bb.msg.domain.Cache, "Truncated cache found, rebuilding...")
                self.depends_cache = {}
            except:
                # Deliberately broad: any unreadable/incompatible cache is
                # discarded rather than aborting the run.
                bb.msg.note(1, bb.msg.domain.Cache, "Invalid cache found, rebuilding...")
                self.depends_cache = {}
        else:
            try:
                os.stat( self.cachefile )
                bb.msg.note(1, bb.msg.domain.Cache, "Out of date cache found, rebuilding...")
            except OSError:
                pass

    def getVar(self, var, fn, exp = 0):
        """
        Gets the value of a variable
        (similar to getVar in the data class)

        There are two scenarios:
          1. We have cached data - serve from depends_cache[fn]
          2. We're learning what data to cache - serve from the data
             backend but add a copy of the data to the cache.
        """
        if fn in self.clean:
            return self.depends_cache[fn][var]

        if not fn in self.depends_cache:
            self.depends_cache[fn] = {}

        if fn != self.data_fn:
            # We're trying to access data in the cache which doesn't exist
            # yet setData hasn't been called to setup the right access. Very bad.
            bb.msg.error(bb.msg.domain.Cache, "Parsing error data_fn %s and fn %s don't match" % (self.data_fn, fn))

        # Learning mode: every variable read here becomes part of the cache.
        self.cacheclean = False
        result = bb.data.getVar(var, self.data, exp)
        self.depends_cache[fn][var] = result
        return result

    def setData(self, virtualfn, fn, data):
        """
        Called to prime bb_cache ready to learn which variables to cache.
        Will be followed by calls to self.getVar which aren't cached
        but can be fulfilled from self.data.
        """
        self.data_fn = virtualfn
        self.data = data

        # Make sure __depends makes the depends_cache
        # If we're a virtual class we need to make sure all our depends are appended
        # to the depends of fn.
        depends = self.getVar("__depends", virtualfn, True) or []
        if "__depends" not in self.depends_cache[fn] or not self.depends_cache[fn]["__depends"]:
            self.depends_cache[fn]["__depends"] = depends
        for dep in depends:
            if dep not in self.depends_cache[fn]["__depends"]:
                self.depends_cache[fn]["__depends"].append(dep)

        # Make sure BBCLASSEXTEND always makes the cache too
        self.getVar('BBCLASSEXTEND', virtualfn, True)

        # Record the parse-time mtime so cacheValidUpdate can detect changes
        self.depends_cache[virtualfn]["CACHETIMESTAMP"] = bb.parse.cached_mtime(fn)

    def virtualfn2realfn(self, virtualfn):
        """
        Convert a virtual file name ("virtual:<cls>:<fn>") to a real one
        plus the associated subclass keyword (empty string when not virtual).
        """

        fn = virtualfn
        cls = ""
        if virtualfn.startswith('virtual:'):
            cls = virtualfn.split(':', 2)[1]
            fn = virtualfn.replace('virtual:' + cls + ':', '')
        #bb.msg.debug(2, bb.msg.domain.Cache, "virtualfn2realfn %s to %s %s" % (virtualfn, fn, cls))
        return (fn, cls)

    def realfn2virtual(self, realfn, cls):
        """
        Convert a real filename + the associated subclass keyword to a virtual filename
        """
        if cls == "":
            #bb.msg.debug(2, bb.msg.domain.Cache, "realfn2virtual %s and '%s' to %s" % (realfn, cls, realfn))
            return realfn
        #bb.msg.debug(2, bb.msg.domain.Cache, "realfn2virtual %s and %s to %s" % (realfn, cls, "virtual:" + cls + ":" + realfn))
        return "virtual:" + cls + ":" + realfn

    def loadDataFull(self, virtualfn, cfgData):
        """
        Return a complete set of data for fn.
        To do this, we need to parse the file - the cache only ever holds
        the learned subset of variables.
        """

        (fn, cls) = self.virtualfn2realfn(virtualfn)

        bb.msg.debug(1, bb.msg.domain.Cache, "Parsing %s (full)" % fn)

        bb_data = self.load_bbfile(fn, cfgData)
        return bb_data[cls]

    def loadData(self, fn, cfgData, cacheData):
        """
        Load a subset of data for fn.
        If the cached data is valid we do nothing,
        To do this, we need to parse the file and set the system
        to record the variables accessed.
        Return the cache status and whether the file was skipped when parsed
        """
        skipped = 0
        virtuals = 0

        if fn not in self.checked:
            self.cacheValidUpdate(fn)

        if self.cacheValid(fn):
            # Cache hit: replay fn plus every BBCLASSEXTEND variant from cache
            multi = self.getVar('BBCLASSEXTEND', fn, True)
            for cls in (multi or "").split() + [""]:
                virtualfn = self.realfn2virtual(fn, cls)
                if self.depends_cache[virtualfn]["__SKIPPED"]:
                    skipped += 1
                    bb.msg.debug(1, bb.msg.domain.Cache, "Skipping %s" % virtualfn)
                    continue
                self.handle_data(virtualfn, cacheData)
                virtuals += 1
            return True, skipped, virtuals

        bb.msg.debug(1, bb.msg.domain.Cache, "Parsing %s" % fn)

        # Cache miss: parse for real; load_bbfile returns one datastore per
        # class variant (keyed "" for the base recipe).
        bb_data = self.load_bbfile(fn, cfgData)

        for data in bb_data:
            virtualfn = self.realfn2virtual(fn, data)
            self.setData(virtualfn, fn, bb_data[data])
            if self.getVar("__SKIPPED", virtualfn, True):
                skipped += 1
                bb.msg.debug(1, bb.msg.domain.Cache, "Skipping %s" % virtualfn)
            else:
                self.handle_data(virtualfn, cacheData)
                virtuals += 1
        return False, skipped, virtuals


    def cacheValid(self, fn):
        """
        Is the cache valid for fn?
        Fast version, no timestamps checked.
        """
        # Is cache enabled?
        if not self.has_cache:
            return False
        if fn in self.clean:
            return True
        return False

    def cacheValidUpdate(self, fn):
        """
        Is the cache valid for fn?
        Make thorough (slower) checks including timestamps.
        """
        # Is cache enabled?
        if not self.has_cache:
            return False

        self.checked[fn] = ""

        # Pretend we're clean so getVar works
        self.clean[fn] = ""

        # File isn't in depends_cache
        if not fn in self.depends_cache:
            bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s is not cached" % fn)
            self.remove(fn)
            return False

        mtime = bb.parse.cached_mtime_noerror(fn)

        # Check file still exists
        if mtime == 0:
            bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s not longer exists" % fn)
            self.remove(fn)
            return False

        # Check the file's timestamp against the one recorded at parse time
        if mtime != self.getVar("CACHETIMESTAMP", fn, True):
            bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s changed" % fn)
            self.remove(fn)
            return False

        # Check dependencies are still valid
        depends = self.getVar("__depends", fn, True)
        if depends:
            for f,old_mtime in depends:
                fmtime = bb.parse.cached_mtime_noerror(f)
                # Check if file still exists
                if old_mtime != 0 and fmtime == 0:
                    self.remove(fn)
                    return False

                if (fmtime != old_mtime):
                    bb.msg.debug(2, bb.msg.domain.Cache, "Cache: %s's dependency %s changed" % (fn, f))
                    self.remove(fn)
                    return False

        #bb.msg.debug(2, bb.msg.domain.Cache, "Depends Cache: %s is clean" % fn)
        if not fn in self.clean:
            self.clean[fn] = ""

        # Mark extended class data as clean too
        multi = self.getVar('BBCLASSEXTEND', fn, True)
        for cls in (multi or "").split():
            virtualfn = self.realfn2virtual(fn, cls)
            self.clean[virtualfn] = ""

        return True

    def remove(self, fn):
        """
        Remove a fn from the cache
        Called from the parser in error cases
        """
        bb.msg.debug(1, bb.msg.domain.Cache, "Removing %s from cache" % fn)
        if fn in self.depends_cache:
            del self.depends_cache[fn]
        if fn in self.clean:
            del self.clean[fn]

    def sync(self):
        """
        Save the cache
        Called from the parser when complete (or exiting)
        """
        import copy

        if not self.has_cache:
            return

        if self.cacheclean:
            bb.msg.note(1, bb.msg.domain.Cache, "Cache is clean, not saving.")
            return

        version_data = {}
        version_data['CACHE_VER'] = __cache_version__
        version_data['BITBAKE_VER'] = bb.__version__

        # Work on a deep copy so entries can be evicted without disturbing
        # the in-memory cache still in use.
        cache_data = copy.deepcopy(self.depends_cache)
        for fn in self.depends_cache:
            if '__BB_DONT_CACHE' in self.depends_cache[fn] and self.depends_cache[fn]['__BB_DONT_CACHE']:
                bb.msg.debug(2, bb.msg.domain.Cache, "Not caching %s, marked as not cacheable" % fn)
                del cache_data[fn]
            elif 'PV' in self.depends_cache[fn] and 'SRCREVINACTION' in self.depends_cache[fn]['PV']:
                bb.msg.error(bb.msg.domain.Cache, "Not caching %s as it had SRCREVINACTION in PV. Please report this bug" % fn)
                del cache_data[fn]

        p = pickle.Pickler(file(self.cachefile, "wb" ), -1 )
        p.dump([cache_data, version_data])

    def mtime(self, cachefile):
        # Thin wrapper; returns 0 when the file is missing (noerror variant).
        return bb.parse.cached_mtime_noerror(cachefile)

    def handle_data(self, file_name, cacheData):
        """
        Save data we need into the cache (populates the CacheData tables
        from the variables cached for file_name).
        """

        pn = self.getVar('PN', file_name, True)
        pe = self.getVar('PE', file_name, True) or "0"
        pv = self.getVar('PV', file_name, True)
        if 'SRCREVINACTION' in pv:
            # NOTE(review): bb.note (not bb.msg.note) - verify this helper exists
            bb.note("Found SRCREVINACTION in PV (%s) or %s. Please report this bug." % (pv, file_name))
        pr = self.getVar('PR', file_name, True)
        dp = int(self.getVar('DEFAULT_PREFERENCE', file_name, True) or "0")
        depends = bb.utils.explode_deps(self.getVar("DEPENDS", file_name, True) or "")
        packages = (self.getVar('PACKAGES', file_name, True) or "").split()
        packages_dynamic = (self.getVar('PACKAGES_DYNAMIC', file_name, True) or "").split()
        rprovides = (self.getVar("RPROVIDES", file_name, True) or "").split()

        cacheData.task_deps[file_name] = self.getVar("_task_deps", file_name, True)

        # build PackageName to FileName lookup table
        if pn not in cacheData.pkg_pn:
            cacheData.pkg_pn[pn] = []
        cacheData.pkg_pn[pn].append(file_name)

        cacheData.stamp[file_name] = self.getVar('STAMP', file_name, True)

        # build FileName to PackageName lookup table
        cacheData.pkg_fn[file_name] = pn
        cacheData.pkg_pepvpr[file_name] = (pe,pv,pr)
        cacheData.pkg_dp[file_name] = dp

        # A recipe always provides its own PN, plus anything in PROVIDES
        provides = [pn]
        for provide in (self.getVar("PROVIDES", file_name, True) or "").split():
            if provide not in provides:
                provides.append(provide)

        # Build forward and reverse provider hashes
        # Forward: virtual -> [filenames]
        # Reverse: PN -> [virtuals]
        if pn not in cacheData.pn_provides:
            cacheData.pn_provides[pn] = []

        cacheData.fn_provides[file_name] = provides
        for provide in provides:
            if provide not in cacheData.providers:
                cacheData.providers[provide] = []
            cacheData.providers[provide].append(file_name)
            if not provide in cacheData.pn_provides[pn]:
                cacheData.pn_provides[pn].append(provide)

        cacheData.deps[file_name] = []
        for dep in depends:
            if not dep in cacheData.deps[file_name]:
                cacheData.deps[file_name].append(dep)
            if not dep in cacheData.all_depends:
                cacheData.all_depends.append(dep)

        # Build reverse hash for PACKAGES, so runtime dependencies
        # can be be resolved (RDEPENDS, RRECOMMENDS etc.)
        for package in packages:
            if not package in cacheData.packages:
                cacheData.packages[package] = []
            cacheData.packages[package].append(file_name)
            rprovides += (self.getVar("RPROVIDES_%s" % package, file_name, 1) or "").split()

        for package in packages_dynamic:
            if not package in cacheData.packages_dynamic:
                cacheData.packages_dynamic[package] = []
            cacheData.packages_dynamic[package].append(file_name)

        for rprovide in rprovides:
            if not rprovide in cacheData.rproviders:
                cacheData.rproviders[rprovide] = []
            cacheData.rproviders[rprovide].append(file_name)

        # Build hash of runtime depends and rececommends

        if not file_name in cacheData.rundeps:
            cacheData.rundeps[file_name] = {}
        if not file_name in cacheData.runrecs:
            cacheData.runrecs[file_name] = {}

        # Per-package runtime deps = global RDEPENDS/RRECOMMENDS plus the
        # package-specific _<package> override, joined as a single string.
        rdepends = self.getVar('RDEPENDS', file_name, True) or ""
        rrecommends = self.getVar('RRECOMMENDS', file_name, True) or ""
        for package in packages + [pn]:
            if not package in cacheData.rundeps[file_name]:
                cacheData.rundeps[file_name][package] = []
            if not package in cacheData.runrecs[file_name]:
                cacheData.runrecs[file_name][package] = []

            cacheData.rundeps[file_name][package] = rdepends + " " + (self.getVar("RDEPENDS_%s" % package, file_name, True) or "")
            cacheData.runrecs[file_name][package] = rrecommends + " " + (self.getVar("RRECOMMENDS_%s" % package, file_name, True) or "")

        # Collect files we may need for possible world-dep
        # calculations
        if not self.getVar('BROKEN', file_name, True) and not self.getVar('EXCLUDE_FROM_WORLD', file_name, True):
            cacheData.possible_world.append(file_name)

        # Touch this to make sure its in the cache
        self.getVar('__BB_DONT_CACHE', file_name, True)
        self.getVar('BBCLASSEXTEND', file_name, True)

    def load_bbfile( self, bbfile , config):
        """
        Load and parse one .bb build file
        Return the data and whether parsing resulted in the file being skipped
        """

        import bb
        from bb import utils, data, parse, debug, event, fatal

        # expand tmpdir to include this topdir
        data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config)
        bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
        oldpath = os.path.abspath(os.getcwd())
        # chdir to the recipe's directory so relative paths resolve; restored
        # below on both the success and error paths.
        if bb.parse.cached_mtime_noerror(bbfile_loc):
            os.chdir(bbfile_loc)
        bb_data = data.init_db(config)
        try:
            bb_data = parse.handle(bbfile, bb_data) # read .bb data
            os.chdir(oldpath)
            return bb_data
        except:
            os.chdir(oldpath)
            raise
471
def init(cooker):
    """
    Construct and return the cooker's recipe-metadata Cache.

    The Objective: Cache the minimum amount of data possible yet get to the
    stage of building packages (i.e. tryBuild) without reparsing any .bb files.

    To do this, we intercept getVar calls and only cache the variables we see
    being accessed. We rely on the cache getVar calls being made for all
    variables bitbake might need to use to reach this stage. For each cached
    file we need to track:

    * Its mtime
    * The mtimes of all its dependencies
    * Whether it caused a parse.SkipPackage exception

    Files causing parsing errors are evicted from the cache.
    """
    return Cache(cooker)
490
491
492
493#============================================================================#
494# CacheData
495#============================================================================#
class CacheData:
    """
    Aggregated view of the cached per-recipe metadata.

    Instances start out empty: the "direct" tables are filled in by
    Cache.handle_data() as recipes are loaded, while the "indirect" ones
    are populated elsewhere by other components.
    """

    def __init__(self):
        # --- Direct cache variables (populated by Cache.handle_data) ---
        self.providers = {}
        self.rproviders = {}
        self.packages = {}
        self.packages_dynamic = {}
        self.possible_world = []
        self.pkg_pn = {}
        self.pkg_fn = {}
        self.pkg_pepvpr = {}
        self.pkg_dp = {}
        self.pn_provides = {}
        self.fn_provides = {}
        self.all_depends = []
        self.deps = {}
        self.rundeps = {}
        self.runrecs = {}
        self.task_queues = {}
        self.task_deps = {}
        self.stamp = {}
        self.preferred = {}

        # --- Indirect cache variables (set elsewhere) ---
        self.ignored_dependencies = []
        self.world_target = set()
        self.bbfile_priority = {}
        self.bbfile_config_priorities = []
diff --git a/bitbake-dev/lib/bb/cooker.py b/bitbake-dev/lib/bb/cooker.py
deleted file mode 100644
index 8036d7e9d5..0000000000
--- a/bitbake-dev/lib/bb/cooker.py
+++ /dev/null
@@ -1,978 +0,0 @@
1#!/usr/bin/env python
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4#
5# Copyright (C) 2003, 2004 Chris Larson
6# Copyright (C) 2003, 2004 Phil Blundell
7# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
8# Copyright (C) 2005 Holger Hans Peter Freyther
9# Copyright (C) 2005 ROAD GmbH
10# Copyright (C) 2006 - 2007 Richard Purdie
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24
25import sys, os, getopt, glob, copy, os.path, re, time
26import bb
27from bb import utils, data, parse, event, cache, providers, taskdata, runqueue
28from bb import command
29import bb.server.xmlrpc
30import itertools, sre_constants
31
class MultipleMatches(Exception):
    """
    Exception raised when multiple file matches are found.
    """
36
class ParsingErrorsFound(Exception):
    """
    Exception raised when parsing errors are found.
    """
41
class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build.
    """
46
47
# Different states cooker can be in (parse-state machine)
cookerClean = 1    # no recipe data parsed yet
cookerParsing = 2  # recipe parsing underway
cookerParsed = 3   # all recipe data parsed

# Different action states the cooker can be in
cookerRun = 1 # Cooker is running normally
cookerShutdown = 2 # Active tasks should be brought to a controlled stop
cookerStop = 3 # Stop, now!
57
58#============================================================================#
59# BBCooker
60#============================================================================#
61class BBCooker:
62 """
63 Manages one bitbake build run
64 """
65
    def __init__(self, configuration, server):
        """
        Set up one bitbake build run: message levels, configuration data,
        default task/targets, event data, terminal state and the command
        interface. configuration holds the parsed options; server supplies
        the BitBakeServer class UIs connect through.
        """
        # Parsed recipe metadata (filled in later by updateCache)
        self.status = None

        self.cache = None
        self.bb_cache = None

        self.server = server.BitBakeServer(self)

        self.configuration = configuration

        if self.configuration.verbose:
            bb.msg.set_verbose(True)

        if self.configuration.debug:
            bb.msg.set_debug_level(self.configuration.debug)
        else:
            bb.msg.set_debug_level(0)

        if self.configuration.debug_domains:
            bb.msg.set_debug_domains(self.configuration.debug_domains)

        self.configuration.data = bb.data.init()

        bb.data.inheritFromOS(self.configuration.data)

        # Extra configuration files requested on the command line are parsed
        # before the main bitbake.conf
        for f in self.configuration.file:
            self.parseConfigurationFile( f )

        self.parseConfigurationFile( os.path.join( "conf", "bitbake.conf" ) )

        # No -c task given: fall back to BB_DEFAULT_TASK, then "build"
        if not self.configuration.cmd:
            self.configuration.cmd = bb.data.getVar("BB_DEFAULT_TASK", self.configuration.data, True) or "build"

        # BBPKGS can supply build targets when none were given on the command line
        bbpkgs = bb.data.getVar('BBPKGS', self.configuration.data, True)
        if bbpkgs and len(self.configuration.pkgs_to_build) == 0:
            self.configuration.pkgs_to_build.extend(bbpkgs.split())

        #
        # Special updated configuration we use for firing events
        #
        self.configuration.event_data = bb.data.createCopy(self.configuration.data)
        bb.data.update_data(self.configuration.event_data)

        # TOSTOP must not be set or our children will hang when they output
        fd = sys.stdout.fileno()
        if os.isatty(fd):
            import termios
            tcattr = termios.tcgetattr(fd)
            if tcattr[3] & termios.TOSTOP:
                bb.msg.note(1, bb.msg.domain.Build, "The terminal had the TOSTOP bit set, clearing...")
                tcattr[3] = tcattr[3] & ~termios.TOSTOP
                termios.tcsetattr(fd, termios.TCSANOW, tcattr)

        self.command = bb.command.Command(self)
        self.cookerState = cookerClean
        self.cookerAction = cookerRun
122
123 def parseConfiguration(self):
124
125
126 # Change nice level if we're asked to
127 nice = bb.data.getVar("BB_NICE_LEVEL", self.configuration.data, True)
128 if nice:
129 curnice = os.nice(0)
130 nice = int(nice) - curnice
131 bb.msg.note(2, bb.msg.domain.Build, "Renice to %s " % os.nice(nice))
132
    def parseCommandLine(self):
        """
        Translate the parsed command-line options in self.configuration into
        self.commandlineAction - a [command, args...] list consumed by the
        command machinery, or None when there is nothing runnable (an error
        has already been reported via bb.error in that case).
        """
        # Parse any commandline into actions
        if self.configuration.show_environment:
            self.commandlineAction = None

            # --environment accepts at most one target, and not together
            # with --buildfile
            if 'world' in self.configuration.pkgs_to_build:
                bb.error("'world' is not a valid target for --environment.")
            elif len(self.configuration.pkgs_to_build) > 1:
                bb.error("Only one target can be used with the --environment option.")
            elif self.configuration.buildfile and len(self.configuration.pkgs_to_build) > 0:
                bb.error("No target should be used with the --environment and --buildfile options.")
            elif len(self.configuration.pkgs_to_build) > 0:
                self.commandlineAction = ["showEnvironmentTarget", self.configuration.pkgs_to_build]
            else:
                self.commandlineAction = ["showEnvironment", self.configuration.buildfile]
        elif self.configuration.buildfile is not None:
            self.commandlineAction = ["buildFile", self.configuration.buildfile, self.configuration.cmd]
        elif self.configuration.revisions_changed:
            self.commandlineAction = ["compareRevisions"]
        elif self.configuration.show_versions:
            self.commandlineAction = ["showVersions"]
        elif self.configuration.parse_only:
            self.commandlineAction = ["parseFiles"]
        # FIXME - implement
        #elif self.configuration.interactive:
        #    self.interactiveMode()
        elif self.configuration.dot_graph:
            if self.configuration.pkgs_to_build:
                self.commandlineAction = ["generateDotGraph", self.configuration.pkgs_to_build, self.configuration.cmd]
            else:
                self.commandlineAction = None
                bb.error("Please specify a package name for dependency graph generation.")
        else:
            # Default action: build whatever targets were given
            if self.configuration.pkgs_to_build:
                self.commandlineAction = ["buildTargets", self.configuration.pkgs_to_build, self.configuration.cmd]
            else:
                self.commandlineAction = None
                bb.error("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
171
    def runCommands(self, server, data, abort):
        """
        Run any queued asynchronous command
        This is done by the idle handler so it runs in true context rather than
        tied to any UI.

        (server, data and abort come from the idle-handler callback signature;
        none are consulted here - only the command queue is serviced.)
        """

        return self.command.runAsyncCommand()
180
    def tryBuildPackage(self, fn, item, task, the_data):
        """
        Build one task of a package, optionally build following task depends

        Returns True on success (dry_run counts as success without executing);
        logs and re-raises build failures.
        NOTE(review): fn and item are unused in this visible body.
        """
        try:
            # dry_run: report success without executing anything
            if not self.configuration.dry_run:
                bb.build.exec_task('do_%s' % task, the_data)
            return True
        except bb.build.FuncFailed:
            bb.msg.error(bb.msg.domain.Build, "task stack execution failed")
            raise
        except bb.build.EventException, e:
            # The failing event is carried as the second exception argument
            event = e.args[1]
            bb.msg.error(bb.msg.domain.Build, "%s event exception, aborting" % bb.event.getName(event))
            raise
196
197 def tryBuild(self, fn, task):
198 """
199 Build a provider and its dependencies.
200 build_depends is a list of previous build dependencies (not runtime)
201 If build_depends is empty, we're dealing with a runtime depends
202 """
203
204 the_data = self.bb_cache.loadDataFull(fn, self.configuration.data)
205
206 item = self.status.pkg_fn[fn]
207
208 #if bb.build.stamp_is_current('do_%s' % self.configuration.cmd, the_data):
209 # return True
210
211 return self.tryBuildPackage(fn, item, task, the_data)
212
213 def showVersions(self):
214
215 # Need files parsed
216 self.updateCache()
217
218 pkg_pn = self.status.pkg_pn
219 preferred_versions = {}
220 latest_versions = {}
221
222 # Sort by priority
223 for pn in pkg_pn.keys():
224 (last_ver,last_file,pref_ver,pref_file) = bb.providers.findBestProvider(pn, self.configuration.data, self.status)
225 preferred_versions[pn] = (pref_ver, pref_file)
226 latest_versions[pn] = (last_ver, last_file)
227
228 pkg_list = pkg_pn.keys()
229 pkg_list.sort()
230
231 bb.msg.plain("%-35s %25s %25s" % ("Package Name", "Latest Version", "Preferred Version"))
232 bb.msg.plain("%-35s %25s %25s\n" % ("============", "==============", "================="))
233
234 for p in pkg_list:
235 pref = preferred_versions[p]
236 latest = latest_versions[p]
237
238 prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
239 lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]
240
241 if pref == latest:
242 prefstr = ""
243
244 bb.msg.plain("%-35s %25s %25s" % (p, lateststr, prefstr))
245
    def compareRevisions(self):
        # Ask the fetchers to compare upstream revisions against stored ones
        # and surface the result to the UI as an exit-code event.
        # (sic: "revisons" is the actual spelling of the bb.fetch API)
        ret = bb.fetch.fetcher_compare_revisons(self.configuration.data)
        bb.event.fire(bb.command.CookerCommandSetExitCode(ret), self.configuration.event_data)
249
    def showEnvironment(self, buildfile = None, pkgs_to_build = []):
        """
        Show the outer or per-package environment:
        - buildfile given: environment of that recipe file
        - exactly one target in pkgs_to_build: environment of its provider
        - otherwise: the global configuration environment

        NOTE(review): mutable default for pkgs_to_build - harmless here since
        it is only read, but a None default would be safer.
        """
        fn = None
        envdata = None

        if buildfile:
            self.cb = None
            self.bb_cache = bb.cache.init(self)
            fn = self.matchFile(buildfile)
        elif len(pkgs_to_build) == 1:
            self.updateCache()

            localdata = data.createCopy(self.configuration.data)
            bb.data.update_data(localdata)
            bb.data.expandKeys(localdata)

            # Resolve the target to the recipe file providing it
            taskdata = bb.taskdata.TaskData(self.configuration.abort)
            taskdata.add_provider(localdata, self.status, pkgs_to_build[0])
            taskdata.add_unresolved(localdata, self.status)

            targetid = taskdata.getbuild_id(pkgs_to_build[0])
            fnid = taskdata.build_targets[targetid][0]
            fn = taskdata.fn_index[fnid]
        else:
            envdata = self.configuration.data

        if fn:
            try:
                envdata = self.bb_cache.loadDataFull(fn, self.configuration.data)
            except IOError, e:
                bb.msg.error(bb.msg.domain.Parsing, "Unable to read %s: %s" % (fn, e))
                raise
            except Exception, e:
                bb.msg.error(bb.msg.domain.Parsing, "%s" % e)
                raise

        # In-memory sink for emit_env so output goes through bb.msg.plain
        class dummywrite:
            def __init__(self):
                self.writebuf = ""
            def write(self, output):
                self.writebuf = self.writebuf + output

        # emit variables and shell functions
        try:
            data.update_data(envdata)
            wb = dummywrite()
            data.emit_env(wb, envdata, True)
            bb.msg.plain(wb.writebuf)
        except Exception, e:
            bb.msg.fatal(bb.msg.domain.Parsing, "%s" % e)
        # emit the metadata which isnt valid shell
        data.expandKeys(envdata)
        for e in envdata.keys():
            if data.getVarFlag( e, 'python', envdata ):
                bb.msg.plain("\npython %s () {\n%s}\n" % (e, data.getVar(e, envdata, 1)))
307
    def generateDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.

        Prepares a runqueue for pkgs_to_build (running 'task' for each) and
        walks it, returning a dict with keys "depends", "tdepends", "pn",
        "rdepends-pn", "packages", "rdepends-pkg" and "rrecs-pkg".
        """

        # Need files parsed
        self.updateCache()

        # If we are told to do the None task then query the default task
        if (task == None):
            task = self.configuration.cmd

        pkgs_to_build = self.checkPackages(pkgs_to_build)

        localdata = data.createCopy(self.configuration.data)
        bb.data.update_data(localdata)
        bb.data.expandKeys(localdata)
        taskdata = bb.taskdata.TaskData(self.configuration.abort)

        runlist = []
        for k in pkgs_to_build:
            taskdata.add_provider(localdata, self.status, k)
            runlist.append([k, "do_%s" % task])
        taskdata.add_unresolved(localdata, self.status)

        # Prepare (but do not execute) the runqueue to get resolved task order
        rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist)
        rq.prepare_runqueue()

        seen_fnids = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        for task in range(len(rq.runq_fnid)):
            taskname = rq.runq_task[task]
            fnid = rq.runq_fnid[task]
            fn = taskdata.fn_index[fnid]
            pn = self.status.pkg_fn[fn]
            version  = "%s:%s-%s" % self.status.pkg_pepvpr[fn]
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = fn
                depend_tree["pn"][pn]["version"] = version
            # Task-level edges, keyed "pn.taskname"
            for dep in rq.runq_depends[task]:
                depfn = taskdata.fn_index[rq.runq_fnid[dep]]
                deppn = self.status.pkg_fn[depfn]
                dotname = "%s.%s" % (pn, rq.runq_task[task])
                if not dotname in depend_tree["tdepends"]:
                    depend_tree["tdepends"][dotname] = []
                depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, rq.runq_task[dep]))
            # Recipe-level data only needs collecting once per fn
            if fnid not in seen_fnids:
                seen_fnids.append(fnid)
                packages = []

                depend_tree["depends"][pn] = []
                for dep in taskdata.depids[fnid]:
                    depend_tree["depends"][pn].append(taskdata.build_names_index[dep])

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata.rdepids[fnid]:
                    depend_tree["rdepends-pn"][pn].append(taskdata.run_names_index[rdep])

                rdepends = self.status.rundeps[fn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.status.runrecs[fn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if not package in packages:
                        packages.append(package)

                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                        depend_tree["packages"][package]["pn"] = pn
                        depend_tree["packages"][package]["filename"] = fn
                        depend_tree["packages"][package]["version"] = version

        return depend_tree
398
399
400 def generateDepTreeEvent(self, pkgs_to_build, task):
401 """
402 Create a task dependency graph of pkgs_to_build.
403 Generate an event with the result
404 """
405 depgraph = self.generateDepTreeData(pkgs_to_build, task)
406 bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.configuration.data)
407
408 def generateDotGraphFiles(self, pkgs_to_build, task):
409 """
410 Create a task dependency graph of pkgs_to_build.
411 Save the result to a set of .dot files.
412 """
413
414 depgraph = self.generateDepTreeData(pkgs_to_build, task)
415
416 # Prints a flattened form of package-depends below where subpackages of a package are merged into the main pn
417 depends_file = file('pn-depends.dot', 'w' )
418 print >> depends_file, "digraph depends {"
419 for pn in depgraph["pn"]:
420 fn = depgraph["pn"][pn]["filename"]
421 version = depgraph["pn"][pn]["version"]
422 print >> depends_file, '"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn)
423 for pn in depgraph["depends"]:
424 for depend in depgraph["depends"][pn]:
425 print >> depends_file, '"%s" -> "%s"' % (pn, depend)
426 for pn in depgraph["rdepends-pn"]:
427 for rdepend in depgraph["rdepends-pn"][pn]:
428 print >> depends_file, '"%s" -> "%s" [style=dashed]' % (pn, rdepend)
429 print >> depends_file, "}"
430 bb.msg.plain("PN dependencies saved to 'pn-depends.dot'")
431
432 depends_file = file('package-depends.dot', 'w' )
433 print >> depends_file, "digraph depends {"
434 for package in depgraph["packages"]:
435 pn = depgraph["packages"][package]["pn"]
436 fn = depgraph["packages"][package]["filename"]
437 version = depgraph["packages"][package]["version"]
438 if package == pn:
439 print >> depends_file, '"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn)
440 else:
441 print >> depends_file, '"%s" [label="%s(%s) %s\\n%s"]' % (package, package, pn, version, fn)
442 for depend in depgraph["depends"][pn]:
443 print >> depends_file, '"%s" -> "%s"' % (package, depend)
444 for package in depgraph["rdepends-pkg"]:
445 for rdepend in depgraph["rdepends-pkg"][package]:
446 print >> depends_file, '"%s" -> "%s" [style=dashed]' % (package, rdepend)
447 for package in depgraph["rrecs-pkg"]:
448 for rdepend in depgraph["rrecs-pkg"][package]:
449 print >> depends_file, '"%s" -> "%s" [style=dashed]' % (package, rdepend)
450 print >> depends_file, "}"
451 bb.msg.plain("Package dependencies saved to 'package-depends.dot'")
452
453 tdepends_file = file('task-depends.dot', 'w' )
454 print >> tdepends_file, "digraph depends {"
455 for task in depgraph["tdepends"]:
456 (pn, taskname) = task.rsplit(".", 1)
457 fn = depgraph["pn"][pn]["filename"]
458 version = depgraph["pn"][pn]["version"]
459 print >> tdepends_file, '"%s.%s" [label="%s %s\\n%s\\n%s"]' % (pn, taskname, pn, taskname, version, fn)
460 for dep in depgraph["tdepends"][task]:
461 print >> tdepends_file, '"%s" -> "%s"' % (task, dep)
462 print >> tdepends_file, "}"
463 bb.msg.plain("Task dependencies saved to 'task-depends.dot'")
464
465 def buildDepgraph( self ):
466 all_depends = self.status.all_depends
467 pn_provides = self.status.pn_provides
468
469 localdata = data.createCopy(self.configuration.data)
470 bb.data.update_data(localdata)
471 bb.data.expandKeys(localdata)
472
473 def calc_bbfile_priority(filename):
474 for (regex, pri) in self.status.bbfile_config_priorities:
475 if regex.match(filename):
476 return pri
477 return 0
478
479 # Handle PREFERRED_PROVIDERS
480 for p in (bb.data.getVar('PREFERRED_PROVIDERS', localdata, 1) or "").split():
481 try:
482 (providee, provider) = p.split(':')
483 except:
484 bb.msg.fatal(bb.msg.domain.Provider, "Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
485 continue
486 if providee in self.status.preferred and self.status.preferred[providee] != provider:
487 bb.msg.error(bb.msg.domain.Provider, "conflicting preferences for %s: both %s and %s specified" % (providee, provider, self.status.preferred[providee]))
488 self.status.preferred[providee] = provider
489
490 # Calculate priorities for each file
491 for p in self.status.pkg_fn.keys():
492 self.status.bbfile_priority[p] = calc_bbfile_priority(p)
493
494 def buildWorldTargetList(self):
495 """
496 Build package list for "bitbake world"
497 """
498 all_depends = self.status.all_depends
499 pn_provides = self.status.pn_provides
500 bb.msg.debug(1, bb.msg.domain.Parsing, "collating packages for \"world\"")
501 for f in self.status.possible_world:
502 terminal = True
503 pn = self.status.pkg_fn[f]
504
505 for p in pn_provides[pn]:
506 if p.startswith('virtual/'):
507 bb.msg.debug(2, bb.msg.domain.Parsing, "World build skipping %s due to %s provider starting with virtual/" % (f, p))
508 terminal = False
509 break
510 for pf in self.status.providers[p]:
511 if self.status.pkg_fn[pf] != pn:
512 bb.msg.debug(2, bb.msg.domain.Parsing, "World build skipping %s due to both us and %s providing %s" % (f, pf, p))
513 terminal = False
514 break
515 if terminal:
516 self.status.world_target.add(pn)
517
518 # drop reference count now
519 self.status.possible_world = None
520 self.status.all_depends = None
521
522 def interactiveMode( self ):
523 """Drop off into a shell"""
524 try:
525 from bb import shell
526 except ImportError, details:
527 bb.msg.fatal(bb.msg.domain.Parsing, "Sorry, shell not available (%s)" % details )
528 else:
529 shell.start( self )
530
    def parseConfigurationFile( self, afile ):
        """
        Parse the configuration file 'afile' into self.configuration.data,
        then pull in INHERIT classes, register event handlers declared so
        far, initialise the fetchers and fire ConfigParsed.

        IOError and ParseError are fatal (reported via bb.msg.fatal).
        """
        try:
            self.configuration.data = bb.parse.handle( afile, self.configuration.data )

            # Handle any INHERITs and inherit the base class; "base" is
            # always inherited first, before anything listed in INHERIT.
            inherits = ["base"] + (bb.data.getVar('INHERIT', self.configuration.data, True ) or "").split()
            for inherit in inherits:
                self.configuration.data = bb.parse.handle(os.path.join('classes', '%s.bbclass' % inherit), self.configuration.data, True )

            # Normally we only register event handlers at the end of parsing .bb files
            # We register any handlers we've found so far here...
            for var in data.getVar('__BBHANDLERS', self.configuration.data) or []:
                bb.event.register(var,bb.data.getVar(var, self.configuration.data))

            # Prime the fetcher subsystem from the freshly parsed configuration.
            bb.fetch.fetcher_init(self.configuration.data)

            bb.event.fire(bb.event.ConfigParsed(), self.configuration.data)

        except IOError, e:
            bb.msg.fatal(bb.msg.domain.Parsing, "Error when parsing %s: %s" % (afile, str(e)))
        except bb.parse.ParseError, details:
            bb.msg.fatal(bb.msg.domain.Parsing, "Unable to parse %s (%s)" % (afile, details) )
553
554 def handleCollections( self, collections ):
555 """Handle collections"""
556 if collections:
557 collection_list = collections.split()
558 for c in collection_list:
559 regex = bb.data.getVar("BBFILE_PATTERN_%s" % c, self.configuration.data, 1)
560 if regex == None:
561 bb.msg.error(bb.msg.domain.Parsing, "BBFILE_PATTERN_%s not defined" % c)
562 continue
563 priority = bb.data.getVar("BBFILE_PRIORITY_%s" % c, self.configuration.data, 1)
564 if priority == None:
565 bb.msg.error(bb.msg.domain.Parsing, "BBFILE_PRIORITY_%s not defined" % c)
566 continue
567 try:
568 cre = re.compile(regex)
569 except re.error:
570 bb.msg.error(bb.msg.domain.Parsing, "BBFILE_PATTERN_%s \"%s\" is not a valid regular expression" % (c, regex))
571 continue
572 try:
573 pri = int(priority)
574 self.status.bbfile_config_priorities.append((cre, pri))
575 except ValueError:
576 bb.msg.error(bb.msg.domain.Parsing, "invalid value for BBFILE_PRIORITY_%s: \"%s\"" % (c, priority))
577
578 def buildSetVars(self):
579 """
580 Setup any variables needed before starting a build
581 """
582 if not bb.data.getVar("BUILDNAME", self.configuration.data):
583 bb.data.setVar("BUILDNAME", os.popen('date +%Y%m%d%H%M').readline().strip(), self.configuration.data)
584 bb.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S',time.gmtime()), self.configuration.data)
585
586 def matchFiles(self, buildfile):
587 """
588 Find the .bb files which match the expression in 'buildfile'.
589 """
590
591 bf = os.path.abspath(buildfile)
592 try:
593 os.stat(bf)
594 return [bf]
595 except OSError:
596 (filelist, masked) = self.collect_bbfiles()
597 regexp = re.compile(buildfile)
598 matches = []
599 for f in filelist:
600 if regexp.search(f) and os.path.isfile(f):
601 bf = f
602 matches.append(f)
603 return matches
604
605 def matchFile(self, buildfile):
606 """
607 Find the .bb file which matches the expression in 'buildfile'.
608 Raise an error if multiple files
609 """
610 matches = self.matchFiles(buildfile)
611 if len(matches) != 1:
612 bb.msg.error(bb.msg.domain.Parsing, "Unable to match %s (%s matches found):" % (buildfile, len(matches)))
613 for f in matches:
614 bb.msg.error(bb.msg.domain.Parsing, " %s" % f)
615 raise MultipleMatches
616 return matches[0]
617
    def buildFile(self, buildfile, task):
        """
        Build the single .bb file matching the expression 'buildfile',
        bypassing the usual cache/dependency machinery: external depends
        are stripped so only this one recipe's tasks run.

        task: task name without the "do_" prefix, or None for the
        configured default. Registers an idle handler on the server that
        drives the runqueue asynchronously; this method returns
        immediately after registration.
        """

        # Parse the configuration here. We need to do it explicitly here since
        # buildFile() doesn't use the cache
        self.parseConfiguration()

        # If we are told to do the None task then query the default task
        if (task == None):
            task = self.configuration.cmd

        fn = self.matchFile(buildfile)
        self.buildSetVars()

        # Load data into the cache for fn and parse the loaded cache data
        self.bb_cache = bb.cache.init(self)
        self.status = bb.cache.CacheData()
        self.bb_cache.loadData(fn, self.configuration.data, self.status)

        # Tweak some variables
        item = self.bb_cache.getVar('PN', fn, True)
        self.status.ignored_dependencies = set()
        self.status.bbfile_priority[fn] = 1

        # Remove external dependencies so the runqueue contains only this
        # recipe's own tasks.
        self.status.task_deps[fn]['depends'] = {}
        self.status.deps[fn] = []
        # NOTE(review): rundeps/runrecs are iterated as dicts keyed by
        # package name in generateDepTreeData(); confirm [] is intended here.
        self.status.rundeps[fn] = []
        self.status.runrecs[fn] = []

        # Remove stamp for target if force mode active
        if self.configuration.force:
            bb.msg.note(2, bb.msg.domain.RunQueue, "Remove stamp %s, %s" % (task, fn))
            bb.build.del_stamp('do_%s' % task, self.status, fn)

        # Setup taskdata structure
        taskdata = bb.taskdata.TaskData(self.configuration.abort)
        taskdata.add_provider(self.configuration.data, self.status, item)

        buildname = bb.data.getVar("BUILDNAME", self.configuration.data)
        bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.configuration.event_data)

        # Execute the runqueue
        runlist = [[item, "do_%s" % task]]

        rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist)

        def buildFileIdle(server, rq, abort):
            # Idle callback: advance the runqueue a little per call.
            # Returning 0.5 asks to be called again; a falsy return value
            # stops the callback after BuildCompleted has been fired.

            if abort or self.cookerAction == cookerStop:
                rq.finish_runqueue(True)
            elif self.cookerAction == cookerShutdown:
                rq.finish_runqueue(False)
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure, fnids:
                for fnid in fnids:
                    bb.msg.error(bb.msg.domain.Build, "'%s' failed" % taskdata.fn_index[fnid])
                    failures = failures + 1
                retval = False
            if not retval:
                self.command.finishAsyncCommand()
                bb.event.fire(bb.event.BuildCompleted(buildname, item, failures), self.configuration.event_data)
                return False
            return 0.5

        self.server.register_idle_function(buildFileIdle, rq)
688
    def buildTargets(self, targets, task):
        """
        Attempt to build the given list of targets with the given task
        (or the configured default when task is None). Ensures metadata
        is parsed first via updateCache(); execution is driven
        asynchronously by an idle handler registered on the server.
        """

        # Need files parsed
        self.updateCache()

        # If we are told to do the NULL task then query the default task
        if (task == None):
            task = self.configuration.cmd

        # Expands 'world' and rejects an empty target list.
        targets = self.checkPackages(targets)

        def buildTargetsIdle(server, rq, abort):
            # Idle callback: advance the runqueue a little per call.
            # Returning 0.5 asks to be called again; a falsy return value
            # stops the callback after BuildCompleted has been fired.

            if abort or self.cookerAction == cookerStop:
                rq.finish_runqueue(True)
            elif self.cookerAction == cookerShutdown:
                rq.finish_runqueue(False)
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure, fnids:
                for fnid in fnids:
                    bb.msg.error(bb.msg.domain.Build, "'%s' failed" % taskdata.fn_index[fnid])
                    failures = failures + 1
                retval = False
            if not retval:
                self.command.finishAsyncCommand()
                bb.event.fire(bb.event.BuildCompleted(buildname, targets, failures), self.configuration.event_data)
                return None
            return 0.5

        self.buildSetVars()

        buildname = bb.data.getVar("BUILDNAME", self.configuration.data)
        bb.event.fire(bb.event.BuildStarted(buildname, targets), self.configuration.event_data)

        # Resolve providers against an expanded copy of the datastore so
        # target resolution does not disturb the master configuration data.
        localdata = data.createCopy(self.configuration.data)
        bb.data.update_data(localdata)
        bb.data.expandKeys(localdata)

        taskdata = bb.taskdata.TaskData(self.configuration.abort)

        runlist = []
        for k in targets:
            taskdata.add_provider(localdata, self.status, k)
            runlist.append([k, "do_%s" % task])
        taskdata.add_unresolved(localdata, self.status)

        rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist)

        self.server.register_idle_function(buildTargetsIdle, rq)
743
    def updateCache(self):
        # Incrementally parse the metadata. Acts as a small state machine
        # over self.cookerState: the first call performs one-off setup and
        # moves to cookerParsing; each call then parses one more file.
        # Returns True while files remain, None once done (cookerParsed).

        if self.cookerState == cookerParsed:
            return

        if self.cookerState != cookerParsing:
            # First call: configuration parse and collection setup.

            self.parseConfiguration ()

            # Import Psyco if available and not disabled
            import platform
            if platform.machine() in ['i386', 'i486', 'i586', 'i686']:
                if not self.configuration.disable_psyco:
                    try:
                        import psyco
                    except ImportError:
                        bb.msg.note(1, bb.msg.domain.Collection, "Psyco JIT Compiler (http://psyco.sf.net) not available. Install it to increase performance.")
                    else:
                        # JIT-compile only the hot per-file parse loop.
                        psyco.bind( CookerParser.parse_next )
                else:
                    bb.msg.note(1, bb.msg.domain.Collection, "You have disabled Psyco. This decreases performance.")

            self.status = bb.cache.CacheData()

            ignore = bb.data.getVar("ASSUME_PROVIDED", self.configuration.data, 1) or ""
            self.status.ignored_dependencies = set(ignore.split())

            for dep in self.configuration.extra_assume_provided:
                self.status.ignored_dependencies.add(dep)

            self.handleCollections( bb.data.getVar("BBFILE_COLLECTIONS", self.configuration.data, 1) )

            bb.msg.debug(1, bb.msg.domain.Collection, "collecting .bb files")
            (filelist, masked) = self.collect_bbfiles()
            # Keep configuration-level dependencies separate from the
            # per-recipe __depends accumulated while parsing .bb files.
            bb.data.renameVar("__depends", "__base_depends", self.configuration.data)

            self.parser = CookerParser(self, filelist, masked)
            self.cookerState = cookerParsing

        if not self.parser.parse_next():
            bb.msg.debug(1, bb.msg.domain.Collection, "parsing complete")
            self.buildDepgraph()
            self.cookerState = cookerParsed
            return None

        return True
790
791 def checkPackages(self, pkgs_to_build):
792
793 if len(pkgs_to_build) == 0:
794 raise NothingToBuild
795
796 if 'world' in pkgs_to_build:
797 self.buildWorldTargetList()
798 pkgs_to_build.remove('world')
799 for t in self.status.world_target:
800 pkgs_to_build.append(t)
801
802 return pkgs_to_build
803
804 def get_bbfiles( self, path = os.getcwd() ):
805 """Get list of default .bb files by reading out the current directory"""
806 contents = os.listdir(path)
807 bbfiles = []
808 for f in contents:
809 (root, ext) = os.path.splitext(f)
810 if ext == ".bb":
811 bbfiles.append(os.path.abspath(os.path.join(os.getcwd(),f)))
812 return bbfiles
813
814 def find_bbfiles( self, path ):
815 """Find all the .bb files in a directory"""
816 from os.path import join
817
818 found = []
819 for dir, dirs, files in os.walk(path):
820 for ignored in ('SCCS', 'CVS', '.svn'):
821 if ignored in dirs:
822 dirs.remove(ignored)
823 found += [join(dir,f) for f in files if f.endswith('.bb')]
824
825 return found
826
827 def collect_bbfiles( self ):
828 """Collect all available .bb build files"""
829 parsed, cached, skipped, masked = 0, 0, 0, 0
830 self.bb_cache = bb.cache.init(self)
831
832 files = (data.getVar( "BBFILES", self.configuration.data, 1 ) or "").split()
833 data.setVar("BBFILES", " ".join(files), self.configuration.data)
834
835 if not len(files):
836 files = self.get_bbfiles()
837
838 if not len(files):
839 bb.msg.error(bb.msg.domain.Collection, "no files to build.")
840
841 newfiles = []
842 for f in files:
843 if os.path.isdir(f):
844 dirfiles = self.find_bbfiles(f)
845 if dirfiles:
846 newfiles += dirfiles
847 continue
848 else:
849 globbed = glob.glob(f)
850 if not globbed and os.path.exists(f):
851 globbed = [f]
852 newfiles += globbed
853
854 bbmask = bb.data.getVar('BBMASK', self.configuration.data, 1)
855
856 if not bbmask:
857 return (newfiles, 0)
858
859 try:
860 bbmask_compiled = re.compile(bbmask)
861 except sre_constants.error:
862 bb.msg.fatal(bb.msg.domain.Collection, "BBMASK is not a valid regular expression.")
863
864 finalfiles = []
865 for f in newfiles:
866 if bbmask_compiled.search(f):
867 bb.msg.debug(1, bb.msg.domain.Collection, "skipping masked file %s" % f)
868 masked += 1
869 continue
870 finalfiles.append(f)
871
872 return (finalfiles, masked)
873
    def serve(self):
        """
        Main server entry point: run the server loop until shutdown,
        optionally under a profiler, then announce CookerExit.
        """

        # Empty the environment. The environment will be populated as
        # necessary from the data store.
        bb.utils.empty_environment()

        if self.configuration.profile:
            try:
                import cProfile as profile
            except:
                import profile

            profile.runctx("self.server.serve_forever()", globals(), locals(), "profile.log")

            # Redirect stdout to capture profile information — pstats only
            # writes to stdout, so point fd 1 at profile.log.processed.
            pout = open('profile.log.processed', 'w')
            so = sys.stdout.fileno()
            os.dup2(pout.fileno(), so)

            import pstats
            p = pstats.Stats('profile.log')
            p.sort_stats('time')
            p.print_stats()
            p.print_callers()
            p.sort_stats('cumulative')
            p.print_stats()

            # NOTE(review): 'so' now refers to the already-redirected fd,
            # so this dup2 does not restore the original stdout — confirm
            # that is acceptable (the server is exiting at this point).
            os.dup2(so, pout.fileno())
            pout.flush()
            pout.close()
        else:
            self.server.serve_forever()

        bb.event.fire(CookerExit(), self.configuration.event_data)
908
class CookerExit(bb.event.Event):
    """Event broadcast to clients to notify them the cooker is shutting down."""

    def __init__(self):
        bb.event.Event.__init__(self)
916
class CookerParser:
    """
    Incremental .bb-file parser driven by repeated parse_next() calls.
    Accumulates parse/cache/skip statistics and fires a ParseProgress
    event after every file.
    """
    def __init__(self, cooker, filelist, masked):
        # Internal data
        self.filelist = filelist
        self.cooker = cooker

        # Accounting statistics
        self.parsed = 0        # files actually parsed this run
        self.cached = 0        # files satisfied from the cache
        self.error = 0         # files that failed to parse
        self.masked = masked   # BBMASK-skipped count (computed by the caller)
        self.total = len(filelist)

        self.skipped = 0       # recipes the parser chose to skip
        self.virtuals = 0      # extra virtual variants produced while parsing

        # Pointer to the next file to parse
        self.pointer = 0

    def parse_next(self):
        # Parse (or load from cache) the next file in the list.
        # Returns True while files remain, False when done; raises
        # ParsingErrorsFound at the end if any file failed.
        #
        # NOTE(review): 'cooker' is bound only inside the if-block below;
        # with an empty filelist the sync() call at the bottom would hit an
        # unbound local — confirm callers never construct with an empty list.
        if self.pointer < len(self.filelist):
            f = self.filelist[self.pointer]
            cooker = self.cooker

            try:
                fromCache, skipped, virtuals = cooker.bb_cache.loadData(f, cooker.configuration.data, cooker.status)
                if fromCache:
                    self.cached += 1
                else:
                    self.parsed += 1

                self.skipped += skipped
                self.virtuals += virtuals

            except IOError, e:
                self.error += 1
                cooker.bb_cache.remove(f)
                bb.msg.error(bb.msg.domain.Collection, "opening %s: %s" % (f, e))
                pass
            except KeyboardInterrupt:
                # Drop the partial cache entry and flush before propagating.
                cooker.bb_cache.remove(f)
                cooker.bb_cache.sync()
                raise
            except Exception, e:
                self.error += 1
                cooker.bb_cache.remove(f)
                bb.msg.error(bb.msg.domain.Collection, "%s while parsing %s" % (e, f))
            except:
                # Anything not derived from Exception (e.g. old-style raise
                # or SystemExit): clean up the cache entry and re-raise.
                cooker.bb_cache.remove(f)
                raise
            finally:
                # Progress is reported even when a file errors out.
                bb.event.fire(bb.event.ParseProgress(self.cached, self.parsed, self.skipped, self.masked, self.virtuals, self.error, self.total), cooker.configuration.event_data)

            self.pointer += 1

        if self.pointer >= self.total:
            cooker.bb_cache.sync()
            if self.error > 0:
                raise ParsingErrorsFound
            return False
        return True
978
diff --git a/bitbake-dev/lib/bb/data.py b/bitbake-dev/lib/bb/data.py
deleted file mode 100644
index d3058b9a1d..0000000000
--- a/bitbake-dev/lib/bb/data.py
+++ /dev/null
@@ -1,562 +0,0 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Data' implementations
5
6Functions for interacting with the data structure used by the
7BitBake build tools.
8
9The expandData and update_data are the most expensive
10operations. At night the cookie monster came by and
11suggested 'give me cookies on setting the variables and
12things will work out'. Taking this suggestion into account
13applying the skills from the not yet passed 'Entwurf und
14Analyse von Algorithmen' lecture and the cookie
15monster seems to be right. We will track setVar more carefully
16to have faster update_data and expandKeys operations.
17
18This is a treade-off between speed and memory again but
19the speed is more critical here.
20"""
21
22# Copyright (C) 2003, 2004 Chris Larson
23# Copyright (C) 2005 Holger Hans Peter Freyther
24#
25# This program is free software; you can redistribute it and/or modify
26# it under the terms of the GNU General Public License version 2 as
27# published by the Free Software Foundation.
28#
29# This program is distributed in the hope that it will be useful,
30# but WITHOUT ANY WARRANTY; without even the implied warranty of
31# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
32# GNU General Public License for more details.
33#
34# You should have received a copy of the GNU General Public License along
35# with this program; if not, write to the Free Software Foundation, Inc.,
36# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
37#
38#Based on functions from the base bb module, Copyright 2003 Holger Schurig
39
40import sys, os, re, types
41if sys.argv[0][-5:] == "pydoc":
42 path = os.path.dirname(os.path.dirname(sys.argv[1]))
43else:
44 path = os.path.dirname(os.path.dirname(sys.argv[0]))
45sys.path.insert(0,path)
46
47from bb import data_smart
48import bb
49
50_dict_type = data_smart.DataSmart
51
52def init():
53 return _dict_type()
54
55def init_db(parent = None):
56 if parent:
57 return parent.createCopy()
58 else:
59 return _dict_type()
60
61def createCopy(source):
62 """Link the source set to the destination
63 If one does not find the value in the destination set,
64 search will go on to the source set to get the value.
65 Value from source are copy-on-write. i.e. any try to
66 modify one of them will end up putting the modified value
67 in the destination set.
68 """
69 return source.createCopy()
70
71def initVar(var, d):
72 """Non-destructive var init for data structure"""
73 d.initVar(var)
74
75
76def setVar(var, value, d):
77 """Set a variable to a given value
78
79 Example:
80 >>> d = init()
81 >>> setVar('TEST', 'testcontents', d)
82 >>> print getVar('TEST', d)
83 testcontents
84 """
85 d.setVar(var,value)
86
87
88def getVar(var, d, exp = 0):
89 """Gets the value of a variable
90
91 Example:
92 >>> d = init()
93 >>> setVar('TEST', 'testcontents', d)
94 >>> print getVar('TEST', d)
95 testcontents
96 """
97 return d.getVar(var,exp)
98
99
100def renameVar(key, newkey, d):
101 """Renames a variable from key to newkey
102
103 Example:
104 >>> d = init()
105 >>> setVar('TEST', 'testcontents', d)
106 >>> renameVar('TEST', 'TEST2', d)
107 >>> print getVar('TEST2', d)
108 testcontents
109 """
110 d.renameVar(key, newkey)
111
112def delVar(var, d):
113 """Removes a variable from the data set
114
115 Example:
116 >>> d = init()
117 >>> setVar('TEST', 'testcontents', d)
118 >>> print getVar('TEST', d)
119 testcontents
120 >>> delVar('TEST', d)
121 >>> print getVar('TEST', d)
122 None
123 """
124 d.delVar(var)
125
126def setVarFlag(var, flag, flagvalue, d):
127 """Set a flag for a given variable to a given value
128
129 Example:
130 >>> d = init()
131 >>> setVarFlag('TEST', 'python', 1, d)
132 >>> print getVarFlag('TEST', 'python', d)
133 1
134 """
135 d.setVarFlag(var,flag,flagvalue)
136
137def getVarFlag(var, flag, d):
138 """Gets given flag from given var
139
140 Example:
141 >>> d = init()
142 >>> setVarFlag('TEST', 'python', 1, d)
143 >>> print getVarFlag('TEST', 'python', d)
144 1
145 """
146 return d.getVarFlag(var,flag)
147
148def delVarFlag(var, flag, d):
149 """Removes a given flag from the variable's flags
150
151 Example:
152 >>> d = init()
153 >>> setVarFlag('TEST', 'testflag', 1, d)
154 >>> print getVarFlag('TEST', 'testflag', d)
155 1
156 >>> delVarFlag('TEST', 'testflag', d)
157 >>> print getVarFlag('TEST', 'testflag', d)
158 None
159
160 """
161 d.delVarFlag(var,flag)
162
163def setVarFlags(var, flags, d):
164 """Set the flags for a given variable
165
166 Note:
167 setVarFlags will not clear previous
168 flags. Think of this method as
169 addVarFlags
170
171 Example:
172 >>> d = init()
173 >>> myflags = {}
174 >>> myflags['test'] = 'blah'
175 >>> setVarFlags('TEST', myflags, d)
176 >>> print getVarFlag('TEST', 'test', d)
177 blah
178 """
179 d.setVarFlags(var,flags)
180
181def getVarFlags(var, d):
182 """Gets a variable's flags
183
184 Example:
185 >>> d = init()
186 >>> setVarFlag('TEST', 'test', 'blah', d)
187 >>> print getVarFlags('TEST', d)['test']
188 blah
189 """
190 return d.getVarFlags(var)
191
192def delVarFlags(var, d):
193 """Removes a variable's flags
194
195 Example:
196 >>> data = init()
197 >>> setVarFlag('TEST', 'testflag', 1, data)
198 >>> print getVarFlag('TEST', 'testflag', data)
199 1
200 >>> delVarFlags('TEST', data)
201 >>> print getVarFlags('TEST', data)
202 None
203
204 """
205 d.delVarFlags(var)
206
207def keys(d):
208 """Return a list of keys in d
209
210 Example:
211 >>> d = init()
212 >>> setVar('TEST', 1, d)
213 >>> setVar('MOO' , 2, d)
214 >>> setVarFlag('TEST', 'test', 1, d)
215 >>> keys(d)
216 ['TEST', 'MOO']
217 """
218 return d.keys()
219
220def getData(d):
221 """Returns the data object used"""
222 return d
223
224def setData(newData, d):
225 """Sets the data object to the supplied value"""
226 d = newData
227
228
229##
230## Cookie Monsters' query functions
231##
232def _get_override_vars(d, override):
233 """
234 Internal!!!
235
236 Get the Names of Variables that have a specific
237 override. This function returns a iterable
238 Set or an empty list
239 """
240 return []
241
242def _get_var_flags_triple(d):
243 """
244 Internal!!!
245
246 """
247 return []
248
249__expand_var_regexp__ = re.compile(r"\${[^{}]+}")
250__expand_python_regexp__ = re.compile(r"\${@.+?}")
251
252def expand(s, d, varname = None):
253 """Variable expansion using the data store.
254
255 Example:
256 Standard expansion:
257 >>> d = init()
258 >>> setVar('A', 'sshd', d)
259 >>> print expand('/usr/bin/${A}', d)
260 /usr/bin/sshd
261
262 Python expansion:
263 >>> d = init()
264 >>> print expand('result: ${@37 * 72}', d)
265 result: 2664
266
267 Shell expansion:
268 >>> d = init()
269 >>> print expand('${TARGET_MOO}', d)
270 ${TARGET_MOO}
271 >>> setVar('TARGET_MOO', 'yupp', d)
272 >>> print expand('${TARGET_MOO}',d)
273 yupp
274 >>> setVar('SRC_URI', 'http://somebug.${TARGET_MOO}', d)
275 >>> delVar('TARGET_MOO', d)
276 >>> print expand('${SRC_URI}', d)
277 http://somebug.${TARGET_MOO}
278 """
279 return d.expand(s, varname)
280
281def expandKeys(alterdata, readdata = None):
282 if readdata == None:
283 readdata = alterdata
284
285 todolist = {}
286 for key in keys(alterdata):
287 if not '${' in key:
288 continue
289
290 ekey = expand(key, readdata)
291 if key == ekey:
292 continue
293 todolist[key] = ekey
294
295 # These two for loops are split for performance to maximise the
296 # usefulness of the expand cache
297
298 for key in todolist:
299 ekey = todolist[key]
300 renameVar(key, ekey, alterdata)
301
302def expandData(alterdata, readdata = None):
303 """For each variable in alterdata, expand it, and update the var contents.
304 Replacements use data from readdata.
305
306 Example:
307 >>> a=init()
308 >>> b=init()
309 >>> setVar("dlmsg", "dl_dir is ${DL_DIR}", a)
310 >>> setVar("DL_DIR", "/path/to/whatever", b)
311 >>> expandData(a, b)
312 >>> print getVar("dlmsg", a)
313 dl_dir is /path/to/whatever
314 """
315 if readdata == None:
316 readdata = alterdata
317
318 for key in keys(alterdata):
319 val = getVar(key, alterdata)
320 if type(val) is not types.StringType:
321 continue
322 expanded = expand(val, readdata)
323# print "key is %s, val is %s, expanded is %s" % (key, val, expanded)
324 if val != expanded:
325 setVar(key, expanded, alterdata)
326
327def inheritFromOS(d):
328 """Inherit variables from the environment."""
329 for s in os.environ.keys():
330 try:
331 setVar(s, os.environ[s], d)
332 setVarFlag(s, "export", True, d)
333 except TypeError:
334 pass
335
336def emit_var(var, o=sys.__stdout__, d = init(), all=False):
337 """Emit a variable to be sourced by a shell."""
338 if getVarFlag(var, "python", d):
339 return 0
340
341 export = getVarFlag(var, "export", d)
342 unexport = getVarFlag(var, "unexport", d)
343 func = getVarFlag(var, "func", d)
344 if not all and not export and not unexport and not func:
345 return 0
346
347 try:
348 if all:
349 oval = getVar(var, d, 0)
350 val = getVar(var, d, 1)
351 except KeyboardInterrupt:
352 raise
353 except:
354 excname = str(sys.exc_info()[0])
355 if excname == "bb.build.FuncFailed":
356 raise
357 o.write('# expansion of %s threw %s\n' % (var, excname))
358 return 0
359
360 if all:
361 o.write('# %s=%s\n' % (var, oval))
362
363 if type(val) is not types.StringType:
364 return 0
365
366 if (var.find("-") != -1 or var.find(".") != -1 or var.find('{') != -1 or var.find('}') != -1 or var.find('+') != -1) and not all:
367 return 0
368
369 varExpanded = expand(var, d)
370
371 if unexport:
372 o.write('unset %s\n' % varExpanded)
373 return 1
374
375 val.rstrip()
376 if not val:
377 return 0
378
379 if func:
380 # NOTE: should probably check for unbalanced {} within the var
381 o.write("%s() {\n%s\n}\n" % (varExpanded, val))
382 return 1
383
384 if export:
385 o.write('export ')
386
387 # if we're going to output this within doublequotes,
388 # to a shell, we need to escape the quotes in the var
389 alter = re.sub('"', '\\"', val.strip())
390 o.write('%s="%s"\n' % (varExpanded, alter))
391 return 1
392
393
394def emit_env(o=sys.__stdout__, d = init(), all=False):
395 """Emits all items in the data store in a format such that it can be sourced by a shell."""
396
397 env = keys(d)
398
399 for e in env:
400 if getVarFlag(e, "func", d):
401 continue
402 emit_var(e, o, d, all) and o.write('\n')
403
404 for e in env:
405 if not getVarFlag(e, "func", d):
406 continue
407 emit_var(e, o, d) and o.write('\n')
408
409def update_data(d):
410 """Modifies the environment vars according to local overrides and commands.
411 Examples:
412 Appending to a variable:
413 >>> d = init()
414 >>> setVar('TEST', 'this is a', d)
415 >>> setVar('TEST_append', ' test', d)
416 >>> setVar('TEST_append', ' of the emergency broadcast system.', d)
417 >>> update_data(d)
418 >>> print getVar('TEST', d)
419 this is a test of the emergency broadcast system.
420
421 Prepending to a variable:
422 >>> setVar('TEST', 'virtual/libc', d)
423 >>> setVar('TEST_prepend', 'virtual/tmake ', d)
424 >>> setVar('TEST_prepend', 'virtual/patcher ', d)
425 >>> update_data(d)
426 >>> print getVar('TEST', d)
427 virtual/patcher virtual/tmake virtual/libc
428
429 Overrides:
430 >>> setVar('TEST_arm', 'target', d)
431 >>> setVar('TEST_ramses', 'machine', d)
432 >>> setVar('TEST_local', 'local', d)
433 >>> setVar('OVERRIDES', 'arm', d)
434
435 >>> setVar('TEST', 'original', d)
436 >>> update_data(d)
437 >>> print getVar('TEST', d)
438 target
439
440 >>> setVar('OVERRIDES', 'arm:ramses:local', d)
441 >>> setVar('TEST', 'original', d)
442 >>> update_data(d)
443 >>> print getVar('TEST', d)
444 local
445
446 CopyMonster:
447 >>> e = d.createCopy()
448 >>> setVar('TEST_foo', 'foo', e)
449 >>> update_data(e)
450 >>> print getVar('TEST', e)
451 local
452
453 >>> setVar('OVERRIDES', 'arm:ramses:local:foo', e)
454 >>> update_data(e)
455 >>> print getVar('TEST', e)
456 foo
457
458 >>> f = d.createCopy()
459 >>> setVar('TEST_moo', 'something', f)
460 >>> setVar('OVERRIDES', 'moo:arm:ramses:local:foo', e)
461 >>> update_data(e)
462 >>> print getVar('TEST', e)
463 foo
464
465
466 >>> h = init()
467 >>> setVar('SRC_URI', 'file://append.foo;patch=1 ', h)
468 >>> g = h.createCopy()
469 >>> setVar('SRC_URI_append_arm', 'file://other.foo;patch=1', g)
470 >>> setVar('OVERRIDES', 'arm:moo', g)
471 >>> update_data(g)
472 >>> print getVar('SRC_URI', g)
473 file://append.foo;patch=1 file://other.foo;patch=1
474
475 """
476 bb.msg.debug(2, bb.msg.domain.Data, "update_data()")
477
478 # now ask the cookie monster for help
479 #print "Cookie Monster"
480 #print "Append/Prepend %s" % d._special_values
481 #print "Overrides %s" % d._seen_overrides
482
483 overrides = (getVar('OVERRIDES', d, 1) or "").split(':') or []
484
485 #
486 # Well let us see what breaks here. We used to iterate
487 # over each variable and apply the override and then
488 # do the line expanding.
489 # If we have bad luck - which we will have - the keys
490 # where in some order that is so important for this
491 # method which we don't have anymore.
492 # Anyway we will fix that and write test cases this
493 # time.
494
495 #
496 # First we apply all overrides
497 # Then we will handle _append and _prepend
498 #
499
500 for o in overrides:
501 # calculate '_'+override
502 l = len(o)+1
503
504 # see if one should even try
505 if not d._seen_overrides.has_key(o):
506 continue
507
508 vars = d._seen_overrides[o]
509 for var in vars:
510 name = var[:-l]
511 try:
512 d[name] = d[var]
513 except:
514 bb.msg.note(1, bb.msg.domain.Data, "Untracked delVar")
515
516 # now on to the appends and prepends
517 if d._special_values.has_key('_append'):
518 appends = d._special_values['_append'] or []
519 for append in appends:
520 for (a, o) in getVarFlag(append, '_append', d) or []:
521 # maybe the OVERRIDE was not yet added so keep the append
522 if (o and o in overrides) or not o:
523 delVarFlag(append, '_append', d)
524 if o and not o in overrides:
525 continue
526
527 sval = getVar(append,d) or ""
528 sval+=a
529 setVar(append, sval, d)
530
531
532 if d._special_values.has_key('_prepend'):
533 prepends = d._special_values['_prepend'] or []
534
535 for prepend in prepends:
536 for (a, o) in getVarFlag(prepend, '_prepend', d) or []:
537 # maybe the OVERRIDE was not yet added so keep the prepend
538 if (o and o in overrides) or not o:
539 delVarFlag(prepend, '_prepend', d)
540 if o and not o in overrides:
541 continue
542
543 sval = a + (getVar(prepend,d) or "")
544 setVar(prepend, sval, d)
545
546
547def inherits_class(klass, d):
548 val = getVar('__inherit_cache', d) or []
549 if os.path.join('classes', '%s.bbclass' % klass) in val:
550 return True
551 return False
552
553def _test():
554 """Start a doctest run on this module"""
555 import doctest
556 import bb
557 from bb import data
558 bb.msg.set_debug_level(0)
559 doctest.testmod(data)
560
561if __name__ == "__main__":
562 _test()
diff --git a/bitbake-dev/lib/bb/data_smart.py b/bitbake-dev/lib/bb/data_smart.py
deleted file mode 100644
index 988d5c3578..0000000000
--- a/bitbake-dev/lib/bb/data_smart.py
+++ /dev/null
@@ -1,289 +0,0 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake Smart Dictionary Implementation
5
6Functions for interacting with the data structure used by the
7BitBake build tools.
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12# Copyright (C) 2004, 2005 Seb Frankengul
13# Copyright (C) 2005, 2006 Holger Hans Peter Freyther
14# Copyright (C) 2005 Uli Luckas
15# Copyright (C) 2005 ROAD GmbH
16#
17# This program is free software; you can redistribute it and/or modify
18# it under the terms of the GNU General Public License version 2 as
19# published by the Free Software Foundation.
20#
21# This program is distributed in the hope that it will be useful,
22# but WITHOUT ANY WARRANTY; without even the implied warranty of
23# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
24# GNU General Public License for more details.
25#
26# You should have received a copy of the GNU General Public License along
27# with this program; if not, write to the Free Software Foundation, Inc.,
28# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
29# Based on functions from the base bb module, Copyright 2003 Holger Schurig
30
31import copy, os, re, sys, time, types
32import bb
33from bb import utils, methodpool
34from COW import COWDictBase
35from new import classobj
36
37
38__setvar_keyword__ = ["_append","_prepend"]
39__setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend)(_(?P<add>.*))?')
40__expand_var_regexp__ = re.compile(r"\${[^{}]+}")
41__expand_python_regexp__ = re.compile(r"\${@.+?}")
42
43
class DataSmart:
    """
    Copy-on-write variable store used by BitBake.

    Variables live in self.dict; createCopy() produces a child store whose
    dict chains back to the parent via the special "_data" key, and
    _makeShadowCopy() lazily copies an entry into the child before writing.
    _special_values tracks variables with _append/_prepend flags and
    _seen_overrides tracks variables named FOO_<override>, both shared with
    copies through COW dictionaries.
    """

    # NOTE(review): the default arguments are evaluated once at class
    # definition time, so all no-argument DataSmart() instances share the
    # same COW dicts — presumably intended (COW semantics), but confirm.
    def __init__(self, special = COWDictBase.copy(), seen = COWDictBase.copy() ):
        self.dict = {}

        # cookie monster tribute
        self._special_values = special
        self._seen_overrides = seen

        # varname -> fully expanded value; invalidated on every mutation
        self.expand_cache = {}

    def expand(self,s, varname):
        """
        Recursively expand ${VAR} and ${@python} references in s.
        varname is the name being expanded (used for the cache and for
        self-reference detection); non-string s is returned unchanged.
        """
        def var_sub(match):
            # replace ${KEY} with the expanded value of KEY, or leave the
            # reference untouched when KEY is unset
            key = match.group()[2:-1]
            if varname and key:
                if varname == key:
                    raise Exception("variable %s references itself!" % varname)
            var = self.getVar(key, 1)
            if var is not None:
                return var
            else:
                return match.group()

        def python_sub(match):
            # evaluate ${@code} as python with 'd' bound to this datastore;
            # NOTE(review): relies on the CPython locals() frame-dict trick
            # to make 'd' visible to eval — confirm on other interpreters
            import bb
            code = match.group()[3:-1]
            locals()['d'] = self
            s = eval(code)
            if type(s) == types.IntType: s = str(s)
            return s

        if type(s) is not types.StringType: # sanity check
            return s

        if varname and varname in self.expand_cache:
            return self.expand_cache[varname]

        # keep substituting until no '${' remains or a pass makes no progress
        while s.find('${') != -1:
            olds = s
            try:
                s = __expand_var_regexp__.sub(var_sub, s)
                s = __expand_python_regexp__.sub(python_sub, s)
                if s == olds: break
                if type(s) is not types.StringType: # sanity check
                    bb.msg.error(bb.msg.domain.Data, 'expansion of %s returned non-string %s' % (olds, s))
            except KeyboardInterrupt:
                raise
            except:
                bb.msg.note(1, bb.msg.domain.Data, "%s:%s while evaluating:\n%s" % (sys.exc_info()[0], sys.exc_info()[1], s))
                raise

        if varname:
            self.expand_cache[varname] = s

        return s

    def initVar(self, var):
        """Ensure var has a (possibly empty) local entry in this store."""
        self.expand_cache = {}
        if not var in self.dict:
            self.dict[var] = {}

    def _findVar(self,var):
        """Walk the _data chain towards the root looking for var's entry."""
        _dest = self.dict

        while (_dest and var not in _dest):
            if not "_data" in _dest:
                _dest = None
                break
            _dest = _dest["_data"]

        if _dest and var in _dest:
            return _dest[var]
        return None

    def _makeShadowCopy(self, var):
        """Copy var's entry from an ancestor store into this one so it can
        be modified without affecting the ancestor (copy-on-write)."""
        if var in self.dict:
            return

        local_var = self._findVar(var)

        if local_var:
            self.dict[var] = copy.copy(local_var)
        else:
            self.initVar(var)

    def setVar(self,var,value):
        """
        Set var to value.  Names matching FOO_append/FOO_prepend[_override]
        are recorded as flags on the base variable instead of stored
        directly; names containing '_' are remembered as potential
        override-specific values for update_data().
        """
        self.expand_cache = {}
        match = __setvar_regexp__.match(var)
        if match and match.group("keyword") in __setvar_keyword__:
            # this is an _append/_prepend: queue (value, override) on the base
            base = match.group('base')
            keyword = match.group("keyword")
            override = match.group('add')
            l = self.getVarFlag(base, keyword) or []
            l.append([value, override])
            self.setVarFlag(base, keyword, l)

            # todo make sure keyword is not __doc__ or __module__
            # pay the cookie monster
            try:
                self._special_values[keyword].add( base )
            except:
                self._special_values[keyword] = set()
                self._special_values[keyword].add( base )

            return

        if not var in self.dict:
            self._makeShadowCopy(var)

        # more cookies for the cookie monster
        if '_' in var:
            override = var[var.rfind('_')+1:]
            if not self._seen_overrides.has_key(override):
                self._seen_overrides[override] = set()
            self._seen_overrides[override].add( var )

        # setting var
        self.dict[var]["content"] = value

    def getVar(self,var,exp):
        """Return var's value; expand ${...} references when exp is true."""
        value = self.getVarFlag(var,"content")

        if exp and value:
            return self.expand(value,var)
        return value

    def renameVar(self, key, newkey):
        """
        Rename the variable key to newkey
        """
        val = self.getVar(key, 0)
        if val is not None:
            self.setVar(newkey, val)

        # carry pending _append/_prepend queues over to the new name
        for i in ('_append', '_prepend'):
            src = self.getVarFlag(key, i)
            if src is None:
                continue

            dest = self.getVarFlag(newkey, i) or []
            dest.extend(src)
            self.setVarFlag(newkey, i, dest)

            if self._special_values.has_key(i) and key in self._special_values[i]:
                self._special_values[i].remove(key)
                self._special_values[i].add(newkey)

        self.delVar(key)

    def delVar(self,var):
        """Delete var by shadowing it with an empty local entry."""
        self.expand_cache = {}
        self.dict[var] = {}

    def setVarFlag(self,var,flag,flagvalue):
        """Set a named flag on var (shadow-copying first if needed)."""
        if not var in self.dict:
            self._makeShadowCopy(var)
        self.dict[var][flag] = flagvalue

    def getVarFlag(self,var,flag):
        """Return a shallow copy of var's flag value, or None if unset."""
        local_var = self._findVar(var)
        if local_var:
            if flag in local_var:
                return copy.copy(local_var[flag])
        return None

    def delVarFlag(self,var,flag):
        """Remove a single flag from var, if present."""
        local_var = self._findVar(var)
        if not local_var:
            return
        if not var in self.dict:
            self._makeShadowCopy(var)

        if var in self.dict and flag in self.dict[var]:
            del self.dict[var][flag]

    def setVarFlags(self,var,flags):
        """Set several flags at once; the "content" key is skipped so the
        variable's value itself is never clobbered here."""
        if not var in self.dict:
            self._makeShadowCopy(var)

        for i in flags.keys():
            if i == "content":
                continue
            self.dict[var][i] = flags[i]

    def getVarFlags(self,var):
        """Return a dict of var's flags (excluding "content"), or None."""
        local_var = self._findVar(var)
        flags = {}

        if local_var:
            for i in local_var.keys():
                if i == "content":
                    continue
                flags[i] = local_var[i]

        if len(flags) == 0:
            return None
        return flags


    def delVarFlags(self,var):
        """Remove all flags from var while preserving its value, if any."""
        if not var in self.dict:
            self._makeShadowCopy(var)

        if var in self.dict:
            content = None

            # try to save the content
            if "content" in self.dict[var]:
                content = self.dict[var]["content"]
                self.dict[var] = {}
                self.dict[var]["content"] = content
            else:
                del self.dict[var]


    def createCopy(self):
        """
        Create a copy of self by setting _data to self
        """
        # we really want this to be a DataSmart...
        data = DataSmart(seen=self._seen_overrides.copy(), special=self._special_values.copy())
        data.dict["_data"] = self.dict

        return data

    # Dictionary Methods
    def keys(self):
        """Return all variable names visible in this store, including those
        inherited through the _data chain."""
        def _keys(d, mykey):
            if "_data" in d:
                _keys(d["_data"],mykey)

            for key in d.keys():
                if key != "_data":
                    mykey[key] = None
        keytab = {}
        _keys(self.dict,keytab)
        return keytab.keys()

    def __getitem__(self,item):
        #print "Warning deprecated"
        return self.getVar(item, False)

    def __setitem__(self,var,data):
        #print "Warning deprecated"
        self.setVar(var,data)
289
diff --git a/bitbake-dev/lib/bb/event.py b/bitbake-dev/lib/bb/event.py
deleted file mode 100644
index 7251d78715..0000000000
--- a/bitbake-dev/lib/bb/event.py
+++ /dev/null
@@ -1,275 +0,0 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Event' implementation
5
6Classes and functions for manipulating 'events' in the
7BitBake build tools.
8"""
9
10# Copyright (C) 2003, 2004 Chris Larson
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24
25import os, re
26import bb.utils
27import pickle
28
29# This is the pid for which we should generate the event. This is set when
30# the runqueue forks off.
31worker_pid = 0
32worker_pipe = None
33
class Event:
    """Base class for events"""

    def __init__(self):
        # Record which worker process generated this event; worker_pid is 0
        # when running in the server itself.
        self.pid = worker_pid
39
# Handler return codes
NotHandled = 0
Handled = 1

# register() return codes
Registered = 10
AlreadyRegistered = 14

# Internal
_handlers = {}        # name -> handler (callable or compiled code object)
_ui_handlers = {}     # sequence number -> UI handler proxy
_ui_handler_seq = 0   # last sequence number handed out by register_UIHhandler()
50
def fire(event, d):
    """Fire off an Event"""

    # In a worker process, forward the event to the server instead of
    # dispatching it locally (see worker_fire/fire_from_worker).
    if worker_pid != 0:
        worker_fire(event, d)
        return

    for handler in _handlers:
        h = _handlers[handler]
        event.data = d
        if type(h).__name__ == "code":
            # Handlers registered as strings were compiled into a code
            # object that defines tmpHandler(); exec brings it into scope.
            exec(h)
            tmpHandler(event)
        else:
            h(event)
        del event.data

    errors = []
    for h in _ui_handlers:
        #print "Sending event %s" % event
        try:
            # We use pickle here since it better handles object instances
            # which xmlrpc's marshaller does not. Events *must* be serializable
            # by pickle.
            _ui_handlers[h].event.send((pickle.dumps(event)))
        except:
            # A UI handler that fails to receive is dropped below.
            errors.append(h)
    for h in errors:
        del _ui_handlers[h]
def worker_fire(event, d):
    """Pickle an event and send it to the server over the worker pipe."""
    data = "<event>" + pickle.dumps(event) + "</event>"
    if os.write(worker_pipe, data) != len (data):
        print "Error sending event to server (short write)"
85
def fire_from_worker(event, d):
    """Unpack a framed "<event>...</event>" pickle from a worker and re-fire it."""
    if not event.startswith("<event>") or not event.endswith("</event>"):
        print "Error, not an event"
        return
    # strip the 7-byte "<event>" prefix and 8-byte "</event>" suffix
    event = pickle.loads(event[7:-8])
    bb.event.fire(event, d)
92
def register(name, handler):
    """Register an Event handler"""

    if name in _handlers:
        # a handler with this name already exists
        return AlreadyRegistered

    if handler is not None:
        if type(handler).__name__ == "str":
            # handler is a string of python code: wrap it in a function
            # definition and compile it for later exec in fire()
            source = "def tmpHandler(e):\n%s" % handler
            _handlers[name] = bb.utils.better_compile(source, "tmpHandler(e)", "bb.event._registerCode")
        else:
            _handlers[name] = handler

    return Registered
110
def remove(name, handler):
    """Remove an Event handler"""
    # the handler argument is unused; kept for API symmetry with register()
    del _handlers[name]
114
def register_UIHhandler(handler):
    """Register a UI event handler and return its handle for unregistering."""
    # Assign through the module attribute so the module-level counter is
    # rebound (a bare assignment here would create a local instead).
    bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1
    _ui_handlers[_ui_handler_seq] = handler
    return _ui_handler_seq
119
def unregister_UIHhandler(handlerNum):
    """Remove the UI handler registered under handlerNum, if any."""
    _ui_handlers.pop(handlerNum, None)
124
def getName(e):
    """Returns the name of a class or class instance"""
    name = getattr(e, "__name__", None)
    if name is not None:
        return name
    # plain instances have no __name__, so report their class's name
    return e.__class__.__name__
131
class ConfigParsed(Event):
    """Configuration Parsing Complete"""
    # Marker event: carries no payload beyond the Event base fields.
134
class RecipeParsed(Event):
    """ Recipe Parsing Complete """

    def __init__(self, fn):
        # fn: filename of the recipe that finished parsing
        self.fn = fn
        Event.__init__(self)
141
class StampUpdate(Event):
    """Trigger for any adjustment of the stamp files to happen"""

    def __init__(self, targets, stampfns):
        # targets: the build targets the adjustment applies to
        # stampfns: the corresponding stamp filename prefixes
        self._targets = targets
        self._stampfns = stampfns
        Event.__init__(self)

    def getStampPrefix(self):
        """Return the stamp filename prefixes."""
        return self._stampfns

    def getTargets(self):
        """Return the targets this event refers to."""
        return self._targets

    stampPrefix = property(getStampPrefix)
    targets = property(getTargets)
158
class BuildBase(Event):
    """Base class for bbmake run events"""

    def __init__(self, n, p, failures = 0):
        # n: build name, p: list of packages, failures: failed package count
        self._name = n
        self._pkgs = p
        Event.__init__(self)
        self._failures = failures

    def getPkgs(self):
        return self._pkgs

    def setPkgs(self, pkgs):
        self._pkgs = pkgs

    def getName(self):
        return self._name

    def setName(self, name):
        self._name = name

    def getCfg(self):
        # the configuration is carried in Event's data attribute
        return self.data

    def setCfg(self, cfg):
        self.data = cfg

    def getFailures(self):
        """
        Return the number of failed packages
        """
        return self._failures

    pkgs = property(getPkgs, setPkgs, None, "pkgs property")
    name = property(getName, setName, None, "name property")
    cfg = property(getCfg, setCfg, None, "cfg property")
195
196
197
198
199
class BuildStarted(BuildBase):
    """bbmake build run started"""
    # Marker subclass: inherits name/pkgs/failures from BuildBase.
202
203
class BuildCompleted(BuildBase):
    """bbmake build run completed"""
    # Marker subclass: inherits name/pkgs/failures from BuildBase.
206
207
208
209
class NoProvider(Event):
    """No Provider for an Event"""

    def __init__(self, item, runtime=False):
        # item: the target with no provider
        # runtime: True if this was a runtime (RDEPENDS-style) target
        Event.__init__(self)
        self._item = item
        self._runtime = runtime

    def getItem(self):
        """Return the unprovided target name."""
        return self._item

    def isRuntime(self):
        """Return True if this concerns a runtime target."""
        return self._runtime
223
class MultipleProviders(Event):
    """Multiple Providers"""

    def __init__(self, item, candidates, runtime = False):
        # item: the target with several providers
        # candidates: the competing provider names
        # runtime: True if this was a runtime (RDEPENDS-style) target
        Event.__init__(self)
        self._item = item
        self._candidates = candidates
        self._is_runtime = runtime

    def isRuntime(self):
        """
        Is this a runtime issue?
        """
        return self._is_runtime

    def getItem(self):
        """
        The name of the to-be-built item
        """
        return self._item

    def getCandidates(self):
        """
        Get the possible Candidates for a PROVIDER.
        """
        return self._candidates
250
class ParseProgress(Event):
    """
    Parsing Progress Event
    """

    def __init__(self, cached, parsed, skipped, masked, virtuals, errors, total):
        Event.__init__(self)
        self.cached = cached
        self.parsed = parsed
        self.skipped = skipped
        self.virtuals = virtuals
        self.masked = masked
        self.errors = errors
        # progress so far counts both cache hits and freshly parsed recipes
        self.sofar = cached + parsed
        self.total = total
266
class DepTreeGenerated(Event):
    """
    Event when a dependency tree has been generated
    """

    def __init__(self, depgraph):
        # depgraph: the generated dependency graph structure
        Event.__init__(self)
        self._depgraph = depgraph
275
diff --git a/bitbake-dev/lib/bb/fetch/__init__.py b/bitbake-dev/lib/bb/fetch/__init__.py
deleted file mode 100644
index ab4658bc3b..0000000000
--- a/bitbake-dev/lib/bb/fetch/__init__.py
+++ /dev/null
@@ -1,640 +0,0 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8"""
9
10# Copyright (C) 2003, 2004 Chris Larson
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24#
25# Based on functions from the base bb module, Copyright 2003 Holger Schurig
26
27import os, re
28import bb
29from bb import data
30from bb import persist_data
31
# Fetcher exception hierarchy: all derive directly from Exception.
class FetchError(Exception):
    """Exception raised when a download fails"""

class NoMethodError(Exception):
    """Exception raised when there is no method to obtain a supplied url or set of urls"""

class MissingParameterError(Exception):
    """Exception raised when a fetch method is missing a critical parameter in the url"""

class ParameterError(Exception):
    """Exception raised when a url cannot be processed due to invalid parameters."""

class MD5SumError(Exception):
    """Exception raised when a MD5SUM of a file does not match the expected one"""

class InvalidSRCREV(Exception):
    """Exception raised when an invalid SRCREV is encountered"""
49
def uri_replace(uri, uri_find, uri_replace, d):
    """
    Rewrite uri according to the (uri_find, uri_replace) pattern pair.

    Each decoded URI component of uri_find is treated as a regexp; when a
    component matches, the corresponding component of uri_replace is
    substituted in.  If any component fails to match, or any argument is
    undefined, the original uri is returned unchanged.
    """
#   bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: operating on %s" % uri)
    if not uri or not uri_find or not uri_replace:
        bb.msg.debug(1, bb.msg.domain.Fetcher, "uri_replace: passed an undefined value, not replacing")
        # Bug fix: previously fell through and crashed decoding None;
        # "not replacing" means returning the input untouched.
        return uri
    import types
    uri_decoded = list(bb.decodeurl(uri))
    uri_find_decoded = list(bb.decodeurl(uri_find))
    uri_replace_decoded = list(bb.decodeurl(uri_replace))
    result_decoded = ['','','','','',{}]
    for i in uri_find_decoded:
        loc = uri_find_decoded.index(i)
        result_decoded[loc] = uri_decoded[loc]
        if type(i) == types.StringType:
            if (re.match(i, uri_decoded[loc])):
                result_decoded[loc] = re.sub(i, uri_replace_decoded[loc], uri_decoded[loc])
                if uri_find_decoded.index(i) == 2:
                    # component 2 is the path: substitute the local filename
                    # when one is known for this uri
                    if d:
                        localfn = bb.fetch.localpath(uri, d)
                        if localfn:
                            result_decoded[loc] = os.path.dirname(result_decoded[loc]) + "/" + os.path.basename(bb.fetch.localpath(uri, d))
#                       bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: matching %s against %s and replacing with %s" % (i, uri_decoded[loc], uri_replace_decoded[loc]))
            else:
#               bb.msg.note(1, bb.msg.domain.Fetcher, "uri_replace: no match")
                return uri
#           else:
#               for j in i.keys():
# FIXME: apply replacements against options
    return bb.encodeurl(result_decoded)
78
# Registry of fetcher instances; populated at the bottom of this module.
methods = []
# Per-recipe cache of {url: FetchData}, keyed by the recipe FILE; see init().
urldata_cache = {}
# Snapshot of the persisted BB_URI_HEADREVS taken by fetcher_init() under the
# "clear" policy; compared against current values by fetcher_compare_revisons().
saved_headrevs = {}
82
def fetcher_init(d):
    """
    Called to initialize the fetchers once the configuration data is known
    Calls before this must not hit the cache.
    """
    pd = persist_data.PersistData(d)
    # When to drop SCM head revisions, controlled by user policy
    srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"
    if srcrev_policy == "cache":
        bb.msg.debug(1, bb.msg.domain.Fetcher, "Keeping SRCREV cache due to cache policy of: %s" % srcrev_policy)
    elif srcrev_policy == "clear":
        bb.msg.debug(1, bb.msg.domain.Fetcher, "Clearing SRCREV cache due to cache policy of: %s" % srcrev_policy)
        # Save the old revisions (for fetcher_compare_revisons) before
        # dropping the persisted domain; best-effort only.
        try:
            bb.fetch.saved_headrevs = pd.getKeyValues("BB_URI_HEADREVS")
        except:
            pass
        pd.delDomain("BB_URI_HEADREVS")
    else:
        bb.msg.fatal(bb.msg.domain.Fetcher, "Invalid SRCREV cache policy of: %s" % srcrev_policy)

    # Give each fetcher implementation a chance to initialize itself
    for m in methods:
        if hasattr(m, "init"):
            m.init(d)

    # Make sure our domains exist
    pd.addDomain("BB_URI_HEADREVS")
    pd.addDomain("BB_URI_LOCALCOUNT")
110
def fetcher_compare_revisons(d):
    """
    Compare the revisions in the persistent cache with the current values
    and return True as soon as one differs (or is new), False if none do.
    (Name misspelling is preserved: it is part of the public API.)
    """

    pd = persist_data.PersistData(d)
    data = pd.getKeyValues("BB_URI_HEADREVS")
    data2 = bb.fetch.saved_headrevs

    # Removed the dead 'changed' local: the function returns directly from
    # inside the loop on the first difference found.
    for key in data:
        if key not in data2 or data2[key] != data[key]:
            bb.msg.debug(1, bb.msg.domain.Fetcher, "%s changed" % key)
            return True
        else:
            bb.msg.debug(2, bb.msg.domain.Fetcher, "%s did not change" % key)
    return False
130
131# Function call order is usually:
132# 1. init
133# 2. go
134# 3. localpaths
135# localpath can be called at any time
136
def init(urls, d, setup = True):
    """
    Build (or extend) the per-recipe {url: FetchData} map for urls.
    When setup is true, also resolve each entry's local path.  The map is
    cached per recipe FILE so repeated calls accumulate urls.
    """
    urldata = {}
    fn = bb.data.getVar('FILE', d, 1)
    if fn in urldata_cache:
        urldata = urldata_cache[fn]

    for url in urls:
        if url not in urldata:
            urldata[url] = FetchData(url, d)

    if setup:
        # resolve local paths lazily, only for entries not yet set up
        for url in urldata:
            if not urldata[url].setup:
                urldata[url].setup_localpath(d)

    urldata_cache[fn] = urldata
    return urldata
154
def go(d, urls = None):
    """
    Fetch all urls
    init must have previously been called
    """
    if not urls:
        urls = d.getVar("SRC_URI", 1).split()
    urldata = init(urls, d, True)

    for u in urls:
        ud = urldata[u]
        m = ud.method
        if ud.localfile:
            # cheap check before taking the lock
            if not m.forcefetch(u, ud, d) and os.path.exists(ud.md5):
                # File already present along with md5 stamp file
                # Touch md5 file to show activity
                try:
                    os.utime(ud.md5, None)
                except:
                    # Errors aren't fatal here
                    pass
                continue
            lf = bb.utils.lockfile(ud.lockfile)
            # re-check under the lock
            if not m.forcefetch(u, ud, d) and os.path.exists(ud.md5):
                # If someone else fetched this before we got the lock,
                # notice and don't try again
                try:
                    os.utime(ud.md5, None)
                except:
                    # Errors aren't fatal here
                    pass
                bb.utils.unlockfile(lf)
                continue
        m.go(u, ud, d)
        if ud.localfile:
            # write the md5 stamp (which also verifies any expected md5sum)
            # and release the lock taken above
            if not m.forcefetch(u, ud, d):
                Fetch.write_md5sum(u, ud, d)
            bb.utils.unlockfile(lf)
193
194
def checkstatus(d):
    """
    Check that every cached url exists upstream.
    init must have previously been called; the build is aborted on the
    first url whose status check fails.
    """
    urldata = init([], d, True)

    for u in urldata:
        ud = urldata[u]
        bb.msg.note(1, bb.msg.domain.Fetcher, "Testing URL %s" % u)
        if not ud.method.checkstatus(u, ud, d):
            bb.msg.fatal(bb.msg.domain.Fetcher, "URL %s doesn't work" % u)
209
def localpaths(d):
    """
    Return a list of the local filenames, assuming successful fetch
    """
    urldata = init([], d, True)
    return [urldata[u].localpath for u in urldata]
222
# Re-entrancy flag: set while a fetcher's localpath() is being evaluated so
# a recursive call into get_srcrev() can be detected (see get_srcrev below).
srcrev_internal_call = False
224
def get_srcrev(d):
    """
    Return the version string for the current package
    (usually to be used as PV)
    Most packages usually only have one SCM so we just pass on the call.
    In the multi SCM case, we build a value based on SRCREV_FORMAT which must
    have been set.
    """

    #
    # Ugly code alert. localpath in the fetchers will try to evaluate SRCREV which
    # could translate into a call to here. If it does, we need to catch this
    # and provide some way so it knows get_srcrev is active instead of being
    # some number etc. hence the srcrev_internal_call tracking and the magic
    # "SRCREVINACTION" return value.
    #
    # Neater solutions welcome!
    #
    if bb.fetch.srcrev_internal_call:
        return "SRCREVINACTION"

    scms = []

    # Only call setup_localpath on URIs which suppports_srcrev()
    urldata = init(bb.data.getVar('SRC_URI', d, 1).split(), d, False)
    for u in urldata:
        ud = urldata[u]
        if ud.method.suppports_srcrev():
            if not ud.setup:
                ud.setup_localpath(d)
            scms.append(u)

    if len(scms) == 0:
        bb.msg.error(bb.msg.domain.Fetcher, "SRCREV was used yet no valid SCM was found in SRC_URI")
        raise ParameterError

    # SRCREV-derived values are build-specific; keep this recipe out of the cache
    bb.data.setVar('__BB_DONT_CACHE','1', d)

    if len(scms) == 1:
        return urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d)

    #
    # Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
    #
    format = bb.data.getVar('SRCREV_FORMAT', d, 1)
    if not format:
        bb.msg.error(bb.msg.domain.Fetcher, "The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
        raise ParameterError

    # substitute each named SCM's revision into the format string
    for scm in scms:
        if 'name' in urldata[scm].parm:
            name = urldata[scm].parm["name"]
            rev = urldata[scm].method.sortable_revision(scm, urldata[scm], d)
            format = format.replace(name, rev)

    return format
281
def localpath(url, d, cache = True):
    """
    Return the local path for url, or url itself when no fetch method
    handles it.

    Called from the parser with cache=False since the cache isn't ready
    at that point. Also called from classes in OE e.g. patch.bbclass
    """
    urldata = init([url], d)
    if not urldata[url].method:
        return url
    return urldata[url].localpath
291
def runfetchcmd(cmd, d, quiet = False):
    """
    Run cmd returning the command output
    Raise an error if interrupted or cmd fails
    Optionally echo command output to stdout
    """

    # Need to export PATH as binary could be in metadata paths
    # rather than host provided
    # Also include some other variables.
    # FIXME: Should really include all export varaiables?
    exportvars = ['PATH', 'GIT_PROXY_COMMAND', 'GIT_PROXY_HOST', 'GIT_PROXY_PORT', 'GIT_CONFIG', 'http_proxy', 'ftp_proxy', 'SSH_AUTH_SOCK', 'SSH_AGENT_PID', 'HOME']

    for var in exportvars:
        val = data.getVar(var, d, True)
        if val:
            cmd = 'export ' + var + '=%s; %s' % (val, cmd)

    bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cmd)

    # redirect stderr to stdout
    stdout_handle = os.popen(cmd + " 2>&1", "r")
    output = ""

    # stream the output line by line so it can be echoed while accumulating
    while 1:
        line = stdout_handle.readline()
        if not line:
            break
        if not quiet:
            print line,
        output += line

    # close() returns the wait() status, or None on success (hence "or 0")
    status = stdout_handle.close() or 0
    # NOTE(review): this decoding looks inverted — for a wait() status the
    # exit code is in the high byte and the signal in the low byte, so
    # 'signal' actually holds the exit code here. Any nonzero status still
    # raises FetchError either way; verify before relying on the message.
    signal = status >> 8
    exitstatus = status & 0xff

    if signal:
        raise FetchError("Fetch command %s failed with signal %s, output:\n%s" % (cmd, signal, output))
    elif status != 0:
        raise FetchError("Fetch command %s failed with exit code %s, output:\n%s" % (cmd, status, output))

    return output
334
class FetchData(object):
    """
    A class which represents the fetcher state for a given URI.
    """
    def __init__(self, url, d):
        # decode the (possibly variable-containing) url into its components
        self.localfile = ""
        (self.type, self.host, self.path, self.user, self.pswd, self.parm) = bb.decodeurl(data.expand(url, d))
        self.date = Fetch.getSRCDate(self, d)
        self.url = url
        # credentials may also be supplied as url parameters
        if not self.user and "user" in self.parm:
            self.user = self.parm["user"]
        if not self.pswd and "pswd" in self.parm:
            self.pswd = self.parm["pswd"]
        # setup becomes True once setup_localpath() has run
        self.setup = False
        # pick the first registered fetcher that supports this url
        for m in methods:
            if m.supports(url, self, d):
                self.method = m
                return
        raise NoMethodError("Missing implementation for url %s" % url)

    def setup_localpath(self, d):
        """Resolve and record the local download path plus the derived
        md5-stamp and lock filenames."""
        self.setup = True
        if "localpath" in self.parm:
            # if user sets localpath for file, use it instead.
            self.localpath = self.parm["localpath"]
        else:
            try:
                # flag re-entrancy so get_srcrev() knows it is being called
                # from within localpath evaluation
                bb.fetch.srcrev_internal_call = True
                self.localpath = self.method.localpath(self.url, self, d)
            finally:
                bb.fetch.srcrev_internal_call = False
            # We have to clear data's internal caches since the cached value of SRCREV is now wrong.
            # Horrible...
            bb.data.delVar("ISHOULDNEVEREXIST", d)
        self.md5 = self.localpath + '.md5'
        self.lockfile = self.localpath + '.lock'
371
372
class Fetch(object):
    """Base class for 'fetch'ing data"""

    def __init__(self, urls = None):
        # Bug fix: the urls argument was previously discarded
        # (self.urls = []). Store it; None (the default) means no urls.
        self.urls = urls or []

    def supports(self, url, urldata, d):
        """
        Check to see if this fetch class supports a given url.
        Returns 0 here; subclasses override with a real test.
        """
        return 0

    def localpath(self, url, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        Can also setup variables in urldata for use in go (saving code duplication
        and duplicate code execution)
        """
        return url

    def setUrls(self, urls):
        self.__urls = urls

    def getUrls(self):
        return self.__urls

    urls = property(getUrls, setUrls, None, "Urls property")

    def forcefetch(self, url, urldata, d):
        """
        Force a fetch, even if localpath exists?
        """
        return False

    def suppports_srcrev(self):
        """
        The fetcher supports auto source revisions (SRCREV)
        (the misspelt name is part of the established API; callers use it)
        """
        return False

    def go(self, url, urldata, d):
        """
        Fetch urls
        Assumes localpath was called first
        """
        raise NoMethodError("Missing implementation for url")

    def checkstatus(self, url, urldata, d):
        """
        Check the status of a URL
        Assumes localpath was called first
        """
        bb.msg.note(1, bb.msg.domain.Fetcher, "URL %s could not be checked for status since no method exists." % url)
        return True

    def getSRCDate(urldata, d):
        """
        Return the SRC Date for the component

        d the bb.data module
        """
        if "srcdate" in urldata.parm:
            return urldata.parm['srcdate']

        pn = data.getVar("PN", d, 1)

        # per-package SRCDATE/CVSDATE take precedence over the global ones
        if pn:
            return data.getVar("SRCDATE_%s" % pn, d, 1) or data.getVar("CVSDATE_%s" % pn, d, 1) or data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)

        return data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)
    getSRCDate = staticmethod(getSRCDate)

    def srcrev_internal_helper(ud, d):
        """
        Return:
        a) a source revision if specified
        b) True if auto srcrev is in action
        c) False otherwise
        """

        if 'rev' in ud.parm:
            return ud.parm['rev']

        if 'tag' in ud.parm:
            return ud.parm['tag']

        rev = None
        if 'name' in ud.parm:
            # SRCREV_pn-<PN>_<name> overrides plain SRCREV for named SCMs
            pn = data.getVar("PN", d, 1)
            rev = data.getVar("SRCREV_pn-" + pn + "_" + ud.parm['name'], d, 1)
        if not rev:
            rev = data.getVar("SRCREV", d, 1)
        if rev == "INVALID":
            raise InvalidSRCREV("Please set SRCREV to a valid value")
        if not rev:
            return False
        # Bug fix: compare with '==' rather than 'is' — the marker string
        # has been through the datastore, so identity with the literal is
        # not guaranteed.
        if rev == "SRCREVINACTION":
            return True
        return rev

    srcrev_internal_helper = staticmethod(srcrev_internal_helper)

    def localcount_internal_helper(ud, d):
        """
        Return:
        a) a locked localcount if specified
        b) None otherwise
        """

        localcount= None
        if 'name' in ud.parm:
            # named SCMs may carry their own LOCALCOUNT
            pn = data.getVar("PN", d, 1)
            localcount = data.getVar("LOCALCOUNT_" + ud.parm['name'], d, 1)
        if not localcount:
            localcount = data.getVar("LOCALCOUNT", d, 1)
        return localcount

    localcount_internal_helper = staticmethod(localcount_internal_helper)

    def try_mirror(d, tarfn):
        """
        Try to use a mirrored version of the sources. We do this
        to avoid massive loads on foreign cvs and svn servers.
        This method will be used by the different fetcher
        implementations.

        d Is a bb.data instance
        tarfn is the name of the tarball
        """
        tarpath = os.path.join(data.getVar("DL_DIR", d, 1), tarfn)
        if os.access(tarpath, os.R_OK):
            bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists, skipping checkout." % tarfn)
            return True

        pn = data.getVar('PN', d, True)
        src_tarball_stash = None
        if pn:
            src_tarball_stash = (data.getVar('SRC_TARBALL_STASH_%s' % pn, d, True) or data.getVar('CVS_TARBALL_STASH_%s' % pn, d, True) or data.getVar('SRC_TARBALL_STASH', d, True) or data.getVar('CVS_TARBALL_STASH', d, True) or "").split()

        ld = d.createCopy()
        # try each stash location in turn until one fetch succeeds
        for stash in src_tarball_stash:
            url = stash + tarfn
            try:
                ud = FetchData(url, ld)
            except bb.fetch.NoMethodError:
                bb.msg.debug(1, bb.msg.domain.Fetcher, "No method for %s" % url)
                continue

            ud.setup_localpath(ld)

            try:
                ud.method.go(url, ud, ld)
                return True
            except (bb.fetch.MissingParameterError,
                    bb.fetch.FetchError,
                    bb.fetch.MD5SumError):
                import sys
                (type, value, traceback) = sys.exc_info()
                bb.msg.debug(2, bb.msg.domain.Fetcher, "Tarball stash fetch failure: %s" % value)
        return False
    try_mirror = staticmethod(try_mirror)

    def verify_md5sum(ud, got_sum):
        """
        Verify the md5sum we wanted with the one we got
        """
        wanted_sum = None
        if 'md5sum' in ud.parm:
            wanted_sum = ud.parm['md5sum']
        if not wanted_sum:
            # no expected checksum supplied: accept anything
            return True

        return wanted_sum == got_sum
    verify_md5sum = staticmethod(verify_md5sum)

    def write_md5sum(url, ud, d):
        """Write the md5 stamp file for ud.localpath, verifying any
        expected md5sum first (raises MD5SumError on mismatch)."""
        md5data = bb.utils.md5_file(ud.localpath)
        # verify the md5sum
        if not Fetch.verify_md5sum(ud, md5data):
            raise MD5SumError(url)

        md5out = file(ud.md5, 'w')
        md5out.write(md5data)
        md5out.close()
    write_md5sum = staticmethod(write_md5sum)

    def latest_revision(self, url, ud, d):
        """
        Look in the cache for the latest revision, if not present ask the SCM.
        """
        if not hasattr(self, "_latest_revision"):
            raise ParameterError

        pd = persist_data.PersistData(d)
        key = self.generate_revision_key(url, ud, d)
        rev = pd.getValue("BB_URI_HEADREVS", key)
        if rev != None:
            return str(rev)

        rev = self._latest_revision(url, ud, d)
        pd.setValue("BB_URI_HEADREVS", key, rev)
        return rev

    def sortable_revision(self, url, ud, d):
        """
        Return a monotonically sortable "<count>+<rev>" string for the SCM's
        current revision, bumping a persisted per-url counter whenever the
        revision changes (unless a fetcher-provided build index or a
        BB_LOCALCOUNT_OVERRIDE value supplies the count).
        """
        if hasattr(self, "_sortable_revision"):
            return self._sortable_revision(url, ud, d)

        pd = persist_data.PersistData(d)
        key = self.generate_revision_key(url, ud, d)

        latest_rev = self._build_revision(url, ud, d)
        last_rev = pd.getValue("BB_URI_LOCALCOUNT", key + "_rev")
        uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
        count = None
        if uselocalcount:
            count = Fetch.localcount_internal_helper(ud, d)
        if count is None:
            count = pd.getValue("BB_URI_LOCALCOUNT", key + "_count")

        # NOTE(review): when the revision is unchanged, count is assumed to
        # have been persisted by the earlier run that stored _rev — confirm
        # it cannot be None here.
        if last_rev == latest_rev:
            return str(count + "+" + latest_rev)

        buildindex_provided = hasattr(self, "_sortable_buildindex")
        if buildindex_provided:
            count = self._sortable_buildindex(url, ud, d, latest_rev)

        if count is None:
            count = "0"
        elif uselocalcount or buildindex_provided:
            count = str(count)
        else:
            count = str(int(count) + 1)

        pd.setValue("BB_URI_LOCALCOUNT", key + "_rev", latest_rev)
        pd.setValue("BB_URI_LOCALCOUNT", key + "_count", count)

        return str(count + "+" + latest_rev)

    def generate_revision_key(self, url, ud, d):
        """Build the persistent-cache key for this url: fetcher key + PN."""
        key = self._revision_key(url, ud, d)
        return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "")
617
# Import the concrete fetcher implementations and register one instance of
# each. Registration order matters: FetchData uses the first fetcher whose
# supports() accepts a given url.
import cvs
import git
import local
import svn
import wget
import svk
import ssh
import perforce
import bzr
import hg
import osc

methods.append(local.Local())
methods.append(wget.Wget())
methods.append(svn.Svn())
methods.append(git.Git())
methods.append(cvs.Cvs())
methods.append(svk.Svk())
methods.append(ssh.SSH())
methods.append(perforce.Perforce())
methods.append(bzr.Bzr())
methods.append(hg.Hg())
methods.append(osc.Osc())
diff --git a/bitbake-dev/lib/bb/fetch/bzr.py b/bitbake-dev/lib/bb/fetch/bzr.py
deleted file mode 100644
index b27fb63d07..0000000000
--- a/bitbake-dev/lib/bb/fetch/bzr.py
+++ /dev/null
@@ -1,153 +0,0 @@
1"""
2BitBake 'Fetch' implementation for bzr.
3
4"""
5
6# Copyright (C) 2007 Ross Burton
7# Copyright (C) 2007 Richard Purdie
8#
9# Classes for obtaining upstream sources for the
10# BitBake build tools.
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25
26import os
27import sys
28import bb
29from bb import data
30from bb.fetch import Fetch
31from bb.fetch import FetchError
32from bb.fetch import runfetchcmd
33
34class Bzr(Fetch):
35 def supports(self, url, ud, d):
36 return ud.type in ['bzr']
37
38 def localpath (self, url, ud, d):
39
40 # Create paths to bzr checkouts
41 relpath = ud.path
42 if relpath.startswith('/'):
43 # Remove leading slash as os.path.join can't cope
44 relpath = relpath[1:]
45 ud.pkgdir = os.path.join(data.expand('${BZRDIR}', d), ud.host, relpath)
46
47 revision = Fetch.srcrev_internal_helper(ud, d)
48 if revision is True:
49 ud.revision = self.latest_revision(url, ud, d)
50 elif revision:
51 ud.revision = revision
52
53 if not ud.revision:
54 ud.revision = self.latest_revision(url, ud, d)
55
56 ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision), d)
57
58 return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
59
60 def _buildbzrcommand(self, ud, d, command):
61 """
62 Build up an bzr commandline based on ud
63 command is "fetch", "update", "revno"
64 """
65
66 basecmd = data.expand('${FETCHCMD_bzr}', d)
67
68 proto = "http"
69 if "proto" in ud.parm:
70 proto = ud.parm["proto"]
71
72 bzrroot = ud.host + ud.path
73
74 options = []
75
76 if command is "revno":
77 bzrcmd = "%s revno %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
78 else:
79 if ud.revision:
80 options.append("-r %s" % ud.revision)
81
82 if command is "fetch":
83 bzrcmd = "%s co %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
84 elif command is "update":
85 bzrcmd = "%s pull %s --overwrite" % (basecmd, " ".join(options))
86 else:
87 raise FetchError("Invalid bzr command %s" % command)
88
89 return bzrcmd
90
91 def go(self, loc, ud, d):
92 """Fetch url"""
93
94 # try to use the tarball stash
95 if Fetch.try_mirror(d, ud.localfile):
96 bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping bzr checkout." % ud.localpath)
97 return
98
99 if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
100 bzrcmd = self._buildbzrcommand(ud, d, "update")
101 bb.msg.debug(1, bb.msg.domain.Fetcher, "BZR Update %s" % loc)
102 os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path)))
103 runfetchcmd(bzrcmd, d)
104 else:
105 os.system("rm -rf %s" % os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)))
106 bzrcmd = self._buildbzrcommand(ud, d, "fetch")
107 bb.msg.debug(1, bb.msg.domain.Fetcher, "BZR Checkout %s" % loc)
108 bb.mkdirhier(ud.pkgdir)
109 os.chdir(ud.pkgdir)
110 bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % bzrcmd)
111 runfetchcmd(bzrcmd, d)
112
113 os.chdir(ud.pkgdir)
114 # tar them up to a defined filename
115 try:
116 runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.pkgdir)), d)
117 except:
118 t, v, tb = sys.exc_info()
119 try:
120 os.unlink(ud.localpath)
121 except OSError:
122 pass
123 raise t, v, tb
124
125 def suppports_srcrev(self):
126 return True
127
128 def _revision_key(self, url, ud, d):
129 """
130 Return a unique key for the url
131 """
132 return "bzr:" + ud.pkgdir
133
134 def _latest_revision(self, url, ud, d):
135 """
136 Return the latest upstream revision number
137 """
138 bb.msg.debug(2, bb.msg.domain.Fetcher, "BZR fetcher hitting network for %s" % url)
139
140 output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True)
141
142 return output.strip()
143
144 def _sortable_revision(self, url, ud, d):
145 """
146 Return a sortable revision number which in our case is the revision number
147 """
148
149 return self._build_revision(url, ud, d)
150
151 def _build_revision(self, url, ud, d):
152 return ud.revision
153
diff --git a/bitbake-dev/lib/bb/fetch/cvs.py b/bitbake-dev/lib/bb/fetch/cvs.py
deleted file mode 100644
index 90a006500e..0000000000
--- a/bitbake-dev/lib/bb/fetch/cvs.py
+++ /dev/null
@@ -1,182 +0,0 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26#Based on functions from the base bb module, Copyright 2003 Holger Schurig
27#
28
29import os
30import bb
31from bb import data
32from bb.fetch import Fetch
33from bb.fetch import FetchError
34from bb.fetch import MissingParameterError
35
class Cvs(Fetch):
    """
    Class to fetch a module or modules from cvs repositories
    """
    def supports(self, url, ud, d):
        """
        Check to see if a given url can be fetched with cvs.
        """
        return ud.type in ['cvs']

    def localpath(self, url, ud, d):
        """
        Return the path of the local tarball this fetch will produce.
        'module' is mandatory; tag/date select what to check out;
        norecurse/fullpath only influence the tarball name here.
        """
        if not "module" in ud.parm:
            raise MissingParameterError("cvs method needs a 'module' parameter")
        ud.module = ud.parm["module"]

        ud.tag = ""
        if 'tag' in ud.parm:
            ud.tag = ud.parm['tag']

        # Override the default date in certain cases
        if 'date' in ud.parm:
            ud.date = ud.parm['date']
        elif ud.tag:
            # A tag pins the checkout, so drop any implicit date.
            ud.date = ""

        norecurse = ''
        if 'norecurse' in ud.parm:
            norecurse = '_norecurse'

        fullpath = ''
        if 'fullpath' in ud.parm:
            fullpath = '_fullpath'

        ud.localfile = data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d)

        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)

    def forcefetch(self, url, ud, d):
        # A floating "now" date can never be cached; always refetch.
        if (ud.date == "now"):
            return True
        return False

    def go(self, loc, ud, d):
        """Check out (or update) the module and tar it into ud.localpath."""

        # try to use the tarball stash
        if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile):
            bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping cvs checkout." % ud.localpath)
            return

        method = "pserver"
        if "method" in ud.parm:
            method = ud.parm["method"]

        localdir = ud.module
        if "localdir" in ud.parm:
            localdir = ud.parm["localdir"]

        cvs_port = ""
        if "port" in ud.parm:
            cvs_port = ud.parm["port"]

        # CVS_RSH is only meaningful for the "ext" access method.
        cvs_rsh = None
        if method == "ext":
            if "rsh" in ud.parm:
                cvs_rsh = ud.parm["rsh"]

        # Build the CVSROOT string, optionally routing through a proxy.
        if method == "dir":
            cvsroot = ud.path
        else:
            cvsroot = ":" + method
            cvsproxyhost = data.getVar('CVS_PROXY_HOST', d, True)
            if cvsproxyhost:
                cvsroot += ";proxy=" + cvsproxyhost
            cvsproxyport = data.getVar('CVS_PROXY_PORT', d, True)
            if cvsproxyport:
                cvsroot += ";proxyport=" + cvsproxyport
            cvsroot += ":" + ud.user
            if ud.pswd:
                cvsroot += ":" + ud.pswd
            cvsroot += "@" + ud.host + ":" + cvs_port + ud.path

        options = []
        if 'norecurse' in ud.parm:
            options.append("-l")
        if ud.date:
            # treat YYYYMMDDHHMM specially for CVS
            if len(ud.date) == 12:
                options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12]))
            else:
                options.append("-D \"%s UTC\"" % ud.date)
        if ud.tag:
            options.append("-r %s" % ud.tag)

        # Work on a copy of the datastore with a cvs override active so the
        # FETCHCOMMAND/UPDATECOMMAND templates expand with the values below.
        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
        data.update_data(localdata)

        data.setVar('CVSROOT', cvsroot, localdata)
        data.setVar('CVSCOOPTS', " ".join(options), localdata)
        data.setVar('CVSMODULE', ud.module, localdata)
        cvscmd = data.getVar('FETCHCOMMAND', localdata, 1)
        cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1)

        if cvs_rsh:
            cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
            cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)

        # create module directory
        bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory")
        pkg = data.expand('${PN}', d)
        pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
        moddir = os.path.join(pkgdir,localdir)
        if os.access(os.path.join(moddir,'CVS'), os.R_OK):
            bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc)
            # update sources there
            os.chdir(moddir)
            myret = os.system(cvsupdatecmd)
        else:
            bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
            # check out sources there
            bb.mkdirhier(pkgdir)
            os.chdir(pkgdir)
            bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % cvscmd)
            myret = os.system(cvscmd)

        if myret != 0 or not os.access(moddir, os.R_OK):
            # Checkout failed: remove the (empty) module dir so the next
            # attempt starts clean, then report the failure.
            try:
                os.rmdir(moddir)
            except OSError:
                pass
            raise FetchError(ud.module)

        # tar them up to a defined filename
        if 'fullpath' in ud.parm:
            os.chdir(pkgdir)
            myret = os.system("tar -czf %s %s" % (ud.localpath, localdir))
        else:
            os.chdir(moddir)
            os.chdir('..')
            myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(moddir)))

        if myret != 0:
            # Don't leave a truncated tarball behind.
            try:
                os.unlink(ud.localpath)
            except OSError:
                pass
            raise FetchError(ud.module)
diff --git a/bitbake-dev/lib/bb/fetch/git.py b/bitbake-dev/lib/bb/fetch/git.py
deleted file mode 100644
index 0e68325db9..0000000000
--- a/bitbake-dev/lib/bb/fetch/git.py
+++ /dev/null
@@ -1,216 +0,0 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' git implementation
5
6"""
7
8#Copyright (C) 2005 Richard Purdie
9#
10# This program is free software; you can redistribute it and/or modify
11# it under the terms of the GNU General Public License version 2 as
12# published by the Free Software Foundation.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License along
20# with this program; if not, write to the Free Software Foundation, Inc.,
21# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
22
23import os
24import bb
25from bb import data
26from bb.fetch import Fetch
27from bb.fetch import runfetchcmd
28
class Git(Fetch):
    """Class to fetch a module or modules from git repositories"""

    def init(self, d):
        #
        # Only enable _sortable revision if the key is set
        #
        if bb.data.getVar("BB_GIT_CLONE_FOR_SRCREV", d, True):
            self._sortable_buildindex = self._sortable_buildindex_disabled

    def supports(self, url, ud, d):
        """
        Check to see if a given url can be fetched with git.
        """
        return ud.type in ['git']

    def localpath(self, url, ud, d):
        """
        Resolve protocol, branch and tag for the url and return the path of
        the local tarball this fetch will produce.
        """
        if 'protocol' in ud.parm:
            ud.proto = ud.parm['protocol']
        elif not ud.host:
            ud.proto = 'file'
        else:
            ud.proto = "rsync"

        ud.branch = ud.parm.get("branch", "master")

        gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))
        ud.mirrortarball = 'git_%s.tar.gz' % (gitsrcname)
        ud.clonedir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)

        tag = Fetch.srcrev_internal_helper(ud, d)
        if tag is True:
            ud.tag = self.latest_revision(url, ud, d)
        elif tag:
            ud.tag = tag

        # "master" is a moving target; always resolve to a concrete rev.
        if not ud.tag or ud.tag == "master":
            ud.tag = self.latest_revision(url, ud, d)

        subdir = ud.parm.get("subpath", "")
        if subdir != "":
            if subdir.endswith("/"):
                subdir = subdir[:-1]
            subdirpath = os.path.join(ud.path, subdir)
        else:
            subdirpath = ud.path

        if 'fullclone' in ud.parm:
            ud.localfile = ud.mirrortarball
        else:
            ud.localfile = data.expand('git_%s%s_%s.tar.gz' % (ud.host, subdirpath.replace('/', '.'), ud.tag), d)

        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)

    def go(self, loc, ud, d):
        """Fetch url"""

        if Fetch.try_mirror(d, ud.localfile):
            bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists (or was stashed). Skipping git checkout." % ud.localpath)
            return

        if ud.user:
            username = ud.user + '@'
        else:
            username = ""

        repofile = os.path.join(data.getVar("DL_DIR", d, 1), ud.mirrortarball)

        coname = '%s' % (ud.tag)
        codir = os.path.join(ud.clonedir, coname)

        # Obtain the bare clone: unpack a mirror tarball if available,
        # otherwise clone from upstream.
        if not os.path.exists(ud.clonedir):
            if Fetch.try_mirror(d, ud.mirrortarball):
                bb.mkdirhier(ud.clonedir)
                os.chdir(ud.clonedir)
                runfetchcmd("tar -xzf %s" % (repofile), d)
            else:
                runfetchcmd("git clone -n %s://%s%s%s %s" % (ud.proto, username, ud.host, ud.path, ud.clonedir), d)

        os.chdir(ud.clonedir)
        # Update the clone if it doesn't already contain the wanted rev.
        if not self._contains_ref(ud.tag, d):
            # Remove all but the .git directory
            runfetchcmd("rm * -Rf", d)
            runfetchcmd("git fetch %s://%s%s%s %s" % (ud.proto, username, ud.host, ud.path, ud.branch), d)
            runfetchcmd("git fetch --tags %s://%s%s%s" % (ud.proto, username, ud.host, ud.path), d)
            runfetchcmd("git prune-packed", d)
            runfetchcmd("git pack-redundant --all | xargs -r rm", d)

        os.chdir(ud.clonedir)
        mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
        if mirror_tarballs != "0" or 'fullclone' in ud.parm:
            bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git repository")
            runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ), d)

        if 'fullclone' in ud.parm:
            return

        if os.path.exists(codir):
            bb.utils.prunedir(codir)

        subdir = ud.parm.get("subpath", "")
        if subdir != "":
            if subdir.endswith("/"):
                subdirbase = os.path.basename(subdir[:-1])
            else:
                subdirbase = os.path.basename(subdir)
        else:
            subdirbase = ""

        if subdir != "":
            readpathspec = ":%s" % (subdir)
            codir = os.path.join(codir, "git")
            coprefix = os.path.join(codir, subdirbase, "")
        else:
            readpathspec = ""
            coprefix = os.path.join(codir, "git", "")

        # Export the requested tree into codir and tar it up.
        bb.mkdirhier(codir)
        os.chdir(ud.clonedir)
        runfetchcmd("git read-tree %s%s" % (ud.tag, readpathspec), d)
        runfetchcmd("git checkout-index -q -f --prefix=%s -a" % (coprefix), d)

        os.chdir(codir)
        bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout")
        runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ), d)

        os.chdir(ud.clonedir)
        bb.utils.prunedir(codir)

    def suppports_srcrev(self):
        # NOTE(review): misspelling kept — it matches the name used by the
        # other fetchers and presumably probed by the base class.
        return True

    def _contains_ref(self, tag, d):
        # "wc -l" is 0 when git log finds no commit for the ref.
        output = runfetchcmd("git log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % tag, d, quiet=True)
        return output.split()[0] != "0"

    def _revision_key(self, url, ud, d):
        """
        Return a unique key for the url
        """
        return "git:" + ud.host + ud.path.replace('/', '.')

    def _latest_revision(self, url, ud, d):
        """
        Compute the HEAD revision for the url
        """
        if ud.user:
            username = ud.user + '@'
        else:
            username = ""

        cmd = "git ls-remote %s://%s%s%s %s" % (ud.proto, username, ud.host, ud.path, ud.branch)
        output = runfetchcmd(cmd, d, True)
        if not output:
            raise bb.fetch.FetchError("Fetch command %s gave empty output\n" % (cmd))
        return output.split()[0]

    def _build_revision(self, url, ud, d):
        return ud.tag

    def _sortable_buildindex_disabled(self, url, ud, d, rev):
        """
        Return a suitable buildindex for the revision specified. This is done by counting revisions
        using "git rev-list" which may or may not work in different circumstances.
        """
        cwd = os.getcwd()

        # Check if we have the rev already

        if not os.path.exists(ud.clonedir):
            # Was a stray py2 'print "no repo"'; log through bb.msg instead.
            bb.msg.debug(1, bb.msg.domain.Fetcher, "GIT repository for %s does not exist in %s, fetching" % (url, ud.clonedir))
            self.go(None, ud, d)
            if not os.path.exists(ud.clonedir):
                bb.msg.error(bb.msg.domain.Fetcher, "GIT repository for %s doesn't exist in %s, cannot get sortable buildnumber, using old value" % (url, ud.clonedir))
                return None

        os.chdir(ud.clonedir)
        if not self._contains_ref(rev, d):
            self.go(None, ud, d)

        output = runfetchcmd("git rev-list %s -- 2> /dev/null | wc -l" % rev, d, quiet=True)
        os.chdir(cwd)

        buildindex = "%s" % output.split()[0]
        # Fixed: debug message referenced an undefined name 'repodir'
        # (NameError when this path was hit); ud.clonedir is what was meant.
        bb.msg.debug(1, bb.msg.domain.Fetcher, "GIT repository for %s in %s is returning %s revisions in rev-list before %s" % (url, ud.clonedir, buildindex, rev))
        return buildindex
216
diff --git a/bitbake-dev/lib/bb/fetch/hg.py b/bitbake-dev/lib/bb/fetch/hg.py
deleted file mode 100644
index 08cb61fc28..0000000000
--- a/bitbake-dev/lib/bb/fetch/hg.py
+++ /dev/null
@@ -1,178 +0,0 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementation for mercurial DRCS (hg).
5
6"""
7
8# Copyright (C) 2003, 2004 Chris Larson
9# Copyright (C) 2004 Marcin Juszkiewicz
10# Copyright (C) 2007 Robert Schuster
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24#
25# Based on functions from the base bb module, Copyright 2003 Holger Schurig
26
27import os
28import sys
29import bb
30from bb import data
31from bb.fetch import Fetch
32from bb.fetch import FetchError
33from bb.fetch import MissingParameterError
34from bb.fetch import runfetchcmd
35
36class Hg(Fetch):
37 """Class to fetch a from mercurial repositories"""
38 def supports(self, url, ud, d):
39 """
40 Check to see if a given url can be fetched with mercurial.
41 """
42 return ud.type in ['hg']
43
44 def localpath(self, url, ud, d):
45 if not "module" in ud.parm:
46 raise MissingParameterError("hg method needs a 'module' parameter")
47
48 ud.module = ud.parm["module"]
49
50 # Create paths to mercurial checkouts
51 relpath = ud.path
52 if relpath.startswith('/'):
53 # Remove leading slash as os.path.join can't cope
54 relpath = relpath[1:]
55 ud.pkgdir = os.path.join(data.expand('${HGDIR}', d), ud.host, relpath)
56 ud.moddir = os.path.join(ud.pkgdir, ud.module)
57
58 if 'rev' in ud.parm:
59 ud.revision = ud.parm['rev']
60 else:
61 tag = Fetch.srcrev_internal_helper(ud, d)
62 if tag is True:
63 ud.revision = self.latest_revision(url, ud, d)
64 elif tag:
65 ud.revision = tag
66 else:
67 ud.revision = self.latest_revision(url, ud, d)
68
69 ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)
70
71 return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
72
73 def _buildhgcommand(self, ud, d, command):
74 """
75 Build up an hg commandline based on ud
76 command is "fetch", "update", "info"
77 """
78
79 basecmd = data.expand('${FETCHCMD_hg}', d)
80
81 proto = "http"
82 if "proto" in ud.parm:
83 proto = ud.parm["proto"]
84
85 host = ud.host
86 if proto == "file":
87 host = "/"
88 ud.host = "localhost"
89
90 if not ud.user:
91 hgroot = host + ud.path
92 else:
93 hgroot = ud.user + "@" + host + ud.path
94
95 if command is "info":
96 return "%s identify -i %s://%s/%s" % (basecmd, proto, hgroot, ud.module)
97
98 options = [];
99 if ud.revision:
100 options.append("-r %s" % ud.revision)
101
102 if command is "fetch":
103 cmd = "%s clone %s %s://%s/%s %s" % (basecmd, " ".join(options), proto, hgroot, ud.module, ud.module)
104 elif command is "pull":
105 # do not pass options list; limiting pull to rev causes the local
106 # repo not to contain it and immediately following "update" command
107 # will crash
108 cmd = "%s pull" % (basecmd)
109 elif command is "update":
110 cmd = "%s update -C %s" % (basecmd, " ".join(options))
111 else:
112 raise FetchError("Invalid hg command %s" % command)
113
114 return cmd
115
116 def go(self, loc, ud, d):
117 """Fetch url"""
118
119 # try to use the tarball stash
120 if Fetch.try_mirror(d, ud.localfile):
121 bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping hg checkout." % ud.localpath)
122 return
123
124 bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + ud.moddir + "'")
125
126 if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
127 updatecmd = self._buildhgcommand(ud, d, "pull")
128 bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc)
129 # update sources there
130 os.chdir(ud.moddir)
131 bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % updatecmd)
132 runfetchcmd(updatecmd, d)
133
134 else:
135 fetchcmd = self._buildhgcommand(ud, d, "fetch")
136 bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
137 # check out sources there
138 bb.mkdirhier(ud.pkgdir)
139 os.chdir(ud.pkgdir)
140 bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % fetchcmd)
141 runfetchcmd(fetchcmd, d)
142
143 # Even when we clone (fetch), we still need to update as hg's clone
144 # won't checkout the specified revision if its on a branch
145 updatecmd = self._buildhgcommand(ud, d, "update")
146 bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % updatecmd)
147 runfetchcmd(updatecmd, d)
148
149 os.chdir(ud.pkgdir)
150 try:
151 runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d)
152 except:
153 t, v, tb = sys.exc_info()
154 try:
155 os.unlink(ud.localpath)
156 except OSError:
157 pass
158 raise t, v, tb
159
160 def suppports_srcrev(self):
161 return True
162
163 def _latest_revision(self, url, ud, d):
164 """
165 Compute tip revision for the url
166 """
167 output = runfetchcmd(self._buildhgcommand(ud, d, "info"), d)
168 return output.strip()
169
170 def _build_revision(self, url, ud, d):
171 return ud.revision
172
173 def _revision_key(self, url, ud, d):
174 """
175 Return a unique key for the url
176 """
177 return "hg:" + ud.moddir
178
diff --git a/bitbake-dev/lib/bb/fetch/local.py b/bitbake-dev/lib/bb/fetch/local.py
deleted file mode 100644
index f9bdf589cb..0000000000
--- a/bitbake-dev/lib/bb/fetch/local.py
+++ /dev/null
@@ -1,72 +0,0 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
28import os
29import bb
30from bb import data
31from bb.fetch import Fetch
32
class Local(Fetch):
    """Handler for file:// urls - files that already live on this machine."""

    def supports(self, url, urldata, d):
        """A url is handled here exactly when its scheme is 'file'."""
        return urldata.type in ['file']

    def localpath(self, url, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        """
        # Strip the scheme and any trailing ';param=...' section.
        path = url.split("://")[1].split(";")[0]
        newpath = path
        if path[0] != "/":
            # Relative path: search FILESPATH first, then fall back to
            # FILESDIR if nothing was found.
            filespath = data.getVar('FILESPATH', d, 1)
            if filespath:
                newpath = bb.which(filespath, path)
            if not newpath:
                filesdir = data.getVar('FILESDIR', d, 1)
                if filesdir:
                    newpath = os.path.join(filesdir, path)
        # We don't set localfile as for this fetcher the file is already local!
        return newpath

    def go(self, url, urldata, d):
        """Fetch urls (no-op for Local method)"""
        # no need to fetch local files, we'll deal with them in place.
        return 1

    def checkstatus(self, url, urldata, d):
        """
        Check the status of the url
        """
        if "*" in urldata.localpath:
            # Globs can't be stat()ed; assume they are present.
            bb.msg.note(1, bb.msg.domain.Fetcher, "URL %s looks like a glob and was therefore not checked." % url)
            return True
        return os.path.exists(urldata.localpath)
diff --git a/bitbake-dev/lib/bb/fetch/osc.py b/bitbake-dev/lib/bb/fetch/osc.py
deleted file mode 100644
index 2c34caf6c9..0000000000
--- a/bitbake-dev/lib/bb/fetch/osc.py
+++ /dev/null
@@ -1,155 +0,0 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4Bitbake "Fetch" implementation for osc (Opensuse build service client).
5Based on the svn "Fetch" implementation.
6
7"""
8
9import os
10import sys
11import bb
12from bb import data
13from bb.fetch import Fetch
14from bb.fetch import FetchError
15from bb.fetch import MissingParameterError
16from bb.fetch import runfetchcmd
17
18class Osc(Fetch):
19 """Class to fetch a module or modules from Opensuse build server
20 repositories."""
21
22 def supports(self, url, ud, d):
23 """
24 Check to see if a given url can be fetched with osc.
25 """
26 return ud.type in ['osc']
27
28 def localpath(self, url, ud, d):
29 if not "module" in ud.parm:
30 raise MissingParameterError("osc method needs a 'module' parameter.")
31
32 ud.module = ud.parm["module"]
33
34 # Create paths to osc checkouts
35 relpath = ud.path
36 if relpath.startswith('/'):
37 # Remove leading slash as os.path.join can't cope
38 relpath = relpath[1:]
39 ud.pkgdir = os.path.join(data.expand('${OSCDIR}', d), ud.host)
40 ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)
41
42 if 'rev' in ud.parm:
43 ud.revision = ud.parm['rev']
44 else:
45 pv = data.getVar("PV", d, 0)
46 rev = Fetch.srcrev_internal_helper(ud, d)
47 if rev and rev != True:
48 ud.revision = rev
49 else:
50 ud.revision = ""
51
52 ud.localfile = data.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision), d)
53
54 return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
55
56 def _buildosccommand(self, ud, d, command):
57 """
58 Build up an ocs commandline based on ud
59 command is "fetch", "update", "info"
60 """
61
62 basecmd = data.expand('${FETCHCMD_osc}', d)
63
64 proto = "ocs"
65 if "proto" in ud.parm:
66 proto = ud.parm["proto"]
67
68 options = []
69
70 config = "-c %s" % self.generate_config(ud, d)
71
72 if ud.revision:
73 options.append("-r %s" % ud.revision)
74
75 coroot = ud.path
76 if coroot.startswith('/'):
77 # Remove leading slash as os.path.join can't cope
78 coroot= coroot[1:]
79
80 if command is "fetch":
81 osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options))
82 elif command is "update":
83 osccmd = "%s %s up %s" % (basecmd, config, " ".join(options))
84 else:
85 raise FetchError("Invalid osc command %s" % command)
86
87 return osccmd
88
89 def go(self, loc, ud, d):
90 """
91 Fetch url
92 """
93
94 # Try to use the tarball stash
95 if Fetch.try_mirror(d, ud.localfile):
96 bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping osc checkout." % ud.localpath)
97 return
98
99 bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + ud.moddir + "'")
100
101 if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK):
102 oscupdatecmd = self._buildosccommand(ud, d, "update")
103 bb.msg.note(1, bb.msg.domain.Fetcher, "Update "+ loc)
104 # update sources there
105 os.chdir(ud.moddir)
106 bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % oscupdatecmd)
107 runfetchcmd(oscupdatecmd, d)
108 else:
109 oscfetchcmd = self._buildosccommand(ud, d, "fetch")
110 bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
111 # check out sources there
112 bb.mkdirhier(ud.pkgdir)
113 os.chdir(ud.pkgdir)
114 bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % oscfetchcmd)
115 runfetchcmd(oscfetchcmd, d)
116
117 os.chdir(os.path.join(ud.pkgdir + ud.path))
118 # tar them up to a defined filename
119 try:
120 runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d)
121 except:
122 t, v, tb = sys.exc_info()
123 try:
124 os.unlink(ud.localpath)
125 except OSError:
126 pass
127 raise t, v, tb
128
129 def supports_srcrev(self):
130 return False
131
132 def generate_config(self, ud, d):
133 """
134 Generate a .oscrc to be used for this run.
135 """
136
137 config_path = "%s/oscrc" % data.expand('${OSCDIR}', d)
138 if (os.path.exists(config_path)):
139 os.remove(config_path)
140
141 f = open(config_path, 'w')
142 f.write("[general]\n")
143 f.write("apisrv = %s\n" % ud.host)
144 f.write("scheme = http\n")
145 f.write("su-wrapper = su -c\n")
146 f.write("build-root = %s\n" % data.expand('${WORKDIR}', d))
147 f.write("urllist = http://moblin-obs.jf.intel.com:8888/build/%(project)s/%(repository)s/%(buildarch)s/:full/%(name)s.rpm\n")
148 f.write("extra-pkgs = gzip\n")
149 f.write("\n")
150 f.write("[%s]\n" % ud.host)
151 f.write("user = %s\n" % ud.parm["user"])
152 f.write("pass = %s\n" % ud.parm["pswd"])
153 f.close()
154
155 return config_path
diff --git a/bitbake-dev/lib/bb/fetch/perforce.py b/bitbake-dev/lib/bb/fetch/perforce.py
deleted file mode 100644
index 394f5a2253..0000000000
--- a/bitbake-dev/lib/bb/fetch/perforce.py
+++ /dev/null
@@ -1,214 +0,0 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
28import os
29import bb
30from bb import data
31from bb.fetch import Fetch
32from bb.fetch import FetchError
33
class Perforce(Fetch):
    """Fetcher for Perforce ('p4://') URIs.

    Checks files out of a p4 depot with the FETCHCOMMAND_p4/FETCHCOMMAND
    client and packs them into a tarball under DL_DIR.
    """

    def supports(self, url, ud, d):
        # Only p4:// URLs are handled here.
        return ud.type in ['p4']

    # Parse a p4 URL into (host, path, user, pswd, parm).
    # Pre-decorator staticmethod style: see the assignment after the body.
    def doparse(url,d):
        parm = {}
        path = url.split("://")[1]
        delim = path.find("@");
        if delim != -1:
            # user:pswd:host:port@path form embeds the credentials in the URL
            (user,pswd,host,port) = path.split('@')[0].split(":")
            path = path.split('@')[1]
        else:
            # Otherwise host/port come from the P4PORT metadata variable
            (host,port) = data.getVar('P4PORT', d).split(':')
            user = ""
            pswd = ""

        if path.find(";") != -1:
            # Trailing ';key=value' pairs become the parm dict
            keys=[]
            values=[]
            plist = path.split(';')
            for item in plist:
                if item.count('='):
                    (key,value) = item.split('=')
                    keys.append(key)
                    values.append(value)

            parm = dict(zip(keys,values))
        path = "//" + path.split(';')[0]
        host += ":%s" % (port)
        # Resolve the changeset once so later calls can reuse parm["cset"]
        parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm)

        return host,path,user,pswd,parm
    doparse = staticmethod(doparse)

    # Ask the server for the newest changeset matching the URL constraints
    # (revision, label or P4DATE); returns -1 when nothing is found.
    def getcset(d, depot,host,user,pswd,parm):
        p4opt = ""
        if "cset" in parm:
            # Already resolved earlier — reuse it
            return parm["cset"];
        if user:
            p4opt += " -u %s" % (user)
        if pswd:
            p4opt += " -P %s" % (pswd)
        if host:
            p4opt += " -p %s" % (host)

        p4date = data.getVar("P4DATE", d, 1)
        # Narrow the query: explicit revision beats label beats date
        if "revision" in parm:
            depot += "#%s" % (parm["revision"])
        elif "label" in parm:
            depot += "@%s" % (parm["label"])
        elif p4date:
            depot += "@%s" % (p4date)

        p4cmd = data.getVar('FETCHCOMMAND_p4', d, 1)
        bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
        p4file = os.popen("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
        cset = p4file.readline().strip()
        bb.msg.debug(1, bb.msg.domain.Fetcher, "READ %s" % (cset))
        if not cset:
            return -1

        # 'p4 changes' output looks like "Change <num> ..." — take the number
        return cset.split(' ')[1]
    getcset = staticmethod(getcset)

    def localpath(self, url, ud, d):
        # Derive the tarball name from the URL and return its DL_DIR path.

        (host,path,user,pswd,parm) = Perforce.doparse(url,d)

        # If a label is specified, we use that as our filename

        if "label" in parm:
            ud.localfile = "%s.tar.gz" % (parm["label"])
            return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile)

        # Strip a trailing '/...' wildcard before building the name
        base = path
        which = path.find('/...')
        if which != -1:
            base = path[:which]

        if base[0] == "/":
            base = base[1:]

        cset = Perforce.getcset(d, path, host, user, pswd, parm)

        ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host,base.replace('/', '.'), cset), d)

        return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile)

    def go(self, loc, ud, d):
        """
        Fetch urls
        """

        # try to use the tarball stash
        if Fetch.try_mirror(d, ud.localfile):
            bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping perforce checkout." % ud.localpath)
            return

        (host,depot,user,pswd,parm) = Perforce.doparse(loc, d)

        # 'path' is the depot prefix without the '/...' wildcard
        if depot.find('/...') != -1:
            path = depot[:depot.find('/...')]
        else:
            path = depot

        if "module" in parm:
            module = parm["module"]
        else:
            module = os.path.basename(path)

        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata)
        data.update_data(localdata)

        # Get the p4 command
        p4opt = ""
        if user:
            p4opt += " -u %s" % (user)

        if pswd:
            p4opt += " -P %s" % (pswd)

        if host:
            p4opt += " -p %s" % (host)

        p4cmd = data.getVar('FETCHCOMMAND', localdata, 1)

        # create temp directory
        bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory")
        bb.mkdirhier(data.expand('${WORKDIR}', localdata))
        data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
        tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
        tmpfile = tmppipe.readline().strip()
        if not tmpfile:
            bb.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
            raise FetchError(module)

        # Pin the depot spec to a label or a concrete changeset
        if "label" in parm:
            depot = "%s@%s" % (depot,parm["label"])
        else:
            cset = Perforce.getcset(d, depot, host, user, pswd, parm)
            depot = "%s@%s" % (depot,cset)

        os.chdir(tmpfile)
        bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
        bb.msg.note(1, bb.msg.domain.Fetcher, "%s%s files %s" % (p4cmd, p4opt, depot))
        p4file = os.popen("%s%s files %s" % (p4cmd, p4opt, depot))

        # NOTE(review): os.popen never returns a falsy object, so this check
        # cannot trigger — errors surface via the count == 0 check below.
        if not p4file:
            bb.error("Fetch: unable to get the P4 files from %s" % (depot))
            raise FetchError(module)

        count = 0

        for file in p4file:
            list = file.split()

            # Skip files deleted at this changeset
            if list[2] == "delete":
                continue

            # Strip the depot prefix and the trailing '#rev' from the name
            dest = list[0][len(path)+1:]
            where = dest.find("#")

            os.system("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module,dest[:where],list[0]))
            count = count + 1

        if count == 0:
            bb.error("Fetch: No files gathered from the P4 fetch")
            raise FetchError(module)

        # Pack the checkout into the DL_DIR tarball; remove any partial
        # tarball on failure so a re-run starts clean.
        myret = os.system("tar -czf %s %s" % (ud.localpath, module))
        if myret != 0:
            try:
                os.unlink(ud.localpath)
            except OSError:
                pass
            raise FetchError(module)
        # cleanup
        os.system('rm -rf %s' % tmpfile)
213
214
diff --git a/bitbake-dev/lib/bb/fetch/ssh.py b/bitbake-dev/lib/bb/fetch/ssh.py
deleted file mode 100644
index 68e6fdb1df..0000000000
--- a/bitbake-dev/lib/bb/fetch/ssh.py
+++ /dev/null
@@ -1,118 +0,0 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3'''
4BitBake 'Fetch' implementations
5
6This implementation is for Secure Shell (SSH), and attempts to comply with the
7IETF secsh internet draft:
8 http://tools.ietf.org/wg/secsh/draft-ietf-secsh-scp-sftp-ssh-uri/
9
10 Currently does not support the sftp parameters, as this uses scp
11 Also does not support the 'fingerprint' connection parameter.
12
13'''
14
15# Copyright (C) 2006 OpenedHand Ltd.
16#
17#
18# Based in part on svk.py:
19# Copyright (C) 2006 Holger Hans Peter Freyther
20# Based on svn.py:
21# Copyright (C) 2003, 2004 Chris Larson
22# Based on functions from the base bb module:
23# Copyright 2003 Holger Schurig
24#
25#
26# This program is free software; you can redistribute it and/or modify
27# it under the terms of the GNU General Public License version 2 as
28# published by the Free Software Foundation.
29#
30# This program is distributed in the hope that it will be useful,
31# but WITHOUT ANY WARRANTY; without even the implied warranty of
32# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
33# GNU General Public License for more details.
34#
35# You should have received a copy of the GNU General Public License along
36# with this program; if not, write to the Free Software Foundation, Inc.,
37# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
38
39import re, os
40from bb import data
41from bb.fetch import Fetch
42from bb.fetch import FetchError
43
44
# Compiled once at import time; SSH.supports()/localpath()/go() all reuse it.
# Follows the IETF secsh draft URI layout described in the module docstring.
__pattern__ = re.compile(r'''
 \s* # Skip leading whitespace
 ssh:// # scheme
 ( # Optional username/password block
 (?P<user>\S+) # username
 (:(?P<pass>\S+))? # colon followed by the password (optional)
 )?
 (?P<cparam>(;[^;]+)*)? # connection parameters block (optional)
 @
 (?P<host>\S+?) # non-greedy match of the host
 (:(?P<port>[0-9]+))? # colon followed by the port (optional)
 /
 (?P<path>[^;]+) # path on the remote system, may be absolute or relative,
 # and may include the use of '~' to reference the remote home
 # directory
 (?P<sparam>(;[^;]+)*)? # parameters block (optional)
 $
''', re.VERBOSE)
63
64class SSH(Fetch):
65 '''Class to fetch a module or modules via Secure Shell'''
66
67 def supports(self, url, urldata, d):
68 return __pattern__.match(url) != None
69
70 def localpath(self, url, urldata, d):
71 m = __pattern__.match(url)
72 path = m.group('path')
73 host = m.group('host')
74 lpath = os.path.join(data.getVar('DL_DIR', d, True), host, os.path.basename(path))
75 return lpath
76
77 def go(self, url, urldata, d):
78 dldir = data.getVar('DL_DIR', d, 1)
79
80 m = __pattern__.match(url)
81 path = m.group('path')
82 host = m.group('host')
83 port = m.group('port')
84 user = m.group('user')
85 password = m.group('pass')
86
87 ldir = os.path.join(dldir, host)
88 lpath = os.path.join(ldir, os.path.basename(path))
89
90 if not os.path.exists(ldir):
91 os.makedirs(ldir)
92
93 if port:
94 port = '-P %s' % port
95 else:
96 port = ''
97
98 if user:
99 fr = user
100 if password:
101 fr += ':%s' % password
102 fr += '@%s' % host
103 else:
104 fr = host
105 fr += ':%s' % path
106
107
108 import commands
109 cmd = 'scp -B -r %s %s %s/' % (
110 port,
111 commands.mkarg(fr),
112 commands.mkarg(ldir)
113 )
114
115 (exitstatus, output) = commands.getstatusoutput(cmd)
116 if exitstatus != 0:
117 print output
118 raise FetchError('Unable to fetch %s' % url)
diff --git a/bitbake-dev/lib/bb/fetch/svk.py b/bitbake-dev/lib/bb/fetch/svk.py
deleted file mode 100644
index 120dad9d4e..0000000000
--- a/bitbake-dev/lib/bb/fetch/svk.py
+++ /dev/null
@@ -1,109 +0,0 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6This implementation is for svk. It is based on the svn implementation
7
8"""
9
10# Copyright (C) 2006 Holger Hans Peter Freyther
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
28import os
29import bb
30from bb import data
31from bb.fetch import Fetch
32from bb.fetch import FetchError
33from bb.fetch import MissingParameterError
34
class Svk(Fetch):
    """Class to fetch a module or modules from svk repositories.

    Checks the module out into a temporary directory and packs it into a
    tarball under DL_DIR.
    FIXES: 'not "x" in y' rewritten as the idiomatic '"x" not in y';
    forcefetch simplified to return the comparison directly.
    """

    def supports(self, url, ud, d):
        """
        Check to see if a given url can be fetched with svk.
        """
        return ud.type in ['svk']

    def localpath(self, url, ud, d):
        """Derive the local tarball name for this URI and return its DL_DIR path."""
        if "module" not in ud.parm:
            raise MissingParameterError("svk method needs a 'module' parameter")
        else:
            ud.module = ud.parm["module"]

        # An explicit 'rev' pins the checkout; otherwise the date is used.
        ud.revision = ""
        if 'rev' in ud.parm:
            ud.revision = ud.parm['rev']

        ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)

        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)

    def forcefetch(self, url, ud, d):
        # A floating date means the checkout contents can change, so refetch.
        return ud.date == "now"

    def go(self, loc, ud, d):
        """Fetch urls"""

        # Skip entirely if the tarball exists or a mirror provides it.
        if not self.forcefetch(loc, ud, d) and Fetch.try_mirror(d, ud.localfile):
            return

        svkroot = ud.host + ud.path

        # Check out by date unless an explicit revision was requested.
        svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)

        if ud.revision:
            svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)

        # create temp directory
        localdata = data.createCopy(d)
        data.update_data(localdata)
        bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: creating temporary directory")
        bb.mkdirhier(data.expand('${WORKDIR}', localdata))
        data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
        tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
        tmpfile = tmppipe.readline().strip()
        if not tmpfile:
            bb.msg.error(bb.msg.domain.Fetcher, "Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
            raise FetchError(ud.module)

        # check out sources there
        os.chdir(tmpfile)
        bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
        bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svkcmd)
        myret = os.system(svkcmd)
        if myret != 0:
            try:
                os.rmdir(tmpfile)
            except OSError:
                pass
            raise FetchError(ud.module)

        os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
        # tar them up to a defined filename; drop a partial tarball on failure
        myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)))
        if myret != 0:
            try:
                os.unlink(ud.localpath)
            except OSError:
                pass
            raise FetchError(ud.module)
        # cleanup
        os.system('rm -rf %s' % tmpfile)
diff --git a/bitbake-dev/lib/bb/fetch/svn.py b/bitbake-dev/lib/bb/fetch/svn.py
deleted file mode 100644
index eef9862a84..0000000000
--- a/bitbake-dev/lib/bb/fetch/svn.py
+++ /dev/null
@@ -1,206 +0,0 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementation for svn.
5
6"""
7
8# Copyright (C) 2003, 2004 Chris Larson
9# Copyright (C) 2004 Marcin Juszkiewicz
10#
11# This program is free software; you can redistribute it and/or modify
12# it under the terms of the GNU General Public License version 2 as
13# published by the Free Software Foundation.
14#
15# This program is distributed in the hope that it will be useful,
16# but WITHOUT ANY WARRANTY; without even the implied warranty of
17# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18# GNU General Public License for more details.
19#
20# You should have received a copy of the GNU General Public License along
21# with this program; if not, write to the Free Software Foundation, Inc.,
22# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
23#
24# Based on functions from the base bb module, Copyright 2003 Holger Schurig
25
26import os
27import sys
28import bb
29from bb import data
30from bb.fetch import Fetch
31from bb.fetch import FetchError
32from bb.fetch import MissingParameterError
33from bb.fetch import runfetchcmd
34
35class Svn(Fetch):
36 """Class to fetch a module or modules from svn repositories"""
37 def supports(self, url, ud, d):
38 """
39 Check to see if a given url can be fetched with svn.
40 """
41 return ud.type in ['svn']
42
43 def localpath(self, url, ud, d):
44 if not "module" in ud.parm:
45 raise MissingParameterError("svn method needs a 'module' parameter")
46
47 ud.module = ud.parm["module"]
48
49 # Create paths to svn checkouts
50 relpath = ud.path
51 if relpath.startswith('/'):
52 # Remove leading slash as os.path.join can't cope
53 relpath = relpath[1:]
54 ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath)
55 ud.moddir = os.path.join(ud.pkgdir, ud.module)
56
57 if 'rev' in ud.parm:
58 ud.date = ""
59 ud.revision = ud.parm['rev']
60 elif 'date' in ud.date:
61 ud.date = ud.parm['date']
62 ud.revision = ""
63 else:
64 #
65 # ***Nasty hack***
66 # If DATE in unexpanded PV, use ud.date (which is set from SRCDATE)
67 # Should warn people to switch to SRCREV here
68 #
69 pv = data.getVar("PV", d, 0)
70 if "DATE" in pv:
71 ud.revision = ""
72 else:
73 rev = Fetch.srcrev_internal_helper(ud, d)
74 if rev is True:
75 ud.revision = self.latest_revision(url, ud, d)
76 ud.date = ""
77 elif rev:
78 ud.revision = rev
79 ud.date = ""
80 else:
81 ud.revision = ""
82
83 ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)
84
85 return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
86
87 def _buildsvncommand(self, ud, d, command):
88 """
89 Build up an svn commandline based on ud
90 command is "fetch", "update", "info"
91 """
92
93 basecmd = data.expand('${FETCHCMD_svn}', d)
94
95 proto = "svn"
96 if "proto" in ud.parm:
97 proto = ud.parm["proto"]
98
99 svn_rsh = None
100 if proto == "svn+ssh" and "rsh" in ud.parm:
101 svn_rsh = ud.parm["rsh"]
102
103 svnroot = ud.host + ud.path
104
105 # either use the revision, or SRCDATE in braces,
106 options = []
107
108 if ud.user:
109 options.append("--username %s" % ud.user)
110
111 if ud.pswd:
112 options.append("--password %s" % ud.pswd)
113
114 if command is "info":
115 svncmd = "%s info %s %s://%s/%s/" % (basecmd, " ".join(options), proto, svnroot, ud.module)
116 else:
117 suffix = ""
118 if ud.revision:
119 options.append("-r %s" % ud.revision)
120 suffix = "@%s" % (ud.revision)
121 elif ud.date:
122 options.append("-r {%s}" % ud.date)
123
124 if command is "fetch":
125 svncmd = "%s co %s %s://%s/%s%s %s" % (basecmd, " ".join(options), proto, svnroot, ud.module, suffix, ud.module)
126 elif command is "update":
127 svncmd = "%s update %s" % (basecmd, " ".join(options))
128 else:
129 raise FetchError("Invalid svn command %s" % command)
130
131 if svn_rsh:
132 svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)
133
134 return svncmd
135
136 def go(self, loc, ud, d):
137 """Fetch url"""
138
139 # try to use the tarball stash
140 if Fetch.try_mirror(d, ud.localfile):
141 bb.msg.debug(1, bb.msg.domain.Fetcher, "%s already exists or was mirrored, skipping svn checkout." % ud.localpath)
142 return
143
144 bb.msg.debug(2, bb.msg.domain.Fetcher, "Fetch: checking for module directory '" + ud.moddir + "'")
145
146 if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
147 svnupdatecmd = self._buildsvncommand(ud, d, "update")
148 bb.msg.note(1, bb.msg.domain.Fetcher, "Update " + loc)
149 # update sources there
150 os.chdir(ud.moddir)
151 bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnupdatecmd)
152 runfetchcmd(svnupdatecmd, d)
153 else:
154 svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
155 bb.msg.note(1, bb.msg.domain.Fetcher, "Fetch " + loc)
156 # check out sources there
157 bb.mkdirhier(ud.pkgdir)
158 os.chdir(ud.pkgdir)
159 bb.msg.debug(1, bb.msg.domain.Fetcher, "Running %s" % svnfetchcmd)
160 runfetchcmd(svnfetchcmd, d)
161
162 os.chdir(ud.pkgdir)
163 # tar them up to a defined filename
164 try:
165 runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d)
166 except:
167 t, v, tb = sys.exc_info()
168 try:
169 os.unlink(ud.localpath)
170 except OSError:
171 pass
172 raise t, v, tb
173
174 def suppports_srcrev(self):
175 return True
176
177 def _revision_key(self, url, ud, d):
178 """
179 Return a unique key for the url
180 """
181 return "svn:" + ud.moddir
182
183 def _latest_revision(self, url, ud, d):
184 """
185 Return the latest upstream revision number
186 """
187 bb.msg.debug(2, bb.msg.domain.Fetcher, "SVN fetcher hitting network for %s" % url)
188
189 output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "info"), d, True)
190
191 revision = None
192 for line in output.splitlines():
193 if "Last Changed Rev" in line:
194 revision = line.split(":")[1].strip()
195
196 return revision
197
198 def _sortable_revision(self, url, ud, d):
199 """
200 Return a sortable revision number which in our case is the revision number
201 """
202
203 return self._build_revision(url, ud, d)
204
205 def _build_revision(self, url, ud, d):
206 return ud.revision
diff --git a/bitbake-dev/lib/bb/fetch/wget.py b/bitbake-dev/lib/bb/fetch/wget.py
deleted file mode 100644
index fd93c7ec46..0000000000
--- a/bitbake-dev/lib/bb/fetch/wget.py
+++ /dev/null
@@ -1,130 +0,0 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'Fetch' implementations
5
6Classes for obtaining upstream sources for the
7BitBake build tools.
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25#
26# Based on functions from the base bb module, Copyright 2003 Holger Schurig
27
28import os
29import bb
30from bb import data
31from bb.fetch import Fetch
32from bb.fetch import FetchError
33from bb.fetch import uri_replace
34
class Wget(Fetch):
    """Class to fetch urls via 'wget'"""
    def supports(self, url, ud, d):
        """
        Check to see if a given url can be fetched with wget.
        """
        return ud.type in ['http','https','ftp']

    def localpath(self, url, ud, d):
        # Re-encode the URL without parameters to derive the local file name.
        url = bb.encodeurl([ud.type, ud.host, ud.path, ud.user, ud.pswd, {}])
        ud.basename = os.path.basename(ud.path)
        ud.localfile = data.expand(os.path.basename(url), d)

        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)

    def go(self, uri, ud, d, checkonly = False):
        """Fetch urls: try PREMIRRORS, then the URI itself, then MIRRORS;
        raise FetchError when everything fails. With checkonly=True only
        probes availability (CHECKCOMMAND) instead of downloading."""

        def fetch_uri(uri, ud, d):
            # Pick the command: CHECKCOMMAND probes without downloading,
            # RESUMECOMMAND continues a partial file, FETCHCOMMAND starts fresh.
            if checkonly:
                fetchcmd = data.getVar("CHECKCOMMAND", d, 1)
            elif os.path.exists(ud.localpath):
                # file exists, but we didnt complete it.. trying again..
                fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
            else:
                fetchcmd = data.getVar("FETCHCOMMAND", d, 1)

            # Strip any ';param=...' suffix before handing the URI to wget
            uri = uri.split(";")[0]
            uri_decoded = list(bb.decodeurl(uri))
            uri_type = uri_decoded[0]
            uri_host = uri_decoded[1]

            bb.msg.note(1, bb.msg.domain.Fetcher, "fetch " + uri)
            fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
            fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
            # Apply the scheme's proxy unless the host is on the ignore list
            httpproxy = None
            ftpproxy = None
            if uri_type == 'http':
                httpproxy = data.getVar("HTTP_PROXY", d, True)
                httpproxy_ignore = (data.getVar("HTTP_PROXY_IGNORE", d, True) or "").split()
                for p in httpproxy_ignore:
                    if uri_host.endswith(p):
                        httpproxy = None
                        break
            if uri_type == 'ftp':
                ftpproxy = data.getVar("FTP_PROXY", d, True)
                # NOTE(review): reads HTTP_PROXY_IGNORE, not FTP_PROXY_IGNORE —
                # looks like a copy/paste slip; confirm intended behaviour.
                ftpproxy_ignore = (data.getVar("HTTP_PROXY_IGNORE", d, True) or "").split()
                for p in ftpproxy_ignore:
                    if uri_host.endswith(p):
                        ftpproxy = None
                        break
            if httpproxy:
                fetchcmd = "http_proxy=" + httpproxy + " " + fetchcmd
            if ftpproxy:
                fetchcmd = "ftp_proxy=" + ftpproxy + " " + fetchcmd
            bb.msg.debug(2, bb.msg.domain.Fetcher, "executing " + fetchcmd)
            ret = os.system(fetchcmd)
            if ret != 0:
                return False

            # Sanity check since wget can pretend it succeed when it didn't
            # Also, this used to happen if sourceforge sent us to the mirror page
            if not os.path.exists(ud.localpath) and not checkonly:
                bb.msg.debug(2, bb.msg.domain.Fetcher, "The fetch command for %s returned success but %s doesn't exist?..." % (uri, ud.localpath))
                return False

            return True

        # Work on a copy of the datastore with the 'wget' override active
        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata)
        data.update_data(localdata)

        # PREMIRRORS are tried before the original URI
        premirrors = [ i.split() for i in (data.getVar('PREMIRRORS', localdata, 1) or "").split('\n') if i ]
        for (find, replace) in premirrors:
            newuri = uri_replace(uri, find, replace, d)
            if newuri != uri:
                if fetch_uri(newuri, ud, localdata):
                    return True

        if fetch_uri(uri, ud, localdata):
            return True

        # try mirrors
        mirrors = [ i.split() for i in (data.getVar('MIRRORS', localdata, 1) or "").split('\n') if i ]
        for (find, replace) in mirrors:
            newuri = uri_replace(uri, find, replace, d)
            if newuri != uri:
                if fetch_uri(newuri, ud, localdata):
                    return True

        raise FetchError(uri)


    def checkstatus(self, uri, ud, d):
        # Probe-only entry point: runs go() with checkonly=True.
        return self.go(uri, ud, d, True)
diff --git a/bitbake-dev/lib/bb/manifest.py b/bitbake-dev/lib/bb/manifest.py
deleted file mode 100644
index 4e4b7d98ec..0000000000
--- a/bitbake-dev/lib/bb/manifest.py
+++ /dev/null
@@ -1,144 +0,0 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# Copyright (C) 2003, 2004 Chris Larson
5#
6# This program is free software; you can redistribute it and/or modify
7# it under the terms of the GNU General Public License version 2 as
8# published by the Free Software Foundation.
9#
10# This program is distributed in the hope that it will be useful,
11# but WITHOUT ANY WARRANTY; without even the implied warranty of
12# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13# GNU General Public License for more details.
14#
15# You should have received a copy of the GNU General Public License along
16# with this program; if not, write to the Free Software Foundation, Inc.,
17# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
18
19import os, sys
20import bb, bb.data
21
def getfields(line):
    """Split one manifest line into a dict keyed by the fixed field order.

    Missing trailing fields and fields given as '-' stay None. Returns None
    for an empty line. FIX: the py2-only xrange index loops are replaced
    with dict.fromkeys and zip (zip truncates exactly like the old
    IndexError-swallowing loop did).
    """
    fieldmap = ( "pkg", "src", "dest", "type", "mode", "uid", "gid", "major", "minor", "start", "inc", "count" )

    if not line:
        return None

    splitline = line.split()
    if not splitline:
        return None

    fields = dict.fromkeys(fieldmap)
    for key, value in zip(fieldmap, splitline):
        # '-' is the manifest's explicit "no value" marker
        if value != '-':
            fields[key] = value
    return fields
43
def parse(mfile, d):
    """Parse an open manifest file into a list of field dicts.

    Comment lines and lines getfields() cannot interpret are skipped.
    """
    manifest = []
    for rawline in mfile:
        if rawline.startswith("#"):
            continue
        fields = getfields(rawline)
        if fields:
            manifest.append(fields)
    return manifest
57
def emit(func, manifest, d):
    """Render every manifest entry through emit_line() and concatenate the
    results, one command per line; entries that emit nothing are skipped."""
    rendered = []
    for entry in manifest:
        text = emit_line(func, entry, d)
        if text:
            rendered.append(text + "\n")
    return "".join(rendered)
68
def mangle(func, line, d):
    """Return a copy of manifest entry *line* with src/dest rewritten for
    *func* (do_install, do_populate or do_stage), or None when the entry
    does not apply to that function.

    FIX: the function name was compared with 'is' (object identity), which
    only worked by accident of CPython string interning; replaced with '=='.
    """
    import copy
    newline = copy.copy(line)
    src = bb.data.expand(newline["src"], d)

    # Relative sources live under the work directory
    if src:
        if not os.path.isabs(src):
            src = "${WORKDIR}/" + src

    dest = newline["dest"]
    if not dest:
        return

    if dest.startswith("/"):
        dest = dest[1:]

    if func == "do_install":
        dest = "${D}/" + dest

    elif func == "do_populate":
        dest = "${WORKDIR}/install/" + newline["pkg"] + "/" + dest

    elif func == "do_stage":
        # Map install locations onto their staging equivalents
        varmap = {}
        varmap["${bindir}"] = "${STAGING_DIR}/${HOST_SYS}/bin"
        varmap["${libdir}"] = "${STAGING_DIR}/${HOST_SYS}/lib"
        varmap["${includedir}"] = "${STAGING_DIR}/${HOST_SYS}/include"
        varmap["${datadir}"] = "${STAGING_DATADIR}"

        matched = 0
        for key in varmap.keys():
            if dest.startswith(key):
                dest = varmap[key] + "/" + dest[len(key):]
                matched = 1
        if not matched:
            # Destination outside the known staging areas: not stageable
            return
    else:
        # Unknown function: entry does not apply
        return

    newline["src"] = src
    newline["dest"] = dest
    return newline
113
def emit_line(func, line, d):
    """Translate one manifest entry into an install(1) command string for
    *func*; returns None when mangle() rejects the entry or a file entry has
    no source, and "" for unknown entry types.

    FIXES: field values were compared with 'is' — these strings come from
    str.split() and are not guaranteed interned, so identity comparison is
    unreliable; replaced with '=='. Locals renamed to stop shadowing the
    'str' and 'type' builtins; pointless 'del' removed.
    """
    import copy
    entry = copy.deepcopy(line)
    entry = mangle(func, entry, d)
    if not entry:
        return None

    cmd = ""
    ftype = entry["type"]
    mode = entry["mode"]
    src = entry["src"]
    dest = entry["dest"]
    if ftype == "d":
        # Directory entry
        cmd = "install -d "
        if mode:
            cmd += "-m %s " % mode
        cmd += dest
    elif ftype == "f":
        # File entry: a trailing '/' destination means "into this directory"
        if not src:
            return None
        if dest.endswith("/"):
            cmd = "install -d "
            cmd += dest + "\n"
            cmd += "install "
        else:
            cmd = "install -D "
        if mode:
            cmd += "-m %s " % mode
        cmd += src + " " + dest
    return cmd
diff --git a/bitbake-dev/lib/bb/methodpool.py b/bitbake-dev/lib/bb/methodpool.py
deleted file mode 100644
index f43c4a0580..0000000000
--- a/bitbake-dev/lib/bb/methodpool.py
+++ /dev/null
@@ -1,84 +0,0 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4#
5# Copyright (C) 2006 Holger Hans Peter Freyther
6#
7# This program is free software; you can redistribute it and/or modify
8# it under the terms of the GNU General Public License version 2 as
9# published by the Free Software Foundation.
10#
11# This program is distributed in the hope that it will be useful,
12# but WITHOUT ANY WARRANTY; without even the implied warranty of
13# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14# GNU General Public License for more details.
15#
16# You should have received a copy of the GNU General Public License along
17# with this program; if not, write to the Free Software Foundation, Inc.,
18# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
19
20
21"""
22 What is a method pool?
23
24 BitBake has a global method scope where .bb, .inc and .bbclass
25 files can install methods. These methods are parsed from strings.
26 To avoid recompiling and executing these string we introduce
27 a method pool to do this task.
28
29 This pool will be used to compile and execute the functions. It
30 will be smart enough to
31"""
32
33from bb.utils import better_compile, better_exec
34from bb import error
35
36# A dict of modules we have handled
37# it is the number of .bbclasses + x in size
38_parsed_methods = { }
39_parsed_fns = { }
40
def insert_method(modulename, code, fn):
    """
    Add code of a module should be added. The methods
    will be simply added, no checking will be done
    """
    # Compile the snippet and execute it into the global method scope.
    comp = better_compile(code, "<bb>", fn )
    better_exec(comp, __builtins__, code, fn)

    # now some instrumentation
    # Walk the global names the compiled code references and record which
    # module first defined each, flagging cross-module duplicates.
    code = comp.co_names
    for name in code:
        if name in ['None', 'False']:
            # Constants appear in co_names on py2; they are not methods.
            continue
        elif name in _parsed_fns and not _parsed_fns[name] == modulename:
            error( "Error Method already seen: %s in' %s' now in '%s'" % (name, _parsed_fns[name], modulename))
        else:
            _parsed_fns[name] = modulename
58
def check_insert_method(modulename, code, fn):
    """
    Add the code if it wasnt added before. The module
    name will be used for that

    Variables:
    @modulename a short name e.g. base.bbclass
    @code The actual python code
    @fn The filename from the outer file

    FIX: the '_parsed_methods[modulename] = 1' marker was written after the
    early return, so a module was never actually recorded and every call
    re-inserted its code. The marker is now set when the module is first
    inserted.
    """
    if modulename not in _parsed_methods:
        _parsed_methods[modulename] = 1
        return insert_method(modulename, code, fn)
72
def parsed_module(modulename):
    """Report whether *modulename* has already been parsed."""
    if modulename in _parsed_methods:
        return True
    return False
78
79
def get_parsed_dict():
    """Shortcut accessor for the module-level parsed-methods dict."""
    return _parsed_methods
diff --git a/bitbake-dev/lib/bb/msg.py b/bitbake-dev/lib/bb/msg.py
deleted file mode 100644
index 3fcf7091be..0000000000
--- a/bitbake-dev/lib/bb/msg.py
+++ /dev/null
@@ -1,125 +0,0 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake 'msg' implementation
5
6Message handling infrastructure for bitbake
7
8"""
9
10# Copyright (C) 2006 Richard Purdie
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24
25import sys, bb
26from bb import event
27
# Per-domain debug levels, keyed by the Enum members below plus 'default';
# populated by set_debug_level() and raised by set_debug_domains().
debug_level = {}

# Global verbosity flag, toggled via set_verbose().
verbose = False

# The closed set of logging domains that messages can be filtered on.
domain = bb.utils.Enum(
    'Build',
    'Cache',
    'Collection',
    'Data',
    'Depends',
    'Fetcher',
    'Parsing',
    'PersistData',
    'Provider',
    'RunQueue',
    'TaskData',
    'Util')
45
46
class MsgBase(bb.event.Event):
    """Base class for messages"""

    def __init__(self, msg):
        # The message payload; handlers read this when presenting the event.
        self._message = msg
        event.Event.__init__(self)
53
# Concrete message event types, one per severity, fired by the helper
# functions in this module (debug/note/warn/error/fatal/plain).
class MsgDebug(MsgBase):
    """Debug Message"""

class MsgNote(MsgBase):
    """Note Message"""

class MsgWarn(MsgBase):
    """Warning Message"""

class MsgError(MsgBase):
    """Error Message"""

class MsgFatal(MsgBase):
    """Fatal Message"""

class MsgPlain(MsgBase):
    """General output"""
71
72#
73# Message control functions
74#
75
def set_debug_level(level):
    """Reset every logging domain (and the default) to the given level."""
    new_levels = {}
    for dom in bb.msg.domain:
        new_levels[dom] = level
    new_levels['default'] = level
    bb.msg.debug_level = new_levels
81
def set_verbose(level):
    # Store the verbosity flag on the module so all callers observe it.
    bb.msg.verbose = level
84
def set_debug_domains(domains):
    """Raise the debug level by one for each named domain, warning about
    names that do not match any known logging domain."""
    for wanted in domains:
        matched = False
        for ddomain in bb.msg.domain:
            if wanted == str(ddomain):
                bb.msg.debug_level[ddomain] += 1
                matched = True
        if not matched:
            bb.msg.warn(None, "Logging domain %s is not valid, ignoring" % wanted)
94
95#
96# Message handling functions
97#
98
def debug(level, domain, msg, fn = None):
    """Fire a MsgDebug event when the domain's debug level reaches *level*."""
    key = domain or 'default'
    if debug_level[key] >= level:
        bb.event.fire(MsgDebug(msg), None)
104
def note(level, domain, msg, fn = None):
    """Fire a MsgNote event; level-1 notes always fire, higher levels only
    in verbose mode or when the domain is being debugged."""
    key = domain or 'default'
    if level == 1 or verbose or debug_level[key] >= 1:
        bb.event.fire(MsgNote(msg), None)
110
def warn(domain, msg, fn = None):
    # Warnings are always delivered, regardless of domain debug level.
    bb.event.fire(MsgWarn(msg), None)
113
def error(domain, msg, fn = None):
    # Fire the event for UI handlers and also echo to stdout immediately.
    bb.event.fire(MsgError(msg), None)
    print 'ERROR: ' + msg
117
def fatal(domain, msg, fn = None):
    # Report the fatal message, echo it, and terminate with exit code 1.
    bb.event.fire(MsgFatal(msg), None)
    print 'FATAL: ' + msg
    sys.exit(1)
122
def plain(msg, fn = None):
    # Unfiltered, plain output event (no severity prefix, no filtering).
    bb.event.fire(MsgPlain(msg), None)
125
diff --git a/bitbake-dev/lib/bb/parse/__init__.py b/bitbake-dev/lib/bb/parse/__init__.py
deleted file mode 100644
index 5dd96c4136..0000000000
--- a/bitbake-dev/lib/bb/parse/__init__.py
+++ /dev/null
@@ -1,84 +0,0 @@
1"""
2BitBake Parsers
3
4File parsers for the BitBake build tools.
5
6"""
7
8
9# Copyright (C) 2003, 2004 Chris Larson
10# Copyright (C) 2003, 2004 Phil Blundell
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24#
25# Based on functions from the base bb module, Copyright 2003 Holger Schurig
26
27__all__ = [ 'ParseError', 'SkipPackage', 'cached_mtime', 'mark_dependency',
28 'supports', 'handle', 'init' ]
29handlers = []
30
31import bb, os
32
class ParseError(Exception):
    """Raised when a file cannot be parsed as BitBake metadata."""
    pass
35
class SkipPackage(Exception):
    """Raised by metadata to skip the package being parsed."""
    pass
38
# Cache of file path -> mtime, shared by the cached_mtime* helpers below.
__mtime_cache = {}

def cached_mtime(f):
    """
    Return the mtime of *f*, caching the result for later calls.
    Raises OSError if the file cannot be stat()ed (see
    cached_mtime_noerror for a non-raising variant).
    """
    # Membership test via "in" instead of the deprecated dict.has_key().
    if f not in __mtime_cache:
        __mtime_cache[f] = os.stat(f)[8]
    return __mtime_cache[f]
44
def cached_mtime_noerror(f):
    """
    Like cached_mtime() but returns 0 instead of raising when the file
    cannot be stat()ed.  Failures are not cached, so the stat is retried
    on the next call.
    """
    # Membership test via "in" instead of the deprecated dict.has_key().
    if f not in __mtime_cache:
        try:
            __mtime_cache[f] = os.stat(f)[8]
        except OSError:
            return 0
    return __mtime_cache[f]
52
def update_mtime(f):
    """Unconditionally refresh the cached mtime for *f* and return it."""
    mtime = os.stat(f)[8]
    __mtime_cache[f] = mtime
    return mtime
56
def mark_dependency(d, f):
    """Record *f* (as an absolute path, with its mtime) in the datastore's
    __depends list so changes to it trigger a reparse."""
    if f.startswith('./'):
        f = os.getcwd() + '/' + f[2:]
    deps = bb.data.getVar('__depends', d) or []
    deps.append((f, cached_mtime(f)))
    bb.data.setVar('__depends', deps, d)
63
def supports(fn, data):
    """Return 1 if any registered handler supports *fn*, else 0."""
    for candidate in handlers:
        if candidate['supports'](fn, data):
            return 1
    return 0
70
def handle(fn, data, include = 0):
    """Dispatch *fn* to the first registered handler that supports it;
    raise ParseError when no handler claims the file."""
    for candidate in handlers:
        if not candidate['supports'](fn, data):
            continue
        return candidate['handle'](fn, data, include)
    raise ParseError("%s is not a BitBake file" % fn)
77
def init(fn, data):
    # Initialise the first handler that claims support for fn.
    # NOTE(review): 'supports' is called here with a single argument while
    # supports()/handle() above pass (fn, data) — for the handlers
    # registered by this package that would raise TypeError; confirm
    # whether this init() path is ever exercised.
    for h in handlers:
        if h['supports'](fn):
            return h['init'](data)
82
83
84from parse_py import __version__, ConfHandler, BBHandler
diff --git a/bitbake-dev/lib/bb/parse/parse_py/BBHandler.py b/bitbake-dev/lib/bb/parse/parse_py/BBHandler.py
deleted file mode 100644
index 86fa18ebd2..0000000000
--- a/bitbake-dev/lib/bb/parse/parse_py/BBHandler.py
+++ /dev/null
@@ -1,410 +0,0 @@
1#!/usr/bin/env python
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4"""
5 class for handling .bb files
6
7 Reads a .bb file and obtains its metadata
8
9"""
10
11
12# Copyright (C) 2003, 2004 Chris Larson
13# Copyright (C) 2003, 2004 Phil Blundell
14#
15# This program is free software; you can redistribute it and/or modify
16# it under the terms of the GNU General Public License version 2 as
17# published by the Free Software Foundation.
18#
19# This program is distributed in the hope that it will be useful,
20# but WITHOUT ANY WARRANTY; without even the implied warranty of
21# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
22# GNU General Public License for more details.
23#
24# You should have received a copy of the GNU General Public License along
25# with this program; if not, write to the Free Software Foundation, Inc.,
26# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
27
28import re, bb, os, sys, time, string
29import bb.fetch, bb.build, bb.utils
30from bb import data, fetch, methodpool
31
32from ConfHandler import include, localpath, obtain, init
33from bb.parse import ParseError
34
# Compiled patterns for the .bb syntax elements recognised by feeder():
# shell/python function headers, inherit/addtask/addhandler directives,
# EXPORT_FUNCTIONS lines and python def blocks.
__func_start_regexp__ = re.compile( r"(((?P<py>python)|(?P<fr>fakeroot))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$" )
__inherit_regexp__ = re.compile( r"inherit\s+(.+)" )
__export_func_regexp__ = re.compile( r"EXPORT_FUNCTIONS\s+(.+)" )
__addtask_regexp__ = re.compile("addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*")
__addhandler_regexp__ = re.compile( r"addhandler\s+(.+)" )
__def_regexp__ = re.compile( r"def\s+(\w+).*:" )
__python_func_regexp__ = re.compile( r"(\s+.*)|(^$)" )
__word__ = re.compile(r"\S+")

# Mutable parser state shared between handle() and feeder().
__infunc__ = ""
__inpython__ = False
__body__ = []
__classname__ = ""
classes = [ None, ]

# We need to indicate EOF to the feeder. This code is so messy that
# factoring it out to a close_parse_file method is out of question.
# We will use the IN_PYTHON_EOF as an indicator to just close the method
#
# The two parts using it are tightly integrated anyway
IN_PYTHON_EOF = -9999999999999

# Shared map of already-parsed modules (see bb.methodpool).
__parsed_methods__ = methodpool.get_parsed_dict()
58
def supports(fn, d):
    """Accept .bb, .bbclass and .inc files."""
    localfn = localpath(fn, d)
    return localfn.endswith(".bb") or localfn.endswith(".bbclass") or localfn.endswith(".inc")
62
def inherit(files, d):
    """
    Inherit a list of .bbclass files into datastore d, skipping any
    class already recorded in the __inherit_cache variable.
    """
    __inherit_cache = data.getVar('__inherit_cache', d) or []
    # fn/lineno are placeholders used only in the debug message below.
    fn = ""
    lineno = 0
    files = data.expand(files, d)
    for file in files:
        # Bare class names are resolved under classes/<name>.bbclass.
        if file[0] != "/" and file[-8:] != ".bbclass":
            file = os.path.join('classes', '%s.bbclass' % file)

        if not file in __inherit_cache:
            bb.msg.debug(2, bb.msg.domain.Parsing, "BB %s:%d: inheriting %s" % (fn, lineno, file))
            __inherit_cache.append( file )
            data.setVar('__inherit_cache', __inherit_cache, d)
            include(fn, file, d, "inherit")
            # The include may itself have inherited classes; re-read the cache.
            __inherit_cache = data.getVar('__inherit_cache', d) or []
78
79
def finalise(fn, d):
    """
    Finish off parsing of recipe fn: expand keys, run the queued
    anonymous python functions, register event handlers and tasks, and
    fire RecipeParsed.
    """
    data.expandKeys(d)
    data.update_data(d)
    # Collect any queued anonymous-function bodies into one callable var.
    anonqueue = data.getVar("__anonqueue", d, 1) or []
    body = [x['content'] for x in anonqueue]
    flag = { 'python' : 1, 'func' : 1 }
    data.setVar("__anonfunc", "\n".join(body), d)
    data.setVarFlags("__anonfunc", flag, d)
    from bb import build
    try:
        # Run anonymous functions with a throwaway T, restoring it after.
        t = data.getVar('T', d)
        data.setVar('T', '${TMPDIR}/anonfunc/', d)
        anonfuncs = data.getVar('__BBANONFUNCS', d) or []
        code = ""
        for f in anonfuncs:
            code = code + " %s(d)\n" % f
        data.setVar("__anonfunc", code, d)
        build.exec_func("__anonfunc", d)
        data.delVar('T', d)
        if t:
            data.setVar('T', t, d)
    except Exception, e:
        bb.msg.debug(1, bb.msg.domain.Parsing, "Exception when executing anonymous function: %s" % e)
        raise
    data.delVar("__anonqueue", d)
    data.delVar("__anonfunc", d)
    data.update_data(d)

    all_handlers = {}
    for var in data.getVar('__BBHANDLERS', d) or []:
        # try to add the handler
        handler = data.getVar(var,d)
        bb.event.register(var, handler)

    tasklist = data.getVar('__BBTASKS', d) or []
    bb.build.add_tasks(tasklist, d)

    bb.event.fire(bb.event.RecipeParsed(fn), d)
118
119
def handle(fn, d, include = 0):
    """
    Parse the .bb/.bbclass/.inc file fn into datastore d, feeding it
    line by line through feeder().  For a top-level parse (include == 0)
    the recipe is finalised; when BBCLASSEXTEND is set a dict of
    datastores (one per extension class) is returned instead of d.
    """
    global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __residue__
    # Reset the module-level parser state for this file.
    __body__ = []
    __infunc__ = ""
    __classname__ = ""
    __residue__ = []

    if include == 0:
        bb.msg.debug(2, bb.msg.domain.Parsing, "BB " + fn + ": handle(data)")
    else:
        bb.msg.debug(2, bb.msg.domain.Parsing, "BB " + fn + ": handle(data, include)")

    (root, ext) = os.path.splitext(os.path.basename(fn))
    base_name = "%s%s" % (root,ext)
    init(d)

    if ext == ".bbclass":
        # Track the class-nesting stack and remember that it was inherited.
        __classname__ = root
        classes.append(__classname__)
        __inherit_cache = data.getVar('__inherit_cache', d) or []
        if not fn in __inherit_cache:
            __inherit_cache.append(fn)
            data.setVar('__inherit_cache', __inherit_cache, d)

    if include != 0:
        oldfile = data.getVar('FILE', d)
    else:
        oldfile = None

    fn = obtain(fn, d)
    bbpath = (data.getVar('BBPATH', d, 1) or '').split(':')
    if not os.path.isabs(fn):
        # Search the BBPATH entries for the first readable candidate.
        f = None
        for p in bbpath:
            j = os.path.join(p, fn)
            if os.access(j, os.R_OK):
                abs_fn = j
                f = open(j, 'r')
                break
        if f is None:
            raise IOError("file %s not found" % fn)
    else:
        f = open(fn,'r')
        abs_fn = fn

    if include:
        bb.parse.mark_dependency(d, abs_fn)

    if ext != ".bbclass":
        data.setVar('FILE', fn, d)

    lineno = 0
    while 1:
        lineno = lineno + 1
        s = f.readline()
        if not s: break
        s = s.rstrip()
        feeder(lineno, s, fn, base_name, d)
    if __inpython__:
        # add a blank line to close out any python definition
        feeder(IN_PYTHON_EOF, "", fn, base_name, d)
    if ext == ".bbclass":
        classes.remove(__classname__)
    else:
        if include == 0:
            # Top-level recipe: finalise it, once per BBCLASSEXTEND class.
            multi = data.getVar('BBCLASSEXTEND', d, 1)
            if multi:
                based = bb.data.createCopy(d)
            else:
                based = d
            try:
                finalise(fn, based)
            except bb.parse.SkipPackage:
                bb.data.setVar("__SKIPPED", True, based)
            darray = {"": based}

            for cls in (multi or "").split():
                pn = data.getVar('PN', d, True)
                based = bb.data.createCopy(d)
                data.setVar('PN', pn + '-' + cls, based)
                inherit([cls], based)
                try:
                    finalise(fn, based)
                except bb.parse.SkipPackage:
                    bb.data.setVar("__SKIPPED", True, based)
                darray[cls] = based
            return darray

    # NOTE(review): bbpath is discarded after this point, so this pop()
    # looks vestigial — confirm before removing.
    bbpath.pop(0)
    if oldfile:
        bb.data.setVar("FILE", oldfile, d)

    # we have parsed the bb class now
    if ext == ".bbclass" or ext == ".inc":
        __parsed_methods__[base_name] = 1

    return d
217
def feeder(lineno, s, fn, root, d):
    """
    Consume one (already rstripped) line of a .bb file, updating the
    module-level parser state and datastore d.  Lines not matching any
    .bb construct fall through to ConfHandler.feeder().
    """
    global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __def_regexp__, __python_func_regexp__, __inpython__,__infunc__, __body__, classes, bb, __residue__
    # Inside a shell/python function body: accumulate until closing '}'.
    if __infunc__:
        if s == '}':
            __body__.append('')
            if __infunc__ == "__anonymous":
                # Anonymous functions get a synthetic unique name and are
                # queued in __BBANONFUNCS for execution at finalise time.
                funcname = ("__anon_%s_%s" % (lineno, fn.translate(string.maketrans('/.+-', '____'))))
                if not funcname in methodpool._parsed_fns:
                    text = "def %s(d):\n" % (funcname) + '\n'.join(__body__)
                    methodpool.insert_method(funcname, text, fn)
                anonfuncs = data.getVar('__BBANONFUNCS', d) or []
                anonfuncs.append(funcname)
                data.setVar('__BBANONFUNCS', anonfuncs, d)
            else:
                data.setVarFlag(__infunc__, "func", 1, d)
                data.setVar(__infunc__, '\n'.join(__body__), d)
            __infunc__ = ""
            __body__ = []
        else:
            __body__.append(s)
        return

    # Inside a top-level python 'def' block: accumulate indented lines.
    if __inpython__:
        m = __python_func_regexp__.match(s)
        if m and lineno != IN_PYTHON_EOF:
            __body__.append(s)
            return
        else:
            # Note we will add root to parsedmethods after having parsed
            # 'this' file. This means we will not parse methods from
            # bb classes twice
            if not root in __parsed_methods__:
                text = '\n'.join(__body__)
                methodpool.insert_method( root, text, fn )
            __body__ = []
            __inpython__ = False

            if lineno == IN_PYTHON_EOF:
                return

# fall through

    if s == '' or s[0] == '#': return          # skip comments and empty lines

    # Line continuations: stash the partial line until it is complete.
    if s[-1] == '\\':
        __residue__.append(s[:-1])
        return

    s = "".join(__residue__) + s
    __residue__ = []

    m = __func_start_regexp__.match(s)
    if m:
        __infunc__ = m.group("func") or "__anonymous"
        key = __infunc__
        if data.getVar(key, d):
            # clean up old version of this piece of metadata, as its
            # flags could cause problems
            data.setVarFlag(key, 'python', None, d)
            data.setVarFlag(key, 'fakeroot', None, d)
        if m.group("py") is not None:
            data.setVarFlag(key, "python", "1", d)
        else:
            data.delVarFlag(key, "python", d)
        if m.group("fr") is not None:
            data.setVarFlag(key, "fakeroot", "1", d)
        else:
            data.delVarFlag(key, "fakeroot", d)
        return

    m = __def_regexp__.match(s)
    if m:
        __body__.append(s)
        __inpython__ = True
        return

    m = __export_func_regexp__.match(s)
    if m:
        # EXPORT_FUNCTIONS: create class-prefixed wrappers for each name.
        fns = m.group(1)
        n = __word__.findall(fns)
        for f in n:
            allvars = []
            allvars.append(f)
            allvars.append(classes[-1] + "_" + f)

            vars = [[ allvars[0], allvars[1] ]]
            if len(classes) > 1 and classes[-2] is not None:
                allvars.append(classes[-2] + "_" + f)
                vars = []
                vars.append([allvars[2], allvars[1]])
                vars.append([allvars[0], allvars[2]])

            for (var, calledvar) in vars:
                if data.getVar(var, d) and not data.getVarFlag(var, 'export_func', d):
                    continue

                if data.getVar(var, d):
                    data.setVarFlag(var, 'python', None, d)
                    data.setVarFlag(var, 'func', None, d)

                for flag in [ "func", "python" ]:
                    if data.getVarFlag(calledvar, flag, d):
                        data.setVarFlag(var, flag, data.getVarFlag(calledvar, flag, d), d)
                for flag in [ "dirs" ]:
                    if data.getVarFlag(var, flag, d):
                        data.setVarFlag(calledvar, flag, data.getVarFlag(var, flag, d), d)

                if data.getVarFlag(calledvar, "python", d):
                    data.setVar(var, "\tbb.build.exec_func('" + calledvar + "', d)\n", d)
                else:
                    data.setVar(var, "\t" + calledvar + "\n", d)
                data.setVarFlag(var, 'export_func', '1', d)

        return

    m = __addtask_regexp__.match(s)
    if m:
        func = m.group("func")
        before = m.group("before")
        after = m.group("after")
        if func is None:
            return
        if func[:3] != "do_":
            var = "do_" + func
        # NOTE(review): when func already starts with "do_", var is never
        # assigned and the setVarFlag below would raise NameError —
        # confirm whether addtask is ever used with an explicit do_ name.

        data.setVarFlag(var, "task", 1, d)

        bbtasks = data.getVar('__BBTASKS', d) or []
        if not var in bbtasks:
            bbtasks.append(var)
        data.setVar('__BBTASKS', bbtasks, d)

        existing = data.getVarFlag(var, "deps", d) or []
        if after is not None:
            # set up deps for function
            for entry in after.split():
                if entry not in existing:
                    existing.append(entry)
        data.setVarFlag(var, "deps", existing, d)
        if before is not None:
            # set up things that depend on this func
            for entry in before.split():
                existing = data.getVarFlag(entry, "deps", d) or []
                if var not in existing:
                    data.setVarFlag(entry, "deps", [var] + existing, d)
        return

    m = __addhandler_regexp__.match(s)
    if m:
        fns = m.group(1)
        hs = __word__.findall(fns)
        bbhands = data.getVar('__BBHANDLERS', d) or []
        for h in hs:
            bbhands.append(h)
            data.setVarFlag(h, "handler", 1, d)
        data.setVar('__BBHANDLERS', bbhands, d)
        return

    m = __inherit_regexp__.match(s)
    if m:

        files = m.group(1)
        n = __word__.findall(files)
        inherit(n, d)
        return

    # Anything else is handled by the configuration-file parser.
    from bb.parse import ConfHandler
    return ConfHandler.feeder(lineno, s, fn, d)
386
# Memoization cache for vars_from_file: recipe path -> [pn, pv, pr].
__pkgsplit_cache__={}
def vars_from_file(mypkg, d):
    """
    Derive the default [name, version, revision] triple from a recipe
    filename of the form name[_version[_revision]].ext, padding missing
    fields with None.  Raises ParseError when the basename contains more
    than two underscores.  Results are memoized per path.
    """
    if not mypkg:
        return (None, None, None)
    if mypkg in __pkgsplit_cache__:
        return __pkgsplit_cache__[mypkg]

    myfile = os.path.splitext(os.path.basename(mypkg))
    parts = myfile[0].split('_')
    if len(parts) > 3:
        # Validate *before* caching; the original stored the entry first
        # and then raised, so a bad filename poisoned the cache and later
        # calls returned the invalid, unpadded split instead of raising.
        raise ParseError("Unable to generate default variables from the filename: %s (too many underscores)" % mypkg)
    parts.extend([None] * (3 - len(parts)))
    __pkgsplit_cache__[mypkg] = parts
    return parts
406
# Register this parser with bb.parse so supports()/handle() dispatch here;
# drop the local alias afterwards to keep the module namespace clean.
from bb.parse import handlers
handlers.append({'supports': supports, 'handle': handle, 'init': init})
del handlers
diff --git a/bitbake-dev/lib/bb/parse/parse_py/ConfHandler.py b/bitbake-dev/lib/bb/parse/parse_py/ConfHandler.py
deleted file mode 100644
index 23316ada58..0000000000
--- a/bitbake-dev/lib/bb/parse/parse_py/ConfHandler.py
+++ /dev/null
@@ -1,241 +0,0 @@
1#!/usr/bin/env python
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4"""
5 class for handling configuration data files
6
7 Reads a .conf file and obtains its metadata
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12# Copyright (C) 2003, 2004 Phil Blundell
13#
14# This program is free software; you can redistribute it and/or modify
15# it under the terms of the GNU General Public License version 2 as
16# published by the Free Software Foundation.
17#
18# This program is distributed in the hope that it will be useful,
19# but WITHOUT ANY WARRANTY; without even the implied warranty of
20# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21# GNU General Public License for more details.
22#
23# You should have received a copy of the GNU General Public License along
24# with this program; if not, write to the Free Software Foundation, Inc.,
25# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
26
27import re, bb.data, os, sys
28from bb.parse import ParseError
29
# Previous, simpler assignment regexp kept for reference:
#__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}]+)\s*(?P<colon>:)?(?P<ques>\?)?=\s*(?P<apo>['\"]?)(?P<value>.*)(?P=apo)$")
# Matches variable assignments including [flag] syntax and the
# :=, ?=, +=, =+, =., .= and = operators (see feeder() below).
__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}/]+)(\[(?P<flag>[a-zA-Z0-9\-_+.]+)\])?\s*((?P<colon>:=)|(?P<ques>\?=)|(?P<append>\+=)|(?P<prepend>=\+)|(?P<predot>=\.)|(?P<postdot>\.=)|=)\s*(?P<apo>['\"]?)(?P<value>.*)(?P=apo)$")
__include_regexp__ = re.compile( r"include\s+(.+)" )
__require_regexp__ = re.compile( r"require\s+(.+)" )
__export_regexp__ = re.compile( r"export\s+(.+)" )
35
def init(data):
    # Ensure TOPDIR is set (defaulting to the current directory) and
    # derive a fallback BBPATH from the installed bitbake distribution.
    topdir = bb.data.getVar('TOPDIR', data)
    if not topdir:
        topdir = os.getcwd()
        bb.data.setVar('TOPDIR', topdir, data)
    if not bb.data.getVar('BBPATH', data):
        from pkg_resources import Requirement, resource_filename
        bitbake = Requirement.parse("bitbake")
        datadir = resource_filename(bitbake, "../share/bitbake")
        basedir = resource_filename(bitbake, "..")
        bb.data.setVar('BBPATH', '%s:%s:%s' % (topdir, datadir, basedir), data)
47
48
def supports(fn, d):
    """Accept only .conf files."""
    return localpath(fn, d).endswith(".conf")
51
def localpath(fn, d):
    """Map a fetcher URL to its local path; plain filesystem paths and
    malformed URLs are returned unchanged."""
    if os.path.exists(fn):
        return fn

    if "://" not in fn:
        return fn

    localfn = None
    try:
        localfn = bb.fetch.localpath(fn, d, False)
    except bb.MalformedUrl:
        pass

    return localfn or fn
68
def obtain(fn, data):
    """
    If fn is a remote URL, fetch it into DL_DIR and return the local
    path; plain paths (or any fetch failure) return the local name
    unchanged — fetching here is strictly best-effort.
    """
    import sys, bb
    fn = bb.data.expand(fn, data)
    localfn = bb.data.expand(localpath(fn, data), data)

    if localfn != fn:
        # fn is remote: try to download it, logging (not raising) on failure.
        dldir = bb.data.getVar('DL_DIR', data, 1)
        if not dldir:
            bb.msg.debug(1, bb.msg.domain.Parsing, "obtain: DL_DIR not defined")
            return localfn
        bb.mkdirhier(dldir)
        try:
            bb.fetch.init([fn], data)
        except bb.fetch.NoMethodError:
            (type, value, traceback) = sys.exc_info()
            bb.msg.debug(1, bb.msg.domain.Parsing, "obtain: no method: %s" % value)
            return localfn

        try:
            bb.fetch.go(data)
        except bb.fetch.MissingParameterError:
            (type, value, traceback) = sys.exc_info()
            bb.msg.debug(1, bb.msg.domain.Parsing, "obtain: missing parameters: %s" % value)
            return localfn
        except bb.fetch.FetchError:
            (type, value, traceback) = sys.exc_info()
            bb.msg.debug(1, bb.msg.domain.Parsing, "obtain: failed: %s" % value)
            return localfn
    return localfn
98
99
def include(oldfn, fn, data, error_out):
    """
    Parse fn as an included file of oldfn.

    error_out: if truthy, a ParseError is raised when the file cannot
    be handled (the value itself is interpolated into the error
    message, e.g. "inherit" or "include required"); if False, a missing
    file is only logged.
    """
    if oldfn == fn: # prevent infinite recursion
        return None

    import bb
    fn = bb.data.expand(fn, data)
    oldfn = bb.data.expand(oldfn, data)

    if not os.path.isabs(fn):
        # Resolve relative includes against the including file's
        # directory first, then the regular BBPATH.
        dname = os.path.dirname(oldfn)
        bbpath = "%s:%s" % (dname, bb.data.getVar("BBPATH", data, 1))
        abs_fn = bb.which(bbpath, fn)
        if abs_fn:
            fn = abs_fn

    from bb.parse import handle
    try:
        ret = handle(fn, data, True)
    except IOError:
        if error_out:
            raise ParseError("Could not %(error_out)s file %(fn)s" % vars() )
        bb.msg.debug(2, bb.msg.domain.Parsing, "CONF file '%s' not found" % fn)
126
def handle(fn, data, include = 0):
    """
    Parse the configuration file fn into datastore data, feeding each
    logical (continuation-joined) line through feeder().  Returns data.
    """
    if include:
        inc_string = "including"
    else:
        inc_string = "reading"
    init(data)

    if include == 0:
        oldfile = None
    else:
        oldfile = bb.data.getVar('FILE', data)

    fn = obtain(fn, data)
    if not os.path.isabs(fn):
        f = None
        # NOTE(review): the [] fallback looks wrong — bbpath.split(":")
        # below assumes a string and would raise AttributeError on [];
        # confirm BBPATH is always set by this point.
        bbpath = bb.data.getVar("BBPATH", data, 1) or []
        for p in bbpath.split(":"):
            currname = os.path.join(p, fn)
            if os.access(currname, os.R_OK):
                f = open(currname, 'r')
                abs_fn = currname
                bb.msg.debug(2, bb.msg.domain.Parsing, "CONF %s %s" % (inc_string, currname))
                break
        if f is None:
            raise IOError("file '%s' not found" % fn)
    else:
        f = open(fn,'r')
        bb.msg.debug(1, bb.msg.domain.Parsing, "CONF %s %s" % (inc_string,fn))
        abs_fn = fn

    if include:
        bb.parse.mark_dependency(data, abs_fn)

    lineno = 0
    bb.data.setVar('FILE', fn, data)
    while 1:
        lineno = lineno + 1
        s = f.readline()
        if not s: break
        w = s.strip()
        if not w: continue # skip empty lines
        s = s.rstrip()
        if s[0] == '#': continue # skip comments
        # Join backslash-continued lines into one logical line.
        while s[-1] == '\\':
            s2 = f.readline()[:-1].strip()
            lineno = lineno + 1
            s = s[:-1] + s2
        feeder(lineno, s, fn, data)

    if oldfile:
        bb.data.setVar('FILE', oldfile, data)
    return data
179
def feeder(lineno, s, fn, data):
    """
    Interpret one logical configuration line: a variable assignment
    (with optional flag and :=/?=/+=/=+/=./.= operators), an include,
    a require, or an export.  Anything else raises ParseError.
    """
    # Read either the flag value or the variable value, matching
    # whichever the assignment on this line targets.
    def getFunc(groupd, key, data):
        if 'flag' in groupd and groupd['flag'] != None:
            return bb.data.getVarFlag(key, groupd['flag'], data)
        else:
            return bb.data.getVar(key, data)

    m = __config_regexp__.match(s)
    if m:
        groupd = m.groupdict()
        key = groupd["var"]
        if "exp" in groupd and groupd["exp"] != None:
            bb.data.setVarFlag(key, "export", 1, data)
        if "ques" in groupd and groupd["ques"] != None:
            # ?= : only assign when no value exists yet.
            val = getFunc(groupd, key, data)
            if val == None:
                val = groupd["value"]
        elif "colon" in groupd and groupd["colon"] != None:
            # := : immediate expansion against a copied datastore.
            e = data.createCopy()
            bb.data.update_data(e)
            val = bb.data.expand(groupd["value"], e)
        elif "append" in groupd and groupd["append"] != None:
            val = "%s %s" % ((getFunc(groupd, key, data) or ""), groupd["value"])
        elif "prepend" in groupd and groupd["prepend"] != None:
            val = "%s %s" % (groupd["value"], (getFunc(groupd, key, data) or ""))
        elif "postdot" in groupd and groupd["postdot"] != None:
            val = "%s%s" % ((getFunc(groupd, key, data) or ""), groupd["value"])
        elif "predot" in groupd and groupd["predot"] != None:
            val = "%s%s" % (groupd["value"], (getFunc(groupd, key, data) or ""))
        else:
            val = groupd["value"]
        if 'flag' in groupd and groupd['flag'] != None:
            bb.msg.debug(3, bb.msg.domain.Parsing, "setVarFlag(%s, %s, %s, data)" % (key, groupd['flag'], val))
            bb.data.setVarFlag(key, groupd['flag'], val, data)
        else:
            bb.data.setVar(key, val, data)
        return

    m = __include_regexp__.match(s)
    if m:
        s = bb.data.expand(m.group(1), data)
        bb.msg.debug(3, bb.msg.domain.Parsing, "CONF %s:%d: including %s" % (fn, lineno, s))
        include(fn, s, data, False)
        return

    m = __require_regexp__.match(s)
    if m:
        # require: like include but a missing file is a hard error.
        s = bb.data.expand(m.group(1), data)
        include(fn, s, data, "include required")
        return

    m = __export_regexp__.match(s)
    if m:
        bb.data.setVarFlag(m.group(1), "export", 1, data)
        return

    raise ParseError("%s:%d: unparsed line: '%s'" % (fn, lineno, s));
237
# Register this parser with bb.parse so supports()/handle() dispatch here;
# drop the local alias afterwards to keep the module namespace clean.
from bb.parse import handlers
handlers.append({'supports': supports, 'handle': handle, 'init': init})
del handlers
diff --git a/bitbake-dev/lib/bb/parse/parse_py/__init__.py b/bitbake-dev/lib/bb/parse/parse_py/__init__.py
deleted file mode 100644
index 9e0e00adda..0000000000
--- a/bitbake-dev/lib/bb/parse/parse_py/__init__.py
+++ /dev/null
@@ -1,33 +0,0 @@
1#!/usr/bin/env python
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4"""
5BitBake Parsers
6
7File parsers for the BitBake build tools.
8
9"""
10
11# Copyright (C) 2003, 2004 Chris Larson
12# Copyright (C) 2003, 2004 Phil Blundell
13#
14# This program is free software; you can redistribute it and/or modify
15# it under the terms of the GNU General Public License version 2 as
16# published by the Free Software Foundation.
17#
18# This program is distributed in the hope that it will be useful,
19# but WITHOUT ANY WARRANTY; without even the implied warranty of
20# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21# GNU General Public License for more details.
22#
23# You should have received a copy of the GNU General Public License along
24# with this program; if not, write to the Free Software Foundation, Inc.,
25# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
26#
27# Based on functions from the base bb module, Copyright 2003 Holger Schurig
28__version__ = '1.0'
29
30__all__ = [ 'ConfHandler', 'BBHandler']
31
32import ConfHandler
33import BBHandler
diff --git a/bitbake-dev/lib/bb/persist_data.py b/bitbake-dev/lib/bb/persist_data.py
deleted file mode 100644
index bc4045fe85..0000000000
--- a/bitbake-dev/lib/bb/persist_data.py
+++ /dev/null
@@ -1,121 +0,0 @@
1# BitBake Persistent Data Store
2#
3# Copyright (C) 2007 Richard Purdie
4#
5# This program is free software; you can redistribute it and/or modify
6# it under the terms of the GNU General Public License version 2 as
7# published by the Free Software Foundation.
8#
9# This program is distributed in the hope that it will be useful,
10# but WITHOUT ANY WARRANTY; without even the implied warranty of
11# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12# GNU General Public License for more details.
13#
14# You should have received a copy of the GNU General Public License along
15# with this program; if not, write to the Free Software Foundation, Inc.,
16# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
17
18import bb, os
19
# Prefer the stdlib sqlite3 module (Python 2.5+), falling back to the
# external pysqlite2 package; abort bitbake if neither is available.
try:
    import sqlite3
except ImportError:
    try:
        from pysqlite2 import dbapi2 as sqlite3
    except ImportError:
        bb.msg.fatal(bb.msg.domain.PersistData, "Importing sqlite3 and pysqlite2 failed, please install one of them. Python 2.5 or a 'python-pysqlite2' like package is likely to be what you need.")

# Require the underlying sqlite library to be at least 3.3.0.
sqlversion = sqlite3.sqlite_version_info
if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
    bb.msg.fatal(bb.msg.domain.PersistData, "sqlite3 version 3.3.0 or later is required.")
31
class PersistData:
    """
    BitBake Persistent Data Store

    Used to store data in a central location such that other threads/tasks can
    access them at some future date.

    The "domain" is used as a key to isolate each data pool and in this
    implementation corresponds to an SQL table. The SQL table consists of a
    simple key and value pair.

    Why sqlite? It handles all the locking issues for us.

    NOTE(review): domain names are interpolated directly into the SQL
    (sqlite cannot parameterise table names), so callers must only pass
    trusted identifiers.  Key/value parameters are properly bound.
    """
    def __init__(self, d):
        # Database location: PERSISTENT_DIR, falling back to CACHE.
        self.cachedir = bb.data.getVar("PERSISTENT_DIR", d, True) or bb.data.getVar("CACHE", d, True)
        if self.cachedir in [None, '']:
            bb.msg.fatal(bb.msg.domain.PersistData, "Please set the 'PERSISTENT_DIR' or 'CACHE' variable.")
        try:
            os.stat(self.cachedir)
        except OSError:
            bb.mkdirhier(self.cachedir)

        self.cachefile = os.path.join(self.cachedir,"bb_persist_data.sqlite3")
        bb.msg.debug(1, bb.msg.domain.PersistData, "Using '%s' as the persistent data cache" % self.cachefile)

        self.connection = sqlite3.connect(self.cachefile, timeout=5, isolation_level=None)

    def addDomain(self, domain):
        """
        Should be called before any domain is used
        Creates it if it doesn't exist.
        """
        # NOTE(review): executes directly rather than via _execute(), so
        # it does not share the database-locked retry loop below.
        self.connection.execute("CREATE TABLE IF NOT EXISTS %s(key TEXT, value TEXT);" % domain)

    def delDomain(self, domain):
        """
        Removes a domain and all the data it contains
        """
        self.connection.execute("DROP TABLE IF EXISTS %s;" % domain)

    def getKeyValues(self, domain):
        """
        Return a list of key + value pairs for a domain
        """
        ret = {}
        data = self.connection.execute("SELECT key, value from %s;" % domain)
        for row in data:
            ret[str(row[0])] = str(row[1])

        return ret

    def getValue(self, domain, key):
        """
        Return the value of a key for a domain
        (None implicitly when the key is absent).
        """
        data = self.connection.execute("SELECT * from %s where key=?;" % domain, [key])
        for row in data:
            return row[1]

    def setValue(self, domain, key, value):
        """
        Sets the value of a key for a domain
        (update when the key exists, insert otherwise).
        """
        data = self.connection.execute("SELECT * from %s where key=?;" % domain, [key])
        rows = 0
        for row in data:
            rows = rows + 1
        if rows:
            self._execute("UPDATE %s SET value=? WHERE key=?;" % domain, [value, key])
        else:
            self._execute("INSERT into %s(key, value) values (?, ?);" % domain, [key, value])

    def delValue(self, domain, key):
        """
        Deletes a key/value pair
        """
        self._execute("DELETE from %s where key=?;" % domain, [key])

    def _execute(self, *query):
        # Retry indefinitely while another process holds the database
        # lock; any other OperationalError propagates.
        while True:
            try:
                self.connection.execute(*query)
                return
            except sqlite3.OperationalError, e:
                if 'database is locked' in str(e):
                    continue
                raise
119
120
121
diff --git a/bitbake-dev/lib/bb/providers.py b/bitbake-dev/lib/bb/providers.py
deleted file mode 100644
index 8617251ca3..0000000000
--- a/bitbake-dev/lib/bb/providers.py
+++ /dev/null
@@ -1,327 +0,0 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3#
4# Copyright (C) 2003, 2004 Chris Larson
5# Copyright (C) 2003, 2004 Phil Blundell
6# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
7# Copyright (C) 2005 Holger Hans Peter Freyther
8# Copyright (C) 2005 ROAD GmbH
9# Copyright (C) 2006 Richard Purdie
10#
11# This program is free software; you can redistribute it and/or modify
12# it under the terms of the GNU General Public License version 2 as
13# published by the Free Software Foundation.
14#
15# This program is distributed in the hope that it will be useful,
16# but WITHOUT ANY WARRANTY; without even the implied warranty of
17# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18# GNU General Public License for more details.
19#
20# You should have received a copy of the GNU General Public License along
21# with this program; if not, write to the Free Software Foundation, Inc.,
22# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
23
24import re
25from bb import data, utils
26import bb
27
class NoProvider(Exception):
    """Exception raised when no provider of a build dependency can be found"""
class NoRProvider(Exception):
    """Exception raised when no provider of a runtime dependency can be found"""
def sortPriorities(pn, dataCache, pkg_pn = None):
    """
    Reorder pkg_pn by file priority and default preference.

    Returns a list of lists of provider filenames: the outer list runs from
    highest bbfile priority to lowest, and each inner list is ordered by
    descending DEFAULT_PREFERENCE.
    """

    if not pkg_pn:
        pkg_pn = dataCache.pkg_pn

    files = pkg_pn[pn]

    # Bucket the files by (bbfile priority, default preference)
    priorities = {}
    for f in files:
        priority = dataCache.bbfile_priority[f]
        preference = dataCache.pkg_dp[f]
        if priority not in priorities:
            priorities[priority] = {}
        if preference not in priorities[priority]:
            priorities[priority][preference] = []
        priorities[priority][preference].append(f)

    # sorted() with reverse= replaces the Python-2-only cmp-style
    # list.sort(lambda a, b: ...) the original used; ordering is identical.
    tmp_pn = []
    for pri in sorted(priorities, reverse=True):
        tmp_pref = []
        for pref in sorted(priorities[pri], reverse=True):
            tmp_pref.extend(priorities[pri][pref])
        tmp_pn.append(tmp_pref)

    return tmp_pn
def preferredVersionMatch(pe, pv, pr, preferred_e, preferred_v, preferred_r):
    """
    Check whether the version (pe, pv, pr) matches the preferred one.

    A preferred epoch or revision of None acts as a wildcard.  A preferred
    version ending in '%' matches any pv starting with the part before the
    '%'; otherwise pv must match exactly.
    """
    # Guard clauses: revision and epoch must match unless unconstrained
    if preferred_r is not None and pr != preferred_r:
        return False
    if preferred_e is not None and pe != preferred_e:
        return False
    if pv == preferred_v:
        return True
    if preferred_v is not None and preferred_v.endswith('%'):
        return pv.startswith(preferred_v[:-1])
    return False
def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
    """
    Find the first provider in pkg_pn with a PREFERRED_VERSION set.

    pkg_pn is a list of lists of filenames as produced by sortPriorities;
    the first file whose (pe, pv, pr) matches the preference wins.
    Returns (preferred_ver, preferred_file), both None when no preference
    is set or no provider satisfies it.
    """

    preferred_file = None
    preferred_ver = None

    # Build a copy of the config data with pn-specific OVERRIDES applied so
    # PREFERRED_VERSION_<pn> can itself be overridden per-package.
    localdata = data.createCopy(cfgData)
    bb.data.setVar('OVERRIDES', "pn-%s:%s:%s" % (pn, pn, data.getVar('OVERRIDES', localdata)), localdata)
    bb.data.update_data(localdata)

    preferred_v = bb.data.getVar('PREFERRED_VERSION_%s' % pn, localdata, True)
    if preferred_v:
        # Split "epoch:version_revision" into its parts.
        # NOTE(review): '(.*)' is greedy, so the '(_.*)*' group can never
        # match - an "_r0" style suffix stays inside preferred_v and
        # preferred_r always ends up None here. Confirm whether intended.
        m = re.match('(\d+:)*(.*)(_.*)*', preferred_v)
        if m:
            if m.group(1):
                preferred_e = int(m.group(1)[:-1])
            else:
                preferred_e = None
            preferred_v = m.group(2)
            if m.group(3):
                preferred_r = m.group(3)[1:]
            else:
                preferred_r = None
        else:
            preferred_e = None
            preferred_r = None

        # Scan priority sets in order; the first match wins
        for file_set in pkg_pn:
            for f in file_set:
                pe,pv,pr = dataCache.pkg_pepvpr[f]
                if preferredVersionMatch(pe, pv, pr, preferred_e, preferred_v, preferred_r):
                    preferred_file = f
                    preferred_ver = (pe, pv, pr)
                    break
            if preferred_file:
                break;
        # Reconstruct a human readable version string for the messages below
        if preferred_r:
            pv_str = '%s-%s' % (preferred_v, preferred_r)
        else:
            pv_str = preferred_v
        if not (preferred_e is None):
            pv_str = '%s:%s' % (preferred_e, pv_str)
        itemstr = ""
        if item:
            itemstr = " (for item %s)" % item
        if preferred_file is None:
            bb.msg.note(1, bb.msg.domain.Provider, "preferred version %s of %s not available%s" % (pv_str, pn, itemstr))
        else:
            bb.msg.debug(1, bb.msg.domain.Provider, "selecting %s as PREFERRED_VERSION %s of package %s%s" % (preferred_file, pv_str, pn, itemstr))

    return (preferred_ver, preferred_file)
def findLatestProvider(pn, cfgData, dataCache, file_set):
    """
    Return the highest version of the providers in file_set.
    Take default preferences into account.
    """
    best_ver = None
    best_pref = 0
    best_file = None

    for candidate in file_set:
        version = dataCache.pkg_pepvpr[candidate]
        pref = dataCache.pkg_dp[candidate]

        # A candidate wins if it is the first one seen, beats the current
        # best at the same default preference, or has a higher preference.
        take = best_ver is None
        if not take and best_pref == pref:
            take = utils.vercmp(best_ver, version) < 0
        if not take:
            take = pref > best_pref
        if take:
            best_ver = version
            best_file = candidate
            best_pref = pref

    return (best_ver, best_file)
def findBestProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
    """
    If there is a PREFERRED_VERSION, find the highest-priority bbfile
    providing that version. If not, find the latest version provided by
    a bbfile in the highest-priority set.

    Returns (latest, latest_f, preferred_ver, preferred_file).
    """

    sortpkg_pn = sortPriorities(pn, dataCache, pkg_pn)

    # Highest-priority provider carrying a PREFERRED_VERSION, if any
    (preferred_ver, preferred_file) = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn, item)

    # Newest version within the highest-priority set
    (latest, latest_f) = findLatestProvider(pn, cfgData, dataCache, sortpkg_pn[0])

    # No explicit preference: the latest provider is the preferred one too
    if preferred_file is None:
        preferred_file = latest_f
        preferred_ver = latest

    return (latest, latest_f, preferred_ver, preferred_file)
def _filterProviders(providers, item, cfgData, dataCache):
    """
    Take a list of providers and filter/reorder according to the
    environment variables and previous build results.

    Returns the eligible providers, best first.  Returns an empty list when
    nothing is eligible - the old code returned the integer 0 here, which
    crashed callers (filterProviders/filterProvidersRunTime iterate the
    result); [] is equally falsy but always iterable.
    """
    eligible = []
    preferred_versions = {}
    sortpkg_pn = {}

    # The order of providers depends on the order of the files on the disk
    # up to here. Sort pkg_pn to make dependency issues reproducible rather
    # than effectively random.
    providers.sort()

    # Collate providers by PN
    pkg_pn = {}
    for p in providers:
        pn = dataCache.pkg_fn[p]
        if pn not in pkg_pn:
            pkg_pn[pn] = []
        pkg_pn[pn].append(p)

    bb.msg.debug(1, bb.msg.domain.Provider, "providers for %s are: %s" % (item, pkg_pn.keys()))

    # First add any providers with a PREFERRED_VERSION set
    for pn in pkg_pn.keys():
        sortpkg_pn[pn] = sortPriorities(pn, dataCache, pkg_pn)
        preferred_versions[pn] = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn[pn], item)
        if preferred_versions[pn][1]:
            eligible.append(preferred_versions[pn][1])

    # Now add latest versions for the remaining PNs
    for pn in sortpkg_pn.keys():
        if pn in preferred_versions and preferred_versions[pn][1]:
            continue
        preferred_versions[pn] = findLatestProvider(pn, cfgData, dataCache, sortpkg_pn[pn][0])
        eligible.append(preferred_versions[pn][1])

    if len(eligible) == 0:
        bb.msg.error(bb.msg.domain.Provider, "no eligible providers for %s" % item)
        return []

    # If pn == item, give it a slight default preference
    # This means PREFERRED_PROVIDER_foobar defaults to foobar if available
    for p in providers:
        pn = dataCache.pkg_fn[p]
        if pn != item:
            continue
        (newvers, fn) = preferred_versions[pn]
        if not fn in eligible:
            continue
        eligible.remove(fn)
        eligible = [fn] + eligible

    return eligible
def filterProviders(providers, item, cfgData, dataCache):
    """
    Take a list of providers and filter/reorder according to the
    environment variables and previous build results
    Takes a "normal" target item
    """

    eligible = _filterProviders(providers, item, cfgData, dataCache)

    # An explicit PREFERRED_PROVIDER overrides any cached preference
    prefervar = bb.data.getVar('PREFERRED_PROVIDER_%s' % item, cfgData, 1)
    if prefervar:
        dataCache.preferred[item] = prefervar

    foundUnique = False
    if item in dataCache.preferred:
        wanted_pn = dataCache.preferred[item]
        # Move the preferred provider (if eligible) to the front of the list
        for p in eligible:
            if dataCache.pkg_fn[p] == wanted_pn:
                bb.msg.note(2, bb.msg.domain.Provider, "selecting %s to satisfy %s due to PREFERRED_PROVIDERS" % (wanted_pn, item))
                eligible.remove(p)
                eligible = [p] + eligible
                foundUnique = True
                break

    bb.msg.debug(1, bb.msg.domain.Provider, "sorted providers for %s are: %s" % (item, eligible))

    return eligible, foundUnique
def filterProvidersRunTime(providers, item, cfgData, dataCache):
    """
    Take a list of providers and filter/reorder according to the
    environment variables and previous build results
    Takes a "runtime" target item

    Returns (eligible, numberPreferred) where numberPreferred counts how
    many providers were promoted by a PREFERRED_PROVIDER entry.
    """

    eligible = _filterProviders(providers, item, cfgData, dataCache)

    # Should use dataCache.preferred here?
    preferred = []
    preferred_vars = []
    for p in eligible:
        pn = dataCache.pkg_fn[p]
        provides = dataCache.pn_provides[pn]
        for provide in provides:
            bb.msg.note(2, bb.msg.domain.Provider, "checking PREFERRED_PROVIDER_%s" % (provide))
            prefervar = bb.data.getVar('PREFERRED_PROVIDER_%s' % provide, cfgData, 1)
            if prefervar == pn:
                # This provider is explicitly preferred for something it
                # provides: promote it to the front of the eligible list.
                var = "PREFERRED_PROVIDER_%s = %s" % (provide, prefervar)
                bb.msg.note(2, bb.msg.domain.Provider, "selecting %s to satisfy runtime %s due to %s" % (pn, item, var))
                preferred_vars.append(var)
                eligible.remove(p)
                eligible = [p] + eligible
                preferred.append(p)
                break

    numberPreferred = len(preferred)

    if numberPreferred > 1:
        # Two or more PREFERRED_PROVIDER entries both claimed this item
        bb.msg.error(bb.msg.domain.Provider, "Conflicting PREFERRED_PROVIDER entries were found which resulted in an attempt to select multiple providers (%s) for runtime dependecy %s\nThe entries resulting in this conflict were: %s" % (preferred, item, preferred_vars))

    bb.msg.debug(1, bb.msg.domain.Provider, "sorted providers for %s are: %s" % (item, eligible))

    return eligible, numberPreferred
# Memoised compiled patterns for getRuntimeProviders, keyed by the raw
# (unescaped) PACKAGES_DYNAMIC pattern string.
regexp_cache = {}

def getRuntimeProviders(dataCache, rdepend):
    """
    Return any providers of runtime dependency rdepend.

    Static RPROVIDES and PACKAGES entries are consulted first; the regular
    expressions in PACKAGES_DYNAMIC are only tried when neither matched.
    """
    rproviders = []

    if rdepend in dataCache.rproviders:
        rproviders += dataCache.rproviders[rdepend]

    if rdepend in dataCache.packages:
        rproviders += dataCache.packages[rdepend]

    if rproviders:
        return rproviders

    # Only search dynamic packages if we can't find anything in other variables
    for pattern in dataCache.packages_dynamic:
        if pattern in regexp_cache:
            regexp = regexp_cache[pattern]
        else:
            # '+' occurs in real package names (e.g. gtk+) but is a regex
            # metacharacter, so escape it before compiling.  The unescaped
            # pattern remains the cache/dict key - the old code overwrote
            # the loop variable with the escaped form, which made the
            # packages_dynamic lookup below raise KeyError on a match.
            try:
                regexp = re.compile(pattern.replace('+', r"\+"))
            except Exception:
                # Narrowed from a bare 'except:'; the error is reported and
                # then re-raised for the caller to handle.
                bb.msg.error(bb.msg.domain.Provider, "Error parsing re expression: %s" % pattern)
                raise
            regexp_cache[pattern] = regexp
        if regexp.match(rdepend):
            rproviders += dataCache.packages_dynamic[pattern]

    return rproviders
diff --git a/bitbake-dev/lib/bb/runqueue.py b/bitbake-dev/lib/bb/runqueue.py
deleted file mode 100644
index c3ad442e47..0000000000
--- a/bitbake-dev/lib/bb/runqueue.py
+++ /dev/null
@@ -1,1174 +0,0 @@
1#!/usr/bin/env python
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4"""
5BitBake 'RunQueue' implementation
6
7Handles preparation and execution of a queue of tasks
8"""
9
10# Copyright (C) 2006-2007 Richard Purdie
11#
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as
14# published by the Free Software Foundation.
15#
16# This program is distributed in the hope that it will be useful,
17# but WITHOUT ANY WARRANTY; without even the implied warranty of
18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19# GNU General Public License for more details.
20#
21# You should have received a copy of the GNU General Public License along
22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24
25from bb import msg, data, event, mkdirhier, utils
26import bb, os, sys
27import signal
28import stat
29
class TaskFailure(Exception):
    """Exception raised when a task in a runqueue fails"""
    def __init__(self, x):
        # NOTE(review): assigns x straight to Exception.args instead of
        # wrapping it in a tuple - if x is not already a tuple, args will
        # not have the conventional shape.  Confirm callers before changing.
        self.args = x
class RunQueueStats:
    """
    Holds statistics on the tasks handled by the associated runQueue
    """
    def __init__(self, total):
        # All counters start at zero; only the overall total is fixed up front
        self.completed = 0
        self.skipped = 0
        self.failed = 0
        self.active = 0
        self.total = total

    def taskFailed(self):
        self.active -= 1
        self.failed += 1

    def taskCompleted(self, number = 1):
        self.active -= number
        self.completed += number

    def taskSkipped(self, number = 1):
        # NOTE(review): skipped tasks also bump 'active'; presumably the
        # caller later invokes taskCompleted() to balance it - confirm.
        self.active += number
        self.skipped += number

    def taskActive(self):
        self.active += 1
# These values indicate the next step due to be run in the
# runQueue state machine
runQueuePrepare = 2        # initial state: runqueue must still be built (prepare_runqueue)
runQueueRunInit = 3        # runqueue prepared, execution not yet started
runQueueRunning = 4        # tasks are being executed
# NOTE(review): the remaining states are consumed by execution code outside
# this excerpt; values 0, 1 and 5 are unused here.
runQueueFailed = 6
runQueueCleanUp = 7
runQueueComplete = 8
runQueueChildProcess = 9
class RunQueueScheduler:
    """
    Control the order tasks are scheduled in.
    """
    def __init__(self, runqueue):
        """
        The default scheduler just returns the first buildable task (the
        priority map is sorted by task number)
        """
        self.rq = runqueue
        # Identity priority map: task ids in ascending order
        self.prio_map = list(range(len(self.rq.runq_fnid)))

    def next(self):
        """
        Return the id of the first task we find that is buildable
        """
        for pos in range(len(self.rq.runq_fnid)):
            candidate = self.prio_map[pos]
            if self.rq.runq_running[candidate] == 1:
                continue
            if self.rq.runq_buildable[candidate] == 1:
                return candidate
class RunQueueSchedulerSpeed(RunQueueScheduler):
    """
    A scheduler optimised for speed. The priority map is sorted by task weight,
    heavier weighted tasks (tasks needed by the most other tasks) are run first.
    """
    def __init__(self, runqueue):
        """
        The priority map is sorted by task weight.
        """
        self.rq = runqueue

        # Stable-sort task indices by ascending weight, then reverse so the
        # heaviest tasks come first.  This replaces the original O(n^2)
        # "find index of each weight and mark it -1" loop (plus two
        # deepcopies) with an O(n log n) sort; the resulting order,
        # including tie-breaking (equal weights end up in descending index
        # order), is identical.
        weights = self.rq.runq_weight
        self.prio_map = sorted(range(len(weights)), key=lambda idx: weights[idx])
        self.prio_map.reverse()
class RunQueueSchedulerCompletion(RunQueueSchedulerSpeed):
    """
    A scheduler optimised to complete .bb files as quickly as possible. The
    priority map is sorted by task weight, but then reordered so once a given
    .bb file starts to build, its completed as quickly as possible. This works
    well where disk space is at a premium and classes like OE's rm_work are in
    force.
    """
    def __init__(self, runqueue):
        # Start from the speed-sorted map, then group tasks by recipe (fnid)
        RunQueueSchedulerSpeed.__init__(self, runqueue)
        from copy import deepcopy

        #FIXME - whilst this groups all fnids together it does not reorder the
        #fnid groups optimally.

        basemap = deepcopy(self.prio_map)
        self.prio_map = []
        while (len(basemap) > 0):
            # Take the highest-priority remaining task...
            entry = basemap.pop(0)
            self.prio_map.append(entry)
            fnid = self.rq.runq_fnid[entry]
            # ...then immediately pull in every other task from the same
            # recipe, preserving their relative (weight) order.
            todel = []
            for entry in basemap:
                entry_fnid = self.rq.runq_fnid[entry]
                if entry_fnid == fnid:
                    todel.append(basemap.index(entry))
                    self.prio_map.append(entry)
            # Delete from the back so earlier indices stay valid
            todel.reverse()
            for idx in todel:
                del basemap[idx]
154class RunQueue:
155 """
156 BitBake Run Queue implementation
157 """
    def __init__(self, cooker, cfgData, dataCache, taskData, targets):
        # Initialise the empty per-task state first (also sets self.state
        # to runQueuePrepare)
        self.reset_runqueue()
        self.cooker = cooker
        self.dataCache = dataCache
        self.taskData = taskData
        self.cfgData = cfgData
        self.targets = targets

        # Tunables read from the configuration metadata, with defaults:
        # number of parallel tasks, providers allowed to be built by several
        # .bb files, scheduler choice ("speed"/"completion") and the stamp
        # checking policy/whitelist.
        self.number_tasks = int(bb.data.getVar("BB_NUMBER_THREADS", cfgData, 1) or 1)
        self.multi_provider_whitelist = (bb.data.getVar("MULTI_PROVIDER_WHITELIST", cfgData, 1) or "").split()
        self.scheduler = bb.data.getVar("BB_SCHEDULER", cfgData, 1) or "speed"
        self.stamppolicy = bb.data.getVar("BB_STAMP_POLICY", cfgData, 1) or "perfile"
        self.stampwhitelist = bb.data.getVar("BB_STAMP_WHITELIST", cfgData, 1) or ""
    def reset_runqueue(self):
        # Clear the parallel per-task arrays (all indexed by runqueue task
        # id) and return the state machine to the 'prepare' state.
        self.runq_fnid = []
        self.runq_task = []
        self.runq_depends = []
        self.runq_revdeps = []

        self.state = runQueuePrepare
180 def get_user_idstring(self, task):
181 fn = self.taskData.fn_index[self.runq_fnid[task]]
182 taskname = self.runq_task[task]
183 return "%s, %s" % (fn, taskname)
184
185 def get_task_id(self, fnid, taskname):
186 for listid in range(len(self.runq_fnid)):
187 if self.runq_fnid[listid] == fnid and self.runq_task[listid] == taskname:
188 return listid
189 return None
190
    def circular_depchains_handler(self, tasks):
        """
        Some tasks aren't buildable, likely due to circular dependency issues.
        Identify the circular dependencies and print them in a user readable format.

        tasks is a list of unbuildable task ids; returns a list of message
        strings describing up to 10 distinct dependency loops.
        """
        from copy import deepcopy

        valid_chains = []
        explored_deps = {}
        msgs = []

        def chain_reorder(chain):
            """
            Reorder a dependency chain so the lowest task id is first
            (rotation, so equal loops compare equal regardless of the
            task they were discovered from)
            """
            lowest = 0
            new_chain = []
            for entry in range(len(chain)):
                if chain[entry] < chain[lowest]:
                    lowest = entry
            new_chain.extend(chain[lowest:])
            new_chain.extend(chain[:lowest])
            return new_chain

        def chain_compare_equal(chain1, chain2):
            """
            Compare two dependency chains and see if they're the same
            """
            if len(chain1) != len(chain2):
                return False
            for index in range(len(chain1)):
                if chain1[index] != chain2[index]:
                    return False
            return True

        def chain_array_contains(chain, chain_array):
            """
            Return True if chain_array contains chain
            """
            for ch in chain_array:
                if chain_compare_equal(ch, chain):
                    return True
            return False

        def find_chains(taskid, prev_chain):
            # Depth-first walk of reverse dependencies accumulating the
            # current path in prev_chain; a revisit of a task on the path
            # means a loop was found.
            prev_chain.append(taskid)
            total_deps = []
            total_deps.extend(self.runq_revdeps[taskid])
            for revdep in self.runq_revdeps[taskid]:
                if revdep in prev_chain:
                    idx = prev_chain.index(revdep)
                    # To prevent duplicates, reorder the chain to start with the lowest taskid
                    # and search through an array of those we've already printed
                    chain = prev_chain[idx:]
                    new_chain = chain_reorder(chain)
                    if not chain_array_contains(new_chain, valid_chains):
                        valid_chains.append(new_chain)
                        msgs.append("Dependency loop #%d found:\n" % len(valid_chains))
                        for dep in new_chain:
                            msgs.append(" Task %s (%s) (depends: %s)\n" % (dep, self.get_user_idstring(dep), self.runq_depends[dep]))
                        msgs.append("\n")
                    if len(valid_chains) > 10:
                        msgs.append("Aborted dependency loops search after 10 matches.\n")
                        return msgs
                    continue
                scan = False
                # Only recurse when revdep is unexplored, self-dependent, or
                # its explored deps intersect the current path
                if revdep not in explored_deps:
                    scan = True
                elif revdep in explored_deps[revdep]:
                    scan = True
                else:
                    for dep in prev_chain:
                        if dep in explored_deps[revdep]:
                            scan = True
                if scan:
                    find_chains(revdep, deepcopy(prev_chain))
                for dep in explored_deps[revdep]:
                    if dep not in total_deps:
                        total_deps.append(dep)

            explored_deps[taskid] = total_deps

        for task in tasks:
            find_chains(task, [])

        return msgs
    def calculate_task_weights(self, endpoints):
        """
        Calculate a number representing the "weight" of each task. Heavier weighted tasks
        have more dependencies and hence should be executed sooner for maximum speed.

        This function also sanity checks the task list finding tasks that its not
        possible to execute due to circular dependencies.

        endpoints is the list of task ids with no reverse dependencies;
        returns the per-task weight list (indexed by task id).
        """

        numTasks = len(self.runq_fnid)
        weight = []
        deps_left = []
        task_done = []

        for listid in range(numTasks):
            task_done.append(False)
            weight.append(0)
            deps_left.append(len(self.runq_revdeps[listid]))

        # Endpoints seed the propagation with weight 1
        for listid in endpoints:
            weight[listid] = 1
            task_done[listid] = True

        # Propagate weights backwards through the dependency graph in
        # reverse-topological waves: a task's weight is the sum of the
        # weights of everything that (transitively) needs it.
        while 1:
            next_points = []
            for listid in endpoints:
                for revdep in self.runq_depends[listid]:
                    weight[revdep] = weight[revdep] + weight[listid]
                    deps_left[revdep] = deps_left[revdep] - 1
                    if deps_left[revdep] == 0:
                        next_points.append(revdep)
                        task_done[revdep] = True
            endpoints = next_points
            if len(next_points) == 0:
                break

        # Circular dependency sanity check: any task the propagation never
        # reached must be part of (or behind) a dependency cycle
        problem_tasks = []
        for task in range(numTasks):
            if task_done[task] is False or deps_left[task] != 0:
                problem_tasks.append(task)
                bb.msg.debug(2, bb.msg.domain.RunQueue, "Task %s (%s) is not buildable\n" % (task, self.get_user_idstring(task)))
                bb.msg.debug(2, bb.msg.domain.RunQueue, "(Complete marker was %s and the remaining dependency count was %s)\n\n" % (task_done[task], deps_left[task]))

        if problem_tasks:
            message = "Unbuildable tasks were found.\n"
            message = message + "These are usually caused by circular dependencies and any circular dependency chains found will be printed below. Increase the debug level to see a list of unbuildable tasks.\n\n"
            message = message + "Identifying dependency loops (this may take a short while)...\n"
            bb.msg.error(bb.msg.domain.RunQueue, message)

            msgs = self.circular_depchains_handler(problem_tasks)

            message = "\n"
            for msg in msgs:
                message = message + msg
            bb.msg.fatal(bb.msg.domain.RunQueue, message)

        return weight
    def prepare_runqueue(self):
        """
        Turn a set of taskData into a RunQueue and compute data needed
        to optimise the execution order.

        Populates the parallel runq_* arrays, prunes inactive tasks,
        computes weights, picks a scheduler and moves the state machine
        to runQueueRunInit.
        """

        runq_build = []
        recursive_tdepends = {}   # NOTE(review): never used below
        runq_recrdepends = []
        tdepends_fnid = {}

        taskData = self.taskData

        if len(taskData.tasks_name) == 0:
            # Nothing to do
            return

        bb.msg.note(1, bb.msg.domain.RunQueue, "Preparing runqueue")

        # Step A - Work out a list of tasks to run
        #
        # Taskdata gives us a list of possible providers for every build and run
        # target ordered by priority. It also gives information on each of those
        # providers.
        #
        # To create the actual list of tasks to execute we fix the list of
        # providers and then resolve the dependencies into task IDs. This
        # process is repeated for each type of dependency (tdepends, deptask,
        # rdeptast, recrdeptask, idepends).

        def add_build_dependencies(depids, tasknames, depends):
            # Append the task ids of each taskname in the chosen provider of
            # every build-time dependency to 'depends' (in place)
            for depid in depids:
                # Won't be in build_targets if ASSUME_PROVIDED
                if depid not in taskData.build_targets:
                    continue
                depdata = taskData.build_targets[depid][0]
                if depdata is None:
                    continue
                dep = taskData.fn_index[depdata]
                for taskname in tasknames:
                    taskid = taskData.gettask_id(dep, taskname, False)
                    if taskid is not None:
                        depends.append(taskid)

        def add_runtime_dependencies(depids, tasknames, depends):
            # Same as add_build_dependencies but for runtime (RDEPENDS) targets
            for depid in depids:
                if depid not in taskData.run_targets:
                    continue
                depdata = taskData.run_targets[depid][0]
                if depdata is None:
                    continue
                dep = taskData.fn_index[depdata]
                for taskname in tasknames:
                    taskid = taskData.gettask_id(dep, taskname, False)
                    if taskid is not None:
                        depends.append(taskid)

        for task in range(len(taskData.tasks_name)):
            depends = []
            recrdepends = []
            fnid = taskData.tasks_fnid[task]
            fn = taskData.fn_index[fnid]
            task_deps = self.dataCache.task_deps[fn]

            bb.msg.debug(2, bb.msg.domain.RunQueue, "Processing %s:%s" %(fn, taskData.tasks_name[task]))

            if fnid not in taskData.failed_fnids:

                # Resolve task internal dependencies
                #
                # e.g. addtask before X after Y
                depends = taskData.tasks_tdepends[task]

                # Resolve 'deptask' dependencies
                #
                # e.g. do_sometask[deptask] = "do_someothertask"
                # (makes sure sometask runs after someothertask of all DEPENDS)
                if 'deptask' in task_deps and taskData.tasks_name[task] in task_deps['deptask']:
                    tasknames = task_deps['deptask'][taskData.tasks_name[task]].split()
                    add_build_dependencies(taskData.depids[fnid], tasknames, depends)

                # Resolve 'rdeptask' dependencies
                #
                # e.g. do_sometask[rdeptask] = "do_someothertask"
                # (makes sure sometask runs after someothertask of all RDEPENDS)
                if 'rdeptask' in task_deps and taskData.tasks_name[task] in task_deps['rdeptask']:
                    taskname = task_deps['rdeptask'][taskData.tasks_name[task]]
                    add_runtime_dependencies(taskData.rdepids[fnid], [taskname], depends)

                # Resolve inter-task dependencies
                #
                # e.g. do_sometask[depends] = "targetname:do_someothertask"
                # (makes sure sometask runs after targetname's someothertask)
                if fnid not in tdepends_fnid:
                    tdepends_fnid[fnid] = set()
                idepends = taskData.tasks_idepends[task]
                for (depid, idependtask) in idepends:
                    if depid in taskData.build_targets:
                        # Won't be in build_targets if ASSUME_PROVIDED
                        depdata = taskData.build_targets[depid][0]
                        if depdata is not None:
                            dep = taskData.fn_index[depdata]
                            taskid = taskData.gettask_id(dep, idependtask)
                            depends.append(taskid)
                            if depdata != fnid:
                                tdepends_fnid[fnid].add(taskid)


                # Resolve recursive 'recrdeptask' dependencies (A)
                #
                # e.g. do_sometask[recrdeptask] = "do_someothertask"
                # (makes sure sometask runs after someothertask of all DEPENDS, RDEPENDS and intertask dependencies, recursively)
                # We cover the recursive part of the dependencies below
                if 'recrdeptask' in task_deps and taskData.tasks_name[task] in task_deps['recrdeptask']:
                    for taskname in task_deps['recrdeptask'][taskData.tasks_name[task]].split():
                        recrdepends.append(taskname)
                        add_build_dependencies(taskData.depids[fnid], [taskname], depends)
                        add_runtime_dependencies(taskData.rdepids[fnid], [taskname], depends)

                # Rmove all self references
                if task in depends:
                    newdep = []
                    bb.msg.debug(2, bb.msg.domain.RunQueue, "Task %s (%s %s) contains self reference! %s" % (task, taskData.fn_index[taskData.tasks_fnid[task]], taskData.tasks_name[task], depends))
                    for dep in depends:
                        if task != dep:
                            newdep.append(dep)
                    depends = newdep

            self.runq_fnid.append(taskData.tasks_fnid[task])
            self.runq_task.append(taskData.tasks_name[task])
            self.runq_depends.append(set(depends))
            self.runq_revdeps.append(set())

            runq_build.append(0)
            runq_recrdepends.append(recrdepends)

        #
        # Build a list of recursive cumulative dependencies for each fnid
        # We do this by fnid, since if A depends on some task in B
        # we're interested in later tasks B's fnid might have but B itself
        # doesn't depend on
        #
        # Algorithm is O(tasks) + O(tasks)*O(fnids)
        #
        reccumdepends = {}
        for task in range(len(self.runq_fnid)):
            fnid = self.runq_fnid[task]
            if fnid not in reccumdepends:
                if fnid in tdepends_fnid:
                    reccumdepends[fnid] = tdepends_fnid[fnid]
                else:
                    reccumdepends[fnid] = set()
            reccumdepends[fnid].update(self.runq_depends[task])
        for task in range(len(self.runq_fnid)):
            taskfnid = self.runq_fnid[task]
            for fnid in reccumdepends:
                if task in reccumdepends[fnid]:
                    reccumdepends[fnid].add(task)
                    if taskfnid in reccumdepends:
                        reccumdepends[fnid].update(reccumdepends[taskfnid])


        # Resolve recursive 'recrdeptask' dependencies (B)
        #
        # e.g. do_sometask[recrdeptask] = "do_someothertask"
        # (makes sure sometask runs after someothertask of all DEPENDS, RDEPENDS and intertask dependencies, recursively)
        for task in range(len(self.runq_fnid)):
            if len(runq_recrdepends[task]) > 0:
                taskfnid = self.runq_fnid[task]
                for dep in reccumdepends[taskfnid]:
                    # Ignore self references
                    if dep == task:
                        continue
                    for taskname in runq_recrdepends[task]:
                        if taskData.tasks_name[dep] == taskname:
                            self.runq_depends[task].add(dep)

        # Step B - Mark all active tasks
        #
        # Start with the tasks we were asked to run and mark all dependencies
        # as active too. If the task is to be 'forced', clear its stamp. Once
        # all active tasks are marked, prune the ones we don't need.

        bb.msg.note(2, bb.msg.domain.RunQueue, "Marking Active Tasks")

        def mark_active(listid, depth):
            """
            Mark an item as active along with its depends
            (calls itself recursively)
            """

            if runq_build[listid] == 1:
                return

            runq_build[listid] = 1

            depends = self.runq_depends[listid]
            for depend in depends:
                mark_active(depend, depth+1)

        self.target_pairs = []
        for target in self.targets:
            # target is a (name, taskname) pair from the original request
            targetid = taskData.getbuild_id(target[0])

            if targetid not in taskData.build_targets:
                continue

            if targetid in taskData.failed_deps:
                continue

            fnid = taskData.build_targets[targetid][0]
            fn = taskData.fn_index[fnid]
            self.target_pairs.append((fn, target[1]))

            # Remove stamps for targets if force mode active
            if self.cooker.configuration.force:
                bb.msg.note(2, bb.msg.domain.RunQueue, "Remove stamp %s, %s" % (target[1], fn))
                bb.build.del_stamp(target[1], self.dataCache, fn)

            if fnid in taskData.failed_fnids:
                continue

            if target[1] not in taskData.tasks_lookup[fnid]:
                bb.msg.fatal(bb.msg.domain.RunQueue, "Task %s does not exist for target %s" % (target[1], target[0]))

            listid = taskData.tasks_lookup[fnid][target[1]]

            mark_active(listid, 1)

        # Step C - Prune all inactive tasks
        #
        # Once all active tasks are marked, prune the ones we don't need.
        # maps[original_id] is the task's new index, or -1 if it was deleted.

        maps = []
        delcount = 0
        for listid in range(len(self.runq_fnid)):
            if runq_build[listid-delcount] == 1:
                maps.append(listid-delcount)
            else:
                del self.runq_fnid[listid-delcount]
                del self.runq_task[listid-delcount]
                del self.runq_depends[listid-delcount]
                del runq_build[listid-delcount]
                del self.runq_revdeps[listid-delcount]
                delcount = delcount + 1
                maps.append(-1)

        #
        # Step D - Sanity checks and computation
        #

        # Check to make sure we still have tasks to run
        if len(self.runq_fnid) == 0:
            if not taskData.abort:
                bb.msg.fatal(bb.msg.domain.RunQueue, "All buildable tasks have been run but the build is incomplete (--continue mode). Errors for the tasks that failed will have been printed above.")
            else:
                bb.msg.fatal(bb.msg.domain.RunQueue, "No active tasks and not in --continue mode?! Please report this bug.")

        bb.msg.note(2, bb.msg.domain.RunQueue, "Pruned %s inactive tasks, %s left" % (delcount, len(self.runq_fnid)))

        # Remap the dependencies to account for the deleted tasks
        # Check we didn't delete a task we depend on
        for listid in range(len(self.runq_fnid)):
            newdeps = []
            origdeps = self.runq_depends[listid]
            for origdep in origdeps:
                if maps[origdep] == -1:
                    bb.msg.fatal(bb.msg.domain.RunQueue, "Invalid mapping - Should never happen!")
                newdeps.append(maps[origdep])
            self.runq_depends[listid] = set(newdeps)

        bb.msg.note(2, bb.msg.domain.RunQueue, "Assign Weightings")

        # Generate a list of reverse dependencies to ease future calculations
        for listid in range(len(self.runq_fnid)):
            for dep in self.runq_depends[listid]:
                self.runq_revdeps[dep].add(listid)

        # Identify tasks at the end of dependency chains
        # Error on circular dependency loops (length two)
        endpoints = []
        for listid in range(len(self.runq_fnid)):
            revdeps = self.runq_revdeps[listid]
            if len(revdeps) == 0:
                endpoints.append(listid)
            for dep in revdeps:
                if dep in self.runq_depends[listid]:
                    #self.dump_data(taskData)
                    bb.msg.fatal(bb.msg.domain.RunQueue, "Task %s (%s) has circular dependency on %s (%s)" % (taskData.fn_index[self.runq_fnid[dep]], self.runq_task[dep] , taskData.fn_index[self.runq_fnid[listid]], self.runq_task[listid]))

        bb.msg.note(2, bb.msg.domain.RunQueue, "Compute totals (have %s endpoint(s))" % len(endpoints))

        # Calculate task weights
        # Check of higher length circular dependencies
        self.runq_weight = self.calculate_task_weights(endpoints)

        # Decide what order to execute the tasks in, pick a scheduler
        #self.sched = RunQueueScheduler(self)
        if self.scheduler == "completion":
            self.sched = RunQueueSchedulerCompletion(self)
        else:
            self.sched = RunQueueSchedulerSpeed(self)

        # Sanity Check - Check for multiple tasks building the same provider
        prov_list = {}
        seen_fn = []
        for task in range(len(self.runq_fnid)):
            fn = taskData.fn_index[self.runq_fnid[task]]
            if fn in seen_fn:
                continue
            seen_fn.append(fn)
            for prov in self.dataCache.fn_provides[fn]:
                if prov not in prov_list:
                    prov_list[prov] = [fn]
                elif fn not in prov_list[prov]:
                    prov_list[prov].append(fn)
        error = False
        for prov in prov_list:
            if len(prov_list[prov]) > 1 and prov not in self.multi_provider_whitelist:
                error = True
                bb.msg.error(bb.msg.domain.RunQueue, "Multiple .bb files are due to be built which each provide %s (%s).\n This usually means one provides something the other doesn't and should." % (prov, " ".join(prov_list[prov])))
        #if error:
        #    bb.msg.fatal(bb.msg.domain.RunQueue, "Corrupted metadata configuration detected, aborting...")


        # Create a whitelist usable by the stamp checks
        stampfnwhitelist = []
        for entry in self.stampwhitelist.split():
            entryid = self.taskData.getbuild_id(entry)
            if entryid not in self.taskData.build_targets:
                continue
            fnid = self.taskData.build_targets[entryid][0]
            fn = self.taskData.fn_index[fnid]
            stampfnwhitelist.append(fn)
        self.stampfnwhitelist = stampfnwhitelist

        #self.dump_data(taskData)

        self.state = runQueueRunInit
677 def check_stamps(self):
678 unchecked = {}
679 current = []
680 notcurrent = []
681 buildable = []
682
683 if self.stamppolicy == "perfile":
684 fulldeptree = False
685 else:
686 fulldeptree = True
687 stampwhitelist = []
688 if self.stamppolicy == "whitelist":
689 stampwhitelist = self.self.stampfnwhitelist
690
691 for task in range(len(self.runq_fnid)):
692 unchecked[task] = ""
693 if len(self.runq_depends[task]) == 0:
694 buildable.append(task)
695
696 def check_buildable(self, task, buildable):
697 for revdep in self.runq_revdeps[task]:
698 alldeps = 1
699 for dep in self.runq_depends[revdep]:
700 if dep in unchecked:
701 alldeps = 0
702 if alldeps == 1:
703 if revdep in unchecked:
704 buildable.append(revdep)
705
706 for task in range(len(self.runq_fnid)):
707 if task not in unchecked:
708 continue
709 fn = self.taskData.fn_index[self.runq_fnid[task]]
710 taskname = self.runq_task[task]
711 stampfile = "%s.%s" % (self.dataCache.stamp[fn], taskname)
712 # If the stamp is missing its not current
713 if not os.access(stampfile, os.F_OK):
714 del unchecked[task]
715 notcurrent.append(task)
716 check_buildable(self, task, buildable)
717 continue
718 # If its a 'nostamp' task, it's not current
719 taskdep = self.dataCache.task_deps[fn]
720 if 'nostamp' in taskdep and task in taskdep['nostamp']:
721 del unchecked[task]
722 notcurrent.append(task)
723 check_buildable(self, task, buildable)
724 continue
725
726 while (len(buildable) > 0):
727 nextbuildable = []
728 for task in buildable:
729 if task in unchecked:
730 fn = self.taskData.fn_index[self.runq_fnid[task]]
731 taskname = self.runq_task[task]
732 stampfile = "%s.%s" % (self.dataCache.stamp[fn], taskname)
733 iscurrent = True
734
735 t1 = os.stat(stampfile)[stat.ST_MTIME]
736 for dep in self.runq_depends[task]:
737 if iscurrent:
738 fn2 = self.taskData.fn_index[self.runq_fnid[dep]]
739 taskname2 = self.runq_task[dep]
740 stampfile2 = "%s.%s" % (self.dataCache.stamp[fn2], taskname2)
741 if fn == fn2 or (fulldeptree and fn2 not in stampwhitelist):
742 if dep in notcurrent:
743 iscurrent = False
744 else:
745 t2 = os.stat(stampfile2)[stat.ST_MTIME]
746 if t1 < t2:
747 iscurrent = False
748 del unchecked[task]
749 if iscurrent:
750 current.append(task)
751 else:
752 notcurrent.append(task)
753
754 check_buildable(self, task, nextbuildable)
755
756 buildable = nextbuildable
757
758 #for task in range(len(self.runq_fnid)):
759 # fn = self.taskData.fn_index[self.runq_fnid[task]]
760 # taskname = self.runq_task[task]
761 # print "%s %s.%s" % (task, taskname, fn)
762
763 #print "Unchecked: %s" % unchecked
764 #print "Current: %s" % current
765 #print "Not current: %s" % notcurrent
766
767 if len(unchecked) > 0:
768 bb.fatal("check_stamps fatal internal error")
769 return current
770
771 def check_stamp_task(self, task):
772
773 if self.stamppolicy == "perfile":
774 fulldeptree = False
775 else:
776 fulldeptree = True
777 stampwhitelist = []
778 if self.stamppolicy == "whitelist":
779 stampwhitelist = self.stampfnwhitelist
780
781 fn = self.taskData.fn_index[self.runq_fnid[task]]
782 taskname = self.runq_task[task]
783 stampfile = "%s.%s" % (self.dataCache.stamp[fn], taskname)
784 # If the stamp is missing its not current
785 if not os.access(stampfile, os.F_OK):
786 bb.msg.debug(2, bb.msg.domain.RunQueue, "Stampfile %s not available\n" % stampfile)
787 return False
788 # If its a 'nostamp' task, it's not current
789 taskdep = self.dataCache.task_deps[fn]
790 if 'nostamp' in taskdep and taskname in taskdep['nostamp']:
791 bb.msg.debug(2, bb.msg.domain.RunQueue, "%s.%s is nostamp\n" % (fn, taskname))
792 return False
793
794 iscurrent = True
795 t1 = os.stat(stampfile)[stat.ST_MTIME]
796 for dep in self.runq_depends[task]:
797 if iscurrent:
798 fn2 = self.taskData.fn_index[self.runq_fnid[dep]]
799 taskname2 = self.runq_task[dep]
800 stampfile2 = "%s.%s" % (self.dataCache.stamp[fn2], taskname2)
801 if fn == fn2 or (fulldeptree and fn2 not in stampwhitelist):
802 try:
803 t2 = os.stat(stampfile2)[stat.ST_MTIME]
804 if t1 < t2:
805 bb.msg.debug(2, bb.msg.domain.RunQueue, "Stampfile %s < %s" % (stampfile,stampfile2))
806 iscurrent = False
807 except:
808 bb.msg.debug(2, bb.msg.domain.RunQueue, "Exception reading %s for %s" % (stampfile2 ,stampfile))
809 iscurrent = False
810
811 return iscurrent
812
    def execute_runqueue(self):
        """
        Run the tasks in a queue prepared by prepare_runqueue
        Upon failure, optionally try to recover the build using any alternate providers
        (if the abort on failure configuration option isn't set)

        Returns True while the caller should keep looping, False when the
        queue has reached a terminal state (complete, or child process).
        """

        # NOTE: the order of these checks matters - each handler advances
        # self.state, so one call may flow through several phases.
        if self.state is runQueuePrepare:
            self.prepare_runqueue()

        if self.state is runQueueRunInit:
            bb.msg.note(1, bb.msg.domain.RunQueue, "Executing runqueue")
            self.execute_runqueue_initVars()

        if self.state is runQueueRunning:
            self.execute_runqueue_internal()

        if self.state is runQueueCleanUp:
            self.finish_runqueue()

        if self.state is runQueueFailed:
            # Without --tryaltconfigs the failure is fatal; otherwise mark
            # the failing recipe files as failed and restart the queue so
            # alternate providers can be tried.
            if not self.taskData.tryaltconfigs:
                raise bb.runqueue.TaskFailure(self.failed_fnids)
            for fnid in self.failed_fnids:
                self.taskData.fail_fnid(fnid)
            self.reset_runqueue()

        if self.state is runQueueComplete:
            # All done
            bb.msg.note(1, bb.msg.domain.RunQueue, "Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and %d failed." % (self.stats.completed, self.stats.skipped, self.stats.failed))
            return False

        if self.state is runQueueChildProcess:
            # Forked task workers must never re-enter the scheduler loop.
            print "Child process"
            return False

        # Loop
        return True
851
852 def execute_runqueue_initVars(self):
853
854 self.stats = RunQueueStats(len(self.runq_fnid))
855
856 self.runq_buildable = []
857 self.runq_running = []
858 self.runq_complete = []
859 self.build_pids = {}
860 self.build_pipes = {}
861 self.failed_fnids = []
862
863 # Mark initial buildable tasks
864 for task in range(self.stats.total):
865 self.runq_running.append(0)
866 self.runq_complete.append(0)
867 if len(self.runq_depends[task]) == 0:
868 self.runq_buildable.append(1)
869 else:
870 self.runq_buildable.append(0)
871
872 self.state = runQueueRunning
873
874 event.fire(bb.event.StampUpdate(self.target_pairs, self.dataCache.stamp), self.cfgData)
875
876 def task_complete(self, task):
877 """
878 Mark a task as completed
879 Look at the reverse dependencies and mark any task with
880 completed dependencies as buildable
881 """
882 self.runq_complete[task] = 1
883 for revdep in self.runq_revdeps[task]:
884 if self.runq_running[revdep] == 1:
885 continue
886 if self.runq_buildable[revdep] == 1:
887 continue
888 alldeps = 1
889 for dep in self.runq_depends[revdep]:
890 if self.runq_complete[dep] != 1:
891 alldeps = 0
892 if alldeps == 1:
893 self.runq_buildable[revdep] = 1
894 fn = self.taskData.fn_index[self.runq_fnid[revdep]]
895 taskname = self.runq_task[revdep]
896 bb.msg.debug(1, bb.msg.domain.RunQueue, "Marking task %s (%s, %s) as buildable" % (revdep, fn, taskname))
897
898 def task_fail(self, task, exitcode):
899 """
900 Called when a task has failed
901 Updates the state engine with the failure
902 """
903 bb.msg.error(bb.msg.domain.RunQueue, "Task %s (%s) failed with %s" % (task, self.get_user_idstring(task), exitcode))
904 self.stats.taskFailed()
905 fnid = self.runq_fnid[task]
906 self.failed_fnids.append(fnid)
907 bb.event.fire(runQueueTaskFailed(task, self.stats, self), self.cfgData)
908 if self.taskData.abort:
909 self.state = runQueueCleanup
910
911 def execute_runqueue_internal(self):
912 """
913 Run the tasks in a queue prepared by prepare_runqueue
914 """
915
916 if self.stats.total == 0:
917 # nothing to do
918 self.state = runQueueCleanup
919
920 while True:
921 task = None
922 if self.stats.active < self.number_tasks:
923 task = self.sched.next()
924 if task is not None:
925 fn = self.taskData.fn_index[self.runq_fnid[task]]
926
927 taskname = self.runq_task[task]
928 if self.check_stamp_task(task):
929 bb.msg.debug(2, bb.msg.domain.RunQueue, "Stamp current task %s (%s)" % (task, self.get_user_idstring(task)))
930 self.runq_running[task] = 1
931 self.runq_buildable[task] = 1
932 self.task_complete(task)
933 self.stats.taskCompleted()
934 self.stats.taskSkipped()
935 continue
936
937 sys.stdout.flush()
938 sys.stderr.flush()
939 try:
940 pipein, pipeout = os.pipe()
941 pid = os.fork()
942 except OSError, e:
943 bb.msg.fatal(bb.msg.domain.RunQueue, "fork failed: %d (%s)" % (e.errno, e.strerror))
944 if pid == 0:
945 os.close(pipein)
946 # Save out the PID so that the event can include it the
947 # events
948 bb.event.worker_pid = os.getpid()
949 bb.event.worker_pipe = pipeout
950
951 self.state = runQueueChildProcess
952 # Make the child the process group leader
953 os.setpgid(0, 0)
954 # No stdin
955 newsi = os.open('/dev/null', os.O_RDWR)
956 os.dup2(newsi, sys.stdin.fileno())
957
958 bb.event.fire(runQueueTaskStarted(task, self.stats, self), self.cfgData)
959 bb.msg.note(1, bb.msg.domain.RunQueue,
960 "Running task %d of %d (ID: %s, %s)" % (self.stats.completed + self.stats.active + 1,
961 self.stats.total,
962 task,
963 self.get_user_idstring(task)))
964
965 bb.data.setVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY", self, self.cooker.configuration.data)
966 try:
967 self.cooker.tryBuild(fn, taskname[3:])
968 except bb.build.EventException:
969 bb.msg.error(bb.msg.domain.Build, "Build of " + fn + " " + taskname + " failed")
970 os._exit(1)
971 except:
972 bb.msg.error(bb.msg.domain.Build, "Build of " + fn + " " + taskname + " failed")
973 os._exit(1)
974 os._exit(0)
975
976 self.build_pids[pid] = task
977 self.build_pipes[pid] = runQueuePipe(pipein, pipeout, self.cfgData)
978 self.runq_running[task] = 1
979 self.stats.taskActive()
980 if self.stats.active < self.number_tasks:
981 continue
982
983 for pipe in self.build_pipes:
984 self.build_pipes[pipe].read()
985
986 if self.stats.active > 0:
987 result = os.waitpid(-1, os.WNOHANG)
988 if result[0] is 0 and result[1] is 0:
989 return
990 task = self.build_pids[result[0]]
991 del self.build_pids[result[0]]
992 self.build_pipes[result[0]].close()
993 del self.build_pipes[result[0]]
994 if result[1] != 0:
995 self.task_fail(task, result[1])
996 return
997 self.task_complete(task)
998 self.stats.taskCompleted()
999 bb.event.fire(runQueueTaskCompleted(task, self.stats, self), self.cfgData)
1000 continue
1001
1002 if len(self.failed_fnids) != 0:
1003 self.state = runQueueFailed
1004 return
1005
1006 # Sanity Checks
1007 for task in range(self.stats.total):
1008 if self.runq_buildable[task] == 0:
1009 bb.msg.error(bb.msg.domain.RunQueue, "Task %s never buildable!" % task)
1010 if self.runq_running[task] == 0:
1011 bb.msg.error(bb.msg.domain.RunQueue, "Task %s never ran!" % task)
1012 if self.runq_complete[task] == 0:
1013 bb.msg.error(bb.msg.domain.RunQueue, "Task %s never completed!" % task)
1014 self.state = runQueueComplete
1015 return
1016
1017 def finish_runqueue_now(self):
1018 bb.msg.note(1, bb.msg.domain.RunQueue, "Sending SIGINT to remaining %s tasks" % self.stats.active)
1019 for k, v in self.build_pids.iteritems():
1020 try:
1021 os.kill(-k, signal.SIGINT)
1022 except:
1023 pass
1024 for pipe in self.build_pipes:
1025 self.build_pipes[pipe].read()
1026
1027 def finish_runqueue(self, now = False):
1028 self.state = runQueueCleanUp
1029 if now:
1030 self.finish_runqueue_now()
1031 try:
1032 while self.stats.active > 0:
1033 bb.event.fire(runQueueExitWait(self.stats.active), self.cfgData)
1034 bb.msg.note(1, bb.msg.domain.RunQueue, "Waiting for %s active tasks to finish" % self.stats.active)
1035 tasknum = 1
1036 for k, v in self.build_pids.iteritems():
1037 bb.msg.note(1, bb.msg.domain.RunQueue, "%s: %s (%s)" % (tasknum, self.get_user_idstring(v), k))
1038 tasknum = tasknum + 1
1039 result = os.waitpid(-1, os.WNOHANG)
1040 if result[0] is 0 and result[1] is 0:
1041 return
1042 task = self.build_pids[result[0]]
1043 del self.build_pids[result[0]]
1044 self.build_pipes[result[0]].close()
1045 del self.build_pipes[result[0]]
1046 if result[1] != 0:
1047 self.task_fail(task, result[1])
1048 else:
1049 self.stats.taskCompleted()
1050 bb.event.fire(runQueueTaskCompleted(task, self.stats, self), self.cfgData)
1051 except:
1052 self.finish_runqueue_now()
1053 raise
1054
1055 if len(self.failed_fnids) != 0:
1056 self.state = runQueueFailed
1057 return
1058
1059 self.state = runQueueComplete
1060 return
1061
1062 def dump_data(self, taskQueue):
1063 """
1064 Dump some debug information on the internal data structures
1065 """
1066 bb.msg.debug(3, bb.msg.domain.RunQueue, "run_tasks:")
1067 for task in range(len(self.runq_task)):
1068 bb.msg.debug(3, bb.msg.domain.RunQueue, " (%s)%s - %s: %s Deps %s RevDeps %s" % (task,
1069 taskQueue.fn_index[self.runq_fnid[task]],
1070 self.runq_task[task],
1071 self.runq_weight[task],
1072 self.runq_depends[task],
1073 self.runq_revdeps[task]))
1074
1075 bb.msg.debug(3, bb.msg.domain.RunQueue, "sorted_tasks:")
1076 for task1 in range(len(self.runq_task)):
1077 if task1 in self.prio_map:
1078 task = self.prio_map[task1]
1079 bb.msg.debug(3, bb.msg.domain.RunQueue, " (%s)%s - %s: %s Deps %s RevDeps %s" % (task,
1080 taskQueue.fn_index[self.runq_fnid[task]],
1081 self.runq_task[task],
1082 self.runq_weight[task],
1083 self.runq_depends[task],
1084 self.runq_revdeps[task]))
1085
1086
class TaskFailure(Exception):
    """
    Exception raised when a task in a runqueue fails

    The ids of the failing recipe files are carried in ``args`` so the
    caller can report which builds went wrong.
    """
    def __init__(self, x):
        self.args = x
1093
1094
class runQueueExitWait(bb.event.Event):
    """
    Event when waiting for task processes to exit

    Fired by finish_runqueue() once per wait iteration.
    """

    def __init__(self, remain):
        # Number of worker tasks still running.
        self.remain = remain
        self.message = "Waiting for %s active tasks to finish" % remain
        bb.event.Event.__init__(self)
1104
class runQueueEvent(bb.event.Event):
    """
    Base runQueue event class

    Captures the runqueue-local task id, a human-readable id string for
    the task, and a reference to the run statistics.
    """
    def __init__(self, task, stats, rq):
        self.taskid = task
        # Resolved via the runqueue into a "file, taskname" style string.
        self.taskstring = rq.get_user_idstring(task)
        self.stats = stats
        bb.event.Event.__init__(self)
1114
class runQueueTaskStarted(runQueueEvent):
    """
    Event notifing a task was started
    """
    def __init__(self, task, stats, rq):
        runQueueEvent.__init__(self, task, stats, rq)
        # "N of M": this task counts as the next one beyond those already
        # completed or active.
        self.message = "Running task %s (%d of %d) (%s)" % (task, stats.completed + stats.active + 1, self.stats.total, self.taskstring)
1122
class runQueueTaskFailed(runQueueEvent):
    """
    Event notifing a task failed

    Fired by task_fail() after the statistics have been updated.
    """
    def __init__(self, task, stats, rq):
        runQueueEvent.__init__(self, task, stats, rq)
        self.message = "Task %s failed (%s)" % (task, self.taskstring)
1130
class runQueueTaskCompleted(runQueueEvent):
    """
    Event notifing a task completed

    Fired once the worker process for the task has exited successfully.
    """
    def __init__(self, task, stats, rq):
        runQueueEvent.__init__(self, task, stats, rq)
        self.message = "Task %s completed (%s)" % (task, self.taskstring)
1138
def check_stamp_fn(fn, taskname, d):
    """
    Check a task's stamp from within task context.

    Looks up the active runqueue stashed in the datastore and delegates
    to its check_stamp_task(); returns None when the task is unknown.
    """
    rq = bb.data.getVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY", d)
    fnid = rq.taskData.getfn_id(fn)
    taskid = rq.get_task_id(fnid, taskname)
    if taskid is None:
        return None
    return rq.check_stamp_task(taskid)
1146
1147class runQueuePipe():
1148 """
1149 Abstraction for a pipe between a worker thread and the server
1150 """
1151 def __init__(self, pipein, pipeout, d):
1152 self.fd = pipein
1153 os.close(pipeout)
1154 self.queue = ""
1155 self.d = d
1156
1157 def read(self):
1158 start = len(self.queue)
1159 self.queue = self.queue + os.read(self.fd, 1024)
1160 end = len(self.queue)
1161 index = self.queue.find("</event>")
1162 while index != -1:
1163 bb.event.fire_from_worker(self.queue[:index+8], self.d)
1164 self.queue = self.queue[index+8:]
1165 index = self.queue.find("</event>")
1166 return (end > start)
1167
1168 def close(self):
1169 while self.read():
1170 continue
1171 if len(self.queue) > 0:
1172 print "Warning, worker left partial message"
1173 os.close(self.fd)
1174
diff --git a/bitbake-dev/lib/bb/shell.py b/bitbake-dev/lib/bb/shell.py
deleted file mode 100644
index 66e51719a4..0000000000
--- a/bitbake-dev/lib/bb/shell.py
+++ /dev/null
@@ -1,824 +0,0 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3##########################################################################
4#
5# Copyright (C) 2005-2006 Michael 'Mickey' Lauer <mickey@Vanille.de>
6# Copyright (C) 2005-2006 Vanille Media
7#
8# This program is free software; you can redistribute it and/or modify
9# it under the terms of the GNU General Public License version 2 as
10# published by the Free Software Foundation.
11#
12# This program is distributed in the hope that it will be useful,
13# but WITHOUT ANY WARRANTY; without even the implied warranty of
14# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15# GNU General Public License for more details.
16#
17# You should have received a copy of the GNU General Public License along
18# with this program; if not, write to the Free Software Foundation, Inc.,
19# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20#
21##########################################################################
22#
23# Thanks to:
24# * Holger Freyther <zecke@handhelds.org>
25# * Justin Patrin <papercrane@reversefold.com>
26#
27##########################################################################
28
29"""
30BitBake Shell
31
32IDEAS:
33 * list defined tasks per package
34 * list classes
35 * toggle force
36 * command to reparse just one (or more) bbfile(s)
37 * automatic check if reparsing is necessary (inotify?)
38 * frontend for bb file manipulation
39 * more shell-like features:
40 - output control, i.e. pipe output into grep, sort, etc.
41 - job control, i.e. bring running commands into background and foreground
42 * start parsing in background right after startup
43 * ncurses interface
44
45PROBLEMS:
46 * force doesn't always work
47 * readline completion for commands with more than one parameters
48
49"""
50
51##########################################################################
52# Import and setup global variables
53##########################################################################
54
55try:
56 set
57except NameError:
58 from sets import Set as set
59import sys, os, readline, socket, httplib, urllib, commands, popen2, copy, shlex, Queue, fnmatch
60from bb import data, parse, build, fatal, cache, taskdata, runqueue, providers as Providers
61
__version__ = "0.5.3.1"
__credits__ = """BitBake Shell Version %s (C) 2005 Michael 'Mickey' Lauer <mickey@Vanille.de>
Type 'help' for more information, press CTRL-D to exit.""" % __version__

# Registry of shell commands: name -> (function, numparams, usage, helptext).
cmds = {}
# Set to True by the 'exit' command to terminate the main loop.
leave_mainloop = False
# Last build exception, inspected by 'lastError' / 'pasteLog'.
last_exception = None
# Global cooker instance; None until assigned elsewhere at start-up.
cooker = None
# Whether .bb files have been parsed yet (see _checkParsed).
parsed = False
# Non-empty value enables shell debug output.
debug = os.environ.get( "BBSHELL_DEBUG", "" )
72
73##########################################################################
74# Class BitBakeShellCommands
75##########################################################################
76
77class BitBakeShellCommands:
78 """This class contains the valid commands for the shell"""
79
    def __init__( self, shell ):
        """Register all the commands"""
        self._shell = shell
        # Every public method of this class becomes a shell command; a
        # trailing underscore is stripped so names clashing with Python
        # keywords can be used (e.g. 'exit_' registers 'exit').
        for attr in BitBakeShellCommands.__dict__:
            if not attr.startswith( "_" ):
                if attr.endswith( "_" ):
                    command = attr[:-1].lower()
                else:
                    command = attr[:].lower()
                method = getattr( BitBakeShellCommands, attr )
                debugOut( "registering command '%s'" % command )
                # scan number of arguments
                # A 'usage' of "<...>" means a variable argument count.
                usage = getattr( method, "usage", "" )
                if usage != "<...>":
                    numArgs = len( usage.split() )
                else:
                    numArgs = -1
                shell.registerCommand( command, method, numArgs, "%s %s" % ( command, usage ), method.__doc__ )
98
99 def _checkParsed( self ):
100 if not parsed:
101 print "SHELL: This command needs to parse bbfiles..."
102 self.parse( None )
103
    def _findProvider( self, item ):
        # Resolve 'item' to the .bb file providing it, honouring any
        # PREFERRED_PROVIDER_<item> setting; returns None if nothing does.
        self._checkParsed()
        # Need to use taskData for this information
        preferred = data.getVar( "PREFERRED_PROVIDER_%s" % item, cooker.configuration.data, 1 )
        if not preferred: preferred = item
        try:
            lv, lf, pv, pf = Providers.findBestProvider(preferred, cooker.configuration.data, cooker.status)
        except KeyError:
            # No best provider computable; fall back to the first
            # registered provider, if any.
            if item in cooker.status.providers:
                pf = cooker.status.providers[item][0]
            else:
                pf = None
        return pf
117
118 def alias( self, params ):
119 """Register a new name for a command"""
120 new, old = params
121 if not old in cmds:
122 print "ERROR: Command '%s' not known" % old
123 else:
124 cmds[new] = cmds[old]
125 print "OK"
126 alias.usage = "<alias> <command>"
127
128 def buffer( self, params ):
129 """Dump specified output buffer"""
130 index = params[0]
131 print self._shell.myout.buffer( int( index ) )
132 buffer.usage = "<index>"
133
134 def buffers( self, params ):
135 """Show the available output buffers"""
136 commands = self._shell.myout.bufferedCommands()
137 if not commands:
138 print "SHELL: No buffered commands available yet. Start doing something."
139 else:
140 print "="*35, "Available Output Buffers", "="*27
141 for index, cmd in enumerate( commands ):
142 print "| %s %s" % ( str( index ).ljust( 3 ), cmd )
143 print "="*88
144
    def build( self, params, cmd = "build" ):
        """Build a providee"""
        # Remember the last failure so 'lastError'/'pasteLog' can report it.
        global last_exception
        globexpr = params[0]
        self._checkParsed()
        # Expand the glob against all known package names; fall back to
        # the literal expression (it may still name a providee).
        names = globfilter( cooker.status.pkg_pn.keys(), globexpr )
        if len( names ) == 0: names = [ globexpr ]
        print "SHELL: Building %s" % ' '.join( names )

        # Build a fresh task-dependency graph over a copy of the config data.
        td = taskdata.TaskData(cooker.configuration.abort)
        localdata = data.createCopy(cooker.configuration.data)
        data.update_data(localdata)
        data.expandKeys(localdata)

        try:
            tasks = []
            for name in names:
                td.add_provider(localdata, cooker.status, name)
                providers = td.get_provider(name)

                if len(providers) == 0:
                    raise Providers.NoProvider

                tasks.append([name, "do_%s" % cmd])

            td.add_unresolved(localdata, cooker.status)

            # Hand the task graph to the runqueue for execution.
            rq = runqueue.RunQueue(cooker, localdata, cooker.status, td, tasks)
            rq.prepare_runqueue()
            rq.execute_runqueue()

        except Providers.NoProvider:
            print "ERROR: No Provider"
            last_exception = Providers.NoProvider

        except runqueue.TaskFailure, fnids:
            # One or more tasks failed; report each failing recipe file.
            for fnid in fnids:
                print "ERROR: '%s' failed" % td.fn_index[fnid]
            last_exception = runqueue.TaskFailure

        except build.EventException, e:
            print "ERROR: Couldn't build '%s'" % names
            last_exception = e


    build.usage = "<providee>"
191
    def clean( self, params ):
        """Clean a providee"""
        # Delegates to build() with the 'clean' task.
        self.build( params, "clean" )
    clean.usage = "<providee>"
196
    def compile( self, params ):
        """Execute 'compile' on a providee"""
        # Delegates to build() with the 'compile' task.
        self.build( params, "compile" )
    compile.usage = "<providee>"
201
    def configure( self, params ):
        """Execute 'configure' on a providee"""
        # Delegates to build() with the 'configure' task.
        self.build( params, "configure" )
    configure.usage = "<providee>"
206
    def install( self, params ):
        """Execute 'install' on a providee"""
        # Delegates to build() with the 'install' task.
        self.build( params, "install" )
    install.usage = "<providee>"
211
212 def edit( self, params ):
213 """Call $EDITOR on a providee"""
214 name = params[0]
215 bbfile = self._findProvider( name )
216 if bbfile is not None:
217 os.system( "%s %s" % ( os.environ.get( "EDITOR", "vi" ), bbfile ) )
218 else:
219 print "ERROR: Nothing provides '%s'" % name
220 edit.usage = "<providee>"
221
    def environment( self, params ):
        """Dump out the outer BitBake environment"""
        # Delegates to the cooker's environment dump.
        cooker.showEnvironment()
225
    def exit_( self, params ):
        """Leave the BitBake Shell"""
        debugOut( "setting leave_mainloop to true" )
        # Flag checked by the main loop; the shell exits on its next pass.
        global leave_mainloop
        leave_mainloop = True
231
    def fetch( self, params ):
        """Fetch a providee"""
        # Delegates to build() with the 'fetch' task.
        self.build( params, "fetch" )
    fetch.usage = "<providee>"
236
    def fileBuild( self, params, cmd = "build" ):
        """Parse and build a .bb file"""
        global last_exception
        name = params[0]
        # Resolve a possibly partial path to the actual .bb file.
        bf = completeFilePath( name )
        print "SHELL: Calling '%s' on '%s'" % ( cmd, bf )

        try:
            cooker.buildFile(bf, cmd)
        except parse.ParseError:
            print "ERROR: Unable to open or parse '%s'" % bf
        except build.EventException, e:
            # Remember the failure for 'lastError' / 'pasteLog'.
            print "ERROR: Couldn't build '%s'" % name
            last_exception = e

    fileBuild.usage = "<bbfile>"
253
    def fileClean( self, params ):
        """Clean a .bb file"""
        # Delegates to fileBuild() with the 'clean' task.
        self.fileBuild( params, "clean" )
    fileClean.usage = "<bbfile>"
258
259 def fileEdit( self, params ):
260 """Call $EDITOR on a .bb file"""
261 name = params[0]
262 os.system( "%s %s" % ( os.environ.get( "EDITOR", "vi" ), completeFilePath( name ) ) )
263 fileEdit.usage = "<bbfile>"
264
    def fileRebuild( self, params ):
        """Rebuild (clean & build) a .bb file"""
        # Delegates to fileBuild() with the 'rebuild' task.
        self.fileBuild( params, "rebuild" )
    fileRebuild.usage = "<bbfile>"
269
    def fileReparse( self, params ):
        """(re)Parse a bb file"""
        bbfile = params[0]
        print "SHELL: Parsing '%s'" % bbfile
        # Refresh mtime and cache validity, then (re)load the file's data.
        parse.update_mtime( bbfile )
        cooker.bb_cache.cacheValidUpdate(bbfile)
        fromCache = cooker.bb_cache.loadData(bbfile, cooker.configuration.data, cooker.status)
        cooker.bb_cache.sync()
        # Cache short-circuit deliberately disabled: always report a parse.
        if False: #fromCache:
            print "SHELL: File has not been updated, not reparsing"
        else:
            print "SHELL: Parsed"
    fileReparse.usage = "<bbfile>"
283
    def abort( self, params ):
        """Toggle abort task execution flag (see bitbake -k)"""
        # Flips the global abort-on-error setting used by later builds.
        cooker.configuration.abort = not cooker.configuration.abort
        print "SHELL: Abort Flag is now '%s'" % repr( cooker.configuration.abort )
288
    def force( self, params ):
        """Toggle force task execution flag (see bitbake -f)"""
        # Flips the global force setting used by later builds.
        cooker.configuration.force = not cooker.configuration.force
        print "SHELL: Force Flag is now '%s'" % repr( cooker.configuration.force )
293
294 def help( self, params ):
295 """Show a comprehensive list of commands and their purpose"""
296 print "="*30, "Available Commands", "="*30
297 allcmds = cmds.keys()
298 allcmds.sort()
299 for cmd in allcmds:
300 function,numparams,usage,helptext = cmds[cmd]
301 print "| %s | %s" % (usage.ljust(30), helptext)
302 print "="*78
303
304 def lastError( self, params ):
305 """Show the reason or log that was produced by the last BitBake event exception"""
306 if last_exception is None:
307 print "SHELL: No Errors yet (Phew)..."
308 else:
309 reason, event = last_exception.args
310 print "SHELL: Reason for the last error: '%s'" % reason
311 if ':' in reason:
312 msg, filename = reason.split( ':' )
313 filename = filename.strip()
314 print "SHELL: Dumping log file for last error:"
315 try:
316 print open( filename ).read()
317 except IOError:
318 print "ERROR: Couldn't open '%s'" % filename
319
    def match( self, params ):
        """Dump all files or providers matching a glob expression"""
        what, globexpr = params
        if what == "files":
            self._checkParsed()
            for key in globfilter( cooker.status.pkg_fn.keys(), globexpr ): print key
        elif what == "providers":
            self._checkParsed()
            for key in globfilter( cooker.status.pkg_pn.keys(), globexpr ): print key
        else:
            # NOTE(review): borrows print_'s usage string, which omits the
            # <glob> argument - confirm that's intended.
            print "Usage: match %s" % self.print_.usage
    match.usage = "<files|providers> <glob>"
332
    def new( self, params ):
        """Create a new .bb file and open the editor"""
        dirname, filename = params
        # Derive the packages directory from BBFILES (two path levels up).
        packages = '/'.join( data.getVar( "BBFILES", cooker.configuration.data, 1 ).split('/')[:-2] )
        fulldirname = "%s/%s" % ( packages, dirname )

        if not os.path.exists( fulldirname ):
            print "SHELL: Creating '%s'" % fulldirname
            os.mkdir( fulldirname )
        if os.path.exists( fulldirname ) and os.path.isdir( fulldirname ):
            if os.path.exists( "%s/%s" % ( fulldirname, filename ) ):
                print "SHELL: ERROR: %s/%s already exists" % ( fulldirname, filename )
                return False
            print "SHELL: Creating '%s/%s'" % ( fulldirname, filename )
            newpackage = open( "%s/%s" % ( fulldirname, filename ), "w" )
            # Skeleton recipe template written into the new file.
            print >>newpackage,"""DESCRIPTION = ""
SECTION = ""
AUTHOR = ""
HOMEPAGE = ""
MAINTAINER = ""
LICENSE = "GPL"
PR = "r0"

SRC_URI = ""

#inherit base

#do_configure() {
#
#}

#do_compile() {
#
#}

#do_stage() {
#
#}

#do_install() {
#
#}
"""
            newpackage.close()
            os.system( "%s %s/%s" % ( os.environ.get( "EDITOR" ), fulldirname, filename ) )
    new.usage = "<directory> <filename>"
379
    def package( self, params ):
        """Execute 'package' on a providee"""
        # Delegates to build() with the 'package' task.
        self.build( params, "package" )
    package.usage = "<providee>"
384
    def pasteBin( self, params ):
        """Send a command + output buffer to the pastebin at http://rafb.net/paste"""
        index = params[0]
        # Grab the captured output of the buffered command and upload it.
        contents = self._shell.myout.buffer( int( index ) )
        sendToPastebin( "output of " + params[0], contents )
    pasteBin.usage = "<index>"
391
392 def pasteLog( self, params ):
393 """Send the last event exception error log (if there is one) to http://rafb.net/paste"""
394 if last_exception is None:
395 print "SHELL: No Errors yet (Phew)..."
396 else:
397 reason, event = last_exception.args
398 print "SHELL: Reason for the last error: '%s'" % reason
399 if ':' in reason:
400 msg, filename = reason.split( ':' )
401 filename = filename.strip()
402 print "SHELL: Pasting log file to pastebin..."
403
404 file = open( filename ).read()
405 sendToPastebin( "contents of " + filename, file )
406
    def patch( self, params ):
        """Execute 'patch' command on a providee"""
        # Delegates to build() with the 'patch' task.
        self.build( params, "patch" )
    patch.usage = "<providee>"
411
    def parse( self, params ):
        """(Re-)parse .bb files and calculate the dependency graph"""
        # Start from a fresh cache, seeding ASSUME_PROVIDED dependencies
        # and the configured file collections.
        cooker.status = cache.CacheData()
        ignore = data.getVar("ASSUME_PROVIDED", cooker.configuration.data, 1) or ""
        cooker.status.ignored_dependencies = set( ignore.split() )
        cooker.handleCollections( data.getVar("BBFILE_COLLECTIONS", cooker.configuration.data, 1) )

        # Collect and parse every unmasked .bb file, then compute providers.
        (filelist, masked) = cooker.collect_bbfiles()
        cooker.parse_bbfiles(filelist, masked, cooker.myProgressCallback)
        cooker.buildDepgraph()
        # Remember that parsing happened so _checkParsed won't redo it.
        global parsed
        parsed = True
        print
425
426 def reparse( self, params ):
427 """(re)Parse a providee's bb file"""
428 bbfile = self._findProvider( params[0] )
429 if bbfile is not None:
430 print "SHELL: Found bbfile '%s' for '%s'" % ( bbfile, params[0] )
431 self.fileReparse( [ bbfile ] )
432 else:
433 print "ERROR: Nothing provides '%s'" % params[0]
434 reparse.usage = "<providee>"
435
436 def getvar( self, params ):
437 """Dump the contents of an outer BitBake environment variable"""
438 var = params[0]
439 value = data.getVar( var, cooker.configuration.data, 1 )
440 print value
441 getvar.usage = "<variable>"
442
443 def peek( self, params ):
444 """Dump contents of variable defined in providee's metadata"""
445 name, var = params
446 bbfile = self._findProvider( name )
447 if bbfile is not None:
448 the_data = cooker.bb_cache.loadDataFull(bbfile, cooker.configuration.data)
449 value = the_data.getVar( var, 1 )
450 print value
451 else:
452 print "ERROR: Nothing provides '%s'" % name
453 peek.usage = "<providee> <variable>"
454
455 def poke( self, params ):
456 """Set contents of variable defined in providee's metadata"""
457 name, var, value = params
458 bbfile = self._findProvider( name )
459 if bbfile is not None:
460 print "ERROR: Sorry, this functionality is currently broken"
461 #d = cooker.pkgdata[bbfile]
462 #data.setVar( var, value, d )
463
464 # mark the change semi persistant
465 #cooker.pkgdata.setDirty(bbfile, d)
466 #print "OK"
467 else:
468 print "ERROR: Nothing provides '%s'" % name
469 poke.usage = "<providee> <variable> <value>"
470
471 def print_( self, params ):
472 """Dump all files or providers"""
473 what = params[0]
474 if what == "files":
475 self._checkParsed()
476 for key in cooker.status.pkg_fn.keys(): print key
477 elif what == "providers":
478 self._checkParsed()
479 for key in cooker.status.providers.keys(): print key
480 else:
481 print "Usage: print %s" % self.print_.usage
482 print_.usage = "<files|providers>"
483
484 def python( self, params ):
485 """Enter the expert mode - an interactive BitBake Python Interpreter"""
486 sys.ps1 = "EXPERT BB>>> "
487 sys.ps2 = "EXPERT BB... "
488 import code
489 interpreter = code.InteractiveConsole( dict( globals() ) )
490 interpreter.interact( "SHELL: Expert Mode - BitBake Python %s\nType 'help' for more information, press CTRL-D to switch back to BBSHELL." % sys.version )
491
492 def showdata( self, params ):
493 """Execute 'showdata' on a providee"""
494 cooker.showEnvironment(None, params)
495 showdata.usage = "<providee>"
496
497 def setVar( self, params ):
498 """Set an outer BitBake environment variable"""
499 var, value = params
500 data.setVar( var, value, cooker.configuration.data )
501 print "OK"
502 setVar.usage = "<variable> <value>"
503
504 def rebuild( self, params ):
505 """Clean and rebuild a .bb file or a providee"""
506 self.build( params, "clean" )
507 self.build( params, "build" )
508 rebuild.usage = "<providee>"
509
510 def shell( self, params ):
511 """Execute a shell command and dump the output"""
512 if params != "":
513 print commands.getoutput( " ".join( params ) )
514 shell.usage = "<...>"
515
516 def stage( self, params ):
517 """Execute 'stage' on a providee"""
518 self.build( params, "populate_staging" )
519 stage.usage = "<providee>"
520
    def status( self, params ):
        """<just for testing>"""
        # Developer aid: dump assorted cooker internals.
        # NOTE(review): building_list / build_path / stats may no longer
        # exist on current cooker objects -- confirm before relying on this.
        print "-" * 78
        print "building list = '%s'" % cooker.building_list
        print "build path = '%s'" % cooker.build_path
        print "consider_msgs_cache = '%s'" % cooker.consider_msgs_cache
        print "build stats = '%s'" % cooker.stats
        if last_exception is not None: print "last_exception = '%s'" % repr( last_exception.args )
        print "memory output contents = '%s'" % self._shell.myout._buffer
530
    def test( self, params ):
        """<just for testing>"""
        # Developer aid: simply echo the received parameter list.
        print "testCommand called with '%s'" % params
534
535 def unpack( self, params ):
536 """Execute 'unpack' on a providee"""
537 self.build( params, "unpack" )
538 unpack.usage = "<providee>"
539
540 def which( self, params ):
541 """Computes the providers for a given providee"""
542 # Need to use taskData for this information
543 item = params[0]
544
545 self._checkParsed()
546
547 preferred = data.getVar( "PREFERRED_PROVIDER_%s" % item, cooker.configuration.data, 1 )
548 if not preferred: preferred = item
549
550 try:
551 lv, lf, pv, pf = Providers.findBestProvider(preferred, cooker.configuration.data, cooker.status)
552 except KeyError:
553 lv, lf, pv, pf = (None,)*4
554
555 try:
556 providers = cooker.status.providers[item]
557 except KeyError:
558 print "SHELL: ERROR: Nothing provides", preferred
559 else:
560 for provider in providers:
561 if provider == pf: provider = " (***) %s" % provider
562 else: provider = " %s" % provider
563 print provider
564 which.usage = "<providee>"
565
566##########################################################################
567# Common helper functions
568##########################################################################
569
def completeFilePath( bbfile ):
    """Get the complete bbfile path"""
    # Without parsed package data we cannot resolve anything; return
    # the name unchanged in that case, or when no known file matches.
    if cooker.status and cooker.status.pkg_fn:
        for candidate in cooker.status.pkg_fn.keys():
            if candidate.endswith( bbfile ):
                return candidate
    return bbfile
578
579def sendToPastebin( desc, content ):
580 """Send content to http://oe.pastebin.com"""
581 mydata = {}
582 mydata["lang"] = "Plain Text"
583 mydata["desc"] = desc
584 mydata["cvt_tabs"] = "No"
585 mydata["nick"] = "%s@%s" % ( os.environ.get( "USER", "unknown" ), socket.gethostname() or "unknown" )
586 mydata["text"] = content
587 params = urllib.urlencode( mydata )
588 headers = {"Content-type": "application/x-www-form-urlencoded","Accept": "text/plain"}
589
590 host = "rafb.net"
591 conn = httplib.HTTPConnection( "%s:80" % host )
592 conn.request("POST", "/paste/paste.php", params, headers )
593
594 response = conn.getresponse()
595 conn.close()
596
597 if response.status == 302:
598 location = response.getheader( "location" ) or "unknown"
599 print "SHELL: Pasted to http://%s%s" % ( host, location )
600 else:
601 print "ERROR: %s %s" % ( response.status, response.reason )
602
def completer( text, state ):
    """Return a possible readline completion"""
    debugOut( "completer called with text='%s', state='%d'" % ( text, state ) )

    # readline calls us repeatedly with increasing 'state'; the candidate
    # list is computed once (state == 0) and cached on the function object.
    if state == 0:
        line = readline.get_line_buffer()
        if " " in line:
            line = line.split()
            # we are in second (or more) argument
            if line[0] in cmds and hasattr( cmds[line[0]][0], "usage" ): # known command and usage
                # Use the first usage token to decide what to complete.
                u = getattr( cmds[line[0]][0], "usage" ).split()[0]
                if u == "<variable>":
                    allmatches = cooker.configuration.data.keys()
                elif u == "<bbfile>":
                    if cooker.status.pkg_fn is None: allmatches = [ "(No Matches Available. Parsed yet?)" ]
                    else: allmatches = [ x.split("/")[-1] for x in cooker.status.pkg_fn.keys() ]
                elif u == "<providee>":
                    if cooker.status.pkg_fn is None: allmatches = [ "(No Matches Available. Parsed yet?)" ]
                    else: allmatches = cooker.status.providers.iterkeys()
                else: allmatches = [ "(No tab completion available for this command)" ]
            else: allmatches = [ "(No tab completion available for this command)" ]
        else:
            # we are in first argument
            allmatches = cmds.iterkeys()

        completer.matches = [ x for x in allmatches if x[:len(text)] == text ]
        #print "completer.matches = '%s'" % completer.matches
    # For state > 0 the cached match list from the state == 0 pass is reused.
    if len( completer.matches ) > state:
        return completer.matches[state]
    else:
        return None
634
def debugOut( text ):
    """Write a trace line to stderr when the module debug flag is set."""
    if not debug:
        return
    sys.stderr.write( "( %s )\n" % text )
638
def columnize( alist, width = 80 ):
    """
    A word-wrap function that preserves existing line breaks
    and most spaces in the text. Expects that existing line
    breaks are posix newlines (\n).

    Words from 'alist' are joined with single spaces; a newline is used
    instead whenever the current output line would reach 'width'
    characters.  Returns the joined string.
    """
    # Rewritten from an opaque reduce()/lambda into an explicit loop.
    # The reduce() form also raised TypeError on an empty list; an empty
    # input now simply yields an empty string.
    if not alist:
        return ""
    line = alist[0]
    for word in alist[1:]:
        # Length of the last output line plus the first line of the next
        # word decides whether we wrap.
        if len(line[line.rfind('\n')+1:]) + len(word.split('\n',1)[0]) >= width:
            line = line + '\n' + word
        else:
            line = line + ' ' + word
    return line
653
def globfilter( names, pattern ):
    """Return the subset of 'names' matching the shell-style 'pattern'."""
    return [ name for name in names if fnmatch.fnmatch( name, pattern ) ]
656
657##########################################################################
658# Class MemoryOutput
659##########################################################################
660
class MemoryOutput:
    """File-like object that mirrors writes to a delegate stream while
    keeping the output of the last 10 commands in memory."""

    def __init__( self, delegate ):
        self.delegate = delegate   # underlying stream (may be None)
        self._buffer = []          # archived (command, [text chunks]) pairs
        self.text = []             # chunks for the command in flight
        self._command = None       # command currently being recorded

    def startCommand( self, command ):
        """Begin recording output for 'command'."""
        self._command = command
        self.text = []

    def endCommand( self ):
        """Archive the recorded output, keeping at most 10 entries."""
        if self._command is None:
            return
        if len( self._buffer ) == 10:
            del self._buffer[0]
        self._buffer.append( ( self._command, self.text ) )

    def removeLast( self ):
        """Drop the most recent archived entry and reset recording state."""
        if self._buffer:
            self._buffer.pop()
        self.text = []
        self._command = None

    def lastBuffer( self ):
        """Return the text chunks of the most recent entry, if any."""
        if self._buffer:
            return self._buffer[-1][1]

    def bufferedCommands( self ):
        """Return the list of archived command strings."""
        return [ cmd for cmd, output in self._buffer ]

    def buffer( self, i ):
        """Render archived entry 'i' as prompt + output, or an error."""
        if i < len( self._buffer ):
            return "BB>> %s\n%s" % ( self._buffer[i][0], "".join( self._buffer[i][1] ) )
        else: return "ERROR: Invalid buffer number. Buffer needs to be in (0, %d)" % ( len( self._buffer ) - 1 )

    def write( self, text ):
        # Record everything except the prompt itself; mirror to the
        # delegate unless output is temporarily disabled (delegate None).
        if self._command is not None and text != "BB>> ":
            self.text.append( text )
        if self.delegate is not None:
            self.delegate.write( text )

    def flush( self ):
        return self.delegate.flush()

    def fileno( self ):
        return self.delegate.fileno()

    def isatty( self ):
        return self.delegate.isatty()
699
700##########################################################################
701# Class BitBakeShell
702##########################################################################
703
704class BitBakeShell:
705
    def __init__( self ):
        """Register commands and set up readline"""
        # Queue of commands injected non-interactively (startup file).
        self.commandQ = Queue.Queue()
        self.commands = BitBakeShellCommands( self )
        # Wrap stdout so command output can be recalled/pasted later.
        self.myout = MemoryOutput( sys.stdout )
        self.historyfilename = os.path.expanduser( "~/.bbsh_history" )
        self.startupfilename = os.path.expanduser( "~/.bbsh_startup" )

        # Wire up tab completion for commands and their arguments.
        readline.set_completer( completer )
        readline.set_completer_delims( " " )
        readline.parse_and_bind("tab: complete")

        try:
            readline.read_history_file( self.historyfilename )
        except IOError:
            pass # It doesn't exist yet.

        print __credits__
724
725 def cleanup( self ):
726 """Write readline history and clean up resources"""
727 debugOut( "writing command history" )
728 try:
729 readline.write_history_file( self.historyfilename )
730 except:
731 print "SHELL: Unable to save command history"
732
733 def registerCommand( self, command, function, numparams = 0, usage = "", helptext = "" ):
734 """Register a command"""
735 if usage == "": usage = command
736 if helptext == "": helptext = function.__doc__ or "<not yet documented>"
737 cmds[command] = ( function, numparams, usage, helptext )
738
    def processCommand( self, command, params ):
        """Process a command. Check number of params and print a usage string, if appropriate"""
        debugOut( "processing command '%s'..." % command )
        try:
            function, numparams, usage, helptext = cmds[command]
        except KeyError:
            print "SHELL: ERROR: '%s' command is not a valid command." % command
            # Drop the output recorded for the bogus command.
            self.myout.removeLast()
        else:
            # numparams == -1 means "any number of arguments".
            if (numparams != -1) and (not len( params ) == numparams):
                print "Usage: '%s'" % usage
                return

            result = function( self.commands, params )
            debugOut( "result was '%s'" % result )
754
755 def processStartupFile( self ):
756 """Read and execute all commands found in $HOME/.bbsh_startup"""
757 if os.path.exists( self.startupfilename ):
758 startupfile = open( self.startupfilename, "r" )
759 for cmdline in startupfile:
760 debugOut( "processing startup line '%s'" % cmdline )
761 if not cmdline:
762 continue
763 if "|" in cmdline:
764 print "ERROR: '|' in startup file is not allowed. Ignoring line"
765 continue
766 self.commandQ.put( cmdline.strip() )
767
    def main( self ):
        """The main command loop"""
        while not leave_mainloop:
            try:
                # Interactive input only when no queued (startup file)
                # commands are pending; the prompt is written to the real
                # stdout, not the memory buffer.
                if self.commandQ.empty():
                    sys.stdout = self.myout.delegate
                    cmdline = raw_input( "BB>> " )
                    sys.stdout = self.myout
                else:
                    cmdline = self.commandQ.get()
                if cmdline:
                    # ';' separates multiple commands on one line.
                    allCommands = cmdline.split( ';' )
                    for command in allCommands:
                        pipecmd = None
                        #
                        # special case for expert mode
                        if command == 'python':
                            # The interactive interpreter needs the real
                            # stdout too.
                            sys.stdout = self.myout.delegate
                            self.processCommand( command, "" )
                            sys.stdout = self.myout
                        else:
                            self.myout.startCommand( command )
                            if '|' in command: # disable output
                                command, pipecmd = command.split( '|' )
                                delegate = self.myout.delegate
                                self.myout.delegate = None
                            tokens = shlex.split( command, True )
                            self.processCommand( tokens[0], tokens[1:] or "" )
                            self.myout.endCommand()
                            if pipecmd is not None: # restore output
                                self.myout.delegate = delegate

                                # Feed the captured output of the command
                                # through the requested shell pipe.
                                pipe = popen2.Popen4( pipecmd )
                                pipe.tochild.write( "\n".join( self.myout.lastBuffer() ) )
                                pipe.tochild.close()
                                sys.stdout.write( pipe.fromchild.read() )
                                #
            except EOFError:
                # CTRL-D ends the shell.
                print
                return
            except KeyboardInterrupt:
                # CTRL-C aborts the current line but keeps the shell alive.
                print
810
811##########################################################################
812# Start function - called from the BitBake command line utility
813##########################################################################
814
def start( aCooker ):
    """Entry point used by BitBake: run the interactive shell until exit."""
    global cooker
    cooker = aCooker
    shellinstance = BitBakeShell()
    shellinstance.processStartupFile()
    shellinstance.main()
    shellinstance.cleanup()
822
823if __name__ == "__main__":
824 print "SHELL: Sorry, this program should only be called by BitBake."
diff --git a/bitbake-dev/lib/bb/taskdata.py b/bitbake-dev/lib/bb/taskdata.py
deleted file mode 100644
index 4a88e75f6d..0000000000
--- a/bitbake-dev/lib/bb/taskdata.py
+++ /dev/null
@@ -1,610 +0,0 @@
1#!/usr/bin/env python
2# ex:ts=4:sw=4:sts=4:et
3# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
4"""
5BitBake 'TaskData' implementation
6
7Task data collection and handling
8
9"""
10
11# Copyright (C) 2006 Richard Purdie
12#
13# This program is free software; you can redistribute it and/or modify
14# it under the terms of the GNU General Public License version 2 as
15# published by the Free Software Foundation.
16#
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
21#
22# You should have received a copy of the GNU General Public License along
23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25
26import bb
27
def re_match_strings(target, strings):
    """
    Whether or not the string 'target' matches
    any one string of the strings which can be regular expression string
    """
    import re

    # Each entry may be a plain name or a regular expression; the plain
    # equality test first keeps exact names cheap.  Identity comparison
    # with None replaces the original '!= None'.
    for name in strings:
        if name == target or re.search(name, target) is not None:
            return True
    return False
40
class TaskData:
    """
    BitBake Task Data implementation
    """
    def __init__(self, abort = True, tryaltconfigs = False):
        # Interned name tables: entities are referenced everywhere by
        # their integer index into these lists.
        self.build_names_index = []   # build target names
        self.run_names_index = []     # runtime (RDEPENDS) target names
        self.fn_index = []            # recipe file names

        # Maps target id -> list of fnids able to provide it.
        self.build_targets = {}
        self.run_targets = {}

        # Build target ids that were requested externally.
        self.external_targets = []

        # Per-task parallel arrays, indexed by task id.
        self.tasks_fnid = []        # owning file id
        self.tasks_name = []        # task name
        self.tasks_tdepends = []    # intra-file task dependencies
        self.tasks_idepends = []    # inter-recipe (packagename:task) deps
        # Cache to speed up task ID lookups
        self.tasks_lookup = {}

        # Per-file build/runtime dependency id lists.
        self.depids = {}
        self.rdepids = {}

        # Items already warned about as having multiple providers.
        self.consider_msgs_cache = []

        # Ids / fnids that turned out to be unbuildable.
        self.failed_deps = []
        self.failed_rdeps = []
        self.failed_fnids = []

        self.abort = abort                  # raise on missing providers?
        self.tryaltconfigs = tryaltconfigs
73
74 def getbuild_id(self, name):
75 """
76 Return an ID number for the build target name.
77 If it doesn't exist, create one.
78 """
79 if not name in self.build_names_index:
80 self.build_names_index.append(name)
81 return len(self.build_names_index) - 1
82
83 return self.build_names_index.index(name)
84
85 def getrun_id(self, name):
86 """
87 Return an ID number for the run target name.
88 If it doesn't exist, create one.
89 """
90 if not name in self.run_names_index:
91 self.run_names_index.append(name)
92 return len(self.run_names_index) - 1
93
94 return self.run_names_index.index(name)
95
96 def getfn_id(self, name):
97 """
98 Return an ID number for the filename.
99 If it doesn't exist, create one.
100 """
101 if not name in self.fn_index:
102 self.fn_index.append(name)
103 return len(self.fn_index) - 1
104
105 return self.fn_index.index(name)
106
107 def gettask_ids(self, fnid):
108 """
109 Return an array of the ID numbers matching a given fnid.
110 """
111 ids = []
112 if fnid in self.tasks_lookup:
113 for task in self.tasks_lookup[fnid]:
114 ids.append(self.tasks_lookup[fnid][task])
115 return ids
116
117 def gettask_id(self, fn, task, create = True):
118 """
119 Return an ID number for the task matching fn and task.
120 If it doesn't exist, create one by default.
121 Optionally return None instead.
122 """
123 fnid = self.getfn_id(fn)
124
125 if fnid in self.tasks_lookup:
126 if task in self.tasks_lookup[fnid]:
127 return self.tasks_lookup[fnid][task]
128
129 if not create:
130 return None
131
132 self.tasks_name.append(task)
133 self.tasks_fnid.append(fnid)
134 self.tasks_tdepends.append([])
135 self.tasks_idepends.append([])
136
137 listid = len(self.tasks_name) - 1
138
139 if fnid not in self.tasks_lookup:
140 self.tasks_lookup[fnid] = {}
141 self.tasks_lookup[fnid][task] = listid
142
143 return listid
144
145 def add_tasks(self, fn, dataCache):
146 """
147 Add tasks for a given fn to the database
148 """
149
150 task_deps = dataCache.task_deps[fn]
151
152 fnid = self.getfn_id(fn)
153
154 if fnid in self.failed_fnids:
155 bb.msg.fatal(bb.msg.domain.TaskData, "Trying to re-add a failed file? Something is broken...")
156
157 # Check if we've already seen this fn
158 if fnid in self.tasks_fnid:
159 return
160
161 for task in task_deps['tasks']:
162
163 # Work out task dependencies
164 parentids = []
165 for dep in task_deps['parents'][task]:
166 parentid = self.gettask_id(fn, dep)
167 parentids.append(parentid)
168 taskid = self.gettask_id(fn, task)
169 self.tasks_tdepends[taskid].extend(parentids)
170
171 # Touch all intertask dependencies
172 if 'depends' in task_deps and task in task_deps['depends']:
173 ids = []
174 for dep in task_deps['depends'][task].split():
175 if dep:
176 if ":" not in dep:
177 bb.msg.fatal(bb.msg.domain.TaskData, "Error, dependency %s does not contain ':' character\n. Task 'depends' should be specified in the form 'packagename:task'" % (depend, fn))
178 ids.append(((self.getbuild_id(dep.split(":")[0])), dep.split(":")[1]))
179 self.tasks_idepends[taskid].extend(ids)
180
181 # Work out build dependencies
182 if not fnid in self.depids:
183 dependids = {}
184 for depend in dataCache.deps[fn]:
185 bb.msg.debug(2, bb.msg.domain.TaskData, "Added dependency %s for %s" % (depend, fn))
186 dependids[self.getbuild_id(depend)] = None
187 self.depids[fnid] = dependids.keys()
188
189 # Work out runtime dependencies
190 if not fnid in self.rdepids:
191 rdependids = {}
192 rdepends = dataCache.rundeps[fn]
193 rrecs = dataCache.runrecs[fn]
194 for package in rdepends:
195 for rdepend in bb.utils.explode_deps(rdepends[package]):
196 bb.msg.debug(2, bb.msg.domain.TaskData, "Added runtime dependency %s for %s" % (rdepend, fn))
197 rdependids[self.getrun_id(rdepend)] = None
198 for package in rrecs:
199 for rdepend in bb.utils.explode_deps(rrecs[package]):
200 bb.msg.debug(2, bb.msg.domain.TaskData, "Added runtime recommendation %s for %s" % (rdepend, fn))
201 rdependids[self.getrun_id(rdepend)] = None
202 self.rdepids[fnid] = rdependids.keys()
203
204 for dep in self.depids[fnid]:
205 if dep in self.failed_deps:
206 self.fail_fnid(fnid)
207 return
208 for dep in self.rdepids[fnid]:
209 if dep in self.failed_rdeps:
210 self.fail_fnid(fnid)
211 return
212
213 def have_build_target(self, target):
214 """
215 Have we a build target matching this name?
216 """
217 targetid = self.getbuild_id(target)
218
219 if targetid in self.build_targets:
220 return True
221 return False
222
223 def have_runtime_target(self, target):
224 """
225 Have we a runtime target matching this name?
226 """
227 targetid = self.getrun_id(target)
228
229 if targetid in self.run_targets:
230 return True
231 return False
232
233 def add_build_target(self, fn, item):
234 """
235 Add a build target.
236 If already present, append the provider fn to the list
237 """
238 targetid = self.getbuild_id(item)
239 fnid = self.getfn_id(fn)
240
241 if targetid in self.build_targets:
242 if fnid in self.build_targets[targetid]:
243 return
244 self.build_targets[targetid].append(fnid)
245 return
246 self.build_targets[targetid] = [fnid]
247
248 def add_runtime_target(self, fn, item):
249 """
250 Add a runtime target.
251 If already present, append the provider fn to the list
252 """
253 targetid = self.getrun_id(item)
254 fnid = self.getfn_id(fn)
255
256 if targetid in self.run_targets:
257 if fnid in self.run_targets[targetid]:
258 return
259 self.run_targets[targetid].append(fnid)
260 return
261 self.run_targets[targetid] = [fnid]
262
263 def mark_external_target(self, item):
264 """
265 Mark a build target as being externally requested
266 """
267 targetid = self.getbuild_id(item)
268
269 if targetid not in self.external_targets:
270 self.external_targets.append(targetid)
271
272 def get_unresolved_build_targets(self, dataCache):
273 """
274 Return a list of build targets who's providers
275 are unknown.
276 """
277 unresolved = []
278 for target in self.build_names_index:
279 if re_match_strings(target, dataCache.ignored_dependencies):
280 continue
281 if self.build_names_index.index(target) in self.failed_deps:
282 continue
283 if not self.have_build_target(target):
284 unresolved.append(target)
285 return unresolved
286
287 def get_unresolved_run_targets(self, dataCache):
288 """
289 Return a list of runtime targets who's providers
290 are unknown.
291 """
292 unresolved = []
293 for target in self.run_names_index:
294 if re_match_strings(target, dataCache.ignored_dependencies):
295 continue
296 if self.run_names_index.index(target) in self.failed_rdeps:
297 continue
298 if not self.have_runtime_target(target):
299 unresolved.append(target)
300 return unresolved
301
302 def get_provider(self, item):
303 """
304 Return a list of providers of item
305 """
306 targetid = self.getbuild_id(item)
307
308 return self.build_targets[targetid]
309
310 def get_dependees(self, itemid):
311 """
312 Return a list of targets which depend on item
313 """
314 dependees = []
315 for fnid in self.depids:
316 if itemid in self.depids[fnid]:
317 dependees.append(fnid)
318 return dependees
319
320 def get_dependees_str(self, item):
321 """
322 Return a list of targets which depend on item as a user readable string
323 """
324 itemid = self.getbuild_id(item)
325 dependees = []
326 for fnid in self.depids:
327 if itemid in self.depids[fnid]:
328 dependees.append(self.fn_index[fnid])
329 return dependees
330
331 def get_rdependees(self, itemid):
332 """
333 Return a list of targets which depend on runtime item
334 """
335 dependees = []
336 for fnid in self.rdepids:
337 if itemid in self.rdepids[fnid]:
338 dependees.append(fnid)
339 return dependees
340
341 def get_rdependees_str(self, item):
342 """
343 Return a list of targets which depend on runtime item as a user readable string
344 """
345 itemid = self.getrun_id(item)
346 dependees = []
347 for fnid in self.rdepids:
348 if itemid in self.rdepids[fnid]:
349 dependees.append(self.fn_index[fnid])
350 return dependees
351
    def add_provider(self, cfgData, dataCache, item):
        # Resolve providers for 'item'.  On failure either abort (log and
        # re-raise, when self.abort) or mark the target unbuildable and
        # continue.
        try:
            self.add_provider_internal(cfgData, dataCache, item)
        except bb.providers.NoProvider:
            if self.abort:
                # NOTE(review): the guard calls get_rdependees_str() while
                # the message uses get_dependees_str() -- the build-time
                # variant looks intended for both; confirm.
                if self.get_rdependees_str(item):
                    bb.msg.error(bb.msg.domain.Provider, "Nothing PROVIDES '%s' (but '%s' DEPENDS on or otherwise requires it)" % (item, self.get_dependees_str(item)))
                else:
                    bb.msg.error(bb.msg.domain.Provider, "Nothing PROVIDES '%s'" % (item))
                raise
            targetid = self.getbuild_id(item)
            self.remove_buildtarget(targetid)

        # Remember the item was requested from outside the resolver.
        self.mark_external_target(item)
366
    def add_provider_internal(self, cfgData, dataCache, item):
        """
        Add the providers of item to the task data
        Mark entries were specifically added externally as against dependencies
        added internally during dependency resolution
        """

        if re_match_strings(item, dataCache.ignored_dependencies):
            return

        # No recipe claims to provide this item: log, notify the UI, raise.
        if not item in dataCache.providers:
            if self.get_rdependees_str(item):
                bb.msg.note(2, bb.msg.domain.Provider, "Nothing PROVIDES '%s' (but '%s' DEPENDS on or otherwise requires it)" % (item, self.get_dependees_str(item)))
            else:
                bb.msg.note(2, bb.msg.domain.Provider, "Nothing PROVIDES '%s'" % (item))
            bb.event.fire(bb.event.NoProvider(item), cfgData)
            raise bb.providers.NoProvider(item)

        if self.have_build_target(item):
            return

        all_p = dataCache.providers[item]

        # Filter to eligible providers, dropping any that already failed.
        eligible, foundUnique = bb.providers.filterProviders(all_p, item, cfgData, dataCache)
        eligible = [p for p in eligible if not self.getfn_id(p) in self.failed_fnids]

        if not eligible:
            bb.msg.note(2, bb.msg.domain.Provider, "No buildable provider PROVIDES '%s' but '%s' DEPENDS on or otherwise requires it. Enable debugging and see earlier logs to find unbuildable providers." % (item, self.get_dependees_str(item)))
            bb.event.fire(bb.event.NoProvider(item), cfgData)
            raise bb.providers.NoProvider(item)

        # Warn once per item when several providers remain and no
        # PREFERRED_PROVIDER singled one out.
        if len(eligible) > 1 and foundUnique == False:
            if item not in self.consider_msgs_cache:
                providers_list = []
                for fn in eligible:
                    providers_list.append(dataCache.pkg_fn[fn])
                bb.msg.note(1, bb.msg.domain.Provider, "multiple providers are available for %s (%s);" % (item, ", ".join(providers_list)))
                bb.msg.note(1, bb.msg.domain.Provider, "consider defining PREFERRED_PROVIDER_%s" % item)
                bb.event.fire(bb.event.MultipleProviders(item, providers_list), cfgData)
                self.consider_msgs_cache.append(item)

        # Register every still-buildable provider and pull in its tasks.
        for fn in eligible:
            fnid = self.getfn_id(fn)
            if fnid in self.failed_fnids:
                continue
            bb.msg.debug(2, bb.msg.domain.Provider, "adding %s to satisfy %s" % (fn, item))
            self.add_build_target(fn, item)
            self.add_tasks(fn, dataCache)


        #item = dataCache.pkg_fn[fn]
418
    def add_rprovider(self, cfgData, dataCache, item):
        """
        Add the runtime providers of item to the task data
        (takes item names from RDEPENDS/PACKAGES namespace)
        """

        if re_match_strings(item, dataCache.ignored_dependencies):
            return

        if self.have_runtime_target(item):
            return

        all_p = bb.providers.getRuntimeProviders(dataCache, item)

        # Nothing at all provides the runtime item: log, notify, raise.
        if not all_p:
            bb.msg.error(bb.msg.domain.Provider, "'%s' RDEPENDS/RRECOMMENDS or otherwise requires the runtime entity '%s' but it wasn't found in any PACKAGE or RPROVIDES variables" % (self.get_rdependees_str(item), item))
            bb.event.fire(bb.event.NoProvider(item, runtime=True), cfgData)
            raise bb.providers.NoRProvider(item)

        # Filter to eligible providers, dropping any that already failed.
        eligible, numberPreferred = bb.providers.filterProvidersRunTime(all_p, item, cfgData, dataCache)
        eligible = [p for p in eligible if not self.getfn_id(p) in self.failed_fnids]

        if not eligible:
            bb.msg.error(bb.msg.domain.Provider, "'%s' RDEPENDS/RRECOMMENDS or otherwise requires the runtime entity '%s' but it wasn't found in any PACKAGE or RPROVIDES variables of any buildable targets.\nEnable debugging and see earlier logs to find unbuildable targets." % (self.get_rdependees_str(item), item))
            bb.event.fire(bb.event.NoProvider(item, runtime=True), cfgData)
            raise bb.providers.NoRProvider(item)

        # Warn once per item when several providers remain and none is
        # preferred.
        if len(eligible) > 1 and numberPreferred == 0:
            if item not in self.consider_msgs_cache:
                providers_list = []
                for fn in eligible:
                    providers_list.append(dataCache.pkg_fn[fn])
                bb.msg.note(2, bb.msg.domain.Provider, "multiple providers are available for runtime %s (%s);" % (item, ", ".join(providers_list)))
                bb.msg.note(2, bb.msg.domain.Provider, "consider defining a PREFERRED_PROVIDER entry to match runtime %s" % item)
                bb.event.fire(bb.event.MultipleProviders(item,providers_list, runtime=True), cfgData)
                self.consider_msgs_cache.append(item)

        # Also warn when several PREFERRED_PROVIDER entries match at once.
        if numberPreferred > 1:
            if item not in self.consider_msgs_cache:
                providers_list = []
                for fn in eligible:
                    providers_list.append(dataCache.pkg_fn[fn])
                bb.msg.note(2, bb.msg.domain.Provider, "multiple providers are available for runtime %s (top %s entries preferred) (%s);" % (item, numberPreferred, ", ".join(providers_list)))
                bb.msg.note(2, bb.msg.domain.Provider, "consider defining only one PREFERRED_PROVIDER entry to match runtime %s" % item)
                bb.event.fire(bb.event.MultipleProviders(item,providers_list, runtime=True), cfgData)
                self.consider_msgs_cache.append(item)

        # run through the list until we find one that we can build
        for fn in eligible:
            fnid = self.getfn_id(fn)
            if fnid in self.failed_fnids:
                continue
            bb.msg.debug(2, bb.msg.domain.Provider, "adding '%s' to satisfy runtime '%s'" % (fn, item))
            self.add_runtime_target(fn, item)
            self.add_tasks(fn, dataCache)
474
475 def fail_fnid(self, fnid, missing_list = []):
476 """
477 Mark a file as failed (unbuildable)
478 Remove any references from build and runtime provider lists
479
480 missing_list, A list of missing requirements for this target
481 """
482 if fnid in self.failed_fnids:
483 return
484 bb.msg.debug(1, bb.msg.domain.Provider, "File '%s' is unbuildable, removing..." % self.fn_index[fnid])
485 self.failed_fnids.append(fnid)
486 for target in self.build_targets:
487 if fnid in self.build_targets[target]:
488 self.build_targets[target].remove(fnid)
489 if len(self.build_targets[target]) == 0:
490 self.remove_buildtarget(target, missing_list)
491 for target in self.run_targets:
492 if fnid in self.run_targets[target]:
493 self.run_targets[target].remove(fnid)
494 if len(self.run_targets[target]) == 0:
495 self.remove_runtarget(target, missing_list)
496
497 def remove_buildtarget(self, targetid, missing_list = []):
498 """
499 Mark a build target as failed (unbuildable)
500 Trigger removal of any files that have this as a dependency
501 """
502 if not missing_list:
503 missing_list = [self.build_names_index[targetid]]
504 else:
505 missing_list = [self.build_names_index[targetid]] + missing_list
506 bb.msg.note(2, bb.msg.domain.Provider, "Target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s" % (self.build_names_index[targetid], missing_list))
507 self.failed_deps.append(targetid)
508 dependees = self.get_dependees(targetid)
509 for fnid in dependees:
510 self.fail_fnid(fnid, missing_list)
511 for taskid in range(len(self.tasks_idepends)):
512 idepends = self.tasks_idepends[taskid]
513 for (idependid, idependtask) in idepends:
514 if idependid == targetid:
515 self.fail_fnid(self.tasks_fnid[taskid], missing_list)
516
517 if self.abort and targetid in self.external_targets:
518 bb.msg.error(bb.msg.domain.Provider, "Required build target '%s' has no buildable providers.\nMissing or unbuildable dependency chain was: %s" % (self.build_names_index[targetid], missing_list))
519 raise bb.providers.NoProvider
520
521 def remove_runtarget(self, targetid, missing_list = []):
522 """
523 Mark a run target as failed (unbuildable)
524 Trigger removal of any files that have this as a dependency
525 """
526 if not missing_list:
527 missing_list = [self.run_names_index[targetid]]
528 else:
529 missing_list = [self.run_names_index[targetid]] + missing_list
530
531 bb.msg.note(1, bb.msg.domain.Provider, "Runtime target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s" % (self.run_names_index[targetid], missing_list))
532 self.failed_rdeps.append(targetid)
533 dependees = self.get_rdependees(targetid)
534 for fnid in dependees:
535 self.fail_fnid(fnid, missing_list)
536
537 def add_unresolved(self, cfgData, dataCache):
538 """
539 Resolve all unresolved build and runtime targets
540 """
541 bb.msg.note(1, bb.msg.domain.TaskData, "Resolving any missing task queue dependencies")
542 while 1:
543 added = 0
544 for target in self.get_unresolved_build_targets(dataCache):
545 try:
546 self.add_provider_internal(cfgData, dataCache, target)
547 added = added + 1
548 except bb.providers.NoProvider:
549 targetid = self.getbuild_id(target)
550 if self.abort and targetid in self.external_targets:
551 if self.get_rdependees_str(target):
552 bb.msg.error(bb.msg.domain.Provider, "Nothing PROVIDES '%s' (but '%s' DEPENDS on or otherwise requires it)" % (target, self.get_dependees_str(target)))
553 else:
554 bb.msg.error(bb.msg.domain.Provider, "Nothing PROVIDES '%s'" % (target))
555 raise
556 self.remove_buildtarget(targetid)
557 for target in self.get_unresolved_run_targets(dataCache):
558 try:
559 self.add_rprovider(cfgData, dataCache, target)
560 added = added + 1
561 except bb.providers.NoRProvider:
562 self.remove_runtarget(self.getrun_id(target))
563 bb.msg.debug(1, bb.msg.domain.TaskData, "Resolved " + str(added) + " extra dependecies")
564 if added == 0:
565 break
566 # self.dump_data()
567
568 def dump_data(self):
569 """
570 Dump some debug information on the internal data structures
571 """
572 bb.msg.debug(3, bb.msg.domain.TaskData, "build_names:")
573 bb.msg.debug(3, bb.msg.domain.TaskData, ", ".join(self.build_names_index))
574
575 bb.msg.debug(3, bb.msg.domain.TaskData, "run_names:")
576 bb.msg.debug(3, bb.msg.domain.TaskData, ", ".join(self.run_names_index))
577
578 bb.msg.debug(3, bb.msg.domain.TaskData, "build_targets:")
579 for buildid in range(len(self.build_names_index)):
580 target = self.build_names_index[buildid]
581 targets = "None"
582 if buildid in self.build_targets:
583 targets = self.build_targets[buildid]
584 bb.msg.debug(3, bb.msg.domain.TaskData, " (%s)%s: %s" % (buildid, target, targets))
585
586 bb.msg.debug(3, bb.msg.domain.TaskData, "run_targets:")
587 for runid in range(len(self.run_names_index)):
588 target = self.run_names_index[runid]
589 targets = "None"
590 if runid in self.run_targets:
591 targets = self.run_targets[runid]
592 bb.msg.debug(3, bb.msg.domain.TaskData, " (%s)%s: %s" % (runid, target, targets))
593
594 bb.msg.debug(3, bb.msg.domain.TaskData, "tasks:")
595 for task in range(len(self.tasks_name)):
596 bb.msg.debug(3, bb.msg.domain.TaskData, " (%s)%s - %s: %s" % (
597 task,
598 self.fn_index[self.tasks_fnid[task]],
599 self.tasks_name[task],
600 self.tasks_tdepends[task]))
601
602 bb.msg.debug(3, bb.msg.domain.TaskData, "dependency ids (per fn):")
603 for fnid in self.depids:
604 bb.msg.debug(3, bb.msg.domain.TaskData, " %s %s: %s" % (fnid, self.fn_index[fnid], self.depids[fnid]))
605
606 bb.msg.debug(3, bb.msg.domain.TaskData, "runtime dependency ids (per fn):")
607 for fnid in self.rdepids:
608 bb.msg.debug(3, bb.msg.domain.TaskData, " %s %s: %s" % (fnid, self.fn_index[fnid], self.rdepids[fnid]))
609
610
diff --git a/bitbake-dev/lib/bb/utils.py b/bitbake-dev/lib/bb/utils.py
deleted file mode 100644
index 5fc1463e67..0000000000
--- a/bitbake-dev/lib/bb/utils.py
+++ /dev/null
@@ -1,431 +0,0 @@
1# ex:ts=4:sw=4:sts=4:et
2# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
3"""
4BitBake Utility Functions
5"""
6
7# Copyright (C) 2004 Michael Lauer
8#
9# This program is free software; you can redistribute it and/or modify
10# it under the terms of the GNU General Public License version 2 as
11# published by the Free Software Foundation.
12#
13# This program is distributed in the hope that it will be useful,
14# but WITHOUT ANY WARRANTY; without even the implied warranty of
15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16# GNU General Public License for more details.
17#
18# You should have received a copy of the GNU General Public License along
19# with this program; if not, write to the Free Software Foundation, Inc.,
20# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21
22digits = "0123456789"
23ascii_letters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
24separators = ".-"
25
26import re, fcntl, os, types
27
def explode_version(s):
    """
    Split a version string into comparable components.

    Runs of decimal digits become ints, runs of ASCII letters stay as
    strings, and every other character is kept as a single-character
    string, e.g. "1.2rc3" -> [1, ".", 2, "rc", 3].
    """
    numeric_re = re.compile(r'^(\d+)(.*)$')
    alpha_re = re.compile(r'^([a-zA-Z]+)(.*)$')
    components = []
    rest = s
    while rest != '':
        head = rest[0]
        if '0' <= head <= '9':
            m = numeric_re.match(rest)
            components.append(int(m.group(1)))
            rest = m.group(2)
        elif ('a' <= head <= 'z') or ('A' <= head <= 'Z'):
            m = alpha_re.match(rest)
            components.append(m.group(1))
            rest = m.group(2)
        else:
            # Separator or other punctuation: keep as a single char.
            components.append(head)
            rest = rest[1:]
    return components
46
def vercmp_part(a, b):
    """
    Compare two version-string sections component-wise.

    Both strings are exploded with explode_version() and compared
    element by element. Returns 1 if a > b, -1 if a < b, 0 if equal.
    A version that reaches a separator (".", "-") while the other has a
    real component sorts lower.

    NOTE(review): components may be ints or strings; the final ca/cb
    comparison relies on Python 2's ordering of mixed types.
    """
    va = explode_version(a)
    vb = explode_version(b)
    sa = False
    sb = False
    while True:
        # Pop the next component from each side; None once exhausted.
        ca = va.pop(0) if va else None
        cb = vb.pop(0) if vb else None
        if ca is None and cb is None:
            return 0

        # Track whether the current component is a separator character.
        # (was: type(x) is types.StringType -- deprecated `types` usage)
        if isinstance(ca, str):
            sa = ca in separators
        if isinstance(cb, str):
            sb = cb in separators
        if sa and not sb:
            return -1
        if not sa and sb:
            return 1

        if ca > cb:
            return 1
        if ca < cb:
            return -1
77
def vercmp(ta, tb):
    """
    Compare two (epoch, version, revision) tuples.

    The epoch is compared numerically first; ties fall through to the
    version and then the revision, both via vercmp_part(). Returns a
    negative, zero or positive number in the usual cmp() convention.
    """
    (epoch_a, version_a, rev_a) = ta
    (epoch_b, version_b, rev_b) = tb

    result = int(epoch_a) - int(epoch_b)
    if result == 0:
        result = vercmp_part(version_a, version_b)
    if result == 0:
        result = vercmp_part(rev_a, rev_b)
    return result
88
def explode_deps(s):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    and return a list of dependencies.
    Version information is ignored.
    """
    deps = []
    in_version = False
    for token in s.split():
        if token.startswith('('):
            # Entering a parenthesised version constraint.
            in_version = True
        if not in_version:
            deps.append(token)
        if in_version and token.endswith(')'):
            # Constraint closed; version text itself is discarded.
            in_version = False
    return deps
112
def explode_dep_versions(s):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    and return a dictionary mapping each dependency to its version
    constraint string, or None when no version was given.
    """
    deps = {}
    lastdep = None
    lastver = ""
    in_version = False
    for token in s.split():
        if token.startswith('('):
            # Opening of a version constraint; strip the '('.
            in_version = True
            lastver = token[1:] or ""
        elif in_version and token.endswith(')'):
            # Constraint closed: attach the accumulated version text
            # to the most recent dependency.
            in_version = False
            lastver = lastver + " " + (token[:-1] or "")
            deps[lastdep] = lastver
        elif not in_version:
            # Plain dependency name.
            deps[token] = None
            lastdep = token
            lastver = ""
        else:
            # Middle token of a multi-word constraint.
            lastver = lastver + " " + token

    return deps
141
def _print_trace(body, line):
    """
    Print the Environment of a Text Body

    Logs up to four lines of context either side of *line* (1-based)
    from *body* through the bb error logger.
    """
    import bb

    # print the environment of the method
    bb.msg.error(bb.msg.domain.Util, "Printing the environment of the function")
    first = max(1, line - 4)
    last = min(line + 4, len(body) - 1)
    for lineno in range(first, last + 1):
        bb.msg.error(bb.msg.domain.Util, "\t%.4d:%s" % (lineno, body[lineno - 1]))
154
155
def better_compile(text, file, realfile):
    """
    A better compile method. This method
    will print the offending lines.

    Compiles *text* as an "exec"-mode code object attributed to *file*;
    on failure, reports the failing line plus surrounding context to the
    bb error log and exits the process.
    """
    try:
        return compile(text, file, "exec")
    except Exception, e:
        import bb,sys

        # split the text into lines again
        body = text.split('\n')
        # NOTE(review): realfile is passed as an extra positional argument
        # (trailing comma) -- possibly meant "...: %s" % realfile; verify
        # against bb.msg.error's signature.
        bb.msg.error(bb.msg.domain.Util, "Error in compiling python function in: ", realfile)
        bb.msg.error(bb.msg.domain.Util, "The lines resulting into this error were:")
        # The exception is expected to carry a .lineno (SyntaxError-style).
        bb.msg.error(bb.msg.domain.Util, "\t%d:%s:'%s'" % (e.lineno, e.__class__.__name__, body[e.lineno-1]))

        _print_trace(body, e.lineno)

        # exit now
        sys.exit(1)
176
def better_exec(code, context, text, realfile):
    """
    Similar to better_compile, better_exec will
    print the lines that are responsible for the
    error.

    Executes *code* in *context*; on failure (other than the control-flow
    exceptions bitbake uses internally) logs the exception and the source
    context from *text*, then re-raises.
    """
    import bb,sys
    try:
        exec code in context
    except:
        (t,value,tb) = sys.exc_info()

        # SkipPackage / FuncFailed are expected control flow -- propagate
        # untouched so callers can handle them.
        if t in [bb.parse.SkipPackage, bb.build.FuncFailed]:
            raise

        # print the Header of the Error Message
        bb.msg.error(bb.msg.domain.Util, "Error in executing python function in: %s" % realfile)
        bb.msg.error(bb.msg.domain.Util, "Exception:%s Message:%s" % (t,value) )

        # let us find the line number now
        # Walk to the innermost frame: that is where the failure occurred.
        while tb.tb_next:
            tb = tb.tb_next

        import traceback
        line = traceback.tb_lineno(tb)

        _print_trace( text.split('\n'), line )

        raise
206
def Enum(*names):
    """
    A simple class to give Enum support

    Returns a singleton "enum type" object with one attribute per name
    (in order). The attribute values are hashable, ordered EnumValue
    instances; the enum object itself is iterable and indexable.
    """

    assert names, "Empty enums are not supported"

    class EnumClass(object):
        # __slots__ doubles as the declared set of member names.
        __slots__ = names
        def __iter__(self): return iter(constants)
        def __len__(self): return len(constants)
        def __getitem__(self, i): return constants[i]
        def __repr__(self): return 'Enum' + str(names)
        def __str__(self): return 'enum ' + str(constants)

    class EnumValue(object):
        # Name-mangled private ordinal; exposed read-only via .Value.
        __slots__ = ('__value')
        def __init__(self, value): self.__value = value
        Value = property(lambda self: self.__value)
        # Closure reference to the enclosing enum singleton (bound below).
        EnumType = property(lambda self: EnumType)
        def __hash__(self): return hash(self.__value)
        def __cmp__(self, other):
            # C fans might want to remove the following assertion
            # to make all enums comparable by ordinal value {;))
            assert self.EnumType is other.EnumType, "Only values from the same enum are comparable"
            return cmp(self.__value, other.__value)
        def __invert__(self): return constants[maximum - self.__value]
        def __nonzero__(self): return bool(self.__value)
        def __repr__(self): return str(names[self.__value])

    maximum = len(names) - 1
    constants = [None] * len(names)
    for i, each in enumerate(names):
        val = EnumValue(i)
        setattr(EnumClass, each, val)
        constants[i] = val
    # Freeze the member list; both inner classes close over it.
    constants = tuple(constants)
    EnumType = EnumClass()
    return EnumType
246
def lockfile(name):
    """
    Use the file fn as a lock file, return when the lock has been acquired.
    Returns a variable to pass to unlockfile().

    The directory containing *name* must already exist, otherwise the
    process exits with an error.
    """
    path = os.path.dirname(name)
    if not os.path.isdir(path):
        import bb, sys
        bb.msg.error(bb.msg.domain.Util, "Error, lockfile path does not exist!: %s" % path)
        sys.exit(1)

    while True:
        # If we leave the lockfiles lying around there is no problem
        # but we should clean up after ourselves. This gives potential
        # for races though. To work around this, when we acquire the lock
        # we check the file we locked was still the lock file on disk.
        # by comparing inode numbers. If they don't match or the lockfile
        # no longer exists, we start again.

        # This implementation is unfair since the last person to request the
        # lock is the most likely to win it.

        try:
            lf = open(name, "a+")
            fcntl.flock(lf.fileno(), fcntl.LOCK_EX)
            statinfo = os.fstat(lf.fileno())
            if os.path.exists(lf.name):
                statinfo2 = os.stat(lf.name)
                if statinfo.st_ino == statinfo2.st_ino:
                    return lf
            # File no longer exists or changed, retry.
            # Fix: was `lf.close` (no call), which leaked the descriptor
            # and kept the stale lock held on every retry.
            lf.close()
        except Exception:
            # Best-effort retry loop: any failure (e.g. the file vanishing
            # between open and stat) just tries again.
            continue
281
def unlockfile(lf):
    """
    Unlock a file locked using lockfile()

    The lock file is unlinked first so that a waiter acquiring the flock
    sees the inode change and retries (see lockfile()), then the lock is
    released and the descriptor closed.
    """
    os.unlink(lf.name)
    fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
    # Fix: was `lf.close` (attribute access, not a call) -- the file
    # descriptor was never actually closed.
    lf.close()
289
def md5_file(filename):
    """
    Return the hex string representation of the MD5 checksum of filename.
    """
    try:
        import hashlib
        m = hashlib.md5()
    except ImportError:
        # Python 2.4 has no hashlib; fall back to the legacy md5 module.
        import md5
        m = md5.new()

    # Read in binary mode, in fixed-size chunks: text mode can translate
    # newlines on some platforms (corrupting the digest) and line-based
    # iteration can hold an arbitrarily large "line" in memory.
    f = open(filename, "rb")
    try:
        data = f.read(8192)
        while data:
            m.update(data)
            data = f.read(8192)
    finally:
        f.close()
    return m.hexdigest()
304
def sha256_file(filename):
    """
    Return the hex string representation of the 256-bit SHA checksum of
    filename. On Python 2.4 this will return None, so callers will need to
    handle that by either skipping SHA checks, or running a standalone sha256sum
    binary.
    """
    try:
        import hashlib
    except ImportError:
        return None

    s = hashlib.sha256()
    # Read in binary mode, in fixed-size chunks: text mode can translate
    # newlines on some platforms (corrupting the digest) and line-based
    # iteration can hold an arbitrarily large "line" in memory.
    f = open(filename, "rb")
    try:
        data = f.read(8192)
        while data:
            s.update(data)
            data = f.read(8192)
    finally:
        f.close()
    return s.hexdigest()
321
def preserved_envvars_list():
    """
    Return the default whitelist of environment variable names that
    bitbake preserves when scrubbing the environment (used by
    clean_environment() when BB_ENV_WHITELIST is not set).
    """
    return [
        'BBPATH', 'BB_PRESERVE_ENV', 'BB_ENV_WHITELIST',
        'BB_ENV_EXTRAWHITE', 'COLORTERM', 'DBUS_SESSION_BUS_ADDRESS',
        'DESKTOP_SESSION', 'DESKTOP_STARTUP_ID', 'DISPLAY',
        'GNOME_KEYRING_PID', 'GNOME_KEYRING_SOCKET', 'GPG_AGENT_INFO',
        'GTK_RC_FILES', 'HOME', 'LANG', 'LOGNAME', 'PATH', 'PWD',
        'SESSION_MANAGER', 'SHELL', 'SSH_AUTH_SOCK', 'TERM', 'USER',
        'USERNAME', '_', 'XAUTHORITY', 'XDG_DATA_DIRS',
        'XDG_SESSION_COOKIE',
    ]
353
def filter_environment(good_vars):
    """
    Create a pristine environment for bitbake. This will remove variables that
    are not known and may influence the build in a negative way.

    Every variable not listed in good_vars is removed; the list of
    removed names is returned.
    """

    import bb

    removed = []
    for name in os.environ.keys():
        if name not in good_vars:
            removed.append(name)
            # Clear both the C-level environment and Python's copy.
            os.unsetenv(name)
            del os.environ[name]

    if len(removed):
        bb.debug(1, "Removed the following variables from the environment:", ",".join(removed))

    return removed
375
def clean_environment():
    """
    Clean up any spurious environment variables. This will remove any
    variables the user hasn't chose to preserve.

    Setting BB_PRESERVE_ENV disables scrubbing entirely; otherwise the
    whitelist comes from BB_ENV_WHITELIST (or the built-in default),
    optionally extended by BB_ENV_EXTRAWHITE.
    """
    if 'BB_PRESERVE_ENV' in os.environ:
        return

    if 'BB_ENV_WHITELIST' in os.environ:
        good_vars = os.environ['BB_ENV_WHITELIST'].split()
    else:
        good_vars = preserved_envvars_list()

    if 'BB_ENV_EXTRAWHITE' in os.environ:
        good_vars.extend(os.environ['BB_ENV_EXTRAWHITE'].split())

    filter_environment(good_vars)
389
def empty_environment():
    """
    Remove all variables from the environment.
    """
    # Iterate over a snapshot of the keys: deleting from os.environ while
    # iterating its live key view raises on newer Python versions.
    for s in list(os.environ.keys()):
        # Clear both the C-level environment and Python's copy.
        os.unsetenv(s)
        del os.environ[s]
397
def build_environment(d):
    """
    Build an environment from all exported variables.

    Every variable in the datastore d carrying the "export" flag is
    copied (expanded) into os.environ.
    """
    import bb
    for var in bb.data.keys(d):
        if bb.data.getVarFlag(var, "export", d):
            os.environ[var] = bb.data.getVar(var, d, True)
407
def prunedir(topdir):
    """
    Delete everything reachable from the directory named in 'topdir',
    then the directory itself.
    CAUTION: This is dangerous!
    """
    # Bottom-up walk so children are gone before their parent is removed.
    for root, dirs, files in os.walk(topdir, topdown=False):
        for entry in files:
            os.remove(os.path.join(root, entry))
        for entry in dirs:
            path = os.path.join(root, entry)
            # Symlinks to directories must be unlinked, not rmdir'd.
            if os.path.islink(path):
                os.remove(path)
            else:
                os.rmdir(path)
    os.rmdir(topdir)
420
421#
422# Could also use return re.compile("(%s)" % "|".join(map(re.escape, suffixes))).sub(lambda mo: "", var)
423# but thats possibly insane and suffixes is probably going to be small
424#
def prune_suffix(var, suffixes, d):
    """
    See if var ends with any of the suffixes listed and
    remove it if found.

    Only the trailing occurrence is stripped: the previous implementation
    used str.replace(), which also removed matches elsewhere in the
    string (e.g. "native-foo-native" lost both "-native" substrings).
    The d (datastore) parameter is unused but kept for API compatibility.
    """
    for suffix in suffixes:
        # Guard against "" (endswith("") is always True and [:-0] would
        # wrongly return the empty string).
        if suffix and var.endswith(suffix):
            return var[:-len(suffix)]
    return var
diff --git a/bitbake/AUTHORS b/bitbake/AUTHORS
index 9d592608bb..a4014b1e39 100644
--- a/bitbake/AUTHORS
+++ b/bitbake/AUTHORS
@@ -2,7 +2,7 @@ Tim Ansell <mithro@mithis.net>
2Phil Blundell <pb@handhelds.org> 2Phil Blundell <pb@handhelds.org>
3Seb Frankengul <seb@frankengul.org> 3Seb Frankengul <seb@frankengul.org>
4Holger Freyther <zecke@handhelds.org> 4Holger Freyther <zecke@handhelds.org>
5Marcin Juszkiewicz <marcin@haerwu.biz> 5Marcin Juszkiewicz <marcin@juszkiewicz.com.pl>
6Chris Larson <kergoth@handhelds.org> 6Chris Larson <kergoth@handhelds.org>
7Ulrich Luckas <luckas@musoft.de> 7Ulrich Luckas <luckas@musoft.de>
8Mickey Lauer <mickey@Vanille.de> 8Mickey Lauer <mickey@Vanille.de>
diff --git a/bitbake/ChangeLog b/bitbake/ChangeLog
index 9fe3bf3d83..22124cb7ea 100644
--- a/bitbake/ChangeLog
+++ b/bitbake/ChangeLog
@@ -1,8 +1,99 @@
1Changes in BitBake 1.8.x: 1Changes in Bitbake 1.9.x:
2 - Add bb.utils.prune_suffix function 2 - Add PE (Package Epoch) support from Philipp Zabel (pH5)
3 3 - Treat python functions the same as shell functions for logging
4Changes in BitBake 1.8.12: 4 - Use TMPDIR/anonfunc as a __anonfunc temp directory (T)
5 - Fix -f (force) in conjunction with -b 5 - Catch truncated cache file errors
6 - Allow operations other than assignment on flag variables
7 - Add code to handle inter-task dependencies
8 - Fix cache errors when generation dotGraphs
9 - Make sure __inherit_cache is updated before calling include() (from Michael Krelin)
10 - Fix bug when target was in ASSUME_PROVIDED (#2236)
11 - Raise ParseError for filenames with multiple underscores instead of infinitely looping (#2062)
12 - Fix invalid regexp in BBMASK error handling (missing import) (#1124)
13 - Promote certain warnings from debug to note 2 level
14 - Update manual
15 - Correctly redirect stdin when forking
16 - If parsing errors are found, exit, too many users miss the errors
17 - Remove spurious PREFERRED_PROVIDER warnings
18 - svn fetcher: Add _buildsvncommand function
19 - Improve certain error messages
20 - Rewrite svn fetcher to make adding extra operations easier
21 as part of future SRCDATE="now" fixes
22 (requires new FETCHCMD_svn definition in bitbake.conf)
23 - Change SVNDIR layout to be more unique (fixes #2644 and #2624)
24 - Add ConfigParsed Event after configuration parsing is complete
25 - Add SRCREV support for svn fetcher
26 - data.emit_var() - only call getVar if we need the variable
27 - Stop generating the A variable (seems to be legacy code)
28 - Make sure intertask depends get processed correcting in recursive depends
29 - Add pn-PN to overrides when evaluating PREFERRED_VERSION
30 - Improve the progress indicator by skipping tasks that have
31 already run before starting the build rather than during it
32 - Add profiling option (-P)
33 - Add BB_SRCREV_POLICY variable (clear or cache) to control SRCREV cache
34 - Add SRCREV_FORMAT support
35 - Fix local fetcher's localpath return values
36 - Apply OVERRIDES before performing immediate expansions
37 - Allow the -b -e option combination to take regular expressions
38 - Fix handling of variables with expansion in the name using _append/_prepend
39 e.g. RRECOMMENDS_${PN}_append_xyz = "abc"
40 - Add plain message function to bb.msg
41 - Sort the list of providers before processing so dependency problems are
42 reproducible rather than effectively random
43 - Fix/improve bitbake -s output
44 - Add locking for fetchers so only one tries to fetch a given file at a given time
45 - Fix int(0)/None confusion in runqueue.py which causes random gaps in dependency chains
46 - Expand data in addtasks
47 - Print the list of missing DEPENDS,RDEPENDS for the "No buildable providers available for required...."
48 error message.
49 - Rework add_task to be more efficient (6% speedup, 7% number of function calls reduction)
50 - Sort digraph output to make builds more reproducible
51 - Split expandKeys into two for loops to benefit from the expand_cache (12% speedup)
52 - runqueue.py: Fix idepends handling to avoid dependency errors
53 - Clear the terminal TOSTOP flag if set (and warn the user)
54 - Fix regression from r653 and make SRCDATE/CVSDATE work for packages again
55 - Fix a bug in bb.decodeurl where http://some.where.com/somefile.tgz decoded to host="" (#1530)
56 - Warn about malformed PREFERRED_PROVIDERS (#1072)
57 - Add support for BB_NICE_LEVEL option (#1627)
58 - Psyco is used only on x86 as there is no support for other architectures.
59 - Sort initial providers list by default preference (#1145, #2024)
60 - Improve provider sorting so prefered versions have preference over latest versions (#768)
61 - Detect builds of tasks with overlapping providers and warn (will become a fatal error) (#1359)
62 - Add MULTI_PROVIDER_WHITELIST variable to allow known safe multiple providers to be listed
63 - Handle paths in svn fetcher module parameter
64 - Support the syntax "export VARIABLE"
65 - Add bzr fetcher
66 - Add support for cleaning directories before a task in the form:
67 do_taskname[cleandirs] = "dir"
68 - bzr fetcher tweaks from Robert Schuster (#2913)
69 - Add mercurial (hg) fetcher from Robert Schuster (#2913)
70 - Don't add duplicates to BBPATH
71 - Fix preferred_version return values (providers.py)
72 - Fix 'depends' flag splitting
73 - Fix unexport handling (#3135)
74 - Add bb.copyfile function similar to bb.movefile (and improve movefile error reporting)
75 - Allow multiple options for deptask flag
76 - Use git-fetch instead of git-pull removing any need for merges when
77 fetching (we don't care about the index). Fixes fetch errors.
78 - Add BB_GENERATE_MIRROR_TARBALLS option, set to 0 to make git fetches
79 faster at the expense of not creating mirror tarballs.
80 - SRCREV handling updates, improvements and fixes from Poky
81 - Add bb.utils.lockfile() and bb.utils.unlockfile() from Poky
82 - Add support for task selfstamp and lockfiles flags
83 - Disable task number acceleration since it can allow the tasks to run
84 out of sequence
85 - Improve runqueue code comments
86 - Add task scheduler abstraction and some example schedulers
87 - Improve circular dependency chain debugging code and user feedback
88 - Don't give a stacktrace for invalid tasks, have a user friendly message (#3431)
89 - Add support for "-e target" (#3432)
90 - Fix shell showdata command (#3259)
91 - Fix shell data updating problems (#1880)
92 - Properly raise errors for invalid source URI protocols
93 - Change the wget fetcher failure handling to avoid lockfile problems
94 - Add support for branches in git fetcher (Otavio Salvador, Michael Lauer)
95 - Make taskdata and runqueue errors more user friendly
96 - Add norecurse and fullpath options to cvs fetcher
6 - Fix exit code for build failures in --continue mode 97 - Fix exit code for build failures in --continue mode
7 - Fix git branch tags fetching 98 - Fix git branch tags fetching
8 - Change parseConfigurationFile so it works on real data, not a copy 99 - Change parseConfigurationFile so it works on real data, not a copy
@@ -27,8 +118,10 @@ Changes in BitBake 1.8.12:
27 how extensively stamps are looked at for validity 118 how extensively stamps are looked at for validity
28 - When handling build target failures make sure idepends are checked and 119 - When handling build target failures make sure idepends are checked and
29 failed where needed. Fixes --continue mode crashes. 120 failed where needed. Fixes --continue mode crashes.
121 - Fix -f (force) in conjunction with -b
30 - Fix problems with recrdeptask handling where some idepends weren't handled 122 - Fix problems with recrdeptask handling where some idepends weren't handled
31 correctly. 123 correctly.
124 - Handle exit codes correctly (from pH5)
32 - Work around refs/HEAD issues with git over http (#3410) 125 - Work around refs/HEAD issues with git over http (#3410)
33 - Add proxy support to the CVS fetcher (from Cyril Chemparathy) 126 - Add proxy support to the CVS fetcher (from Cyril Chemparathy)
34 - Improve runfetchcmd so errors are seen and various GIT variables are exported 127 - Improve runfetchcmd so errors are seen and various GIT variables are exported
@@ -44,7 +137,6 @@ Changes in BitBake 1.8.12:
44 - Add PERSISTENT_DIR to store the PersistData in a persistent 137 - Add PERSISTENT_DIR to store the PersistData in a persistent
45 directory != the cache dir. 138 directory != the cache dir.
46 - Add md5 and sha256 checksum generation functions to utils.py 139 - Add md5 and sha256 checksum generation functions to utils.py
47 - Make sure Build Completed events are generated even when tasks fail
48 - Correctly handle '-' characters in class names (#2958) 140 - Correctly handle '-' characters in class names (#2958)
49 - Make sure expandKeys has been called on the data dictonary before running tasks 141 - Make sure expandKeys has been called on the data dictonary before running tasks
50 - Correctly add a task override in the form task-TASKNAME. 142 - Correctly add a task override in the form task-TASKNAME.
@@ -63,6 +155,7 @@ Changes in BitBake 1.8.12:
63 used instead of the internal bitbake one. Alternatively, BB_ENV_EXTRAWHITE can be used 155 used instead of the internal bitbake one. Alternatively, BB_ENV_EXTRAWHITE can be used
64 to extend the internal whitelist. 156 to extend the internal whitelist.
65 - Perforce fetcher fix to use commandline options instead of being overriden by the environment 157 - Perforce fetcher fix to use commandline options instead of being overriden by the environment
158 - bb.utils.prunedir can cope with symlinks to directories without exceptions
66 - use @rev when doing a svn checkout 159 - use @rev when doing a svn checkout
67 - Add osc fetcher (from Joshua Lock in Poky) 160 - Add osc fetcher (from Joshua Lock in Poky)
68 - When SRCREV autorevisioning for a recipe is in use, don't cache the recipe 161 - When SRCREV autorevisioning for a recipe is in use, don't cache the recipe
@@ -76,109 +169,15 @@ Changes in BitBake 1.8.12:
76 proxies to work better. (from Poky) 169 proxies to work better. (from Poky)
77 - Also allow user and pswd options in SRC_URIs globally (from Poky) 170 - Also allow user and pswd options in SRC_URIs globally (from Poky)
78 - Improve proxy handling when using mirrors (from Poky) 171 - Improve proxy handling when using mirrors (from Poky)
79 172 - Add bb.utils.prune_suffix function
80Changes in BitBake 1.8.10: 173 - Fix hg checkouts of specific revisions (from Poky)
81 - Psyco is available only for x86 - do not use it on other architectures. 174 - Fix wget fetching of urls with parameters specified (from Poky)
82 - Fix a bug in bb.decodeurl where http://some.where.com/somefile.tgz decoded to host="" (#1530) 175 - Add username handling to git fetcher (from Poky)
83 - Warn about malformed PREFERRED_PROVIDERS (#1072) 176 - Set HOME environmental variable when running fetcher commands (from Poky)
84 - Add support for BB_NICE_LEVEL option (#1627) 177 - Make sure allowed variables inherited from the environment are exported again (from Poky)
85 - Sort initial providers list by default preference (#1145, #2024) 178 - When running a stage task in bbshell, run populate_staging, not the stage task (from Poky)
86 - Improve provider sorting so prefered versions have preference over latest versions (#768) 179 - Fix + character escaping from PACKAGES_DYNAMIC (thanks Otavio Salvador)
87 - Detect builds of tasks with overlapping providers and warn (will become a fatal error) (#1359) 180 - Addition of BBCLASSEXTEND support for allowing one recipe to provide multiple targets (from Poky)
88 - Add MULTI_PROVIDER_WHITELIST variable to allow known safe multiple providers to be listed
89 - Handle paths in svn fetcher module parameter
90 - Support the syntax "export VARIABLE"
91 - Add bzr fetcher
92 - Add support for cleaning directories before a task in the form:
93 do_taskname[cleandirs] = "dir"
94 - bzr fetcher tweaks from Robert Schuster (#2913)
95 - Add mercurial (hg) fetcher from Robert Schuster (#2913)
96 - Fix bogus preferred_version return values
97 - Fix 'depends' flag splitting
98 - Fix unexport handling (#3135)
99 - Add bb.copyfile function similar to bb.movefile (and improve movefile error reporting)
100 - Allow multiple options for deptask flag
101 - Use git-fetch instead of git-pull removing any need for merges when
102 fetching (we don't care about the index). Fixes fetch errors.
103 - Add BB_GENERATE_MIRROR_TARBALLS option, set to 0 to make git fetches
104 faster at the expense of not creating mirror tarballs.
105 - SRCREV handling updates, improvements and fixes from Poky
106 - Add bb.utils.lockfile() and bb.utils.unlockfile() from Poky
107 - Add support for task selfstamp and lockfiles flags
108 - Disable task number acceleration since it can allow the tasks to run
109 out of sequence
110 - Improve runqueue code comments
111 - Add task scheduler abstraction and some example schedulers
112 - Improve circular dependency chain debugging code and user feedback
113 - Don't give a stacktrace for invalid tasks, have a user friendly message (#3431)
114 - Add support for "-e target" (#3432)
115 - Fix shell showdata command (#3259)
116 - Fix shell data updating problems (#1880)
117 - Properly raise errors for invalid source URI protocols
118 - Change the wget fetcher failure handling to avoid lockfile problems
119 - Add git branch support
120 - Add support for branches in git fetcher (Otavio Salvador, Michael Lauer)
121 - Make taskdata and runqueue errors more user friendly
122 - Add norecurse and fullpath options to cvs fetcher
123 - bb.utils.prunedir can cope with symlinks to directories without exceptions
124
125Changes in Bitbake 1.8.8:
126 - Rewrite svn fetcher to make adding extra operations easier
127 as part of future SRCDATE="now" fixes
128 (requires new FETCHCMD_svn definition in bitbake.conf)
129 - Change SVNDIR layout to be more unique (fixes #2644 and #2624)
130 - Import persistent data store from trunk
131 - Sync fetcher code with that in trunk, adding SRCREV support for svn
132 - Add ConfigParsed Event after configuration parsing is complete
133 - data.emit_var() - only call getVar if we need the variable
134 - Stop generating the A variable (seems to be legacy code)
135 - Make sure intertask depends get processed correcting in recursive depends
136 - Add pn-PN to overrides when evaluating PREFERRED_VERSION
137 - Improve the progress indicator by skipping tasks that have
138 already run before starting the build rather than during it
139 - Add profiling option (-P)
140 - Add BB_SRCREV_POLICY variable (clear or cache) to control SRCREV cache
141 - Add SRCREV_FORMAT support
142 - Fix local fetcher's localpath return values
143 - Apply OVERRIDES before performing immediate expansions
144 - Allow the -b -e option combination to take regular expressions
145 - Add plain message function to bb.msg
146 - Sort the list of providers before processing so dependency problems are
147 reproducible rather than effectively random
148 - Add locking for fetchers so only one tries to fetch a given file at a given time
149 - Fix int(0)/None confusion in runqueue.py which causes random gaps in dependency chains
150 - Fix handling of variables with expansion in the name using _append/_prepend
151 e.g. RRECOMMENDS_${PN}_append_xyz = "abc"
152 - Expand data in addtasks
153 - Print the list of missing DEPENDS,RDEPENDS for the "No buildable providers available for required...."
154 error message.
155 - Rework add_task to be more efficient (6% speedup, 7% number of function calls reduction)
156 - Sort digraph output to make builds more reproducible
157 - Split expandKeys into two for loops to benefit from the expand_cache (12% speedup)
158 - runqueue.py: Fix idepends handling to avoid dependency errors
159 - Clear the terminal TOSTOP flag if set (and warn the user)
160 - Fix regression from r653 and make SRCDATE/CVSDATE work for packages again
161
162Changes in Bitbake 1.8.6:
163 - Correctly redirect stdin when forking
164 - If parsing errors are found, exit, too many users miss the errors
165 - Remove spurious PREFERRED_PROVIDER warnings
166
167Changes in Bitbake 1.8.4:
168 - Make sure __inherit_cache is updated before calling include() (from Michael Krelin)
169 - Fix bug when target was in ASSUME_PROVIDED (#2236)
170 - Raise ParseError for filenames with multiple underscores instead of infinitely looping (#2062)
171 - Fix invalid regexp in BBMASK error handling (missing import) (#1124)
172 - Don't run build sanity checks on incomplete builds
173 - Promote certain warnings from debug to note 2 level
174 - Update manual
175
176Changes in Bitbake 1.8.2:
177 - Catch truncated cache file errors
178 - Add PE (Package Epoch) support from Philipp Zabel (pH5)
179 - Add code to handle inter-task dependencies
180 - Allow operations other than assignment on flag variables
181 - Fix cache errors when generation dotGraphs
182 181
183Changes in Bitbake 1.8.0: 182Changes in Bitbake 1.8.0:
184 - Release 1.7.x as a stable series 183 - Release 1.7.x as a stable series
diff --git a/bitbake/MANIFEST b/bitbake/MANIFEST
deleted file mode 100644
index 39e8017759..0000000000
--- a/bitbake/MANIFEST
+++ /dev/null
@@ -1,53 +0,0 @@
1AUTHORS
2COPYING
3ChangeLog
4MANIFEST
5setup.py
6bin/bitdoc
7bin/bbimage
8bin/bitbake
9lib/bb/__init__.py
10lib/bb/build.py
11lib/bb/cache.py
12lib/bb/cooker.py
13lib/bb/COW.py
14lib/bb/data.py
15lib/bb/data_smart.py
16lib/bb/event.py
17lib/bb/fetch/__init__.py
18lib/bb/fetch/bzr.py
19lib/bb/fetch/cvs.py
20lib/bb/fetch/git.py
21lib/bb/fetch/hg.py
22lib/bb/fetch/local.py
23lib/bb/fetch/osc.py
24lib/bb/fetch/perforce.py
25lib/bb/fetch/ssh.py
26lib/bb/fetch/svk.py
27lib/bb/fetch/svn.py
28lib/bb/fetch/wget.py
29lib/bb/manifest.py
30lib/bb/methodpool.py
31lib/bb/msg.py
32lib/bb/parse/__init__.py
33lib/bb/parse/parse_py/__init__.py
34lib/bb/parse/parse_py/BBHandler.py
35lib/bb/parse/parse_py/ConfHandler.py
36lib/bb/persist_data.py
37lib/bb/providers.py
38lib/bb/runqueue.py
39lib/bb/shell.py
40lib/bb/taskdata.py
41lib/bb/utils.py
42setup.py
43doc/COPYING.GPL
44doc/COPYING.MIT
45doc/bitbake.1
46doc/manual/html.css
47doc/manual/Makefile
48doc/manual/usermanual.xml
49contrib/bbdev.sh
50contrib/vim/syntax/bitbake.vim
51contrib/vim/ftdetect/bitbake.vim
52conf/bitbake.conf
53classes/base.bbclass
diff --git a/bitbake/bin/bitbake b/bitbake/bin/bitbake
index 842ba0441e..23c9d73ee4 100755
--- a/bitbake/bin/bitbake
+++ b/bitbake/bin/bitbake
@@ -22,12 +22,18 @@
22# with this program; if not, write to the Free Software Foundation, Inc., 22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24 24
25import sys, os, getopt, re, time, optparse 25import sys, os, getopt, re, time, optparse, xmlrpclib
26sys.path.insert(0,os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib')) 26sys.path.insert(0,os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
27import bb 27import bb
28from bb import cooker 28from bb import cooker
29from bb import ui
29 30
30__version__ = "1.8.13" 31
32__version__ = "1.9.0"
33
34if sys.hexversion < 0x020500F0:
35 print "Sorry, python 2.5 or later is required for this version of bitbake"
36 sys.exit(1)
31 37
32#============================================================================# 38#============================================================================#
33# BBOptions 39# BBOptions
@@ -41,11 +47,28 @@ class BBConfiguration( object ):
41 setattr( self, key, val ) 47 setattr( self, key, val )
42 48
43 49
50def print_exception(exc, value, tb):
51 """
52 Print the exception to stderr, only showing the traceback if bitbake
53 debugging is enabled.
54 """
55 if not bb.msg.debug_level['default']:
56 tb = None
57
58 sys.__excepthook__(exc, value, tb)
59
60
44#============================================================================# 61#============================================================================#
45# main 62# main
46#============================================================================# 63#============================================================================#
47 64
48def main(): 65def main():
66 return_value = 0
67 pythonver = sys.version_info
68 if pythonver[0] < 2 or (pythonver[0] == 2 and pythonver[1] < 5):
69 print "Sorry, bitbake needs python 2.5 or later."
70 sys.exit(1)
71
49 parser = optparse.OptionParser( version = "BitBake Build Tool Core version %s, %%prog version %s" % ( bb.__version__, __version__ ), 72 parser = optparse.OptionParser( version = "BitBake Build Tool Core version %s, %%prog version %s" % ( bb.__version__, __version__ ),
50 usage = """%prog [options] [package ...] 73 usage = """%prog [options] [package ...]
51 74
@@ -99,8 +122,8 @@ Default BBFILES are the .bb files in the current directory.""" )
99 parser.add_option( "-g", "--graphviz", help = "emit the dependency trees of the specified packages in the dot syntax", 122 parser.add_option( "-g", "--graphviz", help = "emit the dependency trees of the specified packages in the dot syntax",
100 action = "store_true", dest = "dot_graph", default = False ) 123 action = "store_true", dest = "dot_graph", default = False )
101 124
102 parser.add_option( "-I", "--ignore-deps", help = """Stop processing at the given list of dependencies when generating dependency graphs. This can help to make the graph more appealing""", 125 parser.add_option( "-I", "--ignore-deps", help = """Assume these dependencies don't exist and are already provided (equivalent to ASSUME_PROVIDED). Useful to make dependency graphs more appealing""",
103 action = "append", dest = "ignored_dot_deps", default = [] ) 126 action = "append", dest = "extra_assume_provided", default = [] )
104 127
105 parser.add_option( "-l", "--log-domains", help = """Show debug logging for the specified logging domains""", 128 parser.add_option( "-l", "--log-domains", help = """Show debug logging for the specified logging domains""",
106 action = "append", dest = "debug_domains", default = [] ) 129 action = "append", dest = "debug_domains", default = [] )
@@ -108,6 +131,9 @@ Default BBFILES are the .bb files in the current directory.""" )
108 parser.add_option( "-P", "--profile", help = "profile the command and print a report", 131 parser.add_option( "-P", "--profile", help = "profile the command and print a report",
109 action = "store_true", dest = "profile", default = False ) 132 action = "store_true", dest = "profile", default = False )
110 133
134 parser.add_option( "-u", "--ui", help = "userinterface to use",
135 action = "store", dest = "ui")
136
111 parser.add_option( "", "--revisions-changed", help = "Set the exit code depending on whether upstream floating revisions have changed or not", 137 parser.add_option( "", "--revisions-changed", help = "Set the exit code depending on whether upstream floating revisions have changed or not",
112 action = "store_true", dest = "revisions_changed", default = False ) 138 action = "store_true", dest = "revisions_changed", default = False )
113 139
@@ -117,30 +143,53 @@ Default BBFILES are the .bb files in the current directory.""" )
117 configuration.pkgs_to_build = [] 143 configuration.pkgs_to_build = []
118 configuration.pkgs_to_build.extend(args[1:]) 144 configuration.pkgs_to_build.extend(args[1:])
119 145
120 cooker = bb.cooker.BBCooker(configuration) 146 #server = bb.server.xmlrpc
147 server = bb.server.none
148
149 # Save a logfile for cooker into the current working directory. When the
150 # server is daemonized this logfile will be truncated.
151 cooker_logfile = os.path.join (os.getcwd(), "cooker.log")
152
153 cooker = bb.cooker.BBCooker(configuration, server)
121 154
122 # Clear away any spurious environment variables. But don't wipe the 155 # Clear away any spurious environment variables. But don't wipe the
123 # environment totally. 156 # environment totally. This is necessary to ensure the correct operation
157 # of the UIs (e.g. for DISPLAY, etc.)
124 bb.utils.clean_environment() 158 bb.utils.clean_environment()
125 159
126 cooker.parseConfiguration() 160 cooker.parseCommandLine()
127 161
128 if configuration.profile: 162 serverinfo = server.BitbakeServerInfo(cooker.server)
129 try: 163
130 import cProfile as profile 164 server.BitBakeServerFork(serverinfo, cooker.serve, cooker_logfile)
131 except: 165 del cooker
132 import profile 166
133 167 sys.excepthook = print_exception
134 profile.runctx("cooker.cook()", globals(), locals(), "profile.log") 168
135 import pstats 169 # Setup a connection to the server (cooker)
136 p = pstats.Stats('profile.log') 170 serverConnection = server.BitBakeServerConnection(serverinfo)
137 p.sort_stats('time') 171
138 p.print_stats() 172 # Launch the UI
139 p.print_callers() 173 if configuration.ui:
140 p.sort_stats('cumulative') 174 ui = configuration.ui
141 p.print_stats()
142 else: 175 else:
143 cooker.cook() 176 ui = "knotty"
177
178 try:
179 # Dynamically load the UI based on the ui name. Although we
180 # suggest a fixed set this allows you to have flexibility in which
181 # ones are available.
182 exec "from bb.ui import " + ui
183 exec "return_value = " + ui + ".init(serverConnection.connection, serverConnection.events)"
184 except ImportError:
185 print "FATAL: Invalid user interface '%s' specified. " % ui
186 print "Valid interfaces are 'ncurses', 'depexp' or the default, 'knotty'."
187 except Exception, e:
188 print "FATAL: Unable to start to '%s' UI: %s." % (configuration.ui, e.message)
189 finally:
190 serverConnection.terminate()
191 return return_value
144 192
145if __name__ == "__main__": 193if __name__ == "__main__":
146 main() 194 ret = main()
195 sys.exit(ret)
diff --git a/bitbake/bin/bitdoc b/bitbake/bin/bitdoc
index 3bcc9b344b..4940f660a6 100755
--- a/bitbake/bin/bitdoc
+++ b/bitbake/bin/bitdoc
@@ -453,6 +453,8 @@ def main():
453 except bb.parse.ParseError: 453 except bb.parse.ParseError:
454 bb.fatal( "Unable to parse %s" % config_file ) 454 bb.fatal( "Unable to parse %s" % config_file )
455 455
456 if isinstance(documentation, dict):
457 documentation = documentation[""]
456 458
457 # Assuming we've the file loaded now, we will initialize the 'tree' 459 # Assuming we've the file loaded now, we will initialize the 'tree'
458 doc = Documentation() 460 doc = Documentation()
diff --git a/bitbake/contrib/vim/syntax/bitbake.vim b/bitbake/contrib/vim/syntax/bitbake.vim
index 43a1990b0b..be55980b3a 100644
--- a/bitbake/contrib/vim/syntax/bitbake.vim
+++ b/bitbake/contrib/vim/syntax/bitbake.vim
@@ -16,12 +16,17 @@ endif
16 16
17syn case match 17syn case match
18 18
19
20" Catch incorrect syntax (only matches if nothing else does) 19" Catch incorrect syntax (only matches if nothing else does)
21" 20"
22syn match bbUnmatched "." 21syn match bbUnmatched "."
23 22
24 23
24syn include @python syntax/python.vim
25if exists("b:current_syntax")
26 unlet b:current_syntax
27endif
28
29
25" Other 30" Other
26 31
27syn match bbComment "^#.*$" display contains=bbTodo 32syn match bbComment "^#.*$" display contains=bbTodo
@@ -34,21 +39,25 @@ syn match bbArrayBrackets "[\[\]]" contained
34" BitBake strings 39" BitBake strings
35 40
36syn match bbContinue "\\$" 41syn match bbContinue "\\$"
37syn region bbString matchgroup=bbQuote start=/"/ skip=/\\$/ excludenl end=/"/ contained keepend contains=bbTodo,bbContinue,bbVarDeref 42syn region bbString matchgroup=bbQuote start=/"/ skip=/\\$/ excludenl end=/"/ contained keepend contains=bbTodo,bbContinue,bbVarInlinePy,bbVarDeref
38syn region bbString matchgroup=bbQuote start=/'/ skip=/\\$/ excludenl end=/'/ contained keepend contains=bbTodo,bbContinue,bbVarDeref 43syn region bbString matchgroup=bbQuote start=/'/ skip=/\\$/ excludenl end=/'/ contained keepend contains=bbTodo,bbContinue,bbVarInlinePy,bbVarDeref
39
40 44
41" BitBake variable metadata 45" BitBake variable metadata
42 46
47syn match bbVarBraces "[\${}]"
48syn region bbVarDeref matchgroup=bbVarBraces start="${" end="}" contained
49" syn region bbVarDeref start="${" end="}" contained
50" syn region bbVarInlinePy start="${@" end="}" contained contains=@python
51syn region bbVarInlinePy matchgroup=bbVarBraces start="${@" end="}" contained contains=@python
52
43syn keyword bbExportFlag export contained nextgroup=bbIdentifier skipwhite 53syn keyword bbExportFlag export contained nextgroup=bbIdentifier skipwhite
44syn match bbVarDeref "${[a-zA-Z0-9\-_\.]\+}" contained 54" syn match bbVarDeref "${[a-zA-Z0-9\-_\.]\+}" contained
45syn match bbVarDef "^\(export\s*\)\?\([a-zA-Z0-9\-_\.]\+\(_[${}a-zA-Z0-9\-_\.]\+\)\?\)\s*\(:=\|+=\|=+\|\.=\|=\.\|?=\|=\)\@=" contains=bbExportFlag,bbIdentifier,bbVarDeref nextgroup=bbVarEq 55syn match bbVarDef "^\(export\s*\)\?\([a-zA-Z0-9\-_\.]\+\(_[${}a-zA/-Z0-9\-_\.]\+\)\?\)\s*\(:=\|+=\|=+\|\.=\|=\.\|?=\|=\)\@=" contains=bbExportFlag,bbIdentifier,bbVarDeref nextgroup=bbVarEq
46 56
47syn match bbIdentifier "[a-zA-Z0-9\-_\.]\+" display contained 57syn match bbIdentifier "[a-zA-Z0-9\-_\./]\+" display contained
48"syn keyword bbVarEq = display contained nextgroup=bbVarValue 58"syn keyword bbVarEq = display contained nextgroup=bbVarValue
49syn match bbVarEq "\(:=\|+=\|=+\|\.=\|=\.\|?=\|=\)" contained nextgroup=bbVarValue 59syn match bbVarEq "\(:=\|+=\|=+\|\.=\|=\.\|?=\|=\)" contained nextgroup=bbVarValue
50syn match bbVarValue ".*$" contained contains=bbString,bbVarDeref 60syn match bbVarValue ".*$" contained contains=bbString
51
52 61
53" BitBake variable metadata flags 62" BitBake variable metadata flags
54syn match bbVarFlagDef "^\([a-zA-Z0-9\-_\.]\+\)\(\[[a-zA-Z0-9\-_\.]\+\]\)\@=" contains=bbIdentifier nextgroup=bbVarFlagFlag 63syn match bbVarFlagDef "^\([a-zA-Z0-9\-_\.]\+\)\(\[[a-zA-Z0-9\-_\.]\+\]\)\@=" contains=bbIdentifier nextgroup=bbVarFlagFlag
@@ -61,10 +70,6 @@ syn match bbFunction "\h\w*" display contained
61 70
62 71
63" BitBake python metadata 72" BitBake python metadata
64syn include @python syntax/python.vim
65if exists("b:current_syntax")
66 unlet b:current_syntax
67endif
68 73
69syn keyword bbPythonFlag python contained nextgroup=bbFunction 74syn keyword bbPythonFlag python contained nextgroup=bbFunction
70syn match bbPythonFuncDef "^\(python\s\+\)\(\w\+\)\?\(\s*()\s*\)\({\)\@=" contains=bbPythonFlag,bbFunction,bbDelimiter nextgroup=bbPythonFuncRegion skipwhite 75syn match bbPythonFuncDef "^\(python\s\+\)\(\w\+\)\?\(\s*()\s*\)\({\)\@=" contains=bbPythonFlag,bbFunction,bbDelimiter nextgroup=bbPythonFuncRegion skipwhite
@@ -98,7 +103,6 @@ syn match bbStatementRest ".*$" contained contains=bbString,bbVarDeref
98" 103"
99hi def link bbArrayBrackets Statement 104hi def link bbArrayBrackets Statement
100hi def link bbUnmatched Error 105hi def link bbUnmatched Error
101hi def link bbVarDeref String
102hi def link bbContinue Special 106hi def link bbContinue Special
103hi def link bbDef Statement 107hi def link bbDef Statement
104hi def link bbPythonFlag Type 108hi def link bbPythonFlag Type
@@ -116,5 +120,8 @@ hi def link bbIdentifier Identifier
116hi def link bbVarEq Operator 120hi def link bbVarEq Operator
117hi def link bbQuote String 121hi def link bbQuote String
118hi def link bbVarValue String 122hi def link bbVarValue String
123" hi def link bbVarInlinePy PreProc
124hi def link bbVarDeref PreProc
125hi def link bbVarBraces PreProc
119 126
120let b:current_syntax = "bb" 127let b:current_syntax = "bb"
diff --git a/bitbake/doc/bitbake.1 b/bitbake/doc/bitbake.1
index e687f0a42a..036402e8ac 100644
--- a/bitbake/doc/bitbake.1
+++ b/bitbake/doc/bitbake.1
@@ -32,7 +32,7 @@ command.
32\fBbitbake\fP is a program that executes the specified task (default is 'build') 32\fBbitbake\fP is a program that executes the specified task (default is 'build')
33for a given set of BitBake files. 33for a given set of BitBake files.
34.br 34.br
35It expects that BBFILES is defined, which is a space seperated list of files to 35It expects that BBFILES is defined, which is a space separated list of files to
36be executed. BBFILES does support wildcards. 36be executed. BBFILES does support wildcards.
37.br 37.br
38Default BBFILES are the .bb files in the current directory. 38Default BBFILES are the .bb files in the current directory.
@@ -67,7 +67,7 @@ drop into the interactive mode also called the BitBake shell.
67Specify task to execute. Note that this only executes the specified task for 67Specify task to execute. Note that this only executes the specified task for
68the providee and the packages it depends on, i.e. 'compile' does not implicitly 68the providee and the packages it depends on, i.e. 'compile' does not implicitly
69call stage for the dependencies (IOW: use only if you know what you are doing). 69call stage for the dependencies (IOW: use only if you know what you are doing).
70Depending on the base.bbclass a listtaks tasks is defined and will show 70Depending on the base.bbclass a listtasks task is defined and will show
71available tasks. 71available tasks.
72.TP 72.TP
73.B \-rFILE, \-\-read=FILE 73.B \-rFILE, \-\-read=FILE
diff --git a/bitbake/doc/manual/usermanual.xml b/bitbake/doc/manual/usermanual.xml
index a01801e03f..cdd05998a5 100644
--- a/bitbake/doc/manual/usermanual.xml
+++ b/bitbake/doc/manual/usermanual.xml
@@ -119,7 +119,7 @@ will be introduced.</para>
119 </section> 119 </section>
120 <section> 120 <section>
121 <title>Conditional metadata set</title> 121 <title>Conditional metadata set</title>
122 <para>OVERRIDES is a <quote>:</quote> seperated variable containing each item you want to satisfy conditions. So, if you have a variable which is conditional on <quote>arm</quote>, and <quote>arm</quote> is in OVERRIDES, then the <quote>arm</quote> specific version of the variable is used rather than the non-conditional version. Example:</para> 122 <para>OVERRIDES is a <quote>:</quote> separated variable containing each item you want to satisfy conditions. So, if you have a variable which is conditional on <quote>arm</quote>, and <quote>arm</quote> is in OVERRIDES, then the <quote>arm</quote> specific version of the variable is used rather than the non-conditional version. Example:</para>
123 <para><screen><varname>OVERRIDES</varname> = "architecture:os:machine" 123 <para><screen><varname>OVERRIDES</varname> = "architecture:os:machine"
124<varname>TEST</varname> = "defaultvalue" 124<varname>TEST</varname> = "defaultvalue"
125<varname>TEST_os</varname> = "osspecificvalue" 125<varname>TEST_os</varname> = "osspecificvalue"
@@ -184,7 +184,7 @@ include</literal> directive.</para>
184 <section> 184 <section>
185 <title>Inheritance</title> 185 <title>Inheritance</title>
186 <para><emphasis>NOTE:</emphasis> This is only supported in .bb and .bbclass files.</para> 186 <para><emphasis>NOTE:</emphasis> This is only supported in .bb and .bbclass files.</para>
187 <para>The <literal>inherit</literal> directive is a means of specifying what classes of functionality your .bb requires. It is a rudamentary form of inheritence. For example, you can easily abstract out the tasks involved in building a package that uses autoconf and automake, and put that into a bbclass for your packages to make use of. A given bbclass is located by searching for classes/filename.oeclass in <envar>BBPATH</envar>, where filename is what you inherited.</para> 187 <para>The <literal>inherit</literal> directive is a means of specifying what classes of functionality your .bb requires. It is a rudimentary form of inheritance. For example, you can easily abstract out the tasks involved in building a package that uses autoconf and automake, and put that into a bbclass for your packages to make use of. A given bbclass is located by searching for classes/filename.oeclass in <envar>BBPATH</envar>, where filename is what you inherited.</para>
188 </section> 188 </section>
189 <section> 189 <section>
190 <title>Tasks</title> 190 <title>Tasks</title>
@@ -263,11 +263,11 @@ of the event and the content of the <varname>FILE</varname> variable.</para>
263 </section> 263 </section>
264 <section> 264 <section>
265 <title>Classes</title> 265 <title>Classes</title>
266 <para>BitBake classes are our rudamentary inheritence mechanism. As briefly mentioned in the metadata introduction, they're parsed when an <literal>inherit</literal> directive is encountered, and they are located in classes/ relative to the dirs in <envar>BBPATH</envar>.</para> 266 <para>BitBake classes are our rudimentary inheritance mechanism. As briefly mentioned in the metadata introduction, they're parsed when an <literal>inherit</literal> directive is encountered, and they are located in classes/ relative to the dirs in <envar>BBPATH</envar>.</para>
267 </section> 267 </section>
268 <section> 268 <section>
269 <title>.bb Files</title> 269 <title>.bb Files</title>
270 <para>A BitBake (.bb) file is a logical unit of tasks to be executed. Normally this is a package to be built. Inter-.bb dependencies are obeyed. The files themselves are located via the <varname>BBFILES</varname> variable, which is set to a space seperated list of .bb files, and does handle wildcards.</para> 270 <para>A BitBake (.bb) file is a logical unit of tasks to be executed. Normally this is a package to be built. Inter-.bb dependencies are obeyed. The files themselves are located via the <varname>BBFILES</varname> variable, which is set to a space separated list of .bb files, and does handle wildcards.</para>
271 </section> 271 </section>
272 </section> 272 </section>
273 </chapter> 273 </chapter>
@@ -352,15 +352,7 @@ will be tried first when fetching a file if that fails the actual file will be t
352 352
353 353
354 <chapter> 354 <chapter>
355 <title>Commands</title> 355 <title>The bitbake command</title>
356 <section>
357 <title>bbread</title>
358 <para>bbread is a command for displaying BitBake metadata. When run with no arguments, it has the core parse 'conf/bitbake.conf', as located in BBPATH, and displays that. If you supply a file on the commandline, such as a .bb, then it parses that afterwards, using the aforementioned configuration metadata.</para>
359 <para><emphasis>NOTE: the stand a lone bbread command was removed. Instead of bbread use bitbake -e.
360 </emphasis></para>
361 </section>
362 <section>
363 <title>bitbake</title>
364 <section> 356 <section>
365 <title>Introduction</title> 357 <title>Introduction</title>
366 <para>bitbake is the primary command in the system. It facilitates executing tasks in a single .bb file, or executing a given task on a set of multiple .bb files, accounting for interdependencies amongst them.</para> 358 <para>bitbake is the primary command in the system. It facilitates executing tasks in a single .bb file, or executing a given task on a set of multiple .bb files, accounting for interdependencies amongst them.</para>
@@ -372,7 +364,7 @@ will be tried first when fetching a file if that fails the actual file will be t
372usage: bitbake [options] [package ...] 364usage: bitbake [options] [package ...]
373 365
374Executes the specified task (default is 'build') for a given set of BitBake files. 366Executes the specified task (default is 'build') for a given set of BitBake files.
375It expects that BBFILES is defined, which is a space seperated list of files to 367It expects that BBFILES is defined, which is a space separated list of files to
376be executed. BBFILES does support wildcards. 368be executed. BBFILES does support wildcards.
377Default BBFILES are the .bb files in the current directory. 369Default BBFILES are the .bb files in the current directory.
378 370
@@ -394,7 +386,7 @@ options:
394 it depends on, i.e. 'compile' does not implicitly call 386 it depends on, i.e. 'compile' does not implicitly call
395 stage for the dependencies (IOW: use only if you know 387 stage for the dependencies (IOW: use only if you know
396 what you are doing). Depending on the base.bbclass a 388 what you are doing). Depending on the base.bbclass a
397 listtasks tasks is defined and will show available 389 listtasks task is defined and will show available
398 tasks 390 tasks
399 -r FILE, --read=FILE read the specified file before bitbake.conf 391 -r FILE, --read=FILE read the specified file before bitbake.conf
400 -v, --verbose output more chit-chat to the terminal 392 -v, --verbose output more chit-chat to the terminal
@@ -417,6 +409,7 @@ options:
417 Show debug logging for the specified logging domains 409 Show debug logging for the specified logging domains
418 -P, --profile profile the command and print a report 410 -P, --profile profile the command and print a report
419 411
412
420</screen> 413</screen>
421 </para> 414 </para>
422 <para> 415 <para>
@@ -462,12 +455,12 @@ Two files will be written into the current working directory, <emphasis>depends.
462 </section> 455 </section>
463 <section> 456 <section>
464 <title>Metadata</title> 457 <title>Metadata</title>
465 <para>As you may have seen in the usage information, or in the information about .bb files, the BBFILES variable is how the bitbake tool locates its files. This variable is a space seperated list of files that are available, and supports wildcards. 458 <para>As you may have seen in the usage information, or in the information about .bb files, the BBFILES variable is how the bitbake tool locates its files. This variable is a space separated list of files that are available, and supports wildcards.
466 <example> 459 <example>
467 <title>Setting BBFILES</title> 460 <title>Setting BBFILES</title>
468 <programlisting><varname>BBFILES</varname> = "/path/to/bbfiles/*.bb"</programlisting> 461 <programlisting><varname>BBFILES</varname> = "/path/to/bbfiles/*.bb"</programlisting>
469 </example></para> 462 </example></para>
470 <para>With regard to dependencies, it expects the .bb to define a <varname>DEPENDS</varname> variable, which contains a space seperated list of <quote>package names</quote>, which themselves are the <varname>PN</varname> variable. The <varname>PN</varname> variable is, in general, by default, set to a component of the .bb filename.</para> 463 <para>With regard to dependencies, it expects the .bb to define a <varname>DEPENDS</varname> variable, which contains a space separated list of <quote>package names</quote>, which themselves are the <varname>PN</varname> variable. The <varname>PN</varname> variable is, in general, by default, set to a component of the .bb filename.</para>
471 <example> 464 <example>
472 <title>Depending on another .bb</title> 465 <title>Depending on another .bb</title>
473 <para>a.bb: 466 <para>a.bb:
@@ -514,6 +507,5 @@ BBFILE_PRIORITY_upstream = "5"
514BBFILE_PRIORITY_local = "10"</screen> 507BBFILE_PRIORITY_local = "10"</screen>
515 </example> 508 </example>
516 </section> 509 </section>
517 </section>
518 </chapter> 510 </chapter>
519</book> 511</book>
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
index b8f7c7f59e..f2f8f656d8 100644
--- a/bitbake/lib/bb/__init__.py
+++ b/bitbake/lib/bb/__init__.py
@@ -21,7 +21,7 @@
21# with this program; if not, write to the Free Software Foundation, Inc., 21# with this program; if not, write to the Free Software Foundation, Inc.,
22# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 22# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
23 23
24__version__ = "1.8.13" 24__version__ = "1.9.0"
25 25
26__all__ = [ 26__all__ = [
27 27
@@ -54,6 +54,7 @@ __all__ = [
54# modules 54# modules
55 "parse", 55 "parse",
56 "data", 56 "data",
57 "command",
57 "event", 58 "event",
58 "build", 59 "build",
59 "fetch", 60 "fetch",
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
index 1d6742b6e6..6d80b4b549 100644
--- a/bitbake/lib/bb/build.py
+++ b/bitbake/lib/bb/build.py
@@ -25,8 +25,8 @@
25# 25#
26#Based on functions from the base bb module, Copyright 2003 Holger Schurig 26#Based on functions from the base bb module, Copyright 2003 Holger Schurig
27 27
28from bb import data, fetch, event, mkdirhier, utils 28from bb import data, event, mkdirhier, utils
29import bb, os 29import bb, os, sys
30 30
31# When we execute a python function we'd like certain things 31# When we execute a python function we'd like certain things
32# in all namespaces, hence we add them to __builtins__ 32# in all namespaces, hence we add them to __builtins__
@@ -37,7 +37,11 @@ __builtins__['os'] = os
37 37
38# events 38# events
39class FuncFailed(Exception): 39class FuncFailed(Exception):
40 """Executed function failed""" 40 """
41 Executed function failed
42 First parameter is a message
43 Second parameter is a logfile (optional)
44 """
41 45
42class EventException(Exception): 46class EventException(Exception):
43 """Exception which is associated with an Event.""" 47 """Exception which is associated with an Event."""
@@ -50,7 +54,9 @@ class TaskBase(event.Event):
50 54
51 def __init__(self, t, d ): 55 def __init__(self, t, d ):
52 self._task = t 56 self._task = t
53 event.Event.__init__(self, d) 57 self._package = bb.data.getVar("PF", d, 1)
58 event.Event.__init__(self)
59 self._message = "package %s: task %s: %s" % (bb.data.getVar("PF", d, 1), t, bb.event.getName(self)[4:])
54 60
55 def getTask(self): 61 def getTask(self):
56 return self._task 62 return self._task
@@ -68,6 +74,10 @@ class TaskSucceeded(TaskBase):
68 74
69class TaskFailed(TaskBase): 75class TaskFailed(TaskBase):
70 """Task execution failed""" 76 """Task execution failed"""
77 def __init__(self, msg, logfile, t, d ):
78 self.logfile = logfile
79 self.msg = msg
80 TaskBase.__init__(self, t, d)
71 81
72class InvalidTask(TaskBase): 82class InvalidTask(TaskBase):
73 """Invalid Task""" 83 """Invalid Task"""
@@ -104,42 +114,116 @@ def exec_func(func, d, dirs = None):
104 else: 114 else:
105 adir = data.getVar('B', d, 1) 115 adir = data.getVar('B', d, 1)
106 116
117 # Save current directory
107 try: 118 try:
108 prevdir = os.getcwd() 119 prevdir = os.getcwd()
109 except OSError: 120 except OSError:
110 prevdir = data.getVar('TOPDIR', d, True) 121 prevdir = data.getVar('TOPDIR', d, True)
122
123 # Setup logfiles
124 t = data.getVar('T', d, 1)
125 if not t:
126 bb.msg.fatal(bb.msg.domain.Build, "T not set")
127 mkdirhier(t)
128 # Gross hack, FIXME
129 import random
130 logfile = "%s/log.%s.%s.%s" % (t, func, str(os.getpid()),random.random())
131 runfile = "%s/run.%s.%s" % (t, func, str(os.getpid()))
132
133 # Change to correct directory (if specified)
111 if adir and os.access(adir, os.F_OK): 134 if adir and os.access(adir, os.F_OK):
112 os.chdir(adir) 135 os.chdir(adir)
113 136
137 # Handle logfiles
138 si = file('/dev/null', 'r')
139 try:
140 if bb.msg.debug_level['default'] > 0 or ispython:
141 so = os.popen("tee \"%s\"" % logfile, "w")
142 else:
143 so = file(logfile, 'w')
144 except OSError, e:
145 bb.msg.error(bb.msg.domain.Build, "opening log file: %s" % e)
146 pass
147
148 se = so
149
150 # Dup the existing fds so we dont lose them
151 osi = [os.dup(sys.stdin.fileno()), sys.stdin.fileno()]
152 oso = [os.dup(sys.stdout.fileno()), sys.stdout.fileno()]
153 ose = [os.dup(sys.stderr.fileno()), sys.stderr.fileno()]
154
155 # Replace those fds with our own
156 os.dup2(si.fileno(), osi[1])
157 os.dup2(so.fileno(), oso[1])
158 os.dup2(se.fileno(), ose[1])
159
114 locks = [] 160 locks = []
115 lockfiles = (data.expand(flags['lockfiles'], d) or "").split() 161 lockfiles = (data.expand(flags['lockfiles'], d) or "").split()
116 for lock in lockfiles: 162 for lock in lockfiles:
117 locks.append(bb.utils.lockfile(lock)) 163 locks.append(bb.utils.lockfile(lock))
118 164
119 if flags['python']: 165 try:
120 exec_func_python(func, d) 166 # Run the function
121 else: 167 if ispython:
122 exec_func_shell(func, d, flags) 168 exec_func_python(func, d, runfile, logfile)
169 else:
170 exec_func_shell(func, d, runfile, logfile, flags)
171
172 # Restore original directory
173 try:
174 os.chdir(prevdir)
175 except:
176 pass
123 177
124 for lock in locks: 178 finally:
125 bb.utils.unlockfile(lock)
126 179
127 if os.path.exists(prevdir): 180 # Unlock any lockfiles
128 os.chdir(prevdir) 181 for lock in locks:
182 bb.utils.unlockfile(lock)
183
184 # Restore the backup fds
185 os.dup2(osi[0], osi[1])
186 os.dup2(oso[0], oso[1])
187 os.dup2(ose[0], ose[1])
188
189 # Close our logs
190 si.close()
191 so.close()
192 se.close()
129 193
130def exec_func_python(func, d): 194 if os.path.exists(logfile) and os.path.getsize(logfile) == 0:
195 bb.msg.debug(2, bb.msg.domain.Build, "Zero size logfile %s, removing" % logfile)
196 os.remove(logfile)
197
198 # Close the backup fds
199 os.close(osi[0])
200 os.close(oso[0])
201 os.close(ose[0])
202
203def exec_func_python(func, d, runfile, logfile):
131 """Execute a python BB 'function'""" 204 """Execute a python BB 'function'"""
132 import re 205 import re, os
133 206
134 bbfile = bb.data.getVar('FILE', d, 1) 207 bbfile = bb.data.getVar('FILE', d, 1)
135 tmp = "def " + func + "():\n%s" % data.getVar(func, d) 208 tmp = "def " + func + "():\n%s" % data.getVar(func, d)
136 tmp += '\n' + func + '()' 209 tmp += '\n' + func + '()'
210
211 f = open(runfile, "w")
212 f.write(tmp)
137 comp = utils.better_compile(tmp, func, bbfile) 213 comp = utils.better_compile(tmp, func, bbfile)
138 g = {} # globals 214 g = {} # globals
139 g['d'] = d 215 g['d'] = d
140 utils.better_exec(comp, g, tmp, bbfile) 216 try:
217 utils.better_exec(comp, g, tmp, bbfile)
218 except:
219 (t,value,tb) = sys.exc_info()
220
221 if t in [bb.parse.SkipPackage, bb.build.FuncFailed]:
222 raise
223 bb.msg.error(bb.msg.domain.Build, "Function %s failed" % func)
224 raise FuncFailed("function %s failed" % func, logfile)
141 225
142def exec_func_shell(func, d, flags): 226def exec_func_shell(func, d, runfile, logfile, flags):
143 """Execute a shell BB 'function' Returns true if execution was successful. 227 """Execute a shell BB 'function' Returns true if execution was successful.
144 228
145 For this, it creates a bash shell script in the tmp directory, writes the local 229 For this, it creates a bash shell script in the tmp directory, writes the local
@@ -149,23 +233,13 @@ def exec_func_shell(func, d, flags):
149 of the directories you need created prior to execution. The last 233 of the directories you need created prior to execution. The last
150 item in the list is where we will chdir/cd to. 234 item in the list is where we will chdir/cd to.
151 """ 235 """
152 import sys
153 236
154 deps = flags['deps'] 237 deps = flags['deps']
155 check = flags['check'] 238 check = flags['check']
156 interact = flags['interactive']
157 if check in globals(): 239 if check in globals():
158 if globals()[check](func, deps): 240 if globals()[check](func, deps):
159 return 241 return
160 242
161 global logfile
162 t = data.getVar('T', d, 1)
163 if not t:
164 return 0
165 mkdirhier(t)
166 logfile = "%s/log.%s.%s" % (t, func, str(os.getpid()))
167 runfile = "%s/run.%s.%s" % (t, func, str(os.getpid()))
168
169 f = open(runfile, "w") 243 f = open(runfile, "w")
170 f.write("#!/bin/sh -e\n") 244 f.write("#!/bin/sh -e\n")
171 if bb.msg.debug_level['default'] > 0: f.write("set -x\n") 245 if bb.msg.debug_level['default'] > 0: f.write("set -x\n")
@@ -177,91 +251,21 @@ def exec_func_shell(func, d, flags):
177 os.chmod(runfile, 0775) 251 os.chmod(runfile, 0775)
178 if not func: 252 if not func:
179 bb.msg.error(bb.msg.domain.Build, "Function not specified") 253 bb.msg.error(bb.msg.domain.Build, "Function not specified")
180 raise FuncFailed() 254 raise FuncFailed("Function not specified for exec_func_shell")
181
182 # open logs
183 si = file('/dev/null', 'r')
184 try:
185 if bb.msg.debug_level['default'] > 0:
186 so = os.popen("tee \"%s\"" % logfile, "w")
187 else:
188 so = file(logfile, 'w')
189 except OSError, e:
190 bb.msg.error(bb.msg.domain.Build, "opening log file: %s" % e)
191 pass
192
193 se = so
194
195 if not interact:
196 # dup the existing fds so we dont lose them
197 osi = [os.dup(sys.stdin.fileno()), sys.stdin.fileno()]
198 oso = [os.dup(sys.stdout.fileno()), sys.stdout.fileno()]
199 ose = [os.dup(sys.stderr.fileno()), sys.stderr.fileno()]
200
201 # replace those fds with our own
202 os.dup2(si.fileno(), osi[1])
203 os.dup2(so.fileno(), oso[1])
204 os.dup2(se.fileno(), ose[1])
205 255
206 # execute function 256 # execute function
207 prevdir = os.getcwd()
208 if flags['fakeroot']: 257 if flags['fakeroot']:
209 maybe_fakeroot = "PATH=\"%s\" fakeroot " % bb.data.getVar("PATH", d, 1) 258 maybe_fakeroot = "PATH=\"%s\" fakeroot " % bb.data.getVar("PATH", d, 1)
210 else: 259 else:
211 maybe_fakeroot = '' 260 maybe_fakeroot = ''
212 lang_environment = "LC_ALL=C " 261 lang_environment = "LC_ALL=C "
213 ret = os.system('%s%ssh -e %s' % (lang_environment, maybe_fakeroot, runfile)) 262 ret = os.system('%s%ssh -e %s' % (lang_environment, maybe_fakeroot, runfile))
214 try:
215 os.chdir(prevdir)
216 except:
217 pass
218
219 if not interact:
220 # restore the backups
221 os.dup2(osi[0], osi[1])
222 os.dup2(oso[0], oso[1])
223 os.dup2(ose[0], ose[1])
224 263
225 # close our logs 264 if ret == 0:
226 si.close()
227 so.close()
228 se.close()
229
230 if os.path.exists(logfile) and os.path.getsize(logfile) == 0:
231 bb.msg.debug(2, bb.msg.domain.Build, "Zero size logfile %s, removing" % logfile)
232 os.remove(logfile)
233
234 # close the backup fds
235 os.close(osi[0])
236 os.close(oso[0])
237 os.close(ose[0])
238
239 if ret==0:
240 if bb.msg.debug_level['default'] > 0:
241 os.remove(runfile)
242# os.remove(logfile)
243 return 265 return
244 else: 266
245 bb.msg.error(bb.msg.domain.Build, "function %s failed" % func) 267 bb.msg.error(bb.msg.domain.Build, "Function %s failed" % func)
246 if data.getVar("BBINCLUDELOGS", d): 268 raise FuncFailed("function %s failed" % func, logfile)
247 bb.msg.error(bb.msg.domain.Build, "log data follows (%s)" % logfile)
248 number_of_lines = data.getVar("BBINCLUDELOGS_LINES", d)
249 if number_of_lines:
250 os.system('tail -n%s %s' % (number_of_lines, logfile))
251 elif os.path.exists(logfile):
252 f = open(logfile, "r")
253 while True:
254 l = f.readline()
255 if l == '':
256 break
257 l = l.rstrip()
258 print '| %s' % l
259 f.close()
260 else:
261 bb.msg.error(bb.msg.domain.Build, "There was no logfile output")
262 else:
263 bb.msg.error(bb.msg.domain.Build, "see log in %s" % logfile)
264 raise FuncFailed( logfile )
265 269
266 270
267def exec_task(task, d): 271def exec_task(task, d):
@@ -282,14 +286,20 @@ def exec_task(task, d):
282 data.setVar('OVERRIDES', 'task-%s:%s' % (task[3:], old_overrides), localdata) 286 data.setVar('OVERRIDES', 'task-%s:%s' % (task[3:], old_overrides), localdata)
283 data.update_data(localdata) 287 data.update_data(localdata)
284 data.expandKeys(localdata) 288 data.expandKeys(localdata)
285 event.fire(TaskStarted(task, localdata)) 289 event.fire(TaskStarted(task, localdata), localdata)
286 exec_func(task, localdata) 290 exec_func(task, localdata)
287 event.fire(TaskSucceeded(task, localdata)) 291 event.fire(TaskSucceeded(task, localdata), localdata)
288 except FuncFailed, reason: 292 except FuncFailed, message:
289 bb.msg.note(1, bb.msg.domain.Build, "Task failed: %s" % reason ) 293 # Try to extract the optional logfile
290 failedevent = TaskFailed(task, d) 294 try:
291 event.fire(failedevent) 295 (msg, logfile) = message
292 raise EventException("Function failed in task: %s" % reason, failedevent) 296 except:
297 logfile = None
298 msg = message
299 bb.msg.note(1, bb.msg.domain.Build, "Task failed: %s" % message )
300 failedevent = TaskFailed(msg, logfile, task, d)
301 event.fire(failedevent, d)
302 raise EventException("Function failed in task: %s" % message, failedevent)
293 303
294 # make stamp, or cause event and raise exception 304 # make stamp, or cause event and raise exception
295 if not data.getVarFlag(task, 'nostamp', d) and not data.getVarFlag(task, 'selfstamp', d): 305 if not data.getVarFlag(task, 'nostamp', d) and not data.getVarFlag(task, 'selfstamp', d):
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index d30d57d33b..2f1b8fa601 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -134,7 +134,18 @@ class Cache:
134 self.data = data 134 self.data = data
135 135
136 # Make sure __depends makes the depends_cache 136 # Make sure __depends makes the depends_cache
137 self.getVar("__depends", virtualfn, True) 137 # If we're a virtual class we need to make sure all our depends are appended
138 # to the depends of fn.
139 depends = self.getVar("__depends", virtualfn, True) or []
140 if "__depends" not in self.depends_cache[fn] or not self.depends_cache[fn]["__depends"]:
141 self.depends_cache[fn]["__depends"] = depends
142 for dep in depends:
143 if dep not in self.depends_cache[fn]["__depends"]:
144 self.depends_cache[fn]["__depends"].append(dep)
145
146 # Make sure BBCLASSEXTEND always makes the cache too
147 self.getVar('BBCLASSEXTEND', virtualfn, True)
148
138 self.depends_cache[virtualfn]["CACHETIMESTAMP"] = bb.parse.cached_mtime(fn) 149 self.depends_cache[virtualfn]["CACHETIMESTAMP"] = bb.parse.cached_mtime(fn)
139 150
140 def virtualfn2realfn(self, virtualfn): 151 def virtualfn2realfn(self, virtualfn):
@@ -170,11 +181,8 @@ class Cache:
170 181
171 bb.msg.debug(1, bb.msg.domain.Cache, "Parsing %s (full)" % fn) 182 bb.msg.debug(1, bb.msg.domain.Cache, "Parsing %s (full)" % fn)
172 183
173 bb_data, skipped = self.load_bbfile(fn, cfgData) 184 bb_data = self.load_bbfile(fn, cfgData)
174 if isinstance(bb_data, dict): 185 return bb_data[cls]
175 return bb_data[cls]
176
177 return bb_data
178 186
179 def loadData(self, fn, cfgData, cacheData): 187 def loadData(self, fn, cfgData, cacheData):
180 """ 188 """
@@ -184,42 +192,39 @@ class Cache:
184 to record the variables accessed. 192 to record the variables accessed.
185 Return the cache status and whether the file was skipped when parsed 193 Return the cache status and whether the file was skipped when parsed
186 """ 194 """
195 skipped = 0
196 virtuals = 0
197
187 if fn not in self.checked: 198 if fn not in self.checked:
188 self.cacheValidUpdate(fn) 199 self.cacheValidUpdate(fn)
200
189 if self.cacheValid(fn): 201 if self.cacheValid(fn):
190 if "SKIPPED" in self.depends_cache[fn]:
191 return True, True
192 self.handle_data(fn, cacheData)
193 multi = self.getVar('BBCLASSEXTEND', fn, True) 202 multi = self.getVar('BBCLASSEXTEND', fn, True)
194 if multi: 203 for cls in (multi or "").split() + [""]:
195 for cls in multi.split(): 204 virtualfn = self.realfn2virtual(fn, cls)
196 virtualfn = self.realfn2virtual(fn, cls) 205 if self.depends_cache[virtualfn]["__SKIPPED"]:
197 # Pretend we're clean so getVar works 206 skipped += 1
198 self.clean[virtualfn] = "" 207 bb.msg.debug(1, bb.msg.domain.Cache, "Skipping %s" % virtualfn)
199 self.handle_data(virtualfn, cacheData) 208 continue
200 return True, False 209 self.handle_data(virtualfn, cacheData)
210 virtuals += 1
211 return True, skipped, virtuals
201 212
202 bb.msg.debug(1, bb.msg.domain.Cache, "Parsing %s" % fn) 213 bb.msg.debug(1, bb.msg.domain.Cache, "Parsing %s" % fn)
203 214
204 bb_data, skipped = self.load_bbfile(fn, cfgData) 215 bb_data = self.load_bbfile(fn, cfgData)
205
206 if skipped:
207 if isinstance(bb_data, dict):
208 self.setData(fn, fn, bb_data[""])
209 else:
210 self.setData(fn, fn, bb_data)
211 return False, skipped
212 216
213 if isinstance(bb_data, dict): 217 for data in bb_data:
214 for data in bb_data: 218 virtualfn = self.realfn2virtual(fn, data)
215 virtualfn = self.realfn2virtual(fn, data) 219 self.setData(virtualfn, fn, bb_data[data])
216 self.setData(virtualfn, fn, bb_data[data]) 220 if self.getVar("__SKIPPED", virtualfn, True):
221 skipped += 1
222 bb.msg.debug(1, bb.msg.domain.Cache, "Skipping %s" % virtualfn)
223 else:
217 self.handle_data(virtualfn, cacheData) 224 self.handle_data(virtualfn, cacheData)
218 return False, skipped 225 virtuals += 1
226 return False, skipped, virtuals
219 227
220 self.setData(fn, fn, bb_data)
221 self.handle_data(fn, cacheData)
222 return False, skipped
223 228
224 def cacheValid(self, fn): 229 def cacheValid(self, fn):
225 """ 230 """
@@ -286,16 +291,13 @@ class Cache:
286 if not fn in self.clean: 291 if not fn in self.clean:
287 self.clean[fn] = "" 292 self.clean[fn] = ""
288 293
289 return True 294 # Mark extended class data as clean too
295 multi = self.getVar('BBCLASSEXTEND', fn, True)
296 for cls in (multi or "").split():
297 virtualfn = self.realfn2virtual(fn, cls)
298 self.clean[virtualfn] = ""
290 299
291 def skip(self, fn): 300 return True
292 """
293 Mark a fn as skipped
294 Called from the parser
295 """
296 if not fn in self.depends_cache:
297 self.depends_cache[fn] = {}
298 self.depends_cache[fn]["SKIPPED"] = "1"
299 301
300 def remove(self, fn): 302 def remove(self, fn):
301 """ 303 """
@@ -462,10 +464,7 @@ class Cache:
462 try: 464 try:
463 bb_data = parse.handle(bbfile, bb_data) # read .bb data 465 bb_data = parse.handle(bbfile, bb_data) # read .bb data
464 os.chdir(oldpath) 466 os.chdir(oldpath)
465 return bb_data, False 467 return bb_data
466 except bb.parse.SkipPackage:
467 os.chdir(oldpath)
468 return bb_data, True
469 except: 468 except:
470 os.chdir(oldpath) 469 os.chdir(oldpath)
471 raise 470 raise
diff --git a/bitbake-dev/lib/bb/command.py b/bitbake/lib/bb/command.py
index 2bb5365c0c..2bb5365c0c 100644
--- a/bitbake-dev/lib/bb/command.py
+++ b/bitbake/lib/bb/command.py
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index 14ccfb59aa..8036d7e9d5 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -7,7 +7,7 @@
7# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer 7# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
8# Copyright (C) 2005 Holger Hans Peter Freyther 8# Copyright (C) 2005 Holger Hans Peter Freyther
9# Copyright (C) 2005 ROAD GmbH 9# Copyright (C) 2005 ROAD GmbH
10# Copyright (C) 2006 Richard Purdie 10# Copyright (C) 2006 - 2007 Richard Purdie
11# 11#
12# This program is free software; you can redistribute it and/or modify 12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License version 2 as 13# it under the terms of the GNU General Public License version 2 as
@@ -25,9 +25,35 @@
25import sys, os, getopt, glob, copy, os.path, re, time 25import sys, os, getopt, glob, copy, os.path, re, time
26import bb 26import bb
27from bb import utils, data, parse, event, cache, providers, taskdata, runqueue 27from bb import utils, data, parse, event, cache, providers, taskdata, runqueue
28from bb import command
29import bb.server.xmlrpc
28import itertools, sre_constants 30import itertools, sre_constants
29 31
30parsespin = itertools.cycle( r'|/-\\' ) 32class MultipleMatches(Exception):
33 """
34 Exception raised when multiple file matches are found
35 """
36
37class ParsingErrorsFound(Exception):
38 """
39 Exception raised when parsing errors are found
40 """
41
42class NothingToBuild(Exception):
43 """
44 Exception raised when there is nothing to build
45 """
46
47
48# Different states cooker can be in
49cookerClean = 1
50cookerParsing = 2
51cookerParsed = 3
52
53# Different action states the cooker can be in
54cookerRun = 1 # Cooker is running normally
55cookerShutdown = 2 # Active tasks should be brought to a controlled stop
56cookerStop = 3 # Stop, now!
31 57
32#============================================================================# 58#============================================================================#
33# BBCooker 59# BBCooker
@@ -37,12 +63,14 @@ class BBCooker:
37 Manages one bitbake build run 63 Manages one bitbake build run
38 """ 64 """
39 65
40 def __init__(self, configuration): 66 def __init__(self, configuration, server):
41 self.status = None 67 self.status = None
42 68
43 self.cache = None 69 self.cache = None
44 self.bb_cache = None 70 self.bb_cache = None
45 71
72 self.server = server.BitBakeServer(self)
73
46 self.configuration = configuration 74 self.configuration = configuration
47 75
48 if self.configuration.verbose: 76 if self.configuration.verbose:
@@ -58,17 +86,15 @@ class BBCooker:
58 86
59 self.configuration.data = bb.data.init() 87 self.configuration.data = bb.data.init()
60 88
61 def parseConfiguration(self):
62
63 bb.data.inheritFromOS(self.configuration.data) 89 bb.data.inheritFromOS(self.configuration.data)
64 90
65 # Add conf/bitbake.conf to the list of configuration files to read 91 for f in self.configuration.file:
66 self.configuration.file.append( os.path.join( "conf", "bitbake.conf" ) ) 92 self.parseConfigurationFile( f )
67 93
68 self.parseConfigurationFile(self.configuration.file) 94 self.parseConfigurationFile( os.path.join( "conf", "bitbake.conf" ) )
69 95
70 if not self.configuration.cmd: 96 if not self.configuration.cmd:
71 self.configuration.cmd = bb.data.getVar("BB_DEFAULT_TASK", self.configuration.data) or "build" 97 self.configuration.cmd = bb.data.getVar("BB_DEFAULT_TASK", self.configuration.data, True) or "build"
72 98
73 bbpkgs = bb.data.getVar('BBPKGS', self.configuration.data, True) 99 bbpkgs = bb.data.getVar('BBPKGS', self.configuration.data, True)
74 if bbpkgs and len(self.configuration.pkgs_to_build) == 0: 100 if bbpkgs and len(self.configuration.pkgs_to_build) == 0:
@@ -80,9 +106,7 @@ class BBCooker:
80 self.configuration.event_data = bb.data.createCopy(self.configuration.data) 106 self.configuration.event_data = bb.data.createCopy(self.configuration.data)
81 bb.data.update_data(self.configuration.event_data) 107 bb.data.update_data(self.configuration.event_data)
82 108
83 #
84 # TOSTOP must not be set or our children will hang when they output 109 # TOSTOP must not be set or our children will hang when they output
85 #
86 fd = sys.stdout.fileno() 110 fd = sys.stdout.fileno()
87 if os.isatty(fd): 111 if os.isatty(fd):
88 import termios 112 import termios
@@ -92,40 +116,91 @@ class BBCooker:
92 tcattr[3] = tcattr[3] & ~termios.TOSTOP 116 tcattr[3] = tcattr[3] & ~termios.TOSTOP
93 termios.tcsetattr(fd, termios.TCSANOW, tcattr) 117 termios.tcsetattr(fd, termios.TCSANOW, tcattr)
94 118
119 self.command = bb.command.Command(self)
120 self.cookerState = cookerClean
121 self.cookerAction = cookerRun
122
123 def parseConfiguration(self):
124
125
95 # Change nice level if we're asked to 126 # Change nice level if we're asked to
96 nice = bb.data.getVar("BB_NICE_LEVEL", self.configuration.data, True) 127 nice = bb.data.getVar("BB_NICE_LEVEL", self.configuration.data, True)
97 if nice: 128 if nice:
98 curnice = os.nice(0) 129 curnice = os.nice(0)
99 nice = int(nice) - curnice 130 nice = int(nice) - curnice
100 bb.msg.note(2, bb.msg.domain.Build, "Renice to %s " % os.nice(nice)) 131 bb.msg.note(2, bb.msg.domain.Build, "Renice to %s " % os.nice(nice))
101 132
133 def parseCommandLine(self):
134 # Parse any commandline into actions
135 if self.configuration.show_environment:
136 self.commandlineAction = None
137
138 if 'world' in self.configuration.pkgs_to_build:
139 bb.error("'world' is not a valid target for --environment.")
140 elif len(self.configuration.pkgs_to_build) > 1:
141 bb.error("Only one target can be used with the --environment option.")
142 elif self.configuration.buildfile and len(self.configuration.pkgs_to_build) > 0:
143 bb.error("No target should be used with the --environment and --buildfile options.")
144 elif len(self.configuration.pkgs_to_build) > 0:
145 self.commandlineAction = ["showEnvironmentTarget", self.configuration.pkgs_to_build]
146 else:
147 self.commandlineAction = ["showEnvironment", self.configuration.buildfile]
148 elif self.configuration.buildfile is not None:
149 self.commandlineAction = ["buildFile", self.configuration.buildfile, self.configuration.cmd]
150 elif self.configuration.revisions_changed:
151 self.commandlineAction = ["compareRevisions"]
152 elif self.configuration.show_versions:
153 self.commandlineAction = ["showVersions"]
154 elif self.configuration.parse_only:
155 self.commandlineAction = ["parseFiles"]
156 # FIXME - implement
157 #elif self.configuration.interactive:
158 # self.interactiveMode()
159 elif self.configuration.dot_graph:
160 if self.configuration.pkgs_to_build:
161 self.commandlineAction = ["generateDotGraph", self.configuration.pkgs_to_build, self.configuration.cmd]
162 else:
163 self.commandlineAction = None
164 bb.error("Please specify a package name for dependency graph generation.")
165 else:
166 if self.configuration.pkgs_to_build:
167 self.commandlineAction = ["buildTargets", self.configuration.pkgs_to_build, self.configuration.cmd]
168 else:
169 self.commandlineAction = None
170 bb.error("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
171
172 def runCommands(self, server, data, abort):
173 """
174 Run any queued asynchronous command
175 This is done by the idle handler so it runs in true context rather than
176 tied to any UI.
177 """
178
179 return self.command.runAsyncCommand()
102 180
103 def tryBuildPackage(self, fn, item, task, the_data): 181 def tryBuildPackage(self, fn, item, task, the_data):
104 """ 182 """
105 Build one task of a package, optionally build following task depends 183 Build one task of a package, optionally build following task depends
106 """ 184 """
107 bb.event.fire(bb.event.PkgStarted(item, the_data))
108 try: 185 try:
109 if not self.configuration.dry_run: 186 if not self.configuration.dry_run:
110 bb.build.exec_task('do_%s' % task, the_data) 187 bb.build.exec_task('do_%s' % task, the_data)
111 bb.event.fire(bb.event.PkgSucceeded(item, the_data))
112 return True 188 return True
113 except bb.build.FuncFailed: 189 except bb.build.FuncFailed:
114 bb.msg.error(bb.msg.domain.Build, "task stack execution failed") 190 bb.msg.error(bb.msg.domain.Build, "task stack execution failed")
115 bb.event.fire(bb.event.PkgFailed(item, the_data))
116 raise 191 raise
117 except bb.build.EventException, e: 192 except bb.build.EventException, e:
118 event = e.args[1] 193 event = e.args[1]
119 bb.msg.error(bb.msg.domain.Build, "%s event exception, aborting" % bb.event.getName(event)) 194 bb.msg.error(bb.msg.domain.Build, "%s event exception, aborting" % bb.event.getName(event))
120 bb.event.fire(bb.event.PkgFailed(item, the_data))
121 raise 195 raise
122 196
123 def tryBuild(self, fn): 197 def tryBuild(self, fn, task):
124 """ 198 """
125 Build a provider and its dependencies. 199 Build a provider and its dependencies.
126 build_depends is a list of previous build dependencies (not runtime) 200 build_depends is a list of previous build dependencies (not runtime)
127 If build_depends is empty, we're dealing with a runtime depends 201 If build_depends is empty, we're dealing with a runtime depends
128 """ 202 """
203
129 the_data = self.bb_cache.loadDataFull(fn, self.configuration.data) 204 the_data = self.bb_cache.loadDataFull(fn, self.configuration.data)
130 205
131 item = self.status.pkg_fn[fn] 206 item = self.status.pkg_fn[fn]
@@ -133,9 +208,13 @@ class BBCooker:
133 #if bb.build.stamp_is_current('do_%s' % self.configuration.cmd, the_data): 208 #if bb.build.stamp_is_current('do_%s' % self.configuration.cmd, the_data):
134 # return True 209 # return True
135 210
136 return self.tryBuildPackage(fn, item, self.configuration.cmd, the_data) 211 return self.tryBuildPackage(fn, item, task, the_data)
137 212
138 def showVersions(self): 213 def showVersions(self):
214
215 # Need files parsed
216 self.updateCache()
217
139 pkg_pn = self.status.pkg_pn 218 pkg_pn = self.status.pkg_pn
140 preferred_versions = {} 219 preferred_versions = {}
141 latest_versions = {} 220 latest_versions = {}
@@ -149,43 +228,36 @@ class BBCooker:
149 pkg_list = pkg_pn.keys() 228 pkg_list = pkg_pn.keys()
150 pkg_list.sort() 229 pkg_list.sort()
151 230
231 bb.msg.plain("%-35s %25s %25s" % ("Package Name", "Latest Version", "Preferred Version"))
232 bb.msg.plain("%-35s %25s %25s\n" % ("============", "==============", "================="))
233
152 for p in pkg_list: 234 for p in pkg_list:
153 pref = preferred_versions[p] 235 pref = preferred_versions[p]
154 latest = latest_versions[p] 236 latest = latest_versions[p]
155 237
156 if pref != latest: 238 prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
157 prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2] 239 lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]
158 else: 240
241 if pref == latest:
159 prefstr = "" 242 prefstr = ""
160 243
161 print "%-30s %20s %20s" % (p, latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2], 244 bb.msg.plain("%-35s %25s %25s" % (p, lateststr, prefstr))
162 prefstr)
163 245
246 def compareRevisions(self):
247 ret = bb.fetch.fetcher_compare_revisons(self.configuration.data)
248 bb.event.fire(bb.command.CookerCommandSetExitCode(ret), self.configuration.event_data)
164 249
165 def showEnvironment(self , buildfile = None, pkgs_to_build = []): 250 def showEnvironment(self, buildfile = None, pkgs_to_build = []):
166 """ 251 """
167 Show the outer or per-package environment 252 Show the outer or per-package environment
168 """ 253 """
169 fn = None 254 fn = None
170 envdata = None 255 envdata = None
171 256
172 if 'world' in pkgs_to_build:
173 print "'world' is not a valid target for --environment."
174 sys.exit(1)
175
176 if len(pkgs_to_build) > 1:
177 print "Only one target can be used with the --environment option."
178 sys.exit(1)
179
180 if buildfile: 257 if buildfile:
181 if len(pkgs_to_build) > 0:
182 print "No target should be used with the --environment and --buildfile options."
183 sys.exit(1)
184 self.cb = None 258 self.cb = None
185 self.bb_cache = bb.cache.init(self) 259 self.bb_cache = bb.cache.init(self)
186 fn = self.matchFile(buildfile) 260 fn = self.matchFile(buildfile)
187 if not fn:
188 sys.exit(1)
189 elif len(pkgs_to_build) == 1: 261 elif len(pkgs_to_build) == 1:
190 self.updateCache() 262 self.updateCache()
191 263
@@ -193,13 +265,9 @@ class BBCooker:
193 bb.data.update_data(localdata) 265 bb.data.update_data(localdata)
194 bb.data.expandKeys(localdata) 266 bb.data.expandKeys(localdata)
195 267
196 taskdata = bb.taskdata.TaskData(self.configuration.abort, self.configuration.tryaltconfigs) 268 taskdata = bb.taskdata.TaskData(self.configuration.abort)
197 269 taskdata.add_provider(localdata, self.status, pkgs_to_build[0])
198 try: 270 taskdata.add_unresolved(localdata, self.status)
199 taskdata.add_provider(localdata, self.status, pkgs_to_build[0])
200 taskdata.add_unresolved(localdata, self.status)
201 except bb.providers.NoProvider:
202 sys.exit(1)
203 271
204 targetid = taskdata.getbuild_id(pkgs_to_build[0]) 272 targetid = taskdata.getbuild_id(pkgs_to_build[0])
205 fnid = taskdata.build_targets[targetid][0] 273 fnid = taskdata.build_targets[targetid][0]
@@ -211,55 +279,69 @@ class BBCooker:
211 try: 279 try:
212 envdata = self.bb_cache.loadDataFull(fn, self.configuration.data) 280 envdata = self.bb_cache.loadDataFull(fn, self.configuration.data)
213 except IOError, e: 281 except IOError, e:
214 bb.msg.fatal(bb.msg.domain.Parsing, "Unable to read %s: %s" % (fn, e)) 282 bb.msg.error(bb.msg.domain.Parsing, "Unable to read %s: %s" % (fn, e))
283 raise
215 except Exception, e: 284 except Exception, e:
216 bb.msg.fatal(bb.msg.domain.Parsing, "%s" % e) 285 bb.msg.error(bb.msg.domain.Parsing, "%s" % e)
286 raise
287
288 class dummywrite:
289 def __init__(self):
290 self.writebuf = ""
291 def write(self, output):
292 self.writebuf = self.writebuf + output
217 293
218 # emit variables and shell functions 294 # emit variables and shell functions
219 try: 295 try:
220 data.update_data( envdata ) 296 data.update_data(envdata)
221 data.emit_env(sys.__stdout__, envdata, True) 297 wb = dummywrite()
298 data.emit_env(wb, envdata, True)
299 bb.msg.plain(wb.writebuf)
222 except Exception, e: 300 except Exception, e:
223 bb.msg.fatal(bb.msg.domain.Parsing, "%s" % e) 301 bb.msg.fatal(bb.msg.domain.Parsing, "%s" % e)
224 # emit the metadata which isnt valid shell 302 # emit the metadata which isnt valid shell
225 data.expandKeys( envdata ) 303 data.expandKeys(envdata)
226 for e in envdata.keys(): 304 for e in envdata.keys():
227 if data.getVarFlag( e, 'python', envdata ): 305 if data.getVarFlag( e, 'python', envdata ):
228 sys.__stdout__.write("\npython %s () {\n%s}\n" % (e, data.getVar(e, envdata, 1))) 306 bb.msg.plain("\npython %s () {\n%s}\n" % (e, data.getVar(e, envdata, 1)))
229 307
230 def generateDotGraph( self, pkgs_to_build, ignore_deps ): 308 def generateDepTreeData(self, pkgs_to_build, task):
231 """ 309 """
232 Generate a task dependency graph. 310 Create a dependency tree of pkgs_to_build, returning the data.
233
234 pkgs_to_build A list of packages that needs to be built
235 ignore_deps A list of names where processing of dependencies
236 should be stopped. e.g. dependencies that get
237 """ 311 """
238 312
239 for dep in ignore_deps: 313 # Need files parsed
240 self.status.ignored_dependencies.add(dep) 314 self.updateCache()
315
316 # If we are told to do the None task then query the default task
317 if (task == None):
318 task = self.configuration.cmd
319
320 pkgs_to_build = self.checkPackages(pkgs_to_build)
241 321
242 localdata = data.createCopy(self.configuration.data) 322 localdata = data.createCopy(self.configuration.data)
243 bb.data.update_data(localdata) 323 bb.data.update_data(localdata)
244 bb.data.expandKeys(localdata) 324 bb.data.expandKeys(localdata)
245 taskdata = bb.taskdata.TaskData(self.configuration.abort, self.configuration.tryaltconfigs) 325 taskdata = bb.taskdata.TaskData(self.configuration.abort)
246 326
247 runlist = [] 327 runlist = []
248 try: 328 for k in pkgs_to_build:
249 for k in pkgs_to_build: 329 taskdata.add_provider(localdata, self.status, k)
250 taskdata.add_provider(localdata, self.status, k) 330 runlist.append([k, "do_%s" % task])
251 runlist.append([k, "do_%s" % self.configuration.cmd]) 331 taskdata.add_unresolved(localdata, self.status)
252 taskdata.add_unresolved(localdata, self.status) 332
253 except bb.providers.NoProvider:
254 sys.exit(1)
255 rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist) 333 rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist)
256 rq.prepare_runqueue() 334 rq.prepare_runqueue()
257 335
258 seen_fnids = [] 336 seen_fnids = []
259 depends_file = file('depends.dot', 'w' ) 337 depend_tree = {}
260 tdepends_file = file('task-depends.dot', 'w' ) 338 depend_tree["depends"] = {}
261 print >> depends_file, "digraph depends {" 339 depend_tree["tdepends"] = {}
262 print >> tdepends_file, "digraph depends {" 340 depend_tree["pn"] = {}
341 depend_tree["rdepends-pn"] = {}
342 depend_tree["packages"] = {}
343 depend_tree["rdepends-pkg"] = {}
344 depend_tree["rrecs-pkg"] = {}
263 345
264 for task in range(len(rq.runq_fnid)): 346 for task in range(len(rq.runq_fnid)):
265 taskname = rq.runq_task[task] 347 taskname = rq.runq_task[task]
@@ -267,43 +349,118 @@ class BBCooker:
267 fn = taskdata.fn_index[fnid] 349 fn = taskdata.fn_index[fnid]
268 pn = self.status.pkg_fn[fn] 350 pn = self.status.pkg_fn[fn]
269 version = "%s:%s-%s" % self.status.pkg_pepvpr[fn] 351 version = "%s:%s-%s" % self.status.pkg_pepvpr[fn]
270 print >> tdepends_file, '"%s.%s" [label="%s %s\\n%s\\n%s"]' % (pn, taskname, pn, taskname, version, fn) 352 if pn not in depend_tree["pn"]:
353 depend_tree["pn"][pn] = {}
354 depend_tree["pn"][pn]["filename"] = fn
355 depend_tree["pn"][pn]["version"] = version
271 for dep in rq.runq_depends[task]: 356 for dep in rq.runq_depends[task]:
272 depfn = taskdata.fn_index[rq.runq_fnid[dep]] 357 depfn = taskdata.fn_index[rq.runq_fnid[dep]]
273 deppn = self.status.pkg_fn[depfn] 358 deppn = self.status.pkg_fn[depfn]
274 print >> tdepends_file, '"%s.%s" -> "%s.%s"' % (pn, rq.runq_task[task], deppn, rq.runq_task[dep]) 359 dotname = "%s.%s" % (pn, rq.runq_task[task])
360 if not dotname in depend_tree["tdepends"]:
361 depend_tree["tdepends"][dotname] = []
362 depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, rq.runq_task[dep]))
275 if fnid not in seen_fnids: 363 if fnid not in seen_fnids:
276 seen_fnids.append(fnid) 364 seen_fnids.append(fnid)
277 packages = [] 365 packages = []
278 print >> depends_file, '"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn) 366
279 for depend in self.status.deps[fn]: 367 depend_tree["depends"][pn] = []
280 print >> depends_file, '"%s" -> "%s"' % (pn, depend) 368 for dep in taskdata.depids[fnid]:
369 depend_tree["depends"][pn].append(taskdata.build_names_index[dep])
370
371 depend_tree["rdepends-pn"][pn] = []
372 for rdep in taskdata.rdepids[fnid]:
373 depend_tree["rdepends-pn"][pn].append(taskdata.run_names_index[rdep])
374
281 rdepends = self.status.rundeps[fn] 375 rdepends = self.status.rundeps[fn]
282 for package in rdepends: 376 for package in rdepends:
283 for rdepend in re.findall("([\w.-]+)(\ \(.+\))?", rdepends[package]): 377 depend_tree["rdepends-pkg"][package] = []
284 print >> depends_file, '"%s" -> "%s%s" [style=dashed]' % (package, rdepend[0], rdepend[1]) 378 for rdepend in rdepends[package]:
379 depend_tree["rdepends-pkg"][package].append(rdepend)
285 packages.append(package) 380 packages.append(package)
381
286 rrecs = self.status.runrecs[fn] 382 rrecs = self.status.runrecs[fn]
287 for package in rrecs: 383 for package in rrecs:
288 for rdepend in re.findall("([\w.-]+)(\ \(.+\))?", rrecs[package]): 384 depend_tree["rrecs-pkg"][package] = []
289 print >> depends_file, '"%s" -> "%s%s" [style=dashed]' % (package, rdepend[0], rdepend[1]) 385 for rdepend in rrecs[package]:
386 depend_tree["rrecs-pkg"][package].append(rdepend)
290 if not package in packages: 387 if not package in packages:
291 packages.append(package) 388 packages.append(package)
389
292 for package in packages: 390 for package in packages:
293 if package != pn: 391 if package not in depend_tree["packages"]:
294 print >> depends_file, '"%s" [label="%s(%s) %s\\n%s"]' % (package, package, pn, version, fn) 392 depend_tree["packages"][package] = {}
295 for depend in self.status.deps[fn]: 393 depend_tree["packages"][package]["pn"] = pn
296 print >> depends_file, '"%s" -> "%s"' % (package, depend) 394 depend_tree["packages"][package]["filename"] = fn
297 # Prints a flattened form of the above where subpackages of a package are merged into the main pn 395 depend_tree["packages"][package]["version"] = version
298 #print >> depends_file, '"%s" [label="%s %s\\n%s\\n%s"]' % (pn, pn, taskname, version, fn) 396
299 #for rdep in taskdata.rdepids[fnid]: 397 return depend_tree
300 # print >> depends_file, '"%s" -> "%s" [style=dashed]' % (pn, taskdata.run_names_index[rdep]) 398
301 #for dep in taskdata.depids[fnid]: 399
302 # print >> depends_file, '"%s" -> "%s"' % (pn, taskdata.build_names_index[dep]) 400 def generateDepTreeEvent(self, pkgs_to_build, task):
401 """
402 Create a task dependency graph of pkgs_to_build.
403 Generate an event with the result
404 """
405 depgraph = self.generateDepTreeData(pkgs_to_build, task)
406 bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.configuration.data)
407
408 def generateDotGraphFiles(self, pkgs_to_build, task):
409 """
410 Create a task dependency graph of pkgs_to_build.
411 Save the result to a set of .dot files.
412 """
413
414 depgraph = self.generateDepTreeData(pkgs_to_build, task)
415
416 # Prints a flattened form of package-depends below where subpackages of a package are merged into the main pn
417 depends_file = file('pn-depends.dot', 'w' )
418 print >> depends_file, "digraph depends {"
419 for pn in depgraph["pn"]:
420 fn = depgraph["pn"][pn]["filename"]
421 version = depgraph["pn"][pn]["version"]
422 print >> depends_file, '"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn)
423 for pn in depgraph["depends"]:
424 for depend in depgraph["depends"][pn]:
425 print >> depends_file, '"%s" -> "%s"' % (pn, depend)
426 for pn in depgraph["rdepends-pn"]:
427 for rdepend in depgraph["rdepends-pn"][pn]:
428 print >> depends_file, '"%s" -> "%s" [style=dashed]' % (pn, rdepend)
429 print >> depends_file, "}"
430 bb.msg.plain("PN dependencies saved to 'pn-depends.dot'")
431
432 depends_file = file('package-depends.dot', 'w' )
433 print >> depends_file, "digraph depends {"
434 for package in depgraph["packages"]:
435 pn = depgraph["packages"][package]["pn"]
436 fn = depgraph["packages"][package]["filename"]
437 version = depgraph["packages"][package]["version"]
438 if package == pn:
439 print >> depends_file, '"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn)
440 else:
441 print >> depends_file, '"%s" [label="%s(%s) %s\\n%s"]' % (package, package, pn, version, fn)
442 for depend in depgraph["depends"][pn]:
443 print >> depends_file, '"%s" -> "%s"' % (package, depend)
444 for package in depgraph["rdepends-pkg"]:
445 for rdepend in depgraph["rdepends-pkg"][package]:
446 print >> depends_file, '"%s" -> "%s" [style=dashed]' % (package, rdepend)
447 for package in depgraph["rrecs-pkg"]:
448 for rdepend in depgraph["rrecs-pkg"][package]:
449 print >> depends_file, '"%s" -> "%s" [style=dashed]' % (package, rdepend)
303 print >> depends_file, "}" 450 print >> depends_file, "}"
451 bb.msg.plain("Package dependencies saved to 'package-depends.dot'")
452
453 tdepends_file = file('task-depends.dot', 'w' )
454 print >> tdepends_file, "digraph depends {"
455 for task in depgraph["tdepends"]:
456 (pn, taskname) = task.rsplit(".", 1)
457 fn = depgraph["pn"][pn]["filename"]
458 version = depgraph["pn"][pn]["version"]
459 print >> tdepends_file, '"%s.%s" [label="%s %s\\n%s\\n%s"]' % (pn, taskname, pn, taskname, version, fn)
460 for dep in depgraph["tdepends"][task]:
461 print >> tdepends_file, '"%s" -> "%s"' % (task, dep)
304 print >> tdepends_file, "}" 462 print >> tdepends_file, "}"
305 bb.msg.note(1, bb.msg.domain.Collection, "Dependencies saved to 'depends.dot'") 463 bb.msg.plain("Task dependencies saved to 'task-depends.dot'")
306 bb.msg.note(1, bb.msg.domain.Collection, "Task dependencies saved to 'task-depends.dot'")
307 464
308 def buildDepgraph( self ): 465 def buildDepgraph( self ):
309 all_depends = self.status.all_depends 466 all_depends = self.status.all_depends
@@ -324,7 +481,7 @@ class BBCooker:
324 try: 481 try:
325 (providee, provider) = p.split(':') 482 (providee, provider) = p.split(':')
326 except: 483 except:
327 bb.msg.error(bb.msg.domain.Provider, "Malformed option in PREFERRED_PROVIDERS variable: %s" % p) 484 bb.msg.fatal(bb.msg.domain.Provider, "Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
328 continue 485 continue
329 if providee in self.status.preferred and self.status.preferred[providee] != provider: 486 if providee in self.status.preferred and self.status.preferred[providee] != provider:
330 bb.msg.error(bb.msg.domain.Provider, "conflicting preferences for %s: both %s and %s specified" % (providee, provider, self.status.preferred[providee])) 487 bb.msg.error(bb.msg.domain.Provider, "conflicting preferences for %s: both %s and %s specified" % (providee, provider, self.status.preferred[providee]))
@@ -362,19 +519,6 @@ class BBCooker:
362 self.status.possible_world = None 519 self.status.possible_world = None
363 self.status.all_depends = None 520 self.status.all_depends = None
364 521
365 def myProgressCallback( self, x, y, f, from_cache ):
366 """Update any tty with the progress change"""
367 if os.isatty(sys.stdout.fileno()):
368 sys.stdout.write("\rNOTE: Handling BitBake files: %s (%04d/%04d) [%2d %%]" % ( parsespin.next(), x, y, x*100/y ) )
369 sys.stdout.flush()
370 else:
371 if x == 1:
372 sys.stdout.write("Parsing .bb files, please wait...")
373 sys.stdout.flush()
374 if x == y:
375 sys.stdout.write("done.")
376 sys.stdout.flush()
377
378 def interactiveMode( self ): 522 def interactiveMode( self ):
379 """Drop off into a shell""" 523 """Drop off into a shell"""
380 try: 524 try:
@@ -383,12 +527,10 @@ class BBCooker:
383 bb.msg.fatal(bb.msg.domain.Parsing, "Sorry, shell not available (%s)" % details ) 527 bb.msg.fatal(bb.msg.domain.Parsing, "Sorry, shell not available (%s)" % details )
384 else: 528 else:
385 shell.start( self ) 529 shell.start( self )
386 sys.exit( 0 )
387 530
388 def parseConfigurationFile( self, afiles ): 531 def parseConfigurationFile( self, afile ):
389 try: 532 try:
390 for afile in afiles: 533 self.configuration.data = bb.parse.handle( afile, self.configuration.data )
391 self.configuration.data = bb.parse.handle( afile, self.configuration.data )
392 534
393 # Handle any INHERITs and inherit the base class 535 # Handle any INHERITs and inherit the base class
394 inherits = ["base"] + (bb.data.getVar('INHERIT', self.configuration.data, True ) or "").split() 536 inherits = ["base"] + (bb.data.getVar('INHERIT', self.configuration.data, True ) or "").split()
@@ -402,10 +544,10 @@ class BBCooker:
402 544
403 bb.fetch.fetcher_init(self.configuration.data) 545 bb.fetch.fetcher_init(self.configuration.data)
404 546
405 bb.event.fire(bb.event.ConfigParsed(self.configuration.data)) 547 bb.event.fire(bb.event.ConfigParsed(), self.configuration.data)
406 548
407 except IOError, e: 549 except IOError, e:
408 bb.msg.fatal(bb.msg.domain.Parsing, "IO Error: %s" % str(e) ) 550 bb.msg.fatal(bb.msg.domain.Parsing, "Error when parsing %s: %s" % (afile, str(e)))
409 except bb.parse.ParseError, details: 551 except bb.parse.ParseError, details:
410 bb.msg.fatal(bb.msg.domain.Parsing, "Unable to parse %s (%s)" % (afile, details) ) 552 bb.msg.fatal(bb.msg.domain.Parsing, "Unable to parse %s (%s)" % (afile, details) )
411 553
@@ -439,17 +581,17 @@ class BBCooker:
439 """ 581 """
440 if not bb.data.getVar("BUILDNAME", self.configuration.data): 582 if not bb.data.getVar("BUILDNAME", self.configuration.data):
441 bb.data.setVar("BUILDNAME", os.popen('date +%Y%m%d%H%M').readline().strip(), self.configuration.data) 583 bb.data.setVar("BUILDNAME", os.popen('date +%Y%m%d%H%M').readline().strip(), self.configuration.data)
442 bb.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S',time.gmtime()),self.configuration.data) 584 bb.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S',time.gmtime()), self.configuration.data)
443 585
444 def matchFile(self, buildfile): 586 def matchFiles(self, buildfile):
445 """ 587 """
446 Convert the fragment buildfile into a real file 588 Find the .bb files which match the expression in 'buildfile'.
447 Error if there are too many matches
448 """ 589 """
590
449 bf = os.path.abspath(buildfile) 591 bf = os.path.abspath(buildfile)
450 try: 592 try:
451 os.stat(bf) 593 os.stat(bf)
452 return bf 594 return [bf]
453 except OSError: 595 except OSError:
454 (filelist, masked) = self.collect_bbfiles() 596 (filelist, masked) = self.collect_bbfiles()
455 regexp = re.compile(buildfile) 597 regexp = re.compile(buildfile)
@@ -458,27 +600,41 @@ class BBCooker:
458 if regexp.search(f) and os.path.isfile(f): 600 if regexp.search(f) and os.path.isfile(f):
459 bf = f 601 bf = f
460 matches.append(f) 602 matches.append(f)
461 if len(matches) != 1: 603 return matches
462 bb.msg.error(bb.msg.domain.Parsing, "Unable to match %s (%s matches found):" % (buildfile, len(matches)))
463 for f in matches:
464 bb.msg.error(bb.msg.domain.Parsing, " %s" % f)
465 return False
466 return matches[0]
467 604
468 def buildFile(self, buildfile): 605 def matchFile(self, buildfile):
606 """
607 Find the .bb file which matches the expression in 'buildfile'.
608 Raise an error if multiple files
609 """
610 matches = self.matchFiles(buildfile)
611 if len(matches) != 1:
612 bb.msg.error(bb.msg.domain.Parsing, "Unable to match %s (%s matches found):" % (buildfile, len(matches)))
613 for f in matches:
614 bb.msg.error(bb.msg.domain.Parsing, " %s" % f)
615 raise MultipleMatches
616 return matches[0]
617
618 def buildFile(self, buildfile, task):
469 """ 619 """
470 Build the file matching regexp buildfile 620 Build the file matching regexp buildfile
471 """ 621 """
472 622
473 # Make sure our target is a fully qualified filename 623 # Parse the configuration here. We need to do it explicitly here since
624 # buildFile() doesn't use the cache
625 self.parseConfiguration()
626
627 # If we are told to do the None task then query the default task
628 if (task == None):
629 task = self.configuration.cmd
630
474 fn = self.matchFile(buildfile) 631 fn = self.matchFile(buildfile)
475 if not fn: 632 self.buildSetVars()
476 return False
477 633
478 # Load data into the cache for fn and parse the loaded cache data 634 # Load data into the cache for fn and parse the loaded cache data
479 self.bb_cache = bb.cache.init(self) 635 self.bb_cache = bb.cache.init(self)
480 self.status = bb.cache.CacheData() 636 self.status = bb.cache.CacheData()
481 self.bb_cache.loadData(fn, self.configuration.data, self.status) 637 self.bb_cache.loadData(fn, self.configuration.data, self.status)
482 638
483 # Tweak some variables 639 # Tweak some variables
484 item = self.bb_cache.getVar('PN', fn, True) 640 item = self.bb_cache.getVar('PN', fn, True)
@@ -493,159 +649,157 @@ class BBCooker:
493 649
494 # Remove stamp for target if force mode active 650 # Remove stamp for target if force mode active
495 if self.configuration.force: 651 if self.configuration.force:
496 bb.msg.note(2, bb.msg.domain.RunQueue, "Remove stamp %s, %s" % (self.configuration.cmd, fn)) 652 bb.msg.note(2, bb.msg.domain.RunQueue, "Remove stamp %s, %s" % (task, fn))
497 bb.build.del_stamp('do_%s' % self.configuration.cmd, self.configuration.data) 653 bb.build.del_stamp('do_%s' % task, self.status, fn)
498 654
499 # Setup taskdata structure 655 # Setup taskdata structure
500 taskdata = bb.taskdata.TaskData(self.configuration.abort, self.configuration.tryaltconfigs) 656 taskdata = bb.taskdata.TaskData(self.configuration.abort)
501 taskdata.add_provider(self.configuration.data, self.status, item) 657 taskdata.add_provider(self.configuration.data, self.status, item)
502 658
503 buildname = bb.data.getVar("BUILDNAME", self.configuration.data) 659 buildname = bb.data.getVar("BUILDNAME", self.configuration.data)
504 bb.event.fire(bb.event.BuildStarted(buildname, [item], self.configuration.event_data)) 660 bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.configuration.event_data)
505 661
506 # Execute the runqueue 662 # Execute the runqueue
507 runlist = [[item, "do_%s" % self.configuration.cmd]] 663 runlist = [[item, "do_%s" % task]]
664
508 rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist) 665 rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist)
509 rq.prepare_runqueue() 666
510 try: 667 def buildFileIdle(server, rq, abort):
511 failures = rq.execute_runqueue() 668
512 except runqueue.TaskFailure, fnids: 669 if abort or self.cookerAction == cookerStop:
670 rq.finish_runqueue(True)
671 elif self.cookerAction == cookerShutdown:
672 rq.finish_runqueue(False)
513 failures = 0 673 failures = 0
514 for fnid in fnids: 674 try:
515 bb.msg.error(bb.msg.domain.Build, "'%s' failed" % taskdata.fn_index[fnid]) 675 retval = rq.execute_runqueue()
516 failures = failures + 1 676 except runqueue.TaskFailure, fnids:
517 bb.event.fire(bb.event.BuildCompleted(buildname, [item], self.configuration.event_data, failures)) 677 for fnid in fnids:
518 return False 678 bb.msg.error(bb.msg.domain.Build, "'%s' failed" % taskdata.fn_index[fnid])
519 bb.event.fire(bb.event.BuildCompleted(buildname, [item], self.configuration.event_data, failures)) 679 failures = failures + 1
520 return True 680 retval = False
681 if not retval:
682 self.command.finishAsyncCommand()
683 bb.event.fire(bb.event.BuildCompleted(buildname, item, failures), self.configuration.event_data)
684 return False
685 return 0.5
686
687 self.server.register_idle_function(buildFileIdle, rq)
521 688
522 def buildTargets(self, targets): 689 def buildTargets(self, targets, task):
523 """ 690 """
524 Attempt to build the targets specified 691 Attempt to build the targets specified
525 """ 692 """
526 693
527 buildname = bb.data.getVar("BUILDNAME", self.configuration.data) 694 # Need files parsed
528 bb.event.fire(bb.event.BuildStarted(buildname, targets, self.configuration.event_data)) 695 self.updateCache()
529 696
530 localdata = data.createCopy(self.configuration.data) 697 # If we are told to do the NULL task then query the default task
531 bb.data.update_data(localdata) 698 if (task == None):
532 bb.data.expandKeys(localdata) 699 task = self.configuration.cmd
533 700
534 taskdata = bb.taskdata.TaskData(self.configuration.abort, self.configuration.tryaltconfigs) 701 targets = self.checkPackages(targets)
535 702
536 runlist = [] 703 def buildTargetsIdle(server, rq, abort):
537 try:
538 for k in targets:
539 taskdata.add_provider(localdata, self.status, k)
540 runlist.append([k, "do_%s" % self.configuration.cmd])
541 taskdata.add_unresolved(localdata, self.status)
542 except bb.providers.NoProvider:
543 sys.exit(1)
544 704
545 rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist) 705 if abort or self.cookerAction == cookerStop:
546 rq.prepare_runqueue() 706 rq.finish_runqueue(True)
547 try: 707 elif self.cookerAction == cookerShutdown:
548 failures = rq.execute_runqueue() 708 rq.finish_runqueue(False)
549 except runqueue.TaskFailure, fnids:
550 failures = 0 709 failures = 0
551 for fnid in fnids: 710 try:
552 bb.msg.error(bb.msg.domain.Build, "'%s' failed" % taskdata.fn_index[fnid]) 711 retval = rq.execute_runqueue()
553 failures = failures + 1 712 except runqueue.TaskFailure, fnids:
554 bb.event.fire(bb.event.BuildCompleted(buildname, targets, self.configuration.event_data, failures)) 713 for fnid in fnids:
555 sys.exit(1) 714 bb.msg.error(bb.msg.domain.Build, "'%s' failed" % taskdata.fn_index[fnid])
556 bb.event.fire(bb.event.BuildCompleted(buildname, targets, self.configuration.event_data, failures)) 715 failures = failures + 1
716 retval = False
717 if not retval:
718 self.command.finishAsyncCommand()
719 bb.event.fire(bb.event.BuildCompleted(buildname, targets, failures), self.configuration.event_data)
720 return None
721 return 0.5
557 722
558 sys.exit(0) 723 self.buildSetVars()
559 724
560 def updateCache(self): 725 buildname = bb.data.getVar("BUILDNAME", self.configuration.data)
561 # Import Psyco if available and not disabled 726 bb.event.fire(bb.event.BuildStarted(buildname, targets), self.configuration.event_data)
562 import platform
563 if platform.machine() in ['i386', 'i486', 'i586', 'i686']:
564 if not self.configuration.disable_psyco:
565 try:
566 import psyco
567 except ImportError:
568 bb.msg.note(1, bb.msg.domain.Collection, "Psyco JIT Compiler (http://psyco.sf.net) not available. Install it to increase performance.")
569 else:
570 psyco.bind( self.parse_bbfiles )
571 else:
572 bb.msg.note(1, bb.msg.domain.Collection, "You have disabled Psyco. This decreases performance.")
573 727
574 self.status = bb.cache.CacheData() 728 localdata = data.createCopy(self.configuration.data)
729 bb.data.update_data(localdata)
730 bb.data.expandKeys(localdata)
575 731
576 ignore = bb.data.getVar("ASSUME_PROVIDED", self.configuration.data, 1) or "" 732 taskdata = bb.taskdata.TaskData(self.configuration.abort)
577 self.status.ignored_dependencies = set( ignore.split() )
578 733
579 self.handleCollections( bb.data.getVar("BBFILE_COLLECTIONS", self.configuration.data, 1) ) 734 runlist = []
735 for k in targets:
736 taskdata.add_provider(localdata, self.status, k)
737 runlist.append([k, "do_%s" % task])
738 taskdata.add_unresolved(localdata, self.status)
580 739
581 bb.msg.debug(1, bb.msg.domain.Collection, "collecting .bb files") 740 rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist)
582 (filelist, masked) = self.collect_bbfiles()
583 bb.data.renameVar("__depends", "__base_depends", self.configuration.data)
584 self.parse_bbfiles(filelist, masked, self.myProgressCallback)
585 bb.msg.debug(1, bb.msg.domain.Collection, "parsing complete")
586 741
587 self.buildDepgraph() 742 self.server.register_idle_function(buildTargetsIdle, rq)
588 743
589 def cook(self): 744 def updateCache(self):
590 """
591 We are building stuff here. We do the building
592 from here. By default we try to execute task
593 build.
594 """
595 745
596 # Wipe the OS environment 746 if self.cookerState == cookerParsed:
597 bb.utils.empty_environment() 747 return
598 748
599 if self.configuration.show_environment: 749 if self.cookerState != cookerParsing:
600 self.showEnvironment(self.configuration.buildfile, self.configuration.pkgs_to_build)
601 sys.exit( 0 )
602 750
603 self.buildSetVars() 751 self.parseConfiguration ()
604 752
605 if self.configuration.interactive: 753 # Import Psyco if available and not disabled
606 self.interactiveMode() 754 import platform
755 if platform.machine() in ['i386', 'i486', 'i586', 'i686']:
756 if not self.configuration.disable_psyco:
757 try:
758 import psyco
759 except ImportError:
760 bb.msg.note(1, bb.msg.domain.Collection, "Psyco JIT Compiler (http://psyco.sf.net) not available. Install it to increase performance.")
761 else:
762 psyco.bind( CookerParser.parse_next )
763 else:
764 bb.msg.note(1, bb.msg.domain.Collection, "You have disabled Psyco. This decreases performance.")
607 765
608 if self.configuration.buildfile is not None: 766 self.status = bb.cache.CacheData()
609 if not self.buildFile(self.configuration.buildfile):
610 sys.exit(1)
611 sys.exit(0)
612 767
613 # initialise the parsing status now we know we will need deps 768 ignore = bb.data.getVar("ASSUME_PROVIDED", self.configuration.data, 1) or ""
614 self.updateCache() 769 self.status.ignored_dependencies = set(ignore.split())
770
771 for dep in self.configuration.extra_assume_provided:
772 self.status.ignored_dependencies.add(dep)
773
774 self.handleCollections( bb.data.getVar("BBFILE_COLLECTIONS", self.configuration.data, 1) )
615 775
616 if self.configuration.revisions_changed: 776 bb.msg.debug(1, bb.msg.domain.Collection, "collecting .bb files")
617 sys.exit(bb.fetch.fetcher_compare_revisons(self.configuration.data)) 777 (filelist, masked) = self.collect_bbfiles()
778 bb.data.renameVar("__depends", "__base_depends", self.configuration.data)
618 779
619 if self.configuration.parse_only: 780 self.parser = CookerParser(self, filelist, masked)
620 bb.msg.note(1, bb.msg.domain.Collection, "Requested parsing .bb files only. Exiting.") 781 self.cookerState = cookerParsing
621 return 0
622 782
623 pkgs_to_build = self.configuration.pkgs_to_build 783 if not self.parser.parse_next():
784 bb.msg.debug(1, bb.msg.domain.Collection, "parsing complete")
785 self.buildDepgraph()
786 self.cookerState = cookerParsed
787 return None
624 788
625 if len(pkgs_to_build) == 0 and not self.configuration.show_versions: 789 return True
626 print "Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help'"
627 print "for usage information."
628 sys.exit(0)
629 790
630 try: 791 def checkPackages(self, pkgs_to_build):
631 if self.configuration.show_versions:
632 self.showVersions()
633 sys.exit( 0 )
634 if 'world' in pkgs_to_build:
635 self.buildWorldTargetList()
636 pkgs_to_build.remove('world')
637 for t in self.status.world_target:
638 pkgs_to_build.append(t)
639 792
640 if self.configuration.dot_graph: 793 if len(pkgs_to_build) == 0:
641 self.generateDotGraph( pkgs_to_build, self.configuration.ignored_dot_deps ) 794 raise NothingToBuild
642 sys.exit( 0 )
643 795
644 return self.buildTargets(pkgs_to_build) 796 if 'world' in pkgs_to_build:
797 self.buildWorldTargetList()
798 pkgs_to_build.remove('world')
799 for t in self.status.world_target:
800 pkgs_to_build.append(t)
645 801
646 except KeyboardInterrupt: 802 return pkgs_to_build
647 bb.msg.note(1, bb.msg.domain.Collection, "KeyboardInterrupt - Build not completed.")
648 sys.exit(1)
649 803
650 def get_bbfiles( self, path = os.getcwd() ): 804 def get_bbfiles( self, path = os.getcwd() ):
651 """Get list of default .bb files by reading out the current directory""" 805 """Get list of default .bb files by reading out the current directory"""
@@ -717,59 +871,108 @@ class BBCooker:
717 871
718 return (finalfiles, masked) 872 return (finalfiles, masked)
719 873
720 def parse_bbfiles(self, filelist, masked, progressCallback = None): 874 def serve(self):
721 parsed, cached, skipped, error = 0, 0, 0, 0
722 for i in xrange( len( filelist ) ):
723 f = filelist[i]
724 875
725 #bb.msg.debug(1, bb.msg.domain.Collection, "parsing %s" % f) 876 # Empty the environment. The environment will be populated as
877 # necessary from the data store.
878 bb.utils.empty_environment()
726 879
727 # read a file's metadata 880 if self.configuration.profile:
728 try: 881 try:
729 fromCache, skip = self.bb_cache.loadData(f, self.configuration.data, self.status) 882 import cProfile as profile
730 if skip: 883 except:
731 skipped += 1 884 import profile
732 bb.msg.debug(2, bb.msg.domain.Collection, "skipping %s" % f) 885
733 self.bb_cache.skip(f) 886 profile.runctx("self.server.serve_forever()", globals(), locals(), "profile.log")
734 continue 887
735 elif fromCache: cached += 1 888 # Redirect stdout to capture profile information
736 else: parsed += 1 889 pout = open('profile.log.processed', 'w')
737 890 so = sys.stdout.fileno()
738 # Disabled by RP as was no longer functional 891 os.dup2(pout.fileno(), so)
739 # allow metadata files to add items to BBFILES 892
740 #data.update_data(self.pkgdata[f]) 893 import pstats
741 #addbbfiles = self.bb_cache.getVar('BBFILES', f, False) or None 894 p = pstats.Stats('profile.log')
742 #if addbbfiles: 895 p.sort_stats('time')
743 # for aof in addbbfiles.split(): 896 p.print_stats()
744 # if not files.count(aof): 897 p.print_callers()
745 # if not os.path.isabs(aof): 898 p.sort_stats('cumulative')
746 # aof = os.path.join(os.path.dirname(f),aof) 899 p.print_stats()
747 # files.append(aof) 900
748 901 os.dup2(so, pout.fileno())
749 # now inform the caller 902 pout.flush()
750 if progressCallback is not None: 903 pout.close()
751 progressCallback( i + 1, len( filelist ), f, fromCache ) 904 else:
905 self.server.serve_forever()
906
907 bb.event.fire(CookerExit(), self.configuration.event_data)
908
909class CookerExit(bb.event.Event):
910 """
911 Notify clients of the Cooker shutdown
912 """
913
914 def __init__(self):
915 bb.event.Event.__init__(self)
916
917class CookerParser:
918 def __init__(self, cooker, filelist, masked):
919 # Internal data
920 self.filelist = filelist
921 self.cooker = cooker
922
923 # Accounting statistics
924 self.parsed = 0
925 self.cached = 0
926 self.error = 0
927 self.masked = masked
928 self.total = len(filelist)
929
930 self.skipped = 0
931 self.virtuals = 0
932
933 # Pointer to the next file to parse
934 self.pointer = 0
935
936 def parse_next(self):
937 if self.pointer < len(self.filelist):
938 f = self.filelist[self.pointer]
939 cooker = self.cooker
940
941 try:
942 fromCache, skipped, virtuals = cooker.bb_cache.loadData(f, cooker.configuration.data, cooker.status)
943 if fromCache:
944 self.cached += 1
945 else:
946 self.parsed += 1
947
948 self.skipped += skipped
949 self.virtuals += virtuals
752 950
753 except IOError, e: 951 except IOError, e:
754 self.bb_cache.remove(f) 952 self.error += 1
953 cooker.bb_cache.remove(f)
755 bb.msg.error(bb.msg.domain.Collection, "opening %s: %s" % (f, e)) 954 bb.msg.error(bb.msg.domain.Collection, "opening %s: %s" % (f, e))
756 pass 955 pass
757 except KeyboardInterrupt: 956 except KeyboardInterrupt:
758 self.bb_cache.sync() 957 cooker.bb_cache.remove(f)
958 cooker.bb_cache.sync()
759 raise 959 raise
760 except Exception, e: 960 except Exception, e:
761 error += 1 961 self.error += 1
762 self.bb_cache.remove(f) 962 cooker.bb_cache.remove(f)
763 bb.msg.error(bb.msg.domain.Collection, "%s while parsing %s" % (e, f)) 963 bb.msg.error(bb.msg.domain.Collection, "%s while parsing %s" % (e, f))
764 except: 964 except:
765 self.bb_cache.remove(f) 965 cooker.bb_cache.remove(f)
766 raise 966 raise
967 finally:
968 bb.event.fire(bb.event.ParseProgress(self.cached, self.parsed, self.skipped, self.masked, self.virtuals, self.error, self.total), cooker.configuration.event_data)
767 969
768 if progressCallback is not None: 970 self.pointer += 1
769 print "\r" # need newline after Handling Bitbake files message
770 bb.msg.note(1, bb.msg.domain.Collection, "Parsing finished. %d cached, %d parsed, %d skipped, %d masked." % ( cached, parsed, skipped, masked ))
771 971
772 self.bb_cache.sync() 972 if self.pointer >= self.total:
973 cooker.bb_cache.sync()
974 if self.error > 0:
975 raise ParsingErrorsFound
976 return False
977 return True
773 978
774 if error > 0:
775 bb.msg.fatal(bb.msg.domain.Collection, "Parsing errors found, exiting...")
diff --git a/bitbake-dev/lib/bb/daemonize.py b/bitbake/lib/bb/daemonize.py
index 1a8bb379f4..1a8bb379f4 100644
--- a/bitbake-dev/lib/bb/daemonize.py
+++ b/bitbake/lib/bb/daemonize.py
diff --git a/bitbake/lib/bb/data.py b/bitbake/lib/bb/data.py
index f424ac7a22..d3058b9a1d 100644
--- a/bitbake/lib/bb/data.py
+++ b/bitbake/lib/bb/data.py
@@ -37,7 +37,7 @@ the speed is more critical here.
37# 37#
38#Based on functions from the base bb module, Copyright 2003 Holger Schurig 38#Based on functions from the base bb module, Copyright 2003 Holger Schurig
39 39
40import sys, os, re, time, types 40import sys, os, re, types
41if sys.argv[0][-5:] == "pydoc": 41if sys.argv[0][-5:] == "pydoc":
42 path = os.path.dirname(os.path.dirname(sys.argv[1])) 42 path = os.path.dirname(os.path.dirname(sys.argv[1]))
43else: 43else:
diff --git a/bitbake/lib/bb/event.py b/bitbake/lib/bb/event.py
index 9d7341f878..7251d78715 100644
--- a/bitbake/lib/bb/event.py
+++ b/bitbake/lib/bb/event.py
@@ -24,21 +24,18 @@ BitBake build tools.
24 24
25import os, re 25import os, re
26import bb.utils 26import bb.utils
27import pickle
28
29# This is the pid for which we should generate the event. This is set when
30# the runqueue forks off.
31worker_pid = 0
32worker_pipe = None
27 33
28class Event: 34class Event:
29 """Base class for events""" 35 """Base class for events"""
30 type = "Event"
31
32 def __init__(self, d):
33 self._data = d
34
35 def getData(self):
36 return self._data
37
38 def setData(self, data):
39 self._data = data
40 36
41 data = property(getData, setData, None, "data property") 37 def __init__(self):
38 self.pid = worker_pid
42 39
43NotHandled = 0 40NotHandled = 0
44Handled = 1 41Handled = 1
@@ -47,75 +44,83 @@ Registered = 10
47AlreadyRegistered = 14 44AlreadyRegistered = 14
48 45
49# Internal 46# Internal
50_handlers = [] 47_handlers = {}
51_handlers_dict = {} 48_ui_handlers = {}
49_ui_handler_seq = 0
52 50
53def tmpHandler(event): 51def fire(event, d):
54 """Default handler for code events""" 52 """Fire off an Event"""
55 return NotHandled
56 53
57def defaultTmpHandler(): 54 if worker_pid != 0:
58 tmp = "def tmpHandler(e):\n\t\"\"\"heh\"\"\"\n\treturn NotHandled" 55 worker_fire(event, d)
59 comp = bb.utils.better_compile(tmp, "tmpHandler(e)", "bb.event.defaultTmpHandler") 56 return
60 return comp
61 57
62def fire(event): 58 for handler in _handlers:
63 """Fire off an Event""" 59 h = _handlers[handler]
64 for h in _handlers: 60 event.data = d
65 if type(h).__name__ == "code": 61 if type(h).__name__ == "code":
66 exec(h) 62 exec(h)
67 if tmpHandler(event) == Handled: 63 tmpHandler(event)
68 return Handled
69 else: 64 else:
70 if h(event) == Handled: 65 h(event)
71 return Handled 66 del event.data
72 return NotHandled 67
68 errors = []
69 for h in _ui_handlers:
70 #print "Sending event %s" % event
71 try:
72 # We use pickle here since it better handles object instances
73 # which xmlrpc's marshaller does not. Events *must* be serializable
74 # by pickle.
75 _ui_handlers[h].event.send((pickle.dumps(event)))
76 except:
77 errors.append(h)
78 for h in errors:
79 del _ui_handlers[h]
80
81def worker_fire(event, d):
82 data = "<event>" + pickle.dumps(event) + "</event>"
83 if os.write(worker_pipe, data) != len (data):
84 print "Error sending event to server (short write)"
85
86def fire_from_worker(event, d):
87 if not event.startswith("<event>") or not event.endswith("</event>"):
88 print "Error, not an event"
89 return
90 event = pickle.loads(event[7:-8])
91 bb.event.fire(event, d)
73 92
74def register(name, handler): 93def register(name, handler):
75 """Register an Event handler""" 94 """Register an Event handler"""
76 95
77 # already registered 96 # already registered
78 if name in _handlers_dict: 97 if name in _handlers:
79 return AlreadyRegistered 98 return AlreadyRegistered
80 99
81 if handler is not None: 100 if handler is not None:
82# handle string containing python code 101 # handle string containing python code
83 if type(handler).__name__ == "str": 102 if type(handler).__name__ == "str":
84 _registerCode(handler) 103 tmp = "def tmpHandler(e):\n%s" % handler
104 comp = bb.utils.better_compile(tmp, "tmpHandler(e)", "bb.event._registerCode")
105 _handlers[name] = comp
85 else: 106 else:
86 _handlers.append(handler) 107 _handlers[name] = handler
87 108
88 _handlers_dict[name] = 1
89 return Registered 109 return Registered
90 110
91def _registerCode(handlerStr):
92 """Register a 'code' Event.
93 Deprecated interface; call register instead.
94
95 Expects to be passed python code as a string, which will
96 be passed in turn to compile() and then exec(). Note that
97 the code will be within a function, so should have had
98 appropriate tabbing put in place."""
99 tmp = "def tmpHandler(e):\n%s" % handlerStr
100 comp = bb.utils.better_compile(tmp, "tmpHandler(e)", "bb.event._registerCode")
101# prevent duplicate registration
102 _handlers.append(comp)
103
104def remove(name, handler): 111def remove(name, handler):
105 """Remove an Event handler""" 112 """Remove an Event handler"""
113 _handlers.pop(name)
106 114
107 _handlers_dict.pop(name) 115def register_UIHhandler(handler):
108 if type(handler).__name__ == "str": 116 bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1
109 return _removeCode(handler) 117 _ui_handlers[_ui_handler_seq] = handler
110 else: 118 return _ui_handler_seq
111 _handlers.remove(handler)
112 119
113def _removeCode(handlerStr): 120def unregister_UIHhandler(handlerNum):
114 """Remove a 'code' Event handler 121 if handlerNum in _ui_handlers:
115 Deprecated interface; call remove instead.""" 122 del _ui_handlers[handlerNum]
116 tmp = "def tmpHandler(e):\n%s" % handlerStr 123 return
117 comp = bb.utils.better_compile(tmp, "tmpHandler(e)", "bb.event._removeCode")
118 _handlers.remove(comp)
119 124
120def getName(e): 125def getName(e):
121 """Returns the name of a class or class instance""" 126 """Returns the name of a class or class instance"""
@@ -130,17 +135,17 @@ class ConfigParsed(Event):
130class RecipeParsed(Event): 135class RecipeParsed(Event):
131 """ Recipe Parsing Complete """ 136 """ Recipe Parsing Complete """
132 137
133 def __init__(self, fn, d): 138 def __init__(self, fn):
134 self.fn = fn 139 self.fn = fn
135 Event.__init__(self, d) 140 Event.__init__(self)
136 141
137class StampUpdate(Event): 142class StampUpdate(Event):
138 """Trigger for any adjustment of the stamp files to happen""" 143 """Trigger for any adjustment of the stamp files to happen"""
139 144
140 def __init__(self, targets, stampfns, d): 145 def __init__(self, targets, stampfns):
141 self._targets = targets 146 self._targets = targets
142 self._stampfns = stampfns 147 self._stampfns = stampfns
143 Event.__init__(self, d) 148 Event.__init__(self)
144 149
145 def getStampPrefix(self): 150 def getStampPrefix(self):
146 return self._stampfns 151 return self._stampfns
@@ -151,29 +156,13 @@ class StampUpdate(Event):
151 stampPrefix = property(getStampPrefix) 156 stampPrefix = property(getStampPrefix)
152 targets = property(getTargets) 157 targets = property(getTargets)
153 158
154class PkgBase(Event):
155 """Base class for package events"""
156
157 def __init__(self, t, d):
158 self._pkg = t
159 Event.__init__(self, d)
160
161 def getPkg(self):
162 return self._pkg
163
164 def setPkg(self, pkg):
165 self._pkg = pkg
166
167 pkg = property(getPkg, setPkg, None, "pkg property")
168
169
170class BuildBase(Event): 159class BuildBase(Event):
171 """Base class for bbmake run events""" 160 """Base class for bbmake run events"""
172 161
173 def __init__(self, n, p, c, failures = 0): 162 def __init__(self, n, p, failures = 0):
174 self._name = n 163 self._name = n
175 self._pkgs = p 164 self._pkgs = p
176 Event.__init__(self, c) 165 Event.__init__(self)
177 self._failures = failures 166 self._failures = failures
178 167
179 def getPkgs(self): 168 def getPkgs(self):
@@ -205,33 +194,8 @@ class BuildBase(Event):
205 cfg = property(getCfg, setCfg, None, "cfg property") 194 cfg = property(getCfg, setCfg, None, "cfg property")
206 195
207 196
208class DepBase(PkgBase):
209 """Base class for dependency events"""
210
211 def __init__(self, t, data, d):
212 self._dep = d
213 PkgBase.__init__(self, t, data)
214
215 def getDep(self):
216 return self._dep
217
218 def setDep(self, dep):
219 self._dep = dep
220
221 dep = property(getDep, setDep, None, "dep property")
222
223
224class PkgStarted(PkgBase):
225 """Package build started"""
226 197
227 198
228class PkgFailed(PkgBase):
229 """Package build failed"""
230
231
232class PkgSucceeded(PkgBase):
233 """Package build completed"""
234
235 199
236class BuildStarted(BuildBase): 200class BuildStarted(BuildBase):
237 """bbmake build run started""" 201 """bbmake build run started"""
@@ -241,18 +205,13 @@ class BuildCompleted(BuildBase):
241 """bbmake build run completed""" 205 """bbmake build run completed"""
242 206
243 207
244class UnsatisfiedDep(DepBase):
245 """Unsatisfied Dependency"""
246 208
247 209
248class RecursiveDep(DepBase):
249 """Recursive Dependency"""
250
251class NoProvider(Event): 210class NoProvider(Event):
252 """No Provider for an Event""" 211 """No Provider for an Event"""
253 212
254 def __init__(self, item, data,runtime=False): 213 def __init__(self, item, runtime=False):
255 Event.__init__(self, data) 214 Event.__init__(self)
256 self._item = item 215 self._item = item
257 self._runtime = runtime 216 self._runtime = runtime
258 217
@@ -265,8 +224,8 @@ class NoProvider(Event):
265class MultipleProviders(Event): 224class MultipleProviders(Event):
266 """Multiple Providers""" 225 """Multiple Providers"""
267 226
268 def __init__(self, item, candidates, data, runtime = False): 227 def __init__(self, item, candidates, runtime = False):
269 Event.__init__(self, data) 228 Event.__init__(self)
270 self._item = item 229 self._item = item
271 self._candidates = candidates 230 self._candidates = candidates
272 self._is_runtime = runtime 231 self._is_runtime = runtime
@@ -288,3 +247,29 @@ class MultipleProviders(Event):
288 Get the possible Candidates for a PROVIDER. 247 Get the possible Candidates for a PROVIDER.
289 """ 248 """
290 return self._candidates 249 return self._candidates
250
251class ParseProgress(Event):
252 """
253 Parsing Progress Event
254 """
255
256 def __init__(self, cached, parsed, skipped, masked, virtuals, errors, total):
257 Event.__init__(self)
258 self.cached = cached
259 self.parsed = parsed
260 self.skipped = skipped
261 self.virtuals = virtuals
262 self.masked = masked
263 self.errors = errors
264 self.sofar = cached + parsed
265 self.total = total
266
267class DepTreeGenerated(Event):
268 """
269 Event when a dependency tree has been generated
270 """
271
272 def __init__(self, depgraph):
273 Event.__init__(self)
274 self._depgraph = depgraph
275
diff --git a/bitbake/lib/bb/fetch/__init__.py b/bitbake/lib/bb/fetch/__init__.py
index 7326ed0f46..ab4658bc3b 100644
--- a/bitbake/lib/bb/fetch/__init__.py
+++ b/bitbake/lib/bb/fetch/__init__.py
@@ -99,6 +99,11 @@ def fetcher_init(d):
99 pd.delDomain("BB_URI_HEADREVS") 99 pd.delDomain("BB_URI_HEADREVS")
100 else: 100 else:
101 bb.msg.fatal(bb.msg.domain.Fetcher, "Invalid SRCREV cache policy of: %s" % srcrev_policy) 101 bb.msg.fatal(bb.msg.domain.Fetcher, "Invalid SRCREV cache policy of: %s" % srcrev_policy)
102
103 for m in methods:
104 if hasattr(m, "init"):
105 m.init(d)
106
102 # Make sure our domains exist 107 # Make sure our domains exist
103 pd.addDomain("BB_URI_HEADREVS") 108 pd.addDomain("BB_URI_HEADREVS")
104 pd.addDomain("BB_URI_LOCALCOUNT") 109 pd.addDomain("BB_URI_LOCALCOUNT")
@@ -467,6 +472,23 @@ class Fetch(object):
467 472
468 srcrev_internal_helper = staticmethod(srcrev_internal_helper) 473 srcrev_internal_helper = staticmethod(srcrev_internal_helper)
469 474
475 def localcount_internal_helper(ud, d):
476 """
477 Return:
478 a) a locked localcount if specified
479 b) None otherwise
480 """
481
482 localcount= None
483 if 'name' in ud.parm:
484 pn = data.getVar("PN", d, 1)
485 localcount = data.getVar("LOCALCOUNT_" + ud.parm['name'], d, 1)
486 if not localcount:
487 localcount = data.getVar("LOCALCOUNT", d, 1)
488 return localcount
489
490 localcount_internal_helper = staticmethod(localcount_internal_helper)
491
470 def try_mirror(d, tarfn): 492 def try_mirror(d, tarfn):
471 """ 493 """
472 Try to use a mirrored version of the sources. We do this 494 Try to use a mirrored version of the sources. We do this
@@ -555,12 +577,7 @@ class Fetch(object):
555 """ 577 """
556 578
557 """ 579 """
558 has_sortable_valid = hasattr(self, "_sortable_revision_valid") 580 if hasattr(self, "_sortable_revision"):
559 has_sortable = hasattr(self, "_sortable_revision")
560
561 if has_sortable and not has_sortable_valid:
562 return self._sortable_revision(url, ud, d)
563 elif has_sortable and self._sortable_revision_valid(url, ud, d):
564 return self._sortable_revision(url, ud, d) 581 return self._sortable_revision(url, ud, d)
565 582
566 pd = persist_data.PersistData(d) 583 pd = persist_data.PersistData(d)
@@ -568,13 +585,24 @@ class Fetch(object):
568 585
569 latest_rev = self._build_revision(url, ud, d) 586 latest_rev = self._build_revision(url, ud, d)
570 last_rev = pd.getValue("BB_URI_LOCALCOUNT", key + "_rev") 587 last_rev = pd.getValue("BB_URI_LOCALCOUNT", key + "_rev")
571 count = pd.getValue("BB_URI_LOCALCOUNT", key + "_count") 588 uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
589 count = None
590 if uselocalcount:
591 count = Fetch.localcount_internal_helper(ud, d)
592 if count is None:
593 count = pd.getValue("BB_URI_LOCALCOUNT", key + "_count")
572 594
573 if last_rev == latest_rev: 595 if last_rev == latest_rev:
574 return str(count + "+" + latest_rev) 596 return str(count + "+" + latest_rev)
575 597
598 buildindex_provided = hasattr(self, "_sortable_buildindex")
599 if buildindex_provided:
600 count = self._sortable_buildindex(url, ud, d, latest_rev)
601
576 if count is None: 602 if count is None:
577 count = "0" 603 count = "0"
604 elif uselocalcount or buildindex_provided:
605 count = str(count)
578 else: 606 else:
579 count = str(int(count) + 1) 607 count = str(int(count) + 1)
580 608
diff --git a/bitbake/lib/bb/fetch/cvs.py b/bitbake/lib/bb/fetch/cvs.py
index d8bd4eaf75..90a006500e 100644
--- a/bitbake/lib/bb/fetch/cvs.py
+++ b/bitbake/lib/bb/fetch/cvs.py
@@ -41,7 +41,7 @@ class Cvs(Fetch):
41 """ 41 """
42 Check to see if a given url can be fetched with cvs. 42 Check to see if a given url can be fetched with cvs.
43 """ 43 """
44 return ud.type in ['cvs', 'pserver'] 44 return ud.type in ['cvs']
45 45
46 def localpath(self, url, ud, d): 46 def localpath(self, url, ud, d):
47 if not "module" in ud.parm: 47 if not "module" in ud.parm:
diff --git a/bitbake/lib/bb/fetch/git.py b/bitbake/lib/bb/fetch/git.py
index 3016f0f00d..0e68325db9 100644
--- a/bitbake/lib/bb/fetch/git.py
+++ b/bitbake/lib/bb/fetch/git.py
@@ -28,6 +28,12 @@ from bb.fetch import runfetchcmd
28 28
29class Git(Fetch): 29class Git(Fetch):
30 """Class to fetch a module or modules from git repositories""" 30 """Class to fetch a module or modules from git repositories"""
31 def init(self, d):
32 #
33 # Only enable _sortable revision if the key is set
34 #
35 if bb.data.getVar("BB_GIT_CLONE_FOR_SRCREV", d, True):
36 self._sortable_buildindex = self._sortable_buildindex_disabled
31 def supports(self, url, ud, d): 37 def supports(self, url, ud, d):
32 """ 38 """
33 Check to see if a given url can be fetched with git. 39 Check to see if a given url can be fetched with git.
@@ -58,10 +64,18 @@ class Git(Fetch):
58 if not ud.tag or ud.tag == "master": 64 if not ud.tag or ud.tag == "master":
59 ud.tag = self.latest_revision(url, ud, d) 65 ud.tag = self.latest_revision(url, ud, d)
60 66
67 subdir = ud.parm.get("subpath", "")
68 if subdir != "":
69 if subdir.endswith("/"):
70 subdir = subdir[:-1]
71 subdirpath = os.path.join(ud.path, subdir);
72 else:
73 subdirpath = ud.path;
74
61 if 'fullclone' in ud.parm: 75 if 'fullclone' in ud.parm:
62 ud.localfile = ud.mirrortarball 76 ud.localfile = ud.mirrortarball
63 else: 77 else:
64 ud.localfile = data.expand('git_%s%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.tag), d) 78 ud.localfile = data.expand('git_%s%s_%s.tar.gz' % (ud.host, subdirpath.replace('/', '.'), ud.tag), d)
65 79
66 return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) 80 return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
67 81
@@ -111,10 +125,27 @@ class Git(Fetch):
111 if os.path.exists(codir): 125 if os.path.exists(codir):
112 bb.utils.prunedir(codir) 126 bb.utils.prunedir(codir)
113 127
128 subdir = ud.parm.get("subpath", "")
129 if subdir != "":
130 if subdir.endswith("/"):
131 subdirbase = os.path.basename(subdir[:-1])
132 else:
133 subdirbase = os.path.basename(subdir)
134 else:
135 subdirbase = ""
136
137 if subdir != "":
138 readpathspec = ":%s" % (subdir)
139 codir = os.path.join(codir, "git")
140 coprefix = os.path.join(codir, subdirbase, "")
141 else:
142 readpathspec = ""
143 coprefix = os.path.join(codir, "git", "")
144
114 bb.mkdirhier(codir) 145 bb.mkdirhier(codir)
115 os.chdir(ud.clonedir) 146 os.chdir(ud.clonedir)
116 runfetchcmd("git read-tree %s" % (ud.tag), d) 147 runfetchcmd("git read-tree %s%s" % (ud.tag, readpathspec), d)
117 runfetchcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, "git", "")), d) 148 runfetchcmd("git checkout-index -q -f --prefix=%s -a" % (coprefix), d)
118 149
119 os.chdir(codir) 150 os.chdir(codir)
120 bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout") 151 bb.msg.note(1, bb.msg.domain.Fetcher, "Creating tarball of git checkout")
@@ -154,42 +185,32 @@ class Git(Fetch):
154 def _build_revision(self, url, ud, d): 185 def _build_revision(self, url, ud, d):
155 return ud.tag 186 return ud.tag
156 187
157 def _sortable_revision_valid(self, url, ud, d): 188 def _sortable_buildindex_disabled(self, url, ud, d, rev):
158 return bb.data.getVar("BB_GIT_CLONE_FOR_SRCREV", d, True) or False
159
160 def _sortable_revision(self, url, ud, d):
161 """ 189 """
162 This is only called when _sortable_revision_valid called true 190 Return a suitable buildindex for the revision specified. This is done by counting revisions
163 191 using "git rev-list" which may or may not work in different circumstances.
164 We will have to get the updated revision.
165 """ 192 """
166 193
167 key = "GIT_CACHED_REVISION-%s-%s" % (gitsrcname, ud.tag)
168 if bb.data.getVar(key, d):
169 return bb.data.getVar(key, d)
170
171
172 # Runtime warning on wrongly configured sources
173 if ud.tag == "1":
174 bb.msg.error(1, bb.msg.domain.Fetcher, "SRCREV is '1'. This indicates a configuration error of %s" % url)
175 return "0+1"
176
177 cwd = os.getcwd() 194 cwd = os.getcwd()
178 195
179 # Check if we have the rev already 196 # Check if we have the rev already
197
180 if not os.path.exists(ud.clonedir): 198 if not os.path.exists(ud.clonedir):
181 print "no repo" 199 print "no repo"
182 self.go(None, ud, d) 200 self.go(None, ud, d)
201 if not os.path.exists(ud.clonedir):
202 bb.msg.error(bb.msg.domain.Fetcher, "GIT repository for %s doesn't exist in %s, cannot get sortable buildnumber, using old value" % (url, ud.clonedir))
203 return None
204
183 205
184 os.chdir(ud.clonedir) 206 os.chdir(ud.clonedir)
185 if not self._contains_ref(ud.tag, d): 207 if not self._contains_ref(rev, d):
186 self.go(None, ud, d) 208 self.go(None, ud, d)
187 209
188 output = runfetchcmd("git rev-list %s -- 2> /dev/null | wc -l" % ud.tag, d, quiet=True) 210 output = runfetchcmd("git rev-list %s -- 2> /dev/null | wc -l" % rev, d, quiet=True)
189 os.chdir(cwd) 211 os.chdir(cwd)
190 212
191 sortable_revision = "%s+%s" % (output.split()[0], ud.tag) 213 buildindex = "%s" % output.split()[0]
192 bb.data.setVar(key, sortable_revision, d) 214 bb.msg.debug(1, bb.msg.domain.Fetcher, "GIT repository for %s in %s is returning %s revisions in rev-list before %s" % (url, repodir, buildindex, rev))
193 return sortable_revision 215 return buildindex
194
195 216
diff --git a/bitbake/lib/bb/fetch/local.py b/bitbake/lib/bb/fetch/local.py
index 577774e597..f9bdf589cb 100644
--- a/bitbake/lib/bb/fetch/local.py
+++ b/bitbake/lib/bb/fetch/local.py
@@ -33,9 +33,9 @@ from bb.fetch import Fetch
33class Local(Fetch): 33class Local(Fetch):
34 def supports(self, url, urldata, d): 34 def supports(self, url, urldata, d):
35 """ 35 """
36 Check to see if a given url can be fetched with cvs. 36 Check to see if a given url represents a local fetch.
37 """ 37 """
38 return urldata.type in ['file','patch'] 38 return urldata.type in ['file']
39 39
40 def localpath(self, url, urldata, d): 40 def localpath(self, url, urldata, d):
41 """ 41 """
diff --git a/bitbake/lib/bb/fetch/svk.py b/bitbake/lib/bb/fetch/svk.py
index 442f85804f..120dad9d4e 100644
--- a/bitbake/lib/bb/fetch/svk.py
+++ b/bitbake/lib/bb/fetch/svk.py
@@ -36,7 +36,7 @@ class Svk(Fetch):
36 """Class to fetch a module or modules from svk repositories""" 36 """Class to fetch a module or modules from svk repositories"""
37 def supports(self, url, ud, d): 37 def supports(self, url, ud, d):
38 """ 38 """
39 Check to see if a given url can be fetched with cvs. 39 Check to see if a given url can be fetched with svk.
40 """ 40 """
41 return ud.type in ['svk'] 41 return ud.type in ['svk']
42 42
diff --git a/bitbake/lib/bb/fetch/wget.py b/bitbake/lib/bb/fetch/wget.py
index a0dca94040..fd93c7ec46 100644
--- a/bitbake/lib/bb/fetch/wget.py
+++ b/bitbake/lib/bb/fetch/wget.py
@@ -36,7 +36,7 @@ class Wget(Fetch):
36 """Class to fetch urls via 'wget'""" 36 """Class to fetch urls via 'wget'"""
37 def supports(self, url, ud, d): 37 def supports(self, url, ud, d):
38 """ 38 """
39 Check to see if a given url can be fetched with cvs. 39 Check to see if a given url can be fetched with wget.
40 """ 40 """
41 return ud.type in ['http','https','ftp'] 41 return ud.type in ['http','https','ftp']
42 42
diff --git a/bitbake/lib/bb/msg.py b/bitbake/lib/bb/msg.py
index a1b31e5d60..3fcf7091be 100644
--- a/bitbake/lib/bb/msg.py
+++ b/bitbake/lib/bb/msg.py
@@ -22,8 +22,8 @@ Message handling infrastructure for bitbake
22# with this program; if not, write to the Free Software Foundation, Inc., 22# with this program; if not, write to the Free Software Foundation, Inc.,
23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 23# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
24 24
25import sys, os, re, bb 25import sys, bb
26from bb import utils, event 26from bb import event
27 27
28debug_level = {} 28debug_level = {}
29 29
@@ -47,9 +47,9 @@ domain = bb.utils.Enum(
47class MsgBase(bb.event.Event): 47class MsgBase(bb.event.Event):
48 """Base class for messages""" 48 """Base class for messages"""
49 49
50 def __init__(self, msg, d ): 50 def __init__(self, msg):
51 self._message = msg 51 self._message = msg
52 event.Event.__init__(self, d) 52 event.Event.__init__(self)
53 53
54class MsgDebug(MsgBase): 54class MsgDebug(MsgBase):
55 """Debug Message""" 55 """Debug Message"""
@@ -97,33 +97,29 @@ def set_debug_domains(domains):
97# 97#
98 98
99def debug(level, domain, msg, fn = None): 99def debug(level, domain, msg, fn = None):
100 bb.event.fire(MsgDebug(msg, None))
101 if not domain: 100 if not domain:
102 domain = 'default' 101 domain = 'default'
103 if debug_level[domain] >= level: 102 if debug_level[domain] >= level:
104 print 'DEBUG: ' + msg 103 bb.event.fire(MsgDebug(msg), None)
105 104
106def note(level, domain, msg, fn = None): 105def note(level, domain, msg, fn = None):
107 bb.event.fire(MsgNote(msg, None))
108 if not domain: 106 if not domain:
109 domain = 'default' 107 domain = 'default'
110 if level == 1 or verbose or debug_level[domain] >= 1: 108 if level == 1 or verbose or debug_level[domain] >= 1:
111 print 'NOTE: ' + msg 109 bb.event.fire(MsgNote(msg), None)
112 110
113def warn(domain, msg, fn = None): 111def warn(domain, msg, fn = None):
114 bb.event.fire(MsgWarn(msg, None)) 112 bb.event.fire(MsgWarn(msg), None)
115 print 'WARNING: ' + msg
116 113
117def error(domain, msg, fn = None): 114def error(domain, msg, fn = None):
118 bb.event.fire(MsgError(msg, None)) 115 bb.event.fire(MsgError(msg), None)
119 print 'ERROR: ' + msg 116 print 'ERROR: ' + msg
120 117
121def fatal(domain, msg, fn = None): 118def fatal(domain, msg, fn = None):
122 bb.event.fire(MsgFatal(msg, None)) 119 bb.event.fire(MsgFatal(msg), None)
123 print 'ERROR: ' + msg 120 print 'FATAL: ' + msg
124 sys.exit(1) 121 sys.exit(1)
125 122
126def plain(msg, fn = None): 123def plain(msg, fn = None):
127 bb.event.fire(MsgPlain(msg, None)) 124 bb.event.fire(MsgPlain(msg), None)
128 print msg
129 125
diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py
index 915db214f5..86fa18ebd2 100644
--- a/bitbake/lib/bb/parse/parse_py/BBHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/BBHandler.py
@@ -94,7 +94,7 @@ def finalise(fn, d):
94 for f in anonfuncs: 94 for f in anonfuncs:
95 code = code + " %s(d)\n" % f 95 code = code + " %s(d)\n" % f
96 data.setVar("__anonfunc", code, d) 96 data.setVar("__anonfunc", code, d)
97 build.exec_func_python("__anonfunc", d) 97 build.exec_func("__anonfunc", d)
98 data.delVar('T', d) 98 data.delVar('T', d)
99 if t: 99 if t:
100 data.setVar('T', t, d) 100 data.setVar('T', t, d)
@@ -114,7 +114,7 @@ def finalise(fn, d):
114 tasklist = data.getVar('__BBTASKS', d) or [] 114 tasklist = data.getVar('__BBTASKS', d) or []
115 bb.build.add_tasks(tasklist, d) 115 bb.build.add_tasks(tasklist, d)
116 116
117 bb.event.fire(bb.event.RecipeParsed(fn, d)) 117 bb.event.fire(bb.event.RecipeParsed(fn), d)
118 118
119 119
120def handle(fn, d, include = 0): 120def handle(fn, d, include = 0):
@@ -185,18 +185,26 @@ def handle(fn, d, include = 0):
185 multi = data.getVar('BBCLASSEXTEND', d, 1) 185 multi = data.getVar('BBCLASSEXTEND', d, 1)
186 if multi: 186 if multi:
187 based = bb.data.createCopy(d) 187 based = bb.data.createCopy(d)
188 else:
189 based = d
190 try:
188 finalise(fn, based) 191 finalise(fn, based)
189 darray = {"": based} 192 except bb.parse.SkipPackage:
190 for cls in multi.split(): 193 bb.data.setVar("__SKIPPED", True, based)
191 pn = data.getVar('PN', d, True) 194 darray = {"": based}
192 based = bb.data.createCopy(d) 195
193 data.setVar('PN', pn + '-' + cls, based) 196 for cls in (multi or "").split():
194 inherit([cls], based) 197 pn = data.getVar('PN', d, True)
198 based = bb.data.createCopy(d)
199 data.setVar('PN', pn + '-' + cls, based)
200 inherit([cls], based)
201 try:
195 finalise(fn, based) 202 finalise(fn, based)
196 darray[cls] = based 203 except bb.parse.SkipPackage:
197 return darray 204 bb.data.setVar("__SKIPPED", True, based)
198 else: 205 darray[cls] = based
199 finalise(fn, d) 206 return darray
207
200 bbpath.pop(0) 208 bbpath.pop(0)
201 if oldfile: 209 if oldfile:
202 bb.data.setVar("FILE", oldfile, d) 210 bb.data.setVar("FILE", oldfile, d)
diff --git a/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
index c9f1ea13fb..23316ada58 100644
--- a/bitbake/lib/bb/parse/parse_py/ConfHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
@@ -34,10 +34,17 @@ __require_regexp__ = re.compile( r"require\s+(.+)" )
34__export_regexp__ = re.compile( r"export\s+(.+)" ) 34__export_regexp__ = re.compile( r"export\s+(.+)" )
35 35
36def init(data): 36def init(data):
37 if not bb.data.getVar('TOPDIR', data): 37 topdir = bb.data.getVar('TOPDIR', data)
38 bb.data.setVar('TOPDIR', os.getcwd(), data) 38 if not topdir:
39 topdir = os.getcwd()
40 bb.data.setVar('TOPDIR', topdir, data)
39 if not bb.data.getVar('BBPATH', data): 41 if not bb.data.getVar('BBPATH', data):
40 bb.data.setVar('BBPATH', os.path.join(sys.prefix, 'share', 'bitbake'), data) 42 from pkg_resources import Requirement, resource_filename
43 bitbake = Requirement.parse("bitbake")
44 datadir = resource_filename(bitbake, "../share/bitbake")
45 basedir = resource_filename(bitbake, "..")
46 bb.data.setVar('BBPATH', '%s:%s:%s' % (topdir, datadir, basedir), data)
47
41 48
42def supports(fn, d): 49def supports(fn, d):
43 return localpath(fn, d)[-5:] == ".conf" 50 return localpath(fn, d)[-5:] == ".conf"
diff --git a/bitbake/lib/bb/providers.py b/bitbake/lib/bb/providers.py
index 001281a293..8617251ca3 100644
--- a/bitbake/lib/bb/providers.py
+++ b/bitbake/lib/bb/providers.py
@@ -21,7 +21,7 @@
21# with this program; if not, write to the Free Software Foundation, Inc., 21# with this program; if not, write to the Free Software Foundation, Inc.,
22# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 22# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
23 23
24import os, re 24import re
25from bb import data, utils 25from bb import data, utils
26import bb 26import bb
27 27
@@ -203,7 +203,7 @@ def _filterProviders(providers, item, cfgData, dataCache):
203 eligible.append(preferred_versions[pn][1]) 203 eligible.append(preferred_versions[pn][1])
204 204
205 # Now add latest verisons 205 # Now add latest verisons
206 for pn in pkg_pn.keys(): 206 for pn in sortpkg_pn.keys():
207 if pn in preferred_versions and preferred_versions[pn][1]: 207 if pn in preferred_versions and preferred_versions[pn][1]:
208 continue 208 continue
209 preferred_versions[pn] = findLatestProvider(pn, cfgData, dataCache, sortpkg_pn[pn][0]) 209 preferred_versions[pn] = findLatestProvider(pn, cfgData, dataCache, sortpkg_pn[pn][0])
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
index cce5da4057..c3ad442e47 100644
--- a/bitbake/lib/bb/runqueue.py
+++ b/bitbake/lib/bb/runqueue.py
@@ -37,20 +37,38 @@ class RunQueueStats:
37 """ 37 """
38 Holds statistics on the tasks handled by the associated runQueue 38 Holds statistics on the tasks handled by the associated runQueue
39 """ 39 """
40 def __init__(self): 40 def __init__(self, total):
41 self.completed = 0 41 self.completed = 0
42 self.skipped = 0 42 self.skipped = 0
43 self.failed = 0 43 self.failed = 0
44 self.active = 0
45 self.total = total
44 46
45 def taskFailed(self): 47 def taskFailed(self):
48 self.active = self.active - 1
46 self.failed = self.failed + 1 49 self.failed = self.failed + 1
47 50
48 def taskCompleted(self, number = 1): 51 def taskCompleted(self, number = 1):
52 self.active = self.active - number
49 self.completed = self.completed + number 53 self.completed = self.completed + number
50 54
51 def taskSkipped(self, number = 1): 55 def taskSkipped(self, number = 1):
56 self.active = self.active + number
52 self.skipped = self.skipped + number 57 self.skipped = self.skipped + number
53 58
59 def taskActive(self):
60 self.active = self.active + 1
61
62# These values indicate the next step due to be run in the
63# runQueue state machine
64runQueuePrepare = 2
65runQueueRunInit = 3
66runQueueRunning = 4
67runQueueFailed = 6
68runQueueCleanUp = 7
69runQueueComplete = 8
70runQueueChildProcess = 9
71
54class RunQueueScheduler: 72class RunQueueScheduler:
55 """ 73 """
56 Control the order tasks are scheduled in. 74 Control the order tasks are scheduled in.
@@ -142,9 +160,9 @@ class RunQueue:
142 self.cooker = cooker 160 self.cooker = cooker
143 self.dataCache = dataCache 161 self.dataCache = dataCache
144 self.taskData = taskData 162 self.taskData = taskData
163 self.cfgData = cfgData
145 self.targets = targets 164 self.targets = targets
146 165
147 self.cfgdata = cfgData
148 self.number_tasks = int(bb.data.getVar("BB_NUMBER_THREADS", cfgData, 1) or 1) 166 self.number_tasks = int(bb.data.getVar("BB_NUMBER_THREADS", cfgData, 1) or 1)
149 self.multi_provider_whitelist = (bb.data.getVar("MULTI_PROVIDER_WHITELIST", cfgData, 1) or "").split() 167 self.multi_provider_whitelist = (bb.data.getVar("MULTI_PROVIDER_WHITELIST", cfgData, 1) or "").split()
150 self.scheduler = bb.data.getVar("BB_SCHEDULER", cfgData, 1) or "speed" 168 self.scheduler = bb.data.getVar("BB_SCHEDULER", cfgData, 1) or "speed"
@@ -152,12 +170,13 @@ class RunQueue:
152 self.stampwhitelist = bb.data.getVar("BB_STAMP_WHITELIST", cfgData, 1) or "" 170 self.stampwhitelist = bb.data.getVar("BB_STAMP_WHITELIST", cfgData, 1) or ""
153 171
154 def reset_runqueue(self): 172 def reset_runqueue(self):
155
156 self.runq_fnid = [] 173 self.runq_fnid = []
157 self.runq_task = [] 174 self.runq_task = []
158 self.runq_depends = [] 175 self.runq_depends = []
159 self.runq_revdeps = [] 176 self.runq_revdeps = []
160 177
178 self.state = runQueuePrepare
179
161 def get_user_idstring(self, task): 180 def get_user_idstring(self, task):
162 fn = self.taskData.fn_index[self.runq_fnid[task]] 181 fn = self.taskData.fn_index[self.runq_fnid[task]]
163 taskname = self.runq_task[task] 182 taskname = self.runq_task[task]
@@ -653,6 +672,8 @@ class RunQueue:
653 672
654 #self.dump_data(taskData) 673 #self.dump_data(taskData)
655 674
675 self.state = runQueueRunInit
676
656 def check_stamps(self): 677 def check_stamps(self):
657 unchecked = {} 678 unchecked = {}
658 current = [] 679 current = []
@@ -796,39 +817,51 @@ class RunQueue:
796 (if the abort on failure configuration option isn't set) 817 (if the abort on failure configuration option isn't set)
797 """ 818 """
798 819
799 failures = 0 820 if self.state is runQueuePrepare:
800 while 1: 821 self.prepare_runqueue()
801 failed_fnids = [] 822
802 try: 823 if self.state is runQueueRunInit:
803 self.execute_runqueue_internal() 824 bb.msg.note(1, bb.msg.domain.RunQueue, "Executing runqueue")
804 finally: 825 self.execute_runqueue_initVars()
805 if self.master_process: 826
806 failed_fnids = self.finish_runqueue() 827 if self.state is runQueueRunning:
807 if len(failed_fnids) == 0: 828 self.execute_runqueue_internal()
808 return failures 829
830 if self.state is runQueueCleanUp:
831 self.finish_runqueue()
832
833 if self.state is runQueueFailed:
809 if not self.taskData.tryaltconfigs: 834 if not self.taskData.tryaltconfigs:
810 raise bb.runqueue.TaskFailure(failed_fnids) 835 raise bb.runqueue.TaskFailure(self.failed_fnids)
811 for fnid in failed_fnids: 836 for fnid in self.failed_fnids:
812 #print "Failure: %s %s %s" % (fnid, self.taskData.fn_index[fnid], self.runq_task[fnid])
813 self.taskData.fail_fnid(fnid) 837 self.taskData.fail_fnid(fnid)
814 failures = failures + 1
815 self.reset_runqueue() 838 self.reset_runqueue()
816 self.prepare_runqueue() 839
840 if self.state is runQueueComplete:
841 # All done
842 bb.msg.note(1, bb.msg.domain.RunQueue, "Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and %d failed." % (self.stats.completed, self.stats.skipped, self.stats.failed))
843 return False
844
845 if self.state is runQueueChildProcess:
846 print "Child process"
847 return False
848
849 # Loop
850 return True
817 851
818 def execute_runqueue_initVars(self): 852 def execute_runqueue_initVars(self):
819 853
820 self.stats = RunQueueStats() 854 self.stats = RunQueueStats(len(self.runq_fnid))
821 855
822 self.active_builds = 0
823 self.runq_buildable = [] 856 self.runq_buildable = []
824 self.runq_running = [] 857 self.runq_running = []
825 self.runq_complete = [] 858 self.runq_complete = []
826 self.build_pids = {} 859 self.build_pids = {}
860 self.build_pipes = {}
827 self.failed_fnids = [] 861 self.failed_fnids = []
828 self.master_process = True
829 862
830 # Mark initial buildable tasks 863 # Mark initial buildable tasks
831 for task in range(len(self.runq_fnid)): 864 for task in range(self.stats.total):
832 self.runq_running.append(0) 865 self.runq_running.append(0)
833 self.runq_complete.append(0) 866 self.runq_complete.append(0)
834 if len(self.runq_depends[task]) == 0: 867 if len(self.runq_depends[task]) == 0:
@@ -836,6 +869,10 @@ class RunQueue:
836 else: 869 else:
837 self.runq_buildable.append(0) 870 self.runq_buildable.append(0)
838 871
872 self.state = runQueueRunning
873
874 event.fire(bb.event.StampUpdate(self.target_pairs, self.dataCache.stamp), self.cfgData)
875
839 def task_complete(self, task): 876 def task_complete(self, task):
840 """ 877 """
841 Mark a task as completed 878 Mark a task as completed
@@ -858,26 +895,32 @@ class RunQueue:
858 taskname = self.runq_task[revdep] 895 taskname = self.runq_task[revdep]
859 bb.msg.debug(1, bb.msg.domain.RunQueue, "Marking task %s (%s, %s) as buildable" % (revdep, fn, taskname)) 896 bb.msg.debug(1, bb.msg.domain.RunQueue, "Marking task %s (%s, %s) as buildable" % (revdep, fn, taskname))
860 897
898 def task_fail(self, task, exitcode):
899 """
900 Called when a task has failed
901 Updates the state engine with the failure
902 """
903 bb.msg.error(bb.msg.domain.RunQueue, "Task %s (%s) failed with %s" % (task, self.get_user_idstring(task), exitcode))
904 self.stats.taskFailed()
905 fnid = self.runq_fnid[task]
906 self.failed_fnids.append(fnid)
907 bb.event.fire(runQueueTaskFailed(task, self.stats, self), self.cfgData)
908 if self.taskData.abort:
909 self.state = runQueueCleanup
910
861 def execute_runqueue_internal(self): 911 def execute_runqueue_internal(self):
862 """ 912 """
863 Run the tasks in a queue prepared by prepare_runqueue 913 Run the tasks in a queue prepared by prepare_runqueue
864 """ 914 """
865 915
866 bb.msg.note(1, bb.msg.domain.RunQueue, "Executing runqueue") 916 if self.stats.total == 0:
867
868 self.execute_runqueue_initVars()
869
870 if len(self.runq_fnid) == 0:
871 # nothing to do 917 # nothing to do
872 return [] 918 self.state = runQueueCleanup
873
874 def sigint_handler(signum, frame):
875 raise KeyboardInterrupt
876
877 event.fire(bb.event.StampUpdate(self.target_pairs, self.dataCache.stamp, self.cfgdata))
878 919
879 while True: 920 while True:
880 task = self.sched.next() 921 task = None
922 if self.stats.active < self.number_tasks:
923 task = self.sched.next()
881 if task is not None: 924 if task is not None:
882 fn = self.taskData.fn_index[self.runq_fnid[task]] 925 fn = self.taskData.fn_index[self.runq_fnid[task]]
883 926
@@ -885,107 +928,143 @@ class RunQueue:
885 if self.check_stamp_task(task): 928 if self.check_stamp_task(task):
886 bb.msg.debug(2, bb.msg.domain.RunQueue, "Stamp current task %s (%s)" % (task, self.get_user_idstring(task))) 929 bb.msg.debug(2, bb.msg.domain.RunQueue, "Stamp current task %s (%s)" % (task, self.get_user_idstring(task)))
887 self.runq_running[task] = 1 930 self.runq_running[task] = 1
931 self.runq_buildable[task] = 1
888 self.task_complete(task) 932 self.task_complete(task)
889 self.stats.taskCompleted() 933 self.stats.taskCompleted()
890 self.stats.taskSkipped() 934 self.stats.taskSkipped()
891 continue 935 continue
892 936
893 bb.msg.note(1, bb.msg.domain.RunQueue, "Running task %d of %d (ID: %s, %s)" % (self.stats.completed + self.active_builds + 1, len(self.runq_fnid), task, self.get_user_idstring(task)))
894 sys.stdout.flush() 937 sys.stdout.flush()
895 sys.stderr.flush() 938 sys.stderr.flush()
896 try: 939 try:
940 pipein, pipeout = os.pipe()
897 pid = os.fork() 941 pid = os.fork()
898 except OSError, e: 942 except OSError, e:
899 bb.msg.fatal(bb.msg.domain.RunQueue, "fork failed: %d (%s)" % (e.errno, e.strerror)) 943 bb.msg.fatal(bb.msg.domain.RunQueue, "fork failed: %d (%s)" % (e.errno, e.strerror))
900 if pid == 0: 944 if pid == 0:
901 # Bypass master process' handling 945 os.close(pipein)
902 self.master_process = False 946 # Save out the PID so that the event can include it in the
903 # Stop Ctrl+C being sent to children 947 # events
904 # signal.signal(signal.SIGINT, signal.SIG_IGN) 948 bb.event.worker_pid = os.getpid()
949 bb.event.worker_pipe = pipeout
950
951 self.state = runQueueChildProcess
905 # Make the child the process group leader 952 # Make the child the process group leader
906 os.setpgid(0, 0) 953 os.setpgid(0, 0)
954 # No stdin
907 newsi = os.open('/dev/null', os.O_RDWR) 955 newsi = os.open('/dev/null', os.O_RDWR)
908 os.dup2(newsi, sys.stdin.fileno()) 956 os.dup2(newsi, sys.stdin.fileno())
909 self.cooker.configuration.cmd = taskname[3:] 957
958 bb.event.fire(runQueueTaskStarted(task, self.stats, self), self.cfgData)
959 bb.msg.note(1, bb.msg.domain.RunQueue,
960 "Running task %d of %d (ID: %s, %s)" % (self.stats.completed + self.stats.active + 1,
961 self.stats.total,
962 task,
963 self.get_user_idstring(task)))
964
910 bb.data.setVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY", self, self.cooker.configuration.data) 965 bb.data.setVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY", self, self.cooker.configuration.data)
911 try: 966 try:
912 self.cooker.tryBuild(fn) 967 self.cooker.tryBuild(fn, taskname[3:])
913 except bb.build.EventException: 968 except bb.build.EventException:
914 bb.msg.error(bb.msg.domain.Build, "Build of " + fn + " " + taskname + " failed") 969 bb.msg.error(bb.msg.domain.Build, "Build of " + fn + " " + taskname + " failed")
915 sys.exit(1) 970 os._exit(1)
916 except: 971 except:
917 bb.msg.error(bb.msg.domain.Build, "Build of " + fn + " " + taskname + " failed") 972 bb.msg.error(bb.msg.domain.Build, "Build of " + fn + " " + taskname + " failed")
918 raise 973 os._exit(1)
919 sys.exit(0) 974 os._exit(0)
975
920 self.build_pids[pid] = task 976 self.build_pids[pid] = task
977 self.build_pipes[pid] = runQueuePipe(pipein, pipeout, self.cfgData)
921 self.runq_running[task] = 1 978 self.runq_running[task] = 1
922 self.active_builds = self.active_builds + 1 979 self.stats.taskActive()
923 if self.active_builds < self.number_tasks: 980 if self.stats.active < self.number_tasks:
924 continue 981 continue
925 if self.active_builds > 0: 982
926 result = os.waitpid(-1, 0) 983 for pipe in self.build_pipes:
927 self.active_builds = self.active_builds - 1 984 self.build_pipes[pipe].read()
985
986 if self.stats.active > 0:
987 result = os.waitpid(-1, os.WNOHANG)
988 if result[0] is 0 and result[1] is 0:
989 return
928 task = self.build_pids[result[0]] 990 task = self.build_pids[result[0]]
991 del self.build_pids[result[0]]
992 self.build_pipes[result[0]].close()
993 del self.build_pipes[result[0]]
929 if result[1] != 0: 994 if result[1] != 0:
930 del self.build_pids[result[0]] 995 self.task_fail(task, result[1])
931 bb.msg.error(bb.msg.domain.RunQueue, "Task %s (%s) failed" % (task, self.get_user_idstring(task))) 996 return
932 self.failed_fnids.append(self.runq_fnid[task])
933 self.stats.taskFailed()
934 if not self.taskData.abort:
935 continue
936 break
937 self.task_complete(task) 997 self.task_complete(task)
938 self.stats.taskCompleted() 998 self.stats.taskCompleted()
939 del self.build_pids[result[0]] 999 bb.event.fire(runQueueTaskCompleted(task, self.stats, self), self.cfgData)
940 continue 1000 continue
1001
1002 if len(self.failed_fnids) != 0:
1003 self.state = runQueueFailed
1004 return
1005
1006 # Sanity Checks
1007 for task in range(self.stats.total):
1008 if self.runq_buildable[task] == 0:
1009 bb.msg.error(bb.msg.domain.RunQueue, "Task %s never buildable!" % task)
1010 if self.runq_running[task] == 0:
1011 bb.msg.error(bb.msg.domain.RunQueue, "Task %s never ran!" % task)
1012 if self.runq_complete[task] == 0:
1013 bb.msg.error(bb.msg.domain.RunQueue, "Task %s never completed!" % task)
1014 self.state = runQueueComplete
941 return 1015 return
942 1016
943 def finish_runqueue(self): 1017 def finish_runqueue_now(self):
1018 bb.msg.note(1, bb.msg.domain.RunQueue, "Sending SIGINT to remaining %s tasks" % self.stats.active)
1019 for k, v in self.build_pids.iteritems():
1020 try:
1021 os.kill(-k, signal.SIGINT)
1022 except:
1023 pass
1024 for pipe in self.build_pipes:
1025 self.build_pipes[pipe].read()
1026
1027 def finish_runqueue(self, now = False):
1028 self.state = runQueueCleanUp
1029 if now:
1030 self.finish_runqueue_now()
944 try: 1031 try:
945 while self.active_builds > 0: 1032 while self.stats.active > 0:
946 bb.msg.note(1, bb.msg.domain.RunQueue, "Waiting for %s active tasks to finish" % self.active_builds) 1033 bb.event.fire(runQueueExitWait(self.stats.active), self.cfgData)
1034 bb.msg.note(1, bb.msg.domain.RunQueue, "Waiting for %s active tasks to finish" % self.stats.active)
947 tasknum = 1 1035 tasknum = 1
948 for k, v in self.build_pids.iteritems(): 1036 for k, v in self.build_pids.iteritems():
949 bb.msg.note(1, bb.msg.domain.RunQueue, "%s: %s (%s)" % (tasknum, self.get_user_idstring(v), k)) 1037 bb.msg.note(1, bb.msg.domain.RunQueue, "%s: %s (%s)" % (tasknum, self.get_user_idstring(v), k))
950 tasknum = tasknum + 1 1038 tasknum = tasknum + 1
951 result = os.waitpid(-1, 0) 1039 result = os.waitpid(-1, os.WNOHANG)
1040 if result[0] is 0 and result[1] is 0:
1041 return
952 task = self.build_pids[result[0]] 1042 task = self.build_pids[result[0]]
953 if result[1] != 0:
954 bb.msg.error(bb.msg.domain.RunQueue, "Task %s (%s) failed" % (task, self.get_user_idstring(task)))
955 self.failed_fnids.append(self.runq_fnid[task])
956 self.stats.taskFailed()
957 del self.build_pids[result[0]] 1043 del self.build_pids[result[0]]
958 self.active_builds = self.active_builds - 1 1044 self.build_pipes[result[0]].close()
959 bb.msg.note(1, bb.msg.domain.RunQueue, "Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and %d failed." % (self.stats.completed, self.stats.skipped, self.stats.failed)) 1045 del self.build_pipes[result[0]]
960 return self.failed_fnids 1046 if result[1] != 0:
961 except KeyboardInterrupt: 1047 self.task_fail(task, result[1])
962 bb.msg.note(1, bb.msg.domain.RunQueue, "Sending SIGINT to remaining %s tasks" % self.active_builds) 1048 else:
963 for k, v in self.build_pids.iteritems(): 1049 self.stats.taskCompleted()
964 try: 1050 bb.event.fire(runQueueTaskCompleted(task, self.stats, self), self.cfgData)
965 os.kill(-k, signal.SIGINT) 1051 except:
966 except: 1052 self.finish_runqueue_now()
967 pass
968 raise 1053 raise
969 1054
970 # Sanity Checks 1055 if len(self.failed_fnids) != 0:
971 for task in range(len(self.runq_fnid)): 1056 self.state = runQueueFailed
972 if self.runq_buildable[task] == 0: 1057 return
973 bb.msg.error(bb.msg.domain.RunQueue, "Task %s never buildable!" % task)
974 if self.runq_running[task] == 0:
975 bb.msg.error(bb.msg.domain.RunQueue, "Task %s never ran!" % task)
976 if self.runq_complete[task] == 0:
977 bb.msg.error(bb.msg.domain.RunQueue, "Task %s never completed!" % task)
978
979 bb.msg.note(1, bb.msg.domain.RunQueue, "Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and %d failed." % (self.stats.completed, self.stats.skipped, self.stats.failed))
980 1058
981 return self.failed_fnids 1059 self.state = runQueueComplete
1060 return
982 1061
983 def dump_data(self, taskQueue): 1062 def dump_data(self, taskQueue):
984 """ 1063 """
985 Dump some debug information on the internal data structures 1064 Dump some debug information on the internal data structures
986 """ 1065 """
987 bb.msg.debug(3, bb.msg.domain.RunQueue, "run_tasks:") 1066 bb.msg.debug(3, bb.msg.domain.RunQueue, "run_tasks:")
988 for task in range(len(self.runq_fnid)): 1067 for task in range(len(self.runq_task)):
989 bb.msg.debug(3, bb.msg.domain.RunQueue, " (%s)%s - %s: %s Deps %s RevDeps %s" % (task, 1068 bb.msg.debug(3, bb.msg.domain.RunQueue, " (%s)%s - %s: %s Deps %s RevDeps %s" % (task,
990 taskQueue.fn_index[self.runq_fnid[task]], 1069 taskQueue.fn_index[self.runq_fnid[task]],
991 self.runq_task[task], 1070 self.runq_task[task],
@@ -994,7 +1073,7 @@ class RunQueue:
994 self.runq_revdeps[task])) 1073 self.runq_revdeps[task]))
995 1074
996 bb.msg.debug(3, bb.msg.domain.RunQueue, "sorted_tasks:") 1075 bb.msg.debug(3, bb.msg.domain.RunQueue, "sorted_tasks:")
997 for task1 in range(len(self.runq_fnid)): 1076 for task1 in range(len(self.runq_task)):
998 if task1 in self.prio_map: 1077 if task1 in self.prio_map:
999 task = self.prio_map[task1] 1078 task = self.prio_map[task1]
1000 bb.msg.debug(3, bb.msg.domain.RunQueue, " (%s)%s - %s: %s Deps %s RevDeps %s" % (task, 1079 bb.msg.debug(3, bb.msg.domain.RunQueue, " (%s)%s - %s: %s Deps %s RevDeps %s" % (task,
@@ -1005,6 +1084,58 @@ class RunQueue:
1005 self.runq_revdeps[task])) 1084 self.runq_revdeps[task]))
1006 1085
1007 1086
1087class TaskFailure(Exception):
1088 """
1089 Exception raised when a task in a runqueue fails
1090 """
1091 def __init__(self, x):
1092 self.args = x
1093
1094
1095class runQueueExitWait(bb.event.Event):
1096 """
1097 Event when waiting for task processes to exit
1098 """
1099
1100 def __init__(self, remain):
1101 self.remain = remain
1102 self.message = "Waiting for %s active tasks to finish" % remain
1103 bb.event.Event.__init__(self)
1104
1105class runQueueEvent(bb.event.Event):
1106 """
1107 Base runQueue event class
1108 """
1109 def __init__(self, task, stats, rq):
1110 self.taskid = task
1111 self.taskstring = rq.get_user_idstring(task)
1112 self.stats = stats
1113 bb.event.Event.__init__(self)
1114
1115class runQueueTaskStarted(runQueueEvent):
1116 """
1117 Event notifying a task was started
1118 """
1119 def __init__(self, task, stats, rq):
1120 runQueueEvent.__init__(self, task, stats, rq)
1121 self.message = "Running task %s (%d of %d) (%s)" % (task, stats.completed + stats.active + 1, self.stats.total, self.taskstring)
1122
1123class runQueueTaskFailed(runQueueEvent):
1124 """
1125 Event notifying a task failed
1126 """
1127 def __init__(self, task, stats, rq):
1128 runQueueEvent.__init__(self, task, stats, rq)
1129 self.message = "Task %s failed (%s)" % (task, self.taskstring)
1130
1131class runQueueTaskCompleted(runQueueEvent):
1132 """
1133 Event notifying a task completed
1134 """
1135 def __init__(self, task, stats, rq):
1136 runQueueEvent.__init__(self, task, stats, rq)
1137 self.message = "Task %s completed (%s)" % (task, self.taskstring)
1138
1008def check_stamp_fn(fn, taskname, d): 1139def check_stamp_fn(fn, taskname, d):
1009 rq = bb.data.getVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY", d) 1140 rq = bb.data.getVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY", d)
1010 fnid = rq.taskData.getfn_id(fn) 1141 fnid = rq.taskData.getfn_id(fn)
@@ -1013,3 +1144,31 @@ def check_stamp_fn(fn, taskname, d):
1013 return rq.check_stamp_task(taskid) 1144 return rq.check_stamp_task(taskid)
1014 return None 1145 return None
1015 1146
1147class runQueuePipe():
1148 """
1149 Abstraction for a pipe between a worker thread and the server
1150 """
1151 def __init__(self, pipein, pipeout, d):
1152 self.fd = pipein
1153 os.close(pipeout)
1154 self.queue = ""
1155 self.d = d
1156
1157 def read(self):
1158 start = len(self.queue)
1159 self.queue = self.queue + os.read(self.fd, 1024)
1160 end = len(self.queue)
1161 index = self.queue.find("</event>")
1162 while index != -1:
1163 bb.event.fire_from_worker(self.queue[:index+8], self.d)
1164 self.queue = self.queue[index+8:]
1165 index = self.queue.find("</event>")
1166 return (end > start)
1167
1168 def close(self):
1169 while self.read():
1170 continue
1171 if len(self.queue) > 0:
1172 print "Warning, worker left partial message"
1173 os.close(self.fd)
1174
diff --git a/bitbake-dev/lib/bb/server/__init__.py b/bitbake/lib/bb/server/__init__.py
index 1a732236e2..1a732236e2 100644
--- a/bitbake-dev/lib/bb/server/__init__.py
+++ b/bitbake/lib/bb/server/__init__.py
diff --git a/bitbake-dev/lib/bb/server/none.py b/bitbake/lib/bb/server/none.py
index ebda111582..ebda111582 100644
--- a/bitbake-dev/lib/bb/server/none.py
+++ b/bitbake/lib/bb/server/none.py
diff --git a/bitbake-dev/lib/bb/server/xmlrpc.py b/bitbake/lib/bb/server/xmlrpc.py
index 3364918c77..3364918c77 100644
--- a/bitbake-dev/lib/bb/server/xmlrpc.py
+++ b/bitbake/lib/bb/server/xmlrpc.py
diff --git a/bitbake/lib/bb/shell.py b/bitbake/lib/bb/shell.py
index b1ad78306d..66e51719a4 100644
--- a/bitbake/lib/bb/shell.py
+++ b/bitbake/lib/bb/shell.py
@@ -151,9 +151,6 @@ class BitBakeShellCommands:
151 if len( names ) == 0: names = [ globexpr ] 151 if len( names ) == 0: names = [ globexpr ]
152 print "SHELL: Building %s" % ' '.join( names ) 152 print "SHELL: Building %s" % ' '.join( names )
153 153
154 oldcmd = cooker.configuration.cmd
155 cooker.configuration.cmd = cmd
156
157 td = taskdata.TaskData(cooker.configuration.abort) 154 td = taskdata.TaskData(cooker.configuration.abort)
158 localdata = data.createCopy(cooker.configuration.data) 155 localdata = data.createCopy(cooker.configuration.data)
159 data.update_data(localdata) 156 data.update_data(localdata)
@@ -168,7 +165,7 @@ class BitBakeShellCommands:
168 if len(providers) == 0: 165 if len(providers) == 0:
169 raise Providers.NoProvider 166 raise Providers.NoProvider
170 167
171 tasks.append([name, "do_%s" % cooker.configuration.cmd]) 168 tasks.append([name, "do_%s" % cmd])
172 169
173 td.add_unresolved(localdata, cooker.status) 170 td.add_unresolved(localdata, cooker.status)
174 171
@@ -189,7 +186,6 @@ class BitBakeShellCommands:
189 print "ERROR: Couldn't build '%s'" % names 186 print "ERROR: Couldn't build '%s'" % names
190 last_exception = e 187 last_exception = e
191 188
192 cooker.configuration.cmd = oldcmd
193 189
194 build.usage = "<providee>" 190 build.usage = "<providee>"
195 191
@@ -208,6 +204,11 @@ class BitBakeShellCommands:
208 self.build( params, "configure" ) 204 self.build( params, "configure" )
209 configure.usage = "<providee>" 205 configure.usage = "<providee>"
210 206
207 def install( self, params ):
208 """Execute 'install' on a providee"""
209 self.build( params, "install" )
210 install.usage = "<providee>"
211
211 def edit( self, params ): 212 def edit( self, params ):
212 """Call $EDITOR on a providee""" 213 """Call $EDITOR on a providee"""
213 name = params[0] 214 name = params[0]
@@ -240,18 +241,14 @@ class BitBakeShellCommands:
240 bf = completeFilePath( name ) 241 bf = completeFilePath( name )
241 print "SHELL: Calling '%s' on '%s'" % ( cmd, bf ) 242 print "SHELL: Calling '%s' on '%s'" % ( cmd, bf )
242 243
243 oldcmd = cooker.configuration.cmd
244 cooker.configuration.cmd = cmd
245
246 try: 244 try:
247 cooker.buildFile(bf) 245 cooker.buildFile(bf, cmd)
248 except parse.ParseError: 246 except parse.ParseError:
249 print "ERROR: Unable to open or parse '%s'" % bf 247 print "ERROR: Unable to open or parse '%s'" % bf
250 except build.EventException, e: 248 except build.EventException, e:
251 print "ERROR: Couldn't build '%s'" % name 249 print "ERROR: Couldn't build '%s'" % name
252 last_exception = e 250 last_exception = e
253 251
254 cooker.configuration.cmd = oldcmd
255 fileBuild.usage = "<bbfile>" 252 fileBuild.usage = "<bbfile>"
256 253
257 def fileClean( self, params ): 254 def fileClean( self, params ):
@@ -493,7 +490,7 @@ SRC_URI = ""
493 interpreter.interact( "SHELL: Expert Mode - BitBake Python %s\nType 'help' for more information, press CTRL-D to switch back to BBSHELL." % sys.version ) 490 interpreter.interact( "SHELL: Expert Mode - BitBake Python %s\nType 'help' for more information, press CTRL-D to switch back to BBSHELL." % sys.version )
494 491
495 def showdata( self, params ): 492 def showdata( self, params ):
496 """Show the parsed metadata for a given providee""" 493 """Execute 'showdata' on a providee"""
497 cooker.showEnvironment(None, params) 494 cooker.showEnvironment(None, params)
498 showdata.usage = "<providee>" 495 showdata.usage = "<providee>"
499 496
diff --git a/bitbake/lib/bb/taskdata.py b/bitbake/lib/bb/taskdata.py
index 976e0ca1f9..4a88e75f6d 100644
--- a/bitbake/lib/bb/taskdata.py
+++ b/bitbake/lib/bb/taskdata.py
@@ -23,8 +23,20 @@ Task data collection and handling
23# with this program; if not, write to the Free Software Foundation, Inc., 23# with this program; if not, write to the Free Software Foundation, Inc.,
24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 24# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
25 25
26from bb import data, event, mkdirhier, utils 26import bb
27import bb, os 27
28def re_match_strings(target, strings):
29 """
30 Whether or not the string 'target' matches
31 any of the given strings, which may be regular expression strings
32 """
33 import re
34
35 for name in strings:
36 if (name==target or
37 re.search(name,target)!=None):
38 return True
39 return False
28 40
29class TaskData: 41class TaskData:
30 """ 42 """
@@ -264,7 +276,7 @@ class TaskData:
264 """ 276 """
265 unresolved = [] 277 unresolved = []
266 for target in self.build_names_index: 278 for target in self.build_names_index:
267 if target in dataCache.ignored_dependencies: 279 if re_match_strings(target, dataCache.ignored_dependencies):
268 continue 280 continue
269 if self.build_names_index.index(target) in self.failed_deps: 281 if self.build_names_index.index(target) in self.failed_deps:
270 continue 282 continue
@@ -279,7 +291,7 @@ class TaskData:
279 """ 291 """
280 unresolved = [] 292 unresolved = []
281 for target in self.run_names_index: 293 for target in self.run_names_index:
282 if target in dataCache.ignored_dependencies: 294 if re_match_strings(target, dataCache.ignored_dependencies):
283 continue 295 continue
284 if self.run_names_index.index(target) in self.failed_rdeps: 296 if self.run_names_index.index(target) in self.failed_rdeps:
285 continue 297 continue
@@ -359,7 +371,7 @@ class TaskData:
359 added internally during dependency resolution 371 added internally during dependency resolution
360 """ 372 """
361 373
362 if item in dataCache.ignored_dependencies: 374 if re_match_strings(item, dataCache.ignored_dependencies):
363 return 375 return
364 376
365 if not item in dataCache.providers: 377 if not item in dataCache.providers:
@@ -367,7 +379,7 @@ class TaskData:
367 bb.msg.note(2, bb.msg.domain.Provider, "Nothing PROVIDES '%s' (but '%s' DEPENDS on or otherwise requires it)" % (item, self.get_dependees_str(item))) 379 bb.msg.note(2, bb.msg.domain.Provider, "Nothing PROVIDES '%s' (but '%s' DEPENDS on or otherwise requires it)" % (item, self.get_dependees_str(item)))
368 else: 380 else:
369 bb.msg.note(2, bb.msg.domain.Provider, "Nothing PROVIDES '%s'" % (item)) 381 bb.msg.note(2, bb.msg.domain.Provider, "Nothing PROVIDES '%s'" % (item))
370 bb.event.fire(bb.event.NoProvider(item, cfgData)) 382 bb.event.fire(bb.event.NoProvider(item), cfgData)
371 raise bb.providers.NoProvider(item) 383 raise bb.providers.NoProvider(item)
372 384
373 if self.have_build_target(item): 385 if self.have_build_target(item):
@@ -380,7 +392,7 @@ class TaskData:
380 392
381 if not eligible: 393 if not eligible:
382 bb.msg.note(2, bb.msg.domain.Provider, "No buildable provider PROVIDES '%s' but '%s' DEPENDS on or otherwise requires it. Enable debugging and see earlier logs to find unbuildable providers." % (item, self.get_dependees_str(item))) 394 bb.msg.note(2, bb.msg.domain.Provider, "No buildable provider PROVIDES '%s' but '%s' DEPENDS on or otherwise requires it. Enable debugging and see earlier logs to find unbuildable providers." % (item, self.get_dependees_str(item)))
383 bb.event.fire(bb.event.NoProvider(item, cfgData)) 395 bb.event.fire(bb.event.NoProvider(item), cfgData)
384 raise bb.providers.NoProvider(item) 396 raise bb.providers.NoProvider(item)
385 397
386 if len(eligible) > 1 and foundUnique == False: 398 if len(eligible) > 1 and foundUnique == False:
@@ -390,7 +402,7 @@ class TaskData:
390 providers_list.append(dataCache.pkg_fn[fn]) 402 providers_list.append(dataCache.pkg_fn[fn])
391 bb.msg.note(1, bb.msg.domain.Provider, "multiple providers are available for %s (%s);" % (item, ", ".join(providers_list))) 403 bb.msg.note(1, bb.msg.domain.Provider, "multiple providers are available for %s (%s);" % (item, ", ".join(providers_list)))
392 bb.msg.note(1, bb.msg.domain.Provider, "consider defining PREFERRED_PROVIDER_%s" % item) 404 bb.msg.note(1, bb.msg.domain.Provider, "consider defining PREFERRED_PROVIDER_%s" % item)
393 bb.event.fire(bb.event.MultipleProviders(item, providers_list, cfgData)) 405 bb.event.fire(bb.event.MultipleProviders(item, providers_list), cfgData)
394 self.consider_msgs_cache.append(item) 406 self.consider_msgs_cache.append(item)
395 407
396 for fn in eligible: 408 for fn in eligible:
@@ -410,7 +422,7 @@ class TaskData:
410 (takes item names from RDEPENDS/PACKAGES namespace) 422 (takes item names from RDEPENDS/PACKAGES namespace)
411 """ 423 """
412 424
413 if item in dataCache.ignored_dependencies: 425 if re_match_strings(item, dataCache.ignored_dependencies):
414 return 426 return
415 427
416 if self.have_runtime_target(item): 428 if self.have_runtime_target(item):
@@ -420,7 +432,7 @@ class TaskData:
420 432
421 if not all_p: 433 if not all_p:
422 bb.msg.error(bb.msg.domain.Provider, "'%s' RDEPENDS/RRECOMMENDS or otherwise requires the runtime entity '%s' but it wasn't found in any PACKAGE or RPROVIDES variables" % (self.get_rdependees_str(item), item)) 434 bb.msg.error(bb.msg.domain.Provider, "'%s' RDEPENDS/RRECOMMENDS or otherwise requires the runtime entity '%s' but it wasn't found in any PACKAGE or RPROVIDES variables" % (self.get_rdependees_str(item), item))
423 bb.event.fire(bb.event.NoProvider(item, cfgData, runtime=True)) 435 bb.event.fire(bb.event.NoProvider(item, runtime=True), cfgData)
424 raise bb.providers.NoRProvider(item) 436 raise bb.providers.NoRProvider(item)
425 437
426 eligible, numberPreferred = bb.providers.filterProvidersRunTime(all_p, item, cfgData, dataCache) 438 eligible, numberPreferred = bb.providers.filterProvidersRunTime(all_p, item, cfgData, dataCache)
@@ -428,7 +440,7 @@ class TaskData:
428 440
429 if not eligible: 441 if not eligible:
430 bb.msg.error(bb.msg.domain.Provider, "'%s' RDEPENDS/RRECOMMENDS or otherwise requires the runtime entity '%s' but it wasn't found in any PACKAGE or RPROVIDES variables of any buildable targets.\nEnable debugging and see earlier logs to find unbuildable targets." % (self.get_rdependees_str(item), item)) 442 bb.msg.error(bb.msg.domain.Provider, "'%s' RDEPENDS/RRECOMMENDS or otherwise requires the runtime entity '%s' but it wasn't found in any PACKAGE or RPROVIDES variables of any buildable targets.\nEnable debugging and see earlier logs to find unbuildable targets." % (self.get_rdependees_str(item), item))
431 bb.event.fire(bb.event.NoProvider(item, cfgData, runtime=True)) 443 bb.event.fire(bb.event.NoProvider(item, runtime=True), cfgData)
432 raise bb.providers.NoRProvider(item) 444 raise bb.providers.NoRProvider(item)
433 445
434 if len(eligible) > 1 and numberPreferred == 0: 446 if len(eligible) > 1 and numberPreferred == 0:
@@ -438,7 +450,7 @@ class TaskData:
438 providers_list.append(dataCache.pkg_fn[fn]) 450 providers_list.append(dataCache.pkg_fn[fn])
439 bb.msg.note(2, bb.msg.domain.Provider, "multiple providers are available for runtime %s (%s);" % (item, ", ".join(providers_list))) 451 bb.msg.note(2, bb.msg.domain.Provider, "multiple providers are available for runtime %s (%s);" % (item, ", ".join(providers_list)))
440 bb.msg.note(2, bb.msg.domain.Provider, "consider defining a PREFERRED_PROVIDER entry to match runtime %s" % item) 452 bb.msg.note(2, bb.msg.domain.Provider, "consider defining a PREFERRED_PROVIDER entry to match runtime %s" % item)
441 bb.event.fire(bb.event.MultipleProviders(item,providers_list, cfgData, runtime=True)) 453 bb.event.fire(bb.event.MultipleProviders(item,providers_list, runtime=True), cfgData)
442 self.consider_msgs_cache.append(item) 454 self.consider_msgs_cache.append(item)
443 455
444 if numberPreferred > 1: 456 if numberPreferred > 1:
@@ -448,7 +460,7 @@ class TaskData:
448 providers_list.append(dataCache.pkg_fn[fn]) 460 providers_list.append(dataCache.pkg_fn[fn])
449 bb.msg.note(2, bb.msg.domain.Provider, "multiple providers are available for runtime %s (top %s entries preferred) (%s);" % (item, numberPreferred, ", ".join(providers_list))) 461 bb.msg.note(2, bb.msg.domain.Provider, "multiple providers are available for runtime %s (top %s entries preferred) (%s);" % (item, numberPreferred, ", ".join(providers_list)))
450 bb.msg.note(2, bb.msg.domain.Provider, "consider defining only one PREFERRED_PROVIDER entry to match runtime %s" % item) 462 bb.msg.note(2, bb.msg.domain.Provider, "consider defining only one PREFERRED_PROVIDER entry to match runtime %s" % item)
451 bb.event.fire(bb.event.MultipleProviders(item,providers_list, cfgData, runtime=True)) 463 bb.event.fire(bb.event.MultipleProviders(item,providers_list, runtime=True), cfgData)
452 self.consider_msgs_cache.append(item) 464 self.consider_msgs_cache.append(item)
453 465
454 # run through the list until we find one that we can build 466 # run through the list until we find one that we can build
diff --git a/bitbake-dev/lib/bb/ui/__init__.py b/bitbake/lib/bb/ui/__init__.py
index c6a377a8e6..c6a377a8e6 100644
--- a/bitbake-dev/lib/bb/ui/__init__.py
+++ b/bitbake/lib/bb/ui/__init__.py
diff --git a/bitbake-dev/lib/bb/ui/crumbs/__init__.py b/bitbake/lib/bb/ui/crumbs/__init__.py
index c6a377a8e6..c6a377a8e6 100644
--- a/bitbake-dev/lib/bb/ui/crumbs/__init__.py
+++ b/bitbake/lib/bb/ui/crumbs/__init__.py
diff --git a/bitbake-dev/lib/bb/ui/crumbs/buildmanager.py b/bitbake/lib/bb/ui/crumbs/buildmanager.py
index f89e8eefd4..f89e8eefd4 100644
--- a/bitbake-dev/lib/bb/ui/crumbs/buildmanager.py
+++ b/bitbake/lib/bb/ui/crumbs/buildmanager.py
diff --git a/bitbake-dev/lib/bb/ui/crumbs/puccho.glade b/bitbake/lib/bb/ui/crumbs/puccho.glade
index d7553a6e14..d7553a6e14 100644
--- a/bitbake-dev/lib/bb/ui/crumbs/puccho.glade
+++ b/bitbake/lib/bb/ui/crumbs/puccho.glade
diff --git a/bitbake-dev/lib/bb/ui/crumbs/runningbuild.py b/bitbake/lib/bb/ui/crumbs/runningbuild.py
index 401559255b..401559255b 100644
--- a/bitbake-dev/lib/bb/ui/crumbs/runningbuild.py
+++ b/bitbake/lib/bb/ui/crumbs/runningbuild.py
diff --git a/bitbake-dev/lib/bb/ui/depexp.py b/bitbake/lib/bb/ui/depexp.py
index cfa5b6564e..cfa5b6564e 100644
--- a/bitbake-dev/lib/bb/ui/depexp.py
+++ b/bitbake/lib/bb/ui/depexp.py
diff --git a/bitbake-dev/lib/bb/ui/goggle.py b/bitbake/lib/bb/ui/goggle.py
index 94995d82db..94995d82db 100644
--- a/bitbake-dev/lib/bb/ui/goggle.py
+++ b/bitbake/lib/bb/ui/goggle.py
diff --git a/bitbake-dev/lib/bb/ui/knotty.py b/bitbake/lib/bb/ui/knotty.py
index c69fd6ca64..c69fd6ca64 100644
--- a/bitbake-dev/lib/bb/ui/knotty.py
+++ b/bitbake/lib/bb/ui/knotty.py
diff --git a/bitbake-dev/lib/bb/ui/ncurses.py b/bitbake/lib/bb/ui/ncurses.py
index 14310dc124..14310dc124 100644
--- a/bitbake-dev/lib/bb/ui/ncurses.py
+++ b/bitbake/lib/bb/ui/ncurses.py
diff --git a/bitbake-dev/lib/bb/ui/puccho.py b/bitbake/lib/bb/ui/puccho.py
index 713aa1f4a6..713aa1f4a6 100644
--- a/bitbake-dev/lib/bb/ui/puccho.py
+++ b/bitbake/lib/bb/ui/puccho.py
diff --git a/bitbake-dev/lib/bb/ui/uievent.py b/bitbake/lib/bb/ui/uievent.py
index 36302f4da7..36302f4da7 100644
--- a/bitbake-dev/lib/bb/ui/uievent.py
+++ b/bitbake/lib/bb/ui/uievent.py
diff --git a/bitbake-dev/lib/bb/ui/uihelper.py b/bitbake/lib/bb/ui/uihelper.py
index 151ffc5854..151ffc5854 100644
--- a/bitbake-dev/lib/bb/ui/uihelper.py
+++ b/bitbake/lib/bb/ui/uihelper.py
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py
index 3017ecfa4a..5fc1463e67 100644
--- a/bitbake/lib/bb/utils.py
+++ b/bitbake/lib/bb/utils.py
@@ -21,8 +21,9 @@ BitBake Utility Functions
21 21
22digits = "0123456789" 22digits = "0123456789"
23ascii_letters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" 23ascii_letters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
24separators = ".-"
24 25
25import re, fcntl, os 26import re, fcntl, os, types
26 27
27def explode_version(s): 28def explode_version(s):
28 r = [] 29 r = []
@@ -39,12 +40,15 @@ def explode_version(s):
39 r.append(m.group(1)) 40 r.append(m.group(1))
40 s = m.group(2) 41 s = m.group(2)
41 continue 42 continue
43 r.append(s[0])
42 s = s[1:] 44 s = s[1:]
43 return r 45 return r
44 46
45def vercmp_part(a, b): 47def vercmp_part(a, b):
46 va = explode_version(a) 48 va = explode_version(a)
47 vb = explode_version(b) 49 vb = explode_version(b)
50 sa = False
51 sb = False
48 while True: 52 while True:
49 if va == []: 53 if va == []:
50 ca = None 54 ca = None
@@ -56,6 +60,16 @@ def vercmp_part(a, b):
56 cb = vb.pop(0) 60 cb = vb.pop(0)
57 if ca == None and cb == None: 61 if ca == None and cb == None:
58 return 0 62 return 0
63
64 if type(ca) is types.StringType:
65 sa = ca in separators
66 if type(cb) is types.StringType:
67 sb = cb in separators
68 if sa and not sb:
69 return -1
70 if not sa and sb:
71 return 1
72
59 if ca > cb: 73 if ca > cb:
60 return 1 74 return 1
61 if ca < cb: 75 if ca < cb:
@@ -151,7 +165,7 @@ def better_compile(text, file, realfile):
151 165
152 # split the text into lines again 166 # split the text into lines again
153 body = text.split('\n') 167 body = text.split('\n')
154 bb.msg.error(bb.msg.domain.Util, "Error in compiling: ", realfile) 168 bb.msg.error(bb.msg.domain.Util, "Error in compiling python function in: ", realfile)
155 bb.msg.error(bb.msg.domain.Util, "The lines resulting into this error were:") 169 bb.msg.error(bb.msg.domain.Util, "The lines resulting into this error were:")
156 bb.msg.error(bb.msg.domain.Util, "\t%d:%s:'%s'" % (e.lineno, e.__class__.__name__, body[e.lineno-1])) 170 bb.msg.error(bb.msg.domain.Util, "\t%d:%s:'%s'" % (e.lineno, e.__class__.__name__, body[e.lineno-1]))
157 171
@@ -176,7 +190,7 @@ def better_exec(code, context, text, realfile):
176 raise 190 raise
177 191
178 # print the Header of the Error Message 192 # print the Header of the Error Message
179 bb.msg.error(bb.msg.domain.Util, "Error in executing: %s" % realfile) 193 bb.msg.error(bb.msg.domain.Util, "Error in executing python function in: %s" % realfile)
180 bb.msg.error(bb.msg.domain.Util, "Exception:%s Message:%s" % (t,value) ) 194 bb.msg.error(bb.msg.domain.Util, "Exception:%s Message:%s" % (t,value) )
181 195
182 # let us find the line number now 196 # let us find the line number now